Compare commits

...

28 commits
beta ... master

Author SHA1 Message Date
Iván Ovejero
10dbf32596
feat(core): Scale expression isolate pool to 0 after inactivity (#28472)
Some checks are pending
Build: Benchmark Image / build (push) Waiting to run
CI: Master (Build, Test, Lint) / Build for Github Cache (push) Waiting to run
CI: Master (Build, Test, Lint) / Unit tests (push) Waiting to run
CI: Master (Build, Test, Lint) / Lint (push) Waiting to run
CI: Master (Build, Test, Lint) / Performance (push) Waiting to run
CI: Master (Build, Test, Lint) / Notify Slack on failure (push) Blocked by required conditions
Util: Sync API Docs / sync-public-api (push) Waiting to run
Co-authored-by: Danny Martini <danny@n8n.io>
2026-04-21 15:20:01 +00:00
RomanDavydchuk
4869e0a463
fix(editor): HTTP request node showing warning about credentials not set when they are set (#28270) 2026-04-21 15:16:08 +00:00
Irénée
3bd7a2847c
feat(core): Make SSO connection settings configurable via env vars (#28714)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2026-04-21 15:14:00 +00:00
Dimitri Lavrenük
9494f41c34
feat: Track computer use connect events (no-changelog) (#28815) 2026-04-21 14:49:48 +00:00
RomanDavydchuk
713c4981b7
fix(editor): Move tooltip for required RMC fields to the right (#28803) 2026-04-21 14:44:45 +00:00
Albert Alises
6db02fe928
fix(MCP Server Trigger Node): Only return error name and message in tool error responses (#28791)
Co-authored-by: Anand Reddy Jonnalagadda <15153801+joan1011@users.noreply.github.com>
2026-04-21 13:43:20 +00:00
Alex Grozav
a88f847708
refactor(editor): Migrate nodeMetadata to workflowDocumentStore (no-changelog) (#28788) 2026-04-21 13:22:52 +00:00
Svetoslav Dekov
7d74c1f04b
fix(editor): Resolve node parameter defaults in Instance AI setup wizard (no-changelog) (#28800)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-21 13:04:15 +00:00
Daria
b1ca129496
feat(core): Broadcast workflow updates from MCP tools to open editors (#28709) 2026-04-21 12:26:56 +00:00
Michael Kret
8e49800421
fix: Additional keys in routing nodes (#28758) 2026-04-21 12:24:43 +00:00
Albert Alises
782b2d18b2
fix(ai-builder): Prevent duplicate workflow creation on parallel submits in instance AI (#28793) 2026-04-21 12:21:48 +00:00
Milorad FIlipović
76358a60be
fix(editor): Allow name parameters to be defined by AI (#28763) 2026-04-21 11:52:25 +00:00
Jaakko Husso
86ceb68a05
feat(core): Include workflow names on instance AI confirmations (no-changelog) (#28719)
Co-authored-by: Albert Alises <albert.alises@gmail.com>
2026-04-21 11:24:16 +00:00
Jaakko Husso
2d624a521e
fix(core): Generate title once there's enough user context (#28721) 2026-04-21 10:28:19 +00:00
Matsuuu
ba2c5488c7
Merge tag 'n8n@2.18.0' 2026-04-21 13:32:15 +03:00
Daria
d1c7b31237
fix: Stop persisting client id in session storage to fix duplicate tab problem (no-changelog) (#28769) 2026-04-21 10:02:43 +00:00
Ricardo Espinoza
26ecadcf94
fix(core): Use upsert for MCP OAuth consent to allow re-authorization (#28703) 2026-04-21 09:58:01 +00:00
Svetoslav Dekov
45b5b9e383
fix(editor): Fix instance-ai setup parameter issues not resetting on input (no-changelog) (#28689) 2026-04-21 09:55:29 +00:00
Matsu
cb9882ce9c
ci: Run ci-pr-quality only on n8n team PRs (#28773) 2026-04-21 09:50:16 +00:00
Jaakko Husso
6592ed8047
refactor(core): Move instance AI user settings under actual user settings (no-changelog) (#28706) 2026-04-21 09:47:36 +00:00
Michael Kret
92f1dac835
chore(Microsoft Agent 365 Trigger Node): Change label on toggle to enable Microsoft MCP Servers (#28766) 2026-04-21 09:38:33 +00:00
Vitalii Borovyk
a88ee76553
fix(MongoDB Chat Memory Node): Add connection pool limit (#28042)
Co-authored-by: Eugene <eugene@n8n.io>
2026-04-21 09:21:40 +00:00
Suguru Inoue
b444a95e11
refactor(editor): Migrate workflow object usages (#28534) 2026-04-21 09:17:45 +00:00
Declan Carroll
5e8002ab28
test: Refactor test workflow initialization (#28772) 2026-04-21 09:15:26 +00:00
Guillaume Jacquart
c012b52ac2
feat(core): Bootstrap encryption key set from environment (#28716)
Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
2026-04-21 09:13:11 +00:00
Garrit Franke
fc5424477d
feat(core): Add require-node-api-error ESLint rule for community nodes (no-changelog) (#28454) 2026-04-21 09:12:51 +00:00
Jaakko Husso
cb1244c041
refactor: Use napi-rs/image instead of sharp for screenshots (#28586) 2026-04-21 09:12:14 +00:00
Albert Alises
9ea2ef1840
fix(core): Hide pre-resolved setup requests from Instance AI wizard (#28731) 2026-04-21 08:34:59 +00:00
239 changed files with 6638 additions and 2464 deletions

View file

@ -48,6 +48,7 @@ jobs:
# by checking the checkbox in the PR summary.
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.head.repo.full_name == github.repository &&
!contains(github.event.pull_request.labels.*.name, 'automation:backport') &&
!contains(github.event.pull_request.title, '(backport to')
runs-on: ubuntu-latest
@ -76,6 +77,7 @@ jobs:
# Allows for override via '/size-limit-override' comment
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.head.repo.full_name == github.repository &&
!contains(github.event.pull_request.labels.*.name, 'automation:backport') &&
!contains(github.event.pull_request.title, '(backport to')
runs-on: ubuntu-latest

View file

@ -31,15 +31,15 @@ describe('generateTitleFromMessage', () => {
expect(mockGenerateText).not.toHaveBeenCalled();
});
it('returns the message itself for trivial greetings without calling the LLM', async () => {
it('returns null for trivial greetings without calling the LLM', async () => {
const result = await generateTitleFromMessage(fakeModel, 'hey');
expect(result).toBe('hey');
expect(result).toBeNull();
expect(mockGenerateText).not.toHaveBeenCalled();
});
it('skips the LLM for short multi-word messages', async () => {
it('returns null for short multi-word messages without calling the LLM', async () => {
const result = await generateTitleFromMessage(fakeModel, 'hi there');
expect(result).toBe('hi there');
expect(result).toBeNull();
expect(mockGenerateText).not.toHaveBeenCalled();
});

View file

@ -22,10 +22,10 @@ const TRIVIAL_MESSAGE_MAX_WORDS = 3;
const MAX_TITLE_LENGTH = 80;
/**
* Whether a user message is too trivial to bother sending to an LLM for
* title generation (e.g. "hey", "hello"). For these, the LLM tends to
* hallucinate an assistant-voice reply as the title instead of echoing
* the user intent — it's better to just use the message itself.
* Whether a user message has too little substance to title a conversation
* (e.g. "hey", "hello"). For these, the LLM tends to hallucinate an
* assistant-voice reply as the title — better to signal "defer, not enough
* signal yet" so the caller can retry once more context accumulates.
*/
function isTrivialMessage(message: string): boolean {
const normalized = message.trim();
@ -69,7 +69,7 @@ export async function generateTitleFromMessage(
if (!trimmed) return null;
if (isTrivialMessage(trimmed)) {
return sanitizeTitle(trimmed) || null;
return null;
}
const result = await generateText({

View file

@ -39,11 +39,11 @@
"@jitsi/robotjs": "^0.6.21",
"@modelcontextprotocol/sdk": "1.26.0",
"@n8n/mcp-browser": "workspace:*",
"@napi-rs/image": "^1.12.0",
"@vscode/ripgrep": "^1.17.1",
"eventsource": "^3.0.6",
"node-screenshots": "^0.2.8",
"picocolors": "catalog:",
"sharp": "^0.34.5",
"yargs-parser": "21.1.1",
"zod": "catalog:",
"zod-to-json-schema": "catalog:"

View file

@ -1,18 +0,0 @@
// Minimal ambient type shim for the `sharp` image library: declares only the
// surface this package actually uses (raw-pixel input, resize, png/jpeg
// encoding, buffer output, metadata). Not a full typing of sharp's API.
declare module 'sharp' {
	interface Sharp {
		resize(width: number, height?: number): Sharp;
		png(): Sharp;
		jpeg(options?: { quality?: number }): Sharp;
		toBuffer(): Promise<Buffer>;
		metadata(): Promise<{ width?: number; height?: number; format?: string }>;
	}
	interface SharpOptions {
		// Interpret the input Buffer as raw pixel data with the given geometry.
		raw?: { width: number; height: number; channels: 1 | 2 | 3 | 4 };
	}
	function sharp(input?: Buffer | string, options?: SharpOptions): Sharp;
	// eslint-disable-next-line import-x/no-default-export
	export default sharp;
}

View file

@ -5,11 +5,14 @@ import { screenshotTool, screenshotRegionTool } from './screenshot';
jest.mock('node-screenshots');
const mockSharp = jest.fn<unknown, unknown[]>();
jest.mock('sharp', () => ({
const mockFromRgbaPixels = jest.fn<unknown, unknown[]>();
jest.mock('@napi-rs/image', () => ({
__esModule: true,
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
default: (...args: unknown[]) => mockSharp(...args),
// eslint-disable-next-line @typescript-eslint/naming-convention
Transformer: {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
fromRgbaPixels: (...args: unknown[]) => mockFromRgbaPixels(...args),
},
}));
const MockMonitor = Monitor as jest.MockedClass<typeof Monitor>;
@ -75,13 +78,11 @@ function makeMockMonitor(opts: {
}
beforeEach(() => {
// sharp(buffer, opts)[.resize()].jpeg().toBuffer() → fake JPEG
const mockToBuffer = jest.fn().mockResolvedValue(Buffer.from('fake-jpeg'));
const mockJpeg = jest.fn().mockReturnValue({ toBuffer: mockToBuffer });
const mockJpeg = jest.fn().mockResolvedValue(Buffer.from('fake-jpeg'));
const mockResize = jest.fn();
const pipeline = { resize: mockResize, jpeg: mockJpeg };
mockResize.mockReturnValue(pipeline);
mockSharp.mockReturnValue(pipeline);
mockFromRgbaPixels.mockReturnValue(pipeline);
});
describe('screen_screenshot tool', () => {
@ -136,7 +137,7 @@ describe('screen_screenshot tool', () => {
await screenshotTool.execute({}, DUMMY_CONTEXT);
const pipeline = mockSharp.mock.results[0].value as { resize: jest.Mock };
const pipeline = mockFromRgbaPixels.mock.results[0].value as { resize: jest.Mock };
expect(pipeline.resize).toHaveBeenCalledWith(1920, 1080);
});
@ -151,7 +152,7 @@ describe('screen_screenshot tool', () => {
await screenshotTool.execute({}, DUMMY_CONTEXT);
const pipeline = mockSharp.mock.results[0].value as { resize: jest.Mock };
const pipeline = mockFromRgbaPixels.mock.results[0].value as { resize: jest.Mock };
// No HiDPI resize, but LLM downscale kicks in (1920x1080 → 1024x576)
expect(pipeline.resize).toHaveBeenCalledWith(1024, 576);
});
@ -252,7 +253,7 @@ describe('screen_screenshot_region tool', () => {
await screenshotRegionTool.execute({ x: 100, y: 200, width: 400, height: 300 }, DUMMY_CONTEXT);
// Cropped image (800×600 physical) must be resized to logical 400×300
const pipeline = mockSharp.mock.results[0].value as { resize: jest.Mock };
const pipeline = mockFromRgbaPixels.mock.results[0].value as { resize: jest.Mock };
expect(pipeline.resize).toHaveBeenCalledWith(400, 300);
});
});

View file

@ -19,8 +19,8 @@ async function toJpeg(
logicalWidth?: number,
logicalHeight?: number,
): Promise<Buffer> {
const { default: sharp } = await import('sharp');
let pipeline = sharp(rawBuffer, { raw: { width, height, channels: 4 } });
const { Transformer } = await import('@napi-rs/image');
let pipeline = Transformer.fromRgbaPixels(rawBuffer, width, height);
if (logicalWidth && logicalHeight && (width !== logicalWidth || height !== logicalHeight)) {
pipeline = pipeline.resize(logicalWidth, logicalHeight);
}
@ -32,7 +32,7 @@ async function toJpeg(
const scale = maxDim / Math.max(w, h);
pipeline = pipeline.resize(Math.round(w * scale), Math.round(h * scale));
}
return await pipeline.jpeg({ quality: 85 }).toBuffer();
return await pipeline.jpeg(85);
}
export const screenshotTool: ToolDefinition<typeof screenshotSchema> = {

View file

@ -20,6 +20,21 @@ describe('ExpressionEngineConfig', () => {
});
});
describe('N8N_EXPRESSION_ENGINE_IDLE_TIMEOUT', () => {
test('overrides idleTimeout', () => {
jest.replaceProperty(process, 'env', { N8N_EXPRESSION_ENGINE_IDLE_TIMEOUT: '60' });
const config = Container.get(ExpressionEngineConfig);
expect(config.idleTimeout).toBe(60);
});
test('parses "0" as the number 0 (distinct from undefined/unset)', () => {
jest.replaceProperty(process, 'env', { N8N_EXPRESSION_ENGINE_IDLE_TIMEOUT: '0' });
const config = Container.get(ExpressionEngineConfig);
expect(config.idleTimeout).toBe(0);
expect(config.idleTimeout).not.toBeUndefined();
});
});
describe('N8N_EXPRESSION_ENGINE_TIMEOUT', () => {
test('overrides bridgeTimeout', () => {
jest.replaceProperty(process, 'env', { N8N_EXPRESSION_ENGINE_TIMEOUT: '1000' });

View file

@ -33,4 +33,8 @@ export class ExpressionEngineConfig {
/** Memory limit in MB for the V8 isolate used by the VM bridge. */
@Env('N8N_EXPRESSION_ENGINE_MEMORY_LIMIT')
bridgeMemoryLimit: number = 128;
/** If set, scale the pool to 0 warm isolates after this many seconds with no acquire. */
@Env('N8N_EXPRESSION_ENGINE_IDLE_TIMEOUT')
idleTimeout?: number;
}

View file

@ -75,4 +75,17 @@ export class InstanceSettingsLoaderConfig {
@Env('N8N_PERSONAL_SPACE_SHARING_ENABLED')
personalSpaceSharingEnabled: boolean = true;
// --- SAML ---
/** XML metadata string from the identity provider. */
@Env('N8N_SSO_SAML_METADATA')
samlMetadata: string = '';
/** URL to fetch SAML metadata from (mutually exclusive with metadata). */
@Env('N8N_SSO_SAML_METADATA_URL')
samlMetadataUrl: string = '';
@Env('N8N_SSO_SAML_LOGIN_ENABLED')
samlLoginEnabled: boolean = false;
}

View file

@ -40,6 +40,7 @@ export const LOG_SCOPES = [
'instance-version-history',
'instance-settings-loader',
'instance-registry',
'encryption-key-manager',
] as const;
export type LogScope = (typeof LOG_SCOPES)[number];

View file

@ -515,6 +515,9 @@ describe('GlobalConfig', () => {
mfaEnforcedEnabled: false,
personalSpacePublishingEnabled: true,
personalSpaceSharingEnabled: true,
samlMetadata: '',
samlMetadataUrl: '',
samlLoginEnabled: false,
},
} satisfies GlobalConfigShape;

View file

@ -41,30 +41,32 @@ export default [
✅ Set in the `recommended` configuration.\
☑️ Set in the `recommendedWithoutN8nCloudSupport` configuration.\
🔧 Automatically fixable by the [`--fix` CLI option](https://eslint.org/docs/user-guide/command-line-interface#--fix).\
💡 Manually fixable by [editor suggestions](https://eslint.org/docs/latest/use/core-concepts#rule-suggestions).
💡 Manually fixable by [editor suggestions](https://eslint.org/docs/latest/use/core-concepts#rule-suggestions).\
❌ Deprecated.
| Name                                | Description | 💼 | ⚠️ | 🔧 | 💡 |
| :--------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------ | :--- | :--- | :- | :- |
| [ai-node-package-json](docs/rules/ai-node-package-json.md) | Enforce consistency between n8n.aiNodeSdkVersion and ai-node-sdk peer dependency in community node packages | ✅ ☑️ | | | |
| [cred-class-field-icon-missing](docs/rules/cred-class-field-icon-missing.md) | Credential class must have an `icon` property defined | ✅ ☑️ | | | 💡 |
| [credential-documentation-url](docs/rules/credential-documentation-url.md) | Enforce valid credential documentationUrl format (URL or lowercase alphanumeric slug) | ✅ ☑️ | | 🔧 | |
| [credential-password-field](docs/rules/credential-password-field.md) | Ensure credential fields with sensitive names have typeOptions.password = true | ✅ ☑️ | | 🔧 | |
| [credential-test-required](docs/rules/credential-test-required.md) | Ensure credentials have a credential test | ✅ ☑️ | | | 💡 |
| [icon-validation](docs/rules/icon-validation.md) | Validate node and credential icon files exist, are SVG format, and light/dark icons are different | ✅ ☑️ | | | 💡 |
| [missing-paired-item](docs/rules/missing-paired-item.md) | Require pairedItem on INodeExecutionData objects in execute() methods to preserve item linking. | ✅ ☑️ | | | |
| [no-credential-reuse](docs/rules/no-credential-reuse.md) | Prevent credential re-use security issues by ensuring nodes only reference credentials from the same package | ✅ ☑️ | | | 💡 |
| [no-deprecated-workflow-functions](docs/rules/no-deprecated-workflow-functions.md) | Disallow usage of deprecated functions and types from n8n-workflow package | ✅ ☑️ | | | 💡 |
| [no-forbidden-lifecycle-scripts](docs/rules/no-forbidden-lifecycle-scripts.md) | Ban lifecycle scripts (prepare, preinstall, postinstall, etc.) in community node packages | ✅ ☑️ | | | |
| [no-http-request-with-manual-auth](docs/rules/no-http-request-with-manual-auth.md) | Disallow this.helpers.httpRequest() in functions that call this.getCredentials(). Use this.helpers.httpRequestWithAuthentication() instead. | ✅ ☑️ | | | |
| [no-restricted-globals](docs/rules/no-restricted-globals.md) | Disallow usage of restricted global variables in community nodes. | ✅ | | | |
| [no-restricted-imports](docs/rules/no-restricted-imports.md) | Disallow usage of restricted imports in community nodes. | ✅ | | | |
| [node-class-description-icon-missing](docs/rules/node-class-description-icon-missing.md) | **Deprecated.** Node class description must have an `icon` property defined. Use `require-node-description-fields` instead. | ✅ ☑️ | | | 💡 |
| [node-connection-type-literal](docs/rules/node-connection-type-literal.md) | Disallow string literals in node description `inputs`/`outputs` — use `NodeConnectionTypes` enum instead | ✅ ☑️ | | 🔧 | |
| [node-usable-as-tool](docs/rules/node-usable-as-tool.md) | Ensure node classes have usableAsTool property | ✅ ☑️ | | 🔧 | |
| [options-sorted-alphabetically](docs/rules/options-sorted-alphabetically.md) | Enforce alphabetical ordering of options arrays in n8n node properties | | ✅ ☑️ | | |
| [package-name-convention](docs/rules/package-name-convention.md) | Enforce correct package naming convention for n8n community nodes | ✅ ☑️ | | | 💡 |
| [require-continue-on-fail](docs/rules/require-continue-on-fail.md) | Require continueOnFail() handling in execute() methods of node classes | ✅ ☑️ | | | |
| [require-node-description-fields](docs/rules/require-node-description-fields.md) | Node class description must define all required fields | ✅ ☑️ | | | |
| [resource-operation-pattern](docs/rules/resource-operation-pattern.md) | Enforce proper resource/operation pattern for better UX in n8n nodes | | ✅ ☑️ | | |
| Name                                | Description | 💼 | ⚠️ | 🔧 | 💡 | ❌ |
| :--------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------------------------------------------ | :--- | :--- | :- | :- | :- |
| [ai-node-package-json](docs/rules/ai-node-package-json.md) | Enforce consistency between n8n.aiNodeSdkVersion and ai-node-sdk peer dependency in community node packages | ✅ ☑️ | | | | |
| [cred-class-field-icon-missing](docs/rules/cred-class-field-icon-missing.md) | Credential class must have an `icon` property defined | ✅ ☑️ | | | 💡 | |
| [credential-documentation-url](docs/rules/credential-documentation-url.md) | Enforce valid credential documentationUrl format (URL or lowercase alphanumeric slug) | ✅ ☑️ | | 🔧 | | |
| [credential-password-field](docs/rules/credential-password-field.md) | Ensure credential fields with sensitive names have typeOptions.password = true | ✅ ☑️ | | 🔧 | | |
| [credential-test-required](docs/rules/credential-test-required.md) | Ensure credentials have a credential test | ✅ ☑️ | | | 💡 | |
| [icon-validation](docs/rules/icon-validation.md) | Validate node and credential icon files exist, are SVG format, and light/dark icons are different | ✅ ☑️ | | | 💡 | |
| [missing-paired-item](docs/rules/missing-paired-item.md) | Require pairedItem on INodeExecutionData objects in execute() methods to preserve item linking. | ✅ ☑️ | | | | |
| [no-credential-reuse](docs/rules/no-credential-reuse.md) | Prevent credential re-use security issues by ensuring nodes only reference credentials from the same package | ✅ ☑️ | | | 💡 | |
| [no-deprecated-workflow-functions](docs/rules/no-deprecated-workflow-functions.md) | Disallow usage of deprecated functions and types from n8n-workflow package | ✅ ☑️ | | | 💡 | |
| [no-forbidden-lifecycle-scripts](docs/rules/no-forbidden-lifecycle-scripts.md) | Ban lifecycle scripts (prepare, preinstall, postinstall, etc.) in community node packages | ✅ ☑️ | | | | |
| [no-http-request-with-manual-auth](docs/rules/no-http-request-with-manual-auth.md) | Disallow this.helpers.httpRequest() in functions that call this.getCredentials(). Use this.helpers.httpRequestWithAuthentication() instead. | ✅ ☑️ | | | | |
| [no-restricted-globals](docs/rules/no-restricted-globals.md) | Disallow usage of restricted global variables in community nodes. | ✅ | | | | |
| [no-restricted-imports](docs/rules/no-restricted-imports.md) | Disallow usage of restricted imports in community nodes. | ✅ | | | | |
| [node-class-description-icon-missing](docs/rules/node-class-description-icon-missing.md) | Node class description must have an `icon` property defined. Deprecated: use `require-node-description-fields` instead. | | | | 💡 | ❌ |
| [node-connection-type-literal](docs/rules/node-connection-type-literal.md) | Disallow string literals in node description `inputs`/`outputs` — use `NodeConnectionTypes` enum instead | ✅ ☑️ | | 🔧 | | |
| [node-usable-as-tool](docs/rules/node-usable-as-tool.md) | Ensure node classes have usableAsTool property | ✅ ☑️ | | 🔧 | | |
| [options-sorted-alphabetically](docs/rules/options-sorted-alphabetically.md) | Enforce alphabetical ordering of options arrays in n8n node properties | | ✅ ☑️ | | | |
| [package-name-convention](docs/rules/package-name-convention.md) | Enforce correct package naming convention for n8n community nodes | ✅ ☑️ | | | 💡 | |
| [require-continue-on-fail](docs/rules/require-continue-on-fail.md) | Require continueOnFail() handling in execute() methods of node classes | ✅ ☑️ | | | | |
| [require-node-api-error](docs/rules/require-node-api-error.md) | Require NodeApiError or NodeOperationError for error wrapping in catch blocks. Raw errors lose HTTP context in the n8n UI. | ✅ ☑️ | | | | |
| [require-node-description-fields](docs/rules/require-node-description-fields.md) | Node class description must define all required fields: icon, subtitle | ✅ ☑️ | | | | |
| [resource-operation-pattern](docs/rules/resource-operation-pattern.md) | Enforce proper resource/operation pattern for better UX in n8n nodes | | ✅ ☑️ | | | |
<!-- end auto-generated rules list -->

View file

@ -1,11 +1,13 @@
# Node class description must have an `icon` property defined (`@n8n/community-nodes/node-class-description-icon-missing`)
# Node class description must have an `icon` property defined. Deprecated: use `require-node-description-fields` instead (`@n8n/community-nodes/node-class-description-icon-missing`)
❌ This rule is **deprecated**. Use [`require-node-description-fields`](require-node-description-fields.md) instead.
❌ This rule is deprecated.
💡 This rule is manually fixable by [editor suggestions](https://eslint.org/docs/latest/use/core-concepts#rule-suggestions).
<!-- end auto-generated rule header -->
> **Deprecated:** Use [`require-node-description-fields`](require-node-description-fields.md) instead.
## Rule Details
Validates that node classes define an `icon` property in their `description` object. Icons are required for nodes to display correctly in the n8n editor.

View file

@ -0,0 +1,62 @@
# Require NodeApiError or NodeOperationError for error wrapping in catch blocks. Raw errors lose HTTP context in the n8n UI (`@n8n/community-nodes/require-node-api-error`)
💼 This rule is enabled in the following configs: ✅ `recommended`, ☑️ `recommendedWithoutN8nCloudSupport`.
<!-- end auto-generated rule header -->
## Rule Details
When errors are caught and re-thrown in n8n nodes, they must be wrapped in
`NodeApiError` or `NodeOperationError`. Raw re-throws and generic `Error`
constructors lose HTTP context (status code, response body, etc.) that the n8n
UI relies on to display meaningful error information to users.
## Examples
### Incorrect
```js
try {
await apiRequest();
} catch (error) {
throw error;
}
```
```js
try {
await apiRequest();
} catch (error) {
throw new Error('Request failed');
}
```
### Correct
```js
try {
await apiRequest();
} catch (error) {
throw new NodeApiError(this.getNode(), error as JsonObject);
}
```
```js
try {
await apiRequest();
} catch (error) {
throw new NodeOperationError(this.getNode(), 'Operation failed', { itemIndex: i });
}
```
```js
try {
await apiRequest();
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ json: { error: error.message } });
continue;
}
throw new NodeApiError(this.getNode(), error as JsonObject);
}
```

View file

@ -1,4 +1,4 @@
# Node class description must define all required fields (`@n8n/community-nodes/require-node-description-fields`)
# Node class description must define all required fields: icon, subtitle (`@n8n/community-nodes/require-node-description-fields`)
💼 This rule is enabled in the following configs: ✅ `recommended`, ☑️ `recommendedWithoutN8nCloudSupport`.

View file

@ -40,6 +40,7 @@ const configs = {
'@n8n/community-nodes/missing-paired-item': 'error',
'@n8n/community-nodes/require-community-node-keyword': 'warn',
'@n8n/community-nodes/require-continue-on-fail': 'error',
'@n8n/community-nodes/require-node-api-error': 'error',
'@n8n/community-nodes/require-node-description-fields': 'error',
},
},
@ -67,6 +68,7 @@ const configs = {
'@n8n/community-nodes/missing-paired-item': 'error',
'@n8n/community-nodes/require-community-node-keyword': 'warn',
'@n8n/community-nodes/require-continue-on-fail': 'error',
'@n8n/community-nodes/require-node-api-error': 'error',
'@n8n/community-nodes/require-node-description-fields': 'error',
},
},

View file

@ -20,6 +20,7 @@ import { OptionsSortedAlphabeticallyRule } from './options-sorted-alphabetically
import { PackageNameConventionRule } from './package-name-convention.js';
import { RequireCommunityNodeKeywordRule } from './require-community-node-keyword.js';
import { RequireContinueOnFailRule } from './require-continue-on-fail.js';
import { RequireNodeApiErrorRule } from './require-node-api-error.js';
import { RequireNodeDescriptionFieldsRule } from './require-node-description-fields.js';
import { ResourceOperationPatternRule } from './resource-operation-pattern.js';
@ -45,5 +46,6 @@ export const rules = {
'missing-paired-item': MissingPairedItemRule,
'require-community-node-keyword': RequireCommunityNodeKeywordRule,
'require-continue-on-fail': RequireContinueOnFailRule,
'require-node-api-error': RequireNodeApiErrorRule,
'require-node-description-fields': RequireNodeDescriptionFieldsRule,
} satisfies Record<string, AnyRuleModule>;

View file

@ -0,0 +1,199 @@
import { RuleTester } from '@typescript-eslint/rule-tester';
import { RequireNodeApiErrorRule } from './require-node-api-error.js';
// Exercises @n8n/community-nodes/require-node-api-error: catch blocks in
// node/helper .ts files must rethrow NodeApiError/NodeOperationError rather
// than raw caught values or generic Error constructors.
const ruleTester = new RuleTester();

ruleTester.run('require-node-api-error', RequireNodeApiErrorRule, {
	// Cases the rule must accept unchanged.
	valid: [
		{
			name: 'throw NodeApiError in catch block',
			code: `
try {
await apiRequest();
} catch (error) {
throw new NodeApiError(this.getNode(), error as JsonObject);
}`,
		},
		{
			name: 'throw NodeOperationError in catch block',
			code: `
try {
await apiRequest();
} catch (error) {
throw new NodeOperationError(this.getNode(), 'Operation failed', { itemIndex: i });
}`,
		},
		{
			name: 'throw outside catch block (not in scope)',
			code: `
function validate(input: string) {
if (!input) {
throw new Error('Input required');
}
}`,
		},
		{
			name: 'throw new Error outside catch block (not in scope)',
			code: `
throw new Error('Something went wrong');`,
		},
		{
			name: 'continueOnFail pattern with NodeApiError',
			code: `
try {
responseData = await apiRequest.call(this, 'POST', '/tasks', body);
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ json: { error: error.message } });
continue;
}
throw new NodeApiError(this.getNode(), error as JsonObject);
}`,
		},
		{
			name: 'conditional handling then NodeApiError in else',
			code: `
try {
await ftp.put(data, path);
} catch (error) {
if (error.code === 553) {
await ftp.mkdir(dirPath, true);
await ftp.put(data, path);
} else {
throw new NodeApiError(this.getNode(), error as JsonObject);
}
}`,
		},
		{
			// Rethrowing through a local variable is allowed: only identifiers that
			// resolve to the catch parameter itself are reported.
			name: 'throw wrapped error stored in variable',
			code: `
try {
await apiRequest();
} catch (error) {
const wrapped = new NodeApiError(this.getNode(), error as JsonObject);
throw wrapped;
}`,
		},
		{
			// Scope resolution must distinguish the callback's own `error` parameter
			// from the outer catch parameter of the same name.
			name: 'shadowed variable with same name as catch param',
			code: `
try {
await apiRequest();
} catch (error) {
const fn = (error: Error) => {
throw error;
};
}`,
		},
		{
			name: 'no throw in catch block',
			code: `
try {
await apiRequest();
} catch (error) {
console.error(error);
}`,
		},
		{
			// File-type gating: credential files are out of scope.
			name: 'bare re-throw in credential file (skipped)',
			filename: '/path/to/MyCredential.credentials.ts',
			code: `
try {
await apiRequest();
} catch (error) {
throw error;
}`,
		},
		{
			// File-type gating: non-.ts files are out of scope.
			name: 'bare re-throw in .js file (skipped)',
			filename: '/path/to/helper.js',
			code: `
try {
apiRequest();
} catch (error) {
throw error;
}`,
		},
	],
	// Cases the rule must report, with the expected message ids.
	invalid: [
		{
			name: 'bare re-throw of caught error',
			code: `
try {
await apiRequest();
} catch (error) {
throw error;
}`,
			errors: [{ messageId: 'useNodeApiError' }],
		},
		{
			name: 'throw new Error in catch block',
			code: `
try {
await apiRequest();
} catch (error) {
throw new Error('Request failed');
}`,
			errors: [
				{
					messageId: 'useNodeApiErrorInsteadOfGeneric',
					data: { errorClass: 'Error' },
				},
			],
		},
		{
			name: 'bare re-throw after continueOnFail',
			code: `
try {
responseData = await apiRequest.call(this, 'POST', '/tasks', body);
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ json: { error: error.message } });
continue;
}
throw error;
}`,
			errors: [{ messageId: 'useNodeApiError' }],
		},
		{
			name: 'throw new TypeError in catch block',
			code: `
try {
JSON.parse(data);
} catch (error) {
throw new TypeError('Invalid JSON');
}`,
			errors: [
				{
					messageId: 'useNodeApiErrorInsteadOfGeneric',
					data: { errorClass: 'TypeError' },
				},
			],
		},
		{
			// Only the inner bare re-throw is reported; the outer catch wraps
			// correctly.
			name: 'bare re-throw in nested catch',
			code: `
try {
try {
await apiRequest();
} catch (innerError) {
throw innerError;
}
} catch (outerError) {
throw new NodeApiError(this.getNode(), outerError as JsonObject);
}`,
			errors: [{ messageId: 'useNodeApiError' }],
		},
		{
			name: 'throw named variable in catch',
			code: `
try {
await apiRequest();
} catch (e) {
throw e;
}`,
			errors: [{ messageId: 'useNodeApiError' }],
		},
	],
});

View file

@ -0,0 +1,90 @@
import { DefinitionType } from '@typescript-eslint/scope-manager';
import { AST_NODE_TYPES, type TSESTree } from '@typescript-eslint/utils';
import { isFileType } from '../utils/index.js';
import { createRule } from '../utils/rule-creator.js';
/** The only error classes a catch block is allowed to (re)throw. */
const ALLOWED_ERROR_CLASSES = new Set(['NodeApiError', 'NodeOperationError']);

/**
 * For a `throw new X(...)` expression, returns the constructor name `X`.
 * Returns null for anything else (non-`new` throws, computed/member callees).
 */
function getThrowCalleeName(argument: TSESTree.Expression): string | null {
	if (argument.type !== AST_NODE_TYPES.NewExpression) return null;
	const { callee } = argument;
	return callee.type === AST_NODE_TYPES.Identifier ? callee.name : null;
}
/**
 * True when `node` has a CatchClause anywhere in its ancestor chain.
 *
 * NOTE(review): the walk does not stop at function boundaries, so a throw
 * inside a callback declared within a catch block also counts as "inside"
 * the catch — confirm that is the intended strictness.
 */
function isInsideCatchClause(node: TSESTree.Node): boolean {
	for (let ancestor: TSESTree.Node | undefined = node.parent; ancestor; ancestor = ancestor.parent) {
		if (ancestor.type === AST_NODE_TYPES.CatchClause) return true;
	}
	return false;
}
/**
 * ESLint rule: inside a catch block, rethrow only NodeApiError or
 * NodeOperationError. Bare re-throws of the caught value and generic error
 * constructors (Error, TypeError, ...) are reported because they drop the
 * HTTP context the n8n UI surfaces to users.
 */
export const RequireNodeApiErrorRule = createRule({
	name: 'require-node-api-error',
	meta: {
		type: 'problem',
		docs: {
			description:
				'Require NodeApiError or NodeOperationError for error wrapping in catch blocks. ' +
				'Raw errors lose HTTP context in the n8n UI.',
		},
		messages: {
			useNodeApiError:
				'Use `NodeApiError` or `NodeOperationError` instead of re-throwing raw errors. ' +
				'Example: `throw new NodeApiError(this.getNode(), error as JsonObject)`',
			useNodeApiErrorInsteadOfGeneric:
				'Use `NodeApiError` or `NodeOperationError` instead of `{{ errorClass }}`. ' +
				'Example: `throw new NodeApiError(this.getNode(), error as JsonObject)`',
		},
		schema: [],
	},
	defaultOptions: [],
	create(context) {
		// Scope gating: lint *.node.ts files and any other .ts helper file,
		// but skip credential definitions and non-TypeScript sources entirely.
		const isNodeFile = isFileType(context.filename, '.node.ts');
		const isHelperFile =
			context.filename.endsWith('.ts') &&
			!isNodeFile &&
			!isFileType(context.filename, '.credentials.ts');
		if (!isNodeFile && !isHelperFile) {
			return {};
		}
		return {
			ThrowStatement(node) {
				if (!isInsideCatchClause(node)) return;
				if (!node.argument) return;
				const { argument } = node;
				if (argument.type === AST_NODE_TYPES.Identifier) {
					// `throw someIdentifier` — report only when the identifier resolves
					// to a catch-clause parameter. Scope resolution keeps shadowing
					// (e.g. a callback parameter of the same name) from false-flagging.
					const scope = context.sourceCode.getScope(node);
					const ref = scope.references.find((r) => r.identifier === argument);
					const isCatchParam =
						ref?.resolved?.defs.some((def) => def.type === DefinitionType.CatchClause) ?? false;
					if (isCatchParam) {
						context.report({ node, messageId: 'useNodeApiError' });
					}
					return;
				}
				// `throw new X(...)` — report when X is a named class outside the
				// allowed set. Errors wrapped into a local variable and then thrown
				// are not reported (the identifier branch above matches only catch
				// parameters).
				const calleeName = getThrowCalleeName(argument);
				if (calleeName !== null && !ALLOWED_ERROR_CLASSES.has(calleeName)) {
					context.report({
						node,
						messageId: 'useNodeApiErrorInsteadOfGeneric',
						data: { errorClass: calleeName },
					});
				}
			},
		};
	},
});

View file

@ -0,0 +1,55 @@
import { setImmediate } from 'node:timers/promises';
import type { Mock } from 'vitest';
import { vi } from 'vitest';
import type { ObservabilityProvider, RuntimeBridge } from '../types';
/**
 * Builds a fully mocked RuntimeBridge: lifecycle methods resolve immediately,
 * `execute` returns the string 'result', and `isDisposed` reports false.
 * Tests can override individual mocks via `vi.mocked(bridge.xxx)`.
 */
export function createMockBridge(): RuntimeBridge {
	const resolved = () => vi.fn().mockResolvedValue(undefined);
	return {
		initialize: resolved(),
		execute: vi.fn().mockReturnValue('result'),
		dispose: resolved(),
		isDisposed: vi.fn().mockReturnValue(false),
	};
}
/**
 * Yields one macrotask turn (promisified `setImmediate`) so that all queued
 * promise callbacks run before the test continues. The `node:timers/promises`
 * import is not patched by fake timers, so this works under `vi.useFakeTimers()`.
 */
export async function flushMicrotasks(): Promise<void> {
	return await setImmediate();
}
/**
 * Builds a bridge factory whose promises never settle on their own: every call
 * parks its `resolve` in `pendingResolvers`, and the test decides when (and
 * with which bridge) each pending creation completes.
 */
export function createDeferredBridgeFactory(): {
	factory: Mock<() => Promise<RuntimeBridge>>;
	pendingResolvers: Array<(bridge: RuntimeBridge) => void>;
} {
	const pendingResolvers: Array<(bridge: RuntimeBridge) => void> = [];
	const deferBridge = (): Promise<RuntimeBridge> =>
		new Promise<RuntimeBridge>((resolve) => {
			pendingResolvers.push(resolve);
		});
	const factory = vi.fn<() => Promise<RuntimeBridge>>().mockImplementation(deferBridge);
	return { factory, pendingResolvers };
}
/**
 * Builds a fully mocked ObservabilityProvider. All metric, trace, and log
 * methods are vi.fn() spies; `startSpan` always hands back the same mock span.
 */
export function createMockObservability(): ObservabilityProvider {
	const span = {
		setStatus: vi.fn(),
		setAttribute: vi.fn(),
		recordException: vi.fn(),
		end: vi.fn(),
	};
	return {
		metrics: {
			counter: vi.fn(),
			gauge: vi.fn(),
			histogram: vi.fn(),
		},
		traces: {
			startSpan: vi.fn().mockReturnValue(span),
		},
		logs: {
			error: vi.fn(),
			warn: vi.fn(),
			info: vi.fn(),
			debug: vi.fn(),
		},
	};
}

View file

@ -1,39 +1,7 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { ExpressionEvaluator } from '../expression-evaluator';
import type { RuntimeBridge, ObservabilityProvider } from '../../types';
function createMockBridge(): RuntimeBridge {
return {
initialize: vi.fn().mockResolvedValue(undefined),
execute: vi.fn().mockReturnValue('result'),
dispose: vi.fn().mockResolvedValue(undefined),
isDisposed: vi.fn().mockReturnValue(false),
};
}
function createMockObservability(): ObservabilityProvider {
return {
metrics: {
counter: vi.fn(),
gauge: vi.fn(),
histogram: vi.fn(),
},
traces: {
startSpan: vi.fn().mockReturnValue({
setStatus: vi.fn(),
setAttribute: vi.fn(),
recordException: vi.fn(),
end: vi.fn(),
}),
},
logs: {
error: vi.fn(),
warn: vi.fn(),
info: vi.fn(),
debug: vi.fn(),
},
};
}
import { createMockBridge, createMockObservability } from '../../__tests__/helpers';
describe('ExpressionEvaluator cache', () => {
let bridge: RuntimeBridge;

View file

@ -0,0 +1,50 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { ExpressionEvaluator } from '../expression-evaluator';
import type { RuntimeBridge } from '../../types';
import { createMockBridge, flushMicrotasks } from '../../__tests__/helpers';
// Idle window after which the evaluator's pool scales to zero warm bridges.
const IDLE_TIMEOUT_MS = 5_000;
describe('ExpressionEvaluator', () => {
	// Fake timers make the idle timeout advance deterministically.
	beforeEach(() => {
		vi.useFakeTimers();
	});
	afterEach(() => {
		vi.useRealTimers();
	});
	it('should cold-start a bridge when the pool has scaled to zero', async () => {
		// Track every bridge the factory hands out so disposal can be asserted.
		const bridges: RuntimeBridge[] = [];
		const createBridge = () => {
			const bridge = createMockBridge();
			vi.mocked(bridge.execute).mockReturnValue('result');
			bridges.push(bridge);
			return bridge;
		};
		const evaluator = new ExpressionEvaluator({
			createBridge,
			idleTimeoutMs: IDLE_TIMEOUT_MS,
			maxCodeCacheSize: 100,
		});
		await evaluator.initialize();
		await flushMicrotasks();
		// Let the pool scale to 0
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(bridges[0].dispose).toHaveBeenCalled();
		// Pool now throws PoolExhaustedError on acquire → evaluator must cold-start
		const caller = {};
		await evaluator.acquire(caller);
		// Cold-start bridge should be usable for evaluation
		const result = evaluator.evaluate('$json.x', { $json: { x: 1 } }, caller);
		expect(result).toBe('result');
		await evaluator.release(caller);
		await evaluator.dispose();
	});
});

View file

@ -8,6 +8,8 @@ import type {
} from '../types';
import { DEFAULT_BRIDGE_CONFIG } from '../types/bridge';
import { IsolateError } from '@n8n/errors';
import { IdleScalingPool } from '../pool/idle-scaling-pool';
import type { IPool } from '../pool/isolate-pool';
import { IsolatePool, PoolDisposedError, PoolExhaustedError } from '../pool/isolate-pool';
import { LruCache } from './lru-cache';
@ -23,7 +25,7 @@ export class ExpressionEvaluator implements IExpressionEvaluator {
// Cache hit rate in production: ~99.9% (same expressions repeat within a workflow)
private codeCache: LruCache<string, string>;
private pool: IsolatePool;
private pool: IPool;
private bridgesByCaller = new WeakMap<object, RuntimeBridge>();
@ -40,15 +42,23 @@ export class ExpressionEvaluator implements IExpressionEvaluator {
await bridge.initialize();
return bridge;
};
this.pool = new IsolatePool(
this.createBridge,
config.poolSize ?? 1,
(error) => {
logger.error('[IsolatePool] Failed to replenish bridge', { error });
config.observability?.metrics.counter('expression.pool.replenish_failed', 1);
},
logger,
);
const onReplenishFailed = (error: unknown) => {
logger.error('[IsolatePool] Failed to replenish bridge', { error });
config.observability?.metrics.counter('expression.pool.replenish_failed', 1);
};
this.pool =
config.idleTimeoutMs === undefined
? new IsolatePool(this.createBridge, config.poolSize ?? 1, onReplenishFailed, logger)
: new IdleScalingPool(
this.createBridge,
config.poolSize ?? 1,
config.idleTimeoutMs,
onReplenishFailed,
logger,
config.observability,
);
}
async initialize(): Promise<void> {

View file

@ -0,0 +1,415 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { RuntimeBridge } from '../../types';
import { IdleScalingPool } from '../idle-scaling-pool';
import { PoolDisposedError, PoolExhaustedError } from '../isolate-pool';
import {
createDeferredBridgeFactory,
createMockBridge,
createMockObservability,
flushMicrotasks,
} from '../../__tests__/helpers';
const IDLE_TIMEOUT_MS = 5_000;
/** Bridge factory spy that resolves a fresh mock bridge on every call. */
function createFactory() {
	const buildBridge = async () => createMockBridge();
	return vi.fn().mockImplementation(buildBridge);
}
describe('IdleScalingPool', () => {
	// Fake timers drive the idle timeout; flushMicrotasks drains promise queues.
	beforeEach(() => {
		vi.useFakeTimers();
	});
	afterEach(() => {
		vi.useRealTimers();
	});
	it('should initialize with the configured number of bridges', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 3, IDLE_TIMEOUT_MS);
		await pool.initialize();
		expect(factory).toHaveBeenCalledTimes(3);
		await pool.dispose();
	});
	it('should throw PoolDisposedError on acquire after dispose', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await pool.dispose();
		expect(() => pool.acquire()).toThrow(PoolDisposedError);
	});
	it('should scale to 0 after idleTimeoutMs elapses with no acquire', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 2, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		// Collect the bridges created during initialize so disposal can be asserted.
		const initialBridges = await Promise.all(
			factory.mock.results.map((r) => r.value as Promise<RuntimeBridge>),
		);
		expect(factory).toHaveBeenCalledTimes(2);
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		for (const bridge of initialBridges) {
			expect(bridge.dispose).toHaveBeenCalled();
		}
		// Acquire on the empty pool throws but kicks off a background scale-up.
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.waitForReplenishment();
		await pool.dispose();
	});
	it('should trigger scale-up on acquire after scale-down and warm back to full size', async () => {
		const POOL_SIZE = 4;
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, POOL_SIZE, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		expect(factory).toHaveBeenCalledTimes(POOL_SIZE);
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.waitForReplenishment();
		await flushMicrotasks();
		// After scale-up completes, exactly POOL_SIZE bridges are available again.
		const acquired: RuntimeBridge[] = [];
		for (let i = 0; i < POOL_SIZE; i++) {
			acquired.push(pool.acquire());
		}
		expect(acquired.length).toBe(POOL_SIZE);
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.dispose();
	});
	it('should not trigger a duplicate scale-up on a second acquire while scaling up', async () => {
		const { factory, pendingResolvers } = createDeferredBridgeFactory();
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		const initPromise = pool.initialize();
		pendingResolvers.shift()!(createMockBridge());
		await initPromise;
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		// First acquire starts a scale-up whose factory call stays pending.
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		const callsAfterFirstAcquire = factory.mock.calls.length;
		// Second acquire must reuse the in-flight scale-up, not start another.
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		expect(factory.mock.calls.length).toBe(callsAfterFirstAcquire);
		while (pendingResolvers.length > 0) {
			pendingResolvers.shift()!(createMockBridge());
		}
		await pool.dispose();
	});
	it('should reset the idle timer on each acquire', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		// Keep acquiring at half the timeout: the pool must never scale down.
		for (let i = 0; i < 10; i++) {
			vi.advanceTimersByTime(IDLE_TIMEOUT_MS / 2);
			await flushMicrotasks();
			const bridge = pool.acquire();
			await pool.release(bridge);
			await flushMicrotasks();
			await pool.waitForReplenishment();
		}
		const callsBeforeQuiet = factory.mock.calls.length;
		// A full quiet timeout now scales down; the next acquire forces a scale-up.
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		expect(factory.mock.calls.length).toBeGreaterThan(callsBeforeQuiet);
		await pool.dispose();
	});
	it('should not replenish on release while idle', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 2, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		const bridge = pool.acquire();
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		const callsBeforeRelease = factory.mock.calls.length;
		// Releasing into a scaled-down pool must not create a replacement bridge.
		await pool.release(bridge);
		await flushMicrotasks();
		expect(factory.mock.calls.length).toBe(callsBeforeRelease);
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.dispose();
	});
	it('should clear the idle timer on dispose', async () => {
		const factory = createFactory();
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		await pool.dispose();
		const callsBeforeAdvance = factory.mock.calls.length;
		// Advancing time after dispose must not fire the timer or create bridges.
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS * 2);
		await flushMicrotasks();
		expect(factory.mock.calls.length).toBe(callsBeforeAdvance);
	});
	it('should await in-flight scale-up during dispose', async () => {
		const { factory, pendingResolvers } = createDeferredBridgeFactory();
		const scaleUpBridges = [createMockBridge(), createMockBridge()];
		const pool = new IdleScalingPool(factory, 2, IDLE_TIMEOUT_MS);
		const initPromise = pool.initialize();
		pendingResolvers.shift()!(createMockBridge());
		pendingResolvers.shift()!(createMockBridge());
		await initPromise;
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		expect(pendingResolvers.length).toBe(2);
		let disposeResolved = false;
		const disposePromise = pool.dispose().then(() => {
			disposeResolved = true;
		});
		await flushMicrotasks();
		// dispose() must block until the pending scale-up settles…
		expect(disposeResolved).toBe(false);
		pendingResolvers.shift()!(scaleUpBridges[0]);
		pendingResolvers.shift()!(scaleUpBridges[1]);
		await flushMicrotasks();
		await disposePromise;
		// …and then dispose the bridges the scale-up produced.
		expect(disposeResolved).toBe(true);
		expect(scaleUpBridges[0].dispose).toHaveBeenCalled();
		expect(scaleUpBridges[1].dispose).toHaveBeenCalled();
	});
	it('should await in-flight scale-down during dispose', async () => {
		const initialBridge = createMockBridge();
		let resolveDispose: (() => void) | null = null;
		// Make the bridge's dispose hang until the test resolves it explicitly.
		vi.mocked(initialBridge.dispose).mockImplementation(
			() =>
				new Promise<void>((resolve) => {
					resolveDispose = resolve;
				}),
		);
		const factory = vi.fn().mockResolvedValue(initialBridge);
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(initialBridge.dispose).toHaveBeenCalled();
		expect(resolveDispose).not.toBeNull();
		let disposeResolved = false;
		const disposePromise = pool.dispose().then(() => {
			disposeResolved = true;
		});
		await flushMicrotasks();
		// dispose() must wait for the hanging scale-down before settling.
		expect(disposeResolved).toBe(false);
		resolveDispose!();
		await flushMicrotasks();
		await disposePromise;
		expect(disposeResolved).toBe(true);
	});
	it('should preserve current behavior when idleTimeoutMs is unset', async () => {
		const factory = createFactory();
		// No idleTimeoutMs → the pool must never scale down on its own.
		const pool = new IdleScalingPool(factory, 2);
		await pool.initialize();
		await flushMicrotasks();
		const initialBridges = await Promise.all(
			factory.mock.results.map((r) => r.value as Promise<RuntimeBridge>),
		);
		vi.advanceTimersByTime(60 * 60 * 1000);
		await flushMicrotasks();
		for (const bridge of initialBridges) {
			expect(bridge.dispose).not.toHaveBeenCalled();
		}
		await pool.dispose();
	});
	it('should emit scaled_to_zero and scaled_up metrics once per transition', async () => {
		const factory = createFactory();
		const observability = createMockObservability();
		const pool = new IdleScalingPool(
			factory,
			1,
			IDLE_TIMEOUT_MS,
			undefined,
			undefined,
			observability,
		);
		await pool.initialize();
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(observability.metrics.counter).toHaveBeenCalledWith('expression.pool.scaled_to_zero', 1);
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		expect(observability.metrics.counter).toHaveBeenCalledWith('expression.pool.scaled_up', 1);
		// A second acquire during the same scale-up must not emit a second counter.
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		const scaledUpCalls = vi
			.mocked(observability.metrics.counter)
			.mock.calls.filter((call) => call[0] === 'expression.pool.scaled_up');
		const scaledToZeroCalls = vi
			.mocked(observability.metrics.counter)
			.mock.calls.filter((call) => call[0] === 'expression.pool.scaled_to_zero');
		expect(scaledUpCalls.length).toBe(1);
		expect(scaledToZeroCalls.length).toBe(1);
		await pool.waitForReplenishment();
		await pool.dispose();
	});
	it('should make concurrent dispose calls wait for the same teardown', async () => {
		const bridge = createMockBridge();
		let resolveDispose: (() => void) | null = null;
		// Hang the bridge's dispose so both dispose() calls stay pending together.
		vi.mocked(bridge.dispose).mockImplementation(
			() =>
				new Promise<void>((resolve) => {
					resolveDispose = resolve;
				}),
		);
		const factory = vi.fn().mockResolvedValue(bridge);
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		let firstResolved = false;
		let secondResolved = false;
		const first = pool.dispose().then(() => {
			firstResolved = true;
		});
		const second = pool.dispose().then(() => {
			secondResolved = true;
		});
		await flushMicrotasks();
		expect(firstResolved).toBe(false);
		expect(secondResolved).toBe(false);
		resolveDispose!();
		await flushMicrotasks();
		await Promise.all([first, second]);
		expect(firstResolved).toBe(true);
		expect(secondResolved).toBe(true);
		// Shared teardown: one bridge created, disposed exactly once.
		expect(factory).toHaveBeenCalledTimes(1);
		expect(bridge.dispose).toHaveBeenCalledTimes(1);
	});
	it('should recover from scale-up failure by retrying on next acquire', async () => {
		const initialBridge = createMockBridge();
		let callCount = 0;
		// Factory: first call succeeds (initialize), second fails (first scale-up),
		// later calls succeed again (retry).
		const factory = vi.fn().mockImplementation(async () => {
			const idx = callCount++;
			if (idx === 0) return initialBridge;
			if (idx === 1) throw new Error('transient scale-up failure');
			return createMockBridge();
		});
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.waitForReplenishment();
		await flushMicrotasks();
		// The failed scale-up left the pool empty; this acquire retries it.
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await pool.waitForReplenishment();
		await flushMicrotasks();
		const bridge = pool.acquire();
		expect(bridge).toBeDefined();
		await pool.dispose();
	});
	it('should not clobber a concurrent scale-up when scale-down finishes', async () => {
		const oldBridge = createMockBridge();
		let resolveOldDispose: (() => void) | null = null;
		vi.mocked(oldBridge.dispose).mockImplementation(
			() =>
				new Promise<void>((resolve) => {
					resolveOldDispose = resolve;
				}),
		);
		const newBridge = createMockBridge();
		let call = 0;
		const factory = vi.fn().mockImplementation(async () => {
			call++;
			if (call === 1) return oldBridge;
			if (call === 2) return newBridge;
			return createMockBridge();
		});
		const pool = new IdleScalingPool(factory, 1, IDLE_TIMEOUT_MS);
		await pool.initialize();
		await flushMicrotasks();
		// Trigger scale-down — oldBridge.dispose hangs until we resolve it
		vi.advanceTimersByTime(IDLE_TIMEOUT_MS);
		await flushMicrotasks();
		expect(oldBridge.dispose).toHaveBeenCalled();
		expect(resolveOldDispose).not.toBeNull();
		// Acquire during scale-down triggers a concurrent scale-up
		expect(() => pool.acquire()).toThrow(PoolExhaustedError);
		await flushMicrotasks();
		// Let scale-down finish — its .finally must not clobber the new inner pool
		resolveOldDispose!();
		await flushMicrotasks();
		await pool.waitForReplenishment();
		// New pool should still be serving
		const bridge = pool.acquire();
		expect(bridge).toBe(newBridge);
		await pool.dispose();
	});
});

View file

@ -1,15 +1,7 @@
import { describe, it, expect, vi } from 'vitest';
import type { RuntimeBridge } from '../../types';
import { IsolatePool, PoolDisposedError, PoolExhaustedError } from '../isolate-pool';
function createMockBridge(): RuntimeBridge {
return {
initialize: vi.fn().mockResolvedValue(undefined),
execute: vi.fn().mockReturnValue('result'),
dispose: vi.fn().mockResolvedValue(undefined),
isDisposed: vi.fn().mockReturnValue(false),
};
}
import { createMockBridge } from '../../__tests__/helpers';
function createFactory() {
return vi.fn().mockImplementation(async () => createMockBridge());

View file

@ -0,0 +1,142 @@
import type { ObservabilityProvider, RuntimeBridge } from '../types';
import type { Logger } from '../types/bridge';
import type { IPool } from './isolate-pool';
import { IsolatePool, PoolDisposedError, PoolExhaustedError } from './isolate-pool';
/**
* Wraps an `IsolatePool`, disposing it after a configured idle period and
* recreating it on the next acquire. The inner pool either fully exists or
* does not; callers never observe a partial state.
*/
export class IdleScalingPool implements IPool {
	// Live pool of warm bridges; null while scaled to zero (or during dispose).
	private innerPool: IsolatePool | null = null;
	// In-flight recreation of the inner pool, if any. At most one at a time.
	private pendingScaleUp: Promise<void> | null = null;
	// In-flight teardown of the inner pool, if any. At most one at a time.
	private pendingScaleDown: Promise<void> | null = null;
	private idleTimer?: NodeJS.Timeout;
	private disposed = false;
	// Memoized teardown promise so concurrent dispose() calls share one run.
	private disposePromise?: Promise<void>;
	constructor(
		private readonly createBridge: () => Promise<RuntimeBridge>,
		private readonly size: number,
		private readonly idleTimeoutMs?: number,
		private readonly onReplenishFailed?: (error: unknown) => void,
		private readonly logger?: Logger,
		private readonly observability?: ObservabilityProvider,
	) {}
	/** Warms the inner pool to `size` bridges and arms the idle timer. */
	async initialize(): Promise<void> {
		this.innerPool = this.createInnerPool();
		await this.innerPool.initialize();
		this.resetIdleTimer();
	}
	/**
	 * Checks out a warm bridge. If the pool has scaled to zero, kicks off a
	 * background scale-up and throws PoolExhaustedError so the caller can
	 * fall back (e.g. cold-start its own bridge) in the meantime.
	 */
	acquire(): RuntimeBridge {
		if (this.disposed) throw new PoolDisposedError();
		if (!this.innerPool) {
			this.triggerScaleUp();
			throw new PoolExhaustedError();
		}
		// Any acquire counts as activity: push the scale-down deadline out.
		this.resetIdleTimer();
		return this.innerPool.acquire();
	}
	/** Hands a bridge back; disposes it directly when there is no live pool. */
	async release(bridge: RuntimeBridge): Promise<void> {
		if (this.innerPool && !this.disposed) {
			// Pool is live: delegate so the inner pool disposes and replenishes.
			await this.innerPool.release(bridge);
		} else if (!bridge.isDisposed()) {
			// Pool is idle, disposed, or the bridge came from cold-start fallback:
			// no pool to delegate to, just dispose to free the V8 isolate.
			await bridge.dispose();
		}
	}
	/** Idempotent teardown; concurrent callers await the same promise. */
	async dispose(): Promise<void> {
		return (this.disposePromise ??= this.doDispose());
	}
	private async doDispose(): Promise<void> {
		// Flag first so in-flight transitions observe the disposed state.
		this.disposed = true;
		if (this.idleTimer) {
			clearTimeout(this.idleTimer);
			this.idleTimer = undefined;
		}
		// Wait for any scale-up/scale-down in flight before final teardown.
		await this.drainPendingTransitions();
		if (this.innerPool) {
			const toDispose = this.innerPool;
			this.innerPool = null;
			await toDispose.dispose();
		}
	}
	/** Settles once pending transitions and any inner-pool replenishment finish. */
	async waitForReplenishment(): Promise<void> {
		await this.drainPendingTransitions();
		if (this.innerPool) await this.innerPool.waitForReplenishment();
	}
	private async drainPendingTransitions(): Promise<void> {
		// Snapshot both fields before awaiting so a transition starting between reads
		// can't be missed. Waits for transitions in flight now, not ones started later.
		const promises: Promise<void>[] = [];
		if (this.pendingScaleUp) promises.push(this.pendingScaleUp.catch(() => {}));
		if (this.pendingScaleDown) promises.push(this.pendingScaleDown.catch(() => {}));
		await Promise.all(promises);
	}
	private createInnerPool(): IsolatePool {
		return new IsolatePool(this.createBridge, this.size, this.onReplenishFailed, this.logger);
	}
	private resetIdleTimer(): void {
		// No timeout configured → never scale down (legacy always-warm behavior).
		if (this.idleTimeoutMs === undefined) return;
		if (this.idleTimer) clearTimeout(this.idleTimer);
		this.idleTimer = setTimeout(() => this.triggerScaleDown(), this.idleTimeoutMs);
		// Don't let an armed idle timer keep the Node.js process alive.
		this.idleTimer.unref();
	}
	private triggerScaleUp(): void {
		// Skip when a scale-up is already running, a pool exists, or we're disposed.
		if (this.pendingScaleUp || this.innerPool || this.disposed) return;
		this.logger?.info('[IdleScalingPool] Scaling up from idle');
		// Counter fires at trigger time (once per transition), not on completion.
		this.observability?.metrics.counter('expression.pool.scaled_up', 1);
		const newInner = this.createInnerPool();
		this.pendingScaleUp = newInner
			.initialize()
			.then(async () => {
				if (this.disposed) {
					// dispose() raced us: don't install the pool, tear it down instead.
					await newInner.dispose();
					return;
				}
				this.innerPool = newInner;
				this.resetIdleTimer();
			})
			.catch(async (error: unknown) => {
				// Swallow after logging; the next acquire() will retry the scale-up.
				this.logger?.error('[IdleScalingPool] Scale-up failed', { error });
				await newInner.dispose().catch(() => {});
			})
			.finally(() => {
				this.pendingScaleUp = null;
			});
	}
	private triggerScaleDown(): void {
		// Never scale down over an in-flight scale-up — it would clobber its pool.
		if (this.pendingScaleDown || this.pendingScaleUp || !this.innerPool || this.disposed) return;
		this.logger?.info('[IdleScalingPool] Scaling to 0 after inactivity', {
			idleTimeoutMs: this.idleTimeoutMs,
		});
		this.observability?.metrics.counter('expression.pool.scaled_to_zero', 1);
		// Detach the pool synchronously so acquire() sees "scaled to zero" at once.
		const oldInner = this.innerPool;
		this.innerPool = null;
		this.pendingScaleDown = oldInner
			.dispose()
			.catch((error: unknown) => {
				this.logger?.error('[IdleScalingPool] Scale-down dispose failed', { error });
			})
			.finally(() => {
				this.pendingScaleDown = null;
			});
	}
}

View file

@ -16,7 +16,16 @@ export class PoolExhaustedError extends IsolateError {
}
}
export class IsolatePool {
/** Public contract shared by IsolatePool and IdleScalingPool. */
export interface IPool {
	/** Pre-warms the pool to its configured size; call before the first acquire. */
	initialize(): Promise<void>;
	/** Synchronously checks out a warm bridge; throws PoolExhaustedError when none is available, PoolDisposedError after dispose. */
	acquire(): RuntimeBridge;
	/** Returns a bridge after use so the pool can dispose and replenish it. */
	release(bridge: RuntimeBridge): Promise<void>;
	/** Tears down all bridges; the pool must not be used afterwards. */
	dispose(): Promise<void>;
	/** Resolves once in-flight bridge creation/replenishment has settled. */
	waitForReplenishment(): Promise<void>;
}
export class IsolatePool implements IPool {
private bridges: RuntimeBridge[] = [];
private disposed = false;
/** Number of bridges currently being created. */

View file

@ -42,6 +42,9 @@ export interface EvaluatorConfig {
*/
poolSize?: number;
/** If set, scale the pool to 0 warm bridges after this many ms with no isolate acquire. */
idleTimeoutMs?: number;
/** Optional logger. Passed through to pool. Falls back to no-op. */
logger?: Logger;
}

View file

@ -12,7 +12,9 @@ function createMockContext(
): InstanceAiContext {
return {
userId: 'user-1',
workflowService: {} as never,
workflowService: {
get: jest.fn().mockResolvedValue({ id: 'wf-1', name: 'Fetched Name' }),
} as unknown as InstanceAiContext['workflowService'],
executionService: {
list: jest.fn(),
getStatus: jest.fn(),
@ -138,36 +140,41 @@ describe('executions tool', () => {
expect(context.executionService.run).not.toHaveBeenCalled();
});
it('should suspend for confirmation when approval is needed (default permission)', async () => {
it('should suspend for confirmation using the looked-up workflow name', async () => {
const suspendFn = jest.fn();
const context = createMockContext({
permissions: {},
});
(context.workflowService.get as jest.Mock).mockResolvedValue({
id: 'wf-1',
name: 'My Workflow',
});
const tool = createExecutionsTool(context);
await tool.execute!(
{
action: 'run' as const,
workflowId: 'wf-1',
workflowName: 'My Workflow',
},
createAgentCtx({ suspend: suspendFn }) as never,
);
expect(context.workflowService.get).toHaveBeenCalledWith('wf-1');
expect(suspendFn).toHaveBeenCalled();
const suspendPayload = suspendFn.mock.calls[0][0] as Record<string, unknown>;
expect(suspendPayload).toEqual(
expect.objectContaining({
message: expect.stringContaining('My Workflow'),
message: 'Execute workflow "My Workflow" (ID: wf-1)?',
severity: 'warning',
requestId: expect.any(String),
}),
);
});
it('should use workflowId in message when workflowName is not provided', async () => {
it('should fall back to workflowId in message when lookup fails', async () => {
const suspendFn = jest.fn();
const context = createMockContext({ permissions: {} });
(context.workflowService.get as jest.Mock).mockRejectedValue(new Error('not found'));
const tool = createExecutionsTool(context);
await tool.execute!(
@ -179,7 +186,7 @@ describe('executions tool', () => {
const suspendPayload = suspendFn.mock.calls[0][0] as Record<string, unknown>;
expect(suspendPayload).toEqual(
expect.objectContaining({
message: expect.stringContaining('wf-42'),
message: 'Execute workflow "wf-42" (ID: wf-42)?',
}),
);
});

View file

@ -202,19 +202,40 @@ describe('workflows tool', () => {
});
});
it('should suspend for confirmation when no resumeData', async () => {
it('should suspend for confirmation using the looked-up workflow name', async () => {
const context = createMockContext();
(context.workflowService.get as jest.Mock).mockResolvedValue({
id: 'wf1',
name: 'My WF',
});
const suspend = jest.fn();
const tool = createWorkflowsTool(context, 'full');
await tool.execute!({ action: 'delete', workflowId: 'wf1', workflowName: 'My WF' }, {
await tool.execute!({ action: 'delete', workflowId: 'wf1' }, {
agent: { suspend, resumeData: undefined },
} as never);
expect(context.workflowService.get).toHaveBeenCalledWith('wf1');
expect(suspend).toHaveBeenCalled();
expect(suspend.mock.calls[0][0]).toMatchObject({
message: expect.stringContaining('My WF'),
severity: 'warning',
});
});
it('should fall back to workflowId in message when lookup fails', async () => {
const context = createMockContext();
(context.workflowService.get as jest.Mock).mockRejectedValue(new Error('not found'));
const suspend = jest.fn();
const tool = createWorkflowsTool(context, 'full');
await tool.execute!({ action: 'delete', workflowId: 'wf1' }, {
agent: { suspend, resumeData: undefined },
} as never);
expect(suspend).toHaveBeenCalled();
expect(suspend.mock.calls[0][0]).toMatchObject({
message: expect.stringContaining('My WF'),
severity: 'warning',
message: expect.stringContaining('"wf1"'),
});
});
@ -278,6 +299,27 @@ describe('workflows tool', () => {
});
expect(result).toEqual({ success: true, activeVersionId: 'v2' });
});
it('should suspend for confirmation using the looked-up workflow name', async () => {
const context = createMockContext();
(context.workflowService.get as jest.Mock).mockResolvedValue({
id: 'wf1',
name: 'My WF',
});
const suspend = jest.fn();
const tool = createWorkflowsTool(context, 'full');
await tool.execute!({ action: 'publish', workflowId: 'wf1' }, {
agent: { suspend, resumeData: undefined },
} as never);
expect(context.workflowService.get).toHaveBeenCalledWith('wf1');
expect(suspend).toHaveBeenCalled();
expect(suspend.mock.calls[0][0]).toMatchObject({
message: 'Publish workflow "My WF" (ID: wf1)?',
severity: 'warning',
});
});
});
describe('setup action', () => {
@ -335,5 +377,26 @@ describe('workflows tool', () => {
expect(context.workflowService.unpublish).toHaveBeenCalledWith('wf1');
expect(result).toEqual({ success: true });
});
it('should suspend for confirmation using the looked-up workflow name', async () => {
const context = createMockContext();
(context.workflowService.get as jest.Mock).mockResolvedValue({
id: 'wf1',
name: 'My WF',
});
const suspend = jest.fn();
const tool = createWorkflowsTool(context, 'full');
await tool.execute!({ action: 'unpublish', workflowId: 'wf1' }, {
agent: { suspend, resumeData: undefined },
} as never);
expect(context.workflowService.get).toHaveBeenCalledWith('wf1');
expect(suspend).toHaveBeenCalled();
expect(suspend.mock.calls[0][0]).toMatchObject({
message: 'Unpublish workflow "My WF" (ID: wf1)?',
severity: 'warning',
});
});
});
});

View file

@ -39,7 +39,6 @@ const getAction = z.object({
const runAction = z.object({
action: z.literal('run').describe('Execute a workflow and wait for completion'),
workflowId: z.string().describe('Workflow ID'),
workflowName: z.string().optional().describe('Name of the workflow (for confirmation message)'),
inputData: z
.record(z.unknown())
.optional()
@ -144,9 +143,13 @@ async function handleRun(
// If approval is required and this is the first call, suspend for confirmation
if (needsApproval && (resumeData === undefined || resumeData === null)) {
const workflowName = await context.workflowService
.get(input.workflowId)
.then((wf) => wf.name)
.catch(() => input.workflowId);
await suspend?.({
requestId: nanoid(),
message: `Execute workflow "${input.workflowName ?? input.workflowId}" (ID: ${input.workflowId})?`,
message: `Execute workflow "${workflowName}" (ID: ${input.workflowId})?`,
severity: 'warning' as const,
});
return {

View file

@ -44,10 +44,8 @@ import type { BuilderWorkspace } from '../../workspace/builder-sandbox-factory';
import { readFileViaSandbox } from '../../workspace/sandbox-fs';
import { getWorkspaceRoot } from '../../workspace/sandbox-setup';
import { buildCredentialMap, type CredentialMap } from '../workflows/resolve-credentials';
import {
createSubmitWorkflowTool,
type SubmitWorkflowAttempt,
} from '../workflows/submit-workflow.tool';
import { createIdentityEnforcedSubmitWorkflowTool } from '../workflows/submit-workflow-identity';
import { type SubmitWorkflowAttempt } from '../workflows/submit-workflow.tool';
/** Trigger types that cannot be test-fired programmatically (need an external request). */
const UNTESTABLE_TRIGGERS = new Set([
@ -363,11 +361,12 @@ export async function startBuildWorkflowAgentTask(
}
const mainWorkflowPath = `${root}/src/workflow.ts`;
builderTools['submit-workflow'] = createSubmitWorkflowTool(
domainContext,
builderTools['submit-workflow'] = createIdentityEnforcedSubmitWorkflowTool({
context: domainContext,
workspace,
credMap,
async (attempt) => {
credentialMap: credMap,
root,
onAttempt: async (attempt) => {
submitAttempts.set(attempt.filePath, attempt);
submitAttemptHistory.push(attempt);
if (attempt.filePath !== mainWorkflowPath || !context.workflowTaskService) {
@ -385,7 +384,7 @@ export async function startBuildWorkflowAgentTask(
),
);
},
);
});
const tracedBuilderTools = traceSubAgentTools(
context,

View file

@ -38,7 +38,6 @@ const getAsCodeAction = z.object({
const deleteAction = z.object({
action: z.literal('delete').describe('Archive a workflow by ID (soft delete)'),
workflowId: z.string().describe('ID of the workflow'),
workflowName: z.string().optional().describe('Name of the workflow (for confirmation message)'),
});
const setupAction = z.object({
@ -54,7 +53,6 @@ const publishBaseAction = z.object({
.literal('publish')
.describe('Publish a workflow version to production (omit versionId for latest draft)'),
workflowId: z.string().describe('ID of the workflow'),
workflowName: z.string().optional().describe('Name of the workflow (for confirmation message)'),
versionId: z.string().optional().describe('Version ID'),
});
@ -66,7 +64,6 @@ const publishExtendedAction = publishBaseAction.extend({
const unpublishAction = z.object({
action: z.literal('unpublish').describe('Unpublish a workflow — stop it from running'),
workflowId: z.string().describe('ID of the workflow'),
workflowName: z.string().optional().describe('Name of the workflow (for confirmation message)'),
});
const listVersionsAction = z.object({
@ -161,6 +158,16 @@ function buildInputSchema(context: InstanceAiContext, surface: 'full' | 'orchest
// ── Handlers ────────────────────────────────────────────────────────────────
/**
 * Look up a workflow's display name for use in confirmation prompts.
 * Falls back to the raw `workflowId` when the lookup rejects (or the result
 * cannot be read), so prompts can always render something identifiable.
 */
async function resolveWorkflowName(
	context: InstanceAiContext,
	workflowId: string,
): Promise<string> {
	try {
		const workflow = await context.workflowService.get(workflowId);
		return workflow.name;
	} catch {
		return workflowId;
	}
}
async function handleList(context: InstanceAiContext, input: Extract<Input, { action: 'list' }>) {
const workflows = await context.workflowService.list({
limit: input.limit,
@ -208,9 +215,10 @@ async function handleDelete(
// First call — suspend for confirmation (unless always_allow)
if (needsApproval && (resumeData === undefined || resumeData === null)) {
const workflowName = await resolveWorkflowName(context, input.workflowId);
await suspend?.({
requestId: nanoid(),
message: `Archive workflow "${input.workflowName ?? input.workflowId}"? This will deactivate it if needed and can be undone later.`,
message: `Archive workflow "${workflowName}" (ID: ${input.workflowId})? This will deactivate it if needed and can be undone later.`,
severity: 'warning' as const,
});
// suspend() never resolves — this line is unreachable but satisfies the type checker
@ -440,13 +448,13 @@ async function handlePublish(
const needsApproval = context.permissions?.publishWorkflow !== 'always_allow';
if (needsApproval && (resumeData === undefined || resumeData === null)) {
const label = input.workflowName ?? input.workflowId;
const workflowName = await resolveWorkflowName(context, input.workflowId);
await suspend?.({
requestId: nanoid(),
message: input.versionId
? `Publish version "${input.versionId}" of workflow "${label}"?`
: `Publish workflow "${label}"?`,
? `Publish version "${input.versionId}" of workflow "${workflowName}" (ID: ${input.workflowId})?`
: `Publish workflow "${workflowName}" (ID: ${input.workflowId})?`,
severity: 'warning' as const,
});
return { success: false };
@ -490,9 +498,10 @@ async function handleUnpublish(
const needsApproval = context.permissions?.publishWorkflow !== 'always_allow';
if (needsApproval && (resumeData === undefined || resumeData === null)) {
const workflowName = await resolveWorkflowName(context, input.workflowId);
await suspend?.({
requestId: nanoid(),
message: `Unpublish workflow "${input.workflowName ?? input.workflowId}"?`,
message: `Unpublish workflow "${workflowName}" (ID: ${input.workflowId})?`,
severity: 'warning' as const,
});
return { success: false };

View file

@ -539,7 +539,7 @@ describe('analyzeWorkflow', () => {
expect(result[0].credentialType).toBe('slackApi');
});
it('marks needsAction correctly after credentials are applied', async () => {
it('hides credential-only requests whose credential is already set and tests OK', async () => {
const node = makeNode({
credentials: { slackApi: { id: 'cred-1', name: 'My Slack' } },
});
@ -557,10 +557,91 @@ describe('analyzeWorkflow', () => {
const result = await analyzeWorkflow(context, 'wf-1');
expect(result).toHaveLength(0);
});
it('keeps credential-only requests whose credential test fails', async () => {
const node = makeNode({
credentials: { slackApi: { id: 'cred-1', name: 'My Slack' } },
});
(context.workflowService.getAsWorkflowJSON as jest.Mock).mockResolvedValue(
makeWorkflowJSON([node]),
);
(context.nodeService.getDescription as jest.Mock).mockResolvedValue({
group: [],
credentials: [{ name: 'slackApi' }],
});
(context.credentialService.list as jest.Mock).mockResolvedValue([
{ id: 'cred-1', name: 'My Slack', updatedAt: '2025-01-01T00:00:00.000Z' },
]);
(context.credentialService.test as jest.Mock).mockResolvedValue({
success: false,
message: 'Invalid token',
});
const result = await analyzeWorkflow(context, 'wf-1');
expect(result).toHaveLength(1);
expect(result[0].needsAction).toBe(true);
});
it('keeps testable trigger requests even when their credential is already valid', async () => {
const trigger = makeNode({
name: 'Webhook',
type: 'n8n-nodes-base.webhook',
id: 'n-trigger',
credentials: { httpHeaderAuth: { id: 'cred-1', name: 'My Auth' } },
});
(context.workflowService.getAsWorkflowJSON as jest.Mock).mockResolvedValue(
makeWorkflowJSON([trigger]),
);
(context.nodeService.getDescription as jest.Mock).mockResolvedValue({
group: ['trigger'],
credentials: [{ name: 'httpHeaderAuth' }],
webhooks: [{}],
});
(context.credentialService.list as jest.Mock).mockResolvedValue([
{ id: 'cred-1', name: 'My Auth', updatedAt: '2025-01-01T00:00:00.000Z' },
]);
(context.credentialService.test as jest.Mock).mockResolvedValue({ success: true });
const result = await analyzeWorkflow(context, 'wf-1');
expect(result).toHaveLength(1);
expect(result[0].isTrigger).toBe(true);
expect(result[0].isTestable).toBe(true);
expect(result[0].needsAction).toBe(false);
});
it('keeps requests with parameter issues regardless of credential validity', async () => {
const node = makeNode({
credentials: { slackApi: { id: 'cred-1', name: 'My Slack' } },
});
(context.workflowService.getAsWorkflowJSON as jest.Mock).mockResolvedValue(
makeWorkflowJSON([node]),
);
(context.nodeService.getDescription as jest.Mock).mockResolvedValue({
group: [],
credentials: [{ name: 'slackApi' }],
properties: [{ name: 'resource', displayName: 'Resource', type: 'string' }],
});
(context.nodeService as unknown as Record<string, unknown>).getParameterIssues = jest
.fn()
.mockResolvedValue({
resource: ['Parameter "resource" is required'],
});
(context.credentialService.list as jest.Mock).mockResolvedValue([
{ id: 'cred-1', name: 'My Slack', updatedAt: '2025-01-01T00:00:00.000Z' },
]);
(context.credentialService.test as jest.Mock).mockResolvedValue({ success: true });
const result = await analyzeWorkflow(context, 'wf-1');
expect(result).toHaveLength(1);
expect(result[0].needsAction).toBe(true);
expect(result[0].parameterIssues).toBeDefined();
});
it('sorts by execution order with triggers first', async () => {
const trigger = makeNode({
name: 'Webhook',

View file

@ -0,0 +1,187 @@
import { wrapSubmitExecuteWithIdentity } from '../submit-workflow-identity';
import type { SubmitWorkflowInput, SubmitWorkflowOutput } from '../submit-workflow.tool';
const ROOT = '/home/daytona/workspace';
const MAIN_PATH = `${ROOT}/src/workflow.ts`;
const CHUNK_PATH = `${ROOT}/src/chunk.ts`;

/** Test-local mirror of the sandbox resolution: default file, absolute as-is, relative under ROOT. */
function resolvePath(rawFilePath: string | undefined): string {
	if (!rawFilePath) return MAIN_PATH;
	return rawFilePath.startsWith('/') ? rawFilePath : `${ROOT}/${rawFilePath}`;
}
/**
 * Build a fake underlying `execute` that:
 * - On a call without `workflowId`, returns a freshly-minted id (simulating create).
 * - On a call with `workflowId`, returns that same id (simulating update).
 * - Records every call it received.
 * An optional `gate` promise lets tests hold dispatch mid-flight to exercise races.
 */
function makeUnderlying(opts: { idPrefix?: string; gate?: Promise<void> } = {}) {
	const idPrefix = opts.idPrefix ?? 'wf';
	const calls: SubmitWorkflowInput[] = [];
	let mintedCount = 0;
	async function execute(input: SubmitWorkflowInput): Promise<SubmitWorkflowOutput> {
		calls.push({ ...input });
		if (opts.gate) await opts.gate;
		if (input.workflowId) {
			// Update path: echo the supplied id back unchanged.
			return { success: true, workflowId: input.workflowId };
		}
		// Create path: mint a fresh sequential id.
		mintedCount += 1;
		return { success: true, workflowId: `${idPrefix}_${mintedCount}` };
	}
	return { execute, calls };
}
describe('wrapSubmitExecuteWithIdentity', () => {
	it('parallel submits for the same filePath produce one create and N-1 updates sharing the workflowId', async () => {
		// Gate holds the first dispatch mid-flight so all five calls overlap.
		let release: () => void = () => {};
		const gate = new Promise<void>((res) => {
			release = res;
		});
		const { execute, calls } = makeUnderlying({ gate });
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const inFlight = Array.from({ length: 5 }, async () => await wrapped({}));
		// Let the dispatcher land first, then release.
		await Promise.resolve();
		release();
		const results = await Promise.all(inFlight);
		const ids = results.map((r) => r.workflowId);
		// All five callers observe the one id minted by the single create.
		expect(new Set(ids).size).toBe(1);
		expect(results.every((r) => r.success)).toBe(true);
		const createCalls = calls.filter((c) => !c.workflowId);
		const updateCalls = calls.filter((c) => c.workflowId);
		expect(createCalls).toHaveLength(1);
		expect(updateCalls).toHaveLength(4);
		expect(updateCalls.every((c) => c.workflowId === ids[0])).toBe(true);
	});
	it('sequential submits for the same filePath reuse the bound workflowId', async () => {
		const { execute, calls } = makeUnderlying();
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const first = await wrapped({});
		const second = await wrapped({});
		expect(first.workflowId).toBe('wf_1');
		expect(second.workflowId).toBe('wf_1');
		// First call is a create (no id); second carries the bound id.
		expect(calls).toHaveLength(2);
		expect(calls[0].workflowId).toBeUndefined();
		expect(calls[1].workflowId).toBe('wf_1');
	});
	it('overrides an LLM-supplied workflowId once the wrapper has bound one', async () => {
		const { execute, calls } = makeUnderlying();
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const first = await wrapped({});
		// The hallucinated id must be replaced by the bound 'wf_1'.
		const second = await wrapped({ workflowId: 'llm_hallucinated_id' });
		expect(first.workflowId).toBe('wf_1');
		expect(second.workflowId).toBe('wf_1');
		expect(calls[1].workflowId).toBe('wf_1');
	});
	it('different filePaths dispatch independently (chunk + main composition)', async () => {
		const { execute, calls } = makeUnderlying();
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const [mainResult, chunkResult] = await Promise.all([
			wrapped({ filePath: MAIN_PATH }),
			wrapped({ filePath: CHUNK_PATH }),
		]);
		// Each path gets its own identity; neither blocks the other.
		expect(mainResult.workflowId).not.toBe(chunkResult.workflowId);
		const createCalls = calls.filter((c) => !c.workflowId);
		expect(createCalls).toHaveLength(2);
		const mainAgain = await wrapped({ filePath: MAIN_PATH });
		expect(mainAgain.workflowId).toBe(mainResult.workflowId);
		const chunkAgain = await wrapped({ filePath: CHUNK_PATH });
		expect(chunkAgain.workflowId).toBe(chunkResult.workflowId);
	});
	it('resolves differently-spelled paths to the same identity', async () => {
		const { execute } = makeUnderlying();
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		// Absolute, relative, and omitted paths all resolve to MAIN_PATH.
		const absolute = await wrapped({ filePath: MAIN_PATH });
		const relative = await wrapped({ filePath: 'src/workflow.ts' });
		const defaulted = await wrapped({});
		expect(absolute.workflowId).toBe('wf_1');
		expect(relative.workflowId).toBe('wf_1');
		expect(defaulted.workflowId).toBe('wf_1');
	});
	it('clears the map when the first dispatch fails so subsequent calls can retry', async () => {
		let call = 0;
		const execute = async (input: SubmitWorkflowInput): Promise<SubmitWorkflowOutput> => {
			await Promise.resolve();
			call += 1;
			if (call === 1) {
				return { success: false, errors: ['transient failure'] };
			}
			if (input.workflowId) return { success: true, workflowId: input.workflowId };
			return { success: true, workflowId: 'wf_recovered' };
		};
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const failed = await wrapped({});
		expect(failed.success).toBe(false);
		// The failed entry was evicted, so the retry takes the create branch.
		const retried = await wrapped({});
		expect(retried.success).toBe(true);
		expect(retried.workflowId).toBe('wf_recovered');
	});
	it('reports a failure to concurrent waiters when the dispatcher fails', async () => {
		let release: () => void = () => {};
		const gate = new Promise<void>((res) => {
			release = res;
		});
		let call = 0;
		const execute = async (): Promise<SubmitWorkflowOutput> => {
			call += 1;
			await gate;
			if (call === 1) return { success: false, errors: ['create failed'] };
			return { success: true, workflowId: 'wf_unused' };
		};
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		const a = wrapped({});
		const b = wrapped({});
		await Promise.resolve();
		release();
		const [aResult, bResult] = await Promise.all([a, b]);
		// The waiter (b) sees a synthesized failure, not a duplicate create.
		expect(aResult.success).toBe(false);
		expect(bResult.success).toBe(false);
		expect(bResult.errors?.[0]).toContain('Previous submit-workflow for this file failed');
	});
	it('propagates thrown errors from the dispatcher and clears the map', async () => {
		let call = 0;
		const execute = async (input: SubmitWorkflowInput): Promise<SubmitWorkflowOutput> => {
			await Promise.resolve();
			call += 1;
			if (call === 1) throw new Error('boom');
			if (input.workflowId) return { success: true, workflowId: input.workflowId };
			return { success: true, workflowId: 'wf_after_throw' };
		};
		const wrapped = wrapSubmitExecuteWithIdentity(execute, resolvePath);
		// The throw reaches the caller unchanged...
		await expect(wrapped({})).rejects.toThrow('boom');
		// ...and the map entry is gone, so a retry can create cleanly.
		const retried = await wrapped({});
		expect(retried.workflowId).toBe('wf_after_throw');
	});
});

View file

@ -770,7 +770,12 @@ export async function analyzeWorkflow(
req.credentialType !== undefined ||
req.isTrigger ||
(req.parameterIssues && Object.keys(req.parameterIssues).length > 0),
);
)
// Hide cards the user has nothing to do on: credentials already set and
// tested, no parameter issues, not a trigger awaiting testing. Trigger
// steps are always kept — triggers require user testing regardless of
// credential state.
.filter((req) => !!req.needsAction || (req.isTrigger && !!req.isTestable));
sortByExecutionOrder(
setupRequests,

View file

@ -0,0 +1,130 @@
/**
* Identity-enforcing wrapper for the sandbox submit-workflow tool.
*
* The builder sub-agent can emit multiple parallel submit-workflow calls within
* a single assistant turn. When the LLM drops `workflowId` on calls 2..N, each
* call takes the create branch and persists a duplicate row with the same name.
*
* This wrapper keys identity per resolved `filePath`. The first call for a given
* path synchronously installs a deferred in the pending map before dispatching,
* so concurrent calls for the same path await the first result and inject the
* bound `workflowId` forcing the update branch.
*
* The map is scoped to the builder-task closure: it dies with the task. No
* cross-module coordinator, eviction hook, or TTL sweep is required.
*/
import { createTool } from '@mastra/core/tools';
import type { Workspace } from '@mastra/core/workspace';
import type { CredentialMap } from './resolve-credentials';
import {
createSubmitWorkflowTool,
resolveSandboxWorkflowFilePath,
submitWorkflowInputSchema,
submitWorkflowOutputSchema,
type SubmitWorkflowAttempt,
type SubmitWorkflowInput,
type SubmitWorkflowOutput,
} from './submit-workflow.tool';
import type { InstanceAiContext } from '../../types';
export type SubmitExecute = (input: SubmitWorkflowInput) => Promise<SubmitWorkflowOutput>;
/**
 * Wrap a submit-workflow `execute` with per-filePath identity enforcement.
 *
 * - First call for a given resolved path dispatches and populates the map on success.
 * - Concurrent calls for the same path await the first result and inject the bound id.
 * - On dispatch failure, the map entry is cleared and waiters see a failure result.
 *
 * Exposed separately from the tool factory so it can be unit-tested without
 * constructing a Mastra tool or a sandbox workspace.
 */
export function wrapSubmitExecuteWithIdentity(
	underlying: SubmitExecute,
	resolvePath: (rawFilePath: string | undefined) => string,
): SubmitExecute {
	// Resolved filePath → promise of the workflowId bound by that path's first
	// dispatch. Successful entries are kept for the wrapper's lifetime so later
	// submits reuse the id; entries are deleted only on failure (see below).
	const pending = new Map<string, Promise<string>>();
	return async (input) => {
		const resolvedPath = resolvePath(input.filePath);
		const existing = pending.get(resolvedPath);
		if (existing) {
			// A dispatch for this path already started (or completed): wait for
			// its bound id instead of racing a second create.
			let boundId: string;
			try {
				boundId = await existing;
			} catch (error) {
				// The first dispatch failed — report failure rather than creating
				// a duplicate; the caller can retry (the map entry was cleared).
				const message = error instanceof Error ? error.message : String(error);
				return {
					success: false,
					errors: [`Previous submit-workflow for this file failed: ${message}`],
				};
			}
			// Force the update branch; this also overrides any id the LLM supplied.
			return await underlying({ ...input, workflowId: boundId });
		}
		// First call for this path: install a deferred synchronously — before any
		// await — so concurrent calls in the same tick find it in the map.
		let resolveFn: ((id: string) => void) | undefined;
		let rejectFn: ((reason: unknown) => void) | undefined;
		const promise = new Promise<string>((res, rej) => {
			resolveFn = res;
			rejectFn = rej;
		});
		// Swallow rejections on the stored promise so Node doesn't warn about
		// unhandled rejections when no concurrent waiter happens to attach.
		promise.catch(() => {});
		pending.set(resolvedPath, promise);
		try {
			const result = await underlying(input);
			if (result.success && typeof result.workflowId === 'string') {
				// Bind the id for this path; waiters and later calls reuse it.
				resolveFn?.(result.workflowId);
			} else {
				// Failed result: notify waiters, then evict so a retry can create.
				rejectFn?.(new Error(result.errors?.join(' ') ?? 'submit-workflow failed'));
				pending.delete(resolvedPath);
			}
			return result;
		} catch (error) {
			// Thrown error: same waiter-notify + evict cleanup, then rethrow.
			rejectFn?.(error);
			pending.delete(resolvedPath);
			throw error;
		}
	};
}
/**
* Build a submit-workflow Mastra tool wired with identity enforcement.
* Convenience factory used at the builder-agent callsite.
*/
/**
 * Build a submit-workflow Mastra tool wired with identity enforcement.
 * Convenience factory used at the builder-agent callsite.
 */
export function createIdentityEnforcedSubmitWorkflowTool(args: {
	context: InstanceAiContext;
	workspace: Workspace;
	credentialMap?: CredentialMap;
	onAttempt: (attempt: SubmitWorkflowAttempt) => Promise<void> | void;
	root: string;
}) {
	const { context, workspace, credentialMap, onAttempt, root } = args;
	// The plain tool does the actual sandbox read + persistence work.
	const baseTool = createSubmitWorkflowTool(context, workspace, credentialMap, onAttempt);
	const baseExecute = baseTool.execute as SubmitExecute | undefined;
	if (!baseExecute) {
		throw new Error('createSubmitWorkflowTool returned a tool without an execute handler');
	}
	// Key identity by the same resolved path the underlying tool uses.
	const toResolvedPath = (rawFilePath: string | undefined) =>
		resolveSandboxWorkflowFilePath(rawFilePath, root);
	return createTool({
		id: 'submit-workflow',
		description: baseTool.description ?? '',
		inputSchema: submitWorkflowInputSchema,
		outputSchema: submitWorkflowOutputSchema,
		execute: wrapSubmitExecuteWithIdentity(baseExecute, toResolvedPath),
	});
}

View file

@ -148,6 +148,45 @@ export const submitWorkflowInputSchema = z.object({
name: z.string().optional().describe('Workflow name (required for new workflows)'),
});
/**
 * Result shape returned by the submit-workflow tool. Exported so wrappers
 * (e.g. the identity-enforcing variant) can re-expose the identical schema.
 */
export const submitWorkflowOutputSchema = z.object({
	success: z.boolean(),
	workflowId: z.string().optional(),
	workflowName: z.string().optional(),
	/** Node names whose credentials were mocked via pinned data. */
	mockedNodeNames: z.array(z.string()).optional(),
	/** Credential types that were mocked (not resolved to real credentials). */
	mockedCredentialTypes: z.array(z.string()).optional(),
	/** Map of node name → credential types that were mocked on that node. */
	mockedCredentialsByNode: z.record(z.array(z.string())).optional(),
	/** Verification-only pin data — scoped to this build, never persisted to workflow. */
	verificationPinData: z.record(z.array(z.record(z.unknown()))).optional(),
	errors: z.array(z.string()).optional(),
	warnings: z.array(z.string()).optional(),
});

/** Input/output types inferred from the zod schemas above. */
export type SubmitWorkflowInput = z.infer<typeof submitWorkflowInputSchema>;
export type SubmitWorkflowOutput = z.infer<typeof submitWorkflowOutputSchema>;
/**
* Resolve a raw `filePath` tool argument into an absolute path under the sandbox root.
* Exported so identity wrappers can key state by the same resolved path the tool uses.
*/
/**
 * Resolve a raw `filePath` tool argument into an absolute path under the sandbox root.
 * Exported so identity wrappers can key state by the same resolved path the tool uses.
 *
 * Resolution rules:
 * - missing/empty → `<root>/src/workflow.ts` (the main workflow file)
 * - `~/…`         → expanded against the sandbox home (root minus a trailing `/workspace`)
 * - relative      → joined under the sandbox root
 * - absolute      → returned unchanged
 */
export function resolveSandboxWorkflowFilePath(
	rawFilePath: string | undefined,
	root: string,
): string {
	if (!rawFilePath) return `${root}/src/workflow.ts`;
	if (rawFilePath.startsWith('~/')) {
		const home = root.replace(/\/workspace$/, '');
		return `${home}/${rawFilePath.slice(2)}`;
	}
	return rawFilePath.startsWith('/') ? rawFilePath : `${root}/${rawFilePath}`;
}
export function createSubmitWorkflowTool(
context: InstanceAiContext,
workspace: Workspace,
@ -161,38 +200,15 @@ export function createSubmitWorkflowTool(
'and saves it to n8n as a draft. The workflow must be explicitly published via ' +
'publish-workflow before it will run on its triggers in production.',
inputSchema: submitWorkflowInputSchema,
outputSchema: z.object({
success: z.boolean(),
workflowId: z.string().optional(),
/** Node names whose credentials were mocked via pinned data. */
mockedNodeNames: z.array(z.string()).optional(),
/** Credential types that were mocked (not resolved to real credentials). */
mockedCredentialTypes: z.array(z.string()).optional(),
/** Map of node name → credential types that were mocked on that node. */
mockedCredentialsByNode: z.record(z.array(z.string())).optional(),
/** Verification-only pin data — scoped to this build, never persisted to workflow. */
verificationPinData: z.record(z.array(z.record(z.unknown()))).optional(),
errors: z.array(z.string()).optional(),
warnings: z.array(z.string()).optional(),
}),
outputSchema: submitWorkflowOutputSchema,
execute: async ({
filePath: rawFilePath,
workflowId,
projectId,
name,
}: z.infer<typeof submitWorkflowInputSchema>) => {
// Resolve file path: relative paths resolve against workspace root, ~ is expanded
}: SubmitWorkflowInput) => {
const root = await getWorkspaceRoot(workspace);
let filePath: string;
if (!rawFilePath) {
filePath = `${root}/src/workflow.ts`;
} else if (rawFilePath.startsWith('~/')) {
filePath = `${root.replace(/\/workspace$/, '')}/${rawFilePath.slice(2)}`;
} else if (!rawFilePath.startsWith('/')) {
filePath = `${root}/${rawFilePath}`;
} else {
filePath = rawFilePath;
}
const filePath = resolveSandboxWorkflowFilePath(rawFilePath, root);
const sourceHash = hashContent(await readFileViaSandbox(workspace, filePath));
const reportAttempt = async (

View file

@ -505,10 +505,10 @@ export class McpServer {
return MessageFormatter.formatToolResult(result);
} catch (error) {
this.logger.error(
`Error while executing Tool ${toolName}: ${error instanceof Error ? error.message : String(error)}`,
);
const errorObject = error instanceof Error ? error : new Error(String(error));
this.logger.error(`Error while executing Tool ${toolName}: ${errorObject.message}`, {
error: errorObject,
});
return MessageFormatter.formatError(errorObject);
}
});

View file

@ -23,13 +23,9 @@ export class MessageFormatter {
}
static formatError(error: Error): McpToolResult {
const errorDetails = [`${error.name}: ${error.message}`];
if (error.stack) {
errorDetails.push(error.stack);
}
return {
isError: true,
content: [{ type: 'text', text: errorDetails.join('\n') }],
content: [{ type: 'text', text: `${error.name}: ${error.message}` }],
};
}
}

View file

@ -169,12 +169,14 @@ describe('MessageFormatter', () => {
expect(result.content[0].text).toContain('CustomError: Custom error message');
});
it('should include stack trace when available', () => {
it('should not include stack trace in the response', () => {
const error = new Error('Test error');
error.stack =
'Error: Test error\n at Context.<anonymous> (test.ts:1:1)\n at /internal/path/node.js:100:5';
const result = MessageFormatter.formatError(error);
expect(result.content[0].text).toContain('Error: Test error');
expect(result.content[0].text).toContain('at ');
expect(result.content[0].text).toBe('Error: Test error');
expect(result.content[0].text).not.toContain('internal/path');
});
});
});

View file

@ -127,8 +127,13 @@ export class MemoryMongoDbChat implements INodeType {
);
}
const client = new MongoClient(connectionString, {
minPoolSize: 0,
maxPoolSize: 1,
maxIdleTimeMS: 30000,
});
try {
const client = new MongoClient(connectionString);
await client.connect();
const db = client.db(dbName);
@ -157,6 +162,7 @@ export class MemoryMongoDbChat implements INodeType {
response: logWrapper(memory, this),
};
} catch (error) {
void client.close();
throw new NodeOperationError(this.getNode(), `MongoDB connection error: ${error.message}`);
}
}

View file

@ -124,7 +124,7 @@ export class MicrosoftAgent365Trigger implements INodeType {
},
},
{
displayName: 'Enable Microsoft MCP Tools',
displayName: 'Enable Microsoft Work IQ Tools for A365',
name: 'useMcpTools',
type: 'boolean',
default: false,

View file

@ -274,6 +274,18 @@ export class CollaborationService {
return await this.state.getWriteLock(workflowId);
}
/**
* Throws if any user currently holds the write lock for the given workflow.
*/
async ensureWorkflowEditable(workflowId: Workflow['id']): Promise<void> {
const lock = await this.state.getWriteLock(workflowId);
if (lock) {
throw new LockedError(
'Cannot modify workflow while it is being edited by a user in the editor.',
);
}
}
/**
* Validates that if a write lock exists for a workflow, the requesting client holds it.
* Throws ConflictError (409) if same user but different tab holds the lock.

View file

@ -175,7 +175,7 @@ export abstract class BaseCommand<F = never> {
await Container.get(TelemetryEventRelay).init();
Container.get(WorkflowFailureNotificationEventRelay).init();
const { engine, poolSize, maxCodeCacheSize, bridgeTimeout, bridgeMemoryLimit } =
const { engine, poolSize, maxCodeCacheSize, bridgeTimeout, bridgeMemoryLimit, idleTimeout } =
this.globalConfig.expressionEngine;
await Expression.initExpressionEngine({
engine,
@ -183,6 +183,7 @@ export abstract class BaseCommand<F = never> {
maxCodeCacheSize,
bridgeTimeout,
bridgeMemoryLimit,
idleTimeoutMs: idleTimeout === undefined ? undefined : idleTimeout * 1000,
});
}

View file

@ -11,6 +11,7 @@ import jwt from 'jsonwebtoken';
import { AUTH_COOKIE_NAME } from '@/constants';
import { MeController } from '@/controllers/me.controller';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
@ -476,8 +477,8 @@ describe('MeController', () => {
mock({ email: user.email, firstName: 'John', lastName: 'Doe' }),
),
).rejects.toThrowError(
new BadRequestError(
'This account is managed via environment variables and cannot be updated here',
new ForbiddenError(
'This account is managed via environment variables and cannot be modified through the API',
),
);
});
@ -497,8 +498,8 @@ describe('MeController', () => {
mock({ currentPassword: 'old_password', newPassword: 'NewPassword123' }),
),
).rejects.toThrowError(
new BadRequestError(
'This account is managed via environment variables and cannot be updated here',
new ForbiddenError(
'This account is managed via environment variables and cannot be modified through the API',
),
);
});

View file

@ -14,6 +14,7 @@ import { Response } from 'express';
import { AuthService } from '@/auth/auth.service';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
@ -57,8 +58,8 @@ export class MeController {
} = req.user;
if (this.isUserManagedByEnv(req.user)) {
throw new BadRequestError(
'This account is managed via environment variables and cannot be updated here',
throw new ForbiddenError(
'This account is managed via environment variables and cannot be modified through the API',
);
}
@ -194,8 +195,8 @@ export class MeController {
const { currentPassword, newPassword, mfaCode } = payload;
if (this.isUserManagedByEnv(user)) {
throw new BadRequestError(
'This account is managed via environment variables and cannot be updated here',
throw new ForbiddenError(
'This account is managed via environment variables and cannot be modified through the API',
);
}

View file

@ -33,7 +33,7 @@ export class MFAController {
async enforceMFA(req: MFA.Enforce) {
if (this.instanceSettingsLoaderConfig.securityPolicyManagedByEnv) {
throw new ForbiddenError(
'MFA enforcement is managed via environment variables and cannot be updated through the API',
'MFA enforcement is managed via environment variables and cannot be modified through the API',
);
}

View file

@ -54,7 +54,7 @@ export class SecuritySettingsController {
) {
if (this.instanceSettingsLoaderConfig.securityPolicyManagedByEnv) {
throw new ForbiddenError(
'Security settings are managed via environment variables and cannot be updated through the API',
'Security settings are managed via environment variables and cannot be modified through the API',
);
}

View file

@ -0,0 +1,42 @@
import { mock } from 'jest-mock-extended';
import type { Logger } from '@n8n/backend-common';
import { InstanceSettingsLoaderService } from '../instance-settings-loader.service';
import type { OwnerInstanceSettingsLoader } from '../loaders/owner.instance-settings-loader';
import type { SecurityPolicyInstanceSettingsLoader } from '../loaders/security-policy.instance-settings-loader';
import type { SsoInstanceSettingsLoader } from '../loaders/sso.instance-settings-loader';
describe('InstanceSettingsLoaderService', () => {
const logger = mock<Logger>({ scoped: jest.fn().mockReturnThis() });
const ownerLoader = mock<OwnerInstanceSettingsLoader>();
const ssoLoader = mock<SsoInstanceSettingsLoader>();
const securityPolicyLoader = mock<SecurityPolicyInstanceSettingsLoader>();
beforeEach(() => {
jest.resetAllMocks();
logger.scoped.mockReturnThis();
ownerLoader.run.mockResolvedValue('skipped');
ssoLoader.run.mockResolvedValue('skipped');
securityPolicyLoader.run.mockResolvedValue('skipped');
});
const createService = () =>
new InstanceSettingsLoaderService(logger, ownerLoader, ssoLoader, securityPolicyLoader);
it('should run all loaders', async () => {
await createService().init();
expect(ownerLoader.run).toHaveBeenCalled();
expect(ssoLoader.run).toHaveBeenCalled();
expect(securityPolicyLoader.run).toHaveBeenCalled();
});
it('should stop execution if a loader throws', async () => {
ssoLoader.run.mockRejectedValue(new Error('sso failure'));
await expect(createService().init()).rejects.toThrow('sso failure');
expect(ownerLoader.run).toHaveBeenCalled();
expect(securityPolicyLoader.run).not.toHaveBeenCalled();
});
});

View file

@ -1,103 +0,0 @@
import { mock } from 'jest-mock-extended';
import type { Logger } from '@n8n/backend-common';
import type { InstanceSettingsLoaderConfig } from '@n8n/config';
import type { SettingsRepository } from '@n8n/db';
import type { Cipher } from 'n8n-core';
import { OidcInstanceSettingsLoader } from '../loaders/oidc.instance-settings-loader';
// Unit tests for OidcInstanceSettingsLoader, which applies OIDC SSO settings
// from N8N_SSO_OIDC_* environment variables to the settings table at startup.
describe('OidcInstanceSettingsLoader', () => {
	// scoped() returns the mock itself so the loader's scoped logger stays observable.
	const logger = mock<Logger>({ scoped: jest.fn().mockReturnThis() });
	const settingsRepository = mock<SettingsRepository>();
	const cipher = mock<Cipher>();

	// A fully valid env-derived config; individual tests override a single field
	// to exercise each validation rule in isolation.
	const validConfig: Partial<InstanceSettingsLoaderConfig> = {
		ssoManagedByEnv: true,
		oidcClientId: 'my-client-id',
		oidcClientSecret: 'my-client-secret',
		oidcDiscoveryEndpoint: 'https://idp.example.com/.well-known/openid-configuration',
		oidcLoginEnabled: false,
		oidcPrompt: 'select_account',
		oidcAcrValues: '',
		ssoUserRoleProvisioning: 'disabled',
	};

	// Builds a loader whose config starts from "everything off" defaults and
	// applies the given overrides on top.
	const createLoader = (configOverrides: Partial<InstanceSettingsLoaderConfig> = {}) => {
		const config = {
			ssoManagedByEnv: false,
			oidcClientId: '',
			oidcClientSecret: '',
			oidcDiscoveryEndpoint: '',
			oidcLoginEnabled: false,
			oidcPrompt: 'select_account',
			oidcAcrValues: '',
			ssoUserRoleProvisioning: 'disabled',
			...configOverrides,
		} as InstanceSettingsLoaderConfig;
		return new OidcInstanceSettingsLoader(config, settingsRepository, cipher, logger);
	};

	beforeEach(() => {
		jest.resetAllMocks();
		// resetAllMocks() wipes all stubs, so re-arm the ones every test relies on.
		logger.scoped.mockReturnThis();
		cipher.encrypt.mockReturnValue('encrypted-secret');
	});

	it('should skip when ssoManagedByEnv is false', async () => {
		const loader = createLoader({ ssoManagedByEnv: false });
		const result = await loader.run();
		expect(result).toBe('skipped');
		expect(settingsRepository.upsert).not.toHaveBeenCalled();
	});

	it('should throw when clientId is missing', async () => {
		const loader = createLoader({ ...validConfig, oidcClientId: '' });
		await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_CLIENT_ID is required');
	});

	it('should throw when clientSecret is missing', async () => {
		const loader = createLoader({ ...validConfig, oidcClientSecret: '' });
		await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_CLIENT_SECRET is required');
	});

	it('should throw when discoveryEndpoint is not a valid URL', async () => {
		const loader = createLoader({ ...validConfig, oidcDiscoveryEndpoint: 'not-a-url' });
		await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_DISCOVERY_ENDPOINT');
	});

	it('should throw when oidcPrompt is an invalid value', async () => {
		const loader = createLoader({ ...validConfig, oidcPrompt: 'invalid' });
		await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_PROMPT');
	});

	it('should throw when ssoUserRoleProvisioning is an invalid value', async () => {
		const loader = createLoader({ ...validConfig, ssoUserRoleProvisioning: 'invalid' });
		await expect(loader.run()).rejects.toThrow('N8N_SSO_USER_ROLE_PROVISIONING must be one of');
	});

	it('should handle messy ACR values with extra commas and whitespace', async () => {
		const loader = createLoader({ ...validConfig, oidcAcrValues: ',mfa,, phrh ,,' });
		await loader.run();
		// Inspect the JSON payload written to the settings table by the first upsert.
		const savedValue = JSON.parse(
			(settingsRepository.upsert.mock.calls[0][0] as { value: string }).value,
		);
		expect(savedValue.authenticationContextClassReference).toEqual(['mfa', 'phrh']);
	});

	describe('isConfiguredByEnv', () => {
		it('should return false when ssoManagedByEnv is false', () => {
			const loader = createLoader({ ssoManagedByEnv: false });
			expect(loader.isConfiguredByEnv()).toBe(false);
		});

		it('should return true when ssoManagedByEnv is true', () => {
			const loader = createLoader({ ssoManagedByEnv: true });
			expect(loader.isConfiguredByEnv()).toBe(true);
		});
	});
});

View file

@ -0,0 +1,399 @@
import { mock } from 'jest-mock-extended';
import type { Logger } from '@n8n/backend-common';
import type { InstanceSettingsLoaderConfig } from '@n8n/config';
import type { SettingsRepository } from '@n8n/db';
import type { Cipher } from 'n8n-core';
import { SsoInstanceSettingsLoader } from '../loaders/sso.instance-settings-loader';

// jest.mock() factories are hoisted above these declarations, so the factory
// must reference the jest.fn()s through indirection functions; tests can then
// stub and inspect the standalone mocks directly.
const mockSetCurrentAuthenticationMethod = jest.fn();
const mockGetCurrentAuthenticationMethod = jest.fn().mockReturnValue('email');
jest.mock('@/sso.ee/sso-helpers', () => ({
	setCurrentAuthenticationMethod: (...args: unknown[]) =>
		mockSetCurrentAuthenticationMethod(...args),
	getCurrentAuthenticationMethod: () => mockGetCurrentAuthenticationMethod(),
}));
// Unit tests for SsoInstanceSettingsLoader, which applies SAML/OIDC SSO and
// role-provisioning settings from environment variables at instance startup.
describe('SsoInstanceSettingsLoader', () => {
	// scoped() returns the mock itself so the loader's scoped logger stays observable.
	const logger = mock<Logger>({ scoped: jest.fn().mockReturnThis() });
	const settingsRepository = mock<SettingsRepository>();
	const cipher = mock<Cipher>();

	// Everything disabled/empty; the valid configs below layer on top of this.
	const baseConfig: Partial<InstanceSettingsLoaderConfig> = {
		ssoManagedByEnv: false,
		oidcClientId: '',
		oidcClientSecret: '',
		oidcDiscoveryEndpoint: '',
		oidcLoginEnabled: false,
		oidcPrompt: 'select_account',
		oidcAcrValues: '',
		samlMetadata: '',
		samlMetadataUrl: '',
		samlLoginEnabled: false,
		ssoUserRoleProvisioning: 'disabled',
	};

	const validSamlConfig: Partial<InstanceSettingsLoaderConfig> = {
		...baseConfig,
		ssoManagedByEnv: true,
		samlMetadata: '<xml>metadata</xml>',
		samlLoginEnabled: true,
	};

	const validOidcConfig: Partial<InstanceSettingsLoaderConfig> = {
		...baseConfig,
		ssoManagedByEnv: true,
		oidcClientId: 'my-client-id',
		oidcClientSecret: 'my-client-secret',
		oidcDiscoveryEndpoint: 'https://idp.example.com/.well-known/openid-configuration',
		oidcLoginEnabled: true,
	};

	const createLoader = (configOverrides: Partial<InstanceSettingsLoaderConfig> = {}) => {
		const config = { ...baseConfig, ...configOverrides } as InstanceSettingsLoaderConfig;
		return new SsoInstanceSettingsLoader(config, settingsRepository, cipher, logger);
	};

	// The loader writes SAML preferences via save() and OIDC/provisioning via
	// upsert(); these helpers find the call for a given settings key on the
	// corresponding mock.
	const getSaveCall = (key: string) =>
		settingsRepository.save.mock.calls.find(
			(call) => (call[0] as { key: string }).key === key,
		)?.[0] as { key: string; value: string } | undefined;

	const getUpsertCall = (key: string) =>
		settingsRepository.upsert.mock.calls.find(
			(call) => (call[0] as { key: string }).key === key,
		)?.[0] as { key: string; value: string } | undefined;

	beforeEach(() => {
		jest.resetAllMocks();
		// resetAllMocks() wipes all stubs, so re-arm the ones every test relies on.
		logger.scoped.mockReturnThis();
		cipher.encrypt.mockImplementation((v: string) => `encrypted:${v}`);
		mockGetCurrentAuthenticationMethod.mockReturnValue('email');
	});

	describe('ssoManagedByEnv gate', () => {
		it('should skip when ssoManagedByEnv is false', async () => {
			const loader = createLoader({ ssoManagedByEnv: false });
			const result = await loader.run();
			expect(result).toBe('skipped');
			expect(settingsRepository.save).not.toHaveBeenCalled();
			expect(settingsRepository.upsert).not.toHaveBeenCalled();
		});
	});

	describe('mutual exclusion', () => {
		it('should throw when both SAML and OIDC login are enabled', async () => {
			const loader = createLoader({
				...validSamlConfig,
				...validOidcConfig,
				samlLoginEnabled: true,
				oidcLoginEnabled: true,
			});
			await expect(loader.run()).rejects.toThrow(
				'N8N_SSO_SAML_LOGIN_ENABLED and N8N_SSO_OIDC_LOGIN_ENABLED cannot both be true',
			);
		});
	});

	describe('SAML config', () => {
		it('should throw when neither metadata nor metadataUrl is provided and loginEnabled is true', async () => {
			const loader = createLoader({
				...validSamlConfig,
				samlMetadata: '',
				samlMetadataUrl: '',
			});
			await expect(loader.run()).rejects.toThrow(
				'At least one of N8N_SSO_SAML_METADATA or N8N_SSO_SAML_METADATA_URL is required',
			);
		});

		it('should save SAML preferences when valid config with metadata is provided', async () => {
			const loader = createLoader(validSamlConfig);
			const result = await loader.run();
			expect(result).toBe('created');
			const saved = getSaveCall('features.saml');
			expect(saved).toBeDefined();
			expect(JSON.parse(saved!.value)).toEqual({
				metadata: '<xml>metadata</xml>',
				loginEnabled: true,
			});
		});

		it('should save SAML preferences when valid config with metadataUrl is provided', async () => {
			const loader = createLoader({
				...validSamlConfig,
				samlMetadata: '',
				samlMetadataUrl: 'https://idp.example.com/metadata',
			});
			await loader.run();
			const saved = JSON.parse(getSaveCall('features.saml')!.value);
			expect(saved.metadataUrl).toBe('https://idp.example.com/metadata');
			// Unset metadata fields must be omitted entirely, not written as ''.
			expect(saved.metadata).toBeUndefined();
		});

		it('should set authentication method to saml when SAML login is enabled', async () => {
			const loader = createLoader(validSamlConfig);
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('saml');
		});
	});

	describe('OIDC config', () => {
		it('should throw when clientId is missing and loginEnabled is true', async () => {
			const loader = createLoader({ ...validOidcConfig, oidcClientId: '' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_CLIENT_ID is required');
		});

		it('should throw when clientSecret is missing and loginEnabled is true', async () => {
			const loader = createLoader({ ...validOidcConfig, oidcClientSecret: '' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_CLIENT_SECRET is required');
		});

		it('should throw when discoveryEndpoint is not a valid URL and loginEnabled is true', async () => {
			const loader = createLoader({ ...validOidcConfig, oidcDiscoveryEndpoint: 'not-a-url' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_DISCOVERY_ENDPOINT');
		});

		it('should throw when oidcPrompt has an invalid value', async () => {
			const loader = createLoader({ ...validOidcConfig, oidcPrompt: 'invalid' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_OIDC_PROMPT');
		});

		it('should upsert OIDC preferences when valid config is provided', async () => {
			const loader = createLoader(validOidcConfig);
			const result = await loader.run();
			expect(result).toBe('created');
			const upserted = getUpsertCall('features.oidc');
			expect(upserted).toBeDefined();
			const parsed = JSON.parse(upserted!.value);
			expect(parsed.clientId).toBe('my-client-id');
			// The secret must be persisted through the cipher, never in plain text.
			expect(parsed.clientSecret).toBe('encrypted:my-client-secret');
			expect(parsed.loginEnabled).toBe(true);
		});

		it('should handle messy ACR values with extra commas and whitespace', async () => {
			const loader = createLoader({ ...validOidcConfig, oidcAcrValues: ',mfa,, phrh ,,' });
			await loader.run();
			const parsed = JSON.parse(getUpsertCall('features.oidc')!.value);
			expect(parsed.authenticationContextClassReference).toEqual(['mfa', 'phrh']);
		});

		it('should set authentication method to oidc when OIDC login is enabled', async () => {
			const loader = createLoader(validOidcConfig);
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('oidc');
		});
	});

	describe('provisioning', () => {
		it('should throw when ssoUserRoleProvisioning has an invalid value with SAML enabled', async () => {
			const loader = createLoader({ ...validSamlConfig, ssoUserRoleProvisioning: 'invalid' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_USER_ROLE_PROVISIONING must be one of');
		});

		it('should throw when ssoUserRoleProvisioning has an invalid value with OIDC enabled', async () => {
			const loader = createLoader({ ...validOidcConfig, ssoUserRoleProvisioning: 'invalid' });
			await expect(loader.run()).rejects.toThrow('N8N_SSO_USER_ROLE_PROVISIONING must be one of');
		});

		it('should write disabled provisioning config when SAML is enabled', async () => {
			const loader = createLoader(validSamlConfig);
			await loader.run();
			expect(settingsRepository.upsert).toHaveBeenCalledWith(
				expect.objectContaining({
					key: 'features.provisioning',
					value: JSON.stringify({
						scopesProvisionInstanceRole: false,
						scopesProvisionProjectRoles: false,
					}),
				}),
				{ conflictPaths: ['key'] },
			);
		});

		it('should write instance_role provisioning config', async () => {
			const loader = createLoader({ ...validSamlConfig, ssoUserRoleProvisioning: 'instance_role' });
			await loader.run();
			expect(settingsRepository.upsert).toHaveBeenCalledWith(
				expect.objectContaining({
					key: 'features.provisioning',
					value: JSON.stringify({
						scopesProvisionInstanceRole: true,
						scopesProvisionProjectRoles: false,
					}),
				}),
				{ conflictPaths: ['key'] },
			);
		});

		it('should write instance_and_project_roles provisioning config', async () => {
			const loader = createLoader({
				...validOidcConfig,
				ssoUserRoleProvisioning: 'instance_and_project_roles',
			});
			await loader.run();
			expect(settingsRepository.upsert).toHaveBeenCalledWith(
				expect.objectContaining({
					key: 'features.provisioning',
					value: JSON.stringify({
						scopesProvisionInstanceRole: true,
						scopesProvisionProjectRoles: true,
					}),
				}),
				{ conflictPaths: ['key'] },
			);
		});

		it('should not write provisioning config when neither protocol is enabled', async () => {
			const loader = createLoader({ ssoManagedByEnv: true });
			await loader.run();
			expect(
				settingsRepository.upsert.mock.calls.find(
					(call) => (call[0] as { key: string }).key === 'features.provisioning',
				),
			).toBeUndefined();
		});
	});

	describe('disabled config', () => {
		it('should write loginEnabled=false for both protocols when no env vars are set', async () => {
			const loader = createLoader({ ssoManagedByEnv: true });
			const result = await loader.run();
			expect(result).toBe('created');
			expect(getSaveCall('features.saml')?.value).toBe(JSON.stringify({ loginEnabled: false }));
			expect(getUpsertCall('features.oidc')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});

		it('should ignore SAML env vars and write loginEnabled=false when SAML is not enabled', async () => {
			const loader = createLoader({
				...validSamlConfig,
				samlLoginEnabled: false,
			});
			const result = await loader.run();
			expect(result).toBe('created');
			expect(getSaveCall('features.saml')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});

		it('should ignore OIDC env vars and write loginEnabled=false when OIDC is not enabled', async () => {
			const loader = createLoader({
				...validOidcConfig,
				oidcLoginEnabled: false,
			});
			const result = await loader.run();
			expect(result).toBe('created');
			expect(getUpsertCall('features.oidc')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});

		// Validation must not run for a disabled protocol, even with invalid values set.
		it('should ignore invalid OIDC env vars and write loginEnabled=false when OIDC is not enabled', async () => {
			const loader = createLoader({
				ssoManagedByEnv: true,
				oidcClientId: 'some-id',
				oidcDiscoveryEndpoint: 'not-a-url',
			});
			const result = await loader.run();
			expect(result).toBe('created');
			expect(getUpsertCall('features.oidc')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});
	});

	describe('auth method sync', () => {
		it('should reset auth method to email when current is saml and neither protocol is enabled', async () => {
			mockGetCurrentAuthenticationMethod.mockReturnValue('saml');
			const loader = createLoader({ ssoManagedByEnv: true });
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('email');
		});

		it('should reset auth method to email when current is oidc and neither protocol is enabled', async () => {
			mockGetCurrentAuthenticationMethod.mockReturnValue('oidc');
			const loader = createLoader({ ssoManagedByEnv: true });
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('email');
		});

		it('should not change auth method when current is email and neither protocol is enabled', async () => {
			mockGetCurrentAuthenticationMethod.mockReturnValue('email');
			const loader = createLoader({ ssoManagedByEnv: true });
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).not.toHaveBeenCalled();
		});

		it('should set auth method to saml (not email) when switching from oidc to saml', async () => {
			mockGetCurrentAuthenticationMethod.mockReturnValue('oidc');
			const loader = createLoader(validSamlConfig);
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('saml');
			expect(mockSetCurrentAuthenticationMethod).not.toHaveBeenCalledWith('email');
		});

		it('should set auth method to oidc (not email) when switching from saml to oidc', async () => {
			mockGetCurrentAuthenticationMethod.mockReturnValue('saml');
			const loader = createLoader(validOidcConfig);
			await loader.run();
			expect(mockSetCurrentAuthenticationMethod).toHaveBeenCalledWith('oidc');
			expect(mockSetCurrentAuthenticationMethod).not.toHaveBeenCalledWith('email');
		});
	});

	describe('cross-protocol state', () => {
		it('should write OIDC loginEnabled=false when SAML is enabled and OIDC has no env vars', async () => {
			const loader = createLoader(validSamlConfig);
			await loader.run();
			expect(getUpsertCall('features.oidc')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});

		it('should write SAML loginEnabled=false when OIDC is enabled and SAML has no env vars', async () => {
			const loader = createLoader(validOidcConfig);
			await loader.run();
			expect(getSaveCall('features.saml')?.value).toBe(JSON.stringify({ loginEnabled: false }));
		});
	});
});

View file

@ -1,9 +1,9 @@
import { Logger } from '@n8n/backend-common';
import { Service } from '@n8n/di';
import { OidcInstanceSettingsLoader } from './loaders/oidc.instance-settings-loader';
import { OwnerInstanceSettingsLoader } from './loaders/owner.instance-settings-loader';
import { SecurityPolicyInstanceSettingsLoader } from './loaders/security-policy.instance-settings-loader';
import { SsoInstanceSettingsLoader } from './loaders/sso.instance-settings-loader';
type LoaderResult = 'created' | 'skipped';
@ -12,7 +12,7 @@ export class InstanceSettingsLoaderService {
constructor(
private logger: Logger,
private readonly ownerLoader: OwnerInstanceSettingsLoader,
private readonly oidcLoader: OidcInstanceSettingsLoader,
private readonly ssoLoader: SsoInstanceSettingsLoader,
private readonly securityPolicyLoader: SecurityPolicyInstanceSettingsLoader,
) {
this.logger = this.logger.scoped('instance-settings-loader');
@ -20,7 +20,7 @@ export class InstanceSettingsLoaderService {
async init(): Promise<void> {
await this.run('owner', async () => await this.ownerLoader.run());
await this.run('oidc', async () => await this.oidcLoader.run());
await this.run('sso', async () => await this.ssoLoader.run());
await this.run('security-policy', async () => await this.securityPolicyLoader.run());
}

View file

@ -1,126 +0,0 @@
import { OIDC_PROMPT_VALUES } from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { InstanceSettingsLoaderConfig } from '@n8n/config';
import { SettingsRepository } from '@n8n/db';
import { Service } from '@n8n/di';
import { Cipher } from 'n8n-core';
import { z } from 'zod';
import { InstanceBootstrappingError } from '../instance-bootstrapping.error';
import { OIDC_PREFERENCES_DB_KEY } from '@/modules/sso-oidc/constants';
import { PROVISIONING_PREFERENCES_DB_KEY } from '@/modules/provisioning.ee/constants';
const PROVISIONING_MODES = ['disabled', 'instance_role', 'instance_and_project_roles'] as const;
const ssoEnvSchema = z
.object({
oidcClientId: z
.string()
.min(1, 'N8N_SSO_OIDC_CLIENT_ID is required when configuring OIDC via environment variables'),
oidcClientSecret: z
.string()
.min(
1,
'N8N_SSO_OIDC_CLIENT_SECRET is required when configuring OIDC via environment variables',
),
oidcDiscoveryEndpoint: z.string().url('N8N_SSO_OIDC_DISCOVERY_ENDPOINT must be a valid URL'),
oidcLoginEnabled: z.boolean(),
oidcPrompt: z.enum(OIDC_PROMPT_VALUES, {
errorMap: () => ({
message: `N8N_SSO_OIDC_PROMPT must be one of: ${OIDC_PROMPT_VALUES.join(', ')}`,
}),
}),
oidcAcrValues: z.string(),
ssoUserRoleProvisioning: z.enum(PROVISIONING_MODES, {
errorMap: () => ({
message: `N8N_SSO_USER_ROLE_PROVISIONING must be one of: ${PROVISIONING_MODES.join(', ')}`,
}),
}),
})
.transform((input) => ({
oidc: {
clientId: input.oidcClientId,
clientSecret: input.oidcClientSecret,
discoveryEndpoint: input.oidcDiscoveryEndpoint,
loginEnabled: input.oidcLoginEnabled,
prompt: input.oidcPrompt,
authenticationContextClassReference: input.oidcAcrValues
? input.oidcAcrValues
.split(',')
.map((v) => v.trim())
.filter(Boolean)
: [],
},
provisioning: {
scopesProvisionInstanceRole:
input.ssoUserRoleProvisioning === 'instance_role' ||
input.ssoUserRoleProvisioning === 'instance_and_project_roles',
scopesProvisionProjectRoles: input.ssoUserRoleProvisioning === 'instance_and_project_roles',
},
}));
@Service()
export class OidcInstanceSettingsLoader {
constructor(
private readonly instanceSettingsLoaderConfig: InstanceSettingsLoaderConfig,
private readonly settingsRepository: SettingsRepository,
private readonly cipher: Cipher,
private logger: Logger,
) {
this.logger = this.logger.scoped('instance-settings-loader');
}
isConfiguredByEnv(): boolean {
return this.instanceSettingsLoaderConfig.ssoManagedByEnv;
}
async run(): Promise<'created' | 'skipped'> {
const { ssoManagedByEnv, oidcClientId, oidcClientSecret, oidcDiscoveryEndpoint } =
this.instanceSettingsLoaderConfig;
if (!ssoManagedByEnv) {
if (oidcClientId || oidcClientSecret || oidcDiscoveryEndpoint) {
this.logger.warn(
'N8N_SSO_OIDC_* env vars are set but N8N_SSO_MANAGED_BY_ENV is not enabled — ignoring SSO env vars',
);
}
return 'skipped';
}
this.logger.info('N8N_SSO_MANAGED_BY_ENV is enabled — applying OIDC SSO env vars');
const result = ssoEnvSchema.safeParse(this.instanceSettingsLoaderConfig);
if (!result.success) {
throw new InstanceBootstrappingError(result.error.issues[0].message);
}
const { oidc, provisioning } = result.data;
await this.settingsRepository.upsert(
{
key: OIDC_PREFERENCES_DB_KEY,
value: JSON.stringify({
...oidc,
clientSecret: this.cipher.encrypt(oidc.clientSecret),
}),
loadOnStartup: true,
},
{ conflictPaths: ['key'] },
);
await this.settingsRepository.upsert(
{
key: PROVISIONING_PREFERENCES_DB_KEY,
value: JSON.stringify(provisioning),
loadOnStartup: true,
},
{ conflictPaths: ['key'] },
);
this.logger.debug('OIDC configuration applied from environment variables');
return 'created';
}
}

View file

@ -0,0 +1,227 @@
import { OIDC_PROMPT_VALUES } from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { InstanceSettingsLoaderConfig } from '@n8n/config';
import { SettingsRepository } from '@n8n/db';
import { Service } from '@n8n/di';
import { Cipher } from 'n8n-core';
import { OperationalError } from 'n8n-workflow';
import { z } from 'zod';
import { PROVISIONING_PREFERENCES_DB_KEY } from '@/modules/provisioning.ee/constants';
import { OIDC_PREFERENCES_DB_KEY } from '@/modules/sso-oidc/constants';
import { SAML_PREFERENCES_DB_KEY } from '@/modules/sso-saml/constants';
import {
getCurrentAuthenticationMethod,
setCurrentAuthenticationMethod,
} from '@/sso.ee/sso-helpers';
// Allowed values for N8N_SSO_USER_ROLE_PROVISIONING.
const PROVISIONING_MODES = ['disabled', 'instance_role', 'instance_and_project_roles'] as const;

// Maps the single provisioning-mode env var onto the two boolean flags that
// are persisted under the provisioning preferences settings key.
const provisioningSchema = z
	.object({
		ssoUserRoleProvisioning: z.enum(PROVISIONING_MODES, {
			errorMap: () => ({
				message: `N8N_SSO_USER_ROLE_PROVISIONING must be one of: ${PROVISIONING_MODES.join(', ')}`,
			}),
		}),
	})
	.transform((input) => ({
		scopesProvisionInstanceRole:
			input.ssoUserRoleProvisioning === 'instance_role' ||
			input.ssoUserRoleProvisioning === 'instance_and_project_roles',
		scopesProvisionProjectRoles: input.ssoUserRoleProvisioning === 'instance_and_project_roles',
	}));

// SAML env vars: at least one metadata source is required; the transform emits
// only the metadata keys that are actually set (never empty strings).
const samlEnvSchema = z
	.object({
		samlMetadata: z.string(),
		samlMetadataUrl: z.string(),
		samlLoginEnabled: z.boolean(),
	})
	.refine((data) => data.samlMetadata || data.samlMetadataUrl, {
		message:
			'At least one of N8N_SSO_SAML_METADATA or N8N_SSO_SAML_METADATA_URL is required when configuring SAML via environment variables',
	})
	.transform(({ samlMetadata, samlMetadataUrl, samlLoginEnabled }) => ({
		...(samlMetadata ? { metadata: samlMetadata } : {}),
		...(samlMetadataUrl ? { metadataUrl: samlMetadataUrl } : {}),
		loginEnabled: samlLoginEnabled,
	}));

// OIDC env vars: client id/secret and a valid discovery URL are required.
// ACR values arrive as one comma-separated string and are normalized to a
// trimmed array with empty entries dropped.
const oidcEnvSchema = z
	.object({
		oidcClientId: z
			.string()
			.min(1, 'N8N_SSO_OIDC_CLIENT_ID is required when configuring OIDC via environment variables'),
		oidcClientSecret: z
			.string()
			.min(
				1,
				'N8N_SSO_OIDC_CLIENT_SECRET is required when configuring OIDC via environment variables',
			),
		oidcDiscoveryEndpoint: z.string().url('N8N_SSO_OIDC_DISCOVERY_ENDPOINT must be a valid URL'),
		oidcLoginEnabled: z.boolean(),
		oidcPrompt: z.enum(OIDC_PROMPT_VALUES, {
			errorMap: () => ({
				message: `N8N_SSO_OIDC_PROMPT must be one of: ${OIDC_PROMPT_VALUES.join(', ')}`,
			}),
		}),
		oidcAcrValues: z.string(),
	})
	.transform((input) => ({
		clientId: input.oidcClientId,
		clientSecret: input.oidcClientSecret,
		discoveryEndpoint: input.oidcDiscoveryEndpoint,
		loginEnabled: input.oidcLoginEnabled,
		prompt: input.oidcPrompt,
		authenticationContextClassReference: input.oidcAcrValues
			? input.oidcAcrValues
					.split(',')
					.map((v) => v.trim())
					.filter(Boolean)
			: [],
	}));
/**
 * Applies SAML/OIDC SSO and role-provisioning settings from N8N_SSO_* env
 * vars to the settings table at startup, when N8N_SSO_MANAGED_BY_ENV is set.
 */
@Service()
export class SsoInstanceSettingsLoader {
	constructor(
		private readonly config: InstanceSettingsLoaderConfig,
		private readonly settingsRepository: SettingsRepository,
		private readonly cipher: Cipher,
		private logger: Logger,
	) {
		this.logger = this.logger.scoped('instance-settings-loader');
	}

	/**
	 * Entry point. Returns 'skipped' when SSO is not managed via env vars;
	 * otherwise validates and persists SAML, OIDC, and provisioning state,
	 * syncs the active authentication method, and returns 'created'.
	 * @throws OperationalError when both protocols are enabled at once, or
	 *   when the enabled protocol's env vars fail schema validation.
	 */
	async run(): Promise<'created' | 'skipped'> {
		if (!this.config.ssoManagedByEnv) {
			this.logger.debug('ssoManagedByEnv is disabled — skipping SSO config');
			return 'skipped';
		}
		const { samlLoginEnabled, oidcLoginEnabled } = this.config;
		// Only one SSO protocol may act as the login method at a time.
		if (samlLoginEnabled && oidcLoginEnabled) {
			throw new OperationalError(
				'N8N_SSO_SAML_LOGIN_ENABLED and N8N_SSO_OIDC_LOGIN_ENABLED cannot both be true. Only one SSO protocol can be enabled at a time.',
			);
		}
		// Provisioning flags are written only while some SSO protocol is active.
		if (samlLoginEnabled || oidcLoginEnabled) {
			await this.writeProvisioning();
		}
		// Both protocols always get written: the disabled one is persisted as
		// { loginEnabled: false }, clearing any previously-enabled state.
		await this.applySamlConfig();
		await this.applyOidcConfig();
		await this.syncAuthMethod();
		return 'created';
	}

	// Validates and persists SAML preferences, or marks SAML login disabled.
	// SAML env vars are not validated at all when SAML login is off.
	private async applySamlConfig(): Promise<void> {
		if (!this.config.samlLoginEnabled) {
			await this.writeSamlLoginDisabled();
			return;
		}
		this.logger.info('SAML login is enabled — applying SAML SSO env vars');
		const parsed = samlEnvSchema.safeParse(this.config);
		if (!parsed.success) {
			// Surface only the first issue; one actionable message per startup failure.
			throw new OperationalError(parsed.error.issues[0].message);
		}
		await this.writeSamlPreferences(parsed.data);
	}

	// Validates and persists OIDC preferences, or marks OIDC login disabled.
	// OIDC env vars are not validated at all when OIDC login is off.
	private async applyOidcConfig(): Promise<void> {
		if (!this.config.oidcLoginEnabled) {
			await this.writeOidcLoginDisabled();
			return;
		}
		this.logger.info('OIDC login is enabled — applying OIDC SSO env vars');
		const parsed = oidcEnvSchema.safeParse(this.config);
		if (!parsed.success) {
			throw new OperationalError(parsed.error.issues[0].message);
		}
		await this.writeOidcPreferences(parsed.data);
	}

	// Aligns the instance's current authentication method with the enabled
	// protocol; falls back to 'email' only when an SSO method was active but
	// neither protocol is enabled anymore (never clobbers e.g. 'ldap').
	private async syncAuthMethod(): Promise<void> {
		const { samlLoginEnabled, oidcLoginEnabled } = this.config;
		if (samlLoginEnabled) {
			await setCurrentAuthenticationMethod('saml');
			return;
		}
		if (oidcLoginEnabled) {
			await setCurrentAuthenticationMethod('oidc');
			return;
		}
		const current = getCurrentAuthenticationMethod();
		if (current === 'saml' || current === 'oidc') {
			await setCurrentAuthenticationMethod('email');
		}
	}

	private async writeSamlPreferences(preferences: Record<string, unknown>): Promise<void> {
		await this.settingsRepository.save({
			key: SAML_PREFERENCES_DB_KEY,
			value: JSON.stringify(preferences),
			loadOnStartup: true,
		});
	}

	private async writeSamlLoginDisabled(): Promise<void> {
		await this.settingsRepository.save({
			key: SAML_PREFERENCES_DB_KEY,
			value: JSON.stringify({ loginEnabled: false }),
			loadOnStartup: true,
		});
	}

	// Persists OIDC preferences; the client secret is encrypted before it is
	// written so it never lands in the settings table in plain text.
	private async writeOidcPreferences(preferences: {
		clientSecret: string;
		[key: string]: unknown;
	}): Promise<void> {
		await this.settingsRepository.upsert(
			{
				key: OIDC_PREFERENCES_DB_KEY,
				value: JSON.stringify({
					...preferences,
					clientSecret: this.cipher.encrypt(preferences.clientSecret),
				}),
				loadOnStartup: true,
			},
			{ conflictPaths: ['key'] },
		);
	}

	private async writeOidcLoginDisabled(): Promise<void> {
		await this.settingsRepository.upsert(
			{
				key: OIDC_PREFERENCES_DB_KEY,
				value: JSON.stringify({ loginEnabled: false }),
				loadOnStartup: true,
			},
			{ conflictPaths: ['key'] },
		);
	}

	// Validates the provisioning mode env var and persists the derived flags.
	private async writeProvisioning(): Promise<void> {
		const parsed = provisioningSchema.safeParse(this.config);
		if (!parsed.success) {
			throw new OperationalError(parsed.error.issues[0].message);
		}
		await this.settingsRepository.upsert(
			{
				key: PROVISIONING_PREFERENCES_DB_KEY,
				value: JSON.stringify(parsed.data),
				loadOnStartup: true,
			},
			{ conflictPaths: ['key'] },
		);
	}
}

View file

@ -0,0 +1,64 @@
import { mockInstance, testDb } from '@n8n/backend-test-utils';
import { DeploymentKeyRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { InstanceSettings } from 'n8n-core';
import { EncryptionBootstrapService } from '../encryption-bootstrap.service';
beforeAll(async () => {
	// Pin the instance encryption key so the seeded row's value is predictable.
	mockInstance(InstanceSettings, {
		encryptionKey: 'legacy-encryption-key',
		n8nFolder: '/tmp/n8n-test',
	});
	await testDb.init();
});

beforeEach(async () => {
	// Each test starts from an empty deployment-key table.
	await testDb.truncate(['DeploymentKey']);
});

afterAll(async () => {
	await testDb.terminate();
});

// Exercises EncryptionBootstrapService against a real database to verify
// seeding, idempotency, and behavior under concurrent runs.
describe('EncryptionBootstrapService (integration)', () => {
	it('seeds the legacy CBC key as active when the table is empty', async () => {
		await Container.get(EncryptionBootstrapService).run();
		const rows = await Container.get(DeploymentKeyRepository).find({
			where: { type: 'data_encryption' },
		});
		expect(rows).toHaveLength(1);
		expect(rows[0]).toMatchObject({
			type: 'data_encryption',
			value: 'legacy-encryption-key',
			algorithm: 'aes-256-cbc',
			status: 'active',
		});
	});

	it('is a no-op on a second run', async () => {
		const service = Container.get(EncryptionBootstrapService);
		const repository = Container.get(DeploymentKeyRepository);
		await service.run();
		const [firstRow] = await repository.find({ where: { type: 'data_encryption' } });
		await service.run();
		const rows = await repository.find({ where: { type: 'data_encryption' } });
		expect(rows).toHaveLength(1);
		// Same id: the second run must not replace the seeded row.
		expect(rows[0].id).toBe(firstRow.id);
	});

	it('produces exactly one active row under concurrent runs', async () => {
		const service = Container.get(EncryptionBootstrapService);
		await Promise.all([service.run(), service.run(), service.run()]);
		const rows = await Container.get(DeploymentKeyRepository).find({
			where: { type: 'data_encryption', status: 'active' },
		});
		expect(rows).toHaveLength(1);
	});
});

View file

@ -0,0 +1,27 @@
import { mockLogger } from '@n8n/backend-test-utils';
import type { MockProxy } from 'jest-mock-extended';
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
import { EncryptionBootstrapService } from '../encryption-bootstrap.service';
import type { KeyManagerService } from '../key-manager.service';
// Unit tests for EncryptionBootstrapService, a thin startup wrapper around
// KeyManagerService.bootstrapLegacyKey.
describe('EncryptionBootstrapService', () => {
	let keyManager: MockProxy<KeyManagerService>;
	let instanceSettings: MockProxy<InstanceSettings>;

	beforeEach(() => {
		jest.clearAllMocks();
		// Fresh mocks per test keep call histories isolated.
		keyManager = mock<KeyManagerService>();
		instanceSettings = mock<InstanceSettings>({ encryptionKey: 'legacy-key' });
	});

	const createService = () =>
		new EncryptionBootstrapService(keyManager, instanceSettings, mockLogger());

	it('delegates to KeyManagerService.bootstrapLegacyKey with the instance encryption key', async () => {
		await createService().run();
		expect(keyManager.bootstrapLegacyKey).toHaveBeenCalledWith('legacy-key');
	});
});

View file

@ -90,6 +90,30 @@ describe('KeyManagerService', () => {
});
});
describe('bootstrapLegacyKey()', () => {
	// With an active key already present, bootstrap must not insert anything.
	it('is a no-op when an active key already exists', async () => {
		repository.findActiveByType.mockResolvedValue(makeKey());
		await Container.get(KeyManagerService).bootstrapLegacyKey('legacy-value');
		expect(repository.findActiveByType).toHaveBeenCalledWith('data_encryption');
		expect(repository.insertOrIgnore).not.toHaveBeenCalled();
	});

	it('inserts the legacy CBC key when no active key exists', async () => {
		repository.findActiveByType.mockResolvedValue(null);
		await Container.get(KeyManagerService).bootstrapLegacyKey('legacy-value');
		expect(repository.insertOrIgnore).toHaveBeenCalledWith({
			type: 'data_encryption',
			value: 'legacy-value',
			status: 'active',
			algorithm: 'aes-256-cbc',
		});
	});
});
describe('addKey()', () => {
it('inserts as inactive when setAsActive is not set', async () => {
const saved = makeKey({ id: 'new-key', status: 'inactive' });

View file

@ -0,0 +1,21 @@
import { Logger } from '@n8n/backend-common';
import { Service } from '@n8n/di';
import { InstanceSettings } from 'n8n-core';
import { KeyManagerService } from './key-manager.service';
/**
 * Startup service that seeds the instance's legacy encryption key into the
 * key-manager store. Safe to run on every boot: seeding is a no-op once an
 * active encryption key already exists.
 */
@Service()
export class EncryptionBootstrapService {
	constructor(
		private readonly keyManager: KeyManagerService,
		private readonly instanceSettings: InstanceSettings,
		private readonly logger: Logger,
	) {
		this.logger = this.logger.scoped('encryption-key-manager');
	}

	/** Delegates the legacy-key seeding to the key manager and logs completion. */
	async run(): Promise<void> {
		const { encryptionKey } = this.instanceSettings;
		await this.keyManager.bootstrapLegacyKey(encryptionKey);
		this.logger.debug('Encryption key bootstrap complete');
	}
}

View file

@ -1,9 +1,13 @@
import type { ModuleInterface } from '@n8n/decorators';
import { BackendModule } from '@n8n/decorators';
import { Container } from '@n8n/di';
@BackendModule({ name: 'encryption-key-manager', instanceTypes: ['main'] })
export class EncryptionKeyManagerModule implements ModuleInterface {
	// NOTE(review): dynamic imports presumably defer loading these services
	// (and their @Service registration) until the module is initialized —
	// confirm against the module-loading conventions of other BackendModules.
	async init() {
		await import('./key-manager.service');
		const { EncryptionBootstrapService } = await import('./encryption-bootstrap.service');
		await Container.get(EncryptionBootstrapService).run();
	}
}

View file

@ -42,6 +42,23 @@ export class KeyManagerService {
return { id: key.id, value: key.value, algorithm: key.algorithm! };
}
/**
* Seeds the legacy aes-256-cbc key as active if no active encryption key exists.
* Race-safe across concurrent mains: the DB's partial unique index serializes
* insert attempts, and losers are silently ignored.
*/
async bootstrapLegacyKey(value: string): Promise<void> {
const existing = await this.deploymentKeyRepository.findActiveByType('data_encryption');
if (existing) return;
await this.deploymentKeyRepository.insertOrIgnore({
type: 'data_encryption',
value,
status: 'active',
algorithm: 'aes-256-cbc',
});
}
/** Inserts a new encryption key. If setAsActive, atomically deactivates the current key first. */
async addKey(value: string, algorithm: string, setAsActive = false): Promise<{ id: string }> {
if (!setAsActive) {

View file

@ -1,9 +1,10 @@
import type { InstanceAiConfig } from '@n8n/config';
import type { SettingsRepository, User } from '@n8n/db';
import type { SettingsRepository, User, UserRepository } from '@n8n/db';
import { mock } from 'jest-mock-extended';
import { UnprocessableRequestError } from '@/errors/response-errors/unprocessable.error';
import type { AiService } from '@/services/ai.service';
import type { UserService } from '@/services/user.service';
import type { CredentialsFinderService } from '@/credentials/credentials-finder.service';
import type { CredentialsService } from '@/credentials/credentials.service';
@ -33,6 +34,8 @@ describe('InstanceAiSettingsService', () => {
deployment: { type: 'default' },
});
const settingsRepository = mock<SettingsRepository>();
const userRepository = mock<UserRepository>();
const userService = mock<UserService>();
const aiService = mock<AiService>();
const credentialsService = mock<CredentialsService>();
const credentialsFinderService = mock<CredentialsFinderService>();
@ -44,6 +47,8 @@ describe('InstanceAiSettingsService', () => {
service = new InstanceAiSettingsService(
globalConfig as never,
settingsRepository,
userRepository,
userService,
aiService,
credentialsService,
credentialsFinderService,
@ -112,11 +117,28 @@ describe('InstanceAiSettingsService', () => {
it('should allow non-proxy-managed fields when proxy is enabled', async () => {
aiService.isProxyEnabled.mockReturnValue(true);
settingsRepository.upsert.mockResolvedValue(undefined as never);
await expect(
service.updateUserPreferences(user, { localGatewayDisabled: true }),
).resolves.toBeDefined();
expect(userService.updateSettings).toHaveBeenCalledWith('user-1', {
instanceAi: { localGatewayDisabled: true },
});
});
it('should merge new fields with existing instanceAi settings on update', async () => {
aiService.isProxyEnabled.mockReturnValue(false);
const existingUser = mock<User>({
id: 'user-2',
settings: { instanceAi: { credentialId: 'cred-old', modelName: 'gpt-3.5' } },
});
await service.updateUserPreferences(existingUser, { modelName: 'gpt-4' });
expect(userService.updateSettings).toHaveBeenCalledWith('user-2', {
instanceAi: { credentialId: 'cred-old', modelName: 'gpt-4' },
});
});
});
@ -195,11 +217,13 @@ describe('InstanceAiSettingsService', () => {
});
it('should allow localGatewayDisabled on cloud', async () => {
settingsRepository.upsert.mockResolvedValue(undefined as never);
await expect(
service.updateUserPreferences(user, { localGatewayDisabled: true }),
).resolves.toBeDefined();
expect(userService.updateSettings).toHaveBeenCalledWith('user-1', {
instanceAi: { localGatewayDisabled: true },
});
});
});
});

View file

@ -1,8 +1,4 @@
import { GlobalConfig } from '@n8n/config';
import type { InstanceAiConfig, DeploymentConfig } from '@n8n/config';
import { SettingsRepository } from '@n8n/db';
import type { User } from '@n8n/db';
import { Service } from '@n8n/di';
import { DEFAULT_INSTANCE_AI_PERMISSIONS } from '@n8n/api-types';
import type {
InstanceAiAdminSettingsResponse,
InstanceAiAdminSettingsUpdateRequest,
@ -11,17 +7,24 @@ import type {
InstanceAiModelCredential,
InstanceAiPermissions,
} from '@n8n/api-types';
import { DEFAULT_INSTANCE_AI_PERMISSIONS } from '@n8n/api-types';
import { GlobalConfig } from '@n8n/config';
import type { InstanceAiConfig, DeploymentConfig } from '@n8n/config';
import { SettingsRepository, UserRepository } from '@n8n/db';
import type { User } from '@n8n/db';
import { Service } from '@n8n/di';
import type { ModelConfig } from '@n8n/instance-ai';
import type { IUserSettings } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import { AiService } from '@/services/ai.service';
import { CredentialsFinderService } from '@/credentials/credentials-finder.service';
import { CredentialsService } from '@/credentials/credentials.service';
import { UnprocessableRequestError } from '@/errors/response-errors/unprocessable.error';
import { AiService } from '@/services/ai.service';
import { UserService } from '@/services/user.service';
const ADMIN_SETTINGS_KEY = 'instanceAi.settings';
const USER_PREFERENCES_KEY_PREFIX = 'instanceAi.preferences.';
type UserInstanceAiPreferences = NonNullable<IUserSettings['instanceAi']>;
/** Credential types we support and their Mastra provider mapping. */
const CREDENTIAL_TO_MASTRA_PROVIDER: Record<string, string> = {
@ -77,13 +80,6 @@ interface PersistedAdminSettings {
optinModalDismissed?: boolean;
}
/** Per-user preferences stored under USER_PREFERENCES_KEY_PREFIX + userId. */
interface PersistedUserPreferences {
credentialId?: string | null;
modelName?: string;
localGatewayDisabled?: boolean;
}
@Service()
export class InstanceAiSettingsService {
private readonly config: InstanceAiConfig;
@ -105,12 +101,11 @@ export class InstanceAiSettingsService {
private optinModalDismissed: boolean = false;
/** In-memory cache of per-user preferences keyed by userId. */
private readonly userPreferences = new Map<string, PersistedUserPreferences>();
constructor(
globalConfig: GlobalConfig,
private readonly settingsRepository: SettingsRepository,
private readonly userRepository: UserRepository,
private readonly userService: UserService,
private readonly aiService: AiService,
private readonly credentialsService: CredentialsService,
private readonly credentialsFinderService: CredentialsFinderService,
@ -213,7 +208,7 @@ export class InstanceAiSettingsService {
// ── User preferences ──────────────────────────────────────────────────
async getUserPreferences(user: User): Promise<InstanceAiUserPreferencesResponse> {
const prefs = await this.loadUserPreferences(user.id);
const prefs = this.readUserPreferences(user);
const credentialId = prefs.credentialId ?? null;
let credentialType: string | null = null;
@ -255,13 +250,13 @@ export class InstanceAiSettingsService {
'proxy',
);
}
const prefs = await this.loadUserPreferences(user.id);
const prefs: UserInstanceAiPreferences = { ...this.readUserPreferences(user) };
if (update.credentialId !== undefined) prefs.credentialId = update.credentialId;
if (update.modelName !== undefined) prefs.modelName = update.modelName;
if (update.localGatewayDisabled !== undefined)
prefs.localGatewayDisabled = update.localGatewayDisabled;
this.userPreferences.set(user.id, prefs);
await this.persistUserPreferences(user.id, prefs);
await this.userService.updateSettings(user.id, { instanceAi: prefs });
user.settings = { ...(user.settings ?? {}), instanceAi: prefs };
return await this.getUserPreferences(user);
}
@ -394,8 +389,9 @@ export class InstanceAiSettingsService {
async isLocalGatewayDisabledForUser(userId: string): Promise<boolean> {
if (!this.enabled) return true;
if (this.config.localGatewayDisabled) return true;
const prefs = await this.loadUserPreferences(userId);
return prefs?.localGatewayDisabled ?? false;
const user = await this.userRepository.findOneBy({ id: userId });
if (!user) return true;
return this.readUserPreferences(user).localGatewayDisabled ?? false;
}
/** Whether the n8n Agent is enabled by the admin. */
@ -415,13 +411,13 @@ export class InstanceAiSettingsService {
/** Resolve just the model name (e.g. 'claude-sonnet-4-20250514') for proxy routing. */
async resolveModelName(user: User): Promise<string> {
const prefs = await this.loadUserPreferences(user.id);
const prefs = this.readUserPreferences(user);
return prefs.modelName || this.extractModelName(this.config.model);
}
/** Resolve the current model configuration for an agent run. */
async resolveModelConfig(user: User): Promise<ModelConfig> {
const prefs = await this.loadUserPreferences(user.id);
const prefs = this.readUserPreferences(user);
const credentialId = prefs.credentialId ?? null;
if (!credentialId) {
@ -565,18 +561,8 @@ export class InstanceAiSettingsService {
this.optinModalDismissed = persisted.optinModalDismissed;
}
private async loadUserPreferences(userId: string): Promise<PersistedUserPreferences> {
const cached = this.userPreferences.get(userId);
if (cached) return { ...cached };
const row = await this.settingsRepository.findByKey(`${USER_PREFERENCES_KEY_PREFIX}${userId}`);
if (row) {
const prefs = jsonParse<PersistedUserPreferences>(row.value, { fallbackValue: {} });
this.userPreferences.set(userId, prefs);
return { ...prefs };
}
return {};
/** Reads the user's instance-AI preferences from their settings, defaulting to an empty object. */
private readUserPreferences(user: User): UserInstanceAiPreferences {
	const stored = user.settings?.instanceAi;
	return stored ?? {};
}
private async persistAdminSettings(): Promise<void> {
@ -606,18 +592,4 @@ export class InstanceAiSettingsService {
['key'],
);
}
private async persistUserPreferences(
userId: string,
prefs: PersistedUserPreferences,
): Promise<void> {
await this.settingsRepository.upsert(
{
key: `${USER_PREFERENCES_KEY_PREFIX}${userId}`,
value: JSON.stringify(prefs),
loadOnStartup: false,
},
['key'],
);
}
}

View file

@ -943,6 +943,10 @@ export class InstanceAiService {
/** Registers the user's gateway capabilities and emits a connect telemetry event. */
initGateway(userId: string, data: InstanceAiGatewayCapabilities): void {
	this.gatewayRegistry.initGateway(userId, data);
	// Only the names of tool categories the user has enabled are reported.
	const enabledToolGroups = data.toolCategories
		.filter((category) => category.enabled)
		.map((category) => category.name);
	this.telemetry.track('User connected to Computer Use', {
		user_id: userId,
		tool_groups: enabledToolGroups,
	});
}
resolveGatewayRequest(
@ -2457,14 +2461,14 @@ export class InstanceAiService {
// Skip if thread already has an LLM-refined title
if (thread.metadata?.titleRefined) return;
// Get first user message
// Concat all recalled user messages so retries after a trivial first message
// (e.g. "hey") have enough signal to produce a good title.
const result = await memory.recall({ threadId, resourceId: userId, perPage: 5 });
const firstUserMsg = result.messages.find((m) => m.role === 'user');
if (!firstUserMsg) return;
const userText =
typeof firstUserMsg.content === 'string'
? firstUserMsg.content
: JSON.stringify(firstUserMsg.content);
const userTexts = result.messages
.filter((m) => m.role === 'user')
.map((m) => (typeof m.content === 'string' ? m.content : JSON.stringify(m.content)));
if (userTexts.length === 0) return;
const userText = userTexts.join('\n');
const llmTitle = await generateTitleForRun(modelId, userText);
if (!llmTitle) return;

View file

@ -1,9 +1,11 @@
import { mockInstance } from '@n8n/backend-test-utils';
import { User } from '@n8n/db';
import { User, WorkflowEntity } from '@n8n/db';
import { createArchiveWorkflowTool } from '../tools/workflow-builder/delete-workflow.tool';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { Telemetry } from '@/telemetry';
import { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import { WorkflowService } from '@/workflows/workflow.service';
jest.mock('@n8n/ai-workflow-builder', () => ({
@ -25,19 +27,44 @@ const parseResult = (result: { content: Array<{ type: string; text?: string }> }
describe('archive-workflow MCP tool', () => {
const user = Object.assign(new User(), { id: 'user-1' });
let workflowFinderService: WorkflowFinderService;
let workflowService: WorkflowService;
let telemetry: Telemetry;
let collaborationService: CollaborationService;
const mockExistingWorkflow = Object.assign(new WorkflowEntity(), {
id: 'wf-1',
name: 'My Workflow',
nodes: [],
connections: {},
isArchived: false,
settings: { availableInMCP: true },
});
beforeEach(() => {
jest.clearAllMocks();
workflowFinderService = mockInstance(WorkflowFinderService, {
findWorkflowForUser: jest.fn().mockResolvedValue(mockExistingWorkflow),
});
workflowService = mockInstance(WorkflowService);
telemetry = mockInstance(Telemetry, {
track: jest.fn(),
});
collaborationService = mockInstance(CollaborationService, {
ensureWorkflowEditable: jest.fn().mockResolvedValue(undefined),
broadcastWorkflowUpdate: jest.fn().mockResolvedValue(undefined),
});
});
const createTool = () => createArchiveWorkflowTool(user, workflowService, telemetry);
const createTool = () =>
createArchiveWorkflowTool(
user,
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
describe('smoke tests', () => {
test('creates tool with correct name and destructiveHint=true', () => {
@ -72,10 +99,43 @@ describe('archive-workflow MCP tool', () => {
expect(response.workflowId).toBe('wf-1');
expect(response.name).toBe('My Workflow');
expect(result.isError).toBeUndefined();
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
test('returns error when workflow has active write lock', async () => {
(collaborationService.ensureWorkflowEditable as jest.Mock).mockRejectedValue(
new Error('Cannot modify workflow while it is being edited by a user in the editor.'),
);
const tool = createTool();
const result = await tool.handler({ workflowId: 'wf-1' }, {} as never);
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain('being edited by a user');
expect(workflowService.archive).not.toHaveBeenCalled();
});
test('succeeds even when broadcastWorkflowUpdate rejects', async () => {
(workflowService.archive as jest.Mock).mockResolvedValue({
id: 'wf-1',
name: 'My Workflow',
});
(collaborationService.broadcastWorkflowUpdate as jest.Mock).mockRejectedValue(
new Error('Cache unavailable'),
);
const tool = createTool();
const result = await tool.handler({ workflowId: 'wf-1' }, {} as never);
const response = parseResult(result);
expect(response.archived).toBe(true);
expect(result.isError).toBeUndefined();
});
test('returns error when workflow not found or no permission to archive', async () => {
(workflowService.archive as jest.Mock).mockResolvedValue(null);
(workflowFinderService.findWorkflowForUser as jest.Mock).mockResolvedValue(null);
const tool = createTool();
const result = await tool.handler({ workflowId: 'wf-missing' }, {} as never);
@ -83,7 +143,7 @@ describe('archive-workflow MCP tool', () => {
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain('not found or');
expect(response.error).toContain('permission to archive');
expect(response.error).toContain('permission to access');
});
test('returns error when service throws', async () => {

View file

@ -151,7 +151,7 @@ describe('McpOAuthConsentService', () => {
clientId: 'client-123',
userId: 'user-123',
});
expect(userConsentRepository.insert).not.toHaveBeenCalled();
expect(userConsentRepository.upsert).not.toHaveBeenCalled();
});
it('should handle user approval and generate authorization code', async () => {
@ -166,18 +166,21 @@ describe('McpOAuthConsentService', () => {
const authCode = 'generated-auth-code';
oauthSessionService.verifySession.mockReturnValue(sessionPayload);
userConsentRepository.insert.mockResolvedValue(mock());
userConsentRepository.upsert.mockResolvedValue(mock());
authorizationCodeService.createAuthorizationCode.mockResolvedValue(authCode);
const result = await service.handleConsentDecision(sessionToken, userId, true);
expect(result.redirectUrl).toContain('code=generated-auth-code');
expect(result.redirectUrl).toContain('state=state-xyz');
expect(userConsentRepository.insert).toHaveBeenCalledWith({
userId: 'user-123',
clientId: 'client-123',
grantedAt: expect.any(Number),
});
expect(userConsentRepository.upsert).toHaveBeenCalledWith(
{
userId: 'user-123',
clientId: 'client-123',
grantedAt: expect.any(Number),
},
['userId', 'clientId'],
);
expect(authorizationCodeService.createAuthorizationCode).toHaveBeenCalledWith(
'client-123',
'user-123',
@ -203,7 +206,7 @@ describe('McpOAuthConsentService', () => {
const authCode = 'generated-auth-code';
oauthSessionService.verifySession.mockReturnValue(sessionPayload);
userConsentRepository.insert.mockResolvedValue(mock());
userConsentRepository.upsert.mockResolvedValue(mock());
authorizationCodeService.createAuthorizationCode.mockResolvedValue(authCode);
const result = await service.handleConsentDecision(sessionToken, userId, true);
@ -212,6 +215,37 @@ describe('McpOAuthConsentService', () => {
expect(result.redirectUrl).not.toContain('state=');
});
it('should handle re-authorization for existing consent by upserting', async () => {
const sessionToken = 'valid-session-token';
const userId = 'user-123';
const sessionPayload = {
clientId: 'client-123',
redirectUri: 'https://example.com/callback',
codeChallenge: 'challenge-abc',
state: 'state-xyz',
};
const authCode = 'generated-auth-code';
oauthSessionService.verifySession.mockReturnValue(sessionPayload);
userConsentRepository.upsert.mockResolvedValue(mock());
authorizationCodeService.createAuthorizationCode.mockResolvedValue(authCode);
// First authorization
await service.handleConsentDecision(sessionToken, userId, true);
// Re-authorization with same userId + clientId should not throw
await service.handleConsentDecision(sessionToken, userId, true);
expect(userConsentRepository.upsert).toHaveBeenCalledTimes(2);
expect(userConsentRepository.upsert).toHaveBeenCalledWith(
{
userId: 'user-123',
clientId: 'client-123',
grantedAt: expect.any(Number),
},
['userId', 'clientId'],
);
});
it('should throw error when session verification fails', async () => {
const sessionToken = 'invalid-session-token';
const userId = 'user-123';

View file

@ -16,6 +16,7 @@ import { McpService } from '../mcp.service';
import { WorkflowBuilderToolsService } from '../tools/workflow-builder/workflow-builder-tools.service';
import { ActiveExecutions } from '@/active-executions';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { CredentialsService } from '@/credentials/credentials.service';
import { DataTableProxyService } from '@/modules/data-table/data-table-proxy.service';
import { NodeTypes } from '@/node-types';
@ -71,6 +72,7 @@ describe('McpService', () => {
mockInstance(ExecutionRepository),
mockInstance(ExecutionService),
mockInstance(DataTableProxyService),
mockInstance(CollaborationService),
);
});
@ -110,6 +112,7 @@ describe('McpService', () => {
mockInstance(ExecutionRepository),
mockInstance(ExecutionService),
mockInstance(DataTableProxyService),
mockInstance(CollaborationService),
);
expect(queueMcpService.isQueueMode).toBe(true);
@ -314,6 +317,7 @@ describe('McpService', () => {
mockInstance(ExecutionRepository),
mockInstance(ExecutionService),
mockInstance(DataTableProxyService),
mockInstance(CollaborationService),
);
const server = await service.getServer(user);
@ -355,6 +359,7 @@ describe('McpService', () => {
mockInstance(ExecutionRepository),
mockInstance(ExecutionService),
mockInstance(DataTableProxyService),
mockInstance(CollaborationService),
);
const server = await service.getServer(user);

View file

@ -5,6 +5,7 @@ import { v4 as uuid } from 'uuid';
import { createWorkflow } from './mock.utils';
import { createPublishWorkflowTool } from '../tools/publish-workflow.tool';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { Telemetry } from '@/telemetry';
import { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import { WorkflowService } from '@/workflows/workflow.service';
@ -14,6 +15,7 @@ describe('publish-workflow MCP tool', () => {
let workflowFinderService: WorkflowFinderService;
let workflowService: WorkflowService;
let telemetry: Telemetry;
let collaborationService: CollaborationService;
beforeEach(() => {
workflowFinderService = mockInstance(WorkflowFinderService);
@ -21,6 +23,10 @@ describe('publish-workflow MCP tool', () => {
telemetry = mockInstance(Telemetry, {
track: jest.fn(),
});
collaborationService = mockInstance(CollaborationService, {
ensureWorkflowEditable: jest.fn().mockResolvedValue(undefined),
broadcastWorkflowUpdate: jest.fn().mockResolvedValue(undefined),
});
});
describe('smoke tests', () => {
@ -30,6 +36,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
expect(tool.name).toBe('publish_workflow');
@ -52,6 +59,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
@ -68,6 +76,35 @@ describe('publish-workflow MCP tool', () => {
});
});
describe('write lock', () => {
test('returns error when workflow has active write lock', async () => {
const workflow = createWorkflow({ settings: { availableInMCP: true } });
(workflowFinderService.findWorkflowForUser as jest.Mock).mockResolvedValue(workflow);
(collaborationService.ensureWorkflowEditable as jest.Mock).mockRejectedValue(
new Error('Cannot modify workflow while it is being edited by a user in the editor.'),
);
const tool = createPublishWorkflowTool(
user,
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
{ workflowId: 'wf-1', versionId: undefined },
{} as Parameters<typeof tool.handler>[1],
);
expect(result.structuredContent).toMatchObject({
success: false,
error: expect.stringContaining('being edited by a user'),
});
expect(workflowService.activateWorkflow).not.toHaveBeenCalled();
});
});
describe('successful publish', () => {
test('publishes workflow successfully', async () => {
const workflow = createWorkflow({ settings: { availableInMCP: true } });
@ -82,6 +119,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
@ -99,6 +137,8 @@ describe('publish-workflow MCP tool', () => {
versionId: undefined,
source: 'n8n-mcp',
});
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
test('publishes specific version when versionId provided', async () => {
@ -114,6 +154,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
@ -148,6 +189,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
await tool.handler(
@ -180,6 +222,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
await tool.handler(
@ -216,6 +259,7 @@ describe('publish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(

View file

@ -1,6 +1,7 @@
import { mockInstance } from '@n8n/backend-test-utils';
import { User } from '@n8n/db';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { Telemetry } from '@/telemetry';
import { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import { WorkflowService } from '@/workflows/workflow.service';
@ -13,6 +14,7 @@ describe('unpublish-workflow MCP tool', () => {
let workflowFinderService: WorkflowFinderService;
let workflowService: WorkflowService;
let telemetry: Telemetry;
let collaborationService: CollaborationService;
beforeEach(() => {
workflowFinderService = mockInstance(WorkflowFinderService);
@ -20,6 +22,10 @@ describe('unpublish-workflow MCP tool', () => {
telemetry = mockInstance(Telemetry, {
track: jest.fn(),
});
collaborationService = mockInstance(CollaborationService, {
ensureWorkflowEditable: jest.fn().mockResolvedValue(undefined),
broadcastWorkflowUpdate: jest.fn().mockResolvedValue(undefined),
});
});
describe('smoke tests', () => {
@ -29,6 +35,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
expect(tool.name).toBe('unpublish_workflow');
@ -51,6 +58,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
@ -66,6 +74,35 @@ describe('unpublish-workflow MCP tool', () => {
});
});
describe('write lock', () => {
test('returns error when workflow has active write lock', async () => {
const workflow = createWorkflow({ settings: { availableInMCP: true } });
(workflowFinderService.findWorkflowForUser as jest.Mock).mockResolvedValue(workflow);
(collaborationService.ensureWorkflowEditable as jest.Mock).mockRejectedValue(
new Error('Cannot modify workflow while it is being edited by a user in the editor.'),
);
const tool = createUnpublishWorkflowTool(
user,
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
{ workflowId: 'wf-1' },
{} as Parameters<typeof tool.handler>[1],
);
expect(result.structuredContent).toMatchObject({
success: false,
error: expect.stringContaining('being edited by a user'),
});
expect(workflowService.deactivateWorkflow).not.toHaveBeenCalled();
});
});
describe('successful unpublish', () => {
test('unpublishes workflow successfully', async () => {
const workflow = createWorkflow({ settings: { availableInMCP: true } });
@ -79,6 +116,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(
@ -94,6 +132,8 @@ describe('unpublish-workflow MCP tool', () => {
expect(workflowService.deactivateWorkflow).toHaveBeenCalledWith(user, 'wf-1', {
source: 'n8n-mcp',
});
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
});
@ -110,6 +150,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
await tool.handler({ workflowId: 'wf-1' }, {} as Parameters<typeof tool.handler>[1]);
@ -138,6 +179,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
await tool.handler(
@ -174,6 +216,7 @@ describe('unpublish-workflow MCP tool', () => {
workflowFinderService,
workflowService,
telemetry,
collaborationService,
);
const result = await tool.handler(

View file

@ -4,6 +4,7 @@ import type { INode } from 'n8n-workflow';
import { createUpdateWorkflowTool } from '../tools/workflow-builder/update-workflow.tool';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { CredentialsService } from '@/credentials/credentials.service';
import { NodeTypes } from '@/node-types';
import { UrlService } from '@/services/url.service';
@ -87,6 +88,7 @@ describe('update-workflow MCP tool', () => {
let credentialsService: CredentialsService;
let sharedWorkflowRepository: SharedWorkflowRepository;
let nodeTypes: ReturnType<typeof mockInstance<NodeTypes>>;
let collaborationService: CollaborationService;
const mockExistingWorkflow = Object.assign(new WorkflowEntity(), {
id: 'wf-1',
@ -121,6 +123,10 @@ describe('update-workflow MCP tool', () => {
findOneOrFail: jest.fn().mockResolvedValue({ projectId: 'project-1' }),
});
nodeTypes = mockInstance(NodeTypes);
collaborationService = mockInstance(CollaborationService, {
ensureWorkflowEditable: jest.fn().mockResolvedValue(undefined),
broadcastWorkflowUpdate: jest.fn().mockResolvedValue(undefined),
});
mockParseAndValidate.mockImplementation(async () => ({
workflow: { ...mockWorkflowJson, nodes: mockNodes.map((n) => ({ ...n })) },
@ -139,6 +145,7 @@ describe('update-workflow MCP tool', () => {
nodeTypes,
credentialsService,
sharedWorkflowRepository,
collaborationService,
);
// Helper to call handler with proper typing (optional fields default to undefined)
@ -182,6 +189,19 @@ describe('update-workflow MCP tool', () => {
});
describe('handler tests', () => {
test('returns error when workflow has active write lock', async () => {
(collaborationService.ensureWorkflowEditable as jest.Mock).mockRejectedValue(
new Error('Cannot modify workflow while it is being edited by a user in the editor.'),
);
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
const response = parseResult(result);
expect(result.isError).toBe(true);
expect(response.error).toContain('being edited by a user');
expect(workflowService.update).not.toHaveBeenCalled();
});
test('successfully updates workflow and returns expected response', async () => {
const result = await callHandler({ workflowId: 'wf-1', code: 'const wf = ...' });
@ -192,6 +212,8 @@ describe('update-workflow MCP tool', () => {
expect(response.url).toBe('https://n8n.example.com/workflow/wf-1');
expect(response.autoAssignedCredentials).toEqual([]);
expect(result.isError).toBeUndefined();
expect(collaborationService.broadcastWorkflowUpdate).toHaveBeenCalledWith('wf-1', user.id);
});
test('sets correct workflow entity defaults', async () => {

View file

@ -83,11 +83,14 @@ export class McpOAuthConsentService {
return { redirectUrl };
}
await this.userConsentRepository.insert({
userId,
clientId: sessionPayload.clientId,
grantedAt: Date.now(),
});
await this.userConsentRepository.upsert(
{
userId,
clientId: sessionPayload.clientId,
grantedAt: Date.now(),
},
['userId', 'clientId'],
);
const code = await this.authorizationCodeService.createAuthorizationCode(
sessionPayload.clientId,

View file

@ -47,6 +47,7 @@ import { createValidateWorkflowCodeTool } from './tools/workflow-builder/validat
import { WorkflowBuilderToolsService } from './tools/workflow-builder/workflow-builder-tools.service';
import { ActiveExecutions } from '@/active-executions';
import { CollaborationService } from '@/collaboration/collaboration.service';
import { CredentialsService } from '@/credentials/credentials.service';
import { DataTableProxyService } from '@/modules/data-table/data-table-proxy.service';
import { NodeTypes } from '@/node-types';
@ -102,6 +103,7 @@ export class McpService {
private readonly executionRepository: ExecutionRepository,
private readonly executionService: ExecutionService,
private readonly dataTableProxyService: DataTableProxyService,
private readonly collaborationService: CollaborationService,
) {}
async getServer(user: User) {
@ -174,6 +176,7 @@ export class McpService {
this.workflowFinderService,
this.workflowService,
this.telemetry,
this.collaborationService,
);
server.registerTool(
publishWorkflowTool.name,
@ -186,6 +189,7 @@ export class McpService {
this.workflowFinderService,
this.workflowService,
this.telemetry,
this.collaborationService,
);
server.registerTool(
unpublishWorkflowTool.name,
@ -352,7 +356,13 @@ export class McpService {
searchFoldersTool.handler,
);
const archiveTool = createArchiveWorkflowTool(user, this.workflowService, this.telemetry);
const archiveTool = createArchiveWorkflowTool(
user,
this.workflowFinderService,
this.workflowService,
this.telemetry,
this.collaborationService,
);
server.registerTool(archiveTool.name, archiveTool.config, archiveTool.handler);
const updateTool = createUpdateWorkflowTool(
@ -364,6 +374,7 @@ export class McpService {
this.nodeTypes,
this.credentialsService,
this.sharedWorkflowRepository,
this.collaborationService,
);
server.registerTool(updateTool.name, updateTool.config, updateTool.handler);

View file

@ -7,6 +7,7 @@ import { WorkflowAccessError } from '../mcp.errors';
import type { ToolDefinition, UserCalledMCPToolEventPayload } from '../mcp.types';
import { getMcpWorkflow } from './workflow-validation.utils';
import type { CollaborationService } from '@/collaboration/collaboration.service';
import type { Telemetry } from '@/telemetry';
import type { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import type { WorkflowService } from '@/workflows/workflow.service';
@ -40,6 +41,7 @@ export const createPublishWorkflowTool = (
workflowFinderService: WorkflowFinderService,
workflowService: WorkflowService,
telemetry: Telemetry,
collaborationService: CollaborationService,
): ToolDefinition<typeof inputSchema.shape> => ({
name: 'publish_workflow',
config: {
@ -65,11 +67,15 @@ export const createPublishWorkflowTool = (
try {
await getMcpWorkflow(workflowId, user, ['workflow:publish'], workflowFinderService);
await collaborationService.ensureWorkflowEditable(workflowId);
const activatedWorkflow = await workflowService.activateWorkflow(user, workflowId, {
versionId,
source: 'n8n-mcp',
});
void collaborationService.broadcastWorkflowUpdate(workflowId, user.id).catch(() => {});
const output: PublishWorkflowOutput = {
success: true,
workflowId: activatedWorkflow.id,

View file

@ -7,6 +7,7 @@ import { WorkflowAccessError } from '../mcp.errors';
import type { ToolDefinition, UserCalledMCPToolEventPayload } from '../mcp.types';
import { getMcpWorkflow } from './workflow-validation.utils';
import type { CollaborationService } from '@/collaboration/collaboration.service';
import type { Telemetry } from '@/telemetry';
import type { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import type { WorkflowService } from '@/workflows/workflow.service';
@ -32,6 +33,7 @@ export const createUnpublishWorkflowTool = (
workflowFinderService: WorkflowFinderService,
workflowService: WorkflowService,
telemetry: Telemetry,
collaborationService: CollaborationService,
): ToolDefinition<typeof inputSchema.shape> => ({
name: 'unpublish_workflow',
config: {
@ -57,10 +59,14 @@ export const createUnpublishWorkflowTool = (
try {
await getMcpWorkflow(workflowId, user, ['workflow:unpublish'], workflowFinderService);
await collaborationService.ensureWorkflowEditable(workflowId);
await workflowService.deactivateWorkflow(user, workflowId, {
source: 'n8n-mcp',
});
void collaborationService.broadcastWorkflowUpdate(workflowId, user.id).catch(() => {});
const output: UnpublishWorkflowOutput = {
success: true,
workflowId,

View file

@ -5,9 +5,13 @@ import { USER_CALLED_MCP_TOOL_EVENT } from '../../mcp.constants';
import type { ToolDefinition, UserCalledMCPToolEventPayload } from '../../mcp.types';
import { MCP_ARCHIVE_WORKFLOW_TOOL } from './constants';
import type { CollaborationService } from '@/collaboration/collaboration.service';
import type { Telemetry } from '@/telemetry';
import type { WorkflowFinderService } from '@/workflows/workflow-finder.service';
import type { WorkflowService } from '@/workflows/workflow.service';
import { getMcpWorkflow } from '../workflow-validation.utils';
const inputSchema = {
workflowId: z.string().describe('The ID of the workflow to archive'),
} satisfies z.ZodRawShape;
@ -23,8 +27,10 @@ const outputSchema = {
*/
export const createArchiveWorkflowTool = (
user: User,
workflowFinderService: WorkflowFinderService,
workflowService: WorkflowService,
telemetry: Telemetry,
collaborationService: CollaborationService,
): ToolDefinition<typeof inputSchema> => ({
name: MCP_ARCHIVE_WORKFLOW_TOOL.toolName,
config: {
@ -47,12 +53,18 @@ export const createArchiveWorkflowTool = (
};
try {
await getMcpWorkflow(workflowId, user, ['workflow:delete'], workflowFinderService);
await collaborationService.ensureWorkflowEditable(workflowId);
const workflow = await workflowService.archive(user, workflowId, { skipArchived: true });
if (!workflow) {
throw new Error("Workflow not found or you don't have permission to archive it.");
}
void collaborationService.broadcastWorkflowUpdate(workflowId, user.id).catch(() => {});
telemetryPayload.results = {
success: true,
data: { workflowId },

View file

@ -7,6 +7,7 @@ import type { ToolDefinition, UserCalledMCPToolEventPayload } from '../../mcp.ty
import { CODE_BUILDER_VALIDATE_TOOL, MCP_UPDATE_WORKFLOW_TOOL } from './constants';
import { autoPopulateNodeCredentials, stripNullCredentialStubs } from './credentials-auto-assign';
import type { CollaborationService } from '@/collaboration/collaboration.service';
import type { CredentialsService } from '@/credentials/credentials.service';
import type { NodeTypes } from '@/node-types';
import type { UrlService } from '@/services/url.service';
@ -79,6 +80,7 @@ export const createUpdateWorkflowTool = (
nodeTypes: NodeTypes,
credentialsService: CredentialsService,
sharedWorkflowRepository: SharedWorkflowRepository,
collaborationService: CollaborationService,
): ToolDefinition<typeof inputSchema> => ({
name: MCP_UPDATE_WORKFLOW_TOOL.toolName,
config: {
@ -119,6 +121,8 @@ export const createUpdateWorkflowTool = (
workflowFinderService,
);
await collaborationService.ensureWorkflowEditable(existingWorkflow.id);
const { ParseValidateHandler, stripImportStatements } = await import(
'@n8n/ai-workflow-builder'
);
@ -177,6 +181,8 @@ export const createUpdateWorkflowTool = (
source: 'n8n-mcp',
});
void collaborationService.broadcastWorkflowUpdate(workflowId, user.id).catch(() => {});
const baseUrl = urlService.getInstanceBaseUrl();
const workflowUrl = `${baseUrl}/workflow/${updatedWorkflow.id}`;

View file

@ -1,21 +1,23 @@
import type { LicenseState } from '@n8n/backend-common';
import type { InstanceSettingsLoaderConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { ProvisioningController } from '../provisioning.controller.ee';
import { type ProvisioningService } from '@/modules/provisioning.ee/provisioning.service.ee';
import type { OidcInstanceSettingsLoader } from '@/instance-settings-loader/loaders/oidc.instance-settings-loader';
import { type Response } from 'express';
import { type AuthenticatedRequest } from '@n8n/db';
import { type ProvisioningConfigDto } from '@n8n/api-types';
const provisioningService = mock<ProvisioningService>();
const licenseState = mock<LicenseState>();
const oidcSettingsLoader = mock<OidcInstanceSettingsLoader>();
const instanceSettingsLoaderConfig = mock<InstanceSettingsLoaderConfig>({
ssoManagedByEnv: false,
});
const controller = new ProvisioningController(
provisioningService,
licenseState,
oidcSettingsLoader,
instanceSettingsLoaderConfig,
);
describe('ProvisioningController', () => {
@ -70,6 +72,21 @@ describe('ProvisioningController', () => {
expect(res.status).toHaveBeenCalledWith(403);
});
it('should reject writes when managed by env', async () => {
const envManagedConfig = mock<InstanceSettingsLoaderConfig>({ ssoManagedByEnv: true });
const envManagedController = new ProvisioningController(
provisioningService,
licenseState,
envManagedConfig,
);
licenseState.isProvisioningLicensed.mockReturnValue(true);
await expect(envManagedController.patchConfig(req, res)).rejects.toThrow(
'cannot be modified through the API',
);
});
it('should patch the provisioning config', async () => {
const configResponse: ProvisioningConfigDto = {
scopesProvisionInstanceRole: false,

View file

@ -1,19 +1,19 @@
import { LicenseState } from '@n8n/backend-common';
import { InstanceSettingsLoaderConfig } from '@n8n/config';
import { AuthenticatedRequest } from '@n8n/db';
import { Get, GlobalScope, Patch, RestController } from '@n8n/decorators';
import { LicenseState } from '@n8n/backend-common';
import { Response } from 'express';
import { OidcInstanceSettingsLoader } from '@/instance-settings-loader/loaders/oidc.instance-settings-loader';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { ProvisioningService } from './provisioning.service.ee';
import { Response } from 'express';
@RestController('/sso/provisioning')
export class ProvisioningController {
constructor(
private readonly provisioningService: ProvisioningService,
private readonly licenseState: LicenseState,
private readonly oidcSettingsLoader: OidcInstanceSettingsLoader,
private readonly instanceSettingsLoaderConfig: InstanceSettingsLoaderConfig,
) {}
@Get('/config')
@ -33,9 +33,9 @@ export class ProvisioningController {
return res.status(403).json({ message: 'Provisioning is not licensed' });
}
if (this.oidcSettingsLoader.isConfiguredByEnv()) {
throw new BadRequestError(
'Provisioning configuration is managed via environment variables and cannot be modified through the UI',
if (this.instanceSettingsLoaderConfig.ssoManagedByEnv) {
throw new ForbiddenError(
'Provisioning configuration is managed via environment variables and cannot be modified through the API',
);
}

View file

@ -1,5 +1,5 @@
import type { Logger } from '@n8n/backend-common';
import type { GlobalConfig } from '@n8n/config';
import type { GlobalConfig, InstanceSettingsLoaderConfig } from '@n8n/config';
import { Time } from '@n8n/constants';
import { GLOBAL_MEMBER_ROLE, type AuthenticatedRequest, type User } from '@n8n/db';
import { type Request, type Response } from 'express';
@ -7,7 +7,6 @@ import { mock } from 'jest-mock-extended';
import type { AuthService } from '@/auth/auth.service';
import { OIDC_NONCE_COOKIE_NAME, OIDC_STATE_COOKIE_NAME } from '@/constants';
import type { OidcInstanceSettingsLoader } from '@/instance-settings-loader/loaders/oidc.instance-settings-loader';
import type { AuthlessRequest } from '@/requests';
import type { UrlService } from '@/services/url.service';
@ -19,14 +18,16 @@ const oidcService = mock<OidcService>();
const urlService = mock<UrlService>();
const globalConfig = mock<GlobalConfig>();
const logger = mock<Logger>();
const oidcSettingsLoader = mock<OidcInstanceSettingsLoader>();
const instanceSettingsLoaderConfig = mock<InstanceSettingsLoaderConfig>({
ssoManagedByEnv: false,
});
const controller = new OidcController(
oidcService,
authService,
urlService,
globalConfig,
logger,
oidcSettingsLoader,
instanceSettingsLoaderConfig,
);
const user = mock<User>({
@ -258,6 +259,31 @@ describe('OidcController', () => {
});
});
describe('OIDC env-managed write protection', () => {
const envManagedConfig = mock<InstanceSettingsLoaderConfig>({ ssoManagedByEnv: true });
const envManagedController = new OidcController(
oidcService,
authService,
urlService,
globalConfig,
logger,
envManagedConfig,
);
test('saveConfiguration should reject writes when managed by env', async () => {
const req = mock<AuthenticatedRequest>();
const res = mock<Response>();
await expect(
envManagedController.saveConfiguration(req, res, {
clientId: 'id',
clientSecret: 'secret',
discoveryEndpoint: 'https://example.com',
} as any),
).rejects.toThrow('cannot be modified through the API');
});
});
describe('redirectToAuthProvider', () => {
test('Should redirect to generated authorization URL', async () => {
const req = mock<Request>();

View file

@ -1,6 +1,6 @@
import { OidcConfigDto } from '@n8n/api-types';
import { Logger } from '@n8n/backend-common';
import { GlobalConfig } from '@n8n/config';
import { GlobalConfig, InstanceSettingsLoaderConfig } from '@n8n/config';
import { Time } from '@n8n/constants';
import { AuthenticatedRequest } from '@n8n/db';
import { Body, Get, GlobalScope, Licensed, Post, RestController } from '@n8n/decorators';
@ -9,7 +9,7 @@ import { Request, Response } from 'express';
import { AuthService } from '@/auth/auth.service';
import { OIDC_NONCE_COOKIE_NAME, OIDC_STATE_COOKIE_NAME } from '@/constants';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { OidcInstanceSettingsLoader } from '@/instance-settings-loader/loaders/oidc.instance-settings-loader';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { AuthlessRequest } from '@/requests';
import { UrlService } from '@/services/url.service';
@ -25,7 +25,7 @@ export class OidcController {
private readonly urlService: UrlService,
private readonly globalConfig: GlobalConfig,
private readonly logger: Logger,
private readonly oidcSettingsLoader: OidcInstanceSettingsLoader,
private readonly instanceSettingsLoaderConfig: InstanceSettingsLoaderConfig,
) {}
@Get('/config')
@ -47,9 +47,9 @@ export class OidcController {
_res: Response,
@Body payload: OidcConfigDto,
) {
if (this.oidcSettingsLoader.isConfiguredByEnv()) {
throw new BadRequestError(
'OIDC configuration is managed via environment variables and cannot be modified through the UI',
if (this.instanceSettingsLoaderConfig.ssoManagedByEnv) {
throw new ForbiddenError(
'OIDC configuration is managed via environment variables and cannot be modified through the API',
);
}
await this.oidcService.updateConfig(payload);

View file

@ -1,9 +1,10 @@
import { GLOBAL_OWNER_ROLE, type User } from '@n8n/db';
import { GLOBAL_OWNER_ROLE, type AuthenticatedRequest, type User } from '@n8n/db';
import { type Response } from 'express';
import { mock } from 'jest-mock-extended';
import type { AuthService } from '@/auth/auth.service';
import type { EventService } from '@/events/event.service';
import type { InstanceSettingsLoaderConfig } from '@n8n/config';
import type { AuthlessRequest } from '@/requests';
import type { UrlService } from '@/services/url.service';
import { isSamlLicensedAndEnabled } from '@/sso.ee/sso-helpers';
@ -27,7 +28,14 @@ const authService = mock<AuthService>();
const samlService = mock<SamlService>();
const urlService = mock<UrlService>();
const eventService = mock<EventService>();
const controller = new SamlController(authService, samlService, urlService, eventService);
const instanceSettingsLoaderConfig = mock<InstanceSettingsLoaderConfig>({ ssoManagedByEnv: false });
const controller = new SamlController(
authService,
samlService,
urlService,
eventService,
instanceSettingsLoaderConfig,
);
const user = mock<User>({
id: '123',
@ -241,3 +249,32 @@ describe('SAML Login Flow', () => {
});
});
});
describe('SAML env-managed write protection', () => {
const envManagedConfig = mock<InstanceSettingsLoaderConfig>({ ssoManagedByEnv: true });
const envManagedController = new SamlController(
authService,
samlService,
urlService,
eventService,
envManagedConfig,
);
test('configPost should reject writes when managed by env', async () => {
const req = mock<AuthenticatedRequest>();
const res = mock<Response>();
await expect(
envManagedController.configPost(req, res, { loginEnabled: true } as any),
).rejects.toThrow('cannot be modified through the API');
});
test('toggleEnabledPost should reject writes when managed by env', async () => {
const req = mock<AuthenticatedRequest>();
const res = mock<Response>();
await expect(
envManagedController.toggleEnabledPost(req, res, { loginEnabled: true }),
).rejects.toThrow('cannot be modified through the API');
});
});

View file

@ -9,7 +9,9 @@ import url from 'url';
import { AuthService } from '@/auth/auth.service';
import { AuthError } from '@/errors/response-errors/auth.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { EventService } from '@/events/event.service';
import { InstanceSettingsLoaderConfig } from '@n8n/config';
import { AuthlessRequest } from '@/requests';
import { sendErrorResponse } from '@/response-helper';
import { UrlService } from '@/services/url.service';
@ -37,6 +39,7 @@ export class SamlController {
private readonly samlService: SamlService,
private readonly urlService: UrlService,
private readonly eventService: EventService,
private readonly instanceSettingsLoaderConfig: InstanceSettingsLoaderConfig,
) {}
@Get('/metadata', { skipAuth: true })
@ -66,6 +69,11 @@ export class SamlController {
@Post('/config', { middlewares: [samlLicensedMiddleware] })
@GlobalScope('saml:manage')
async configPost(_req: AuthenticatedRequest, _res: Response, @Body payload: SamlPreferences) {
if (this.instanceSettingsLoaderConfig.ssoManagedByEnv) {
throw new ForbiddenError(
'SSO configuration is managed via environment variables and cannot be modified through the API',
);
}
const result = await this.samlService.setSamlPreferences(payload);
if (!result) return;
return {
@ -84,6 +92,11 @@ export class SamlController {
res: Response,
@Body { loginEnabled }: SamlToggleDto,
) {
if (this.instanceSettingsLoaderConfig.ssoManagedByEnv) {
throw new ForbiddenError(
'SSO configuration is managed via environment variables and cannot be modified through the API',
);
}
await this.samlService.setSamlPreferences({ loginEnabled });
return res.sendStatus(200);
}

View file

@ -3,10 +3,11 @@ import { GlobalConfig } from '@n8n/config';
import { SettingsRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { OidcInstanceSettingsLoader } from '@/instance-settings-loader/loaders/oidc.instance-settings-loader';
import { SsoInstanceSettingsLoader } from '@/instance-settings-loader/loaders/sso.instance-settings-loader';
import { PROVISIONING_PREFERENCES_DB_KEY } from '@/modules/provisioning.ee/constants';
import { OIDC_PREFERENCES_DB_KEY } from '@/modules/sso-oidc/constants';
import { OidcService } from '@/modules/sso-oidc/oidc.service.ee';
import { SAML_PREFERENCES_DB_KEY } from '@/modules/sso-saml/constants';
beforeAll(async () => {
await testDb.init();
@ -16,7 +17,7 @@ afterAll(async () => {
await testDb.terminate();
});
describe('OidcInstanceSettingsLoader → OidcService roundtrip', () => {
describe('SsoInstanceSettingsLoader → OidcService roundtrip', () => {
let originalConfig: Record<string, unknown>;
beforeEach(() => {
@ -33,6 +34,7 @@ describe('OidcInstanceSettingsLoader → OidcService roundtrip', () => {
// Clean up DB rows
const settingsRepository = Container.get(SettingsRepository);
await settingsRepository.delete({ key: OIDC_PREFERENCES_DB_KEY });
await settingsRepository.delete({ key: SAML_PREFERENCES_DB_KEY });
await settingsRepository.delete({ key: PROVISIONING_PREFERENCES_DB_KEY });
});
@ -49,7 +51,7 @@ describe('OidcInstanceSettingsLoader → OidcService roundtrip', () => {
ssoUserRoleProvisioning: 'instance_and_project_roles',
});
const loader = Container.get(OidcInstanceSettingsLoader);
const loader = Container.get(SsoInstanceSettingsLoader);
await loader.run();
const oidcService = Container.get(OidcService);

View file

@ -0,0 +1,80 @@
import { testDb } from '@n8n/backend-test-utils';
import { GlobalConfig } from '@n8n/config';
import { SettingsRepository } from '@n8n/db';
import { Container } from '@n8n/di';
import { SsoInstanceSettingsLoader } from '@/instance-settings-loader/loaders/sso.instance-settings-loader';
import { PROVISIONING_PREFERENCES_DB_KEY } from '@/modules/provisioning.ee/constants';
import { OIDC_PREFERENCES_DB_KEY } from '@/modules/sso-oidc/constants';
import { SAML_PREFERENCES_DB_KEY } from '@/modules/sso-saml/constants';
import { SamlService } from '@/modules/sso-saml/saml.service.ee';
beforeAll(async () => {
await testDb.init();
});
afterAll(async () => {
await testDb.terminate();
});
describe('SsoInstanceSettingsLoader → SamlService roundtrip', () => {
let originalConfig: Record<string, unknown>;
beforeEach(() => {
const globalConfig = Container.get(GlobalConfig);
const loader = globalConfig.instanceSettingsLoader;
originalConfig = { ...loader };
});
afterEach(async () => {
const globalConfig = Container.get(GlobalConfig);
Object.assign(globalConfig.instanceSettingsLoader, originalConfig);
const settingsRepository = Container.get(SettingsRepository);
await settingsRepository.delete({ key: OIDC_PREFERENCES_DB_KEY });
await settingsRepository.delete({ key: SAML_PREFERENCES_DB_KEY });
await settingsRepository.delete({ key: PROVISIONING_PREFERENCES_DB_KEY });
});
it('should write config that SamlService reads back with correct values', async () => {
const globalConfig = Container.get(GlobalConfig);
Object.assign(globalConfig.instanceSettingsLoader, {
ssoManagedByEnv: true,
samlMetadata: '<xml>metadata</xml>',
samlMetadataUrl: '',
samlLoginEnabled: true,
ssoUserRoleProvisioning: 'instance_and_project_roles',
});
const loader = Container.get(SsoInstanceSettingsLoader);
await loader.run();
const samlService = Container.get(SamlService);
const prefs = await samlService.loadFromDbAndApplySamlPreferences(false);
expect(prefs).toBeDefined();
expect(prefs!.metadata).toBe('<xml>metadata</xml>');
expect(prefs!.metadataUrl).toBeUndefined();
expect(prefs!.loginEnabled).toBe(true);
});
it('should persist metadataUrl when used instead of metadata', async () => {
const globalConfig = Container.get(GlobalConfig);
Object.assign(globalConfig.instanceSettingsLoader, {
ssoManagedByEnv: true,
samlMetadata: '',
samlMetadataUrl: 'https://idp.example.com/metadata',
samlLoginEnabled: true,
ssoUserRoleProvisioning: 'disabled',
});
const loader = Container.get(SsoInstanceSettingsLoader);
await loader.run();
const samlService = Container.get(SamlService);
const prefs = await samlService.loadFromDbAndApplySamlPreferences(false);
expect(prefs!.metadataUrl).toBe('https://idp.example.com/metadata');
expect(prefs!.metadata).toBeUndefined();
});
});

View file

@ -100,6 +100,7 @@ const getExecuteSingleFunctions = (
describe('RoutingNode', () => {
const nodeTypes = NodeTypes();
const additionalData = mock<IWorkflowExecuteAdditionalData>({
executionId: 'test-exec-123',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
});
@ -753,6 +754,7 @@ describe('RoutingNode', () => {
mode,
connectionInputData,
runExecutionData,
nodeType,
});
const routingNode = new RoutingNode(executeFunctions, nodeType);
@ -790,7 +792,7 @@ describe('RoutingNode', () => {
nodeType: {
properties?: INodeProperties[];
credentials?: INodeCredentialDescription[];
requestDefaults?: IHttpRequestOptions;
requestDefaults?: DeclarativeRestApiSettings.HttpRequestOptions;
requestOperations?: IN8nRequestOperations;
};
node: {
@ -2057,6 +2059,124 @@ describe('RoutingNode', () => {
],
],
},
{
description: 'single parameter, routing.request.url resolves $execution.id',
input: {
node: {
parameters: {
resource: 'executions',
},
},
nodeType: {
requestDefaults: {
baseURL: 'http://127.0.0.1:5678',
},
properties: [
{
displayName: 'Resource',
name: 'resource',
type: 'string',
routing: {
request: {
method: 'GET',
url: '=/{{$value}}/{{ $execution.id }}',
},
},
default: '',
},
],
},
},
output: [
[
{
json: {
headers: {},
statusCode: 200,
requestOptions: {
url: '/executions/test-exec-123',
method: 'GET',
headers: {},
qs: {},
body: {},
baseURL: 'http://127.0.0.1:5678',
returnFullResponse: true,
timeout: 300000,
},
},
},
],
],
},
{
description:
'options parameter with routing.request.url on selected option resolves $execution.id via $parameter',
input: {
node: {
parameters: {
operation: 'get',
executionId: '={{ $execution.id }}',
},
},
nodeType: {
requestDefaults: {
baseURL: 'http://127.0.0.1:5678',
},
properties: [
{
displayName: 'Operation',
name: 'operation',
type: 'options',
noDataExpression: true,
default: 'get',
options: [
{
name: 'Get',
value: 'get',
routing: {
request: {
method: 'GET',
url: '=/executions/{{ $parameter.executionId }}',
},
},
},
],
},
{
displayName: 'Execution ID',
name: 'executionId',
type: 'string',
default: '',
displayOptions: {
show: {
operation: ['get'],
},
},
},
],
},
},
output: [
[
{
json: {
headers: {},
statusCode: 200,
requestOptions: {
url: '/executions/test-exec-123',
method: 'GET',
headers: {},
qs: {},
body: {},
baseURL: 'http://127.0.0.1:5678',
returnFullResponse: true,
timeout: 300000,
},
},
},
],
],
},
];
const baseNode: INode = {

View file

@ -40,6 +40,7 @@ import type {
import url from 'node:url';
import { type ExecuteContext, ExecuteSingleContext } from './node-execution-context';
import { getAdditionalKeys } from './node-execution-context/utils/get-additional-keys';
export class RoutingNode {
constructor(
@ -132,6 +133,8 @@ export class RoutingNode {
};
}
const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData);
if (nodeType.description.requestDefaults) {
for (const key of Object.keys(nodeType.description.requestDefaults)) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -142,7 +145,7 @@ export class RoutingNode {
itemIndex,
runIndex,
executeData,
{ $credentials: credentials, $version: node.typeVersion },
{ ...additionalKeys, $credentials: credentials, $version: node.typeVersion },
false,
) as string;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@ -158,7 +161,7 @@ export class RoutingNode {
itemIndex,
runIndex,
executeData,
{ $credentials: credentials, $version: node.typeVersion },
{ ...additionalKeys, $credentials: credentials, $version: node.typeVersion },
false,
) as string | NodeParameterValue;
@ -168,7 +171,12 @@ export class RoutingNode {
itemIndex,
runIndex,
'',
{ $credentials: credentials, $value: value, $version: node.typeVersion },
{
...additionalKeys,
$credentials: credentials,
$value: value,
$version: node.typeVersion,
},
);
this.mergeOptions(itemContext[itemIndex].requestData, tempOptions);
@ -850,7 +858,7 @@ export class RoutingNode {
itemIndex,
runIndex,
executeSingleFunctions.getExecuteData(),
additionalKeys,
{ ...additionalKeys, $value: parameterValue },
true,
) as string;
@ -993,7 +1001,7 @@ export class RoutingNode {
itemIndex,
runIndex,
`${basePath}${nodeProperties.name}`,
{ $value: optionValue, $version: node.typeVersion },
{ ...additionalKeys, $value: optionValue, $version: node.typeVersion },
);
this.mergeOptions(returnData, tempOptions);
@ -1017,7 +1025,7 @@ export class RoutingNode {
itemIndex,
runIndex,
`${basePath}${nodeProperties.name}`,
{ $version: node.typeVersion },
{ ...additionalKeys, $version: node.typeVersion },
);
this.mergeOptions(returnData, tempOptions);
@ -1061,7 +1069,11 @@ export class RoutingNode {
itemIndex,
runIndex,
nodeProperties.typeOptions?.multipleValues ? `${loopBasePath}[${i}]` : loopBasePath,
{ ...(additionalKeys || {}), $index: i, $parent: value[i] },
{
...(additionalKeys || {}),
$index: i,
$parent: value[i],
},
);
this.mergeOptions(returnData, tempOptions);

View file

@ -4390,7 +4390,6 @@
"settings.sso.settings.oidc.prompt.consent": "Consent (Ask the user to consent)",
"settings.sso.settings.oidc.prompt.select_account": "Select Account (Allow the user to select an account)",
"settings.sso.settings.oidc.prompt.create": "Create (Ask the OP to show the registration page first)",
"settings.sso.settings.oidc.overrideBanner": "OIDC connection is configured via environment variables. To modify, update the environment variables and restart n8n.",
"settings.sso.settings.userRoleProvisioning.label": "User role provisioning",
"settings.sso.settings.userRoleProvisioning.help": "Manage instance and project roles from your SSO provider.",
"settings.sso.settings.userRoleProvisioning.help.linkText": "Link to docs",
@ -4450,6 +4449,7 @@
"settings.sso.settings.save.activate.cancel": "Cancel",
"settings.sso.settings.save.error": "Error saving SAML SSO configuration",
"settings.sso.settings.save.error_oidc": "Error saving OIDC SSO configuration",
"settings.sso.settings.envConfigBanner": "SSO connection is configured via environment variables. To modify, update the environment variables and restart n8n.",
"settings.sso.actionBox.title": "Available on the Enterprise plan",
"settings.sso.actionBox.description": "Use Single Sign On to consolidate authentication into a single platform to improve security and agility.",
"settings.sso.actionBox.buttonText": "See plans",

View file

@ -35,18 +35,6 @@ export type RootStoreState = {
};
export const useRootStore = defineStore(STORES.ROOT, () => {
// Generate or retrieve client ID from sessionStorage
const getClientId = (): string => {
const storageKey = 'n8n-client-id';
const existingId = sessionStorage.getItem(storageKey);
if (existingId) {
return existingId;
}
const newId = randomString(10).toLowerCase();
sessionStorage.setItem(storageKey, newId);
return newId;
};
const state = ref<RootStoreState>({
baseUrl: VUE_APP_URL_BASE_API ?? window.BASE_PATH,
restEndpoint: getConfigFromMetaTag('rest-endpoint') ?? 'rest',
@ -65,7 +53,7 @@ export const useRootStore = defineStore(STORES.ROOT, () => {
versionCli: '0.0.0',
oauthCallbackUrls: {},
n8nMetadata: {},
pushRef: getClientId(),
pushRef: randomString(10).toLowerCase(),
urlBaseWebhook: 'http://localhost:5678/',
urlBaseEditor: 'http://localhost:5678',
instanceId: '',

View file

@ -29,9 +29,7 @@ const i18n = useI18n();
const rootStore = useRootStore();
const workflowsStore = useWorkflowsStore();
const workflowDocumentStore = computed(() =>
workflowsStore.workflowId
? useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId))
: undefined,
useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId)),
);
type ChatEmbedModalTabValue = 'cdn' | 'vue' | 'react' | 'other';
@ -61,7 +59,7 @@ const currentTab = ref<ChatEmbedModalTabValue>('cdn');
const webhookNode = computed(() => {
for (const type of [CHAT_TRIGGER_NODE_TYPE, WEBHOOK_NODE_TYPE]) {
const node = (workflowDocumentStore.value?.allNodes ?? []).find((node) => node.type === type);
const node = workflowDocumentStore.value.allNodes.find((node) => node.type === type);
if (node) {
// This has to be kept up-to-date with the mode in the Chat-Trigger node
if (type === CHAT_TRIGGER_NODE_TYPE && !node.parameters.public) {

View file

@ -38,7 +38,6 @@ import type { INodeUi, TargetNodeParameterContext } from '@/Interface';
import { useTelemetry } from '@/app/composables/useTelemetry';
import { computedAsync } from '@vueuse/core';
import { useExecutionData } from '@/features/execution/executions/composables/useExecutionData';
import { useWorkflowsStore } from '@/app/stores/workflows.store';
import { injectWorkflowDocumentStore } from '@/app/stores/workflowDocument.store';
import ExperimentalNodeDetailsDrawer from '@/features/workflows/canvas/experimental/components/ExperimentalNodeDetailsDrawer.vue';
import { useExperimentalNdvStore } from '@/features/workflows/canvas/experimental/experimentalNdv.store';
@ -73,7 +72,6 @@ const locale = useI18n();
const nodeHelpers = useNodeHelpers();
const focusPanelStore = useFocusPanelStore();
const workflowId = useInjectWorkflowId();
const workflowsStore = useWorkflowsStore();
const workflowDocumentStore = injectWorkflowDocumentStore();
const workflowState = injectWorkflowState();
const nodeTypesStore = useNodeTypesStore();
@ -154,7 +152,7 @@ const { workflowRunData } = useExecutionData({ node });
const hasNodeRun = computed(() => {
if (!node.value) return true;
const parentNode = workflowsStore.workflowObject.getParentNodes(node.value.name, 'main', 1)[0];
const parentNode = workflowDocumentStore?.value?.getParentNodes(node.value.name, 'main', 1)[0];
return Boolean(
parentNode &&
workflowRunData.value &&

View file

@ -18,6 +18,11 @@ import { type MockedStore, mockedStore } from '@/__tests__/utils';
const { mockWorkflowDocumentStore } = vi.hoisted(() => ({
mockWorkflowDocumentStore: {
getNodeByName: vi.fn(),
getChildNodes: vi.fn().mockReturnValue([]),
allNodes: [] as Array<{ id: string; name: string; type: string }>,
name: '',
settings: {},
getPinDataSnapshot: () => ({}),
},
}));

View file

@ -42,23 +42,21 @@ const ndvStore = useNDVStore();
const modalBus = createEventBus();
const workflowsStore = useWorkflowsStore();
const workflowDocumentStore = computed(() =>
workflowsStore.workflowId
? useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId))
: undefined,
useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId)),
);
const router = useRouter();
const { runWorkflow } = useRunWorkflow({ router });
const agentRequestStore = useAgentRequestStore();
const node = computed(() =>
props.data.nodeName ? workflowDocumentStore.value?.getNodeByName(props.data.nodeName) : undefined,
props.data.nodeName ? workflowDocumentStore.value.getNodeByName(props.data.nodeName) : undefined,
);
const parentNode = computed(() => {
if (!node.value) return undefined;
const parentNodes = workflowsStore.workflowObject.getChildNodes(node.value.name, 'ALL', 1);
const parentNodes = workflowDocumentStore.value.getChildNodes(node.value.name, 'ALL', 1);
if (parentNodes.length === 0) return undefined;
return workflowDocumentStore.value?.getNodeByName(parentNodes[0])?.name;
return workflowDocumentStore.value.getNodeByName(parentNodes[0])?.name;
});
const { getToolName, parameters, error, updateSelectedTool } = useToolParameters({ node });

View file

@ -377,7 +377,6 @@ describe('NodeExecuteButton', () => {
const node = mockNode({ name: 'test-node', type: SET_NODE_TYPE });
vi.spyOn(workflowDocumentStore, 'getNodeByName').mockReturnValue(node);
workflowsStore.checkIfNodeHasChatParent.mockReturnValue(true);
workflowsStore.workflowObject.getStartNode = vi.fn().mockReturnValue(undefined);
const { getByRole } = renderComponent();

View file

@ -1,11 +1,12 @@
<script setup lang="ts">
import type { SimplifiedNodeType } from '@/Interface';
import { getNodeIconSource, type NodeIconSource } from '@/app/utils/nodeIcon';
import { type NodeIconSource } from '@/app/utils/nodeIcon';
import type { VersionNode } from '@n8n/rest-api-client/api/versions';
import type { INode } from 'n8n-workflow';
import { computed } from 'vue';
import { N8nNodeIcon } from '@n8n/design-system';
import { useNodeIconSource } from '../composables/useNodeIconSource';
type Props = {
size?: number;
disabled?: boolean;
@ -39,10 +40,9 @@ const emit = defineEmits<{
click: [];
}>();
const iconSource = computed(() => {
if (props.iconSource) return props.iconSource;
return getNodeIconSource(props.nodeType, props.node ?? null);
});
const iconSourceFromNodeType = useNodeIconSource(props.nodeType, props.node ?? null);
const iconSource = computed(() => props.iconSource ?? iconSourceFromNodeType.value);
const iconType = computed(() => iconSource.value?.type ?? 'unknown');
const src = computed(() => {

View file

@ -58,24 +58,21 @@ const route = useRoute();
const workflowSaving = useWorkflowSaving({ router });
const workflowDocumentStore = computed(() =>
data.id ? useWorkflowDocumentStore(createWorkflowDocumentId(data.id)) : undefined,
useWorkflowDocumentStore(createWorkflowDocumentId(data.id)),
);
const workflowListEntry = computed(() => workflowsListStore.workflowsById[data.id]);
const workflowId = computed(() => data.id);
const workflowName = computed(
() => workflowListEntry.value?.name ?? workflowDocumentStore.value?.name ?? '',
() => workflowListEntry.value?.name ?? workflowDocumentStore.value.name,
);
const workflowHomeProject = computed(
() =>
workflowListEntry.value?.homeProject ??
workflowDocumentStore.value?.homeProject ??
workflowDocumentStore.value.homeProject ??
workflowsStore.workflow.homeProject,
);
const workflowScopes = computed(
() =>
workflowListEntry.value?.scopes ??
workflowDocumentStore.value?.scopes ??
workflowsStore.workflow.scopes,
() => workflowListEntry.value?.scopes ?? workflowDocumentStore.value.scopes,
);
const workflowSharedWithProjects = computed(
() =>

View file

@ -34,8 +34,7 @@ export function useCalloutHelpers() {
const template = getRagStarterWorkflowJson();
const routeTemplateId = route.query.templateId;
const workflowObject = workflowsStore.workflowObject;
const workflow = workflowsListStore.getWorkflowById(workflowObject.id);
const workflow = workflowsListStore.getWorkflowById(workflowsStore.workflowId);
// Hide the RAG starter callout if we're currently on the RAG starter template
if ((routeTemplateId ?? workflow?.meta?.templateId) === template.meta.templateId) {

View file

@ -191,9 +191,7 @@ export function useCanvasOperations() {
const focusPanelStore = useFocusPanelStore();
const setupPanelStore = useSetupPanelStore();
const workflowDocumentStore = computed(() =>
workflowsStore.workflowId
? useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId))
: undefined,
useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId)),
);
const i18n = useI18n();
@ -213,7 +211,10 @@ export function useCanvasOperations() {
const preventOpeningNDV = !!localStorage.getItem('NodeView.preventOpeningNDV');
const editableWorkflow = computed<IWorkflowDb>(() => workflowsStore.workflow);
const editableWorkflowObject = computed(() => workflowsStore.workflowObject as Workflow);
const editableWorkflowObject = computed(() =>
workflowDocumentStore.value ? workflowDocumentStore.value.getSnapshot() : undefined,
);
const triggerNodes = computed<INodeUi[]>(() => {
return workflowsStore.workflowTriggerNodes;
@ -275,7 +276,7 @@ export function useCanvasOperations() {
position: CanvasNode['position'],
{ trackHistory = false } = {},
) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
@ -283,7 +284,7 @@ export function useCanvasOperations() {
const oldPosition: XYPosition = [...node.position];
const newPosition: XYPosition = [position.x, position.y];
workflowDocumentStore?.value?.setNodePositionById(id, newPosition);
workflowDocumentStore.value.setNodePositionById(id, newPosition);
if (trackHistory) {
historyStore.pushCommandToUndo(
@ -293,7 +294,7 @@ export function useCanvasOperations() {
}
function revertUpdateNodePosition(nodeName: string, position: CanvasNode['position']) {
const node = workflowDocumentStore?.value?.getNodeByName(nodeName);
const node = workflowDocumentStore.value.getNodeByName(nodeName);
if (!node) {
return;
}
@ -307,13 +308,13 @@ export function useCanvasOperations() {
newParameters: INodeParameters,
{ trackHistory = false, trackBulk = true } = {},
) {
const node = workflowDocumentStore?.value?.getNodeById(nodeId);
const node = workflowDocumentStore.value.getNodeById(nodeId);
if (!node) return;
if (trackHistory && trackBulk) {
historyStore.startRecordingUndo();
}
workflowDocumentStore?.value?.setNodeParameters({
workflowDocumentStore.value.setNodeParameters({
name: node.name,
value: newParameters,
});
@ -378,8 +379,8 @@ export function useCanvasOperations() {
// Update also last selected node and execution data
workflowsStore.renameNodeSelectedAndExecution({ old: currentName, new: newName });
workflowDocumentStore?.value?.setNodes(Object.values(workflow.nodes));
workflowDocumentStore?.value?.setConnections(workflow.connectionsBySourceNode);
workflowDocumentStore.value.setNodes(Object.values(workflow.nodes));
workflowDocumentStore.value.setConnections(workflow.connectionsBySourceNode);
const isRenamingActiveNode = ndvStore.activeNodeName === currentName;
if (isRenamingActiveNode) {
@ -398,23 +399,25 @@ export function useCanvasOperations() {
}
function connectAdjacentNodes(id: string, { trackHistory = false } = {}) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
const outputConnectionsByType =
workflowDocumentStore.value?.outgoingConnectionsByNodeName(node.name) ?? {};
const incomingConnectionsByType =
workflowDocumentStore.value?.incomingConnectionsByNodeName(node.name) ?? {};
const outputConnectionsByType = workflowDocumentStore.value.outgoingConnectionsByNodeName(
node.name,
);
const incomingConnectionsByType = workflowDocumentStore.value.incomingConnectionsByNodeName(
node.name,
);
for (const [type, incomingConnectionsByInputIndex] of Object.entries(
incomingConnectionsByType,
) as Array<[NodeConnectionType, NodeInputConnections]>) {
// Only connect nodes connected to the first input of a type
for (const incomingConnection of incomingConnectionsByInputIndex.at(0) ?? []) {
const incomingNodeId = workflowDocumentStore?.value?.getNodeByName(
const incomingNodeId = workflowDocumentStore.value.getNodeByName(
incomingConnection.node,
)?.id;
@ -423,7 +426,7 @@ export function useCanvasOperations() {
// Only connect to nodes connected to the first output of a type
// For example on an If node, connect to the "true" main output
for (const outgoingConnection of outputConnectionsByType[type]?.at(0) ?? []) {
const outgoingNodeId = workflowDocumentStore?.value?.getNodeByName(
const outgoingNodeId = workflowDocumentStore.value.getNodeByName(
outgoingConnection.node,
)?.id;
@ -469,7 +472,7 @@ export function useCanvasOperations() {
}
function deleteNode(id: string, { trackHistory = false, trackBulk = true } = {}) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
@ -486,7 +489,7 @@ export function useCanvasOperations() {
deleteConnectionsByNodeId(id, { trackHistory, trackBulk: false });
workflowsStore.removeNodeExecutionDataById(id);
workflowDocumentStore?.value?.removeNodeById(id);
workflowDocumentStore.value.removeNodeById(id);
if (trackHistory) {
historyStore.pushCommandToUndo(new RemoveNodeCommand(node, Date.now()));
@ -512,12 +515,12 @@ export function useCanvasOperations() {
}
function revertDeleteNode(node: INodeUi) {
workflowDocumentStore?.value?.addNode(node);
workflowDocumentStore.value.addNode(node);
uiStore.markStateDirty();
}
function trackDeleteNode(id: string) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
@ -540,24 +543,24 @@ export function useCanvasOperations() {
newId: string,
{ trackHistory = false, trackBulk = true, replaceInputs = true, replaceOutputs = true } = {},
) {
const previousNode = workflowDocumentStore?.value?.getNodeById(previousId);
const newNode = workflowDocumentStore?.value?.getNodeById(newId);
const previousNode = workflowDocumentStore.value.getNodeById(previousId);
const newNode = workflowDocumentStore.value.getNodeById(newId);
if (!previousNode || !newNode) {
return;
}
const inputNodeNames = replaceInputs
? uniq(workflowDocumentStore.value?.getParentNodes(previousNode.name, 'ALL', 1))
? uniq(workflowDocumentStore.value.getParentNodes(previousNode.name, 'ALL', 1))
: [];
const outputNodeNames = replaceOutputs
? uniq(workflowDocumentStore.value?.getChildNodes(previousNode.name, 'ALL', 1))
? uniq(workflowDocumentStore.value.getChildNodes(previousNode.name, 'ALL', 1))
: [];
const connectionPairs = [
...(workflowDocumentStore.value?.getConnectionsBetweenNodes(inputNodeNames, [
...(workflowDocumentStore.value.getConnectionsBetweenNodes(inputNodeNames, [
previousNode.name,
]) ?? []),
...(workflowDocumentStore.value?.getConnectionsBetweenNodes(
...(workflowDocumentStore.value.getConnectionsBetweenNodes(
[previousNode.name],
outputNodeNames,
) ?? []),
@ -567,8 +570,8 @@ export function useCanvasOperations() {
historyStore.startRecordingUndo();
}
for (const pair of connectionPairs) {
const sourceNode = workflowDocumentStore?.value?.getNodeByName(pair[0].node);
const targetNode = workflowDocumentStore?.value?.getNodeByName(pair[1].node);
const sourceNode = workflowDocumentStore.value.getNodeByName(pair[0].node);
const targetNode = workflowDocumentStore.value.getNodeByName(pair[1].node);
if (!sourceNode || !targetNode) continue;
const oldCanvasConnection = mapLegacyConnectionToCanvasConnection(
sourceNode,
@ -616,12 +619,12 @@ export function useCanvasOperations() {
}
function setNodeActive(id: string, source: TelemetryNdvSource) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
workflowsStore.setNodePristine(node.name, false);
workflowDocumentStore.value.setNodePristine(node.name, false);
setNodeActiveByName(node.name, source);
}
@ -634,12 +637,12 @@ export function useCanvasOperations() {
}
function setNodeParameters(id: string, parameters: Record<string, unknown>) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
workflowDocumentStore?.value?.setNodeParameters(
workflowDocumentStore.value.setNodeParameters(
{
name: node.name,
value: parameters as NodeParameterValueType,
@ -655,7 +658,7 @@ export function useCanvasOperations() {
return;
}
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
if (!node) {
return;
}
@ -669,7 +672,7 @@ export function useCanvasOperations() {
historyStore.startRecordingUndo();
}
const nodes = workflowDocumentStore?.value?.getNodesByIds(ids) ?? [];
const nodes = workflowDocumentStore.value.getNodesByIds(ids);
nodeHelpers.disableNodes(nodes, { trackHistory, trackBulk: false });
if (trackHistory && trackBulk) {
@ -678,7 +681,7 @@ export function useCanvasOperations() {
}
function revertToggleNodeDisabled(nodeName: string) {
const node = workflowDocumentStore?.value?.getNodeByName(nodeName);
const node = workflowDocumentStore.value.getNodeByName(nodeName);
if (node) {
nodeHelpers.disableNodes([node]);
}
@ -693,7 +696,7 @@ export function useCanvasOperations() {
historyStore.startRecordingUndo();
}
const nodes = workflowDocumentStore?.value?.getNodesByIds(ids) ?? [];
const nodes = workflowDocumentStore.value.getNodesByIds(ids);
// Filter to only pinnable nodes
const pinnableNodesWithPinnedData = nodes
@ -829,11 +832,11 @@ export function useCanvasOperations() {
}
function updatePositionForNodeWithMultipleInputs(node: INodeUi) {
const inputNodes = workflowDocumentStore.value?.getParentNodesByDepth(node.name, 1) ?? [];
const inputNodes = workflowDocumentStore.value.getParentNodesByDepth(node.name, 1);
if (inputNodes.length > 1) {
inputNodes.slice(1).forEach((inputNode, index) => {
const nodeUi = workflowDocumentStore?.value?.getNodeByName(inputNode.name);
const nodeUi = workflowDocumentStore.value.getNodeByName(inputNode.name);
if (!nodeUi) return;
updateNodePosition(nodeUi.id, {
@ -875,7 +878,7 @@ export function useCanvasOperations() {
throw new Error(i18n.baseText('nodeViewV2.showError.failedToCreateNode'));
}
workflowDocumentStore?.value?.addNode(nodeData);
workflowDocumentStore.value.addNode(nodeData);
if (options.trackHistory) {
historyStore.pushCommandToUndo(new AddNodeCommand(nodeData, Date.now()));
}
@ -889,7 +892,7 @@ export function useCanvasOperations() {
uiStore.markStateDirty();
}
workflowsStore.setNodePristine(nodeData.name, true);
workflowDocumentStore.value.setNodePristine(nodeData.name, true);
nodeHelpers.matchCredentials(nodeData);
nodeHelpers.updateNodeParameterIssues(nodeData);
nodeHelpers.updateNodeCredentialIssues(nodeData);
@ -928,7 +931,7 @@ export function useCanvasOperations() {
}
async function revertAddNode(nodeName: string) {
const node = workflowDocumentStore?.value?.getNodeByName(nodeName);
const node = workflowDocumentStore.value.getNodeByName(nodeName);
if (!node) {
return;
}
@ -1042,7 +1045,7 @@ export function useCanvasOperations() {
if (lastInteractedWithNodeConnection) {
deleteConnection(lastInteractedWithNodeConnection, trackOptions);
const targetNode = workflowDocumentStore?.value?.getNodeById(
const targetNode = workflowDocumentStore.value.getNodeById(
lastInteractedWithNodeConnection.target,
);
if (targetNode) {
@ -1234,16 +1237,12 @@ export function useCanvasOperations() {
let position: XYPosition | undefined = node.position;
if (position) {
return NodeViewUtils.getNewNodePosition(
workflowDocumentStore?.value?.allNodes ?? [],
position,
{
offset: pushOffsets,
size: nodeSize,
viewport: options.viewport,
normalize: false,
},
);
return NodeViewUtils.getNewNodePosition(workflowDocumentStore.value.allNodes, position, {
offset: pushOffsets,
size: nodeSize,
viewport: options.viewport,
normalize: false,
});
}
if (lastInteractedWithNode) {
@ -1251,7 +1250,7 @@ export function useCanvasOperations() {
lastInteractedWithNode.type,
lastInteractedWithNode.typeVersion,
);
const lastInteractedWithNodeObject = workflowDocumentStore.value?.getNodeByName(
const lastInteractedWithNodeObject = workflowDocumentStore.value.getNodeByName(
lastInteractedWithNode.name,
);
@ -1281,7 +1280,7 @@ export function useCanvasOperations() {
connectionType === NodeConnectionTypes.AiTool
) {
// Get the source node (main node) from the connection
const toolUserNode = workflowDocumentStore?.value?.getNodeById(
const toolUserNode = workflowDocumentStore.value.getNodeById(
lastInteractedWithNodeConnection.target,
);
if (toolUserNode) {
@ -1315,7 +1314,7 @@ export function useCanvasOperations() {
}
return NodeViewUtils.getNewNodePosition(
workflowDocumentStore?.value?.allNodes ?? [],
workflowDocumentStore.value.allNodes,
position,
{
offset: pushOffsets,
@ -1326,11 +1325,14 @@ export function useCanvasOperations() {
}
}
const lastInteractedWithNodeInputs = NodeHelpers.getNodeInputs(
editableWorkflowObject.value,
lastInteractedWithNodeObject,
lastInteractedWithNodeTypeDescription,
);
const expression = workflowDocumentStore.value.getExpressionHandler();
const lastInteractedWithNodeInputs = expression
? NodeHelpers.getNodeInputs(
{ expression },
lastInteractedWithNodeObject,
lastInteractedWithNodeTypeDescription,
)
: [];
const lastInteractedWithNodeInputTypes = NodeHelpers.getConnectionTypes(
lastInteractedWithNodeInputs,
);
@ -1339,11 +1341,13 @@ export function useCanvasOperations() {
lastInteractedWithNodeInputTypes || []
).filter((input) => input !== NodeConnectionTypes.Main);
const lastInteractedWithNodeOutputs = NodeHelpers.getNodeOutputs(
editableWorkflowObject.value,
lastInteractedWithNodeObject,
lastInteractedWithNodeTypeDescription,
);
const lastInteractedWithNodeOutputs = expression
? NodeHelpers.getNodeOutputs(
{ expression },
lastInteractedWithNodeObject,
lastInteractedWithNodeTypeDescription,
)
: [];
const lastInteractedWithNodeOutputTypes = NodeHelpers.getConnectionTypes(
lastInteractedWithNodeOutputs,
);
@ -1405,11 +1409,9 @@ export function useCanvasOperations() {
// outputs here is to calculate the position, it is fine to assume
// that they have no outputs and are so treated as a regular node
// with only "main" outputs.
outputs = NodeHelpers.getNodeOutputs(
editableWorkflowObject.value,
node as INode,
nodeTypeDescription,
);
outputs = expression
? NodeHelpers.getNodeOutputs({ expression }, node as INode, nodeTypeDescription)
: [];
} catch (e) {}
const outputTypes = NodeHelpers.getConnectionTypes(outputs);
@ -1488,15 +1490,11 @@ export function useCanvasOperations() {
}
}
return NodeViewUtils.getNewNodePosition(
workflowDocumentStore?.value?.allNodes ?? [],
position,
{
offset: pushOffsets,
size: nodeSize,
viewport: options.viewport,
},
);
return NodeViewUtils.getNewNodePosition(workflowDocumentStore.value.allNodes, position, {
offset: pushOffsets,
size: nodeSize,
viewport: options.viewport,
});
}
function resolveNodeName(node: INodeUi) {
@ -1589,7 +1587,7 @@ export function useCanvasOperations() {
stickiesToMoveAndStretch: INodeUi[];
stickyAssociatedNodes: Map<string, INodeUi[]>;
} {
const allNodes = workflowDocumentStore?.value?.allNodes ?? [];
const allNodes = workflowDocumentStore.value.allNodes;
const insertX = insertPosition[0];
const insertY = insertPosition[1];
const yTolerance = DEFAULT_NODE_SIZE[1] * 2; // Nodes within ~2 node heights are considered "similar Y"
@ -1670,12 +1668,14 @@ export function useCanvasOperations() {
// Step 2: Add all downstream connected nodes from initial candidates
const candidateNames = new Set(initialCandidates.map((node) => node.name));
for (const candidate of initialCandidates) {
const downstream =
workflowDocumentStore.value?.getConnectedNodes('downstream', candidate.name) ?? [];
const downstream = workflowDocumentStore.value.getConnectedNodes(
'downstream',
candidate.name,
);
downstream
// Filter the downstream nodes to find candidates that need to be shifted right.
.filter((name) => {
const node = workflowDocumentStore?.value?.getNodeByName(name);
const node = workflowDocumentStore.value.getNodeByName(name);
if (!node) {
return false;
}
@ -1708,7 +1708,7 @@ export function useCanvasOperations() {
...regularNodesToMove.map((n) => n.position[1] + DEFAULT_NODE_SIZE[1]),
);
const sourceNode = workflowDocumentStore?.value?.getNodeByName(sourceNodeName);
const sourceNode = workflowDocumentStore.value.getNodeByName(sourceNodeName);
const nodeCenterThreshold = {
x: nodeSize[0] / 2,
y: nodeSize[1] / 2,
@ -1824,7 +1824,7 @@ export function useCanvasOperations() {
// Expand to include all associated nodes (get fresh positions from store as they may have moved)
for (const node of associatedNodes) {
const updatedNode = workflowDocumentStore?.value?.getNodeById(node.id);
const updatedNode = workflowDocumentStore.value.getNodeById(node.id);
if (!updatedNode) continue;
const nodeLeft = updatedNode.position[0] - padding;
const nodeRight = updatedNode.position[0] + DEFAULT_NODE_SIZE[0] + padding;
@ -1861,7 +1861,7 @@ export function useCanvasOperations() {
nodeSize = DEFAULT_NODE_SIZE,
}: { trackHistory?: boolean; nodeSize?: [number, number] },
) {
const sourceNode = workflowDocumentStore?.value?.getNodeByName(sourceNodeName);
const sourceNode = workflowDocumentStore.value.getNodeByName(sourceNodeName);
if (!sourceNode) return;
// Calculate insertion position (to the right of source node)
@ -1872,7 +1872,7 @@ export function useCanvasOperations() {
];
// Get all nodes except source and stickies
const nodesToCheck = (workflowDocumentStore?.value?.allNodes ?? []).filter(
const nodesToCheck = workflowDocumentStore.value.allNodes.filter(
(n) => n.name !== sourceNodeName && n.type !== STICKY_NODE_TYPE,
);
@ -1899,7 +1899,7 @@ export function useCanvasOperations() {
// Stretch stickies that moved and also need to encompass the new node
for (const sticky of stickiesToMoveAndStretch) {
const updatedSticky = workflowDocumentStore?.value?.getNodeById(sticky.id);
const updatedSticky = workflowDocumentStore.value.getNodeById(sticky.id);
if (!updatedSticky) continue;
const associatedNodes = stickyAssociatedNodes.get(sticky.id) ?? [];
stretchStickyNote(updatedSticky, insertPosition, nodeSize, associatedNodes, { trackHistory });
@ -1920,8 +1920,8 @@ export function useCanvasOperations() {
connection: Connection,
{ trackHistory = false, keepPristine = false } = {},
) {
const sourceNode = workflowDocumentStore?.value?.getNodeById(connection.source);
const targetNode = workflowDocumentStore?.value?.getNodeById(connection.target);
const sourceNode = workflowDocumentStore.value.getNodeById(connection.source);
const targetNode = workflowDocumentStore.value.getNodeById(connection.target);
if (!sourceNode || !targetNode) {
return;
}
@ -1945,7 +1945,7 @@ export function useCanvasOperations() {
return;
}
workflowDocumentStore.value?.addConnection({
workflowDocumentStore.value.addConnection({
connection: mappedConnection,
});
@ -1961,9 +1961,9 @@ export function useCanvasOperations() {
function revertCreateConnection(connection: [IConnection, IConnection]) {
const sourceNodeName = connection[0].node;
const sourceNode = workflowDocumentStore?.value?.getNodeByName(sourceNodeName);
const sourceNode = workflowDocumentStore.value.getNodeByName(sourceNodeName);
const targetNodeName = connection[1].node;
const targetNode = workflowDocumentStore?.value?.getNodeByName(targetNodeName);
const targetNode = workflowDocumentStore.value.getNodeByName(targetNodeName);
if (!sourceNode || !targetNode) {
return;
@ -1976,7 +1976,7 @@ export function useCanvasOperations() {
targetNodeId: string,
{ trackHistory = false, trackBulk = true } = {},
) {
const targetNode = workflowDocumentStore?.value?.getNodeById(targetNodeId);
const targetNode = workflowDocumentStore.value.getNodeById(targetNodeId);
if (!targetNode) {
return;
}
@ -1985,9 +1985,9 @@ export function useCanvasOperations() {
historyStore.startRecordingUndo();
}
const connections = cloneDeep(workflowDocumentStore.value?.connectionsBySourceNode ?? {});
const connections = cloneDeep(workflowDocumentStore.value.connectionsBySourceNode);
for (const nodeName of Object.keys(connections)) {
const node = workflowDocumentStore?.value?.getNodeByName(nodeName);
const node = workflowDocumentStore.value.getNodeByName(nodeName);
if (!node) {
continue;
}
@ -2001,7 +2001,7 @@ export function useCanvasOperations() {
continue;
}
const connectionDataNode = workflowDocumentStore?.value?.getNodeByName(
const connectionDataNode = workflowDocumentStore.value.getNodeByName(
connectionData.node,
);
if (
@ -2031,7 +2031,7 @@ export function useCanvasOperations() {
}
}
workflowDocumentStore.value?.removeAllNodeConnection(targetNode);
workflowDocumentStore.value.removeAllNodeConnection(targetNode);
if (trackHistory && trackBulk) {
historyStore.stopRecordingUndo();
@ -2042,8 +2042,8 @@ export function useCanvasOperations() {
connection: Connection,
{ trackHistory = false, trackBulk = true } = {},
) {
const sourceNode = workflowDocumentStore?.value?.getNodeById(connection.source);
const targetNode = workflowDocumentStore?.value?.getNodeById(connection.target);
const sourceNode = workflowDocumentStore.value.getNodeById(connection.source);
const targetNode = workflowDocumentStore.value.getNodeById(connection.target);
if (!sourceNode || !targetNode) {
return;
}
@ -2058,7 +2058,7 @@ export function useCanvasOperations() {
historyStore.startRecordingUndo();
}
workflowDocumentStore.value?.removeConnection({
workflowDocumentStore.value.removeConnection({
connection: mappedConnection,
});
@ -2077,13 +2077,13 @@ export function useCanvasOperations() {
}
function revertDeleteConnection(connection: [IConnection, IConnection]) {
workflowDocumentStore.value?.addConnection({
workflowDocumentStore.value.addConnection({
connection,
});
}
function revalidateNodeConnections(id: string, connectionMode: CanvasConnectionMode) {
const node = workflowDocumentStore?.value?.getNodeById(id);
const node = workflowDocumentStore.value.getNodeById(id);
const isInput = connectionMode === CanvasConnectionMode.Input;
if (!node) {
return;
@ -2095,8 +2095,8 @@ export function useCanvasOperations() {
}
const connections = mapLegacyConnectionsToCanvasConnections(
workflowDocumentStore?.value?.connectionsBySourceNode ?? {},
workflowDocumentStore?.value?.allNodes ?? [],
workflowDocumentStore.value.connectionsBySourceNode,
workflowDocumentStore.value.allNodes,
);
connections.forEach((connection) => {
@ -2105,7 +2105,7 @@ export function useCanvasOperations() {
if (isRelevantConnection) {
const otherNodeId = isInput ? connection.source : connection.target;
const otherNode = workflowDocumentStore?.value?.getNodeById(otherNodeId);
const otherNode = workflowDocumentStore.value.getNodeById(otherNodeId);
if (!otherNode || !connection.data) {
return;
}
@ -2183,19 +2183,20 @@ export function useCanvasOperations() {
}
const sourceNodeType = getNodeType(sourceNode);
const sourceWorkflowNode = workflowDocumentStore.value?.getNodeByName(sourceNode.name);
const sourceWorkflowNode = workflowDocumentStore.value.getNodeByName(sourceNode.name);
if (!sourceWorkflowNode) {
return false;
}
let sourceNodeOutputs: Array<NodeConnectionType | INodeOutputConfiguration> = [];
if (sourceNodeType) {
sourceNodeOutputs =
NodeHelpers.getNodeOutputs(
editableWorkflowObject.value,
sourceWorkflowNode,
sourceNodeType,
) || [];
sourceNodeOutputs = workflowDocumentStore.value
? NodeHelpers.getNodeOutputs(
{ expression: workflowDocumentStore.value.getExpressionHandler() },
sourceWorkflowNode,
sourceNodeType,
) || []
: [];
}
const sourceOutputsOfType = filterConnectionsByType(sourceNodeOutputs, sourceConnection.type);
@ -2216,19 +2217,20 @@ export function useCanvasOperations() {
}
const targetNodeType = getNodeType(targetNode);
const targetWorkflowNode = workflowDocumentStore.value?.getNodeByName(targetNode.name);
const targetWorkflowNode = workflowDocumentStore.value.getNodeByName(targetNode.name);
if (!targetWorkflowNode) {
return false;
}
let targetNodeInputs: Array<NodeConnectionType | INodeInputConfiguration> = [];
if (targetNodeType) {
targetNodeInputs =
NodeHelpers.getNodeInputs(
editableWorkflowObject.value,
targetWorkflowNode,
targetNodeType,
) || [];
targetNodeInputs = workflowDocumentStore.value
? NodeHelpers.getNodeInputs(
{ expression: workflowDocumentStore.value.getExpressionHandler() },
targetWorkflowNode,
targetNodeType,
) || []
: [];
}
const targetInputsOfType = filterConnectionsByType(targetNodeInputs, targetConnection.type);
@ -2360,10 +2362,10 @@ export function useCanvasOperations() {
nodeHelpers.matchCredentials(node);
resolveNodeParameters(node, nodeTypeDescription);
resolveNodeWebhook(node, nodeTypeDescription);
const existingNode = workflowDocumentStore?.value?.getNodeByName(node.name);
const existingNode = workflowDocumentStore.value.getNodeByName(node.name);
if (existingNode) {
// make sure that preview node type is always removed
workflowDocumentStore?.value?.updateNodeById(existingNode.id, {
workflowDocumentStore.value.updateNodeById(existingNode.id, {
...node,
type: removePreviewToken(node.type),
});
@ -2727,7 +2729,7 @@ export function useCanvasOperations() {
}
if (workflowData.name) {
workflowDocumentStore.value?.setName(workflowData.name);
workflowDocumentStore.value.setName(workflowData.name);
if (setStateDirty) {
uiStore.markStateDirty('metadata');
}
@ -2769,7 +2771,7 @@ export function useCanvasOperations() {
return accu;
}, []);
workflowDocumentStore?.value?.addTags(tagIds);
workflowDocumentStore.value.addTags(tagIds);
}
async function fetchWorkflowDataFromUrl(url: string): Promise<WorkflowDataUpdate | undefined> {
@ -2806,7 +2808,7 @@ export function useCanvasOperations() {
for (const node of nodes) {
const nodeSaveData = workflowHelpers.getNodeDataToSave(node);
const pinDataForNode = workflowDocumentStore?.value
const pinDataForNode = workflowDocumentStore.value
? pinDataToExecutionData(workflowDocumentStore.value.pinData)[node.name]
: undefined;
@ -2820,7 +2822,7 @@ export function useCanvasOperations() {
) {
nodeSaveData.credentials = filterAllowedCredentials(
nodeSaveData.credentials,
workflowDocumentStore?.value?.usedCredentials ?? {},
workflowDocumentStore.value.usedCredentials,
);
}
@ -2856,8 +2858,9 @@ export function useCanvasOperations() {
const connections: Record<string, INodeConnections> = {};
for (const node of nodes) {
const outgoingConnections =
workflowDocumentStore.value?.outgoingConnectionsByNodeName(node.name) ?? {};
const outgoingConnections = workflowDocumentStore.value.outgoingConnectionsByNodeName(
node.name,
);
if (!Object.keys(outgoingConnections).length) continue;
const filteredConnections = filterConnectionsByNodes(outgoingConnections, includeNodeNames);
@ -2889,9 +2892,7 @@ export function useCanvasOperations() {
}
async function duplicateNodes(ids: string[], options: { viewport?: ViewportBoundaries } = {}) {
const workflowData = deepCopy(
getNodesToSave(workflowDocumentStore?.value?.getNodesByIds(ids) ?? []),
);
const workflowData = deepCopy(getNodesToSave(workflowDocumentStore.value.getNodesByIds(ids)));
const result = await importWorkflowData(workflowData, 'duplicate', {
viewport: options.viewport,
importTags: false,
@ -2901,13 +2902,11 @@ export function useCanvasOperations() {
}
async function copyNodes(ids: string[]) {
const workflowData = deepCopy(
getNodesToSave(workflowDocumentStore?.value?.getNodesByIds(ids) ?? []),
);
const workflowData = deepCopy(getNodesToSave(workflowDocumentStore.value.getNodesByIds(ids)));
workflowData.meta = {
...workflowData.meta,
...workflowDocumentStore?.value?.meta,
...workflowDocumentStore.value.meta,
instanceId: rootStore.instanceId,
};
@ -2950,11 +2949,11 @@ export function useCanvasOperations() {
workflowState.setWorkflowExecutionData(data);
if (!['manual', 'evaluation'].includes(data.mode)) {
workflowDocumentStore?.value?.setPinData({});
workflowDocumentStore.value.setPinData({});
}
if (nodeId) {
const node = workflowDocumentStore?.value?.getNodeById(nodeId);
const node = workflowDocumentStore.value.getNodeById(nodeId);
if (node) {
ndvStore.setActiveNodeName(node.name, 'other');
} else {
@ -2971,14 +2970,14 @@ export function useCanvasOperations() {
}
function startChat(source?: 'node' | 'main') {
if (!(workflowDocumentStore?.value?.allNodes ?? []).some(isChatNode)) {
if (!workflowDocumentStore.value.allNodes.some(isChatNode)) {
return;
}
logsStore.toggleOpen(true);
const payload = {
workflow_id: workflowDocumentStore.value?.workflowId,
workflow_id: workflowDocumentStore.value.workflowId,
button_type: source,
};
@ -3004,18 +3003,18 @@ export function useCanvasOperations() {
}
if (workflow.connections) {
workflowDocumentStore.value?.setConnections(workflow.connections);
workflowDocumentStore.value.setConnections(workflow.connections);
}
await addNodes(convertedNodes ?? [], { keepPristine: true });
const workflowData = await workflowState.getNewWorkflowData(
name,
projectsStore.currentProjectId,
);
workflowDocumentStore.value?.setName(workflowData.name);
workflowDocumentStore.value.setName(workflowData.name);
}
function tryToOpenSubworkflowInNewTab(nodeId: string): boolean {
const node = workflowDocumentStore?.value?.getNodeById(nodeId);
const node = workflowDocumentStore.value.getNodeById(nodeId);
if (!node) return false;
const subWorkflowId = NodeHelpers.getSubworkflowId(node);
if (!subWorkflowId) return false;
@ -3028,8 +3027,8 @@ export function useCanvasOperations() {
newId: string,
{ trackHistory = true, trackBulk = true } = {},
) {
const previousNode = workflowDocumentStore?.value?.getNodeById(previousId);
const newNode = workflowDocumentStore?.value?.getNodeById(newId);
const previousNode = workflowDocumentStore.value.getNodeById(previousId);
const newNode = workflowDocumentStore.value.getNodeById(newId);
if (!previousNode || !newNode) return;
@ -3186,7 +3185,7 @@ export function useCanvasOperations() {
await importTemplate({ name: data.name, workflow: data.workflow });
workflowDocumentStore?.value?.addToMeta({ templateId: `${templateId}` });
workflowDocumentStore.value.addToMeta({ templateId: `${templateId}` });
openSetupPanelIfEnabled();
@ -3240,7 +3239,7 @@ export function useCanvasOperations() {
workflow,
});
workflowDocumentStore?.value?.addToMeta({ templateId: `${templateId}` });
workflowDocumentStore.value.addToMeta({ templateId: `${templateId}` });
openSetupPanelIfEnabled();

View file

@ -211,10 +211,10 @@ export function useDataSchema() {
if (!node) return [];
const workflowsStore = useWorkflowsStore();
const workflowDocumentStore = workflowsStore.workflowId
? useWorkflowDocumentStore(createWorkflowDocumentId(workflowsStore.workflowId))
: undefined;
const pinnedData = workflowDocumentStore?.getNodePinData(node.name)?.map((item) => item.json);
const workflowDocumentStore = useWorkflowDocumentStore(
createWorkflowDocumentId(workflowsStore.workflowId),
);
const pinnedData = workflowDocumentStore.getNodePinData(node.name)?.map((item) => item.json);
let inputData = getNodeInputData(node, runIndex, outputIndex);
if (pinnedData) {

Some files were not shown because too many files have changed in this diff Show more