feat(Moonshot Kimi Node): Add new node (#28189)

This commit is contained in:
Dawid Myslak 2026-04-10 16:48:12 +02:00 committed by GitHub
parent 4c3a1501fe
commit e30d2eee60
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 1138 additions and 0 deletions

View file

@ -0,0 +1,17 @@
import type { IExecuteFunctions, INodeType } from 'n8n-workflow';
import { router } from './actions/router';
import { versionDescription } from './actions/versionDescription';
import { listSearch } from './methods';
/**
 * Moonshot Kimi node: exposes the list-search methods used by the model
 * resource locator and delegates execution to the resource/operation router.
 */
export class Moonshot implements INodeType {
	description = versionDescription;

	methods = {
		listSearch,
	};

	// Dispatch every input item to the matching resource/operation handler.
	async execute(this: IExecuteFunctions) {
		const results = await router.call(this);
		return results;
	}
}

View file

@ -0,0 +1,26 @@
import type { INodeProperties } from 'n8n-workflow';
// Reusable "Model" resource locator: the user either picks a Moonshot model
// from a searchable list or types the model ID directly.
export const modelRLC: INodeProperties = {
	displayName: 'Model',
	name: 'modelId',
	type: 'resourceLocator',
	default: { mode: 'list', value: '' },
	required: true,
	modes: [
		{
			displayName: 'From List',
			name: 'list',
			type: 'list',
			typeOptions: {
				// Resolved by the node's listSearch.modelSearch method.
				searchListMethod: 'modelSearch',
				searchable: true,
			},
		},
		{
			displayName: 'ID',
			name: 'id',
			type: 'string',
			placeholder: 'e.g. kimi-k2.5',
		},
	],
};

View file

@ -0,0 +1,123 @@
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { updateDisplayOptions } from 'n8n-workflow';
import type { ChatCompletionResponse, ContentBlock } from '../../helpers/interfaces';
import { prepareBinaryPropertyList } from '../../helpers/utils';
import { apiRequest } from '../../transport';
import { modelRLC } from '../descriptions';
// UI properties for the image → analyze operation.
const properties: INodeProperties[] = [
	modelRLC,
	{
		// Question the model should answer about the attached image(s).
		displayName: 'Text Input',
		name: 'text',
		type: 'string',
		placeholder: "e.g. What's in this image?",
		default: "What's in this image?",
		typeOptions: {
			rows: 2,
		},
	},
	{
		// Comma-separated binary field name(s) holding the image(s) to analyze.
		displayName: 'Input Data Field Name(s)',
		name: 'binaryPropertyName',
		type: 'string',
		default: 'data',
		placeholder: 'e.g. data',
		hint: 'The name of the input field containing the binary file data to be processed',
		description:
			'Name of the binary field(s) which contains the image(s), separate multiple field names with commas',
		typeOptions: {
			binaryDataProperty: true,
		},
	},
	{
		displayName: 'Simplify Output',
		name: 'simplify',
		type: 'boolean',
		default: true,
		description: 'Whether to return a simplified version of the response instead of the raw data',
	},
	{
		displayName: 'Options',
		name: 'options',
		placeholder: 'Add Option',
		type: 'collection',
		default: {},
		options: [
			{
				displayName: 'Maximum Number of Tokens',
				description: 'Fewer tokens will result in shorter, less detailed image description',
				name: 'maxTokens',
				type: 'number',
				default: 1024,
				typeOptions: {
					minValue: 1,
				},
			},
		],
	},
];
const displayOptions = {
show: {
operation: ['analyze'],
resource: ['image'],
},
};
export const description = updateDisplayOptions(displayOptions, properties);
/**
 * Execute the image → analyze operation for item `i`.
 *
 * Sends the referenced image(s) as data URLs together with the question text
 * to /chat/completions and returns either the raw response or just the
 * assistant's text content (when "Simplify Output" is enabled).
 */
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
	const model = this.getNodeParameter('modelId', i, '', { extractValue: true }) as string;
	const text = this.getNodeParameter('text', i, '') as string;
	const simplify = this.getNodeParameter('simplify', i, true) as boolean;
	const options = this.getNodeParameter('options', i, {}) as { maxTokens?: number };
	const binaryPropertyNames = this.getNodeParameter('binaryPropertyName', i, 'data');

	// Build the multimodal content: every referenced image first, then the text.
	const content: ContentBlock[] = [];
	for (const propertyName of prepareBinaryPropertyList(binaryPropertyNames)) {
		const binaryData = this.helpers.assertBinaryData(i, propertyName);
		const buffer = await this.helpers.getBinaryDataBuffer(i, propertyName);
		const dataUrl = `data:${binaryData.mimeType};base64,${buffer.toString('base64')}`;
		content.push({ type: 'image_url', image_url: { url: dataUrl } });
	}
	content.push({ type: 'text', text });

	const response = (await apiRequest.call(this, 'POST', '/chat/completions', {
		body: {
			model,
			max_tokens: options.maxTokens ?? 1024,
			// Vision analysis runs without deep-reasoning mode.
			thinking: { type: 'disabled' as const },
			messages: [
				{
					role: 'user',
					content,
				},
			],
		},
	})) as ChatCompletionResponse;

	if (!simplify) {
		return [
			{
				json: { ...response },
				pairedItem: { item: i },
			},
		];
	}

	const message = response.choices?.[0]?.message;
	return [
		{
			json: {
				content: message?.content ?? '',
			},
			pairedItem: { item: i },
		},
	];
}

View file

@ -0,0 +1,29 @@
import type { INodeProperties } from 'n8n-workflow';
import * as analyze from './analyze.operation';
export { analyze };
// Operation selector for the image resource, followed by the properties of
// each operation (currently only "analyze").
export const description: INodeProperties[] = [
	{
		displayName: 'Operation',
		name: 'operation',
		type: 'options',
		noDataExpression: true,
		options: [
			{
				name: 'Analyze Image',
				value: 'analyze',
				action: 'Analyze image',
				description: 'Analyze an image and answer questions about it',
			},
		],
		default: 'analyze',
		displayOptions: {
			show: {
				resource: ['image'],
			},
		},
	},
	...analyze.description,
];

View file

@ -0,0 +1,8 @@
import type { AllEntities } from 'n8n-workflow';
// Maps each resource to the operations it supports.
type NodeMap = {
	text: 'message';
	image: 'analyze';
};

// Discriminated union of every valid { resource, operation } pair, used by
// the router to narrow the operation type per resource.
export type MoonshotType = AllEntities<NodeMap>;

View file

@ -0,0 +1,89 @@
import { mockDeep } from 'jest-mock-extended';
import type { IExecuteFunctions } from 'n8n-workflow';
import * as image from './image';
import { router } from './router';
import * as text from './text';
describe('Moonshot router', () => {
	const mockExecuteFunctions = mockDeep<IExecuteFunctions>();
	// Spy on the operation handlers so routing can be asserted without
	// executing the real API-backed implementations.
	const mockImage = jest.spyOn(image.analyze, 'execute');
	const mockText = jest.spyOn(text.message, 'execute');
	// [handler spy, resource parameter, operation parameter]
	const operationMocks = [
		[mockImage, 'image', 'analyze'],
		[mockText, 'text', 'message'],
	];
	beforeEach(() => {
		jest.resetAllMocks();
	});
	it.each(operationMocks)('should call the correct method', async (mock, resource, operation) => {
		// getNodeParameter is only called with 'resource' or 'operation' here.
		mockExecuteFunctions.getNodeParameter.mockImplementation((parameter) =>
			parameter === 'resource' ? resource : operation,
		);
		mockExecuteFunctions.getInputData.mockReturnValue([{ json: {} }]);
		(mock as jest.Mock).mockResolvedValue([{ json: { foo: 'bar' } }]);
		const result = await router.call(mockExecuteFunctions);
		expect(mock).toHaveBeenCalledWith(0);
		expect(result).toEqual([[{ json: { foo: 'bar' } }]]);
	});
	it('should return an error if the resource is not supported', async () => {
		mockExecuteFunctions.getNodeParameter.mockImplementation((parameter) =>
			parameter === 'resource' ? 'foo' : 'bar',
		);
		mockExecuteFunctions.getInputData.mockReturnValue([{ json: {} }]);
		await expect(router.call(mockExecuteFunctions)).rejects.toThrow(
			'The resource "foo" is not supported!',
		);
	});
	it('should loop over all items', async () => {
		mockExecuteFunctions.getNodeParameter.mockImplementation((parameter) =>
			parameter === 'resource' ? 'text' : 'message',
		);
		mockExecuteFunctions.getInputData.mockReturnValue([
			{ json: { text: 'item 1' } },
			{ json: { text: 'item 2' } },
		]);
		// One queued response per input item; the router flattens them.
		mockText.mockResolvedValueOnce([{ json: { response: 'foo' } }]);
		mockText.mockResolvedValueOnce([{ json: { response: 'bar' } }]);
		const result = await router.call(mockExecuteFunctions);
		expect(result).toEqual([[{ json: { response: 'foo' } }, { json: { response: 'bar' } }]]);
	});
	it('should continue on fail', async () => {
		mockExecuteFunctions.continueOnFail.mockReturnValue(true);
		mockExecuteFunctions.getNodeParameter.mockImplementation((parameter) =>
			parameter === 'resource' ? 'text' : 'message',
		);
		mockExecuteFunctions.getInputData.mockReturnValue([{ json: {} }, { json: {} }]);
		mockText.mockRejectedValue(new Error('Some error'));
		const result = await router.call(mockExecuteFunctions);
		// Each failed item becomes an error item paired to its input index.
		expect(result).toEqual([
			[
				{ json: { error: 'Some error' }, pairedItem: { item: 0 } },
				{ json: { error: 'Some error' }, pairedItem: { item: 1 } },
			],
		]);
	});
	it('should throw an error if continueOnFail is false', async () => {
		mockExecuteFunctions.continueOnFail.mockReturnValue(false);
		mockExecuteFunctions.getNodeParameter.mockImplementation((parameter) =>
			parameter === 'resource' ? 'text' : 'message',
		);
		mockExecuteFunctions.getInputData.mockReturnValue([{ json: {} }]);
		mockText.mockRejectedValue(new Error('Some error'));
		await expect(router.call(mockExecuteFunctions)).rejects.toThrow('Some error');
	});
});

View file

@ -0,0 +1,49 @@
import { NodeOperationError, type IExecuteFunctions, type INodeExecutionData } from 'n8n-workflow';
import * as image from './image';
import type { MoonshotType } from './node.type';
import * as text from './text';
/**
 * Dispatch every input item to the handler for the selected
 * resource/operation pair and collect the results into a single output.
 *
 * @throws NodeOperationError when the resource is unknown, or when a handler
 *   fails and "continue on fail" is disabled.
 */
export async function router(this: IExecuteFunctions) {
	const output: INodeExecutionData[] = [];
	const inputItems = this.getInputData();
	const resource = this.getNodeParameter('resource', 0);
	const operation = this.getNodeParameter('operation', 0);
	// Narrow the pair through MoonshotType so the operation lookup is typed.
	const nodeType = { resource, operation } as MoonshotType;

	let operationHandler;
	switch (nodeType.resource) {
		case 'image':
			operationHandler = image[nodeType.operation].execute;
			break;
		case 'text':
			operationHandler = text[nodeType.operation].execute;
			break;
		default:
			throw new NodeOperationError(this.getNode(), `The resource "${resource}" is not supported!`);
	}

	for (let itemIndex = 0; itemIndex < inputItems.length; itemIndex++) {
		try {
			output.push(...(await operationHandler.call(this, itemIndex)));
		} catch (error) {
			if (!this.continueOnFail()) {
				throw new NodeOperationError(this.getNode(), error, {
					itemIndex,
					description: error.description,
				});
			}
			// Record the failure as an item so the remaining items still run.
			output.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
		}
	}

	return [output];
}

View file

@ -0,0 +1,29 @@
import type { INodeProperties } from 'n8n-workflow';
import * as message from './message.operation';
export { message };
// Operation selector for the text resource, followed by the properties of
// each operation (currently only "message").
export const description: INodeProperties[] = [
	{
		displayName: 'Operation',
		name: 'operation',
		type: 'options',
		noDataExpression: true,
		options: [
			{
				name: 'Message a Model',
				value: 'message',
				action: 'Message a model',
				description: 'Send a message and get a response from a Moonshot Kimi model',
			},
		],
		default: 'message',
		displayOptions: {
			show: {
				resource: ['text'],
			},
		},
	},
	...message.description,
];

View file

@ -0,0 +1,471 @@
import type { Tool } from '@langchain/core/tools';
import type {
IDataObject,
IExecuteFunctions,
INodeExecutionData,
INodeProperties,
} from 'n8n-workflow';
import { accumulateTokenUsage, jsonParse, updateDisplayOptions } from 'n8n-workflow';
import zodToJsonSchema from 'zod-to-json-schema';
import { getConnectedTools } from '@utils/helpers';
import type {
BuiltinTool,
ChatCompletionResponse,
ChatMessage,
ContentBlock,
ToolCall,
ToolFunction,
} from '../../helpers/interfaces';
import { prepareBinaryPropertyList } from '../../helpers/utils';
import { apiRequest } from '../../transport';
import { modelRLC } from '../descriptions';
// UI properties for the text → message operation.
const properties: INodeProperties[] = [
	modelRLC,
	{
		// Ordered conversation turns sent to the model.
		displayName: 'Messages',
		name: 'messages',
		type: 'fixedCollection',
		typeOptions: {
			sortable: true,
			multipleValues: true,
		},
		placeholder: 'Add Message',
		default: { values: [{ content: '', role: 'user' }] },
		options: [
			{
				displayName: 'Values',
				name: 'values',
				values: [
					{
						displayName: 'Prompt',
						name: 'content',
						type: 'string',
						description: 'The content of the message to be sent',
						default: '',
						placeholder: 'e.g. Hello, how can you help me?',
						typeOptions: {
							rows: 2,
						},
					},
					{
						displayName: 'Role',
						name: 'role',
						type: 'options',
						description:
							"Role in shaping the model's response, it tells the model how it should behave and interact with the user",
						options: [
							{
								name: 'User',
								value: 'user',
								description: 'Send a message as a user and get a response from the model',
							},
							{
								name: 'Assistant',
								value: 'assistant',
								description: 'Tell the model to adopt a specific tone or personality',
							},
						],
						default: 'user',
					},
				],
			},
		],
	},
	{
		// Toggle that reveals the binary-field selector below.
		displayName: 'Add Attachments',
		name: 'addAttachments',
		type: 'boolean',
		default: false,
		description: 'Whether to add image attachments to the message',
	},
	{
		displayName: 'Attachment Input Data Field Name(s)',
		name: 'binaryPropertyName',
		type: 'string',
		default: 'data',
		placeholder: 'e.g. data',
		description:
			'Name of the binary field(s) which contains the image(s) to attach, separate multiple field names with commas',
		typeOptions: {
			binaryDataProperty: true,
		},
		displayOptions: {
			show: {
				addAttachments: [true],
			},
		},
	},
	{
		displayName: 'Simplify Output',
		name: 'simplify',
		type: 'boolean',
		default: true,
		description: 'Whether to return a simplified version of the response instead of the raw data',
	},
	{
		displayName: 'Options',
		name: 'options',
		placeholder: 'Add Option',
		type: 'collection',
		default: {},
		options: [
			{
				displayName: 'Frequency Penalty',
				name: 'frequencyPenalty',
				default: 0,
				typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
				description:
					"Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim",
				type: 'number',
			},
			{
				displayName: 'Include Merged Response',
				name: 'includeMergedResponse',
				type: 'boolean',
				default: false,
				description:
					'Whether to include a single output string merging all text parts of the response',
			},
			{
				displayName: 'Maximum Number of Tokens',
				name: 'maxTokens',
				default: 1024,
				description: 'The maximum number of tokens to generate in the completion',
				type: 'number',
				typeOptions: {
					minValue: 1,
					numberPrecision: 0,
				},
			},
			{
				displayName: 'Max Tool Calls Iterations',
				name: 'maxToolsIterations',
				type: 'number',
				default: 15,
				description:
					'The maximum number of tool iteration cycles the LLM will run before stopping. A single iteration can contain multiple tool calls. Set to 0 for no limit.',
				typeOptions: {
					minValue: 0,
					numberPrecision: 0,
				},
			},
			{
				displayName: 'Output Randomness (Temperature)',
				name: 'temperature',
				default: 0.7,
				description:
					'Controls the randomness of the output. Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
				type: 'number',
				typeOptions: {
					minValue: 0,
					maxValue: 1,
					numberPrecision: 1,
				},
			},
			{
				displayName: 'Output Randomness (Top P)',
				name: 'topP',
				default: 1,
				description: 'The maximum cumulative probability of tokens to consider when sampling',
				type: 'number',
				typeOptions: {
					minValue: 0,
					maxValue: 1,
					numberPrecision: 1,
				},
			},
			{
				displayName: 'Presence Penalty',
				name: 'presencePenalty',
				default: 0,
				typeOptions: { maxValue: 2, minValue: -2, numberPrecision: 1 },
				description:
					"Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics",
				type: 'number',
			},
			{
				displayName: 'Response Format',
				name: 'responseFormat',
				default: 'text',
				type: 'options',
				options: [
					{
						name: 'Text',
						value: 'text',
						description: 'Regular text response',
					},
					{
						name: 'JSON',
						value: 'json_object',
						description:
							'Enables JSON mode, which should guarantee the message the model generates is valid JSON',
					},
				],
			},
			{
				displayName: 'System Message',
				name: 'system',
				type: 'string',
				default: '',
				placeholder: 'e.g. You are a helpful assistant',
			},
			{
				// Mutually exclusive with Web Search (enforced in execute).
				displayName: 'Thinking Mode',
				name: 'thinkingMode',
				type: 'boolean',
				default: false,
				description:
					'Whether to enable thinking mode for deep reasoning. The model will include reasoning steps in the response. Cannot be used together with Web Search.',
			},
			{
				displayName: 'Web Search',
				name: 'webSearch',
				type: 'boolean',
				default: false,
				description:
					'Whether to enable built-in web search. The model will search the web for relevant information. Cannot be used together with Thinking Mode.',
			},
		],
	},
];
const displayOptions = {
show: {
operation: ['message'],
resource: ['text'],
},
};
export const description = updateDisplayOptions(displayOptions, properties);
// Shape of the 'options' collection parameter for the message operation.
interface MessageOptions {
	includeMergedResponse?: boolean; // add merged_response to the output
	maxTokens?: number; // completion token cap; execute defaults to 1024
	system?: string; // optional system prompt prepended to the conversation
	temperature?: number;
	topP?: number;
	frequencyPenalty?: number;
	presencePenalty?: number;
	responseFormat?: string; // 'text' | 'json_object'
	thinkingMode?: boolean; // deep reasoning; ignored when webSearch is on
	webSearch?: boolean; // enables the built-in $web_search tool
}
/**
 * Execute the text → message operation for item `i`.
 *
 * Builds the chat payload (system prompt, configured messages, optional image
 * attachments and tool definitions), calls /chat/completions, and — while the
 * model keeps requesting tool calls — runs the connected tools and re-sends
 * the growing conversation until a final answer is produced, the iteration
 * cap is hit, or the execution is cancelled.
 */
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
	const model = this.getNodeParameter('modelId', i, '', { extractValue: true }) as string;
	const rawMessages = this.getNodeParameter('messages.values', i, []) as Array<{
		content: string;
		role: string;
	}>;
	const addAttachments = this.getNodeParameter('addAttachments', i, false) as boolean;
	const simplify = this.getNodeParameter('simplify', i, true) as boolean;
	const options = this.getNodeParameter('options', i, {}) as MessageOptions;
	// The system prompt (if any) must come first in the conversation.
	const messages: ChatMessage[] = [];
	if (options.system) {
		messages.push({ role: 'system', content: options.system });
	}
	for (const msg of rawMessages) {
		messages.push({ role: msg.role as 'user' | 'assistant', content: msg.content });
	}
	if (addAttachments) {
		// Mutates `messages` in place: attaches images to the last user message.
		await addAttachmentsToMessages.call(this, i, messages);
	}
	const { tools, connectedTools } = await getToolDefinitions.call(this, options);
	// NOTE: body.messages keeps a live reference to `messages`, so appending
	// tool results below updates the payload for the follow-up requests too.
	const body: IDataObject = {
		model,
		messages,
		max_tokens: options.maxTokens ?? 1024,
	};
	if (options.temperature !== undefined) body.temperature = options.temperature;
	if (options.topP !== undefined) body.top_p = options.topP;
	if (options.frequencyPenalty !== undefined) body.frequency_penalty = options.frequencyPenalty;
	if (options.presencePenalty !== undefined) body.presence_penalty = options.presencePenalty;
	if (tools.length > 0) {
		body.tools = tools;
	}
	if (options.responseFormat && options.responseFormat !== 'text') {
		body.response_format = { type: options.responseFormat };
	}
	// Thinking mode and web search are mutually exclusive; web search wins.
	if (options.thinkingMode && !options.webSearch) {
		body.thinking = { type: 'enabled' };
	} else {
		body.thinking = { type: 'disabled' };
	}
	let response = (await apiRequest.call(this, 'POST', '/chat/completions', {
		body,
	})) as ChatCompletionResponse;
	// Accumulate token usage after every round-trip (initial call + each
	// tool-call follow-up).
	const captureUsage = () => {
		const usage = response.usage;
		if (usage) {
			accumulateTokenUsage(this, usage.prompt_tokens, usage.completion_tokens);
		}
	};
	captureUsage();
	const maxToolsIterations = this.getNodeParameter('options.maxToolsIterations', i, 15) as number;
	const abortSignal = this.getExecutionCancelSignal();
	let currentIteration = 0;
	// Tool-call loop: keep resolving tool calls until the model stops asking,
	// the iteration cap is reached (0 = unlimited), or execution is cancelled.
	while (true) {
		if (abortSignal?.aborted) {
			break;
		}
		const choice = response.choices?.[0];
		if (choice?.finish_reason !== 'tool_calls' || !choice.message.tool_calls?.length) {
			break;
		}
		if (maxToolsIterations > 0 && currentIteration >= maxToolsIterations) {
			break;
		}
		// Echo the assistant turn (with its tool calls) back into the history,
		// as required by the chat-completions tool protocol.
		const assistantMsg: ChatMessage = {
			role: 'assistant',
			content: choice.message.content ?? '',
			tool_calls: choice.message.tool_calls,
		};
		if (choice.message.reasoning_content) {
			assistantMsg.reasoning_content = choice.message.reasoning_content;
		}
		messages.push(assistantMsg);
		await handleToolUse.call(this, choice.message.tool_calls, messages, connectedTools);
		currentIteration++;
		response = (await apiRequest.call(this, 'POST', '/chat/completions', {
			body,
		})) as ChatCompletionResponse;
		captureUsage();
	}
	const finalMessage = response.choices?.[0]?.message;
	const mergedResponse = options.includeMergedResponse ? (finalMessage?.content ?? '') : undefined;
	if (simplify) {
		const result: IDataObject = {
			content: finalMessage?.content ?? '',
		};
		if (options.thinkingMode && finalMessage?.reasoning_content) {
			result.reasoning_content = finalMessage.reasoning_content;
		}
		if (mergedResponse !== undefined) {
			result.merged_response = mergedResponse;
		}
		return [
			{
				json: result,
				pairedItem: { item: i },
			},
		];
	}
	return [
		{
			json: { ...response, merged_response: mergedResponse },
			pairedItem: { item: i },
		},
	];
}
/**
 * Build the tool definitions for the request: one function tool per connected
 * ai_tool node, plus the builtin $web_search tool when web search is enabled.
 * Also returns the connected tool instances so they can be invoked later.
 */
async function getToolDefinitions(this: IExecuteFunctions, options: MessageOptions) {
	// Only resolve connected tools when an ai_tool input is actually wired up.
	const hasToolInput = this.getNodeInputs().some((input) => input.type === 'ai_tool');
	const connectedTools: Tool[] = hasToolInput ? await getConnectedTools(this, true) : [];

	const tools: Array<ToolFunction | BuiltinTool> = [];
	for (const tool of connectedTools) {
		tools.push({
			type: 'function' as const,
			function: {
				name: tool.name,
				description: tool.description,
				// Moonshot expects JSON Schema, not the tool's zod schema.
				parameters: zodToJsonSchema(tool.schema) as IDataObject,
			},
		});
	}

	if (options.webSearch) {
		tools.push({
			type: 'builtin_function',
			function: { name: '$web_search' },
		});
	}

	return { tools, connectedTools };
}
/**
 * Convert the configured binary fields of item `i` into image_url content
 * blocks and attach them to the conversation (mutates `messages` in place).
 *
 * The images are merged into the most recent plain-text user message, which
 * is converted to multimodal content; when no such message exists, a new
 * image-only user message is appended. No-op when no images are found.
 */
async function addAttachmentsToMessages(
	this: IExecuteFunctions,
	i: number,
	messages: ChatMessage[],
) {
	const binaryPropertyNames = this.getNodeParameter('binaryPropertyName', i, 'data');
	const imageBlocks: ContentBlock[] = [];
	for (const propertyName of prepareBinaryPropertyList(binaryPropertyNames)) {
		const binaryData = this.helpers.assertBinaryData(i, propertyName);
		const buffer = await this.helpers.getBinaryDataBuffer(i, propertyName);
		imageBlocks.push({
			type: 'image_url',
			image_url: { url: `data:${binaryData.mimeType};base64,${buffer.toString('base64')}` },
		});
	}
	if (imageBlocks.length === 0) return;

	const target = [...messages].reverse().find((m) => m.role === 'user');
	if (target && typeof target.content === 'string') {
		// Keep the original prompt text as the final content block.
		imageBlocks.push({ type: 'text', text: target.content });
		target.content = imageBlocks;
	} else {
		messages.push({ role: 'user', content: imageBlocks });
	}
}
/**
 * Run each requested tool call against the matching connected tool and append
 * a `tool` message with the serialized result (mutates `messages` in place).
 * Calls with no matching connected tool produce an empty-string response.
 */
async function handleToolUse(
	this: IExecuteFunctions,
	toolCalls: ToolCall[],
	messages: ChatMessage[],
	connectedTools: Tool[],
) {
	for (const toolCall of toolCalls) {
		let toolResponse: unknown;
		for (const tool of connectedTools) {
			if (tool.name === toolCall.function.name) {
				// Tool arguments arrive as a JSON string from the model.
				const args = jsonParse<IDataObject>(toolCall.function.arguments);
				toolResponse = await tool.invoke(args);
			}
		}
		const serialized =
			typeof toolResponse === 'object'
				? JSON.stringify(toolResponse)
				: ((toolResponse as string) ?? '');
		messages.push({
			role: 'tool',
			content: serialized,
			tool_call_id: toolCall.id,
		});
	}
}

View file

@ -0,0 +1,72 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
import { NodeConnectionTypes, type INodeTypeDescription } from 'n8n-workflow';
import * as image from './image';
import * as text from './text';
// Static node description for version 1 of the Moonshot Kimi node.
export const versionDescription: INodeTypeDescription = {
	displayName: 'Moonshot Kimi',
	name: 'moonshot',
	icon: { light: 'file:moonshot.svg', dark: 'file:moonshot.dark.svg' },
	group: ['transform'],
	version: 1,
	subtitle: '={{ $parameter["operation"] + ": " + $parameter["resource"] }}',
	description: 'Interact with Moonshot Kimi AI models',
	defaults: {
		name: 'Moonshot Kimi',
	},
	usableAsTool: true,
	codex: {
		alias: ['kimi', 'moonshot', 'LangChain', 'image', 'vision'],
		categories: ['AI'],
		subcategories: {
			AI: ['Agents', 'Miscellaneous', 'Root Nodes'],
		},
		resources: {
			primaryDocumentation: [
				{
					url: 'https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-langchain.moonshot/',
				},
			],
		},
	},
	// Dynamic inputs: the text → message operation additionally accepts
	// ai_tool connections; every other resource/operation only takes main.
	inputs: `={{
		(() => {
			const resource = $parameter.resource;
			const operation = $parameter.operation;
			if (resource === 'text' && operation === 'message') {
				return [{ type: 'main' }, { type: 'ai_tool', displayName: 'Tools' }];
			}
			return ['main'];
		})()
	}}`,
	outputs: [NodeConnectionTypes.Main],
	credentials: [
		{
			name: 'moonshotApi',
			required: true,
		},
	],
	properties: [
		{
			displayName: 'Resource',
			name: 'resource',
			type: 'options',
			noDataExpression: true,
			options: [
				{
					name: 'Image',
					value: 'image',
				},
				{
					name: 'Text',
					value: 'text',
				},
			],
			default: 'text',
		},
		...image.description,
		...text.description,
	],
};

View file

@ -0,0 +1,60 @@
import type { IDataObject } from 'n8n-workflow';
// One turn in a chat-completions conversation.
export interface ChatMessage {
	role: 'system' | 'user' | 'assistant' | 'tool';
	// Plain text, or multimodal content blocks (text + images).
	content: string | ContentBlock[];
	tool_call_id?: string; // set on 'tool' messages to link the result to its call
	tool_calls?: ToolCall[]; // set on assistant messages that request tool calls
	reasoning_content?: string; // thinking-mode reasoning echoed back to the API
}
// A single piece of multimodal message content.
export type ContentBlock =
	| { type: 'text'; text: string }
	| { type: 'image_url'; image_url: { url: string } };
// A function tool definition sent in the request's `tools` array.
export interface ToolFunction {
	type: 'function';
	function: {
		name: string;
		description?: string;
		parameters?: IDataObject; // JSON Schema for the tool's arguments
	};
}
// A Moonshot built-in tool (e.g. $web_search).
export interface BuiltinTool {
	type: 'builtin_function';
	function: {
		name: string;
	};
}
// A tool invocation requested by the model.
export interface ToolCall {
	id: string;
	type: 'function';
	function: {
		name: string;
		arguments: string; // JSON-encoded argument object
	};
}
// Subset of the /chat/completions response consumed by this node.
export interface ChatCompletionResponse {
	id: string;
	object: string;
	created: number;
	model: string;
	choices: Array<{
		index: number;
		message: {
			role: string;
			content: string | null;
			reasoning_content?: string | null;
			tool_calls?: ToolCall[];
		};
		finish_reason: string;
	}>;
	usage: {
		prompt_tokens: number;
		completion_tokens: number;
		total_tokens: number;
	};
}

View file

@ -0,0 +1,10 @@
import type { IBinaryData } from 'n8n-workflow';
/**
 * Normalize the 'binaryPropertyName' parameter into a list.
 *
 * Arrays are returned as-is, a single object is wrapped in an array, and a
 * comma-separated string is split into trimmed, non-empty field names.
 */
export function prepareBinaryPropertyList(data: string | string[] | IBinaryData | IBinaryData[]) {
	if (Array.isArray(data)) return data;
	if (typeof data === 'object') return [data];

	const names: string[] = [];
	for (const part of data.split(',')) {
		const name = part.trim();
		if (name) names.push(name);
	}
	return names;
}

View file

@ -0,0 +1 @@
export * as listSearch from './listSearch';

View file

@ -0,0 +1,24 @@
import type { ILoadOptionsFunctions, INodeListSearchResult } from 'n8n-workflow';
import { apiRequest } from '../transport';
/**
 * List-search method backing the Model resource locator: fetches the models
 * available to the credential and optionally filters them by a
 * case-insensitive substring match on the model ID.
 */
export async function modelSearch(
	this: ILoadOptionsFunctions,
	filter?: string,
): Promise<INodeListSearchResult> {
	const { data } = (await apiRequest.call(this, 'GET', '/models')) as {
		data: Array<{ id: string }>;
	};
	const query = filter ? filter.toLowerCase() : undefined;
	const matching = query ? data.filter(({ id }) => id.toLowerCase().includes(query)) : data;
	return {
		results: matching.map(({ id }) => ({
			name: id,
			value: id,
		})),
	};
}

View file

@ -0,0 +1,4 @@
<svg width="40" height="40" viewBox="-5 -2.5 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M21.7202 0.939941C22.9502 0.939941 23.9502 1.93994 23.9502 3.16994C23.9502 4.39994 22.9502 5.39994 21.7202 5.39994H19.7502C19.6002 5.39994 19.4902 5.27994 19.4902 5.13994V3.16994C19.4902 1.93994 20.4902 0.939941 21.7202 0.939941Z" fill="#1783FF"/>
<path d="M9.39 13.9501L17.82 5.59012C17.98 5.43012 17.89 5.12012 17.68 5.12012H13.14C13.14 5.12012 13.04 5.14012 13 5.18012L3.92 14.1901C3.78 14.3301 3.57 14.2101 3.57 13.9801V5.39012C3.57 5.24012 3.47 5.12012 3.35 5.12012H0.219999C0.0999993 5.12012 0 5.24012 0 5.39012V23.9201C0 24.0701 0.0999993 24.1901 0.219999 24.1901H3.35C3.47 24.1901 3.57 24.0701 3.57 23.9201V20.1401C3.57 20.0601 3.6 19.9801 3.65 19.9301L6.47 17.1401C6.54 17.0701 6.63 17.0601 6.71 17.1101L14.24 22.6501C15.47 23.4801 16.85 23.9901 18.25 24.1401C18.37 24.1501 18.48 24.0301 18.48 23.8701V20.3101C18.48 20.1701 18.4 20.0601 18.29 20.0501C17.47 19.9201 16.66 19.6001 15.94 19.1101L9.42 14.3901C9.28 14.3001 9.27 14.0701 9.39 13.9501Z" fill="white"/>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View file

@ -0,0 +1,4 @@
<svg width="40" height="40" viewBox="-5 -2.5 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M21.7202 0.939941C22.9502 0.939941 23.9502 1.93994 23.9502 3.16994C23.9502 4.39994 22.9502 5.39994 21.7202 5.39994H19.7502C19.6002 5.39994 19.4902 5.27994 19.4902 5.13994V3.16994C19.4902 1.93994 20.4902 0.939941 21.7202 0.939941Z" fill="#1783FF"/>
<path d="M9.39 13.9501L17.82 5.59012C17.98 5.43012 17.89 5.12012 17.68 5.12012H13.14C13.14 5.12012 13.04 5.14012 13 5.18012L3.92 14.1901C3.78 14.3301 3.57 14.2101 3.57 13.9801V5.39012C3.57 5.24012 3.47 5.12012 3.35 5.12012H0.219999C0.0999993 5.12012 0 5.24012 0 5.39012V23.9201C0 24.0701 0.0999993 24.1901 0.219999 24.1901H3.35C3.47 24.1901 3.57 24.0701 3.57 23.9201V20.1401C3.57 20.0601 3.6 19.9801 3.65 19.9301L6.47 17.1401C6.54 17.0701 6.63 17.0601 6.71 17.1101L14.24 22.6501C15.47 23.4801 16.85 23.9901 18.25 24.1401C18.37 24.1501 18.48 24.0301 18.48 23.8701V20.3101C18.48 20.1701 18.4 20.0601 18.29 20.0501C17.47 19.9201 16.66 19.6001 15.94 19.1101L9.42 14.3901C9.28 14.3001 9.27 14.0701 9.39 13.9501Z" fill="black"/>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View file

@ -0,0 +1,78 @@
import type { IExecuteFunctions } from 'n8n-workflow';
import { mockDeep } from 'jest-mock-extended';
import { apiRequest } from '.';
describe('Moonshot transport', () => {
	const executeFunctionsMock = mockDeep<IExecuteFunctions>();
	beforeEach(() => {
		jest.clearAllMocks();
	});
	it('should call httpRequestWithAuthentication with correct parameters', async () => {
		// Credential provides an explicit base URL.
		executeFunctionsMock.getCredentials.mockResolvedValue({
			url: 'https://api.moonshot.ai/v1',
		});
		await apiRequest.call(executeFunctionsMock, 'POST', '/chat/completions', {
			body: {
				model: 'kimi-k2.5',
				messages: [{ role: 'user', content: 'Hello' }],
			},
		});
		expect(executeFunctionsMock.helpers.httpRequestWithAuthentication).toHaveBeenCalledWith(
			'moonshotApi',
			{
				method: 'POST',
				url: 'https://api.moonshot.ai/v1/chat/completions',
				json: true,
				body: {
					model: 'kimi-k2.5',
					messages: [{ role: 'user', content: 'Hello' }],
				},
				headers: {},
			},
		);
	});
	it('should use the default url if no custom url is provided', async () => {
		// No url in the credential → transport falls back to the public API.
		executeFunctionsMock.getCredentials.mockResolvedValue({});
		await apiRequest.call(executeFunctionsMock, 'GET', '/models');
		expect(executeFunctionsMock.helpers.httpRequestWithAuthentication).toHaveBeenCalledWith(
			'moonshotApi',
			{
				method: 'GET',
				url: 'https://api.moonshot.ai/v1/models',
				json: true,
				headers: {},
			},
		);
	});
	it('should override the values with option', async () => {
		executeFunctionsMock.getCredentials.mockResolvedValue({
			url: 'https://api.moonshot.ai/v1',
		});
		// The 'option' bag is merged last, so it can replace the computed url.
		await apiRequest.call(executeFunctionsMock, 'GET', '', {
			option: {
				url: 'https://override-url.com',
				returnFullResponse: true,
			},
		});
		expect(executeFunctionsMock.helpers.httpRequestWithAuthentication).toHaveBeenCalledWith(
			'moonshotApi',
			{
				method: 'GET',
				url: 'https://override-url.com',
				json: true,
				returnFullResponse: true,
				headers: {},
			},
		);
	});
});

View file

@ -0,0 +1,41 @@
import type {
IDataObject,
IExecuteFunctions,
IHttpRequestMethods,
ILoadOptionsFunctions,
} from 'n8n-workflow';
// Optional pieces of an API request; 'option' is merged last and may
// override any computed request option (including the url).
type RequestParameters = {
	headers?: IDataObject;
	body?: IDataObject;
	qs?: IDataObject;
	option?: IDataObject;
};

/**
 * Perform an authenticated HTTP request against the Moonshot API using the
 * 'moonshotApi' credential. The credential's url (when set) overrides the
 * default base URL, and `parameters.option` can override any request option.
 *
 * @param method HTTP method
 * @param endpoint path appended to the base URL, e.g. '/chat/completions'
 * @param parameters optional headers, body, query string, and overrides
 * @returns the parsed JSON response from the API
 */
export async function apiRequest(
	this: IExecuteFunctions | ILoadOptionsFunctions,
	method: IHttpRequestMethods,
	endpoint: string,
	parameters?: RequestParameters,
) {
	const { body, qs, option, headers } = parameters ?? {};
	const credentials = await this.getCredentials('moonshotApi');

	// Use '||' (not '??') so an empty-string credential URL also falls back to
	// the public endpoint instead of producing a relative URL, and strip any
	// trailing slashes so the concatenation never yields a double slash.
	const baseUrl = ((credentials.url as string) || 'https://api.moonshot.ai/v1').replace(/\/+$/, '');

	const options = {
		headers: headers ?? {},
		method,
		body,
		qs,
		url: `${baseUrl}${endpoint}`,
		json: true,
	};

	// Apply per-call overrides (e.g. a full url replacement) last.
	if (option && Object.keys(option).length !== 0) {
		Object.assign(options, option);
	}

	return await this.helpers.httpRequestWithAuthentication.call(this, 'moonshotApi', options);
}

View file

@ -83,6 +83,7 @@
"dist/nodes/vendors/AlibabaCloud/AlibabaCloud.node.js",
"dist/nodes/vendors/Anthropic/Anthropic.node.js",
"dist/nodes/vendors/GoogleGemini/GoogleGemini.node.js",
"dist/nodes/vendors/Moonshot/Moonshot.node.js",
"dist/nodes/vendors/Ollama/Ollama.node.js",
"dist/nodes/vendors/OpenAi/OpenAi.node.js",
"dist/nodes/vendors/Microsoft/MicrosoftAgent365Trigger.node.js",

View file

@ -118,6 +118,7 @@ export const ANTHROPIC_LANGCHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.anthropic
export const OLLAMA_LANGCHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.ollama';
export const GOOGLE_GEMINI_LANGCHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.googleGemini';
export const ALIBABA_CLOUD_LANGCHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.alibabaCloud';
export const MOONSHOT_LANGCHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.moonshot';
export const AI_VENDOR_NODE_TYPES = [
OPENAI_LANGCHAIN_NODE_TYPE,
@ -125,6 +126,7 @@ export const AI_VENDOR_NODE_TYPES = [
OLLAMA_LANGCHAIN_NODE_TYPE,
GOOGLE_GEMINI_LANGCHAIN_NODE_TYPE,
ALIBABA_CLOUD_LANGCHAIN_NODE_TYPE,
MOONSHOT_LANGCHAIN_NODE_TYPE,
];
export const LANGCHAIN_LM_NODE_TYPE_PREFIX = '@n8n/n8n-nodes-langchain.lm';