fix(Mistral Cloud Chat Mode Node): Support JSON response from latest Magistral models (#20853)

This commit is contained in:
Mutasem Aldmour 2025-10-16 14:52:16 +02:00 committed by GitHub
parent 91c09bb9dd
commit b8f43d32cd
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 1977 additions and 357 deletions

View file

@@ -89,6 +89,7 @@
],
"overrides": {
"@azure/identity": "^4.3.0",
"@mistralai/mistralai": "^1.10.0",
"@n8n/typeorm>@sentry/node": "catalog:",
"@types/node": "^20.17.50",
"axios": "1.12.0",

View file

@@ -77,6 +77,11 @@ export function getOutputParserForLLM(
return new NaiveJsonOutputParser();
}
// For example Mistral's Magistral models (LmChatMistralCloud node)
if (llm.metadata?.output_format === 'json') {
return new NaiveJsonOutputParser();
}
return new StringOutputParser();
}

View file

@@ -76,6 +76,26 @@ describe('chainExecutor', () => {
const parser = chainExecutor.getOutputParserForLLM(model as unknown as BaseChatModel);
expect(parser).toBeInstanceOf(NaiveJsonOutputParser);
});
it('should return NaiveJsonOutputParser for models with metadata output_format set to json', () => {
const model = mock<BaseChatModel>({
metadata: {
output_format: 'json',
},
});
const parser = chainExecutor.getOutputParserForLLM(model);
expect(parser).toBeInstanceOf(NaiveJsonOutputParser);
});
it('should return StringOutputParser for models with metadata output_format not set to json', () => {
const model = mock<BaseChatModel>({
metadata: {
output_format: 'text',
},
});
const parser = chainExecutor.getOutputParserForLLM(model);
expect(parser).toBeInstanceOf(StringOutputParser);
});
});
describe('NaiveJsonOutputParser', () => {

View file

@@ -13,6 +13,8 @@ import { getConnectionHintNoticeField } from '@utils/sharedFields';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
const deprecatedMagistralModelsWithTextOutput = ['magistral-small-2506', 'magistral-medium-2506'];
export class LmChatMistralCloud implements INodeType {
description: INodeTypeDescription = {
displayName: 'Mistral Cloud Chat Model',
@@ -192,6 +194,9 @@ export class LmChatMistralCloud implements INodeType {
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
metadata: {
output_format: isModelWithJSONOutput(modelName) ? 'json' : undefined,
},
});
return {
@@ -199,3 +204,20 @@ export class LmChatMistralCloud implements INodeType {
};
}
}
/**
 * Reports whether the given Mistral model name is expected to emit
 * JSON-formatted output.
 *
 * Only Magistral models are JSON-capable. A few deprecated Magistral
 * releases still return plain text output (with <think></think> chunks
 * embedded directly in the text content) and are therefore excluded.
 * All other (current and future) Magistral models return JSON output,
 * which includes "thinking" JSON types.
 * See https://docs.mistral.ai/capabilities/reasoning/
 *
 * @param modelName - Mistral model identifier, e.g. "magistral-medium-2506"
 * @returns true when the model produces JSON output
 */
function isModelWithJSONOutput(modelName: string): boolean {
	const isMagistralFamily = modelName.includes('magistral');
	const isDeprecatedTextModel = deprecatedMagistralModelsWithTextOutput.includes(modelName);
	return isMagistralFamily && !isDeprecatedTextModel;
}

View file

@@ -183,7 +183,7 @@
"@langchain/google-genai": "0.2.17",
"@langchain/google-vertexai": "0.2.18",
"@langchain/groq": "0.2.3",
"@langchain/mistralai": "0.2.1",
"@langchain/mistralai": "0.2.3",
"@langchain/mongodb": "^0.1.0",
"@langchain/ollama": "0.2.3",
"@langchain/openai": "catalog:",

File diff suppressed because it is too large Load diff