Add Sentry AI agent monitoring with telemetry configuration (#15301)

This PR implements Sentry's AI agent monitoring by:

- Configuring vercelAIIntegration with recordInputs and recordOutputs
options
- Adding sendDefaultPii to Sentry.init() for better debugging
- Creating a shared AI_TELEMETRY_CONFIG constant to DRY up the telemetry
configuration
- Adding experimental_telemetry to all AI SDK calls (generateText,
generateObject, streamText)

All AI operations are now fully monitored in Sentry with complete
input/output recording for debugging and performance analysis.

Note: Currently on Sentry v9.26.0, which is compatible with this
implementation. No breaking changes from the v9-to-v10 migration guide
were found in the codebase.
This commit is contained in:
Félix Malfait 2025-10-23 18:06:28 +02:00 committed by GitHub
parent 7edfe4bc7a
commit bbe16c23bc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 552 additions and 403 deletions

View file

@@ -69,9 +69,9 @@
"@ptc-org/nestjs-query-typeorm": "4.2.1-alpha.2",
"@react-email/render": "^1.2.3",
"@revertdotdev/revert-react": "^0.0.21",
"@sentry/nestjs": "^8.55.0",
"@sentry/node": "9.26.0",
"@sentry/profiling-node": "9.26.0",
"@sentry/nestjs": "^10.0.0",
"@sentry/node": "^10.0.0",
"@sentry/profiling-node": "^10.0.0",
"@sniptt/guards": "0.2.0",
"addressparser": "1.0.1",
"ai": "^5.0.44",

View file

@@ -0,0 +1,5 @@
/**
 * Shared telemetry settings passed as `experimental_telemetry` to every AI SDK
 * call (generateText, generateObject, streamText) so Sentry's vercelAIIntegration
 * records full input/output payloads for each operation.
 *
 * Frozen (both at the type level via `as const` and at runtime via
 * `Object.freeze`) because this single object is shared by every AI call site —
 * a mutation anywhere would silently change telemetry behavior everywhere.
 */
export const AI_TELEMETRY_CONFIG = Object.freeze({
  isEnabled: true,
  recordInputs: true,
  recordOutputs: true,
} as const);

View file

@@ -2,6 +2,7 @@ import { Injectable } from '@nestjs/common';
import { LanguageModel, type ModelMessage, streamText } from 'ai';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
import { AiModelRegistryService } from 'src/engine/core-modules/ai/services/ai-model-registry.service';
@Injectable()
@@ -40,6 +41,7 @@ export class AiService {
messages,
temperature: options?.temperature,
maxOutputTokens: options?.maxOutputTokens,
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
}
}

View file

@@ -15,6 +15,7 @@ import { getAppPath } from 'twenty-shared/utils';
import { In } from 'typeorm';
import { getAllSelectableFields } from 'src/engine/api/utils/get-all-selectable-fields.utils';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
import { AIBillingService } from 'src/engine/core-modules/ai/services/ai-billing.service';
import { AiModelRegistryService } from 'src/engine/core-modules/ai/services/ai-model-registry.service';
import { WorkspaceDomainsService } from 'src/engine/core-modules/domain/workspace-domains/services/workspace-domains.service';
@@ -131,6 +132,7 @@ export class AgentExecutionService implements AgentExecutionContext {
messages: convertToModelMessages(messages),
stopWhen: stepCountIs(AGENT_CONFIG.MAX_STEPS),
providerOptions,
experimental_telemetry: AI_TELEMETRY_CONFIG,
experimental_repairToolCall: async ({
toolCall,
tools: toolsForRepair,

View file

@@ -2,6 +2,7 @@ import { Injectable, Logger } from '@nestjs/common';
import { generateText } from 'ai';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
import { AiModelRegistryService } from 'src/engine/core-modules/ai/services/ai-model-registry.service';
@Injectable()
@@ -25,6 +26,7 @@ export class AgentTitleGenerationService {
const result = await generateText({
model: defaultModel.model,
prompt: `Generate a concise, descriptive title (maximum 60 characters) for a chat thread based on the following message. The title should capture the main topic or purpose of the conversation. Return only the title, nothing else. Message: "${messageContent}"`,
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
return this.cleanTitle(result.text);

View file

@@ -1,6 +1,8 @@
import { generateObject, type LanguageModel, NoSuchToolError } from 'ai';
import { type z } from 'zod';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
type ToolCall = {
type: 'tool-call';
toolCallId: string;
@@ -57,6 +59,7 @@ export const repairToolCall = async ({
`- Object structures must match the schema shape`,
`- Array items must follow the specified format`,
].join('\n'),
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
return {

View file

@@ -10,6 +10,7 @@ import {
import { Repository } from 'typeorm';
import { z } from 'zod';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
import { ModelId } from 'src/engine/core-modules/ai/constants/ai-models.const';
import { AiModelRegistryService } from 'src/engine/core-modules/ai/services/ai-model-registry.service';
import { AgentEntity } from 'src/engine/metadata-modules/agent/agent.entity';
@@ -82,6 +83,7 @@ export class AiRouterService {
prompt: userPrompt,
schema: routerDecisionSchema,
temperature: 0.1,
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
return availableAgents.find(

View file

@@ -40,11 +40,15 @@ if (process.env.EXCEPTION_HANDLER_DRIVER === ExceptionHandlerDriver.SENTRY) {
Sentry.expressIntegration(),
Sentry.graphqlIntegration(),
Sentry.postgresIntegration(),
Sentry.vercelAIIntegration(),
Sentry.vercelAIIntegration({
recordInputs: true,
recordOutputs: true,
}),
nodeProfilingIntegration(),
],
tracesSampleRate: 0.1,
profilesSampleRate: 0.3,
sendDefaultPii: true,
debug: process.env.NODE_ENV === NodeEnvironment.DEVELOPMENT,
});
}

View file

@@ -4,6 +4,7 @@ import { InjectRepository } from '@nestjs/typeorm';
import { generateObject, generateText, stepCountIs, ToolSet } from 'ai';
import { Repository } from 'typeorm';
import { AI_TELEMETRY_CONFIG } from 'src/engine/core-modules/ai/constants/ai-telemetry.const';
import { AiModelRegistryService } from 'src/engine/core-modules/ai/services/ai-model-registry.service';
import { ToolAdapterService } from 'src/engine/core-modules/ai/services/tool-adapter.service';
import { ToolService } from 'src/engine/core-modules/ai/services/tool.service';
@@ -117,6 +118,7 @@ export class AiAgentExecutorService {
model: registeredModel.model,
prompt: userPrompt,
stopWhen: stepCountIs(AGENT_CONFIG.MAX_STEPS),
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
if (Object.keys(schema).length === 0) {
@@ -134,6 +136,7 @@ export class AiAgentExecutorService {
Please generate the structured output based on the execution results and context above.`,
schema: convertOutputSchemaToZod(schema),
experimental_telemetry: AI_TELEMETRY_CONFIG,
});
return {

924
yarn.lock

File diff suppressed because it is too large Load diff