mirror of
https://github.com/twentyhq/twenty
synced 2026-04-21 21:47:38 +00:00
Added:
- An "Ask AI" command to the command menu.
- A simple GraphQL resolver that converts the user's question into a relevant SQL query using an LLM, runs the query, and returns the result.

<img width="428" alt="Screenshot 2024-06-09 at 20 53 09" src="https://github.com/twentyhq/twenty/assets/171685816/57127f37-d4a6-498d-b253-733ffa0d209f">

No security concerns have been addressed; this is only a proof of concept and is not intended to be enabled in production. All changes are behind a feature flag called `IS_ASK_AI_ENABLED`.

---------

Co-authored-by: Félix Malfait <felix.malfait@gmail.com>
14 lines
381 B
TypeScript
14 lines
381 B
TypeScript
import { ModuleMetadata, FactoryProvider } from '@nestjs/common';
|
|
|
|
/**
 * Identifies which LLM provider backs the chat model.
 * String-valued so members serialize/compare cleanly against plain
 * configuration strings (e.g. 'openai').
 */
export enum LLMChatModelDriver {
  OpenAI = 'openai',
}
|
|
|
|
/**
 * Resolved configuration for the LLM chat-model module.
 */
export interface LLMChatModelModuleOptions {
  /** Which chat-model driver to use (currently only OpenAI). */
  type: LLMChatModelDriver;
}
|
|
|
|
export type LLMChatModelModuleAsyncOptions = {
|
|
useFactory: (...args: any[]) => LLMChatModelModuleOptions;
|
|
} & Pick<ModuleMetadata, 'imports'> &
|
|
Pick<FactoryProvider, 'inject'>;
|