ToolJet/marketplace/plugins/openai/lib/query_operations.ts
Arpit 636c4a62c5
[chore] Eslint fixes (#5988)
* lint fixes: frontend

* lint fixes: marketplace

* lint fixes: plugins-client js files

* typo fix

* eslint updates/fixes for marketplace

* eslint updates/fixes for plugins

* removing "@typescript-eslint/no-floating-promises for plugins"

* ignore client.js and server.js

* ignore client.js and server.js

* ignore client.js and server.tss

* Delete client.js

* ignore cypress-test/**
2023-04-11 15:34:58 +05:30

58 lines
1.7 KiB
TypeScript

import { OpenAIApi } from 'openai';
import { QueryOptions } from './types';
/**
 * Runs a text completion against OpenAI's legacy completions endpoint.
 *
 * @param openai  - configured OpenAI API client
 * @param options - query options; `max_tokens` and `temperature` may arrive as
 *                  strings (form input) and are coerced to numbers here
 * @returns the first completion's text on success, or an
 *          `{ error, statusCode }` object describing the API failure
 */
export async function getCompletion(
  openai: OpenAIApi,
  options: QueryOptions
): Promise<string | { error: string; statusCode: number }> {
  const { prompt, max_tokens, temperature, stop_sequence, suffix } = options;

  // Coerce possibly-string numeric options. Note parseFloat('')/parseInt('')
  // yield NaN, which must not be forwarded to the API; a nullish or NaN value
  // falls back to the defaults below (temperature 0, max_tokens 67).
  const parsedTemperature = typeof temperature === 'string' ? parseFloat(temperature) : temperature;
  const parsedMaxTokens = typeof max_tokens === 'string' ? parseInt(max_tokens, 10) : max_tokens;

  try {
    const { data } = await openai.createCompletion({
      model: 'text-davinci-003',
      prompt: prompt,
      temperature: parsedTemperature == null || Number.isNaN(parsedTemperature) ? 0 : parsedTemperature,
      max_tokens: parsedMaxTokens == null || Number.isNaN(parsedMaxTokens) ? 67 : parsedMaxTokens,
      stop: stop_sequence || null,
      suffix: suffix || null,
    });
    return data.choices[0]['text'];
  } catch (error) {
    // Surface the failure to the caller instead of throwing; same error shape
    // as getChatCompletion so consumers handle both uniformly.
    const err = error as { message?: string; response?: { status?: number } };
    return {
      error: err?.message,
      statusCode: err?.response?.status,
    };
  }
}
/**
 * Runs a chat completion against OpenAI's chat endpoint.
 *
 * The prompt is sent as a `user` message — per the OpenAI chat format the
 * end-user's input belongs in a user-role message (previously this was sent
 * as `assistant`, which misrepresents the prompt to the model).
 *
 * @param openai  - configured OpenAI API client
 * @param options - query options; `max_tokens` and `temperature` may arrive as
 *                  strings (form input) and are coerced to numbers here
 * @returns the first choice's message content on success, or an
 *          `{ error, statusCode }` object describing the API failure
 */
export async function getChatCompletion(
  openai: OpenAIApi,
  options: QueryOptions
): Promise<string | { error: string; statusCode: number }> {
  const { prompt, max_tokens, temperature, stop_sequence } = options;

  // Coerce possibly-string numeric options. Note parseFloat('')/parseInt('')
  // yield NaN, which must not be forwarded to the API; a nullish or NaN value
  // falls back to the defaults below (temperature 0, max_tokens 67).
  const parsedTemperature = typeof temperature === 'string' ? parseFloat(temperature) : temperature;
  const parsedMaxTokens = typeof max_tokens === 'string' ? parseInt(max_tokens, 10) : max_tokens;

  try {
    const { data } = await openai.createChatCompletion({
      model: 'gpt-3.5-turbo',
      temperature: parsedTemperature == null || Number.isNaN(parsedTemperature) ? 0 : parsedTemperature,
      max_tokens: parsedMaxTokens == null || Number.isNaN(parsedMaxTokens) ? 67 : parsedMaxTokens,
      stop: stop_sequence || null,
      messages: [
        {
          role: 'user',
          content: prompt,
        },
      ],
    });
    return data.choices[0]['message']['content'];
  } catch (error) {
    // Same error shape as getCompletion so consumers handle both uniformly.
    const err = error as { message?: string; response?: { status?: number } };
    return {
      error: err?.message,
      statusCode: err?.response?.status,
    };
  }
}