-
-
Notifications
You must be signed in to change notification settings - Fork 1
/
llm-api.js
49 lines (41 loc) · 1.4 KB
/
llm-api.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
// ----------------------
// -- import variables --
// ----------------------
import { llmDistilleryVars } from './vars.js';
// Sampling temperature for all completions; parsed once at module load.
// NOTE(review): yields NaN if llmDistilleryVars.LLM_TEMPERATURE is unset or
// non-numeric — presumably vars.js guarantees a numeric string; verify there.
const LLM_TEMPERATURE = parseFloat(llmDistilleryVars.LLM_TEMPERATURE);
// ---------------------------
// -- Import the OpenAI API --
// ---------------------------
import OpenAI from 'openai';
// ---------------------------------------------------
// -- Function to fetch the completion from the LLM --
// ---------------------------------------------------
/**
 * Fetch a single (non-streaming) chat completion from an OpenAI-compatible API.
 *
 * @param {Array|string} prompt - Chat messages array, or its JSON-string form.
 * @param {string} baseUrl - Base URL of the OpenAI-compatible endpoint.
 * @param {string} apiKey - API key for the endpoint.
 * @param {string} llmModel - Model identifier to request.
 * @param {Array|string} stopTokens - Stop sequences, or their JSON-string form.
 * @param {number} llmMaxGenLength - Maximum number of tokens to generate.
 * @returns {Promise<string|undefined>} The first choice's message content, or
 *   undefined if the response carries no message.
 * @throws {SyntaxError} If a string `prompt`/`stopTokens` is not valid JSON.
 */
export async function fetchChatCompletion(prompt, baseUrl, apiKey, llmModel, stopTokens, llmMaxGenLength) {
  const openai = new OpenAI({
    baseURL: baseUrl,
    apiKey: apiKey,
  });
  // Accept either a ready-made messages array or its JSON-encoded string form
  // (e.g. when passed through env/config) — without mutating the parameter.
  const messages = typeof prompt === 'string' ? JSON.parse(prompt) : prompt;
  // Stop tokens may likewise arrive JSON-encoded.
  const stop = typeof stopTokens === 'string' ? JSON.parse(stopTokens) : stopTokens;
  const chatCompletion = await openai.chat.completions.create({
    messages,
    model: llmModel,
    max_tokens: llmMaxGenLength,
    stop,
    temperature: LLM_TEMPERATURE,
    top_p: 0.7,
    stream: false,
  });
  // Defensive: some gateways/proxies return the raw JSON body as a string
  // instead of a parsed object — normalize before reading the content.
  const completion =
    typeof chatCompletion === 'string' ? JSON.parse(chatCompletion) : chatCompletion;
  return completion.choices[0]?.message?.content;
}