diff --git a/README.md b/README.md
index 08b3ac5..b90e751 100644
--- a/README.md
+++ b/README.md
@@ -265,14 +265,11 @@ To enable Helicone observability in RAGChat, you simply need to pass your Helico
 import { RAGChat, custom } from "ragchat";
 
 const ragChat = new RAGChat({
-  model: custom(
-    "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
-    {
-      apiKey: "YOUR_TOGETHER_AI_SECRET_KEY",
-      baseUrl: "https://api.together.xyz",
-    },
-    { token: "YOUR_HELICONE_SECRET_KEY" }
-  ),
+  model: custom("meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo", {
+    apiKey: "xxx",
+    baseUrl: "https://api.together.xyz",
+    analytics: { name: "helicone", token: process.env.HELICONE_API_KEY! },
+  }),
 });
 ```
 
@@ -282,14 +279,10 @@ const ragChat = new RAGChat({
 import { RAGChat, openai } from "ragchat";
 
 const ragChat = new RAGChat({
-  model: openai(
-    "gpt-3.5-turbo",
-    {
-      temperature: 0,
-      apiKey: "YOUR_OPEN_AI_KEY",
-    },
-    { token: "YOUR_HELICONE_SECRET_KEY" }
-  ),
+  model: openai("gpt-3.5-turbo", {
+    apiKey: process.env.OPENAI_API_KEY!,
+    analytics: { name: "helicone", token: process.env.HELICONE_API_KEY! },
+  }),
 });
 ```
 
diff --git a/src/models.ts b/src/models.ts
index 594f0a9..d192e90 100644
--- a/src/models.ts
+++ b/src/models.ts
@@ -42,7 +42,36 @@ export type LLMClientConfig = {
   baseUrl: string;
 };
 
-type ModelOptions = Omit<LLMClientConfig, "model">;
+type ModelOptions = Omit<LLMClientConfig, "model"> & {
+  analytics?: { name: "helicone"; token: string };
+};
+
+const analyticsBaseUrlMap = (
+  analyticsName: "helicone",
+  analyticsToken: string,
+  providerApiKey: string,
+  providerBaseUrl?: string
+) => {
+  return {
+    helicone: {
+      custom: {
+        baseURL: "https://gateway.helicone.ai",
+        defaultHeaders: {
+          "Helicone-Auth": `Bearer ${analyticsToken}`,
+          "Helicone-Target-Url": providerBaseUrl,
+          Authorization: `Bearer ${providerApiKey}`,
+        },
+      },
+      openai: {
+        basePath: "https://oai.helicone.ai/v1",
+        defaultHeaders: {
+          "Helicone-Auth": `Bearer ${analyticsToken}`,
+          Authorization: `Bearer ${providerApiKey}`,
+        },
+      },
+    },
+  }[analyticsName];
+};
 
 export const upstash = (model: UpstashChatModel, options?: Omit<ModelOptions, "baseUrl">) => {
   const apiKey = options?.apiKey ?? process.env.QSTASH_TOKEN ?? "";
@@ -64,22 +93,20 @@ export const upstash = (model: UpstashChatModel, options?: Omit<ModelOptions, "baseUrl">) => {
   });
 };
 
-export const custom = (model: string, options?: ModelOptions, helicone?: { token: string }) => {
+export const custom = (model: string, options?: ModelOptions) => {
   if (!options?.baseUrl) throw new Error("baseUrl cannot be empty or undefined.");
 
   return new ChatOpenAI({
     modelName: model,
     ...options,
-    ...(helicone
+    ...(options.analytics
       ? {
-          configuration: {
-            baseURL: "https://gateway.helicone.ai",
-            defaultHeaders: {
-              "Helicone-Auth": `Bearer ${helicone.token}`,
-              "Helicone-Target-Url": options.baseUrl,
-              Authorization: `Bearer ${options.apiKey}`,
-            },
-          },
+          configuration: analyticsBaseUrlMap(
+            options.analytics.name,
+            options.analytics.token,
+            options.apiKey,
+            options.baseUrl
+          ).custom,
         }
       : {
           configuration: {
@@ -90,25 +117,17 @@ export const custom = (model: string, options?: ModelOptions, helicone?: { token: string }) => {
   });
 };
 
-export const openai = (
-  model: OpenAIChatModel,
-  options?: Omit<ModelOptions, "baseUrl">,
-  helicone?: { token: string }
-) => {
+export const openai = (model: OpenAIChatModel, options?: Omit<ModelOptions, "baseUrl">) => {
+  const apiKey = process.env.OPENAI_API_KEY ?? options?.apiKey ?? "";
+  const { analytics, ...optionsWithout } = options ?? {};
+
   return new ChatOpenAI({
     modelName: model,
     temperature: 0,
-    apiKey: process.env.OPENAI_API_KEY ?? options?.apiKey ?? "",
-    ...(helicone
-      ? {
-          configuration: {
-            basePath: "https://oai.helicone.ai/v1",
-            defaultHeaders: {
-              "Helicone-Auth": `Bearer ${helicone.token}`,
-            },
-          },
-        }
+    ...optionsWithout,
+    apiKey,
+    ...(analytics
+      ? { configuration: analyticsBaseUrlMap(analytics.name, analytics.token, apiKey).openai }
       : {}),
-    ...options,
   });
 };