From d4342f6fdb1394eae75940d31a8eb468347fa042 Mon Sep 17 00:00:00 2001
From: Peter Kirkham
Date: Tue, 21 Jan 2025 18:39:15 -0800
Subject: [PATCH] feat: map vercel to same format (#358)

---
 posthog-ai/README.md                | 23 ++++----
 posthog-ai/package.json             |  2 +-
 posthog-ai/src/openai/index.ts      | 22 ++++----
 posthog-ai/src/utils.ts             | 38 +++++--------
 posthog-ai/src/vercel/middleware.ts | 88 ++++++++++++++++++++++-------
 5 files changed, 104 insertions(+), 69 deletions(-)

diff --git a/posthog-ai/README.md b/posthog-ai/README.md
index 8583674d..06c34a88 100644
--- a/posthog-ai/README.md
+++ b/posthog-ai/README.md
@@ -14,25 +14,22 @@ npm install @posthog/ai
 import { OpenAI } from '@posthog/ai'
 import { PostHog } from 'posthog-node'
 
-const phClient = new PostHog(
-  '',
-  { host: 'https://us.i.posthog.com' }
-);
+const phClient = new PostHog('', { host: 'https://us.i.posthog.com' })
 
 const client = new OpenAI({
   apiKey: '',
   posthog: phClient,
-});
+})
 
 const completion = await client.chat.completions.create({
-  model: "gpt-3.5-turbo",
-  messages: [{ role: "user", content: "Tell me a fun fact about hedgehogs" }],
-  posthogDistinctId: "user_123", // optional
-  posthogTraceId: "trace_123", // optional
-  posthogProperties: { conversation_id: "abc123", paid: true }, //optional
-  posthogGroups: { company: "company_id_in_your_db" }, // optional
-  posthogPrivacyMode: false // optional
-});
+  model: 'gpt-3.5-turbo',
+  messages: [{ role: 'user', content: 'Tell me a fun fact about hedgehogs' }],
+  posthogDistinctId: 'user_123', // optional
+  posthogTraceId: 'trace_123', // optional
+  posthogProperties: { conversation_id: 'abc123', paid: true }, //optional
+  posthogGroups: { company: 'company_id_in_your_db' }, // optional
+  posthogPrivacyMode: false, // optional
+})
 
 console.log(completion.choices[0].message.content)
 
diff --git a/posthog-ai/package.json b/posthog-ai/package.json
index b6bdf167..2f65d650 100644
--- a/posthog-ai/package.json
+++ b/posthog-ai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@posthog/ai",
-  "version": "2.0.1",
+  "version": "2.1.0",
   "description": "PostHog Node.js AI integrations",
   "repository": {
     "type": "git",
diff --git a/posthog-ai/src/openai/index.ts b/posthog-ai/src/openai/index.ts
index f22154c6..b6be01d2 100644
--- a/posthog-ai/src/openai/index.ts
+++ b/posthog-ai/src/openai/index.ts
@@ -89,9 +89,9 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
       return parentPromise.then((value) => {
         const passThroughStream = new PassThrough({ objectMode: true })
         let accumulatedContent = ''
-        let usage: { input_tokens: number; output_tokens: number } = {
-          input_tokens: 0,
-          output_tokens: 0,
+        let usage: { inputTokens: number; outputTokens: number } = {
+          inputTokens: 0,
+          outputTokens: 0,
         }
         if ('tee' in value) {
           const openAIStream = value
@@ -102,8 +102,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
               accumulatedContent += delta
               if (chunk.usage) {
                 usage = {
-                  input_tokens: chunk.usage.prompt_tokens ?? 0,
-                  output_tokens: chunk.usage.completion_tokens ?? 0,
+                  inputTokens: chunk.usage.prompt_tokens ?? 0,
+                  outputTokens: chunk.usage.completion_tokens ?? 0,
                 }
               }
               passThroughStream.write(chunk)
@@ -139,8 +139,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
             params: body,
             httpStatus: 500,
             usage: {
-              input_tokens: 0,
-              output_tokens: 0,
+              inputTokens: 0,
+              outputTokens: 0,
             },
           })
           passThroughStream.emit('error', error)
@@ -167,8 +167,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           params: body,
           httpStatus: 200,
           usage: {
-            input_tokens: result.usage?.prompt_tokens ?? 0,
-            output_tokens: result.usage?.completion_tokens ?? 0,
+            inputTokens: result.usage?.prompt_tokens ?? 0,
+            outputTokens: result.usage?.completion_tokens ?? 0,
           },
         })
       }
@@ -188,8 +188,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
           params: body,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0,
+            inputTokens: 0,
+            outputTokens: 0,
           },
         })
         throw error
diff --git a/posthog-ai/src/utils.ts b/posthog-ai/src/utils.ts
index 9f8e834d..d10e7909 100644
--- a/posthog-ai/src/utils.ts
+++ b/posthog-ai/src/utils.ts
@@ -34,26 +34,6 @@ export const getModelParams = (params: ChatCompletionCreateParamsBase & Monitori
   return modelParams
 }
 
-export const getUsage = (response: any, provider: string): { input_tokens: number; output_tokens: number } => {
-  if (!response?.usage) {
-    return { input_tokens: 0, output_tokens: 0 }
-  }
-
-  if (provider === 'anthropic') {
-    return {
-      input_tokens: response.usage.input_tokens ?? 0,
-      output_tokens: response.usage.output_tokens ?? 0,
-    }
-  } else if (provider === 'openai') {
-    return {
-      input_tokens: response.usage.prompt_tokens ?? 0,
-      output_tokens: response.usage.completion_tokens ?? 0,
-    }
-  }
-
-  return { input_tokens: 0, output_tokens: 0 }
-}
-
 /**
  * Helper to format responses (non-streaming) for consumption, mirroring Python's openai vs. anthropic approach.
  */
@@ -123,7 +103,7 @@ export type SendEventToPosthogParams = {
   latency: number
   baseURL: string
   httpStatus: number
-  usage?: { input_tokens?: number; output_tokens?: number }
+  usage?: { inputTokens?: number; outputTokens?: number }
   params: ChatCompletionCreateParamsBase & MonitoringParams
 }
 
@@ -141,6 +121,18 @@ export const sendEventToPosthog = ({
   httpStatus = 200,
   usage = {},
 }: SendEventToPosthogParams): void => {
+  console.log('sendEventToPosthog', {
+    client,
+    distinctId,
+    traceId,
+    model,
+    provider,
+    input,
+    output,
+    latency,
+    baseURL,
+    params,
+  })
   if (client.capture) {
     client.capture({
       distinctId: distinctId ?? traceId,
@@ -152,8 +144,8 @@ export const sendEventToPosthog = ({
         $ai_input: withPrivacyMode(client, params.posthogPrivacyMode ?? false, input),
         $ai_output_choices: withPrivacyMode(client, params.posthogPrivacyMode ?? false, output),
         $ai_http_status: httpStatus,
-        $ai_input_tokens: usage.input_tokens ?? 0,
-        $ai_output_tokens: usage.output_tokens ?? 0,
+        $ai_input_tokens: usage.inputTokens ?? 0,
+        $ai_output_tokens: usage.outputTokens ?? 0,
        $ai_latency: latency,
        $ai_trace_id: traceId,
        $ai_base_url: baseURL,
diff --git a/posthog-ai/src/vercel/middleware.ts b/posthog-ai/src/vercel/middleware.ts
index 77e10998..c56e885c 100644
--- a/posthog-ai/src/vercel/middleware.ts
+++ b/posthog-ai/src/vercel/middleware.ts
@@ -2,10 +2,11 @@ import { experimental_wrapLanguageModel as wrapLanguageModel } from 'ai'
 import type {
   LanguageModelV1,
   Experimental_LanguageModelV1Middleware as LanguageModelV1Middleware,
+  LanguageModelV1Prompt,
   LanguageModelV1StreamPart,
 } from 'ai'
 import { v4 as uuidv4 } from 'uuid'
-import type { PostHog } from 'posthog-node'
+import { PostHog } from 'posthog-node'
 import { sendEventToPosthog } from '../utils'
 
 interface CreateInstrumentationMiddlewareOptions {
@@ -13,7 +14,46 @@ interface CreateInstrumentationMiddlewareOptions {
   posthogTraceId: string
   posthogProperties?: Record<string, any>
   posthogPrivacyMode?: boolean
-  posthogGroups?: string[]
+  posthogGroups?: Record<string, any>
+}
+
+interface PostHogInput {
+  content: string
+  role: string
+}
+
+const mapVercelParams = (params: any): Record<string, any> => {
+  return {
+    temperature: params.temperature,
+    max_tokens: params.maxTokens,
+    top_p: params.topP,
+    frequency_penalty: params.frequencyPenalty,
+    presence_penalty: params.presencePenalty,
+    stop: params.stopSequences,
+    stream: params.stream,
+  }
+}
+
+const mapVercelPrompt = (prompt: LanguageModelV1Prompt): PostHogInput[] => {
+  return prompt.map((p) => {
+    let content = ''
+    if (Array.isArray(p.content)) {
+      content = p.content
+        .map((c) => {
+          if (c.type === 'text') {
+            return c.text
+          }
+          return ''
+        })
+        .join('')
+    } else {
+      content = p.content
+    }
+    return {
+      role: p.role,
+      content,
+    }
+  })
 }
 
 export const createInstrumentationMiddleware = (
@@ -24,7 +64,10 @@ export const createInstrumentationMiddleware = (
   const middleware: LanguageModelV1Middleware = {
     wrapGenerate: async ({ doGenerate, params }) => {
       const startTime = Date.now()
-
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params),
+      }
       try {
         const result = await doGenerate()
         const latency = (Date.now() - startTime) / 1000
@@ -35,15 +78,15 @@ export const createInstrumentationMiddleware = (
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [{ content: result.text, role: 'assistant' }],
           latency,
           baseURL: '',
-          params: { posthog_properties: options } as any,
+          params: mergedParams as any,
           httpStatus: 200,
           usage: {
-            input_tokens: result.usage.promptTokens,
-            output_tokens: result.usage.completionTokens,
+            inputTokens: result.usage.promptTokens,
+            outputTokens: result.usage.completionTokens,
           },
         })
 
@@ -55,15 +98,15 @@ export const createInstrumentationMiddleware = (
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: { posthog_properties: options } as any,
+          params: mergedParams as any,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0,
+            inputTokens: 0,
+            outputTokens: 0,
           },
         })
         throw error
@@ -73,8 +116,11 @@ export const createInstrumentationMiddleware = (
     wrapStream: async ({ doStream, params }) => {
       const startTime = Date.now()
       let generatedText = ''
-      let usage: { input_tokens?: number; output_tokens?: number } = {}
-
+      let usage: { inputTokens?: number; outputTokens?: number } = {}
+      let mergedParams = {
+        ...options,
+        ...mapVercelParams(params),
+      }
       try {
         const { stream, ...rest } = await doStream()
 
@@ -85,8 +131,8 @@ export const createInstrumentationMiddleware = (
           }
           if (chunk.type === 'finish') {
             usage = {
-              input_tokens: chunk.usage?.promptTokens,
-              output_tokens: chunk.usage?.completionTokens,
+              inputTokens: chunk.usage?.promptTokens,
+              outputTokens: chunk.usage?.completionTokens,
             }
           }
           controller.enqueue(chunk)
@@ -100,11 +146,11 @@ export const createInstrumentationMiddleware = (
             traceId: options.posthogTraceId,
             model: model.modelId,
             provider: 'vercel',
-            input: options.posthogPrivacyMode ? '' : params.prompt,
+            input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
             output: [{ content: generatedText, role: 'assistant' }],
             latency,
             baseURL: '',
-            params: { posthog_properties: options } as any,
+            params: mergedParams as any,
             httpStatus: 200,
             usage,
           })
@@ -122,15 +168,15 @@ export const createInstrumentationMiddleware = (
           traceId: options.posthogTraceId,
           model: model.modelId,
           provider: 'vercel',
-          input: options.posthogPrivacyMode ? '' : params.prompt,
+          input: options.posthogPrivacyMode ? '' : mapVercelPrompt(params.prompt),
           output: [],
           latency: 0,
           baseURL: '',
-          params: { posthog_properties: options } as any,
+          params: mergedParams as any,
           httpStatus: 500,
           usage: {
-            input_tokens: 0,
-            output_tokens: 0,
+            inputTokens: 0,
+            outputTokens: 0,
           },
         })
         throw error
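
Usage note (not part of the patch): after this change, the Vercel middleware reports token usage in the same inputTokens/outputTokens shape that the OpenAI wrapper and sendEventToPosthog share, which is also why the per-provider getUsage helper in utils.ts could be deleted. A minimal sketch of how the exported createInstrumentationMiddleware might be wired into a Vercel AI SDK model follows; the '@posthog/ai' import path, the (phClient, model, options) argument order, and the '@ai-sdk/openai' provider are assumptions inferred from the diff, not confirmed by it.

// sketch.ts - hedged usage sketch, not part of commit d4342f6
import { experimental_wrapLanguageModel as wrapLanguageModel, generateText } from 'ai'
import { openai } from '@ai-sdk/openai' // assumed provider package
import { PostHog } from 'posthog-node'
import { v4 as uuidv4 } from 'uuid'
// Assumed export path; the patch only shows the function defined in src/vercel/middleware.ts.
import { createInstrumentationMiddleware } from '@posthog/ai'

const phClient = new PostHog('<ph_project_api_key>', { host: 'https://us.i.posthog.com' })

const baseModel = openai('gpt-4o-mini')

// Argument order (phClient, model, options) is assumed from the identifiers
// used inside the middleware (phClient, model.modelId, options.*).
const middleware = createInstrumentationMiddleware(phClient, baseModel, {
  posthogDistinctId: 'user_123',
  posthogTraceId: uuidv4(),
  posthogProperties: { conversation_id: 'abc123' },
  posthogPrivacyMode: false,
  posthogGroups: { company: 'company_id_in_your_db' }, // Record<string, any> after this patch
})

// wrapLanguageModel is the same aliased 'ai' import used at the top of middleware.ts.
const model = wrapLanguageModel({ model: baseModel, middleware })

async function main(): Promise<void> {
  const { text } = await generateText({ model, prompt: 'Tell me a fun fact about hedgehogs' })
  console.log(text)
  await phClient.shutdown() // flush queued $ai_generation events before exit
}

main()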