Skip to content

Commit

Permalink
chore: remove console log (#359)
Browse files — browse the repository at this point in the history
  • Loading branch information
k11kirky authored Jan 22, 2025
1 parent d4342f6 commit af9a557
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 22 deletions.
2 changes: 1 addition & 1 deletion posthog-ai/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@posthog/ai",
"version": "2.1.0",
"version": "2.1.1",
"description": "PostHog Node.js AI integrations",
"repository": {
"type": "git",
Expand Down
16 changes: 8 additions & 8 deletions posthog-ai/src/openai/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,14 @@ interface MonitoringOpenAIConfig {

/**
 * OpenAI client subclass that reports usage events to PostHog.
 *
 * Extracts the `posthog` client from the config, forwards the remaining
 * OpenAI options to the base `OpenAIOrignal` constructor, and replaces
 * `chat` with a `WrappedChat` that instruments completions.
 */
export class PostHogOpenAI extends OpenAIOrignal {
  private readonly phClient: PostHog
  // NOTE(review): the flattened diff showed this field declared twice
  // (before and after the constructor); a class property may only be
  // declared once, so the single pre-constructor declaration is kept.
  public chat: WrappedChat

  constructor(config: MonitoringOpenAIConfig) {
    // Separate the PostHog client from the options meant for the OpenAI SDK.
    const { posthog, ...openAIConfig } = config
    super(openAIConfig)
    this.phClient = posthog
    // Wrap chat so completion calls are captured via the PostHog client.
    this.chat = new WrappedChat(this, this.phClient)
  }
}

export class WrappedChat extends OpenAIOrignal.Chat {
Expand Down Expand Up @@ -94,8 +93,8 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
outputTokens: 0,
}
if ('tee' in value) {
const openAIStream = value
;(async () => {
const openAIStream = value;
(async () => {
try {
for await (const chunk of openAIStream) {
const delta = chunk?.choices?.[0]?.delta?.content ?? ''
Expand All @@ -115,7 +114,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
input: mergeSystemPrompt(openAIParams, 'openai'),
output: [{ content: accumulatedContent, role: 'assistant' }],
latency,
baseURL: (this as any).baseURL ?? '',
Expand All @@ -132,7 +131,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
input: mergeSystemPrompt(openAIParams, 'openai'),
output: [],
latency: 0,
baseURL: (this as any).baseURL ?? '',
Expand Down Expand Up @@ -160,7 +159,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
input: mergeSystemPrompt(openAIParams, 'openai'),
output: [{ content: result.choices[0].message.content, role: 'assistant' }],
latency,
baseURL: (this as any).baseURL ?? '',
Expand All @@ -181,7 +180,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
traceId,
model: openAIParams.model,
provider: 'openai',
input: posthogPrivacyMode ? '' : mergeSystemPrompt(openAIParams, 'openai'),
input: mergeSystemPrompt(openAIParams, 'openai'),
output: [],
latency: 0,
baseURL: (this as any).baseURL ?? '',
Expand All @@ -199,6 +198,7 @@ export class WrappedCompletions extends OpenAIOrignal.Chat.Completions {
return wrappedPromise
}
}

}

export default PostHogOpenAI
14 changes: 1 addition & 13 deletions posthog-ai/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -121,18 +121,6 @@ export const sendEventToPosthog = ({
httpStatus = 200,
usage = {},
}: SendEventToPosthogParams): void => {
console.log('sendEventToPosthog', {
client,
distinctId,
traceId,
model,
provider,
input,
output,
latency,
baseURL,
params,
})
if (client.capture) {
client.capture({
distinctId: distinctId ?? traceId,
Expand All @@ -155,4 +143,4 @@ export const sendEventToPosthog = ({
groups: params.posthogGroups,
})
}
}
}

0 comments on commit af9a557

Please sign in to comment.