Skip to content

Commit

Permalink
feat(js/plugins): added experimental_debugTraces option to googleai…
Browse files Browse the repository at this point in the history
… and vertexai plugins (#2023)
  • Loading branch information
pavelgj authored Feb 19, 2025
1 parent 1343e6f commit 4a10ee8
Show file tree
Hide file tree
Showing 7 changed files with 261 additions and 160 deletions.
150 changes: 95 additions & 55 deletions js/plugins/googleai/src/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ import {
downloadRequestMedia,
simulateSystemPrompt,
} from 'genkit/model/middleware';
import { runInNewSpan } from 'genkit/tracing';
import { getApiKeyFromEnvVar } from './common';
import { handleCacheIfNeeded } from './context-caching';
import { extractCacheConfig } from './context-caching/utils';
Expand Down Expand Up @@ -633,15 +634,25 @@ export function cleanSchema(schema: JSONSchema): JSONSchema {
/**
* Defines a new GoogleAI model.
*/
export function defineGoogleAIModel(
ai: Genkit,
name: string,
apiKey?: string,
apiVersion?: string,
baseUrl?: string,
info?: ModelInfo,
defaultConfig?: GeminiConfig
): ModelAction {
export function defineGoogleAIModel({
ai,
name,
apiKey,
apiVersion,
baseUrl,
info,
defaultConfig,
debugTraces,
}: {
ai: Genkit;
name: string;
apiKey?: string;
apiVersion?: string;
baseUrl?: string;
info?: ModelInfo;
defaultConfig?: GeminiConfig;
debugTraces?: boolean;
}): ModelAction {
if (!apiKey) {
apiKey = getApiKeyFromEnvVar();
}
Expand Down Expand Up @@ -832,54 +843,83 @@ export function defineGoogleAIModel(
);
}

if (sendChunk) {
const result = await genModel
.startChat(updatedChatRequest)
.sendMessageStream(msg.parts, options);
for await (const item of result.stream) {
(item as GenerateContentResponse).candidates?.forEach((candidate) => {
const c = fromJSONModeScopedGeminiCandidate(candidate);
sendChunk({
index: c.index,
content: c.message.content,
const callGemini = async () => {
if (sendChunk) {
const result = await genModel
.startChat(updatedChatRequest)
.sendMessageStream(msg.parts, options);
for await (const item of result.stream) {
(item as GenerateContentResponse).candidates?.forEach(
(candidate) => {
const c = fromJSONModeScopedGeminiCandidate(candidate);
sendChunk({
index: c.index,
content: c.message.content,
});
}
);
}
const response = await result.response;
const candidates = response.candidates || [];
if (response.candidates?.['undefined']) {
candidates.push(response.candidates['undefined']);
}
if (!candidates.length) {
throw new GenkitError({
status: 'FAILED_PRECONDITION',
message: 'No valid candidates returned.',
});
});
}
return {
candidates: candidates.map(fromJSONModeScopedGeminiCandidate) || [],
custom: response,
};
} else {
const result = await genModel
.startChat(updatedChatRequest)
.sendMessage(msg.parts, options);
if (!result.response.candidates?.length)
throw new Error('No valid candidates returned.');
const responseCandidates =
result.response.candidates.map(fromJSONModeScopedGeminiCandidate) ||
[];
return {
candidates: responseCandidates,
custom: result.response,
usage: {
...getBasicUsageStats(request.messages, responseCandidates),
inputTokens: result.response.usageMetadata?.promptTokenCount,
outputTokens: result.response.usageMetadata?.candidatesTokenCount,
totalTokens: result.response.usageMetadata?.totalTokenCount,
},
};
}
const response = await result.response;
const candidates = response.candidates || [];
if (response.candidates?.['undefined']) {
candidates.push(response.candidates['undefined']);
}
if (!candidates.length) {
throw new GenkitError({
status: 'FAILED_PRECONDITION',
message: 'No valid candidates returned.',
});
}
return {
candidates: candidates.map(fromJSONModeScopedGeminiCandidate) || [],
custom: response,
};
} else {
const result = await genModel
.startChat(updatedChatRequest)
.sendMessage(msg.parts, options);
if (!result.response.candidates?.length)
throw new Error('No valid candidates returned.');
const responseCandidates =
result.response.candidates.map(fromJSONModeScopedGeminiCandidate) ||
[];
return {
candidates: responseCandidates,
custom: result.response,
usage: {
...getBasicUsageStats(request.messages, responseCandidates),
inputTokens: result.response.usageMetadata?.promptTokenCount,
outputTokens: result.response.usageMetadata?.candidatesTokenCount,
totalTokens: result.response.usageMetadata?.totalTokenCount,
},
};
}
};
// If debugTraces is enabled, we wrap the actual model call with a span and
// attach the raw API params as the span's input.
return debugTraces
? await runInNewSpan(
ai.registry,
{
metadata: {
name: sendChunk ? 'sendMessageStream' : 'sendMessage',
},
},
async (metadata) => {
metadata.input = {
sdk: '@google/generative-ai',
cache: cache,
model: genModel.model,
chatOptions: updatedChatRequest,
parts: msg.parts,
options,
};
const response = await callGemini();
metadata.output = response.custom;
return response;
}
)
: await callGemini();
}
);
}
Expand Down
50 changes: 27 additions & 23 deletions js/plugins/googleai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export interface PluginOptions {
| ModelReference</** @ignore */ typeof GeminiConfigSchema>
| string
)[];
experimental_debugTraces?: boolean;
}

/**
Expand All @@ -78,33 +79,36 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {

if (apiVersions.includes('v1beta')) {
Object.keys(SUPPORTED_V15_MODELS).forEach((name) =>
defineGoogleAIModel(
defineGoogleAIModel({
ai,
name,
options?.apiKey,
'v1beta',
options?.baseUrl
)
apiKey: options?.apiKey,
apiVersion: 'v1beta',
baseUrl: options?.baseUrl,
debugTraces: options?.experimental_debugTraces,
})
);
}
if (apiVersions.includes('v1')) {
Object.keys(SUPPORTED_V1_MODELS).forEach((name) =>
defineGoogleAIModel(
defineGoogleAIModel({
ai,
name,
options?.apiKey,
undefined,
options?.baseUrl
)
apiKey: options?.apiKey,
apiVersion: undefined,
baseUrl: options?.baseUrl,
debugTraces: options?.experimental_debugTraces,
})
);
Object.keys(SUPPORTED_V15_MODELS).forEach((name) =>
defineGoogleAIModel(
defineGoogleAIModel({
ai,
name,
options?.apiKey,
undefined,
options?.baseUrl
)
apiKey: options?.apiKey,
apiVersion: undefined,
baseUrl: options?.baseUrl,
debugTraces: options?.experimental_debugTraces,
})
);
Object.keys(EMBEDDER_MODELS).forEach((name) =>
defineGoogleAIEmbedder(ai, name, { apiKey: options?.apiKey })
Expand All @@ -120,17 +124,17 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {
modelOrRef.name.split('/')[1];
const modelRef =
typeof modelOrRef === 'string' ? gemini(modelOrRef) : modelOrRef;
defineGoogleAIModel(
defineGoogleAIModel({
ai,
modelName,
options?.apiKey,
undefined,
options?.baseUrl,
{
name: modelName,
apiKey: options?.apiKey,
baseUrl: options?.baseUrl,
info: {
...modelRef.info,
label: `Google AI - ${modelName}`,
}
);
},
debugTraces: options?.experimental_debugTraces,
});
}
}
});
Expand Down
2 changes: 2 additions & 0 deletions js/plugins/vertexai/src/common/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ export interface CommonPluginOptions {
location: string;
/** Provide custom authentication configuration for connecting to Vertex AI. */
googleAuth?: GoogleAuthOptions;
/** Enables additional debug traces (e.g. raw model API call details). */
experimental_debugTraces?: boolean;
}

/** Combined plugin options, extending common options with subplugin-specific options */
Expand Down
Loading

0 comments on commit 4a10ee8

Please sign in to comment.