Skip to content

Commit

Permalink
experimental_debugTraces
Browse files Browse the repository at this point in the history
  • Loading branch information
pavelgj committed Feb 19, 2025
1 parent 9bee7ee commit b78a954
Show file tree
Hide file tree
Showing 7 changed files with 38 additions and 19 deletions.
9 changes: 8 additions & 1 deletion js/plugins/googleai/src/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -866,9 +866,16 @@ export function defineGoogleAIModel({
return debugTraces
? await runInNewSpan(
ai.registry,
{ metadata: { name } },
{
metadata: {
name: sendChunk ? 'sendMessageStream' : 'sendMessage',
},
},
async (metadata) => {
metadata.input = {
sdk: '@google/generative-ai',
cache: cache,
model: genModel.model,
chatOptions: updatedChatRequest,
parts: msg.parts,
options,
Expand Down
10 changes: 5 additions & 5 deletions js/plugins/googleai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ export interface PluginOptions {
| ModelReference</** @ignore */ typeof GeminiConfigSchema>
| string
)[];
debugTraces?: boolean;
experimental_debugTraces?: boolean;
}

/**
Expand All @@ -85,7 +85,7 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {
apiKey: options?.apiKey,
apiVersion: 'v1beta',
baseUrl: options?.baseUrl,
debugTraces: options?.debugTraces,
debugTraces: options?.experimental_debugTraces,
})
);
}
Expand All @@ -97,7 +97,7 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {
apiKey: options?.apiKey,
apiVersion: undefined,
baseUrl: options?.baseUrl,
debugTraces: options?.debugTraces,
debugTraces: options?.experimental_debugTraces,
})
);
Object.keys(SUPPORTED_V15_MODELS).forEach((name) =>
Expand All @@ -107,7 +107,7 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {
apiKey: options?.apiKey,
apiVersion: undefined,
baseUrl: options?.baseUrl,
debugTraces: options?.debugTraces,
debugTraces: options?.experimental_debugTraces,
})
);
Object.keys(EMBEDDER_MODELS).forEach((name) =>
Expand All @@ -133,7 +133,7 @@ export function googleAI(options?: PluginOptions): GenkitPlugin {
...modelRef.info,
label: `Google AI - ${modelName}`,
},
debugTraces: options?.debugTraces,
debugTraces: options?.experimental_debugTraces,
});
}
}
Expand Down
4 changes: 2 additions & 2 deletions js/plugins/vertexai/src/common/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@ export interface CommonPluginOptions {
location: string;
/** Provide custom authentication configuration for connecting to Vertex AI. */
googleAuth?: GoogleAuthOptions;
/** Enables additional, detailed debug traces (e.g. raw model API call details). */
debugTraces?: boolean;
/** Enables additional debug traces (e.g. raw model API call details). */
experimental_debugTraces?: boolean;
}

/** Combined plugin options, extending common options with subplugin-specific options */
Expand Down
15 changes: 11 additions & 4 deletions js/plugins/vertexai/src/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -758,7 +758,7 @@ export function defineGeminiModel({
configSchema: GeminiConfigSchema,
use: middlewares,
},
async (request, streamingCallback) => {
async (request, sendChunk) => {
const vertex = vertexClientFactory(request);

// Make a copy of messages to avoid side-effects
Expand Down Expand Up @@ -898,7 +898,7 @@ export function defineGeminiModel({

const callGemini = async () => {
// Handle streaming and non-streaming responses
if (streamingCallback) {
if (sendChunk) {
const result = await genModel
.startChat(updatedChatRequest)
.sendMessageStream(msg.parts);
Expand All @@ -907,7 +907,7 @@ export function defineGeminiModel({
(item as GenerateContentResponse).candidates?.forEach(
(candidate) => {
const c = fromGeminiCandidate(candidate, jsonMode);
streamingCallback({
sendChunk({
index: c.index,
content: c.message.content,
});
Expand Down Expand Up @@ -956,9 +956,16 @@ export function defineGeminiModel({
return debugTraces
? await runInNewSpan(
ai.registry,
{ metadata: { name: version } },
{
metadata: {
name: sendChunk ? 'sendMessageStream' : 'sendMessage',
},
},
async (metadata) => {
metadata.input = {
sdk: '@google-cloud/vertexai',
cache: cache,
model: genModel.getModelName(),
chatOptions: updatedChatRequest,
parts: msg.parts,
options,
Expand Down
4 changes: 2 additions & 2 deletions js/plugins/vertexai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ export function vertexAI(options?: PluginOptions): GenkitPlugin {
projectId,
location,
},
options?.debugTraces
options?.experimental_debugTraces
)
);
if (options?.models) {
Expand All @@ -117,7 +117,7 @@ export function vertexAI(options?: PluginOptions): GenkitPlugin {
projectId,
location,
},
debugTraces: options.debugTraces,
debugTraces: options.experimental_debugTraces,
});
}
}
Expand Down
11 changes: 8 additions & 3 deletions js/testapps/context-caching/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,10 @@ import { genkit, z } from 'genkit'; // Import Genkit framework and Zod for schem
import { logger } from 'genkit/logging'; // Import logging utility from Genkit

const ai = genkit({
plugins: [vertexAI(), googleAI()], // Initialize Genkit with the Google AI plugin
plugins: [
vertexAI({ experimental_debugTraces: true, location: 'us-central1' }),
googleAI({ experimental_debugTraces: true }),
], // Initialize Genkit with the Vertex AI and Google AI plugins, both with experimental debug traces enabled
});

logger.setLogLevel('debug'); // Set the logging level to debug for detailed output
Expand All @@ -38,7 +41,7 @@ export const lotrFlowVertex = ai.defineFlow(
}),
outputSchema: z.string(), // Define the expected output as a string
},
async ({ query, textFilePath }) => {
async ({ query, textFilePath }, { sendChunk }) => {
const defaultQuery = 'What is the text i provided you with?'; // Default query to use if none is provided

// Read the content from the file if the path is provided
Expand Down Expand Up @@ -69,6 +72,7 @@ export const lotrFlowVertex = ai.defineFlow(
},
model: gemini15Flash, // Specify the model (gemini15Flash) to use for generation
prompt: query || defaultQuery, // Use the provided query or fall back to the default query
onChunk: sendChunk,
});

return llmResponse.text; // Return the generated text from the model
Expand All @@ -84,7 +88,7 @@ export const lotrFlowGoogleAI = ai.defineFlow(
}),
outputSchema: z.string(), // Define the expected output as a string
},
async ({ query, textFilePath }) => {
async ({ query, textFilePath }, { sendChunk }) => {
const defaultQuery = 'What is the text i provided you with?'; // Default query to use if none is provided

// Read the content from the file if the path is provided
Expand Down Expand Up @@ -115,6 +119,7 @@ export const lotrFlowGoogleAI = ai.defineFlow(
},
model: gemini15FlashGoogleAI, // Specify the model (gemini15FlashGoogleAI) to use for generation
prompt: query || defaultQuery, // Use the provided query or fall back to the default query
onChunk: sendChunk,
});

return llmResponse.text; // Return the generated text from the model
Expand Down
4 changes: 2 additions & 2 deletions js/testapps/flow-simple-ai/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,8 @@ enableGoogleCloudTelemetry({

const ai = genkit({
plugins: [
googleAI({ debugTraces: false }),
vertexAI({ location: 'us-central1', debugTraces: false }),
googleAI({ experimental_debugTraces: true }),
vertexAI({ location: 'us-central1', experimental_debugTraces: true }),
],
});

Expand Down

0 comments on commit b78a954

Please sign in to comment.