-
Notifications
You must be signed in to change notification settings - Fork 80
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Prompt Schema for Gemini Pro. This prompt schema doesn't define a schema for Gemini's Safety Settings. As those are more complicated, they will come on top. API Docs for Generation Config: https://ai.google.dev/api/python/google/ai/generativelanguage/GenerationConfig Skipped these attributes as part of the prompt schema. - skipped candidate count - didn't add max_output_tokens. I was getting weird index out of bounds exceptions when setting max_output_tokens which occurred at the output parsing step. I tried with 35, 400, 200. Thought it would be better UX to just leave it out for now before figuring out why. - safety settings See: Code Ref: https://github.com/google/generative-ai-python/blob/main/google/generativeai/types/safety_types.py#L218-L221 API Doc Ref: https://ai.google.dev/docs/safety_setting_gemini#code-examples ## Test plan <img width="1310" alt="Screenshot 2024-02-26 at 11 20 04 AM" src="https://github.com/lastmile-ai/aiconfig/assets/141073967/9572a4c0-2005-4e6c-9e93-c1eb0c3859dd">
- Loading branch information
1 parent
20a144c
commit 0f6475d
Showing
3 changed files
with
72 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
55 changes: 55 additions & 0 deletions
55
python/src/aiconfig/editor/client/src/shared/prompt_schemas/GeminiPromptSchema.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
import { PromptSchema } from "../../utils/promptUtils"; | ||
// This does not support Gemini Vision. Model parser does not support it. | ||
// TODO: Safety Settings, Candidate Count, max_output_tokens | ||
export const GeminiParserPromptSchema: PromptSchema = { | ||
// https://ai.google.dev/api/python/google/ai/generativelanguage/GenerationConfig | ||
input: { | ||
type: "string", | ||
}, | ||
model_settings: { | ||
type: "object", | ||
properties: { | ||
generation_config: { | ||
type: "object", | ||
properties: { | ||
candidate_count: {}, | ||
temperature: { | ||
type: "number", | ||
description: "Controls the randomness of the output.", | ||
minimum: 0.0, | ||
maximum: 1.0, | ||
}, | ||
top_p: { | ||
type: "number", | ||
description: | ||
"The maximum cumulative probability of tokens to consider when sampling.", | ||
}, | ||
top_k: { | ||
type: "integer", | ||
description: | ||
"The maximum number of tokens to consider when sampling.", | ||
}, | ||
stop_sequences: { | ||
type: "array", | ||
description: | ||
"The set of character sequences (up to 5) that will stop output generation", | ||
items: { | ||
type: "string", | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
}, | ||
prompt_metadata: { | ||
type: "object", | ||
properties: { | ||
remember_chat_context: { | ||
type: "boolean", | ||
}, | ||
stream: { | ||
type: "boolean", | ||
}, | ||
}, | ||
}, | ||
}; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters