Skip to content

Commit

Permalink
Merge pull request #161 from miurla/provider
Browse files Browse the repository at this point in the history
Refactor AI model usage
  • Loading branch information
miurla authored May 19, 2024
2 parents 9d64523 + ad5bee9 commit 6ba4efd
Show file tree
Hide file tree
Showing 8 changed files with 36 additions and 52 deletions.
Binary file modified bun.lockb
Binary file not shown.
9 changes: 2 additions & 7 deletions lib/agents/inquire.tsx
Original file line number Diff line number Diff line change
@@ -1,24 +1,19 @@
import { OpenAI } from '@ai-sdk/openai'
import { Copilot } from '@/components/copilot'
import { createStreamableUI, createStreamableValue } from 'ai/rsc'
import { CoreMessage, streamObject } from 'ai'
import { PartialInquiry, inquirySchema } from '@/lib/schema/inquiry'
import { getModel } from '../utils'

export async function inquire(
uiStream: ReturnType<typeof createStreamableUI>,
messages: CoreMessage[]
) {
const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
organization: '' // optional organization
})
const objectStream = createStreamableValue<PartialInquiry>()
uiStream.update(<Copilot inquiry={objectStream.value} />)

let finalInquiry: PartialInquiry = {}
await streamObject({
model: openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4o'),
model: getModel(),
system: `As a professional web researcher, your role is to deepen your understanding of the user's input by conducting further inquiries when necessary.
After receiving an initial response from the user, carefully assess whether additional questions are absolutely essential to provide a comprehensive and accurate answer. Only proceed with further inquiries if the available information is insufficient or ambiguous.
Expand Down
9 changes: 2 additions & 7 deletions lib/agents/query-suggestor.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,12 @@ import { CoreMessage, streamObject } from 'ai'
import { PartialRelated, relatedSchema } from '@/lib/schema/related'
import { Section } from '@/components/section'
import SearchRelated from '@/components/search-related'
import { OpenAI } from '@ai-sdk/openai'
import { getModel } from '../utils'

export async function querySuggestor(
uiStream: ReturnType<typeof createStreamableUI>,
messages: CoreMessage[]
) {
const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
organization: '' // optional organization
})
const objectStream = createStreamableValue<PartialRelated>()
uiStream.append(
<Section title="Related" separator={true}>
Expand All @@ -23,7 +18,7 @@ export async function querySuggestor(

let finalRelatedQueries: PartialRelated = {}
await streamObject({
model: openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4o'),
model: getModel(),
system: `As a professional web researcher, your task is to generate a set of three queries that explore the subject matter more deeply, building upon the initial query and the information uncovered in its search results.
For instance, if the original query was "Starship's third test flight key milestones", your output should follow this format:
Expand Down
10 changes: 2 additions & 8 deletions lib/agents/researcher.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,16 @@ import {
streamText as nonexperimental_streamText
} from 'ai'
import { Section } from '@/components/section'
import { OpenAI } from '@ai-sdk/openai'
import { BotMessage } from '@/components/message'
import { getTools } from './tools'
import { getModel } from '../utils'

export async function researcher(
uiStream: ReturnType<typeof createStreamableUI>,
streamText: ReturnType<typeof createStreamableValue<string>>,
messages: CoreMessage[],
useSpecificModel?: boolean
) {
const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
organization: '' // optional organization
})

let fullResponse = ''
let hasError = false
const answerSection = (
Expand All @@ -33,7 +27,7 @@ export async function researcher(
let isFirstToolResponse = true
const currentDate = new Date().toLocaleString()
const result = await nonexperimental_streamText({
model: openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4o'),
model: getModel(),
maxTokens: 2500,
system: `As a professional search expert, you possess the ability to search for any information on the web.
or any information on the web.
Expand Down
10 changes: 2 additions & 8 deletions lib/agents/task-manager.tsx
Original file line number Diff line number Diff line change
@@ -1,18 +1,12 @@
import { CoreMessage, generateObject } from 'ai'
import { OpenAI } from '@ai-sdk/openai'
import { nextActionSchema } from '../schema/next-action'
import { getModel } from '../utils'

// Decide whether inquiry is required for the user input
export async function taskManager(messages: CoreMessage[]) {
const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
organization: '' // optional organization
})

try {
const result = await generateObject({
model: openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4o'),
model: getModel(),
system: `As a professional web researcher, your primary objective is to fully comprehend the user's query, conduct thorough web searches to gather the necessary information, and provide an appropriate response.
To achieve this, you must first analyze the user's input and determine the optimal course of action. You have two options at your disposal:
1. "proceed": If the provided information is sufficient to address the query effectively, choose this option to proceed with the research and formulate a response.
Expand Down
24 changes: 3 additions & 21 deletions lib/agents/writer.tsx
Original file line number Diff line number Diff line change
@@ -1,27 +1,14 @@
import { OpenAI } from '@ai-sdk/openai'
import { createStreamableUI, createStreamableValue } from 'ai/rsc'
import { CoreMessage, streamText as nonexperimental_streamText } from 'ai'
import { Section } from '@/components/section'
import { BotMessage } from '@/components/message'
import { createAnthropic } from '@ai-sdk/anthropic'
import { getModel } from '../utils'

export async function writer(
uiStream: ReturnType<typeof createStreamableUI>,
streamText: ReturnType<typeof createStreamableValue<string>>,
messages: CoreMessage[]
) {
var openai, anthropic
if (process.env.SPECIFIC_PROVIDER === 'anthropic') {
anthropic = createAnthropic({
baseUrl: process.env.SPECIFIC_API_BASE,
apiKey: process.env.SPECIFIC_API_KEY
})
} else {
openai = new OpenAI({
baseUrl: process.env.SPECIFIC_API_BASE,
apiKey: process.env.SPECIFIC_API_KEY,
organization: '' // optional organization
})
}
let fullResponse = ''
const answerSection = (
<Section title="Answer">
Expand All @@ -31,12 +18,7 @@ export async function writer(
uiStream.append(answerSection)

await nonexperimental_streamText({
model:
process.env.SPECIFIC_PROVIDER === 'anthropic'
? anthropic!(
process.env.SPECIFIC_API_MODEL || 'claude-3-haiku-20240307'
)
: openai!.chat(process.env.SPECIFIC_API_MODEL || 'llama3-70b-8192'),
model: getModel(),
maxTokens: 2500,
system: `As a professional writer, your job is to generate a comprehensive and informative, yet concise answer of 400 words or less for the given question based solely on the provided search results (URL and content). You must only use information from the provided search results. Use an unbiased and journalistic tone. Combine search results together into a coherent answer. Do not repeat text. If there are any images relevant to your answer, be sure to include them as well. Aim to directly address the user's question, augmenting your response with insights gleaned from the search results.
Whenever quoting or referencing information from a specific URL, always cite the source URL explicitly. Please match the language of the response to the user's language.
Expand Down
23 changes: 23 additions & 0 deletions lib/utils/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,29 @@
import { type ClassValue, clsx } from 'clsx'
import { twMerge } from 'tailwind-merge'
import { OpenAI } from '@ai-sdk/openai'
import { createGoogleGenerativeAI } from '@ai-sdk/google'
import { createAnthropic } from '@ai-sdk/anthropic'

/**
 * Build a single className string: resolve conditional/array class values
 * with clsx, then let tailwind-merge drop conflicting Tailwind utilities
 * (last one wins).
 */
export function cn(...inputs: ClassValue[]) {
  const resolved = clsx(inputs)
  return twMerge(resolved)
}

export function getModel() {
// Currently does not work with Google or Anthropic
// if (process.env.GOOGLE_GENERATIVE_AI_API_KEY) {
// const google = createGoogleGenerativeAI()
// return google('models/gemini-1.5-pro-latest')
// }

// if (process.env.ANTHROPIC_API_KEY) {
// const anthropic = createAnthropic()
// return anthropic('claude-3-haiku-20240307')
// }

const openai = new OpenAI({
baseUrl: process.env.OPENAI_API_BASE, // optional base URL for proxies etc.
apiKey: process.env.OPENAI_API_KEY, // optional API key, default to env property OPENAI_API_KEY
organization: '' // optional organization
})
return openai.chat(process.env.OPENAI_API_MODEL || 'gpt-4o')
}
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@
"lint": "next lint"
},
"dependencies": {
"@ai-sdk/anthropic": "^0.0.7",
"@ai-sdk/anthropic": "^0.0.14",
"@ai-sdk/google": "^0.0.14",
"@ai-sdk/openai": "^0.0.2",
"@radix-ui/react-alert-dialog": "^1.0.5",
"@radix-ui/react-avatar": "^1.0.4",
Expand Down

0 comments on commit 6ba4efd

Please sign in to comment.