OpenAI package (#9)
Move OpenAI client into a separate package.
mme authored Nov 17, 2023
1 parent 79e19c4 commit c9a1dbb
Showing 24 changed files with 336 additions and 380 deletions.
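For consumers of @beakjs/core, the move amounts to an import-path change; the exported names stay the same. A minimal sketch based on the beak.ts and index.ts diffs below:

// Before: the client lived inside @beakjs/core.
import { OpenAI, OpenAIModel } from "./openai";

// After: it is resolved from the new workspace package.
import { OpenAI, OpenAIModel } from "@beakjs/openai";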
8 changes: 5 additions & 3 deletions .github/workflows/release.yml
@@ -20,10 +20,12 @@ jobs:
           yarn install
       - name: Build (core)
         run: |
-          yarn workspace @beakjs/core build
-      - name: Build (react)
+          yarn build
+      - name: Publish (openai)
         run: |
-          yarn workspace @beakjs/react build
+          yarn workspace @beakjs/openai publish --access public
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
       - name: Publish (core)
         run: |
           yarn workspace @beakjs/core publish --access public
3 changes: 1 addition & 2 deletions .gitignore
@@ -9,5 +9,4 @@ Thumbs.db
 TODO.md
 TODO.later.md

-packages/core/dist
-packages/react/dist
+packages/*/dist
3 changes: 1 addition & 2 deletions .vscode/settings.json
@@ -3,7 +3,6 @@
     "**/.cache": true,
     "**/node_modules": true,
     ".placeholder": true,
-    "packages/core/dist": true,
-    "packages/react/dist": true
+    "packages/*/dist": true
   }
 }
3 changes: 2 additions & 1 deletion demo/presentation/package.json
@@ -7,7 +7,8 @@
     "dev": "vite",
     "build": "tsc && vite build",
     "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
-    "preview": "vite preview"
+    "preview": "vite preview",
+    "clean": "rm -rf dist"
   },
   "dependencies": {
     "react": "^18.2.0",
5 changes: 3 additions & 2 deletions package.json
@@ -26,12 +26,12 @@
   },
   "scripts": {
     "dev": "vite",
-    "build": "yarn workspace @beakjs/core build && yarn workspace @beakjs/react build",
+    "build": "yarn workspaces run build",
     "prepublishOnly": "yarn build",
     "serve": "vite preview",
     "test": "jest",
     "sync-versions": "ts-node scripts/sync-versions.ts",
-    "clean": "rm -rf dist && rm -rf src/react/dist && rm -rf src/core/dist"
+    "clean": "yarn workspaces run clean"
   },
   "jest": {
     "testTimeout": 60000,
@@ -53,6 +53,7 @@
   ],
   "private": true,
   "workspaces": [
+    "packages/openai",
     "packages/core",
     "packages/react"
   ]
30 changes: 25 additions & 5 deletions packages/core/package.json
@@ -1,30 +1,50 @@
 {
+  "author": "Markus Ecker",
+  "license": "MIT",
   "name": "@beakjs/core",
   "version": "0.0.5",
   "description": "BeakJS core library",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
     "build": "tsc",
-    "prepublishOnly": "yarn build"
+    "prepublishOnly": "yarn build",
+    "clean": "rm -rf dist",
+    "test": "jest"
   },
   "files": [
     "dist/**/*"
   ],
   "dependencies": {
+    "@beakjs/openai": "0.0.5",
     "eventemitter3": "^5.0.1",
     "jsonrepair": "^3.2.4",
     "uuid": "^9.0.1"
   },
   "devDependencies": {
     "@types/uuid": "^9.0.6",
-    "typescript": "^5.0.4"
+    "typescript": "^5.0.4",
+    "@types/jest": "^29.5.1",
+    "jest": "^29.5.0",
+    "ts-jest": "^29.1.0"
   },
-  "peerDependencies": {},
   "repository": {
     "type": "git",
     "url": "https://github.com/mme/beakjs.git"
   },
-  "author": "Markus Ecker",
-  "license": "MIT"
+  "jest": {
+    "testTimeout": 60000,
+    "transform": {
+      "^.+\\.(ts|tsx)$": "ts-jest"
+    },
+    "testEnvironment": "node",
+    "moduleFileExtensions": [
+      "ts",
+      "tsx",
+      "js"
+    ],
+    "testPathIgnorePatterns": [
+      "/node_modules/"
+    ]
+  }
 }
27 changes: 15 additions & 12 deletions packages/core/src/beak.ts
@@ -1,14 +1,15 @@
 import { EventEmitter } from "eventemitter3";
-import { OpenAI, OpenAIModel } from "./openai";
+import { OpenAI, OpenAIModel } from "@beakjs/openai";
 import {
-  LLMClient,
+  LLMAdapter,
   Message,
   FunctionDefinition,
   FunctionCall,
   QueryChatCompletionParams,
   DebugLogger,
 } from "./types";
 import { v4 as uuidv4 } from "uuid";
+import { OpenAIAdapter } from "./openai";

 const DEFAULT_INSTRUCTIONS =
   "Assistant is running inside a web application. Assistant never returns JSON " +
@@ -57,7 +58,7 @@ export class BeakCore extends EventEmitter<BeakEvents> {
     super();
     this.configuration = configuration;

-    let client = this.newClient();
+    let client = this.newAdapter();
     this.instructionsMessage = new Message({
       role: "system",
       content: this.configuration.instructions || DEFAULT_INSTRUCTIONS,
@@ -135,7 +136,7 @@ export class BeakCore extends EventEmitter<BeakEvents> {
       content: content,
       status: "success",
     });
-    userMessage.calculateNumTokens(this.newClient());
+    userMessage.calculateNumTokens(this.newAdapter());

     this._messages.push(userMessage);
     this.emit("change", userMessage.copy());
@@ -152,7 +153,7 @@ export class BeakCore extends EventEmitter<BeakEvents> {
     this._messages.push(message);
     this.emit("change", message.copy());

-    const client = this.newClient();
+    const client = this.newAdapter();

     const contextMessage = this.infoMessage();
     let newMessages: Message[] = [];
@@ -314,7 +315,7 @@ }
   }

   private async runChatCompletionAsync(
-    client: LLMClient,
+    client: LLMAdapter,
     params: QueryChatCompletionParams
   ) {
     return new Promise<Message[]>((resolve, reject) => {
@@ -394,11 +395,13 @@ });
     });
   }

-  private newClient(): LLMClient {
-    return new OpenAI({
-      apiKey: this.configuration.openAIApiKey,
-      model: this.configuration.openAIModel,
-      debugLogger: this.configuration.debugLogger,
-    });
+  private newAdapter(): LLMAdapter {
+    return new OpenAIAdapter(
+      new OpenAI({
+        apiKey: this.configuration.openAIApiKey,
+        model: this.configuration.openAIModel,
+        debugLogger: this.configuration.debugLogger,
+      })
+    );
   }
 }
2 changes: 1 addition & 1 deletion packages/core/src/index.ts
@@ -1,4 +1,4 @@
 export { BeakCore } from "./beak";
 export type { FunctionDefinition } from "./types";
 export { Message, DebugLogger } from "./types";
-export type { OpenAIModel } from "./openai";
+export type { OpenAIModel } from "@beakjs/openai";
119 changes: 119 additions & 0 deletions packages/core/src/openai.ts
@@ -0,0 +1,119 @@
import {
  OpenAI,
  FetchChatCompletionParams,
  OpenAIFunction,
  OpenAIMessage,
} from "@beakjs/openai";
import {
  FunctionCall,
  FunctionDefinition,
  LLMAdapter,
  LLMEvent,
  Message,
  QueryChatCompletionParams,
} from "./types";

export class OpenAIAdapter implements LLMAdapter {
  constructor(private openai: OpenAI) {}

  countTokens(message: Message): number {
    return this.openai.countTokens(message);
  }

  async queryChatCompletion(params: QueryChatCompletionParams): Promise<void> {
    const openAIParams: FetchChatCompletionParams = {
      messages: params.messages.map(messageToOpenAI),
      functions: functionsToOpenAIFormat(params.functions),
      maxTokens: params.maxTokens,
      temperature: params.temperature,
    };
    return await this.openai.queryChatCompletion(openAIParams);
  }

  on(event: LLMEvent, listener: (...args: any[]) => void): this {
    this.openai.on(event, listener);
    return this;
  }

  off(event: LLMEvent, listener?: (...args: any[]) => void): this {
    this.openai.off(event, listener);
    return this;
  }
}

function messageToOpenAI(message: Message): OpenAIMessage {
  const content = message.content || "";
  if (message.role === "system") {
    return { role: message.role, content };
  } else if (message.role === "function") {
    return {
      role: message.role,
      content,
      name: message.name,
    };
  } else {
    let functionCall = functionCallToOpenAI(message.functionCall);

    return {
      role: message.role,
      content,
      ...(functionCall !== undefined && { function_call: functionCall }),
    };
  }
}

function functionsToOpenAIFormat(
  functions?: FunctionDefinition[]
): OpenAIFunction[] | undefined {
  if (functions === undefined) {
    return undefined;
  }
  return functions.map((fun) => {
    const args = fun.parameters;
    let openAiProperties: { [key: string]: any } = {};
    let required: string[] = [];

    if (args) {
      for (const [name, arg] of Object.entries(args)) {
        const description = arg.description;
        if (typeof arg.type === "string" || arg.type === undefined) {
          const type = arg.type || "string";
          openAiProperties[name] = {
            type,
            ...(description ? { description } : {}),
          };
        } else if (Array.isArray(arg.type)) {
          openAiProperties[name] = {
            type: "string",
            enum: arg.type,
            ...(description ? { description } : {}),
          };
        }

        if (arg.optional !== true) {
          required.push(name);
        }
      }
    }

    return {
      name: fun.name,
      description: fun.description,
      parameters: {
        type: "object",
        properties: openAiProperties,
        ...(required.length ? { required } : {}),
      },
    };
  });
}

function functionCallToOpenAI(functionCall?: FunctionCall): any {
  if (functionCall === undefined) {
    return undefined;
  }
  return {
    name: functionCall.name,
    arguments: JSON.stringify(functionCall.arguments),
  };
}
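A hypothetical usage sketch of the adapter defined above: wrapping the raw client this way mirrors BeakCore.newAdapter() in beak.ts, and the event names come from the LLMEvent union in types.ts. The "content" payload shape and the optional QueryChatCompletionParams fields are assumptions:

import { OpenAI } from "@beakjs/openai";
import { OpenAIAdapter } from "./openai";
import { Message } from "./types";

async function demo(apiKey: string) {
  // Wrap the raw client, as BeakCore.newAdapter() does.
  const adapter = new OpenAIAdapter(new OpenAI({ apiKey }));

  // Completions stream in as events: "content" | "function" | "partial" | "error" | "end".
  adapter.on("content", (chunk: string) => process.stdout.write(chunk));
  adapter.on("error", (err: Error) => console.error(err));

  await adapter.queryChatCompletion({
    messages: [new Message({ role: "user", content: "Hello!" })],
    maxTokens: 256,
    temperature: 0.7,
  });
}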
2 changes: 0 additions & 2 deletions packages/core/src/openai/index.ts

This file was deleted.

4 changes: 2 additions & 2 deletions packages/core/src/types.ts
@@ -78,7 +78,7 @@ export class Message {
     Object.assign(this, options);
   }

-  calculateNumTokens(llm: LLMClient) {
+  calculateNumTokens(llm: LLMAdapter) {
     this.numTokens = llm.countTokens(this);
   }

@@ -112,7 +112,7 @@ export interface FunctionCall {

 export type LLMEvent = "content" | "function" | "partial" | "error" | "end";

-export abstract class LLMClient {
+export abstract class LLMAdapter {
   abstract countTokens(message: Message): number;

   abstract queryChatCompletion(
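The rename from LLMClient to LLMAdapter reflects the new indirection: core no longer talks to OpenAI directly, so an alternative backend only has to satisfy this abstract class. A hypothetical sketch (EchoAdapter and its four-characters-per-token heuristic are illustrative, and it assumes LLMAdapter also declares the on/off methods that OpenAIAdapter implements above):

import { EventEmitter } from "eventemitter3";
import {
  LLMAdapter,
  LLMEvent,
  Message,
  QueryChatCompletionParams,
} from "./types";

class EchoAdapter extends LLMAdapter {
  private events = new EventEmitter();

  countTokens(message: Message): number {
    // Crude heuristic: about four characters per token.
    return Math.ceil((message.content || "").length / 4);
  }

  async queryChatCompletion(params: QueryChatCompletionParams): Promise<void> {
    // Echo the last message back, then signal completion.
    const last = params.messages[params.messages.length - 1];
    this.events.emit("content", last?.content || "");
    this.events.emit("end");
  }

  on(event: LLMEvent, listener: (...args: any[]) => void): this {
    this.events.on(event, listener);
    return this;
  }

  off(event: LLMEvent, listener?: (...args: any[]) => void): this {
    this.events.off(event, listener);
    return this;
  }
}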