
change tokeniser to tiktoken
ztjhz committed Mar 25, 2023
1 parent 01b6fc7 commit 9cff66f
Showing 9 changed files with 163 additions and 50,490 deletions.
6 changes: 4 additions & 2 deletions package.json
```diff
@@ -9,6 +9,7 @@
     "preview": "vite preview"
   },
   "dependencies": {
+    "@dqbd/tiktoken": "^1.0.2",
     "html2canvas": "^1.4.1",
     "i18next": "^22.4.11",
     "i18next-browser-languagedetector": "^7.0.1",
@@ -29,7 +30,6 @@
   },
   "devDependencies": {
     "@tailwindcss/typography": "^0.5.9",
-    "@types/dompurify": "^2.4.0",
     "@types/react": "^18.0.27",
     "@types/react-dom": "^18.0.10",
     "@types/react-scroll-to-bottom": "^4.2.0",
@@ -39,6 +39,8 @@
     "postcss": "^8.4.21",
     "tailwindcss": "^3.2.7",
     "typescript": "^4.9.3",
-    "vite": "^4.1.0"
+    "vite": "^4.1.0",
+    "vite-plugin-top-level-await": "^1.3.0",
+    "vite-plugin-wasm": "^3.2.2"
   }
 }
```
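
The two new Vite plugins are needed because @dqbd/tiktoken ships its tokenizer as a WebAssembly module, which stock Vite cannot bundle and which is initialised with top-level await. The commit's own vite.config.ts change is not visible in the rendered diff, so the sketch below only shows the typical way these plugins are registered (the file name and any existing plugins are assumptions):

```ts
// Hypothetical vite.config.ts — not part of the rendered diff.
import { defineConfig } from 'vite';
import wasm from 'vite-plugin-wasm';
import topLevelAwait from 'vite-plugin-top-level-await';

export default defineConfig({
  plugins: [
    // ...any plugins the project already registers (e.g. the React plugin)
    wasm(),          // lets Vite resolve and bundle tiktoken's .wasm binary
    topLevelAwait(), // allows the module-level async WASM initialisation
  ],
});
```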
10 changes: 8 additions & 2 deletions src/components/TokenCount/TokenCount.tsx
```diff
@@ -2,7 +2,7 @@ import React, { useEffect, useState } from 'react';
 import useStore from '@store/store';
 import { shallow } from 'zustand/shallow';
 
-import { countMessagesToken } from '@utils/messageUtils';
+import countTokens from '@utils/messageUtils';
 
 const TokenCount = React.memo(() => {
   const [tokenCount, setTokenCount] = useState<number>(0);
@@ -13,8 +13,14 @@ const TokenCount = React.memo(() => {
     shallow
   );
 
+  const model = useStore((state) =>
+    state.chats
+      ? state.chats[state.currentChatIndex].config.model
+      : 'gpt-3.5-turbo'
+  );
+
   useEffect(() => {
-    if (!generating) setTokenCount(countMessagesToken(messages));
+    if (!generating) setTokenCount(countTokens(messages, model));
   }, [messages, generating]);
 
   return (
```
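
The component now reads the active chat's model and passes it to the new default export of @utils/messageUtils. That utility is not shown in this diff; a minimal sketch of what a tiktoken-based countTokens could look like, assuming the repo's type aliases and a rough per-message overhead, is:

```ts
// Hypothetical sketch of countTokens in @utils/messageUtils — the real
// implementation is not visible here; import paths and the per-message
// overhead are assumptions.
import { encoding_for_model, type TiktokenModel } from '@dqbd/tiktoken';
import { MessageInterface, ModelOptions } from '@type/chat';

const countTokens = (messages: MessageInterface[], model: ModelOptions) => {
  if (messages.length === 0) return 0;
  const encoder = encoding_for_model(model as TiktokenModel);
  // Roughly: encoded content plus a few metadata tokens (role, separators)
  // per message; ~4 extra tokens per message is a common approximation.
  const count = messages.reduce(
    (sum, { content }) => sum + encoder.encode(content).length + 4,
    0
  );
  encoder.free(); // WASM-backed encoders must be freed explicitly
  return count;
};

export default countTokens;
```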
4 changes: 2 additions & 2 deletions src/constants/chat.ts
```diff
@@ -16,10 +16,10 @@ Current date: ${dateString}`;
 
 export const modelOptions: ModelOptions[] = [
   'gpt-3.5-turbo',
-  // 'gpt-3.5-turbo-0301',
   'gpt-4',
-  // 'gpt-4-0314',
   'gpt-4-32k',
+  // 'gpt-3.5-turbo-0301',
+  // 'gpt-4-0314',
   // 'gpt-4-32k-0314',
 ];
 
```
6 changes: 5 additions & 1 deletion src/hooks/useSubmit.ts
```diff
@@ -55,9 +55,13 @@ const useSubmit = () => {
 
     try {
       let stream;
+      if (chats[currentChatIndex].messages.length === 0)
+        throw new Error('No messages submitted!');
+
       const messages = limitMessageTokens(
         chats[currentChatIndex].messages,
-        chats[currentChatIndex].config.max_tokens
+        chats[currentChatIndex].config.max_tokens,
+        chats[currentChatIndex].config.model
       );
       if (messages.length === 0) throw new Error('Message exceed max token!');
 
```
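
limitMessageTokens now also receives the chat's model so truncation can use the matching tiktoken encoding. Its body is not shown in the diff; under the assumption that it keeps the most recent messages that fit under max_tokens, a sketch would be:

```ts
// Hypothetical sketch of limitMessageTokens after this commit — the actual
// utility is not part of the rendered diff.
import countTokens from '@utils/messageUtils';
import { MessageInterface, ModelOptions } from '@type/chat';

export const limitMessageTokens = (
  messages: MessageInterface[],
  limit: number,
  model: ModelOptions
): MessageInterface[] => {
  const limited: MessageInterface[] = [];
  let tokenCount = 0;

  // Walk from the newest message backwards, keeping messages while the
  // running total stays within the limit.
  for (let i = messages.length - 1; i >= 0; i--) {
    const cost = countTokens([messages[i]], model);
    if (tokenCount + cost > limit) break;
    limited.unshift(messages[i]);
    tokenCount += cost;
  }
  return limited;
};
```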
11 changes: 4 additions & 7 deletions src/types/chat.ts
```diff
@@ -25,13 +25,10 @@ export interface ConfigInterface {
   frequency_penalty: number;
 }
 
-export type ModelOptions =
-  | 'gpt-4'
-  | 'gpt-4-0314'
-  | 'gpt-4-32k'
-  | 'gpt-4-32k-0314'
-  | 'gpt-3.5-turbo'
-  | 'gpt-3.5-turbo-0301';
+export type ModelOptions = 'gpt-4' | 'gpt-4-32k' | 'gpt-3.5-turbo';
+// | 'gpt-3.5-turbo-0301';
+// | 'gpt-4-0314'
+// | 'gpt-4-32k-0314'
 
 export interface LocalStorageInterfaceV0ToV1 {
   chats: ChatInterface[];
```
