Skip to content

Commit

Permalink
feat: move json parse to api
Browse files Browse the repository at this point in the history
  • Loading branch information
jhen0409 committed Dec 19, 2023
1 parent f229662 commit 427a856
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 7 deletions.
8 changes: 3 additions & 5 deletions example/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -176,17 +176,15 @@ export default function App() {
addSystemMessage(`Heat up time: ${tHeat}ms`)

addSystemMessage('Benchmarking the model...')
const result = await context.bench(512, 128, 1, 3)

const [
const {
modelDesc,
modelSize,
modelNParams,
ppAvg,
ppStd,
tgAvg,
tgStd,
] = JSON.parse(result)
} = await context.bench(512, 128, 1, 3)

const size = `${(modelSize / 1024.0 / 1024.0 / 1024.0).toFixed(2)} GiB`
const nParams = `${(modelNParams / 1e9).toFixed(2)}B`
Expand All @@ -209,7 +207,7 @@ export default function App() {
return
case '/save-session':
context.saveSession(`${dirs.DocumentDir}/llama-session.bin`).then(tokensSaved => {
console.log('Session saved:', result)
console.log('Session tokens saved:', tokensSaved)
addSystemMessage(`Session saved! ${tokensSaved} tokens saved.`)
}).catch(e => {
console.log('Session save failed:', e)
Expand Down
32 changes: 30 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,16 @@ export type ContextParams = NativeContextParams

export type CompletionParams = Omit<NativeCompletionParams, 'emit_partial_completion'>

/**
 * Result of `LlamaContext.bench`: model metadata plus prompt-processing (pp)
 * and text-generation (tg) benchmark statistics, decoded from the positional
 * JSON array returned by the native layer.
 */
export type BenchResult = {
  /** Human-readable model description reported by the native layer. */
  modelDesc: string
  /** Model size in bytes (callers display GiB by dividing by 1024^3). */
  modelSize: number
  /** Total number of model parameters (callers display billions via / 1e9). */
  modelNParams: number
  /** Prompt-processing average — presumably tokens/sec; confirm against native impl. */
  ppAvg: number
  /** Prompt-processing standard deviation across repetitions. */
  ppStd: number
  /** Text-generation average — presumably tokens/sec; confirm against native impl. */
  tgAvg: number
  /** Text-generation standard deviation across repetitions. */
  tgStd: number
}

export class LlamaContext {
id: number

Expand Down Expand Up @@ -116,8 +126,26 @@ export class LlamaContext {
return RNLlama.embedding(this.id, text)
}

bench(pp: number, tg: number, pl: number, nr: number): Promise<string> {
return RNLlama.bench(this.id, pp, tg, pl, nr)
async bench(pp: number, tg: number, pl: number, nr: number): Promise<BenchResult> {
const result = await RNLlama.bench(this.id, pp, tg, pl, nr)
const [
modelDesc,
modelSize,
modelNParams,
ppAvg,
ppStd,
tgAvg,
tgStd,
] = JSON.parse(result)
return {
modelDesc,
modelSize,
modelNParams,
ppAvg,
ppStd,
tgAvg,
tgStd,
}
}

async release(): Promise<void> {
Expand Down

0 comments on commit 427a856

Please sign in to comment.