
feat: anthropic tool streaming
adrienbrault committed May 17, 2024
1 parent ce929c2 commit ae54c0a
Showing 3 changed files with 28 additions and 28 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -211,9 +211,9 @@ Throughputs from https://artificialanalysis.ai/leaderboards/providers .
| OpenAI | GPT-4o | 128k | 🚀 |
| OpenAI | GPT-4 Turbo | 128k | 🚀 24/s |
| OpenAI | GPT-3.5 Turbo | 16k | 🚀 72/s |
-| Anthropic | Claude 3 Haiku | 200k | 📄 88/s |
-| Anthropic | Claude 3 Sonnet | 200k | 📄 59/s |
-| Anthropic | Claude 3 Opus | 200k | 📄 26/s |
+| Anthropic | Claude 3 Haiku | 200k | 🚀 88/s |
+| Anthropic | Claude 3 Sonnet | 200k | 🚀 59/s |
+| Anthropic | Claude 3 Opus | 200k | 🚀 26/s |
| Perplexity | Sonar Small Chat | 16k | 📄 |
| Perplexity | Sonar Small Online | 12k | 📄 |
| Perplexity | Sonar Medium Chat | 16k | 📄 |
29 changes: 24 additions & 5 deletions src/LLM/AnthropicLLM.php
@@ -16,6 +16,7 @@
use function Psl\Type\optional;
use function Psl\Type\shape;
use function Psl\Type\string;
+use function Psl\Type\union;

class AnthropicLLM implements LLMInterface
{
@@ -47,6 +48,17 @@ public function get(
'max_tokens' => 4000,
'stream' => true,
'system' => \call_user_func($this->config->systemPrompt, $schema, $prompt),
+'tools' => [
+    [
+        'name' => 'extract',
+        'description' => 'Extract the relevant information',
+        'input_schema' => $schema,
+    ],
+],
+'tool_choice' => [
+    'type' => 'tool',
+    'name' => 'extract',
+],
];

// Tool and json modes do not support streaming.
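The new `tools` and `tool_choice` options force the model to call the `extract` tool, so the JSON schema travels in the tool definition rather than in the prompt. As a minimal, illustrative sketch (not part of the diff, with a made-up model, prompt, and schema), the resulting Messages API request body looks roughly like this:

```php
<?php

// Illustrative only: the model name, prompt, and schema below are assumptions,
// not values taken from this commit.
$schema = [
    'type' => 'object',
    'properties' => [
        'name' => ['type' => 'string'],
        'age' => ['type' => 'integer'],
    ],
    'required' => ['name'],
];

$request = [
    'model' => 'claude-3-haiku-20240307',
    'max_tokens' => 4000,
    'stream' => true,
    'system' => 'Extract the requested information.',
    'messages' => [
        ['role' => 'user', 'content' => 'John Doe is 42 years old.'],
    ],
    // The schema is carried by the tool definition, not the prompt.
    'tools' => [
        [
            'name' => 'extract',
            'description' => 'Extract the relevant information',
            'input_schema' => $schema,
        ],
    ],
    // Pins the model to the "extract" tool, so its reply is structured tool input.
    'tool_choice' => ['type' => 'tool', 'name' => 'extract'],
];

echo json_encode($request, JSON_PRETTY_PRINT), \PHP_EOL;
```

Because `tool_choice` pins the call to `extract`, the response is expected to contain a `tool_use` content block whose `input` conforms to `input_schema`.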
@@ -61,7 +73,7 @@ public function get(
[
...$this->config->headers,
'anthropic-version' => '2023-06-01',
-'anthropic-beta' => 'tools-2024-04-04',
+'anthropic-beta' => 'tools-2024-05-16',
],
);

@@ -82,9 +94,16 @@ public function get(
$contentBlockDeltaType = shape([
'type' => literal_scalar('content_block_delta'),
'delta' => optional(
-shape([
-    'text' => optional(string()),
-], true)
+union(
+    shape([
+        'type' => literal_scalar('text_delta'),
+        'text' => string(),
+    ], true),
+    shape([
+        'type' => literal_scalar('input_json_delta'),
+        'partial_json' => string(),
+    ], true),
+)
),
'usage' => optional(
shape([
@@ -118,7 +137,7 @@ public function get(
$promptTokens = $data['usage']['input_tokens'] ?? $promptTokens;
$completionTokens = $data['usage']['output_tokens'] ?? $completionTokens;

-$content .= $data['delta']['text'] ?? '';
+$content .= $data['delta']['partial_json'] ?? '';

if ($content === $lastContent) {
// If the content hasn't changed, we stop
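On the parsing side, tool input arrives as `input_json_delta` events whose `partial_json` fragments are concatenated into the complete JSON document, which is why `$content` now accumulates `partial_json` instead of `text`. A rough sketch of that accumulation, using invented event payloads that follow Anthropic's documented streaming format:

```php
<?php

// Illustrative stream of decoded content_block_delta events (invented data).
$events = [
    ['type' => 'content_block_delta', 'delta' => ['type' => 'input_json_delta', 'partial_json' => '{"name": "Jo']],
    ['type' => 'content_block_delta', 'delta' => ['type' => 'input_json_delta', 'partial_json' => 'hn Doe", "a']],
    ['type' => 'content_block_delta', 'delta' => ['type' => 'input_json_delta', 'partial_json' => 'ge": 42}']],
];

$content = '';

foreach ($events as $event) {
    // Plain text generations use text_delta/text; forced tool input uses
    // input_json_delta/partial_json.
    $content .= $event['delta']['partial_json'] ?? ($event['delta']['text'] ?? '');
}

// Individual fragments are not valid JSON on their own; only the concatenation is.
print_r(json_decode($content, true));
// Array ( [name] => John Doe [age] => 42 )
```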
21 changes: 1 addition & 20 deletions src/LLM/Provider/Anthropic.php
@@ -7,8 +7,6 @@
use AdrienBrault\Instructrice\LLM\Cost;
use AdrienBrault\Instructrice\LLM\LLMConfig;

-use function Psl\Json\encode;
-
enum Anthropic: string implements ProviderModel
{
case CLAUDE3_HAIKU = 'claude-3-haiku-20240307';
@@ -23,24 +21,7 @@ public function getApiKeyEnvVar(): ?string
public function createConfig(string $apiKey): LLMConfig
{
$systemPrompt = function ($schema, string $prompt): string {
-$encodedSchema = encode($schema);
-
-return <<<PROMPT
-You are a helpful assistant that answers ONLY in JSON.
-<schema>
-{$encodedSchema}
-</schema>
-<instructions>
-{$prompt}
-</instructions>
-Reply with:
-```json
-{"...
-```
-PROMPT;
+return $prompt;
};

return new LLMConfig(
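Since the schema is now supplied through the tool's `input_schema` and the tool call is forced, the "answer ONLY in JSON" system prompt is no longer needed; the provider simply passes the user's instructions through. A trivial sketch of the simplified closure, with invented example arguments:

```php
<?php

// The schema argument is still accepted for interface compatibility, but it is
// no longer embedded in the prompt; it travels in the request's tools[0].input_schema.
$systemPrompt = function ($schema, string $prompt): string {
    return $prompt;
};

echo $systemPrompt(
    ['type' => 'object', 'properties' => ['name' => ['type' => 'string']]],
    'Extract the person described in the text.',
), \PHP_EOL;
// Output: Extract the person described in the text.
```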
