test: fix test case
imotai committed Oct 29, 2023
1 parent 37d35a6 commit 4016881
Showing 2 changed files with 12 additions and 11 deletions.
1 change: 1 addition & 0 deletions agent/src/og_agent/agent_api_server.py
@@ -5,6 +5,7 @@
 #
 # SPDX-License-Identifier: Elastic-2.0
 
+import sys
 import asyncio
 import uvicorn
 import json
22 changes: 11 additions & 11 deletions agent/tests/codellama_agent_tests.py
@@ -12,7 +12,7 @@
 import pytest
 
 from og_sdk.kernel_sdk import KernelSDK
-from og_agent.codellama_agent import CodellamaAgent
+from og_agent.llama_agent import LlamaAgent
 from og_proto.agent_server_pb2 import ProcessOptions, TaskResponse
 import asyncio
 import pytest_asyncio
@@ -56,13 +56,13 @@ def done(self):
         return False
 
 
-class CodellamaMockClient:
+class LlamaMockClient:
 
     def __init__(self, payloads):
         self.payloads = payloads
         self.index = 0
 
-    async def prompt(self, question, chat_history=[]):
+    async def chat(self, question, name, max_tokens=1024):
         if self.index >= len(self.payloads):
             raise StopAsyncIteration
         self.index += 1
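
Note: the renamed mock simply replays a fixed queue of JSON payloads and raises StopAsyncIteration once the queue is exhausted. A minimal, self-contained sketch of the same pattern (ReplayClient is a hypothetical stand-in; the diff truncates the real chat body, so the return path here is an assumption):

import asyncio
import json


class ReplayClient:
    """Hypothetical stand-in mirroring LlamaMockClient's replay behavior."""

    def __init__(self, payloads):
        self.payloads = payloads
        self.index = 0

    async def chat(self, question, name, max_tokens=1024):
        # Stop once every queued payload has been served, as in the diff above.
        if self.index >= len(self.payloads):
            raise StopAsyncIteration
        payload = self.payloads[self.index]
        self.index += 1
        # Returning the decoded payload is an assumption; the real body is
        # cut off in this view.
        return json.loads(payload)


async def demo():
    client = ReplayClient([json.dumps({"explanation": "done", "is_final_answer": True})])
    print(await client.chat("say hello", name="codellama"))


asyncio.run(demo())
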
@@ -90,8 +90,8 @@ async def test_codellama_agent_execute_bash_code(kernel_sdk):
         "language": "en",
         "is_final_answer": False,
     }
-    client = CodellamaMockClient([json.dumps(sentence1), json.dumps(sentence2)])
-    agent = CodellamaAgent(client, kernel_sdk)
+    client = LlamaMockClient([json.dumps(sentence1), json.dumps(sentence2)])
+    agent = LlamaAgent(client, kernel_sdk)
     task_opt = ProcessOptions(
         streaming=True,
         llm_name="codellama",
@@ -140,8 +140,8 @@ async def test_codellama_agent_execute_python_code(kernel_sdk):
         "language": "en",
         "is_final_answer": False,
     }
-    client = CodellamaMockClient([json.dumps(sentence1), json.dumps(sentence2)])
-    agent = CodellamaAgent(client, kernel_sdk)
+    client = LlamaMockClient([json.dumps(sentence1), json.dumps(sentence2)])
+    agent = LlamaAgent(client, kernel_sdk)
     task_opt = ProcessOptions(
         streaming=True,
         llm_name="codellama",
@@ -181,8 +181,8 @@ async def test_codellama_agent_show_demo_code(kernel_sdk):
         "language": "shell",
         "is_final_answer": True,
     }
-    client = CodellamaMockClient([json.dumps(sentence)])
-    agent = CodellamaAgent(client, kernel_sdk)
+    client = LlamaMockClient([json.dumps(sentence)])
+    agent = LlamaAgent(client, kernel_sdk)
     task_opt = ProcessOptions(
         streaming=True,
         llm_name="codellama",
@@ -217,8 +217,8 @@ async def test_codellama_agent_smoke_test(kernel_sdk):
         "language": "en",
         "is_final_answer": True,
     }
-    client = CodellamaMockClient([json.dumps(sentence)])
-    agent = CodellamaAgent(client, kernel_sdk)
+    client = LlamaMockClient([json.dumps(sentence)])
+    agent = LlamaAgent(client, kernel_sdk)
     task_opt = ProcessOptions(
         streaming=True,
         llm_name="codellama",
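
To verify the rename locally, one suggested check (not part of the commit) is to run just the updated module through pytest's Python entry point, using the path from the diff header above:

import sys
import pytest

# Run only the updated test module and exit with pytest's status code.
sys.exit(pytest.main(["agent/tests/codellama_agent_tests.py", "-v"]))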
