From 7f30ba845e9f364046b40db2a07663e564879cf6 Mon Sep 17 00:00:00 2001
From: "Ankush Pala ankush@lastmileai.dev" <>
Date: Wed, 31 Jan 2024 15:26:36 -0500
Subject: [PATCH] use async client openai

---
 python/src/aiconfig/default_parsers/openai.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/python/src/aiconfig/default_parsers/openai.py b/python/src/aiconfig/default_parsers/openai.py
index 7fc6a1bd7..31424059c 100644
--- a/python/src/aiconfig/default_parsers/openai.py
+++ b/python/src/aiconfig/default_parsers/openai.py
@@ -305,7 +305,7 @@ async def run_inference(
 
         completion_data["stream"] = stream
 
-        response = self.client.chat.completions.create(**completion_data)
+        response = await self.client.chat.completions.create(**completion_data)
         outputs = []
         if not stream:
             # # OpenAI>1.0.0 uses pydantic models for response
@@ -397,7 +397,7 @@ async def run_inference(
                 )
             )
         return prompt.outputs
-    
+
     def initialize_openai_client(self) -> None:
         """
         Initializes the client to be used with the OpenAI Module.
@@ -407,7 +407,7 @@ def initialize_openai_client(self) -> None:
            openai_api_key = get_api_key_from_environment(
                "OPENAI_API_KEY"
            ).unwrap()
-            self.client = openai.Client(api_key=openai_api_key)
+            self.client = openai.AsyncOpenAI(api_key=openai_api_key)
 
     def get_prompt_template(
         self, prompt: Prompt, aiconfig: "AIConfigRuntime"
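
Note on the change: the parser now constructs openai.AsyncOpenAI instead of the synchronous openai.Client, so the chat completion call inside the async run_inference coroutine is awaited rather than blocking the event loop. Below is a minimal, self-contained sketch of that usage pattern against the openai>=1.0 Python SDK; the main() coroutine and model name are illustrative placeholders, not code from the parser.

import asyncio

import openai


async def main() -> None:
    # AsyncOpenAI reads OPENAI_API_KEY from the environment when api_key is
    # omitted; the parser passes api_key explicitly in initialize_openai_client().
    client = openai.AsyncOpenAI()

    # Unlike openai.Client, the async client's create() returns a coroutine,
    # so it must be awaited; calling the sync client here (the pre-patch code)
    # would block the event loop inside an async coroutine.
    response = await client.chat.completions.create(
        model="gpt-3.5-turbo",  # illustrative model name, not from the parser
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(response.choices[0].message.content)


asyncio.run(main())

When stream=True is passed, the awaited call instead returns an AsyncStream that is consumed with "async for chunk in response", so downstream code that previously iterated the sync stream with a plain for loop needs the matching adjustment.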