wip exception handling
jonathanlastmileai committed Jan 10, 2024
1 parent 78fd3e1 commit aef38d3
Showing 1 changed file with 10 additions and 2 deletions.
python/src/aiconfig/editor/server/server.py: 12 changes (10 additions & 2 deletions)
@@ -234,7 +234,11 @@ def generate(cancellation_token_id: str):  # type: ignore
         # Use multi-threading so that we don't block run command from
         # displaying the streamed output (if streaming is supported)
         def run_async_config_in_thread():
-            asyncio.run(aiconfig.run(prompt_name=prompt_name, params=params, run_with_dependencies=False, options=inference_options))  # type: ignore
+            try:
+                asyncio.run(aiconfig.run(prompt_name=prompt_name, params=params, run_with_dependencies=False, options=inference_options))  # type: ignore
+            except Exception as e:
+                output_text_queue.put(e)
+
             output_text_queue.put(STOP_STREAMING_SIGNAL)  # type: ignore

         def create_cancellation_payload():
@@ -307,7 +311,11 @@ def kill_thread(thread_id: int | None):
                 yield from handle_cancellation()
                 return

-            if isinstance(text, str):
+            if isinstance(text, Exception):
+                yield "["
+                yield json.dumps({"message": "exception", "output": str(text)})
+                yield "]"
+            elif isinstance(text, str):
                 accumulated_output_text += text
             elif isinstance(text, dict) and "content" in text:
                 # TODO: Fix streaming output format so that it returns text
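The change follows a producer/consumer pattern: the worker thread puts any exception raised by asyncio.run(aiconfig.run(...)) onto the same queue as the streamed text, and still enqueues the stop signal afterward, so the generator side can check isinstance(text, Exception) and emit a bracketed JSON error payload instead of blocking forever on a stop signal that never arrives. Below is a minimal standalone sketch of that pattern, with illustrative names (run_model, stream_run, and a module-level STOP_STREAMING_SIGNAL sentinel) rather than the server's actual code:

import asyncio
import json
import queue
import threading

STOP_STREAMING_SIGNAL = object()  # sentinel marking end of stream


async def run_model() -> None:
    # Stand-in for aiconfig.run(...); raises to simulate a failing model call.
    raise RuntimeError("model call failed")


def stream_run():
    output_text_queue: "queue.Queue[object]" = queue.Queue()

    def run_async_config_in_thread() -> None:
        try:
            asyncio.run(run_model())
        except Exception as e:
            # Forward the error to the consumer instead of losing it in the thread.
            output_text_queue.put(e)
        # Always enqueue the stop signal so the consumer does not block forever.
        output_text_queue.put(STOP_STREAMING_SIGNAL)

    threading.Thread(target=run_async_config_in_thread).start()

    while True:
        text = output_text_queue.get()
        if text is STOP_STREAMING_SIGNAL:
            break
        if isinstance(text, Exception):
            # Same shape as the diff: a one-element JSON array with the error message.
            yield "["
            yield json.dumps({"message": "exception", "output": str(text)})
            yield "]"
        elif isinstance(text, str):
            yield text


if __name__ == "__main__":
    print("".join(stream_run()))

Running the sketch prints [{"message": "exception", "output": "model call failed"}], the same bracketed JSON shape the diff yields back to the editor client when the run fails.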
