Fix issue where cached JSON responses weren't being parsed (#8)
1 parent b191c11 · commit b08f3d7
Showing 2 changed files with 40 additions and 1 deletion.
@@ -0,0 +1,37 @@
# Copyright (c) 2024 Microsoft Corporation. All rights reserved.

"""An LLM that unpacks cached JSON responses."""

import json
from typing import cast

from typing_extensions import Unpack

from graphrag.llm.types import (
    LLM,
    CompletionInput,
    CompletionLLM,
    CompletionOutput,
    LLMInput,
    LLMOutput,
)


class JsonParsingLLM(LLM[CompletionInput, CompletionOutput]):
    """An LLM wrapper that parses JSON from a delegate's responses."""

    _delegate: CompletionLLM

    def __init__(self, delegate: CompletionLLM):
        self._delegate = delegate

    async def __call__(
        self,
        input: CompletionInput,
        **kwargs: Unpack[LLMInput],
    ) -> LLMOutput[CompletionOutput]:
        """Call the LLM with the input and kwargs."""
        result = await self._delegate(input, **kwargs)
        # A cached response comes back with the raw text in `output` but with
        # `json` unset; parse it here when the caller asked for JSON output.
        if kwargs.get("json") and result.json is None and result.output is not None:
            result.json = cast(dict, json.loads(result.output))
        return result
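
To show how the fix behaves at call time, here is a minimal, self-contained sketch of the same wrapping pattern. FakeOutput, cached_delegate, and JsonParsingWrapper are stand-in names invented for illustration, not graphrag's actual CompletionLLM/LLMOutput types; the point is that a cache hit hands back raw text with the parsed `json` field still unset, and the wrapper fills it in whenever the caller passed json=True.

# Minimal sketch of the delegate-wrapping pattern, using stand-in types
# rather than graphrag's real LLM classes (assumed names, illustration only).
import asyncio
import json
from dataclasses import dataclass


@dataclass
class FakeOutput:
    """Stand-in for an LLM result: raw text plus an optional parsed dict."""
    output: str | None = None
    json: dict | None = None


async def cached_delegate(prompt: str, **kwargs) -> FakeOutput:
    # Simulates a cache hit: the raw JSON text is returned,
    # but the parsed `json` field was never populated.
    return FakeOutput(output='{"title": "example", "score": 0.9}')


class JsonParsingWrapper:
    """Wraps a delegate and parses `output` into `json` when requested."""

    def __init__(self, delegate):
        self._delegate = delegate

    async def __call__(self, prompt: str, **kwargs) -> FakeOutput:
        result = await self._delegate(prompt, **kwargs)
        # Mirror of the committed check: only parse when JSON was requested
        # and the delegate (e.g. the cache) did not already provide it.
        if kwargs.get("json") and result.json is None and result.output is not None:
            result.json = json.loads(result.output)
        return result


async def main() -> None:
    llm = JsonParsingWrapper(cached_delegate)
    result = await llm("summarize", json=True)
    print(result.json)  # {'title': 'example', 'score': 0.9}


asyncio.run(main())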