Commit bf32bf0

ran pre-commit2
MarianoMolina committed May 3, 2024
1 parent 2dd6b14 · commit bf32bf0
Showing 2 changed files with 394 additions and 3 deletions.
12 changes: 9 additions & 3 deletions autogen/agentchat/conversable_agent.py
@@ -1163,7 +1163,9 @@ def _reflection_with_llm_as_summary(sender, recipient, summary_args):
         if role and not isinstance(role, str):
             raise ValueError("The summary_role in summary_arg must be a string.")
         try:
-            summary = sender._reflection_with_llm(prompt, msg_list, llm_agent=agent, cache=summary_args.get("cache"), role=role)
+            summary = sender._reflection_with_llm(
+                prompt, msg_list, llm_agent=agent, cache=summary_args.get("cache"), role=role
+            )
         except BadRequestError as e:
             warnings.warn(
                 f"Cannot extract summary using reflection_with_llm: {e}. Using an empty str as summary.", UserWarning
@@ -1172,7 +1174,12 @@ def _reflection_with_llm_as_summary(sender, recipient, summary_args):
         return summary

     def _reflection_with_llm(
-        self, prompt, messages, llm_agent: Optional[Agent] = None, cache: Optional[AbstractCache] = None, role: Union[str, None] = None
+        self,
+        prompt,
+        messages,
+        llm_agent: Optional[Agent] = None,
+        cache: Optional[AbstractCache] = None,
+        role: Union[str, None] = None,
     ) -> str:
         """Get a chat summary using reflection with an llm client based on the conversation history.
@@ -1193,7 +1200,6 @@ def _reflection_with_llm(
             }
         ]

-
         messages = messages + system_msg
         if llm_agent and llm_agent.client is not None:
             llm_client = llm_agent.client
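
Read together, the hunks outline a simple pattern: append the summarization prompt as one extra chat message, then ask the agent's client for a completion. A hedged sketch of that pattern — not autogen's actual implementation — assuming an OpenAIWrapper-style client exposing create and extract_text_or_completion_object, and a "system" default role:

    def reflection_with_llm_sketch(llm_client, prompt, messages, role=None):
        # Assumed default: fall back to "system" when no summary_role is supplied.
        role = role or "system"
        # Append the summarization instruction as one extra chat message.
        system_msg = [{"role": role, "content": prompt}]
        messages = messages + system_msg
        # Ask the client for a completion over the augmented history.
        response = llm_client.create(messages=messages)
        # extract_text_or_completion_object returns a list; take the first entry.
        return llm_client.extract_text_or_completion_object(response)[0]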