This repository has been archived by the owner on Mar 1, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 735
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
c848946
commit 2ace64c
Showing
10 changed files
with
236 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
# Gradio Chat With Your LlamaIndex Agent | ||
|
||
Create a LlamaIndex Agent (i.e., `BaseAgent`) and quickly chat with it using | ||
this pack's Gradio Chatbot interface. | ||
|
||
<p align="center"> | ||
<figure> | ||
<img src="chat-with-your-agent-dark.png" width="75%" /> | ||
<figcaption>Dark mode</figcaption> | ||
</figure> | ||
</p> | ||
<p align="center"> | ||
<figure> | ||
<img src="chat-with-your-agent-light.png" width="75%" /> | ||
<figcaption>Light mode</figcaption> | ||
</figure> | ||
</p> | ||
|
||
## Usage | ||
|
||
You can download the pack to a `./gradio_agent_chat_pack` directory: | ||
|
||
`sample.py` | ||
```python | ||
from llama_index.llama_packs import download_llama_pack | ||
from llama_index.agent import OpenAIAgent | ||
from llama_index.llms import OpenAI | ||
from llama_index.tools import FunctionTool | ||
|
||
|
||
def add(a: int, b: int) -> int: | ||
"""Add two integers and returns the result integer""" | ||
return a + b | ||
|
||
|
||
def multiply(a: int, b: int) -> int: | ||
"""Multiple two integers and returns the result integer""" | ||
return a * b | ||
|
||
|
||
multiply_tool = FunctionTool.from_defaults(fn=multiply) | ||
add_tool = FunctionTool.from_defaults(fn=add) | ||
|
||
# Works with any BaseAgent | ||
agent = OpenAIAgent.from_tools( | ||
tools=[multiply_tool, add_tool], | ||
llm=OpenAI(model="gpt-3.5-turbo-1106"), | ||
verbose=True # thoughts are displayed in the Gradio interface! | ||
) | ||
|
||
# download and install dependencies | ||
GradioAgentChatPack = download_llama_pack( | ||
"GradioAgentChatPack", "./gradio_agent_chat_pack" | ||
) | ||
|
||
gradio_agent_chat_pack = GradioAgentChatPack(agent=agent) | ||
|
||
if __name__ == "__main__": | ||
gradio_agent_chat_pack.run() | ||
``` | ||
|
||
From here, you can use the pack, or inspect and modify the pack in `./gradio_agent_chat_pack`. | ||
|
||
The `run()` function is a light wrapper around `gr.Blocks.launch()`. To run the | ||
app directly, run in your terminal: | ||
|
||
```bash | ||
export OPENAI_API_KEY="sk-..." | ||
python ./gradio_agent_chat/sample.py | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
from llama_hub.llama_packs.gradio_agent_chat.base import GradioAgentChatPack | ||
|
||
__all__ = ["GradioAgentChatPack"] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,149 @@ | ||
from typing import Dict, Any, List, Tuple | ||
|
||
from llama_index.llama_pack.base import BaseLlamaPack | ||
from llama_index.agent.types import BaseAgent | ||
|
||
from io import StringIO | ||
import sys | ||
|
||
|
||
class Capturing(list):
    """Context manager that collects everything printed to stdout.

    Entering swaps ``sys.stdout`` for an in-memory buffer; on exit the
    buffered text is split into lines and appended onto this list, and the
    real stdout is restored. Used to surface the verbose thoughts printed
    by ``BaseAgent.stream_chat`` when ``verbose=True``.

    Adapted from https://stackoverflow.com/questions/16571150/\
    how-to-capture-stdout-output-from-a-python-function-call
    """

    def __enter__(self):
        # Remember the real stdout so it can be restored on exit.
        self._saved_stdout = sys.stdout
        self._buffer = StringIO()
        sys.stdout = self._buffer
        return self

    def __exit__(self, *exc_info):
        captured = self._buffer.getvalue()
        self.extend(captured.splitlines())
        # Drop the buffer so the captured text is not held in memory twice.
        del self._buffer
        sys.stdout = self._saved_stdout
|
||
|
||
class GradioAgentChatPack(BaseLlamaPack):
    """Gradio chatbot to chat with your own Agent.

    Presents a two-panel Gradio app: a chat window streaming the agent's
    replies, and an HTML "console" panel showing the agent's verbose stdout
    (e.g. ReAct-style thoughts) rendered from ANSI escape codes to HTML.
    """

    def __init__(
        self,
        agent: BaseAgent,
        **kwargs: Any,
    ) -> None:
        """Init params.

        Args:
            agent: Any ``BaseAgent``; its ``stream_chat`` drives the chat
                window, and anything it prints is shown in the console panel.
            **kwargs: Accepted for pack-interface compatibility; unused here.

        Raises:
            ImportError: If the optional ``ansi2html`` dependency is missing.
        """
        try:
            from ansi2html import Ansi2HTMLConverter
        except ImportError:
            raise ImportError("Please install ansi2html via `pip install ansi2html`")

        self.agent = agent
        # NOTE(review): never written after init — looks like a placeholder
        # for captured thoughts; verify before relying on it.
        self.thoughts = ""
        # Converts ANSI color codes in captured stdout into HTML markup.
        self.conv = Ansi2HTMLConverter()

    def get_modules(self) -> Dict[str, Any]:
        """Get modules."""
        return {"agent": self.agent}

    def _handle_user_message(self, user_message, history):
        """Handle the user submitted message. Clear message box, and append
        to the history."""
        # Returns (new_textbox_value, new_history): empty string clears the
        # input box; the new pair's empty second slot is filled in later by
        # _generate_response as tokens stream in.
        return "", history + [(user_message, "")]

    def _generate_response(
        self, chat_history: List[Tuple[str, str]]
    ) -> Tuple[str, List[Tuple[str, str]]]:
        """Generate the response from agent, and capture the stdout of the
        ReActAgent's thoughts.
        """
        # Capture anything the agent prints (its verbose "thoughts") while
        # starting the streaming chat on the latest user message.
        with Capturing() as output:
            response = self.agent.stream_chat(chat_history[-1][0])
        ansi = "\n========\n".join(output)
        html_output = self.conv.convert(ansi)
        # NOTE(review): mutates the last history pair in place — this assumes
        # Gradio hands the Chatbot value back as lists (tuples would raise
        # TypeError on item assignment); confirm against the Gradio version.
        for token in response.response_gen:
            chat_history[-1][1] += token
            # Yield after every token so the chat window updates live.
            yield chat_history, str(html_output)

    def _reset_chat(self) -> Tuple[str, str]:
        """Reset the agent's chat history. And clear all dialogue boxes."""
        # clear agent history
        self.agent.reset()
        # One empty string per output component: message box, chat window,
        # and the HTML console.
        return "", "", ""  # clear textboxes

    def run(self, *args: Any, **kwargs: Any) -> Any:
        """Run the pipeline.

        Builds the themed ``gr.Blocks`` app, wires the message/clear events,
        and launches the server.
        """
        import gradio as gr
        from gradio.themes.utils import fonts, colors, sizes

        # LlamaIndex-branded theme: purple/pink gradient buttons over the
        # stock Soft theme.
        llama_theme = gr.themes.Soft(
            primary_hue=colors.purple,
            secondary_hue=colors.pink,
            neutral_hue=colors.gray,
            spacing_size=sizes.spacing_md,
            radius_size=sizes.radius_md,
            text_size=sizes.text_lg,
            font=(
                fonts.GoogleFont("Quicksand"),
                "ui-sans-serif",
                "sans-serif",
            ),
            font_mono=(
                fonts.GoogleFont("IBM Plex Mono"),
                "ui-monospace",
                "monospace",
            ),
        )
        llama_theme.set(
            body_background_fill="#FFFFFF",
            body_background_fill_dark="#000000",
            button_primary_background_fill="linear-gradient(90deg, *primary_300, *secondary_400)",
            button_primary_background_fill_hover="linear-gradient(90deg, *primary_200, *secondary_300)",
            button_primary_text_color="white",
            button_primary_background_fill_dark="linear-gradient(90deg, *primary_600, *secondary_800)",
            slider_color="*secondary_300",
            slider_color_dark="*secondary_600",
            block_title_text_weight="600",
            block_border_width="3px",
            block_shadow="*shadow_drop_lg",
            button_shadow="*shadow_drop_lg",
            button_large_padding="32px",
        )

        # #box keeps the thoughts console scrollable at a fixed height.
        demo = gr.Blocks(
            theme=llama_theme,
            css="#box { height: 420px; overflow-y: scroll !important} #logo { align-self: right }",
        )
        with demo:
            with gr.Row():
                gr.Markdown(
                    "# Gradio Chat With Your Agent Powered by LlamaIndex and LlamaHub 🦙\n"
                    "This Gradio app allows you to chat with your own agent (`BaseAgent`).\n"
                )
                gr.Markdown(
                    "[![Alt text](https://d3ddy8balm3goa.cloudfront.net/other/llama-index-light-transparent-sm-font.svg)](https://llamaindex.ai)",
                    elem_id="logo",
                )
            with gr.Row():
                chat_window = gr.Chatbot(
                    label="Message History",
                    scale=3,
                )
                # HTML panel that shows the agent's captured verbose output.
                console = gr.HTML(elem_id="box")
            with gr.Row():
                message = gr.Textbox(label="Write A Message", scale=4)
                clear = gr.ClearButton()

            # Two-step chain: first append the user's message (and clear the
            # textbox), then stream the agent's response into the chat window
            # and the console.
            message.submit(
                self._handle_user_message,
                [message, chat_window],
                [message, chat_window],
                queue=False,
            ).then(
                self._generate_response,
                chat_window,
                [chat_window, console],
            )
            clear.click(self._reset_chat, None, [message, chat_window, console])

        # NOTE(review): binds to all interfaces on a fixed port — fine for a
        # demo, but confirm before exposing beyond localhost.
        demo.launch(server_name="0.0.0.0", server_port=8080)
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added
BIN
+154 KB
llama_hub/llama_packs/gradio_agent_chat/chat-with-your-agent-light.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
llama-hub | ||
gradio | ||
ansi2html |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
from llama_hub.llama_packs.gradio_react_agent_chatbot.base import GradioReActAgentPack | ||
|
||
__all__ = ["GradioReActAgentPack"] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters