From de34c17cda518f880bf9ffc5a021a508b5a30e71 Mon Sep 17 00:00:00 2001 From: imotai Date: Mon, 18 Sep 2023 00:44:23 +0800 Subject: [PATCH] fix: use syntax to output terminal --- agent/setup.py | 2 +- chat/setup.py | 2 +- chat/src/octopus_terminal/markdown.py | 2 +- chat/src/octopus_terminal/terminal_chat.py | 37 +++++++++++-------- clean_sandbox.sh | 6 ++- docker/Dockerfile_for_agent | 4 +- docker/Dockerfile_for_kernel | 4 +- kernel/setup.py | 2 +- .../server/kernel_rpc_server.py | 14 +++++-- proto/setup.py | 2 +- setup/compose.yaml | 8 ++-- setup/octopus_up.sh | 22 +++++++---- 12 files changed, 65 insertions(+), 40 deletions(-) diff --git a/agent/setup.py b/agent/setup.py index f38c951..c61f152 100644 --- a/agent/setup.py +++ b/agent/setup.py @@ -19,7 +19,7 @@ setup( name="octopus_agent", - version="0.3.3", + version="0.3.6", description="Open source code interpreter agent for LLM", author="imotai", author_email="wangtaize@dbpunk.com", diff --git a/chat/setup.py b/chat/setup.py index 2df3f79..0107285 100644 --- a/chat/setup.py +++ b/chat/setup.py @@ -19,7 +19,7 @@ setup( name="octopus_chat", - version="0.3.3", + version="0.3.6", description="the chat client for open source code interpreter octopus", author="imotai", author_email="wangtaize@dbpunk.com", diff --git a/chat/src/octopus_terminal/markdown.py b/chat/src/octopus_terminal/markdown.py index 36cc60a..10c978f 100644 --- a/chat/src/octopus_terminal/markdown.py +++ b/chat/src/octopus_terminal/markdown.py @@ -43,7 +43,7 @@ def __rich_console__( syntax = Syntax( code, self.lexer_name, - background_color="default", + # background_color="default", line_numbers=True, theme=self.theme, word_wrap=True, diff --git a/chat/src/octopus_terminal/terminal_chat.py b/chat/src/octopus_terminal/terminal_chat.py index 371197b..2721def 100644 --- a/chat/src/octopus_terminal/terminal_chat.py +++ b/chat/src/octopus_terminal/terminal_chat.py @@ -177,12 +177,14 @@ def handle_action_output(segments, respond, images, values): output = respond.on_agent_action_end.output if not output: return - mk = output - markdown = Markdown(mk) - images.extend(respond.on_agent_action_end.output_files) + syntax = Syntax( + mk, + line_numbers=True, # background_color="default" + ) + # images.extend(respond.on_agent_action_end.output_files) values.append(("text", mk, [])) - segments.append((len(values) - 1, markdown)) + segments.append((len(values) - 1, syntax)) def handle_action_start(segments, respond, images, values): @@ -200,7 +202,9 @@ def handle_action_start(segments, respond, images, values): values.append(("text", explanation, [])) segments.append((len(values) - 1, markdown)) syntax = Syntax( - arguments["code"], "python", line_numbers=True, background_color="default" + arguments["code"], + "python", + line_numbers=True, # background_color="default" ) values.append( ("python", arguments["code"], arguments.get("saved_filenames", [])) @@ -246,11 +250,14 @@ def handle_final_answer(segments, respond, values): def render_image(images, sdk, image_dir, console): image_set = set(images) for image in image_set: - sdk.download_file(image, image_dir) - fullpath = "%s/%s" % (image_dir, image) - pil_image = Image.open(fullpath) - auto_image = AutoImage(image=pil_image, width=int(pil_image.size[0] / 8)) - print(f"{auto_image:1.1#}") + try: + sdk.download_file(image, image_dir) + fullpath = "%s/%s" % (image_dir, image) + pil_image = Image.open(fullpath) + auto_image = AutoImage(image=pil_image, width=int(pil_image.size[0] / 20)) + print(f"{auto_image:1.1#}") + except Exception as ex: 
+ pass def run_chat( @@ -261,12 +268,12 @@ def run_chat( """ segments = [] images = [] - with Live(Group(*segments), console=console) as live: + token_usage = 0 + iteration = 0 + model_name = "" + with Live(Group(*segments), console=console, vertical_overflow="visible") as live: spinner = Spinner("dots", style="status.spinner", speed=1.0, text="") refresh(live, segments, spinner) - token_usage = 0 - iteration = 0 - model_name = "" for respond in sdk.prompt(prompt): if not respond: break @@ -343,7 +350,7 @@ def query_apps(sdk, console): app.language, datetime.fromtimestamp(app.ctime).strftime("%m/%d/%Y"), ) - console.print(app_table) + console.print(Panel(app_table, title=OCTOPUS_APP_TITLE, title_align="left")) def assemble_app(sdk, name, numbers, values): diff --git a/clean_sandbox.sh b/clean_sandbox.sh index 17af4cf..f05bd09 100644 --- a/clean_sandbox.sh +++ b/clean_sandbox.sh @@ -6,5 +6,9 @@ # Distributed under terms of the MIT license. # - +WORKDIR=`pwd` ps -eu | grep python3 | grep -v grep | awk '{print $2}' | while read line; do kill -9 $line; done +cd ${WORKDIR}/proto && test -e dist && rm -rf dist +cd ${WORKDIR}/agent && test -e dist && rm -rf dist +cd ${WORKDIR}/chat && test -e dist && rm -rf dist +cd ${WORKDIR}/kernel && test -e dist && rm -rf dist diff --git a/docker/Dockerfile_for_agent b/docker/Dockerfile_for_agent index db140bc..42d70b5 100644 --- a/docker/Dockerfile_for_agent +++ b/docker/Dockerfile_for_agent @@ -1,5 +1,5 @@ FROM imotai/octopus_base:0.3.3 WORKDIR /agent VOLUME /agent -ADD start_agent.sh /agent/ -CMD ["bash", "/agent/start_agent.sh"] +ADD start_agent.sh /bin/ +CMD ["bash", "/bin/start_agent.sh"] diff --git a/docker/Dockerfile_for_kernel b/docker/Dockerfile_for_kernel index b72d0d9..9b6d970 100644 --- a/docker/Dockerfile_for_kernel +++ b/docker/Dockerfile_for_kernel @@ -1,5 +1,5 @@ FROM imotai/octopus_base:0.3.3 WORKDIR /kernel VOLUME /kernel -ADD start_kernel.sh /kernel/ -CMD ["bash", "/kernel/start_kernel.sh"] +ADD start_kernel.sh /bin/ +CMD ["bash", "/bin/start_kernel.sh"] diff --git a/kernel/setup.py b/kernel/setup.py index d1aed85..d6c8e57 100644 --- a/kernel/setup.py +++ b/kernel/setup.py @@ -19,7 +19,7 @@ setup( name="octopus_kernel", - version="0.3.3", + version="0.3.6", description="Open source code interpreter agent for LLM", author="imotai", author_email="wangtaize@dbpunk.com", diff --git a/kernel/src/octopus_kernel/server/kernel_rpc_server.py b/kernel/src/octopus_kernel/server/kernel_rpc_server.py index 34ada55..dc15a86 100644 --- a/kernel/src/octopus_kernel/server/kernel_rpc_server.py +++ b/kernel/src/octopus_kernel/server/kernel_rpc_server.py @@ -265,10 +265,18 @@ def _build_payload(self, msg, workspace): }, ) else: - keys = ",".join(msg["content"]["data"].keys()) - raise Exception( - f"unsupported display data type {keys} for the result {msg}" + logger.warning(f" unsupported display_data {msg}") + return ( + "result", + { + "data": msg["content"]["data"], + "msg_type": msg["msg_type"], + }, ) + # keys = ",".join(msg["content"]["data"].keys()) + # raise Exception( + # f"unsupported display data type {keys} for the result {msg}" + # ) if msg["msg_type"] == "execute_result": logger.debug("result data %s", msg["content"]["data"]["text/plain"]) diff --git a/proto/setup.py b/proto/setup.py index 9852838..319a8a4 100644 --- a/proto/setup.py +++ b/proto/setup.py @@ -19,7 +19,7 @@ setup( name="octopus_proto", - version="0.3.3", + version="0.3.6", description="Open source code interpreter agent for LLM", author="imotai", 
author_email="wangtaize@dbpunk.com", diff --git a/setup/compose.yaml b/setup/compose.yaml index ea5c2e9..dd0fe83 100644 --- a/setup/compose.yaml +++ b/setup/compose.yaml @@ -1,15 +1,15 @@ services: kernel: - image: imotai/octopus_kernel:0.3.4 + image: imotai/octopus_kernel:0.3.5 ports: - '9527:9527' volumes: - - ./app/kernel:/kernel + - ./kernel:/kernel agent: - image: imotai/octopus_agent:0.3.4 + image: imotai/octopus_agent:0.3.5 ports: - '9528:9528' volumes: - - ./app/agent:/agent + - ./agent:/agent depends_on: - kernel diff --git a/setup/octopus_up.sh b/setup/octopus_up.sh index 08103ff..2368029 100644 --- a/setup/octopus_up.sh +++ b/setup/octopus_up.sh @@ -58,9 +58,8 @@ function start_unsafe_local_instance() { function start_docker_local_instance() { ROOT_DIR=$1 - COMPOSE_DIR="$(dirname "$ROOT_DIR")" - cd $COMPOSE_DIR && docker compose up -d - sleep 3 + cd $ROOT_DIR && docker compose up -d + sleep 4 AGENT_RPC_KEY=$(cat ${ROOT_DIR}/agent/.env | grep admin_key | cut -d "=" -f 2) KERNEL_RPC_KEY=$(cat ${ROOT_DIR}/kernel/.env | grep rpc_key | cut -d "=" -f 2) octopus_agent_setup --kernel_endpoint=127.0.0.1:9527 --kernel_api_key=${KERNEL_RPC_KEY} --agent_endpoint=127.0.0.1:9528 --admin_key=${AGENT_RPC_KEY} @@ -143,16 +142,15 @@ function generate_common_env() { mkdir -p ${ROOT_DIR}/kernel/config echo "config_root_path=${ROOT_DIR}/kernel/config" >${ROOT_DIR}/kernel/.env echo "workspace=${ROOT_DIR}/kernel/ws" >>${ROOT_DIR}/kernel/.env - echo "rpc_host=127.0.0.1" >>${ROOT_DIR}/kernel/.env + echo "rpc_host=0.0.0.0" >>${ROOT_DIR}/kernel/.env echo "rpc_port=9527" >>${ROOT_DIR}/kernel/.env echo "rpc_key=${KERNEL_KEY}" >>${ROOT_DIR}/kernel/.env - echo "rpc_host=127.0.0.1" >${ROOT_DIR}/agent/.env + echo "rpc_host=0.0.0.0" >${ROOT_DIR}/agent/.env echo "rpc_port=9528" >>${ROOT_DIR}/agent/.env echo "admin_key=${AGENT_ADMIN_KEY}" >>${ROOT_DIR}/agent/.env echo "max_file_size=10240000" >>${ROOT_DIR}/agent/.env echo "verbose=True" >>${ROOT_DIR}/agent/.env - echo "db_path=${ROOT_DIR}/agent/octopus.db" >>${ROOT_DIR}/agent/.env echo "install the octopus" echo "✅ Install octopus to dir ${ROOT_DIR} done" if [ -f $HOME/.octopus/config ]; then @@ -215,16 +213,19 @@ function start_unsafe_local() { case $opt in "OpenAI") install_unsafe_local_openai ${ROOT_DIR} + echo "db_path=${ROOT_DIR}/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_unsafe_local_instance ${ROOT_DIR} exit 0 ;; "Azure OpenAI") install_unsafe_local_azure_openai ${ROOT_DIR} + echo "db_path=${ROOT_DIR}/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_unsafe_local_instance ${ROOT_DIR} exit 0 ;; "Codellama") install_unsafe_local_codellama ${ROOT_DIR} + echo "db_path=${ROOT_DIR}/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_unsafe_local_instance ${ROOT_DIR} exit 0 ;; @@ -237,10 +238,11 @@ function start_unsafe_local() { } function start_docker_local() { - ROOT_DIR="./app" + CDIR=`pwd` + ROOT_DIR="$CDIR/app" if [ -f ${ROOT_DIR}/agent/.env ]; then echo "✅ You have setup the environment, the dir is ${ROOT_DIR}" - start_unsafe_local_instance ${ROOT_DIR} + start_docker_local_instance ${ROOT_DIR} exit 0 fi read -p 'Please specify the install folder(default:./app): ' new_dir @@ -248,6 +250,7 @@ function start_docker_local() { ROOT_DIR=${new_dir} fi mkdir -p ${ROOT_DIR} + cp compose.yaml ${ROOT_DIR} if [ $? 
-eq 0 ]; then echo "✅ Create octopus app dir ${ROOT_DIR} done " else @@ -261,16 +264,19 @@ function start_docker_local() { case $opt in "OpenAI") install_unsafe_local_openai ${ROOT_DIR} + echo "db_path=/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_docker_local_instance ${ROOT_DIR} exit 0 ;; "Azure OpenAI") install_unsafe_local_azure_openai ${ROOT_DIR} + echo "db_path=/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_docker_local_instance ${ROOT_DIR} exit 0 ;; "Codellama") install_unsafe_local_codellama ${ROOT_DIR} + echo "db_path=/agent/octopus.db" >>${ROOT_DIR}/agent/.env start_docker_local_instance ${ROOT_DIR} exit 0 ;;
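
A minimal standalone sketch of the rich primitives this patch adopts — Syntax output with line numbers, a Panel-wrapped Table (as in query_apps), and a Live display with vertical_overflow="visible". It assumes only the third-party "rich" package; the sample output, table rows, and "Octopus Apps" title are placeholders, not values from the patch. rich's Syntax takes a lexer name as its second positional argument, so the sketch passes "text" explicitly for plain output.

    from rich.console import Console, Group
    from rich.live import Live
    from rich.panel import Panel
    from rich.syntax import Syntax
    from rich.table import Table

    console = Console()

    # Agent output rendered as a Syntax block with line numbers; a lexer name
    # is required, so plain text output uses the "text" lexer here.
    output = "total 8\n-rw-r--r-- 1 user user 42 Sep 18 00:44 notes.txt"
    syntax = Syntax(output, "text", line_numbers=True, word_wrap=True)

    # App listing wrapped in a Panel, mirroring the query_apps change.
    table = Table(show_header=True)
    table.add_column("Name")
    table.add_column("Language")
    table.add_row("demo_app", "python")  # placeholder row
    panel = Panel(table, title="Octopus Apps", title_align="left")

    # vertical_overflow="visible" keeps long output from being cropped to the
    # terminal height while the Live display is updating.
    with Live(Group(syntax, panel), console=console, vertical_overflow="visible") as live:
        live.refresh()

Leaving background_color unset, as the commented-out arguments in the hunks suggest, lets Syntax fall back to the theme's own background rather than forcing the terminal default.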