
Commit

fix lint
dorren002 committed Feb 12, 2025
1 parent 1551ed1 commit 4be1ada
Showing 7 changed files with 16 additions and 15 deletions.
9 changes: 5 additions & 4 deletions examples/multimodal_chatbot_online.py
@@ -2,12 +2,13 @@
 
 # Before running, set the environment variable:
 #
-# 1. `export LAZYLLM_GLM_API_KEY=xxxx`: the API key of Zhipu AI, you need to set `source="glm"` and `model="glm-4v-flash"`.
-#    You can apply for the API key at https://open.bigmodel.cn/
+# 1. `export LAZYLLM_GLM_API_KEY=xxxx`: the API key of Zhipu AI, you need to set `source="glm"` and
+#    `model="glm-4v-flash"`. You can apply for the API key at https://open.bigmodel.cn/
 #    Also supports other API keys:
 #    - LAZYLLM_OPENAI_API_KEY: the API key of OpenAI, set `source="openai"` and `model="gpt-4o-mini"`.
 #      You can apply for the API key at https://openai.com/index/openai-api/
-#    - LAZYLLM_KIMI_API_KEY: the API key of Moonshot AI, set `source="kimi"` and `model="moonshot-v1-8k-vision-preview"`.
+#    - LAZYLLM_KIMI_API_KEY: the API key of Moonshot AI, set `source="kimi"` and
+#      `model="moonshot-v1-8k-vision-preview"`.
 #      You can apply for the API key at https://platform.moonshot.cn/console
 #    - LAZYLLM_QWEN_API_KEY: the API key of Alibaba Cloud, set `source="qwen"` and `model="qwenvl-max"`.
 #      You can apply for the API key at https://home.console.aliyun.com/
@@ -19,4 +20,4 @@
 chat = lazyllm.OnlineChatModule(source="glm", model="glm-4v-flash")
 
 if __name__ == '__main__':
-    lazyllm.WebModule(chat, port=range(23466, 23470), files_target=chat).start().wait()
\ No newline at end of file
+    lazyllm.WebModule(chat, port=range(23466, 23470), files_target=chat).start().wait()
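
The example itself is unchanged apart from the reflowed comments and the restored trailing newline. As a usage sketch (assuming the matching API key is exported, per the comments above), swapping providers only means changing the constructor arguments:

import lazyllm

# e.g. `export LAZYLLM_QWEN_API_KEY=xxxx` must be set before running this variant.
chat = lazyllm.OnlineChatModule(source="qwen", model="qwenvl-max")

if __name__ == '__main__':
    # Same wiring as the example: serve the chat module on the first free port in the range.
    lazyllm.WebModule(chat, port=range(23466, 23470), files_target=chat).start().wait()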
2 changes: 1 addition & 1 deletion lazyllm/components/utils/file_operate.py
@@ -42,7 +42,7 @@ def delete_old_files(directory):
                 LOG.info(f"Deleted: {dir_path}")
             except Exception as e:
                 LOG.error(f"Error deleting directory {dir_path}: {e}")
-
+
 def image_to_base64(directory):
     try:
         with open(directory, 'rb') as f:
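
The hunk is cut off inside image_to_base64. Judging by how the tests below call it (image_base64, mime = image_to_base64(path)), a minimal sketch of a helper with that contract might look like the following; it is an assumed reconstruction, not the repository's exact body:

import base64
import mimetypes

def image_to_base64(directory):
    # Returns (base64_string, mime_type) for the image file at `directory`.
    try:
        with open(directory, 'rb') as f:
            encoded = base64.b64encode(f.read()).decode('utf-8')
        mime, _ = mimetypes.guess_type(directory)
        return encoded, mime
    except Exception:
        return None, None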
8 changes: 4 additions & 4 deletions lazyllm/module/onlineChatModule/onlineChatModuleBase.py
@@ -273,7 +273,7 @@ def forward(self, __input: Union[Dict, str] = None, *, llm_chat_history: List[Li
 
         if len(self._model_optional_params) > 0:
             data.update(self._model_optional_params)
-
+
         if isinstance(__input, str) and __input.startswith(LAZYLLM_QUERY_PREFIX):
             for idx, message in enumerate(data["messages"]):
                 content = message["content"]
@@ -434,13 +434,13 @@ def _start_for_deployment():
                 raise ValueError(f"Deployment task {deployment_id} failed")
             lazyllm.LOG.info(f"deployment {deployment_id} finished")
         return Pipeline(_start_for_deployment)
-
+
     def _format_vl_chat_query(self, query: str):
         return [{"type": "text", "text": query}]
-
+
     def _format_vl_chat_image_url(self, image_url: str, mime: str) -> List[Dict[str, str]]:
         return [{"type": "image_url", "image_url": {"url": image_url}}]
-
+
     # for online vlm
     def _format_input_with_files(self, query_files: str) -> List[Dict[str, str]]:
         if isinstance(query_files, str):
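
_format_vl_chat_query and _format_vl_chat_image_url each return a list with one OpenAI-style content part. Assuming the (truncated) _format_input_with_files concatenates them into a single user message, the combined payload would look roughly like this; the literal values are illustrative:

text_part = [{"type": "text", "text": "What is in this picture?"}]
image_part = [{"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}}]

message = {"role": "user", "content": text_part + image_part}
# message == {"role": "user", "content": [
#     {"type": "text", "text": "What is in this picture?"},
#     {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
# ]}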
4 changes: 2 additions & 2 deletions lazyllm/module/onlineChatModule/qwenModule.py
@@ -295,8 +295,8 @@ def _query_deployment(self, deployment_id) -> str:
 
         status = r.json()["output"]['status']
         return status
-
+
     def _format_vl_chat_image_url(self, image_url, mime):
         assert mime is not None, "Qwen Module requires mime info."
-        image_url = f"data:{mime};base64,{image_url}"
+        image_url = f"data:{mime};base64,{image_url}"
         return [{"type": "image_url", "image_url": {"url": image_url}}]
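
The Qwen override requires a mime type because it inlines the image as a data URL rather than passing a link through. A worked example of the value it builds (payload truncated, illustrative only):

mime = "image/jpeg"
image_b64 = "/9j/4AAQSkZJRg..."  # illustrative, truncated base64 payload
image_url = f"data:{mime};base64,{image_b64}"
# -> [{"type": "image_url", "image_url": {"url": "data:image/jpeg;base64,/9j/4AAQSkZJRg..."}}]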
2 changes: 1 addition & 1 deletion lazyllm/module/onlineChatModule/sensenovaModule.py
@@ -242,7 +242,7 @@ def _query_deployment(self, deployment_id) -> str:
 
         status = r.json()["job"]['status']
         return status
-
+
     def _format_vl_chat_image_url(self, image_url, mime):
         if image_url.startswith("http"):
             return [{"type": "image_url", "image_url": image_url}]
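
In contrast to the Qwen override above, SenseNova passes http(s) URLs through untouched; the hunk is truncated before the non-URL branch. A sketch of the visible logic only, with the missing branch left explicit rather than guessed:

def format_image_for_sensenova(image_url, mime):
    # Hypothetical free-function mirror of the visible branch.
    if image_url.startswith("http"):
        return [{"type": "image_url", "image_url": image_url}]
    # The base64 handling is cut off in this diff, so it is not reconstructed here.
    raise NotImplementedError("non-URL input handling truncated in the hunk")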
2 changes: 1 addition & 1 deletion lazyllm/tools/rag/doc_node.py
@@ -246,7 +246,7 @@ def do_embedding(self, embed: Dict[str, Callable]) -> None:
         self.embedding = {**self.embedding, **generate_embed}
 
     def get_content(self, metadata_mode=MetadataMode.LLM) -> str:
-        if metadata_mode==MetadataMode.LLM:
+        if metadata_mode == MetadataMode.LLM:
             return Image.open(self._image_path)
         elif metadata_mode == MetadataMode.EMBED:
             image_base64, mime = image_to_base64(self._image_path)
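
With the comparison spacing fixed, get_content dispatches on MetadataMode: the LLM branch returns a PIL image, while the EMBED branch (truncated here) starts from image_to_base64. A minimal standalone sketch of that dispatch, with a stand-in enum and an assumed data-URL return for the EMBED case:

from enum import Enum, auto

from PIL import Image

class MetadataMode(Enum):  # stand-in for lazyllm's enum, illustrative only
    LLM = auto()
    EMBED = auto()

def get_content(image_path, metadata_mode=MetadataMode.LLM):
    if metadata_mode == MetadataMode.LLM:
        return Image.open(image_path)
    elif metadata_mode == MetadataMode.EMBED:
        image_base64, mime = image_to_base64(image_path)  # helper from file_operate.py above
        return f"data:{mime};base64,{image_base64}"       # assumed return shape; the hunk is truncated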
4 changes: 2 additions & 2 deletions tests/advanced_tests/standard_test/test_deploy.py
@@ -106,11 +106,11 @@ def test_cross_modal_embedding(self):
         image_path = os.path.join(lazyllm.config['data_path'], "ci_data/ji.jpg")
         image_base64, mime = image_to_base64(image_path)
         image_base64 = f'data:{mime};base64,{image_base64}'
-        res = m(image_url, modality = 'image')
+        res = m(image_url, modality='image')
         assert len(json.loads(res)) == 1152
         res = m([image_url], modality='image')
         assert len(json.loads(res)) == 1
-        res = m([image_url, image_base64], modality = 'image')
+        res = m([image_url, image_base64], modality='image')
         assert len(json.loads(res)) == 2
 
     def test_sd3(self):
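
The two replaced calls are pure PEP 8 fixes (flake8's E251: no spaces around '=' in keyword arguments); behavior and assertions are untouched:

res = m(image_url, modality = 'image')  # E251: unexpected spaces around keyword equals
res = m(image_url, modality='image')    # compliant form used after this commit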
