From 9f91c2d05c21c7fea604a88a0974679a07293c81 Mon Sep 17 00:00:00 2001
From: suruiqiang
Date: Sun, 9 Feb 2025 16:52:46 +0800
Subject: [PATCH 1/5] fix avatar for export message preview and saved image

---
 app/components/exporter.tsx | 37 ++++++++++++++-----------------------
 1 file changed, 14 insertions(+), 23 deletions(-)

diff --git a/app/components/exporter.tsx b/app/components/exporter.tsx
index 79ae87be2d2..69a73062ace 100644
--- a/app/components/exporter.tsx
+++ b/app/components/exporter.tsx
@@ -23,7 +23,6 @@ import CopyIcon from "../icons/copy.svg";
 import LoadingIcon from "../icons/three-dots.svg";
 import ChatGptIcon from "../icons/chatgpt.png";
 import ShareIcon from "../icons/share.svg";
-import BotIcon from "../icons/bot.png";
 import DownloadIcon from "../icons/download.svg";
 
 import { useEffect, useMemo, useRef, useState } from "react";
@@ -33,13 +32,13 @@ import dynamic from "next/dynamic";
 import NextImage from "next/image";
 
 import { toBlob, toPng } from "html-to-image";
-import { DEFAULT_MASK_AVATAR } from "../store/mask";
 
 import { prettyObject } from "../utils/format";
 import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
 import { getClientConfig } from "../config/client";
 import { type ClientApi, getClientApi } from "../client/api";
 import { getMessageTextContent } from "../utils";
+import { MaskAvatar } from "./mask";
 import clsx from "clsx";
 
 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
@@ -407,22 +406,6 @@ export function PreviewActions(props: {
   );
 }
 
-function ExportAvatar(props: { avatar: string }) {
-  if (props.avatar === DEFAULT_MASK_AVATAR) {
-    return (
-      <img
-        src={BotIcon.src}
-        width={30}
-        height={30}
-        alt="bot"
-        className="user-avatar"
-      />
-    );
-  }
-
-  return <Avatar avatar={props.avatar} />;
-}
-
 export function ImagePreviewer(props: {
   messages: ChatMessage[];
   topic: string;
@@ -546,9 +529,12 @@ export function ImagePreviewer(props: {
           github.com/ChatGPTNextWeb/ChatGPT-Next-Web
         </div>
         <div className={styles["icons"]}>
-          <ExportAvatar avatar={config.avatar} />
+          <MaskAvatar avatar={config.avatar} />
           &
-          <ExportAvatar avatar={mask.avatar} />
+          <MaskAvatar
+            avatar={mask.avatar}
+            model={mask.modelConfig.model}
+          />
@@ -576,9 +562,14 @@ export function ImagePreviewer(props: {
                 key={i}
               >
                 <div className={styles["avatar"]}>
-                  <ExportAvatar
-                    avatar={m.role === "user" ? config.avatar : mask.avatar}
-                  />
+                  {m.role === "user" ? (
+                    <Avatar avatar={config.avatar} />
+                  ) : (
+                    <MaskAvatar
+                      avatar={mask.avatar}
+                      model={m.model || mask.modelConfig.model}
+                    />
+                  )}
                 </div>
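
Reviewer note (not part of the patch): the ExportAvatar helper deleted above always fell back to the static bot.png whenever the mask still used the default avatar, so exported previews never showed model-specific icons. MaskAvatar routes that case through the model-aware Avatar component instead; a rough paraphrase of its definition in app/components/mask.tsx follows (reconstructed from memory, so prop names may differ slightly):

import { Avatar } from "./emoji";
import { DEFAULT_MASK_AVATAR } from "../store/mask";
import { ModelType } from "../store";

export function MaskAvatar(props: { avatar: string; model?: ModelType }) {
  // A user-picked emoji avatar wins; otherwise render the icon for the model.
  return props.avatar !== DEFAULT_MASK_AVATAR ? (
    <Avatar avatar={props.avatar} />
  ) : (
    <Avatar model={props.model} />
  );
}

With that fallback in place, the two hunks above only need to pass avatar (and, for assistant messages, model) instead of branching on DEFAULT_MASK_AVATAR themselves.
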
From 0bfc6480855640032ec3593960b434fc5e1c1de5 Mon Sep 17 00:00:00 2001 From: Shenghang Tsai Date: Sun, 9 Feb 2025 18:47:57 +0800 Subject: [PATCH 2/5] fix model icon on siliconflow --- app/components/emoji.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx index ecb1c65819e..19fb1400eaa 100644 --- a/app/components/emoji.tsx +++ b/app/components/emoji.tsx @@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) { LlmIcon = BotIconGemma; } else if (modelName.startsWith("claude")) { LlmIcon = BotIconClaude; - } else if (modelName.startsWith("llama")) { + } else if (modelName.includes("llama")) { LlmIcon = BotIconMeta; } else if (modelName.startsWith("mixtral")) { LlmIcon = BotIconMistral; - } else if (modelName.startsWith("deepseek")) { + } else if (modelName.includes("deepseek")) { LlmIcon = BotIconDeepseek; } else if (modelName.startsWith("moonshot")) { LlmIcon = BotIconMoonshot; @@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) { } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) { LlmIcon = BotIconDoubao; } else if ( - modelName.startsWith("glm") || + modelName.includes("glm") || modelName.startsWith("cogview-") || modelName.startsWith("cogvideox-") ) { From 18fa2cc30d96fbb452efd9226db7ca6021cacb3e Mon Sep 17 00:00:00 2001 From: Shenghang Tsai Date: Sun, 9 Feb 2025 18:49:26 +0800 Subject: [PATCH 3/5] fix model icon on siliconflow --- app/components/emoji.tsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx index 19fb1400eaa..1bf39ac1d77 100644 --- a/app/components/emoji.tsx +++ b/app/components/emoji.tsx @@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) { LlmIcon = BotIconGemma; } else if (modelName.startsWith("claude")) { LlmIcon = BotIconClaude; - } else if (modelName.includes("llama")) { + } else if (modelName.toLowerCase().includes("llama")) { LlmIcon = BotIconMeta; } else if (modelName.startsWith("mixtral")) { LlmIcon = BotIconMistral; - } else if (modelName.includes("deepseek")) { + } else if (modelName.toLowerCase().includes("deepseek")) { LlmIcon = BotIconDeepseek; } else if (modelName.startsWith("moonshot")) { LlmIcon = BotIconMoonshot; @@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) { } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) { LlmIcon = BotIconDoubao; } else if ( - modelName.includes("glm") || + modelName.toLowerCase().includes("glm") || modelName.startsWith("cogview-") || modelName.startsWith("cogvideox-") ) { From 2137aa65bfaeda33bdbfad7f1ae36bfdde8c9edf Mon Sep 17 00:00:00 2001 From: Shenghang Tsai Date: Mon, 10 Feb 2025 11:03:49 +0800 Subject: [PATCH 4/5] Model listing of SiliconFlow --- app/client/platforms/siliconflow.ts | 44 +++++++++++++++++++++++++++-- app/constant.ts | 1 + 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/app/client/platforms/siliconflow.ts b/app/client/platforms/siliconflow.ts index 1ad316a6143..8cf9ad3b1c2 100644 --- a/app/client/platforms/siliconflow.ts +++ b/app/client/platforms/siliconflow.ts @@ -5,6 +5,7 @@ import { SILICONFLOW_BASE_URL, SiliconFlow, REQUEST_TIMEOUT_MS_FOR_THINKING, + DEFAULT_MODELS, } from "@/app/constant"; import { useAccessStore, @@ -27,10 +28,19 @@ import { getMessageTextContentWithoutThinking, } from "@/app/utils"; import { RequestPayload } from "./openai"; + 
 import { fetch } from "@/app/utils/stream";
+export interface SiliconFlowListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
 export class SiliconflowApi implements LLMApi {
-  private disableListModels = true;
+  private disableListModels = false;
 
   path(path: string): string {
     const accessStore = useAccessStore.getState();
@@ -238,6 +248,36 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async models(): Promise<LLMModel[]> {
-    return [];
+    if (this.disableListModels) {
+      return DEFAULT_MODELS.slice();
+    }
+
+    const res = await fetch(this.path(SiliconFlow.ListModelPath), {
+      method: "GET",
+      headers: {
+        ...getHeaders(),
+      },
+    });
+
+    const resJson = (await res.json()) as SiliconFlowListModelResponse;
+    const chatModels = resJson.data;
+    console.log("[Models]", chatModels);
+
+    if (!chatModels) {
+      return [];
+    }
+
+    let seq = 1000; // keep consistent with the ordering in Constant.ts
+    return chatModels.map((m) => ({
+      name: m.id,
+      available: true,
+      sorted: seq++,
+      provider: {
+        id: "siliconflow",
+        providerName: "SiliconFlow",
+        providerType: "siliconflow",
+        sorted: 14,
+      },
+    }));
   }
 }
diff --git a/app/constant.ts b/app/constant.ts
index 09eec44b68d..5d0640d1cdd 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -258,6 +258,7 @@ export const ChatGLM = {
 export const SiliconFlow = {
   ExampleEndpoint: SILICONFLOW_BASE_URL,
   ChatPath: "v1/chat/completions",
+  ListModelPath: "v1/models?&sub_type=chat",
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang

From 86f86962fb0725b888cee6ebd9eb9f818a0c9cee Mon Sep 17 00:00:00 2001
From: Shenghang Tsai
Date: Mon, 10 Feb 2025 13:37:48 +0800
Subject: [PATCH 5/5] Support VLM on SiliconFlow

---
 app/client/platforms/siliconflow.ts | 8 ++++++--
 app/constant.ts                     | 1 +
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/app/client/platforms/siliconflow.ts b/app/client/platforms/siliconflow.ts
index 1ad316a6143..17650a9c69b 100644
--- a/app/client/platforms/siliconflow.ts
+++ b/app/client/platforms/siliconflow.ts
@@ -13,7 +13,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { streamWithThink } from "@/app/utils/chat";
+import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -25,6 +25,7 @@ import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
+  isVisionModel,
 } from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
@@ -71,13 +72,16 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       if (v.role === "assistant") {
         const content = getMessageTextContentWithoutThinking(v);
         messages.push({ role: v.role, content });
       } else {
-        const content = getMessageTextContent(v);
+        const content = visionModel
+          ? await preProcessImageContent(v.content)
+          : getMessageTextContent(v);
         messages.push({ role: v.role, content });
       }
     }
diff --git a/app/constant.ts b/app/constant.ts
index 09eec44b68d..d9cb62bf934 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -462,6 +462,7 @@ export const VISION_MODEL_REGEXES = [
   /gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
   /^dall-e-3$/, // Matches exactly "dall-e-3"
   /glm-4v/,
+  /vl/i,
 ];
 
 export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
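
Reviewer note (not part of the patches): the end-to-end effect of PATCH 4 is easiest to see with a concrete round trip. The sketch below is illustrative only; the response shape just mirrors the SiliconFlowListModelResponse interface added above, and the model ids are examples rather than a claim about what the endpoint actually returns.

type ListModelResponse = {
  object: string;
  data: Array<{ id: string; object: string; root: string }>;
};

// A made-up /v1/models?&sub_type=chat payload in the assumed shape.
const sample: ListModelResponse = {
  object: "list",
  data: [
    { id: "deepseek-ai/DeepSeek-V3", object: "model", root: "deepseek-ai/DeepSeek-V3" },
    { id: "Qwen/Qwen2-VL-72B-Instruct", object: "model", root: "Qwen/Qwen2-VL-72B-Instruct" },
  ],
};

// models() numbers listed entries from 1000 so they sort after the built-in
// DEFAULT_MODELS, and tags each entry with the SiliconFlow provider.
let seq = 1000;
const mapped = sample.data.map((m) => ({
  name: m.id,
  available: true,
  sorted: seq++,
  provider: {
    id: "siliconflow",
    providerName: "SiliconFlow",
    providerType: "siliconflow",
    sorted: 14,
  },
}));

console.log(mapped.map((m) => `${m.sorted}: ${m.name}`));
// -> [ "1000: deepseek-ai/DeepSeek-V3", "1001: Qwen/Qwen2-VL-72B-Instruct" ]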
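
PATCH 5 then builds on that listing for vision support: any model whose name matches the new /vl/i entry in VISION_MODEL_REGEXES is treated as a vision model. A minimal check of that matching, assuming isVisionModel simply tests the model name against the regex list (the sample names are again illustrative):

const VL_REGEX = /vl/i;

console.log(VL_REGEX.test("Qwen/Qwen2-VL-72B-Instruct")); // true  -> vision model
console.log(VL_REGEX.test("deepseek-ai/DeepSeek-V3"));    // false -> text-only model

// For matching models, chat() now routes non-assistant messages through
// preProcessImageContent() so image parts are preserved; text-only models keep
// using getMessageTextContent() exactly as before.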