diff --git a/app/client/platforms/siliconflow.ts b/app/client/platforms/siliconflow.ts
index 1ad316a6143..029e5245a8e 100644
--- a/app/client/platforms/siliconflow.ts
+++ b/app/client/platforms/siliconflow.ts
@@ -5,6 +5,7 @@ import {
   SILICONFLOW_BASE_URL,
   SiliconFlow,
   REQUEST_TIMEOUT_MS_FOR_THINKING,
+  DEFAULT_MODELS,
 } from "@/app/constant";
 import {
   useAccessStore,
@@ -13,7 +14,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { streamWithThink } from "@/app/utils/chat";
+import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -25,12 +26,22 @@ import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
+  isVisionModel,
 } from "@/app/utils";
 import { RequestPayload } from "./openai";
+
 import { fetch } from "@/app/utils/stream";
 
+export interface SiliconFlowListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
+
 export class SiliconflowApi implements LLMApi {
-  private disableListModels = true;
+  private disableListModels = false;
 
   path(path: string): string {
     const accessStore = useAccessStore.getState();
@@ -71,13 +82,16 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       if (v.role === "assistant") {
         const content = getMessageTextContentWithoutThinking(v);
         messages.push({ role: v.role, content });
       } else {
-        const content = getMessageTextContent(v);
+        const content = visionModel
+          ? await preProcessImageContent(v.content)
+          : getMessageTextContent(v);
         messages.push({ role: v.role, content });
       }
     }
@@ -238,6 +252,36 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async models(): Promise<LLMModel[]> {
-    return [];
+    if (this.disableListModels) {
+      return DEFAULT_MODELS.slice();
+    }
+
+    const res = await fetch(this.path(SiliconFlow.ListModelPath), {
+      method: "GET",
+      headers: {
+        ...getHeaders(),
+      },
+    });
+
+    const resJson = (await res.json()) as SiliconFlowListModelResponse;
+    const chatModels = resJson.data;
+    console.log("[Models]", chatModels);
+
+    if (!chatModels) {
+      return [];
+    }
+
+    let seq = 1000; // keep ordering consistent with constant.ts
+    return chatModels.map((m) => ({
+      name: m.id,
+      available: true,
+      sorted: seq++,
+      provider: {
+        id: "siliconflow",
+        providerName: "SiliconFlow",
+        providerType: "siliconflow",
+        sorted: 14,
+      },
+    }));
   }
 }
diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx
index ecb1c65819e..1bf39ac1d77 100644
--- a/app/components/emoji.tsx
+++ b/app/components/emoji.tsx
@@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
     LlmIcon = BotIconGemma;
   } else if (modelName.startsWith("claude")) {
     LlmIcon = BotIconClaude;
-  } else if (modelName.startsWith("llama")) {
+  } else if (modelName.toLowerCase().includes("llama")) {
     LlmIcon = BotIconMeta;
   } else if (modelName.startsWith("mixtral")) {
     LlmIcon = BotIconMistral;
-  } else if (modelName.startsWith("deepseek")) {
+  } else if (modelName.toLowerCase().includes("deepseek")) {
     LlmIcon = BotIconDeepseek;
   } else if (modelName.startsWith("moonshot")) {
     LlmIcon = BotIconMoonshot;
@@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
   } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
     LlmIcon = BotIconDoubao;
   } else if (
-    modelName.startsWith("glm") ||
+    modelName.toLowerCase().includes("glm") ||
     modelName.startsWith("cogview-") ||
     modelName.startsWith("cogvideox-")
   ) {
diff --git
a/app/components/exporter.tsx b/app/components/exporter.tsx index 79ae87be2d2..69a73062ace 100644 --- a/app/components/exporter.tsx +++ b/app/components/exporter.tsx @@ -23,7 +23,6 @@ import CopyIcon from "../icons/copy.svg"; import LoadingIcon from "../icons/three-dots.svg"; import ChatGptIcon from "../icons/chatgpt.png"; import ShareIcon from "../icons/share.svg"; -import BotIcon from "../icons/bot.png"; import DownloadIcon from "../icons/download.svg"; import { useEffect, useMemo, useRef, useState } from "react"; @@ -33,13 +32,13 @@ import dynamic from "next/dynamic"; import NextImage from "next/image"; import { toBlob, toPng } from "html-to-image"; -import { DEFAULT_MASK_AVATAR } from "../store/mask"; import { prettyObject } from "../utils/format"; import { EXPORT_MESSAGE_CLASS_NAME } from "../constant"; import { getClientConfig } from "../config/client"; import { type ClientApi, getClientApi } from "../client/api"; import { getMessageTextContent } from "../utils"; +import { MaskAvatar } from "./mask"; import clsx from "clsx"; const Markdown = dynamic(async () => (await import("./markdown")).Markdown, { @@ -407,22 +406,6 @@ export function PreviewActions(props: { ); } -function ExportAvatar(props: { avatar: string }) { - if (props.avatar === DEFAULT_MASK_AVATAR) { - return ( - bot - ); - } - - return ; -} - export function ImagePreviewer(props: { messages: ChatMessage[]; topic: string; @@ -546,9 +529,12 @@ export function ImagePreviewer(props: { github.com/ChatGPTNextWeb/ChatGPT-Next-Web
- + & - +
@@ -576,9 +562,14 @@ export function ImagePreviewer(props: { key={i} >
- + {m.role === "user" ? ( + + ) : ( + + )}
diff --git a/app/constant.ts b/app/constant.ts
index 09eec44b68d..72219d93278 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -258,6 +258,7 @@ export const ChatGLM = {
 export const SiliconFlow = {
   ExampleEndpoint: SILICONFLOW_BASE_URL,
   ChatPath: "v1/chat/completions",
+  ListModelPath: "v1/models?sub_type=chat",
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
@@ -462,6 +463,7 @@ export const VISION_MODEL_REGEXES = [
   /gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
   /^dall-e-3$/, // Matches exactly "dall-e-3"
   /glm-4v/,
+  /vl/i,
 ];
 
 export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];