From 97030e27f88ac27a9ab4bcc2484f7d8e83d29d04 Mon Sep 17 00:00:00 2001 From: JustSong Date: Sun, 17 Dec 2023 23:30:45 +0800 Subject: [PATCH] fix: fix gemini panic (close #833) --- controller/relay-gemini.go | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/controller/relay-gemini.go b/controller/relay-gemini.go index 4c2daba9b9..2458458e15 100644 --- a/controller/relay-gemini.go +++ b/controller/relay-gemini.go @@ -114,7 +114,7 @@ func requestOpenAI2Gemini(textRequest GeneralOpenAIRequest) *GeminiChatRequest { Role: "model", Parts: []GeminiPart{ { - Text: "ok", + Text: "Okay", }, }, }) @@ -130,6 +130,16 @@ type GeminiChatResponse struct { PromptFeedback GeminiChatPromptFeedback `json:"promptFeedback"` } +func (g *GeminiChatResponse) GetResponseText() string { + if g == nil { + return "" + } + if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 { + return g.Candidates[0].Content.Parts[0].Text + } + return "" +} + type GeminiChatCandidate struct { Content GeminiChatContent `json:"content"` FinishReason string `json:"finishReason"` @@ -158,10 +168,13 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse Index: i, Message: Message{ Role: "assistant", - Content: candidate.Content.Parts[0].Text, + Content: "", }, FinishReason: stopFinishReason, } + if len(candidate.Content.Parts) > 0 { + choice.Message.Content = candidate.Content.Parts[0].Text + } fullTextResponse.Choices = append(fullTextResponse.Choices, choice) } return &fullTextResponse @@ -169,9 +182,7 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *ChatCompletionsStreamResponse { var choice ChatCompletionsStreamResponseChoice - if len(geminiResponse.Candidates) > 0 && len(geminiResponse.Candidates[0].Content.Parts) > 0 { - choice.Delta.Content = geminiResponse.Candidates[0].Content.Parts[0].Text - } + choice.Delta.Content = geminiResponse.GetResponseText() choice.FinishReason = &stopFinishReason var response ChatCompletionsStreamResponse response.Object = "chat.completion.chunk" @@ -276,7 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo }, nil } fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse) - completionTokens := countTokenText(geminiResponse.Candidates[0].Content.Parts[0].Text, model) + completionTokens := countTokenText(geminiResponse.GetResponseText(), model) usage := Usage{ PromptTokens: promptTokens, CompletionTokens: completionTokens,