From 3875b141c644c910bd0aa9bcb7b92b0c2682a020 Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Fri, 19 Jul 2024 17:16:20 +0800
Subject: [PATCH] fix: gemini stream finish reason (close #378)

---
 relay/channel/gemini/relay-gemini.go |  7 +++++--
 service/relay.go                     | 28 ++++++++++++++++++++++++++++
 service/usage_helpr.go               | 12 ------------
 3 files changed, 33 insertions(+), 14 deletions(-)

diff --git a/relay/channel/gemini/relay-gemini.go b/relay/channel/gemini/relay-gemini.go
index 45dfbb9b..7890b83e 100644
--- a/relay/channel/gemini/relay-gemini.go
+++ b/relay/channel/gemini/relay-gemini.go
@@ -198,7 +198,6 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *dto.Ch
             choice.Delta.SetContentString(respFirst.Text)
         }
     }
-    choice.FinishReason = &relaycommon.StopFinishReason
     var response dto.ChatCompletionsStreamResponse
     response.Object = "chat.completion.chunk"
     response.Model = "gemini"
@@ -247,10 +246,14 @@ func geminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycom
             common.LogError(c, err.Error())
         }
     }
+
+    response := service.GenerateStopResponse(id, createAt, info.UpstreamModelName, relaycommon.StopFinishReason)
+    service.ObjectData(c, response)
+
     usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
 
     if info.ShouldIncludeUsage {
-        response := service.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
+        response = service.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
         err := service.ObjectData(c, response)
         if err != nil {
             common.SysError("send final response failed: " + err.Error())
diff --git a/service/relay.go b/service/relay.go
index 03b005c3..924e0bbe 100644
--- a/service/relay.go
+++ b/service/relay.go
@@ -7,6 +7,7 @@ import (
     "github.com/gin-gonic/gin"
     "net/http"
     "one-api/common"
+    "one-api/dto"
 )
 
 func SetEventStreamHeaders(c *gin.Context) {
@@ -45,3 +46,30 @@ func GetResponseID(c *gin.Context) string {
     logID := c.GetString("X-Oneapi-Request-Id")
     return fmt.Sprintf("chatcmpl-%s", logID)
 }
+
+func GenerateStopResponse(id string, createAt int64, model string, finishReason string) *dto.ChatCompletionsStreamResponse {
+    return &dto.ChatCompletionsStreamResponse{
+        Id:                id,
+        Object:            "chat.completion.chunk",
+        Created:           createAt,
+        Model:             model,
+        SystemFingerprint: nil,
+        Choices: []dto.ChatCompletionsStreamResponseChoice{
+            {
+                FinishReason: &finishReason,
+            },
+        },
+    }
+}
+
+func GenerateFinalUsageResponse(id string, createAt int64, model string, usage dto.Usage) *dto.ChatCompletionsStreamResponse {
+    return &dto.ChatCompletionsStreamResponse{
+        Id:                id,
+        Object:            "chat.completion.chunk",
+        Created:           createAt,
+        Model:             model,
+        SystemFingerprint: nil,
+        Choices:           make([]dto.ChatCompletionsStreamResponseChoice, 0),
+        Usage:             &usage,
+    }
+}
diff --git a/service/usage_helpr.go b/service/usage_helpr.go
index adec566d..d2fa1022 100644
--- a/service/usage_helpr.go
+++ b/service/usage_helpr.go
@@ -25,18 +25,6 @@ func ResponseText2Usage(responseText string, modeName string, promptTokens int)
     return usage, err
 }
 
-func GenerateFinalUsageResponse(id string, createAt int64, model string, usage dto.Usage) *dto.ChatCompletionsStreamResponse {
-    return &dto.ChatCompletionsStreamResponse{
-        Id:                id,
-        Object:            "chat.completion.chunk",
-        Created:           createAt,
-        Model:             model,
-        SystemFingerprint: nil,
-        Choices:           make([]dto.ChatCompletionsStreamResponseChoice, 0),
-        Usage:             &usage,
-    }
-}
-
 func ValidUsage(usage *dto.Usage) bool {
     return usage != nil && (usage.PromptTokens != 0 || usage.CompletionTokens != 0)
 }
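
Reviewer note, not part of the patch: below is a minimal sketch of how the new GenerateStopResponse helper added in service/relay.go could be covered by a unit test in the service package. The test name and the id/timestamp/model arguments are illustrative assumptions, not code from this change.

    // Illustrative sketch only: checks that GenerateStopResponse emits a
    // chat.completion.chunk carrying the given finish reason.
    package service

    import "testing"

    func TestGenerateStopResponse(t *testing.T) {
        finish := "stop" // stands in for relaycommon.StopFinishReason used by the handler
        resp := GenerateStopResponse("chatcmpl-test", 1721380580, "gemini-1.5-pro", finish)

        if resp.Object != "chat.completion.chunk" {
            t.Fatalf("unexpected object: %s", resp.Object)
        }
        if len(resp.Choices) != 1 || resp.Choices[0].FinishReason == nil || *resp.Choices[0].FinishReason != finish {
            t.Fatalf("stop chunk missing finish reason")
        }
    }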