From f9960eb5b6ae5b48d80aac2d27fcd216a279457c Mon Sep 17 00:00:00 2001
From: adolphzhang
Date: Sun, 10 Nov 2024 22:40:51 +0800
Subject: [PATCH] fix: only mock the fetch function when necessary.

---
 dist/buildinfo.json           |  2 +-
 dist/index.js                 | 13 +++++++------
 dist/timestamp                |  2 +-
 src/agent/openai.ts           |  9 +++++----
 src/telegram/utils/md2tgmd.ts |  1 -
 5 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/dist/buildinfo.json b/dist/buildinfo.json
index 9ec93cdc..73f12219 100644
--- a/dist/buildinfo.json
+++ b/dist/buildinfo.json
@@ -1 +1 @@
-{"sha":"7ba419d","timestamp":1731244126}
\ No newline at end of file
+{"sha":"77a9802","timestamp":1731249631}
\ No newline at end of file
diff --git a/dist/index.js b/dist/index.js
index 7901f03a..c843824d 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -401,8 +401,8 @@ const ENV_KEY_MAPPER = {
   WORKERS_AI_MODEL: "WORKERS_CHAT_MODEL"
 };
 class Environment extends EnvironmentConfig {
-  BUILD_TIMESTAMP = 1731244126;
-  BUILD_VERSION = "7ba419d";
+  BUILD_TIMESTAMP = 1731249631;
+  BUILD_VERSION = "77a9802";
   I18N = loadI18n();
   PLUGINS_ENV = {};
   USER_CONFIG = createAgentUserConfig();
@@ -16405,15 +16405,16 @@ class OpenAI extends OpenAIBase {
     return context.OPENAI_API_BASE;
   };
   request = async (params, context, onStream) => {
+    const userMessage = params.messages.at(-1);
+    const originalModel = this.model(context, userMessage);
+    const transformedModel = this.transformModel(originalModel, context);
     const provider = createOpenAI({
       baseURL: context.OPENAI_API_BASE,
       apiKey: this.apikey(context),
       compatibility: "strict",
-      fetch: this.fetch
+      fetch: originalModel === transformedModel ? void 0 : this.fetch
     });
-    const userMessage = params.messages.at(-1);
-    const originalModel = this.model(context, userMessage);
-    const languageModelV1 = provider.languageModel(this.transformModel(originalModel, context), void 0);
+    const languageModelV1 = provider.languageModel(transformedModel, void 0);
     const { messages, onStream: newOnStream } = this.extraHandle(originalModel, params.messages, context, onStream);
     return requestChatCompletionsV2(await warpLLMParams({
       model: languageModelV1,
diff --git a/dist/timestamp b/dist/timestamp
index e0a5d972..0127d5c6 100644
--- a/dist/timestamp
+++ b/dist/timestamp
@@ -1 +1 @@
-1731244126
\ No newline at end of file
+1731249631
\ No newline at end of file
diff --git a/src/agent/openai.ts b/src/agent/openai.ts
index 96d44fa2..f9e47ab0 100644
--- a/src/agent/openai.ts
+++ b/src/agent/openai.ts
@@ -49,16 +49,17 @@ export class OpenAI extends OpenAIBase implements ChatAgent {
     };
 
     readonly request = async (params: LLMChatParams, context: AgentUserConfig, onStream: ChatStreamTextHandler | null): Promise<ResponseMessage[]> => {
+        const userMessage = params.messages.at(-1) as CoreUserMessage;
+        const originalModel = this.model(context, userMessage);
+        const transformedModel = this.transformModel(originalModel, context);
         const provider = createOpenAI({
             baseURL: context.OPENAI_API_BASE,
             apiKey: this.apikey(context),
             compatibility: 'strict',
-            fetch: this.fetch,
+            fetch: originalModel === transformedModel ? undefined : this.fetch,
         });
-        const userMessage = params.messages.at(-1) as CoreUserMessage;
-        const originalModel = this.model(context, userMessage);
-        const languageModelV1 = provider.languageModel(this.transformModel(originalModel, context), undefined);
+        const languageModelV1 = provider.languageModel(transformedModel, undefined);
 
         const { messages, onStream: newOnStream } = this.extraHandle(originalModel, params.messages, context, onStream);
 
         return requestChatCompletionsV2(await warpLLMParams({
diff --git a/src/telegram/utils/md2tgmd.ts b/src/telegram/utils/md2tgmd.ts
index 7b4076dd..5dc39f6e 100644
--- a/src/telegram/utils/md2tgmd.ts
+++ b/src/telegram/utils/md2tgmd.ts
@@ -65,7 +65,6 @@ export function escape(text: string): string {
     }
     const regexp = /^LOGSTART\s(.*?)LOGEND/s;
     return result.join('\n')
-        // extra \n to avoid markdown render error
        .replace(regexp, '**$1||')
        .replace(new RegExp(Object.values(escapedChars).join('|'), 'g'), match => escapedCharsReverseMap.get(match) ?? match);
 }
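
Reviewer note: the gist of the src/agent/openai.ts change is that the custom fetch
wrapper used to be installed unconditionally on the provider; now it is passed only
when transformModel actually rewrote the model name, so ordinary requests fall back
to the platform's native fetch. A minimal sketch of that pattern, decoupled from the
repo's AgentUserConfig plumbing (buildProvider and myCustomFetch are hypothetical
stand-ins; createOpenAI and its fetch/compatibility options come from @ai-sdk/openai,
as in the patch itself):

    import { createOpenAI } from '@ai-sdk/openai';

    // Hypothetical wrapper: only needed when the outgoing request has to be
    // intercepted on behalf of a transformed model name.
    const myCustomFetch: typeof fetch = async (input, init) => {
        // ...inspect or rewrite the request here before forwarding it...
        return fetch(input, init);
    };

    function buildProvider(originalModel: string, transformedModel: string, apiKey: string) {
        return createOpenAI({
            apiKey,
            compatibility: 'strict',
            // Only mock fetch when the model was actually transformed;
            // `undefined` lets the SDK use the default global fetch.
            fetch: originalModel === transformedModel ? undefined : myCustomFetch,
        });
    }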