diff --git a/dist/buildinfo.json b/dist/buildinfo.json
index 2bb6e4b17..3b8b271f8 100644
--- a/dist/buildinfo.json
+++ b/dist/buildinfo.json
@@ -1 +1 @@
-{"sha": "fe48aca", "timestamp": 1722442077}
+{"sha": "8bdb99f", "timestamp": 1722442566}
diff --git a/dist/index.js b/dist/index.js
index 0ce9a4d35..f1f8aaaef 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -112,7 +112,6 @@ var UserConfig = class {
   AZURE_PROXY_URL = null;
   // Azure DallE API
   // https://RESOURCE_NAME.openai.azure.com/openai/deployments/MODEL_NAME/images/generations?api-version=VERSION_NAME
-  // https://RESOURCE_NAME.openai.azure.com/openai/deployments/MODEL_NAME/images/generations?api-version=VERSION_NAME
   AZURE_DALLE_API = null;
   // -- Workers 配置 --
   //
@@ -200,9 +199,9 @@ var Environment = class {
   // -- 版本数据 --
   //
   // 当前版本
-  BUILD_TIMESTAMP = 1722426808;
+  BUILD_TIMESTAMP = 1722442566;
   // 当前版本 commit id
-  BUILD_VERSION = "11e0c3a";
+  BUILD_VERSION = "8bdb99f";
   // -- 基础配置 --
   /**
    * @type {I18n | null}
    */
@@ -308,6 +307,11 @@ var Environment = class {
   MAPPING_VALUE = "";
   // MAPPING_VALUE = "c35son:claude-3-5-sonnet-20240620|haiku:claude-3-haiku-20240307|g4m:gpt-4o-mini|g4:gpt-4o|rp+:command-r-plus";
 };
+var ENV_KEY_MAPPER = {
+  CHAT_MODEL: "OPENAI_CHAT_MODEL",
+  API_KEY: "OPENAI_API_KEY",
+  WORKERS_AI_MODEL: "WORKERS_CHAT_MODEL"
+};
 var ENV = new Environment();
 var DATABASE = null;
 var API_GUARD = null;
@@ -327,9 +331,7 @@ var ENV_TYPES = {
   GOOGLE_API_KEY: "string",
   MISTRAL_API_KEY: "string",
   COHERE_API_KEY: "string",
-  ANTHROPIC_API_KEY: "string",
-  MAPPING_KEY: "string",
-  MAPPING_VALUE: "string"
+  ANTHROPIC_API_KEY: "string"
 };
 function parseArray(raw) {
   if (raw.startsWith("[") && raw.endsWith("]")) {
@@ -2169,10 +2171,7 @@ Token: ${Object.values(this.token_info[this.step_index]).join(" | ")}`;
       throw new Error("unsupport type");
     }
     if (!this.model) {
-      if (USER_CONFIG.AI_PROVIDER === "auto") {
-        this.model = USER_CONFIG[`OPENAI_${chatType}_MODEL`];
-      } else
-        this.model = USER_CONFIG[`${USER_CONFIG.AI_PROVIDER.toUpperCase()}_${chatType}_MODEL`];
+      this.model = USER_CONFIG[`${USER_CONFIG.AI_PROVIDER.toUpperCase()}_${chatType}_MODEL`] || USER_CONFIG[`OPENAI_${chatType}_MODEL`];
     }
     for (const [key, value] of Object.entries(this.processes[this.step_index - 1])) {
       switch (key) {
@@ -2202,14 +2201,15 @@ function tokensCounter() {
   };
 }
 async function loadHistory(key) {
-async function loadHistory(context, key) {
-  const historyDisable = context._info.lastStepHasFile || ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
-  if (historyDisable) {
-    return { real: [], original: [] };
-  }
   let history = [];
   try {
-    history = JSON.parse(await DATABASE.get(key) || "{}");
+    history = JSON.parse(await DATABASE.get(key) || "[]");
+    history = history.map((item) => {
+      return {
+        role: item.role,
+        content: item.content
+      };
+    });
   } catch (e) {
     console.error(e);
   }
@@ -2249,7 +2249,10 @@ async function requestCompletionsFromLLM(text, prompt, context, llm, modifier, o
   const historyDisable = context._info.lastStepHasFile || ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
   const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
   const readStartTime = performance.now();
-  let history = await loadHistory(context, historyKey);
+  let history = [];
+  if (historyDisable) {
+    history = await loadHistory(historyKey);
+  }
   const readTime = ((performance.now() - readStartTime) / 1e3).toFixed(2);
   console.log(`readHistoryTime: ${readTime}s`);
   if (modifier) {
@@ -2506,7 +2509,6 @@ async function commandGenerateImg(message, command, subcommand, context) {
     return sendMessageToTelegramWithContext(context)(ENV.I18N.command.help.img);
   }
   try {
-    setTimeout(() => sendChatActionToTelegramWithContext(context)("upload_photo").catch(console.error), 0);
     if (!context.CURRENT_CHAT_CONTEXT) {
       context.CURRENT_CHAT_CONTEXT = {};
     }
@@ -2514,6 +2516,7 @@ async function commandGenerateImg(message, command, subcommand, context) {
     if (!gen) {
       return sendMessageToTelegramWithContext(context)(`ERROR: Image generator not found`);
     }
+    setTimeout(() => sendChatActionToTelegramWithContext(context)("upload_photo").catch(console.error), 0);
     const img = await gen(subcommand, context);
     return sendPhotoToTelegramWithContext(context)(img);
   } catch (e) {
@@ -2577,9 +2580,6 @@ async function commandUpdateUserConfig(message, command, subcommand, context, pr
   if (!Object.keys(context.USER_CONFIG).includes(key)) {
     return sendMessageToTelegramWithContext(context)(`Key ${key} not found`);
   }
-  if (!Object.keys(context.USER_CONFIG).includes(key)) {
-    return sendMessageToTelegramWithContext(context)(`Key ${key} not found`);
-  }
   try {
     mergeEnvironment(context.USER_CONFIG, {
       [key]: value
@@ -2607,7 +2607,6 @@ async function commandUpdateUserConfigs(message, command, subcommand, context, p
   }
   const values = JSON.parse(subcommand);
   const configKeys = Object.keys(context.USER_CONFIG);
-  const configKeys = Object.keys(context.USER_CONFIG);
   for (const ent of Object.entries(values)) {
     let [key, value] = ent;
     key = ENV_KEY_MAPPER[key] || key;
@@ -2780,16 +2779,19 @@ Current version: ${current.sha}(${timeFormat(current.ts)})`);
 async function commandSystem(message, command, subcommand, context) {
   let chatAgent = loadChatLLM(context)?.name;
   let imageAgent = loadImageGen(context)?.name;
-  let chatModel = currentChatModel(chatAgent, context);
-  let imageModel = currentImageModel(imageAgent, context);
-  let msg = `AGENT: ${JSON.stringify({
-    CHAT_AGENT: chatAgent,
-    CHAT_MODEL: chatModel,
-    IMAGE_AGENT: imageAgent,
-    IMAGE_MODEL: imageModel,
-    STT_MODEL: context.USER_CONFIG.OPENAI_STT_MODEL,
-    VISION_MODEL: context.USER_CONFIG.OPENAI_VISION_MODEL
-  }, null, 2)}
+  const agent = {
+    AI_PROVIDER: chatAgent,
+    AI_IMAGE_PROVIDER: imageAgent
+  };
+  if (chatModelKey(chatAgent)) {
+    agent[chatModelKey(chatAgent)] = currentChatModel(chatAgent, context);
+  }
+  if (imageModelKey(imageAgent)) {
+    agent[imageModelKey(imageAgent)] = currentImageModel(imageAgent, context);
+  }
+  agent.STT_MODEL = context.USER_CONFIG.OPENAI_STT_MODEL;
+  agent.VISION_MODEL = context.USER_CONFIG.OPENAI_VISION_MODEL;
+  let msg = `AGENT: ${JSON.stringify(agent, null, 2)}
 ` + customInfo(context.USER_CONFIG) + "\n";
   if (ENV.DEV_MODE) {
     const shareCtx = { ...context.SHARE_CONTEXT };
@@ -3431,9 +3433,6 @@ function i18n(lang) {
     case "zh-hant":
       return zh_hant_default;
     case "pt":
-    case "pt-br":
-      return pt_default;
-    case "pt":
     case "pt-br":
       return pt_default;
     case "en":
@@ -3441,8 +3440,6 @@
     case "en-us":
       return en_default;
     default:
       return en_default;
-    default:
-      return en_default;
   }
 }
diff --git a/dist/timestamp b/dist/timestamp
index 9b7daee1c..868fe7643 100644
--- a/dist/timestamp
+++ b/dist/timestamp
@@ -1 +1 @@
-1722442077
+1722442566