Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/dev' into test
Browse files Browse the repository at this point in the history
  • Loading branch information
adolphnov committed Jul 31, 2024
2 parents fe48aca + 9d65a4a commit 8bdb99f
Show file tree
Hide file tree
Showing 10 changed files with 303 additions and 167 deletions.
2 changes: 1 addition & 1 deletion dist/buildinfo.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"sha": "11e0c3a", "timestamp": 1722426808}
{"sha": "fe48aca", "timestamp": 1722442077}
123 changes: 88 additions & 35 deletions dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ var UserConfig = class {
AZURE_PROXY_URL = null;
// Azure DallE API
// https://RESOURCE_NAME.openai.azure.com/openai/deployments/MODEL_NAME/images/generations?api-version=VERSION_NAME
// https://RESOURCE_NAME.openai.azure.com/openai/deployments/MODEL_NAME/images/generations?api-version=VERSION_NAME
AZURE_DALLE_API = null;
// -- Workers 配置 --
//
Expand Down Expand Up @@ -404,12 +405,12 @@ function initEnv(env, i18n2) {
}
ENV.TELEGRAM_AVAILABLE_TOKENS.push(env.TELEGRAM_TOKEN);
}
if (env.WORKERS_AI_MODEL) {
ENV.USER_CONFIG.WORKERS_CHAT_MODEL = env.WORKERS_AI_MODEL;
}
if (env.OPENAI_API_DOMAIN && !ENV.OPENAI_API_BASE) {
ENV.USER_CONFIG.OPENAI_API_BASE = `${env.OPENAI_API_DOMAIN}/v1`;
}
if (env.WORKERS_AI_MODEL && !ENV.USER_CONFIG.WORKERS_CHAT_MODEL) {
ENV.USER_CONFIG.WORKERS_CHAT_MODEL = env.WORKERS_AI_MODEL;
}
if (env.API_KEY && ENV.USER_CONFIG.OPENAI_API_KEY.length === 0) {
ENV.USER_CONFIG.OPENAI_API_KEY = env.API_KEY.split(",");
}
Expand Down Expand Up @@ -1810,7 +1811,12 @@ var chatLlmAgents = [
function currentChatModel(agentName, context) {
switch (agentName) {
case "azure":
return "azure";
try {
const url = new URL(context.USER_CONFIG.AZURE_COMPLETIONS_API);
return url.pathname.split("/")[3];
} catch {
return context.USER_CONFIG.AZURE_COMPLETIONS_API;
}
case "openai":
return context.USER_CONFIG.OPENAI_CHAT_MODEL;
case "workers":
Expand All @@ -1827,6 +1833,26 @@ function currentChatModel(agentName, context) {
return null;
}
}
// Look up which USER_CONFIG property stores the chat model for a given agent.
// Returns null when the agent name is not recognized.
function chatModelKey(agentName) {
  const chatModelConfigKeys = new Map([
    ["azure", "AZURE_COMPLETIONS_API"],
    ["openai", "OPENAI_CHAT_MODEL"],
    ["workers", "WORKERS_CHAT_MODEL"],
    ["gemini", "GOOGLE_COMPLETIONS_MODEL"],
    ["mistral", "MISTRAL_CHAT_MODEL"],
    ["cohere", "COHERE_CHAT_MODEL"],
    ["anthropic", "ANTHROPIC_CHAT_MODEL"],
  ]);
  // Map.get avoids the prototype-key hazards of a plain-object lookup.
  return chatModelConfigKeys.get(agentName) ?? null;
}
function customInfo(config) {
let info = `MODE: ${config.CURRENT_MODE}`;
const PROCESS = config.MODES[config.CURRENT_MODE] || [];
Expand Down Expand Up @@ -1936,7 +1962,12 @@ function loadImageGen(context) {
function currentImageModel(agentName, context) {
switch (agentName) {
case "azure":
return "azure";
try {
const url = new URL(context.USER_CONFIG.AZURE_DALLE_API);
return url.pathname.split("/")[3];
} catch {
return context.USER_CONFIG.AZURE_DALLE_API;
}
case "openai":
return context.USER_CONFIG.OPENAI_IMAGE_MODEL;
case "workers":
Expand All @@ -1945,6 +1976,18 @@ function currentImageModel(agentName, context) {
return null;
}
}
// Look up which USER_CONFIG property stores the image-generation model for a
// given agent. Returns null when the agent name is not recognized.
function imageModelKey(agentName) {
  const imageModelConfigKeys = new Map([
    ["azure", "AZURE_DALLE_API"],
    ["openai", "DALL_E_MODEL"],
    ["workers", "WORKERS_IMAGE_MODEL"],
  ]);
  // Map.get avoids the prototype-key hazards of a plain-object lookup.
  return imageModelConfigKeys.get(agentName) ?? null;
}

// src/config/middle.js
async function extractMessageType(message, botToken) {
Expand Down Expand Up @@ -2158,6 +2201,7 @@ function tokensCounter() {
return text.length;
};
}
async function loadHistory(key) {
async function loadHistory(context, key) {
const historyDisable = context._info.lastStepHasFile || ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
if (historyDisable) {
Expand All @@ -2172,37 +2216,37 @@ async function loadHistory(context, key) {
if (!history || !Array.isArray(history)) {
history = [];
}
let original = JSON.parse(JSON.stringify(history));
const counter = tokensCounter();
const trimHistory = (list, initLength, maxLength, maxToken) => {
if (list.length > maxLength) {
if (maxLength >= 0 && list.length > maxLength) {
list = list.splice(list.length - maxLength);
}
let tokenLength = initLength;
for (let i = list.length - 1; i >= 0; i--) {
const historyItem = list[i];
let length = 0;
if (historyItem.content) {
length = counter(historyItem.content);
} else {
historyItem.content = "";
}
tokenLength += length;
if (tokenLength > maxToken) {
list = list.splice(i + 1);
break;
if (maxToken >= 0) {
let tokenLength = initLength;
for (let i = list.length - 1; i >= 0; i--) {
const historyItem = list[i];
let length = 0;
if (historyItem.content) {
length = counter(historyItem.content);
} else {
historyItem.content = "";
}
tokenLength += length;
if (tokenLength > maxToken) {
list = list.splice(i + 1);
break;
}
}
}
return list;
};
if (ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH > 0) {
history = trimHistory(history, 0, ENV.MAX_HISTORY_LENGTH, ENV.MAX_TOKEN_LENGTH);
original = trimHistory(original, 0, ENV.MAX_HISTORY_LENGTH, ENV.MAX_TOKEN_LENGTH);
}
return { real: history, original };
return history;
}
async function requestCompletionsFromLLM(text, prompt, context, llm, modifier, onStream) {
const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
const historyDisable = context._info.lastStepHasFile || ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
const readStartTime = performance.now();
let history = await loadHistory(context, historyKey);
Expand All @@ -2213,15 +2257,14 @@ async function requestCompletionsFromLLM(text, prompt, context, llm, modifier, o
history = modifierData.history;
text = modifierData.text;
}
const { real: realHistory, original: originalHistory } = history;
const answer = await llm(text, prompt, realHistory, context, onStream);
const answer = await llm(text, prompt, history, context, onStream);
if (context._info.lastStepHasFile) {
text = "[A FILE] " + text;
}
if (!historyDisable && answer) {
originalHistory.push({ role: "user", content: text || "" });
originalHistory.push({ role: "assistant", content: answer });
await DATABASE.put(historyKey, JSON.stringify(originalHistory)).catch(console.error);
history.push({ role: "user", content: text || "" });
history.push({ role: "assistant", content: answer });
await DATABASE.put(historyKey, JSON.stringify(history)).catch(console.error);
}
return answer;
}
Expand Down Expand Up @@ -2525,14 +2568,18 @@ async function commandUpdateUserConfig(message, command, subcommand, context, pr
if (kv === -1) {
return sendMessageToTelegramWithContext(context)(ENV.I18N.command.help.setenv);
}
const key = subcommand.slice(0, kv);
let key = subcommand.slice(0, kv);
const value = subcommand.slice(kv + 1);
key = ENV_KEY_MAPPER[key] || key;
if (ENV.LOCK_USER_CONFIG_KEYS.includes(key)) {
return sendMessageToTelegramWithContext(context)(`Key ${key} is locked`);
}
if (!Object.keys(context.USER_CONFIG).includes(key)) {
return sendMessageToTelegramWithContext(context)(`Key ${key} not found`);
}
if (!Object.keys(context.USER_CONFIG).includes(key)) {
return sendMessageToTelegramWithContext(context)(`Key ${key} not found`);
}
try {
mergeEnvironment(context.USER_CONFIG, {
[key]: value
Expand Down Expand Up @@ -2560,8 +2607,10 @@ async function commandUpdateUserConfigs(message, command, subcommand, context, p
}
const values = JSON.parse(subcommand);
const configKeys = Object.keys(context.USER_CONFIG);
const configKeys = Object.keys(context.USER_CONFIG);
for (const ent of Object.entries(values)) {
const [key, value] = ent;
let [key, value] = ent;
key = ENV_KEY_MAPPER[key] || key;
if (ENV.LOCK_USER_CONFIG_KEYS.includes(key)) {
return sendMessageToTelegramWithContext(context)(`Key ${key} is locked`);
}
Expand Down Expand Up @@ -2770,14 +2819,13 @@ async function commandSystem(message, command, subcommand, context) {
}
async function commandRegenerate(message, command, subcommand, context) {
const mf = (history, text) => {
const { real, original } = history;
let nextText = text;
if (!real || !original || real.length === 0 || original.length === 0) {
if (!(history && Array.isArray(history) && history.length > 0)) {
throw new Error("History not found");
}
const historyCopy = structuredClone(history);
while (true) {
const data = real.pop();
original.pop();
const data = historyCopy.pop();
if (data === void 0 || data === null) {
break;
} else if (data.role === "user") {
Expand All @@ -2790,7 +2838,7 @@ async function commandRegenerate(message, command, subcommand, context) {
if (subcommand) {
nextText = subcommand;
}
return { history: { real, original }, text: nextText };
return { history: historyCopy, text: nextText };
};
return chatWithLLM(null, context, mf);
}
Expand Down Expand Up @@ -3383,13 +3431,18 @@ function i18n(lang) {
case "zh-hant":
return zh_hant_default;
case "pt":
case "pt-br":
return pt_default;
case "pt":
case "pt-br":
return pt_default;
case "en":
case "en-us":
return en_default;
default:
return en_default;
default:
return en_default;
}
}

Expand Down
2 changes: 1 addition & 1 deletion dist/timestamp
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1722426808
1722442077
7 changes: 6 additions & 1 deletion eslint.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,9 @@ import pluginJs from "@eslint/js";
export default [
{languageOptions: { globals: {...globals.browser, ...globals.node} }},
pluginJs.configs.recommended,
];
{
rules: {
semi : ["error", "always"],
}
}
];
64 changes: 58 additions & 6 deletions src/agent/agents.js
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ export const chatLlmAgents = [
enable: isAnthropicAIEnable,
request: requestCompletionsFromAnthropicAI
}
]
];

/**
* @param {string} agentName
Expand All @@ -77,7 +77,12 @@ export const chatLlmAgents = [
export function currentChatModel(agentName, context) {
switch (agentName) {
case "azure":
return "azure";
try {
const url = new URL(context.USER_CONFIG.AZURE_COMPLETIONS_API);
return url.pathname.split("/")[3];
} catch {
return context.USER_CONFIG.AZURE_COMPLETIONS_API;
}
case "openai":
return context.USER_CONFIG.OPENAI_CHAT_MODEL;
case "workers":
Expand All @@ -91,7 +96,32 @@ export function currentChatModel(agentName, context) {
case "anthropic":
return context.USER_CONFIG.ANTHROPIC_CHAT_MODEL;
default:
return null
return null;
}
}

/**
 * Resolve the USER_CONFIG property name that holds the chat model for an agent.
 * @param {string} agentName - Chat agent identifier (e.g. "openai", "azure").
 * @returns {null|string} The config key for that agent's chat model, or null if the agent is unknown.
 */
export function chatModelKey(agentName) {
  const configKeyByAgent = new Map([
    ["azure", "AZURE_COMPLETIONS_API"],
    ["openai", "OPENAI_CHAT_MODEL"],
    ["workers", "WORKERS_CHAT_MODEL"],
    ["gemini", "GOOGLE_COMPLETIONS_MODEL"],
    ["mistral", "MISTRAL_CHAT_MODEL"],
    ["cohere", "COHERE_CHAT_MODEL"],
    ["anthropic", "ANTHROPIC_CHAT_MODEL"],
  ]);
  // Map.get sidesteps inherited-property lookups a plain object would allow.
  return configKeyByAgent.get(agentName) ?? null;
}

Expand Down Expand Up @@ -241,7 +271,7 @@ export const imageGenAgents = [
enable: isWorkersAIEnable,
request: requestImageFromWorkersAI
}
]
];


/**
Expand Down Expand Up @@ -274,12 +304,34 @@ export function loadImageGen(context) {
export function currentImageModel(agentName, context) {
switch (agentName) {
case "azure":
return "azure";
try {
const url = new URL(context.USER_CONFIG.AZURE_DALLE_API);
return url.pathname.split("/")[3];
} catch {
return context.USER_CONFIG.AZURE_DALLE_API;
}
case "openai":
return context.USER_CONFIG.OPENAI_IMAGE_MODEL;
case "workers":
return context.USER_CONFIG.WORKERS_IMAGE_MODEL;
default:
return null
return null;
}
}

/**
 * Resolve the USER_CONFIG property name that holds the image model for an agent.
 * @param {string} agentName - Image agent identifier (e.g. "openai", "azure").
 * @returns {null|string} The config key for that agent's image model, or null if the agent is unknown.
 */
export function imageModelKey(agentName) {
  const configKeyByAgent = new Map([
    ["azure", "AZURE_DALLE_API"],
    ["openai", "DALL_E_MODEL"],
    ["workers", "WORKERS_IMAGE_MODEL"],
  ]);
  // Map.get sidesteps inherited-property lookups a plain object would allow.
  return configKeyByAgent.get(agentName) ?? null;
}
Loading

0 comments on commit 8bdb99f

Please sign in to comment.