diff --git a/config.py b/config.py
index 46b65787d..a69fae497 100644
--- a/config.py
+++ b/config.py
@@ -32,7 +32,8 @@
 # [step 3]>> Model selection (note: LLM_MODEL is the model selected by default; it *must* be included in the AVAIL_LLM_MODELS list)
 LLM_MODEL = "gpt-3.5-turbo-16k" # options ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
+AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview",
+                    "gpt-4o", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
                     "gpt-3.5-turbo-1106", "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "glm-4", "glm-4v", "glm-3-turbo",
                     "gemini-pro", "chatglm3"
@@ -44,7 +45,7 @@
 # "spark", "sparkv2", "sparkv3", "sparkv3.5",
 # "qwen-turbo", "qwen-plus", "qwen-max", "qwen-local",
 # "moonshot-v1-128k", "moonshot-v1-32k", "moonshot-v1-8k",
-# "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613", "gpt-3.5-turbo-0125"
+# "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613", "gpt-3.5-turbo-0125", "gpt-4o-2024-05-13"
 # "claude-3-haiku-20240307","claude-3-sonnet-20240229","claude-3-opus-20240229", "claude-2.1", "claude-instant-1.2",
 # "moss", "llama2", "chatglm_onnx", "internlm", "jittorllms_pangualpha", "jittorllms_llama",
 # "yi-34b-chat-0205", "yi-34b-chat-200k"
@@ -103,6 +104,10 @@
 WEB_PORT = -1
 
+# Whether to automatically open a browser page
+AUTO_OPEN_BROWSER = True
+
+
 # If OpenAI does not respond (network lag, proxy failure, expired KEY), the retry limit
 MAX_RETRY = 2
diff --git a/main.py b/main.py
index 05eaec3f2..81d64a593 100644
--- a/main.py
+++ b/main.py
@@ -364,8 +364,9 @@ def open_browser(): time.sleep(2); webbrowser.open_new_tab(f"http://localhost:{P
         def warm_up_mods(): time.sleep(6); warm_up_modules()
         threading.Thread(target=auto_updates, name="self-upgrade", daemon=True).start() # check for automatic updates
-        threading.Thread(target=open_browser, name="open-browser", daemon=True).start() # open the browser page
         threading.Thread(target=warm_up_mods, name="warm-up", daemon=True).start() # warm up the tiktoken module
+        if get_conf('AUTO_OPEN_BROWSER'):
+            threading.Thread(target=open_browser, name="open-browser", daemon=True).start() # open the browser page
 
     # Run some asynchronous tasks: auto-update, open the browser page, warm up the tiktoken module
     run_delayed_tasks()
diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py
index a014bd78e..2cde4a064 100644
--- a/request_llms/bridge_all.py
+++ b/request_llms/bridge_all.py
@@ -175,6 +175,24 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
     },
 
+    "gpt-4o": {
+        "fn_with_ui": chatgpt_ui,
+        "fn_without_ui": chatgpt_noui,
+        "endpoint": openai_endpoint,
+        "max_token": 128000,
+        "tokenizer": tokenizer_gpt4,
+        "token_cnt": get_token_num_gpt4,
+    },
+
+    "gpt-4o-2024-05-13": {
+        "fn_with_ui": chatgpt_ui,
+        "fn_without_ui": chatgpt_noui,
+        "endpoint": openai_endpoint,
+        "max_token": 128000,
+        "tokenizer": tokenizer_gpt4,
+        "token_cnt": get_token_num_gpt4,
+    },
+
     "gpt-4-turbo-preview": {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
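
Not part of the patch above, but for reviewers who want to see the two mechanisms in isolation: the Python sketch below mimics how the new AUTO_OPEN_BROWSER option gates the delayed browser thread and how a model name such as "gpt-4o" is resolved through a registry shaped like model_info. CONFIG, get_conf, resolve_model, MODEL_INFO and the port value are simplified stand-ins invented for this sketch; the project's real helpers live in toolbox.py and request_llms/bridge_all.py and carry more fields.

    # Standalone sketch (not part of the patch): simplified stand-ins, not the
    # project's real implementations.
    import threading
    import time
    import webbrowser

    CONFIG = {
        "AUTO_OPEN_BROWSER": True,   # mirrors the option added to config.py
        "WEB_PORT": 7860,            # placeholder port for this sketch only
    }

    # Trimmed-down registry in the shape of model_info from bridge_all.py.
    MODEL_INFO = {
        "gpt-4o":            {"endpoint": "https://api.openai.com/v1/chat/completions", "max_token": 128000},
        "gpt-4o-2024-05-13": {"endpoint": "https://api.openai.com/v1/chat/completions", "max_token": 128000},
    }

    def get_conf(key: str):
        """Simplified stand-in for the project's get_conf helper."""
        return CONFIG[key]

    def resolve_model(name: str) -> dict:
        """Fail early if a requested model has no registry entry."""
        if name not in MODEL_INFO:
            raise KeyError(f"{name} is not registered in MODEL_INFO")
        return MODEL_INFO[name]

    def open_browser(port: int):
        time.sleep(2)
        webbrowser.open_new_tab(f"http://localhost:{port}")

    if __name__ == "__main__":
        print(resolve_model("gpt-4o")["max_token"])  # -> 128000
        # The browser thread only starts when the flag is enabled.
        if get_conf("AUTO_OPEN_BROWSER"):
            t = threading.Thread(target=open_browser, args=(get_conf("WEB_PORT"),),
                                 name="open-browser", daemon=True)
            t.start()
            t.join()  # the real app's web server keeps the process alive; the sketch just waits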