From 0d934c65867f370b36be644120bb10f461287264 Mon Sep 17 00:00:00 2001
From: leslie2046 <253605712@qq.com>
Date: Thu, 6 Feb 2025 15:03:22 +0800
Subject: [PATCH 1/2] feat:add deepseek r1 think display for ollama provider

---
 api/core/model_runtime/model_providers/ollama/llm/llm.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/api/core/model_runtime/model_providers/ollama/llm/llm.py b/api/core/model_runtime/model_providers/ollama/llm/llm.py
index 3ae728d4b36985..304bc38003da8d 100644
--- a/api/core/model_runtime/model_providers/ollama/llm/llm.py
+++ b/api/core/model_runtime/model_providers/ollama/llm/llm.py
@@ -314,6 +314,7 @@ def _handle_generate_stream_response(
         """
         full_text = ""
         chunk_index = 0
+        is_reasoning_started = False
 
         def create_final_llm_result_chunk(
             index: int, message: AssistantPromptMessage, finish_reason: str
@@ -367,6 +368,14 @@ def create_final_llm_result_chunk(
 
                 # transform assistant message to prompt message
                 text = chunk_json["response"]
+                if "<think>" in text:
+                    is_reasoning_started = True
+                    text = text.replace("<think>", "> 💭 ")
+                elif "</think>" in text:
+                    is_reasoning_started = False
+                    text = text.replace("</think>", "") + "\n\n"
+                elif is_reasoning_started:
+                    text = text.replace("\n", "\n> ")
 
                 assistant_prompt_message = AssistantPromptMessage(content=text)

From e1b5fdb3ecca9b2e7889092bd84568a0e4768ffa Mon Sep 17 00:00:00 2001
From: leslie2046 <253605712@qq.com>
Date: Thu, 6 Feb 2025 15:26:09 +0800
Subject: [PATCH 2/2] fix ci

---
 api/core/model_runtime/model_providers/ollama/llm/llm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/core/model_runtime/model_providers/ollama/llm/llm.py b/api/core/model_runtime/model_providers/ollama/llm/llm.py
index 304bc38003da8d..0377731175ebbe 100644
--- a/api/core/model_runtime/model_providers/ollama/llm/llm.py
+++ b/api/core/model_runtime/model_providers/ollama/llm/llm.py
@@ -370,7 +370,7 @@ def create_final_llm_result_chunk(
                 text = chunk_json["response"]
                 if "<think>" in text:
                     is_reasoning_started = True
-                    text = text.replace("<think>", "> 💭 ") 
+                    text = text.replace("<think>", "> 💭 ")
                 elif "</think>" in text:
                     is_reasoning_started = False
                     text = text.replace("</think>", "") + "\n\n"