Timothy Jaeryang Baek, 3 months ago
parent
commit
a28bec865b

+ 1 - 1
backend/open_webui/utils/misc.py

@@ -222,7 +222,7 @@ def openai_chat_chunk_message_template(
         template["choices"][0]["delta"]["content"] = content
 
     if reasoning_content:
-        template["choices"][0]["delta"]["reasonsing_content"] = reasoning_content
+        template["choices"][0]["delta"]["reasoning_content"] = reasoning_content
 
     if tool_calls:
         template["choices"][0]["delta"]["tool_calls"] = tool_calls

+ 2 - 1
backend/open_webui/utils/response.py

@@ -83,6 +83,7 @@ def convert_ollama_usage_to_openai(data: dict) -> dict:
 def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
     model = ollama_response.get("model", "ollama")
     message_content = ollama_response.get("message", {}).get("content", "")
+    reasoning_content = ollama_response.get("message", {}).get("thinking", None)
     tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
     openai_tool_calls = None
 
@@ -94,7 +95,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
     usage = convert_ollama_usage_to_openai(data)
 
     response = openai_chat_completion_message_template(
-        model, message_content, openai_tool_calls, usage
+        model, message_content, reasoning_content, openai_tool_calls, usage
     )
     return response