
Merge pull request #16397 from 17jmumford/add_gpt_5_max_token_handling

fix: added gpt-5 to reasoning model payload handler
Tim Jaeryang Baek, 2 months ago
Parent commit: 9fbc76f4bb
1 file changed with 7 additions and 7 deletions

+ 7 - 7
backend/open_webui/routers/openai.py

@@ -95,12 +95,12 @@ async def cleanup_response(
         await session.close()
 
 
-def openai_o_series_handler(payload):
+def openai_reasoning_model_handler(payload):
     """
-    Handle "o" series specific parameters
+    Handle reasoning model specific parameters
     """
     if "max_tokens" in payload:
-        # Convert "max_tokens" to "max_completion_tokens" for all o-series models
+        # Convert "max_tokens" to "max_completion_tokens" for all reasoning models
         payload["max_completion_tokens"] = payload["max_tokens"]
         del payload["max_tokens"]
 
@@ -789,10 +789,10 @@ async def generate_chat_completion(
     url = request.app.state.config.OPENAI_API_BASE_URLS[idx]
     key = request.app.state.config.OPENAI_API_KEYS[idx]
 
-    # Check if model is from "o" series
-    is_o_series = payload["model"].lower().startswith(("o1", "o3", "o4"))
-    if is_o_series:
-        payload = openai_o_series_handler(payload)
+    # Check if model is a reasoning model that needs special handling
+    is_reasoning_model = payload["model"].lower().startswith(("o1", "o3", "o4", "gpt-5"))
+    if is_reasoning_model:
+        payload = openai_reasoning_model_handler(payload)
     elif "api.openai.com" not in url:
         # Remove "max_completion_tokens" from the payload for backward compatibility
         if "max_completion_tokens" in payload: