Browse Source

payload and response modified for compatibility

henry 3 weeks ago
parent
commit
8f6c3f46d6
2 changed files with 82 additions and 0 deletions
  1. 30 0
      backend/open_webui/utils/payload.py
  2. 52 0
      backend/open_webui/utils/response.py

+ 30 - 0
backend/open_webui/utils/payload.py

@@ -329,3 +329,33 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
             ollama_payload["format"] = format
 
     return ollama_payload
+
+
def convert_embedding_payload_openai_to_ollama(openai_payload: dict) -> dict:
    """
    Translate an OpenAI-style embeddings request into the Ollama format.

    Args:
        openai_payload (dict): OpenAI-shaped request payload, e.g.
            {"model": "...", "input": "text"} or {"model": "...", "input": ["a", "b"]}

    Returns:
        dict: Equivalent payload for the Ollama embeddings endpoint, with
            'input' always normalized to a list, e.g.
            {"model": "...", "input": [str, ...]}
    """
    raw_input = openai_payload.get("input")
    converted = {
        "model": openai_payload.get("model"),
        # Ollama expects 'input' as a list; wrap any non-list value in one.
        "input": raw_input if isinstance(raw_input, list) else [raw_input],
    }

    # Forward Ollama-specific tuning keys when the caller supplied them.
    for key in ("options", "truncate", "keep_alive"):
        if key in openai_payload:
            converted[key] = openai_payload[key]

    return converted

+ 52 - 0
backend/open_webui/utils/response.py

@@ -125,3 +125,55 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
         yield line
 
     yield "data: [DONE]\n\n"
+
def convert_response_ollama_to_openai(response):
    """
    Convert an Ollama embeddings response to the OpenAI-compatible format.

    Handles the response shapes Ollama can produce:
      * Batch (/api/embed): {"embeddings": [...], "model": "..."} where each
        item is either a plain vector (list of floats) or a dict of the form
        {"embedding": [...], "index": n}.
      * Single (/api/embeddings): {"embedding": [...], "model": "..."}.
      * Already OpenAI-compatible: a dict with a "data" list is returned as-is.

    Args:
        response (dict): The raw response from the Ollama API.

    Returns:
        dict: Response adapted to OpenAI's embeddings API format, e.g.
            {
                "object": "list",
                "data": [
                    {"object": "embedding", "embedding": [...], "index": 0},
                    ...
                ],
                "model": "...",
            }
        Unrecognized inputs are returned unchanged.
    """
    # Ollama batch-style output
    if isinstance(response, dict) and "embeddings" in response:
        openai_data = []
        for i, emb in enumerate(response["embeddings"]):
            # /api/embed returns plain vectors (list of floats); also accept
            # dict items ({"embedding": ..., "index": ...}) for compatibility.
            if isinstance(emb, dict):
                vector = emb.get("embedding")
                index = emb.get("index", i)
            else:
                vector = emb
                index = i
            openai_data.append({
                "object": "embedding",
                "embedding": vector,
                "index": index,
            })
        return {
            "object": "list",
            "data": openai_data,
            "model": response.get("model"),
        }
    # Ollama single output
    elif isinstance(response, dict) and "embedding" in response:
        return {
            "object": "list",
            "data": [{
                "object": "embedding",
                "embedding": response["embedding"],
                "index": 0,
            }],
            "model": response.get("model"),
        }
    # Already OpenAI-compatible?
    elif isinstance(response, dict) and "data" in response and isinstance(response["data"], list):
        return response

    # Fallback: return as is if unrecognized
    return response