@@ -1232,6 +1232,9 @@ class GenerateChatCompletionForm(BaseModel):
     stream: Optional[bool] = True
     keep_alive: Optional[Union[int, str]] = None
     tools: Optional[list[dict]] = None
+    model_config = ConfigDict(
+        extra="allow",
+    )


 async def get_ollama_url(request: Request, model: str, url_idx: Optional[int] = None):
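The first hunk adds a Pydantic v2 `model_config` so that `GenerateChatCompletionForm` accepts fields beyond those declared on the class. A minimal sketch of the behavior, using a hypothetical stand-in model rather than the project's actual form:

```python
# Sketch of Pydantic v2's ConfigDict(extra="allow"): undeclared fields are
# kept on the model and included in model_dump() instead of being silently
# dropped (Pydantic v2 defaults to extra="ignore").
from typing import Optional

from pydantic import BaseModel, ConfigDict


class ChatForm(BaseModel):  # hypothetical stand-in for GenerateChatCompletionForm
    model_config = ConfigDict(extra="allow")

    model: str
    stream: Optional[bool] = True


form = ChatForm(model="llama3", stream=False, options={"temperature": 0.2})
print(form.model_dump(exclude_none=True))
# -> {'model': 'llama3', 'stream': False, 'options': {'temperature': 0.2}}
```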
@@ -1269,7 +1272,9 @@ async def generate_chat_completion(
             detail=str(e),
         )

-    payload = {**form_data.model_dump(exclude_none=True)}
+    if isinstance(form_data, BaseModel):
+        payload = {**form_data.model_dump(exclude_none=True)}
+

     if "metadata" in payload:
         del payload["metadata"]
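The second hunk only calls `model_dump` when `form_data` is actually a Pydantic model. Below is a hedged sketch of how the two possible input shapes could be normalized into the payload forwarded to Ollama; the `else` branch and the `build_payload` helper name are assumptions for illustration, not part of the diff:

```python
# Hypothetical helper: normalize form_data (Pydantic model or plain dict)
# into the dict payload that gets forwarded.
from pydantic import BaseModel


def build_payload(form_data) -> dict:
    if isinstance(form_data, BaseModel):
        # Drop None-valued fields so unset options are not sent explicitly.
        payload = form_data.model_dump(exclude_none=True)
    else:
        # Assumption: a pre-built dict payload is copied through as-is.
        payload = {**form_data}

    # Internal metadata is stripped before forwarding, mirroring the
    # `if "metadata" in payload: del payload["metadata"]` context lines above.
    payload.pop("metadata", None)
    return payload
```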