
refactor: use cleanup_response on openai

Signed-off-by: Sihyeon Jang <sihyeon.jang@navercorp.com>
Sihyeon Jang, 2 months ago
parent commit 17f0bef2e2
1 file changed with 6 additions and 12 deletions
  1. backend/open_webui/routers/openai.py  +6 −12

backend/open_webui/routers/openai.py  +6 −12

@@ -893,10 +893,8 @@ async def generate_chat_completion(
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 async def embeddings(request: Request, form_data: dict, user):
@@ -975,10 +973,8 @@ async def embeddings(request: Request, form_data: dict, user):
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 @router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
@@ -1074,7 +1070,5 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
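
The `cleanup_response` helper itself is not shown in this diff. As a minimal sketch of what it presumably does, based on the `finally:` logic it replaces (the signature and parameter names here are assumptions, not taken from the repository):

    from typing import Optional

    import aiohttp


    async def cleanup_response(
        response: Optional[aiohttp.ClientResponse],
        session: Optional[aiohttp.ClientSession],
    ):
        # Mirror the removed finally-block logic: release the response
        # first, then close the owning session, tolerating None for either.
        if response:
            response.close()
        if session:
            await session.close()

Centralizing this in one coroutine keeps the three `finally:` blocks identical and lets the None checks live in a single place instead of being repeated per endpoint.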