# functions.py

import logging
import sys
import inspect
import json
import asyncio

from pydantic import BaseModel
from typing import AsyncGenerator, Generator, Iterator

from fastapi import (
    Depends,
    FastAPI,
    File,
    Form,
    HTTPException,
    Request,
    UploadFile,
    status,
)
from starlette.responses import Response, StreamingResponse

from open_webui.constants import ERROR_MESSAGES
from open_webui.socket.main import (
    get_event_call,
    get_event_emitter,
)
from open_webui.models.users import UserModel
from open_webui.models.functions import Functions
from open_webui.models.models import Models

from open_webui.utils.plugin import (
    load_function_module_by_id,
    get_function_module_from_cache,
)
from open_webui.utils.tools import get_tools
from open_webui.utils.access_control import has_access
from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL

from open_webui.utils.misc import (
    add_or_update_system_message,
    get_last_user_message,
    prepend_to_first_user_message_content,
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_system_prompt_to_body,
)

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


def get_function_module_by_id(request: Request, pipe_id: str):
    """Load the function module for a pipe and apply its stored valve settings."""
    function_module, _, _ = get_function_module_from_cache(request, pipe_id)

    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        Valves = function_module.Valves
        valves = Functions.get_function_valves_by_id(pipe_id)

        if valves:
            try:
                function_module.valves = Valves(
                    **{k: v for k, v in valves.items() if v is not None}
                )
            except Exception as e:
                log.exception(f"Error loading valves for function {pipe_id}: {e}")
                raise e
        else:
            function_module.valves = Valves()

    return function_module


async def get_function_models(request):
    """Return OpenAI-style model entries for all active pipe functions,
    expanding manifold functions into one entry per sub-pipe."""
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        try:
            function_module = get_function_module_by_id(request, pipe.id)

            # Check if function is a manifold
            if hasattr(function_module, "pipes"):
                sub_pipes = []

                # Handle pipes being a list, sync function, or async function
                try:
                    if callable(function_module.pipes):
                        if asyncio.iscoroutinefunction(function_module.pipes):
                            sub_pipes = await function_module.pipes()
                        else:
                            sub_pipes = function_module.pipes()
                    else:
                        sub_pipes = function_module.pipes
                except Exception as e:
                    log.exception(e)
                    sub_pipes = []

                log.debug(
                    f"get_function_models: function '{pipe.id}' is a manifold of {sub_pipes}"
                )

                for p in sub_pipes:
                    sub_pipe_id = f'{pipe.id}.{p["id"]}'
                    sub_pipe_name = p["name"]

                    if hasattr(function_module, "name"):
                        sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                    pipe_flag = {"type": pipe.type}

                    pipe_models.append(
                        {
                            "id": sub_pipe_id,
                            "name": sub_pipe_name,
                            "object": "model",
                            "created": pipe.created_at,
                            "owned_by": "openai",
                            "pipe": pipe_flag,
                        }
                    )
            else:
                pipe_flag = {"type": "pipe"}

                log.debug(
                    f"get_function_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
                )

                pipe_models.append(
                    {
                        "id": pipe.id,
                        "name": pipe.name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        except Exception as e:
            log.exception(e)
            continue

    return pipe_models


async def generate_function_chat_completion(
    request, form_data, user, models: dict = {}
):
    """Execute a pipe function for a chat completion request and return either
    a streaming SSE response or an OpenAI-style completion object."""

    async def execute_pipe(pipe, params):
        if inspect.iscoroutinefunction(pipe):
            return await pipe(**params)
        else:
            return pipe(**params)

    async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
        if isinstance(res, str):
            return res
        if isinstance(res, Generator):
            return "".join(map(str, res))
        if isinstance(res, AsyncGenerator):
            return "".join([str(stream) async for stream in res])

    def process_line(form_data: dict, line):
        if isinstance(line, BaseModel):
            line = line.model_dump_json()
            line = f"data: {line}"
        if isinstance(line, dict):
            line = f"data: {json.dumps(line)}"

        try:
            line = line.decode("utf-8")
        except Exception:
            pass

        if line.startswith("data:"):
            return f"{line}\n\n"
        else:
            line = openai_chat_chunk_message_template(form_data["model"], line)
            return f"data: {json.dumps(line)}\n\n"

    def get_pipe_id(form_data: dict) -> str:
        pipe_id = form_data["model"]
        if "." in pipe_id:
            pipe_id, _ = pipe_id.split(".", 1)
        return pipe_id

    def get_function_params(function_module, form_data, user, extra_params=None):
        if extra_params is None:
            extra_params = {}

        pipe_id = get_pipe_id(form_data)

        # Get the signature of the function
        sig = inspect.signature(function_module.pipe)

        params = {"body": form_data} | {
            k: v for k, v in extra_params.items() if k in sig.parameters
        }

        if "__user__" in params and hasattr(function_module, "UserValves"):
            user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
            try:
                params["__user__"]["valves"] = function_module.UserValves(**user_valves)
            except Exception as e:
                log.exception(e)
                params["__user__"]["valves"] = function_module.UserValves()

        return params

    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", {})

    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])

    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)

        __task__ = metadata.get("task", None)
        __task_body__ = metadata.get("task_body", None)

    oauth_token = None
    try:
        if request.cookies.get("oauth_session_id", None):
            oauth_token = request.app.state.oauth_manager.get_oauth_token(
                user.id,
                request.cookies.get("oauth_session_id", None),
            )
    except Exception as e:
        log.error(f"Error getting OAuth token: {e}")

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__chat_id__": metadata.get("chat_id", None),
        "__session_id__": metadata.get("session_id", None),
        "__message_id__": metadata.get("message_id", None),
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": user.model_dump() if isinstance(user, UserModel) else {},
        "__metadata__": metadata,
        "__oauth_token__": oauth_token,
        "__request__": request,
    }
    extra_params["__tools__"] = await get_tools(
        request,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": models.get(form_data["model"], None),
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()

        if params:
            system = params.pop("system", None)
            form_data = apply_model_params_to_body_openai(params, form_data)
            form_data = apply_system_prompt_to_body(system, form_data, metadata, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module_by_id(request, pipe_id)
    pipe = function_module.pipe

    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data.get("stream", False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return

            except Exception as e:
                log.error(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"

            yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            log.error(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)