retrieval.py

import json
import logging
import mimetypes
import os
import shutil
import uuid

from datetime import datetime
from pathlib import Path
from typing import Iterator, List, Optional, Sequence, Union

from fastapi import (
    Depends,
    FastAPI,
    File,
    Form,
    HTTPException,
    UploadFile,
    Request,
    status,
    APIRouter,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel
import tiktoken

from langchain.text_splitter import RecursiveCharacterTextSplitter, TokenTextSplitter
from langchain_core.documents import Document

from open_webui.models.files import FileModel, Files
from open_webui.models.knowledge import Knowledges
from open_webui.storage.provider import Storage

from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT

# Document loaders
from open_webui.retrieval.loaders.main import Loader
from open_webui.retrieval.loaders.youtube import YoutubeLoader

# Web search engines
from open_webui.retrieval.web.main import SearchResult
from open_webui.retrieval.web.utils import get_web_loader
from open_webui.retrieval.web.brave import search_brave
from open_webui.retrieval.web.kagi import search_kagi
from open_webui.retrieval.web.mojeek import search_mojeek
from open_webui.retrieval.web.bocha import search_bocha
from open_webui.retrieval.web.duckduckgo import search_duckduckgo
from open_webui.retrieval.web.google_pse import search_google_pse
from open_webui.retrieval.web.jina_search import search_jina
from open_webui.retrieval.web.searchapi import search_searchapi
from open_webui.retrieval.web.serpapi import search_serpapi
from open_webui.retrieval.web.searxng import search_searxng
from open_webui.retrieval.web.serper import search_serper
from open_webui.retrieval.web.serply import search_serply
from open_webui.retrieval.web.serpstack import search_serpstack
from open_webui.retrieval.web.tavily import search_tavily
from open_webui.retrieval.web.bing import search_bing
from open_webui.retrieval.web.exa import search_exa
from open_webui.retrieval.web.perplexity import search_perplexity

from open_webui.retrieval.utils import (
    get_embedding_function,
    get_model_path,
    query_collection,
    query_collection_with_hybrid_search,
    query_doc,
    query_doc_with_hybrid_search,
)
from open_webui.utils.misc import (
    calculate_sha256_string,
)
from open_webui.utils.auth import get_admin_user, get_verified_user

from open_webui.config import (
    ENV,
    RAG_EMBEDDING_MODEL_AUTO_UPDATE,
    RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
    RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
    UPLOAD_DIR,
    DEFAULT_LOCALE,
    RAG_EMBEDDING_CONTENT_PREFIX,
    RAG_EMBEDDING_QUERY_PREFIX,
)
from open_webui.env import (
    SRC_LOG_LEVELS,
    DEVICE_TYPE,
    DOCKER,
)
from open_webui.constants import ERROR_MESSAGES

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
##########################################
#
# Utility functions
#
##########################################


def get_ef(
    engine: str,
    embedding_model: str,
    auto_update: bool = False,
):
    ef = None
    if embedding_model and engine == "":
        from sentence_transformers import SentenceTransformer

        try:
            ef = SentenceTransformer(
                get_model_path(embedding_model, auto_update),
                device=DEVICE_TYPE,
                trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
            )
        except Exception as e:
            log.debug(f"Error loading SentenceTransformer: {e}")

    return ef


def get_rf(
    reranking_model: Optional[str] = None,
    auto_update: bool = False,
):
    rf = None
    if reranking_model:
        if any(model in reranking_model for model in ["jinaai/jina-colbert-v2"]):
            try:
                from open_webui.retrieval.models.colbert import ColBERT

                rf = ColBERT(
                    get_model_path(reranking_model, auto_update),
                    env="docker" if DOCKER else None,
                )
            except Exception as e:
                log.error(f"ColBERT: {e}")
                raise Exception(ERROR_MESSAGES.DEFAULT(e))
        else:
            import sentence_transformers

            try:
                rf = sentence_transformers.CrossEncoder(
                    get_model_path(reranking_model, auto_update),
                    device=DEVICE_TYPE,
                    trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
                )
            except:
                log.error("CrossEncoder error")
                raise Exception(ERROR_MESSAGES.DEFAULT("CrossEncoder error"))
    return rf
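
# Illustrative only (not part of the original module): a minimal sketch of how
# get_ef / get_rf are typically wired into application state at startup, assuming
# a FastAPI `app` object that carries the same config attributes this router reads.
#
#     app.state.ef = get_ef(
#         app.state.config.RAG_EMBEDDING_ENGINE,
#         app.state.config.RAG_EMBEDDING_MODEL,
#         RAG_EMBEDDING_MODEL_AUTO_UPDATE,
#     )
#     app.state.rf = get_rf(
#         app.state.config.RAG_RERANKING_MODEL,
#         RAG_RERANKING_MODEL_AUTO_UPDATE,
#     )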
##########################################
#
# API routes
#
##########################################

router = APIRouter()


class CollectionNameForm(BaseModel):
    collection_name: Optional[str] = None


class ProcessUrlForm(CollectionNameForm):
    url: str


class SearchForm(CollectionNameForm):
    query: str


@router.get("/")
async def get_status(request: Request):
    return {
        "status": True,
        "chunk_size": request.app.state.config.CHUNK_SIZE,
        "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
        "template": request.app.state.config.RAG_TEMPLATE,
        "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
        "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
        "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
        "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
    }


@router.get("/embedding")
async def get_embedding_config(request: Request, user=Depends(get_admin_user)):
    return {
        "status": True,
        "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
        "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
        "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
        "openai_config": {
            "url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
            "key": request.app.state.config.RAG_OPENAI_API_KEY,
        },
        "ollama_config": {
            "url": request.app.state.config.RAG_OLLAMA_BASE_URL,
            "key": request.app.state.config.RAG_OLLAMA_API_KEY,
        },
    }


@router.get("/reranking")
async def get_reranking_config(request: Request, user=Depends(get_admin_user)):
    return {
        "status": True,
        "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
    }
class OpenAIConfigForm(BaseModel):
    url: str
    key: str


class OllamaConfigForm(BaseModel):
    url: str
    key: str


class EmbeddingModelUpdateForm(BaseModel):
    openai_config: Optional[OpenAIConfigForm] = None
    ollama_config: Optional[OllamaConfigForm] = None
    embedding_engine: str
    embedding_model: str
    embedding_batch_size: Optional[int] = 1


@router.post("/embedding/update")
async def update_embedding_config(
    request: Request, form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
):
    log.info(
        f"Updating embedding model: {request.app.state.config.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
    )
    try:
        request.app.state.config.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
        request.app.state.config.RAG_EMBEDDING_MODEL = form_data.embedding_model

        if request.app.state.config.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
            if form_data.openai_config is not None:
                request.app.state.config.RAG_OPENAI_API_BASE_URL = (
                    form_data.openai_config.url
                )
                request.app.state.config.RAG_OPENAI_API_KEY = (
                    form_data.openai_config.key
                )

            if form_data.ollama_config is not None:
                request.app.state.config.RAG_OLLAMA_BASE_URL = (
                    form_data.ollama_config.url
                )
                request.app.state.config.RAG_OLLAMA_API_KEY = (
                    form_data.ollama_config.key
                )

            request.app.state.config.RAG_EMBEDDING_BATCH_SIZE = (
                form_data.embedding_batch_size
            )

        request.app.state.ef = get_ef(
            request.app.state.config.RAG_EMBEDDING_ENGINE,
            request.app.state.config.RAG_EMBEDDING_MODEL,
        )

        request.app.state.EMBEDDING_FUNCTION = get_embedding_function(
            request.app.state.config.RAG_EMBEDDING_ENGINE,
            request.app.state.config.RAG_EMBEDDING_MODEL,
            request.app.state.ef,
            (
                request.app.state.config.RAG_OPENAI_API_BASE_URL
                if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
                else request.app.state.config.RAG_OLLAMA_BASE_URL
            ),
            (
                request.app.state.config.RAG_OPENAI_API_KEY
                if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
                else request.app.state.config.RAG_OLLAMA_API_KEY
            ),
            request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
        )

        return {
            "status": True,
            "embedding_engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
            "embedding_model": request.app.state.config.RAG_EMBEDDING_MODEL,
            "embedding_batch_size": request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
            "openai_config": {
                "url": request.app.state.config.RAG_OPENAI_API_BASE_URL,
                "key": request.app.state.config.RAG_OPENAI_API_KEY,
            },
            "ollama_config": {
                "url": request.app.state.config.RAG_OLLAMA_BASE_URL,
                "key": request.app.state.config.RAG_OLLAMA_API_KEY,
            },
        }
    except Exception as e:
        log.exception(f"Problem updating embedding model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
class RerankingModelUpdateForm(BaseModel):
    reranking_model: str


@router.post("/reranking/update")
async def update_reranking_config(
    request: Request, form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
):
    log.info(
        f"Updating reranking model: {request.app.state.config.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
    )
    try:
        request.app.state.config.RAG_RERANKING_MODEL = form_data.reranking_model

        try:
            request.app.state.rf = get_rf(
                request.app.state.config.RAG_RERANKING_MODEL,
                True,
            )
        except Exception as e:
            log.error(f"Error loading reranking model: {e}")
            request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = False

        return {
            "status": True,
            "reranking_model": request.app.state.config.RAG_RERANKING_MODEL,
        }
    except Exception as e:
        log.exception(f"Problem updating reranking model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
@router.get("/config")
async def get_rag_config(request: Request, user=Depends(get_admin_user)):
    return {
        "status": True,
        "pdf_extract_images": request.app.state.config.PDF_EXTRACT_IMAGES,
        "RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
        "BYPASS_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL,
        "enable_google_drive_integration": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
        "enable_onedrive_integration": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION,
        "content_extraction": {
            "engine": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
            "tika_server_url": request.app.state.config.TIKA_SERVER_URL,
            "docling_server_url": request.app.state.config.DOCLING_SERVER_URL,
            "document_intelligence_config": {
                "endpoint": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
                "key": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
            },
            "mistral_ocr_config": {
                "api_key": request.app.state.config.MISTRAL_OCR_API_KEY,
            },
        },
        "chunk": {
            "text_splitter": request.app.state.config.TEXT_SPLITTER,
            "chunk_size": request.app.state.config.CHUNK_SIZE,
            "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
        },
        "file": {
            "max_size": request.app.state.config.FILE_MAX_SIZE,
            "max_count": request.app.state.config.FILE_MAX_COUNT,
        },
        "youtube": {
            "language": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
            "translation": request.app.state.YOUTUBE_LOADER_TRANSLATION,
            "proxy_url": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
        },
        "web": {
            "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
            "BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL,
            "search": {
                "enabled": request.app.state.config.ENABLE_RAG_WEB_SEARCH,
                "drive": request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION,
                "onedrive": request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION,
                "engine": request.app.state.config.RAG_WEB_SEARCH_ENGINE,
                "searxng_query_url": request.app.state.config.SEARXNG_QUERY_URL,
                "google_pse_api_key": request.app.state.config.GOOGLE_PSE_API_KEY,
                "google_pse_engine_id": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
                "brave_search_api_key": request.app.state.config.BRAVE_SEARCH_API_KEY,
                "kagi_search_api_key": request.app.state.config.KAGI_SEARCH_API_KEY,
                "mojeek_search_api_key": request.app.state.config.MOJEEK_SEARCH_API_KEY,
                "bocha_search_api_key": request.app.state.config.BOCHA_SEARCH_API_KEY,
                "serpstack_api_key": request.app.state.config.SERPSTACK_API_KEY,
                "serpstack_https": request.app.state.config.SERPSTACK_HTTPS,
                "serper_api_key": request.app.state.config.SERPER_API_KEY,
                "serply_api_key": request.app.state.config.SERPLY_API_KEY,
                "tavily_api_key": request.app.state.config.TAVILY_API_KEY,
                "searchapi_api_key": request.app.state.config.SEARCHAPI_API_KEY,
                "searchapi_engine": request.app.state.config.SEARCHAPI_ENGINE,
                "serpapi_api_key": request.app.state.config.SERPAPI_API_KEY,
                "serpapi_engine": request.app.state.config.SERPAPI_ENGINE,
                "jina_api_key": request.app.state.config.JINA_API_KEY,
                "bing_search_v7_endpoint": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
                "bing_search_v7_subscription_key": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
                "exa_api_key": request.app.state.config.EXA_API_KEY,
                "perplexity_api_key": request.app.state.config.PERPLEXITY_API_KEY,
                "result_count": request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                "trust_env": request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV,
                "concurrent_requests": request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
                "domain_filter_list": request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            },
        },
    }
class FileConfig(BaseModel):
    max_size: Optional[int] = None
    max_count: Optional[int] = None


class DocumentIntelligenceConfigForm(BaseModel):
    endpoint: str
    key: str


class MistralOCRConfigForm(BaseModel):
    api_key: str


class ContentExtractionConfig(BaseModel):
    engine: str = ""
    tika_server_url: Optional[str] = None
    docling_server_url: Optional[str] = None
    document_intelligence_config: Optional[DocumentIntelligenceConfigForm] = None
    mistral_ocr_config: Optional[MistralOCRConfigForm] = None


class ChunkParamUpdateForm(BaseModel):
    text_splitter: Optional[str] = None
    chunk_size: int
    chunk_overlap: int


class YoutubeLoaderConfig(BaseModel):
    language: list[str]
    translation: Optional[str] = None
    proxy_url: str = ""


class WebSearchConfig(BaseModel):
    enabled: bool
    engine: Optional[str] = None
    searxng_query_url: Optional[str] = None
    google_pse_api_key: Optional[str] = None
    google_pse_engine_id: Optional[str] = None
    brave_search_api_key: Optional[str] = None
    kagi_search_api_key: Optional[str] = None
    mojeek_search_api_key: Optional[str] = None
    bocha_search_api_key: Optional[str] = None
    serpstack_api_key: Optional[str] = None
    serpstack_https: Optional[bool] = None
    serper_api_key: Optional[str] = None
    serply_api_key: Optional[str] = None
    tavily_api_key: Optional[str] = None
    searchapi_api_key: Optional[str] = None
    searchapi_engine: Optional[str] = None
    serpapi_api_key: Optional[str] = None
    serpapi_engine: Optional[str] = None
    jina_api_key: Optional[str] = None
    bing_search_v7_endpoint: Optional[str] = None
    bing_search_v7_subscription_key: Optional[str] = None
    exa_api_key: Optional[str] = None
    perplexity_api_key: Optional[str] = None
    result_count: Optional[int] = None
    concurrent_requests: Optional[int] = None
    trust_env: Optional[bool] = None
    domain_filter_list: Optional[List[str]] = []


class WebConfig(BaseModel):
    search: WebSearchConfig
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION: Optional[bool] = None
    BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL: Optional[bool] = None


class ConfigUpdateForm(BaseModel):
    RAG_FULL_CONTEXT: Optional[bool] = None
    BYPASS_EMBEDDING_AND_RETRIEVAL: Optional[bool] = None
    pdf_extract_images: Optional[bool] = None
    enable_google_drive_integration: Optional[bool] = None
    enable_onedrive_integration: Optional[bool] = None
    file: Optional[FileConfig] = None
    content_extraction: Optional[ContentExtractionConfig] = None
    chunk: Optional[ChunkParamUpdateForm] = None
    youtube: Optional[YoutubeLoaderConfig] = None
    web: Optional[WebConfig] = None
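
# Illustrative only: a hypothetical partial payload for POST /config/update, built from
# the ConfigUpdateForm fields above. Sections left out of the payload are not touched
# by the handler below; sections that are present are applied as a whole.
#
#     {
#         "pdf_extract_images": true,
#         "chunk": {"text_splitter": "token", "chunk_size": 1000, "chunk_overlap": 100},
#         "web": {"search": {"enabled": true, "engine": "duckduckgo"}}
#     }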
@router.post("/config/update")
async def update_rag_config(
    request: Request, form_data: ConfigUpdateForm, user=Depends(get_admin_user)
):
    request.app.state.config.PDF_EXTRACT_IMAGES = (
        form_data.pdf_extract_images
        if form_data.pdf_extract_images is not None
        else request.app.state.config.PDF_EXTRACT_IMAGES
    )
    request.app.state.config.RAG_FULL_CONTEXT = (
        form_data.RAG_FULL_CONTEXT
        if form_data.RAG_FULL_CONTEXT is not None
        else request.app.state.config.RAG_FULL_CONTEXT
    )
    request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL = (
        form_data.BYPASS_EMBEDDING_AND_RETRIEVAL
        if form_data.BYPASS_EMBEDDING_AND_RETRIEVAL is not None
        else request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL
    )
    request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION = (
        form_data.enable_google_drive_integration
        if form_data.enable_google_drive_integration is not None
        else request.app.state.config.ENABLE_GOOGLE_DRIVE_INTEGRATION
    )
    request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION = (
        form_data.enable_onedrive_integration
        if form_data.enable_onedrive_integration is not None
        else request.app.state.config.ENABLE_ONEDRIVE_INTEGRATION
    )

    if form_data.file is not None:
        request.app.state.config.FILE_MAX_SIZE = form_data.file.max_size
        request.app.state.config.FILE_MAX_COUNT = form_data.file.max_count

    if form_data.content_extraction is not None:
        log.info(
            f"Updating content extraction: {request.app.state.config.CONTENT_EXTRACTION_ENGINE} to {form_data.content_extraction.engine}"
        )
        request.app.state.config.CONTENT_EXTRACTION_ENGINE = (
            form_data.content_extraction.engine
        )
        request.app.state.config.TIKA_SERVER_URL = (
            form_data.content_extraction.tika_server_url
        )
        request.app.state.config.DOCLING_SERVER_URL = (
            form_data.content_extraction.docling_server_url
        )
        if form_data.content_extraction.document_intelligence_config is not None:
            request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = (
                form_data.content_extraction.document_intelligence_config.endpoint
            )
            request.app.state.config.DOCUMENT_INTELLIGENCE_KEY = (
                form_data.content_extraction.document_intelligence_config.key
            )
        if form_data.content_extraction.mistral_ocr_config is not None:
            request.app.state.config.MISTRAL_OCR_API_KEY = (
                form_data.content_extraction.mistral_ocr_config.api_key
            )

    if form_data.chunk is not None:
        request.app.state.config.TEXT_SPLITTER = form_data.chunk.text_splitter
        request.app.state.config.CHUNK_SIZE = form_data.chunk.chunk_size
        request.app.state.config.CHUNK_OVERLAP = form_data.chunk.chunk_overlap

    if form_data.youtube is not None:
        request.app.state.config.YOUTUBE_LOADER_LANGUAGE = form_data.youtube.language
        request.app.state.config.YOUTUBE_LOADER_PROXY_URL = form_data.youtube.proxy_url
        request.app.state.YOUTUBE_LOADER_TRANSLATION = form_data.youtube.translation

    if form_data.web is not None:
        request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
            # Note: When UI "Bypass SSL verification for Websites"=True then ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION=False
            form_data.web.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
        )

        request.app.state.config.ENABLE_RAG_WEB_SEARCH = form_data.web.search.enabled
        request.app.state.config.RAG_WEB_SEARCH_ENGINE = form_data.web.search.engine

        request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL = (
            form_data.web.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL
        )

        request.app.state.config.SEARXNG_QUERY_URL = (
            form_data.web.search.searxng_query_url
        )
        request.app.state.config.GOOGLE_PSE_API_KEY = (
            form_data.web.search.google_pse_api_key
        )
        request.app.state.config.GOOGLE_PSE_ENGINE_ID = (
            form_data.web.search.google_pse_engine_id
        )
        request.app.state.config.BRAVE_SEARCH_API_KEY = (
            form_data.web.search.brave_search_api_key
        )
        request.app.state.config.KAGI_SEARCH_API_KEY = (
            form_data.web.search.kagi_search_api_key
        )
        request.app.state.config.MOJEEK_SEARCH_API_KEY = (
            form_data.web.search.mojeek_search_api_key
        )
        request.app.state.config.BOCHA_SEARCH_API_KEY = (
            form_data.web.search.bocha_search_api_key
        )
        request.app.state.config.SERPSTACK_API_KEY = (
            form_data.web.search.serpstack_api_key
        )
        request.app.state.config.SERPSTACK_HTTPS = form_data.web.search.serpstack_https
        request.app.state.config.SERPER_API_KEY = form_data.web.search.serper_api_key
        request.app.state.config.SERPLY_API_KEY = form_data.web.search.serply_api_key
        request.app.state.config.TAVILY_API_KEY = form_data.web.search.tavily_api_key
        request.app.state.config.SEARCHAPI_API_KEY = (
            form_data.web.search.searchapi_api_key
        )
        request.app.state.config.SEARCHAPI_ENGINE = (
            form_data.web.search.searchapi_engine
        )
        request.app.state.config.SERPAPI_API_KEY = form_data.web.search.serpapi_api_key
        request.app.state.config.SERPAPI_ENGINE = form_data.web.search.serpapi_engine
        request.app.state.config.JINA_API_KEY = form_data.web.search.jina_api_key
        request.app.state.config.BING_SEARCH_V7_ENDPOINT = (
            form_data.web.search.bing_search_v7_endpoint
        )
        request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = (
            form_data.web.search.bing_search_v7_subscription_key
        )
        request.app.state.config.EXA_API_KEY = form_data.web.search.exa_api_key
        request.app.state.config.PERPLEXITY_API_KEY = (
            form_data.web.search.perplexity_api_key
        )
        request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = (
            form_data.web.search.result_count
        )
        request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = (
            form_data.web.search.concurrent_requests
        )
        request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV = (
            form_data.web.search.trust_env
        )
        request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = (
            form_data.web.search.domain_filter_list
        )

    return {
        "status": True,
        "pdf_extract_images": request.app.state.config.PDF_EXTRACT_IMAGES,
        "RAG_FULL_CONTEXT": request.app.state.config.RAG_FULL_CONTEXT,
        "BYPASS_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL,
        "file": {
            "max_size": request.app.state.config.FILE_MAX_SIZE,
            "max_count": request.app.state.config.FILE_MAX_COUNT,
        },
        "content_extraction": {
            "engine": request.app.state.config.CONTENT_EXTRACTION_ENGINE,
            "tika_server_url": request.app.state.config.TIKA_SERVER_URL,
            "docling_server_url": request.app.state.config.DOCLING_SERVER_URL,
            "document_intelligence_config": {
                "endpoint": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
                "key": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
            },
            "mistral_ocr_config": {
                "api_key": request.app.state.config.MISTRAL_OCR_API_KEY,
            },
        },
        "chunk": {
            "text_splitter": request.app.state.config.TEXT_SPLITTER,
            "chunk_size": request.app.state.config.CHUNK_SIZE,
            "chunk_overlap": request.app.state.config.CHUNK_OVERLAP,
        },
        "youtube": {
            "language": request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
            "proxy_url": request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
            "translation": request.app.state.YOUTUBE_LOADER_TRANSLATION,
        },
        "web": {
            "ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION": request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
            "BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL": request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL,
            "search": {
                "enabled": request.app.state.config.ENABLE_RAG_WEB_SEARCH,
                "engine": request.app.state.config.RAG_WEB_SEARCH_ENGINE,
                "searxng_query_url": request.app.state.config.SEARXNG_QUERY_URL,
                "google_pse_api_key": request.app.state.config.GOOGLE_PSE_API_KEY,
                "google_pse_engine_id": request.app.state.config.GOOGLE_PSE_ENGINE_ID,
                "brave_search_api_key": request.app.state.config.BRAVE_SEARCH_API_KEY,
                "kagi_search_api_key": request.app.state.config.KAGI_SEARCH_API_KEY,
                "mojeek_search_api_key": request.app.state.config.MOJEEK_SEARCH_API_KEY,
                "bocha_search_api_key": request.app.state.config.BOCHA_SEARCH_API_KEY,
                "serpstack_api_key": request.app.state.config.SERPSTACK_API_KEY,
                "serpstack_https": request.app.state.config.SERPSTACK_HTTPS,
                "serper_api_key": request.app.state.config.SERPER_API_KEY,
                "serply_api_key": request.app.state.config.SERPLY_API_KEY,
                "searchapi_api_key": request.app.state.config.SEARCHAPI_API_KEY,
                "searchapi_engine": request.app.state.config.SEARCHAPI_ENGINE,
                "serpapi_api_key": request.app.state.config.SERPAPI_API_KEY,
                "serpapi_engine": request.app.state.config.SERPAPI_ENGINE,
                "tavily_api_key": request.app.state.config.TAVILY_API_KEY,
                "jina_api_key": request.app.state.config.JINA_API_KEY,
                "bing_search_v7_endpoint": request.app.state.config.BING_SEARCH_V7_ENDPOINT,
                "bing_search_v7_subscription_key": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
                "exa_api_key": request.app.state.config.EXA_API_KEY,
                "perplexity_api_key": request.app.state.config.PERPLEXITY_API_KEY,
                "result_count": request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                "concurrent_requests": request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
                "trust_env": request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV,
                "domain_filter_list": request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            },
        },
    }
@router.get("/template")
async def get_rag_template(request: Request, user=Depends(get_verified_user)):
    return {
        "status": True,
        "template": request.app.state.config.RAG_TEMPLATE,
    }


@router.get("/query/settings")
async def get_query_settings(request: Request, user=Depends(get_admin_user)):
    return {
        "status": True,
        "template": request.app.state.config.RAG_TEMPLATE,
        "k": request.app.state.config.TOP_K,
        "k_reranker": request.app.state.config.TOP_K_RERANKER,
        "r": request.app.state.config.RELEVANCE_THRESHOLD,
        "hybrid": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
    }


class QuerySettingsForm(BaseModel):
    k: Optional[int] = None
    k_reranker: Optional[int] = None
    r: Optional[float] = None
    template: Optional[str] = None
    hybrid: Optional[bool] = None


@router.post("/query/settings/update")
async def update_query_settings(
    request: Request, form_data: QuerySettingsForm, user=Depends(get_admin_user)
):
    request.app.state.config.RAG_TEMPLATE = form_data.template
    request.app.state.config.TOP_K = form_data.k if form_data.k else 4
    request.app.state.config.TOP_K_RERANKER = form_data.k_reranker or 4
    request.app.state.config.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0

    request.app.state.config.ENABLE_RAG_HYBRID_SEARCH = (
        form_data.hybrid if form_data.hybrid else False
    )
    if not request.app.state.config.ENABLE_RAG_HYBRID_SEARCH:
        request.app.state.rf = None

    return {
        "status": True,
        "template": request.app.state.config.RAG_TEMPLATE,
        "k": request.app.state.config.TOP_K,
        "k_reranker": request.app.state.config.TOP_K_RERANKER,
        "r": request.app.state.config.RELEVANCE_THRESHOLD,
        "hybrid": request.app.state.config.ENABLE_RAG_HYBRID_SEARCH,
    }
####################################
#
# Document process and retrieval
#
####################################


def save_docs_to_vector_db(
    request: Request,
    docs,
    collection_name,
    metadata: Optional[dict] = None,
    overwrite: bool = False,
    split: bool = True,
    add: bool = False,
    user=None,
) -> bool:
    def _get_docs_info(docs: list[Document]) -> str:
        docs_info = set()

        # Trying to select relevant metadata identifying the document.
        for doc in docs:
            metadata = getattr(doc, "metadata", {})
            doc_name = metadata.get("name", "")
            if not doc_name:
                doc_name = metadata.get("title", "")
            if not doc_name:
                doc_name = metadata.get("source", "")
            if doc_name:
                docs_info.add(doc_name)

        return ", ".join(docs_info)

    log.info(
        f"save_docs_to_vector_db: document {_get_docs_info(docs)} {collection_name}"
    )

    # Check if entries with the same hash (metadata.hash) already exist
    if metadata and "hash" in metadata:
        result = VECTOR_DB_CLIENT.query(
            collection_name=collection_name,
            filter={"hash": metadata["hash"]},
        )

        if result is not None:
            existing_doc_ids = result.ids[0]
            if existing_doc_ids:
                log.info(f"Document with hash {metadata['hash']} already exists")
                raise ValueError(ERROR_MESSAGES.DUPLICATE_CONTENT)

    if split:
        if request.app.state.config.TEXT_SPLITTER in ["", "character"]:
            text_splitter = RecursiveCharacterTextSplitter(
                chunk_size=request.app.state.config.CHUNK_SIZE,
                chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
                add_start_index=True,
            )
        elif request.app.state.config.TEXT_SPLITTER == "token":
            log.info(
                f"Using token text splitter: {request.app.state.config.TIKTOKEN_ENCODING_NAME}"
            )
            tiktoken.get_encoding(str(request.app.state.config.TIKTOKEN_ENCODING_NAME))
            text_splitter = TokenTextSplitter(
                encoding_name=str(request.app.state.config.TIKTOKEN_ENCODING_NAME),
                chunk_size=request.app.state.config.CHUNK_SIZE,
                chunk_overlap=request.app.state.config.CHUNK_OVERLAP,
                add_start_index=True,
            )
        else:
            raise ValueError(ERROR_MESSAGES.DEFAULT("Invalid text splitter"))

        docs = text_splitter.split_documents(docs)

    if len(docs) == 0:
        raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)

    texts = [doc.page_content for doc in docs]
    metadatas = [
        {
            **doc.metadata,
            **(metadata if metadata else {}),
            "embedding_config": json.dumps(
                {
                    "engine": request.app.state.config.RAG_EMBEDDING_ENGINE,
                    "model": request.app.state.config.RAG_EMBEDDING_MODEL,
                }
            ),
        }
        for doc in docs
    ]

    # ChromaDB does not like datetime formats
    # for meta-data so convert them to string.
    for metadata in metadatas:
        for key, value in metadata.items():
            if (
                isinstance(value, datetime)
                or isinstance(value, list)
                or isinstance(value, dict)
            ):
                metadata[key] = str(value)

    try:
        if VECTOR_DB_CLIENT.has_collection(collection_name=collection_name):
            log.info(f"collection {collection_name} already exists")

            if overwrite:
                VECTOR_DB_CLIENT.delete_collection(collection_name=collection_name)
                log.info(f"deleting existing collection {collection_name}")
            elif add is False:
                log.info(
                    f"collection {collection_name} already exists, overwrite is False and add is False"
                )
                return True

        log.info(f"adding to collection {collection_name}")
        embedding_function = get_embedding_function(
            request.app.state.config.RAG_EMBEDDING_ENGINE,
            request.app.state.config.RAG_EMBEDDING_MODEL,
            request.app.state.ef,
            (
                request.app.state.config.RAG_OPENAI_API_BASE_URL
                if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
                else request.app.state.config.RAG_OLLAMA_BASE_URL
            ),
            (
                request.app.state.config.RAG_OPENAI_API_KEY
                if request.app.state.config.RAG_EMBEDDING_ENGINE == "openai"
                else request.app.state.config.RAG_OLLAMA_API_KEY
            ),
            request.app.state.config.RAG_EMBEDDING_BATCH_SIZE,
        )

        embeddings = embedding_function(
            list(map(lambda x: x.replace("\n", " "), texts)),
            prefix=RAG_EMBEDDING_CONTENT_PREFIX,
            user=user,
        )

        items = [
            {
                "id": str(uuid.uuid4()),
                "text": text,
                "vector": embeddings[idx],
                "metadata": metadatas[idx],
            }
            for idx, text in enumerate(texts)
        ]

        VECTOR_DB_CLIENT.insert(
            collection_name=collection_name,
            items=items,
        )

        return True
    except Exception as e:
        log.exception(e)
        raise e
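
# Illustrative only: a minimal sketch of calling save_docs_to_vector_db directly,
# assuming `request` is the current FastAPI Request and `user` a verified user object
# (this mirrors how the /process/* handlers below use it). The collection name and
# file id are hypothetical.
#
#     docs = [Document(page_content="hello world", metadata={"name": "example.txt"})]
#     save_docs_to_vector_db(
#         request,
#         docs,
#         collection_name="example-collection",
#         metadata={"file_id": "abc123", "name": "example.txt",
#                   "hash": calculate_sha256_string("hello world")},
#         overwrite=True,
#         user=user,
#     )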
class ProcessFileForm(BaseModel):
    file_id: str
    content: Optional[str] = None
    collection_name: Optional[str] = None


@router.post("/process/file")
def process_file(
    request: Request,
    form_data: ProcessFileForm,
    user=Depends(get_verified_user),
):
    try:
        file = Files.get_file_by_id(form_data.file_id)

        collection_name = form_data.collection_name
        if collection_name is None:
            collection_name = f"file-{file.id}"

        if form_data.content:
            # Update the content in the file
            # Usage: /files/{file_id}/data/content/update
            try:
                # /files/{file_id}/data/content/update
                VECTOR_DB_CLIENT.delete_collection(collection_name=f"file-{file.id}")
            except:
                # Audio file upload pipeline
                pass

            docs = [
                Document(
                    page_content=form_data.content.replace("<br/>", "\n"),
                    metadata={
                        **file.meta,
                        "name": file.filename,
                        "created_by": file.user_id,
                        "file_id": file.id,
                        "source": file.filename,
                    },
                )
            ]

            text_content = form_data.content
        elif form_data.collection_name:
            # Check if the file has already been processed and save the content
            # Usage: /knowledge/{id}/file/add, /knowledge/{id}/file/update
            result = VECTOR_DB_CLIENT.query(
                collection_name=f"file-{file.id}", filter={"file_id": file.id}
            )

            if result is not None and len(result.ids[0]) > 0:
                docs = [
                    Document(
                        page_content=result.documents[0][idx],
                        metadata=result.metadatas[0][idx],
                    )
                    for idx, id in enumerate(result.ids[0])
                ]
            else:
                docs = [
                    Document(
                        page_content=file.data.get("content", ""),
                        metadata={
                            **file.meta,
                            "name": file.filename,
                            "created_by": file.user_id,
                            "file_id": file.id,
                            "source": file.filename,
                        },
                    )
                ]

            text_content = file.data.get("content", "")
        else:
            # Process the file and save the content
            # Usage: /files/
            file_path = file.path
            if file_path:
                file_path = Storage.get_file(file_path)
                loader = Loader(
                    engine=request.app.state.config.CONTENT_EXTRACTION_ENGINE,
                    TIKA_SERVER_URL=request.app.state.config.TIKA_SERVER_URL,
                    DOCLING_SERVER_URL=request.app.state.config.DOCLING_SERVER_URL,
                    PDF_EXTRACT_IMAGES=request.app.state.config.PDF_EXTRACT_IMAGES,
                    DOCUMENT_INTELLIGENCE_ENDPOINT=request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
                    DOCUMENT_INTELLIGENCE_KEY=request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
                    MISTRAL_OCR_API_KEY=request.app.state.config.MISTRAL_OCR_API_KEY,
                )
                docs = loader.load(
                    file.filename, file.meta.get("content_type"), file_path
                )

                docs = [
                    Document(
                        page_content=doc.page_content,
                        metadata={
                            **doc.metadata,
                            "name": file.filename,
                            "created_by": file.user_id,
                            "file_id": file.id,
                            "source": file.filename,
                        },
                    )
                    for doc in docs
                ]
            else:
                docs = [
                    Document(
                        page_content=file.data.get("content", ""),
                        metadata={
                            **file.meta,
                            "name": file.filename,
                            "created_by": file.user_id,
                            "file_id": file.id,
                            "source": file.filename,
                        },
                    )
                ]
            text_content = " ".join([doc.page_content for doc in docs])

        log.debug(f"text_content: {text_content}")
        Files.update_file_data_by_id(
            file.id,
            {"content": text_content},
        )

        hash = calculate_sha256_string(text_content)
        Files.update_file_hash_by_id(file.id, hash)

        if not request.app.state.config.BYPASS_EMBEDDING_AND_RETRIEVAL:
            try:
                result = save_docs_to_vector_db(
                    request,
                    docs=docs,
                    collection_name=collection_name,
                    metadata={
                        "file_id": file.id,
                        "name": file.filename,
                        "hash": hash,
                    },
                    add=(True if form_data.collection_name else False),
                    user=user,
                )

                if result:
                    Files.update_file_metadata_by_id(
                        file.id,
                        {
                            "collection_name": collection_name,
                        },
                    )

                    return {
                        "status": True,
                        "collection_name": collection_name,
                        "filename": file.filename,
                        "content": text_content,
                    }
            except Exception as e:
                raise e
        else:
            return {
                "status": True,
                "collection_name": None,
                "filename": file.filename,
                "content": text_content,
            }
    except Exception as e:
        log.exception(e)
        if "No pandoc was found" in str(e):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
            )
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=str(e),
            )
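
# Illustrative only: hypothetical request bodies for the three ways /process/file is
# typically called (the file id "abc123" is made up for the example):
#
#     {"file_id": "abc123"}                                   -> extract + index into file-abc123
#     {"file_id": "abc123", "collection_name": "knowledge-1"} -> add existing file docs to a knowledge collection
#     {"file_id": "abc123", "content": "edited text"}         -> re-index user-edited content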
class ProcessTextForm(BaseModel):
    name: str
    content: str
    collection_name: Optional[str] = None


@router.post("/process/text")
def process_text(
    request: Request,
    form_data: ProcessTextForm,
    user=Depends(get_verified_user),
):
    collection_name = form_data.collection_name
    if collection_name is None:
        collection_name = calculate_sha256_string(form_data.content)

    docs = [
        Document(
            page_content=form_data.content,
            metadata={"name": form_data.name, "created_by": user.id},
        )
    ]
    text_content = form_data.content
    log.debug(f"text_content: {text_content}")

    result = save_docs_to_vector_db(request, docs, collection_name, user=user)
    if result:
        return {
            "status": True,
            "collection_name": collection_name,
            "content": text_content,
        }
    else:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(),
        )
@router.post("/process/youtube")
def process_youtube_video(
    request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
):
    try:
        collection_name = form_data.collection_name
        if not collection_name:
            collection_name = calculate_sha256_string(form_data.url)[:63]

        loader = YoutubeLoader(
            form_data.url,
            language=request.app.state.config.YOUTUBE_LOADER_LANGUAGE,
            proxy_url=request.app.state.config.YOUTUBE_LOADER_PROXY_URL,
        )

        docs = loader.load()
        content = " ".join([doc.page_content for doc in docs])
        log.debug(f"text_content: {content}")

        save_docs_to_vector_db(
            request, docs, collection_name, overwrite=True, user=user
        )

        return {
            "status": True,
            "collection_name": collection_name,
            "filename": form_data.url,
            "file": {
                "data": {
                    "content": content,
                },
                "meta": {
                    "name": form_data.url,
                },
            },
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


@router.post("/process/web")
def process_web(
    request: Request, form_data: ProcessUrlForm, user=Depends(get_verified_user)
):
    try:
        collection_name = form_data.collection_name
        if not collection_name:
            collection_name = calculate_sha256_string(form_data.url)[:63]

        loader = get_web_loader(
            form_data.url,
            verify_ssl=request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
            requests_per_second=request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
        )
        docs = loader.load()
        content = " ".join([doc.page_content for doc in docs])
        log.debug(f"text_content: {content}")

        if not request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL:
            save_docs_to_vector_db(
                request, docs, collection_name, overwrite=True, user=user
            )
        else:
            collection_name = None

        return {
            "status": True,
            "collection_name": collection_name,
            "filename": form_data.url,
            "file": {
                "data": {
                    "content": content,
                },
                "meta": {
                    "name": form_data.url,
                    "source": form_data.url,
                },
            },
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
  1084. def search_web(request: Request, engine: str, query: str) -> list[SearchResult]:
  1085. """Search the web using a search engine and return the results as a list of SearchResult objects.
  1086. Will look for a search engine API key in environment variables in the following order:
  1087. - SEARXNG_QUERY_URL
  1088. - GOOGLE_PSE_API_KEY + GOOGLE_PSE_ENGINE_ID
  1089. - BRAVE_SEARCH_API_KEY
  1090. - KAGI_SEARCH_API_KEY
  1091. - MOJEEK_SEARCH_API_KEY
  1092. - BOCHA_SEARCH_API_KEY
  1093. - SERPSTACK_API_KEY
  1094. - SERPER_API_KEY
  1095. - SERPLY_API_KEY
  1096. - TAVILY_API_KEY
  1097. - EXA_API_KEY
  1098. - PERPLEXITY_API_KEY
  1099. - SEARCHAPI_API_KEY + SEARCHAPI_ENGINE (by default `google`)
  1100. - SERPAPI_API_KEY + SERPAPI_ENGINE (by default `google`)
  1101. Args:
  1102. query (str): The query to search for
  1103. """
  1104. # TODO: add playwright to search the web
  1105. if engine == "searxng":
  1106. if request.app.state.config.SEARXNG_QUERY_URL:
  1107. return search_searxng(
  1108. request.app.state.config.SEARXNG_QUERY_URL,
  1109. query,
  1110. request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
  1111. request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
  1112. )
  1113. else:
  1114. raise Exception("No SEARXNG_QUERY_URL found in environment variables")
  1115. elif engine == "google_pse":
  1116. if (
  1117. request.app.state.config.GOOGLE_PSE_API_KEY
  1118. and request.app.state.config.GOOGLE_PSE_ENGINE_ID
  1119. ):
  1120. return search_google_pse(
  1121. request.app.state.config.GOOGLE_PSE_API_KEY,
  1122. request.app.state.config.GOOGLE_PSE_ENGINE_ID,
  1123. query,
  1124. request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
  1125. request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
  1126. )
  1127. else:
  1128. raise Exception(
  1129. "No GOOGLE_PSE_API_KEY or GOOGLE_PSE_ENGINE_ID found in environment variables"
  1130. )
  1131. elif engine == "brave":
  1132. if request.app.state.config.BRAVE_SEARCH_API_KEY:
  1133. return search_brave(
  1134. request.app.state.config.BRAVE_SEARCH_API_KEY,
  1135. query,
  1136. request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
  1137. request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
  1138. )
  1139. else:
  1140. raise Exception("No BRAVE_SEARCH_API_KEY found in environment variables")
  1141. elif engine == "kagi":
  1142. if request.app.state.config.KAGI_SEARCH_API_KEY:
  1143. return search_kagi(
  1144. request.app.state.config.KAGI_SEARCH_API_KEY,
  1145. query,
  1146. request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
  1147. request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
  1148. )
  1149. else:
  1150. raise Exception("No KAGI_SEARCH_API_KEY found in environment variables")
  1151. elif engine == "mojeek":
  1152. if request.app.state.config.MOJEEK_SEARCH_API_KEY:
  1153. return search_mojeek(
  1154. request.app.state.config.MOJEEK_SEARCH_API_KEY,
  1155. query,
  1156. request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No MOJEEK_SEARCH_API_KEY found in environment variables")
    elif engine == "bocha":
        if request.app.state.config.BOCHA_SEARCH_API_KEY:
            return search_bocha(
                request.app.state.config.BOCHA_SEARCH_API_KEY,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No BOCHA_SEARCH_API_KEY found in environment variables")
    elif engine == "serpstack":
        if request.app.state.config.SERPSTACK_API_KEY:
            return search_serpstack(
                request.app.state.config.SERPSTACK_API_KEY,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
                https_enabled=request.app.state.config.SERPSTACK_HTTPS,
            )
        else:
            raise Exception("No SERPSTACK_API_KEY found in environment variables")
    elif engine == "serper":
        if request.app.state.config.SERPER_API_KEY:
            return search_serper(
                request.app.state.config.SERPER_API_KEY,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No SERPER_API_KEY found in environment variables")
    elif engine == "serply":
        if request.app.state.config.SERPLY_API_KEY:
            return search_serply(
                request.app.state.config.SERPLY_API_KEY,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No SERPLY_API_KEY found in environment variables")
    elif engine == "duckduckgo":
        return search_duckduckgo(
            query,
            request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
            request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
        )
    elif engine == "tavily":
        if request.app.state.config.TAVILY_API_KEY:
            return search_tavily(
                request.app.state.config.TAVILY_API_KEY,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No TAVILY_API_KEY found in environment variables")
    elif engine == "searchapi":
        if request.app.state.config.SEARCHAPI_API_KEY:
            return search_searchapi(
                request.app.state.config.SEARCHAPI_API_KEY,
                request.app.state.config.SEARCHAPI_ENGINE,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No SEARCHAPI_API_KEY found in environment variables")
    elif engine == "serpapi":
        if request.app.state.config.SERPAPI_API_KEY:
            return search_serpapi(
                request.app.state.config.SERPAPI_API_KEY,
                request.app.state.config.SERPAPI_ENGINE,
                query,
                request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
                request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
            )
        else:
            raise Exception("No SERPAPI_API_KEY found in environment variables")
    elif engine == "jina":
        return search_jina(
            request.app.state.config.JINA_API_KEY,
            query,
            request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
        )
    elif engine == "bing":
        return search_bing(
            request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
            request.app.state.config.BING_SEARCH_V7_ENDPOINT,
            str(DEFAULT_LOCALE),
            query,
            request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
            request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
        )
    elif engine == "exa":
        return search_exa(
            request.app.state.config.EXA_API_KEY,
            query,
            request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
            request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
        )
    elif engine == "perplexity":
        return search_perplexity(
            request.app.state.config.PERPLEXITY_API_KEY,
            query,
            request.app.state.config.RAG_WEB_SEARCH_RESULT_COUNT,
            request.app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
        )
    else:
        raise Exception("No search engine API key found in environment variables")
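
# Note on the endpoint below: /process/web/search runs search_web() with the
# configured RAG_WEB_SEARCH_ENGINE, loads the returned result URLs via
# get_web_loader(), and either returns the raw documents (when
# BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL is enabled) or embeds them into a
# "web-search-<sha256(query)>" collection.
#
# Illustrative request (a sketch only; the query value is made up, and an empty
# collection_name lets the handler derive one from the query hash):
#
#   POST /process/web/search
#   {"query": "open source rag pipelines", "collection_name": ""}
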
@router.post("/process/web/search")
async def process_web_search(
    request: Request, form_data: SearchForm, user=Depends(get_verified_user)
):
    try:
        logging.info(
            f"trying to web search with {request.app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query}"
        )
        web_results = search_web(
            request, request.app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query
        )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.WEB_SEARCH_ERROR(e),
        )

    log.debug(f"web_results: {web_results}")

    try:
        collection_name = form_data.collection_name
        if collection_name == "" or collection_name is None:
            collection_name = f"web-search-{calculate_sha256_string(form_data.query)}"[
                :63
            ]

        urls = [result.link for result in web_results]
        loader = get_web_loader(
            urls,
            verify_ssl=request.app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
            requests_per_second=request.app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
            trust_env=request.app.state.config.RAG_WEB_SEARCH_TRUST_ENV,
        )
        docs = await loader.aload()

        if request.app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL:
            return {
                "status": True,
                "collection_name": None,
                "filenames": urls,
                "docs": [
                    {
                        "content": doc.page_content,
                        "metadata": doc.metadata,
                    }
                    for doc in docs
                ],
                "loaded_count": len(docs),
            }
        else:
            await run_in_threadpool(
                save_docs_to_vector_db,
                request,
                docs,
                collection_name,
                overwrite=True,
                user=user,
            )

            return {
                "status": True,
                "collection_name": collection_name,
                "filenames": urls,
                "loaded_count": len(docs),
            }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
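
# The two query endpoints below differ only in scope: /query/doc targets a single
# collection, while /query/collection fans the same query out across several.
# Both switch between hybrid search (embedding retrieval plus the configured
# reranking function) and plain embedding search via ENABLE_RAG_HYBRID_SEARCH,
# falling back to TOP_K / TOP_K_RERANKER / RELEVANCE_THRESHOLD when the form
# omits k, k_reranker, or r.
#
# Illustrative request (a sketch only; the collection name and query are made up):
#
#   POST /query/doc
#   {"collection_name": "file-abc123", "query": "what does the report conclude?", "k": 5}
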
class QueryDocForm(BaseModel):
    collection_name: str
    query: str
    k: Optional[int] = None
    k_reranker: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None


@router.post("/query/doc")
def query_doc_handler(
    request: Request,
    form_data: QueryDocForm,
    user=Depends(get_verified_user),
):
    try:
        if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH:
            return query_doc_with_hybrid_search(
                collection_name=form_data.collection_name,
                query=form_data.query,
                embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
                    query, prefix=prefix, user=user
                ),
                k=form_data.k if form_data.k else request.app.state.config.TOP_K,
                reranking_function=request.app.state.rf,
                k_reranker=form_data.k_reranker
                or request.app.state.config.TOP_K_RERANKER,
                r=(
                    form_data.r
                    if form_data.r
                    else request.app.state.config.RELEVANCE_THRESHOLD
                ),
                user=user,
            )
        else:
            return query_doc(
                collection_name=form_data.collection_name,
                query_embedding=request.app.state.EMBEDDING_FUNCTION(
                    form_data.query, prefix=RAG_EMBEDDING_QUERY_PREFIX, user=user
                ),
                k=form_data.k if form_data.k else request.app.state.config.TOP_K,
                user=user,
            )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
class QueryCollectionsForm(BaseModel):
    collection_names: list[str]
    query: str
    k: Optional[int] = None
    k_reranker: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None


@router.post("/query/collection")
def query_collection_handler(
    request: Request,
    form_data: QueryCollectionsForm,
    user=Depends(get_verified_user),
):
    try:
        if request.app.state.config.ENABLE_RAG_HYBRID_SEARCH:
            return query_collection_with_hybrid_search(
                collection_names=form_data.collection_names,
                queries=[form_data.query],
                embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
                    query, prefix=prefix, user=user
                ),
                k=form_data.k if form_data.k else request.app.state.config.TOP_K,
                reranking_function=request.app.state.rf,
                k_reranker=form_data.k_reranker
                or request.app.state.config.TOP_K_RERANKER,
                r=(
                    form_data.r
                    if form_data.r
                    else request.app.state.config.RELEVANCE_THRESHOLD
                ),
            )
        else:
            return query_collection(
                collection_names=form_data.collection_names,
                queries=[form_data.query],
                embedding_function=lambda query, prefix: request.app.state.EMBEDDING_FUNCTION(
                    query, prefix=prefix, user=user
                ),
                k=form_data.k if form_data.k else request.app.state.config.TOP_K,
            )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
####################################
#
# Vector DB operations
#
####################################


class DeleteForm(BaseModel):
    collection_name: str
    file_id: str
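
# Note: the delete endpoint below removes vector entries by the file's content
# hash (metadata={"hash": ...}) within the given collection, not by file_id.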
@router.post("/delete")
def delete_entries_from_collection(form_data: DeleteForm, user=Depends(get_admin_user)):
    try:
        if VECTOR_DB_CLIENT.has_collection(collection_name=form_data.collection_name):
            file = Files.get_file_by_id(form_data.file_id)
            hash = file.hash

            VECTOR_DB_CLIENT.delete(
                collection_name=form_data.collection_name,
                metadata={"hash": hash},
            )
            return {"status": True}
        else:
            return {"status": False}
    except Exception as e:
        log.exception(e)
        return {"status": False}
@router.post("/reset/db")
def reset_vector_db(user=Depends(get_admin_user)):
    VECTOR_DB_CLIENT.reset()
    Knowledges.delete_all_knowledge()


@router.post("/reset/uploads")
def reset_upload_dir(user=Depends(get_admin_user)) -> bool:
    folder = f"{UPLOAD_DIR}"
    try:
        # Check if the directory exists
        if os.path.exists(folder):
            # Iterate over all the files and directories in the specified directory
            for filename in os.listdir(folder):
                file_path = os.path.join(folder, filename)
                try:
                    if os.path.isfile(file_path) or os.path.islink(file_path):
                        os.unlink(file_path)  # Remove the file or link
                    elif os.path.isdir(file_path):
                        shutil.rmtree(file_path)  # Remove the directory
                except Exception as e:
                    log.exception(f"Failed to delete {file_path}. Reason: {e}")
        else:
            log.warning(f"The directory {folder} does not exist")
    except Exception as e:
        log.exception(f"Failed to process the directory {folder}. Reason: {e}")
    return True
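
# Dev-only helper: when ENV == "dev", GET /ef/{text} returns the raw embedding
# vector for the given text using the query prefix (e.g. GET /ef/hello), which
# is handy for sanity-checking the active embedding model without a full query.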
if ENV == "dev":

    @router.get("/ef/{text}")
    async def get_embeddings(request: Request, text: Optional[str] = "Hello World!"):
        return {
            "result": request.app.state.EMBEDDING_FUNCTION(
                text, prefix=RAG_EMBEDDING_QUERY_PREFIX
            )
        }
class BatchProcessFilesForm(BaseModel):
    files: List[FileModel]
    collection_name: str


class BatchProcessFilesResult(BaseModel):
    file_id: str
    status: str
    error: Optional[str] = None


class BatchProcessFilesResponse(BaseModel):
    results: List[BatchProcessFilesResult]
    errors: List[BatchProcessFilesResult]
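
# The batch endpoint below works in two phases: first each file's stored
# "content" is wrapped in a Document (updating the file's hash and data along
# the way), then all prepared documents are written to the target collection in
# a single save_docs_to_vector_db(..., add=True) call, and each prepared file's
# metadata is stamped with the collection_name on success. Per-file failures are
# collected in `errors` without aborting the rest of the batch.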
@router.post("/process/files/batch")
def process_files_batch(
    request: Request,
    form_data: BatchProcessFilesForm,
    user=Depends(get_verified_user),
) -> BatchProcessFilesResponse:
    """
    Process a batch of files and save them to the vector database.
    """
    results: List[BatchProcessFilesResult] = []
    errors: List[BatchProcessFilesResult] = []
    collection_name = form_data.collection_name

    # Prepare all documents first
    all_docs: List[Document] = []
    for file in form_data.files:
        try:
            text_content = file.data.get("content", "")

            docs: List[Document] = [
                Document(
                    page_content=text_content.replace("<br/>", "\n"),
                    metadata={
                        **file.meta,
                        "name": file.filename,
                        "created_by": file.user_id,
                        "file_id": file.id,
                        "source": file.filename,
                    },
                )
            ]

            hash = calculate_sha256_string(text_content)
            Files.update_file_hash_by_id(file.id, hash)
            Files.update_file_data_by_id(file.id, {"content": text_content})

            all_docs.extend(docs)
            results.append(BatchProcessFilesResult(file_id=file.id, status="prepared"))
        except Exception as e:
            log.error(f"process_files_batch: Error processing file {file.id}: {str(e)}")
            errors.append(
                BatchProcessFilesResult(file_id=file.id, status="failed", error=str(e))
            )

    # Save all documents in one batch
    if all_docs:
        try:
            save_docs_to_vector_db(
                request=request,
                docs=all_docs,
                collection_name=collection_name,
                add=True,
                user=user,
            )

            # Update all files with collection name
            for result in results:
                Files.update_file_metadata_by_id(
                    result.file_id, {"collection_name": collection_name}
                )
                result.status = "completed"
        except Exception as e:
            log.error(
                f"process_files_batch: Error saving documents to vector DB: {str(e)}"
            )
            for result in results:
                result.status = "failed"
                errors.append(
                    BatchProcessFilesResult(
                        file_id=result.file_id, status="failed", error=str(e)
                    )
                )

    return BatchProcessFilesResponse(results=results, errors=errors)