Chat.svelte
<script lang="ts">
    import { v4 as uuidv4 } from 'uuid';
    import { toast } from 'svelte-sonner';
    import mermaid from 'mermaid';
    import { getContext, onDestroy, onMount, tick } from 'svelte';
    import { goto } from '$app/navigation';
    import { page } from '$app/stores';

    import type { Unsubscriber, Writable } from 'svelte/store';
    import type { i18n as i18nType } from 'i18next';

    import { WEBUI_BASE_URL } from '$lib/constants';
    import {
        chatId,
        chats,
        config,
        type Model,
        models,
        settings,
        showSidebar,
        WEBUI_NAME,
        banners,
        user,
        socket,
        showCallOverlay,
        currentChatPage,
        temporaryChatEnabled
    } from '$lib/stores';
    import {
        convertMessagesToHistory,
        copyToClipboard,
        getMessageContentParts,
        extractSentencesForAudio,
        promptTemplate,
        splitStream
    } from '$lib/utils';

    import { generateChatCompletion } from '$lib/apis/ollama';
    import {
        createNewChat,
        getChatById,
        getChatList,
        getTagsById,
        updateChatById
    } from '$lib/apis/chats';
    import { generateOpenAIChatCompletion } from '$lib/apis/openai';
    import { runWebSearch } from '$lib/apis/rag';
    import { createOpenAITextStream } from '$lib/apis/streaming';
    import { queryMemory } from '$lib/apis/memories';
    import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
    import {
        chatCompleted,
        generateTitle,
        generateSearchQuery,
        chatAction,
        generateMoACompletion
    } from '$lib/apis';

    import Banner from '../common/Banner.svelte';
    import MessageInput from '$lib/components/chat/MessageInput.svelte';
    import Messages from '$lib/components/chat/Messages.svelte';
    import Navbar from '$lib/components/layout/Navbar.svelte';
    import ChatControls from './ChatControls.svelte';
    import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  61. const i18n: Writable<i18nType> = getContext('i18n');
  62. export let chatIdProp = '';
  63. let loaded = false;
  64. const eventTarget = new EventTarget();
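// eventTarget broadcasts streaming lifecycle events ('chat:start', 'chat', 'chat:finish') to
// child components (it is passed to ChatControls in the template below). Illustrative only, e.g.:
//   eventTarget.addEventListener('chat', (e) => console.log(e.detail.id, e.detail.content));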
  65. let showControls = false;
  66. let stopResponseFlag = false;
  67. let autoScroll = true;
  68. let processing = '';
  69. let messagesContainerElement: HTMLDivElement;
  70. let showEventConfirmation = false;
  71. let eventConfirmationTitle = '';
  72. let eventConfirmationMessage = '';
  73. let eventConfirmationInput = false;
  74. let eventConfirmationInputPlaceholder = '';
  75. let eventConfirmationInputValue = '';
  76. let eventCallback = null;
  77. let showModelSelector = true;
  78. let selectedModels = [''];
  79. let atSelectedModel: Model | undefined;
  80. let selectedModelIds = [];
  81. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  82. let selectedToolIds = [];
  83. let webSearchEnabled = false;
  84. let chat = null;
  85. let tags = [];
  86. let title = '';
  87. let prompt = '';
  88. let chatFiles = [];
  89. let files = [];
  90. let messages = [];
  91. let history = {
  92. messages: {},
  93. currentId: null
  94. };
  95. let params = {};
  96. let chatIdUnsubscriber: Unsubscriber | undefined;
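// Rebuild the linear `messages` array whenever the history tree changes, by walking parentId
// links from history.currentId back to the root. Entries in history.messages are keyed by
// message id and carry { id, parentId, childrenIds, role, content, ... }.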
  97. $: if (history.currentId !== null) {
  98. let _messages = [];
  99. let currentMessage = history.messages[history.currentId];
  100. while (currentMessage !== null) {
  101. _messages.unshift({ ...currentMessage });
  102. currentMessage =
  103. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  104. }
  105. messages = _messages;
  106. } else {
  107. messages = [];
  108. }
  109. $: if (chatIdProp) {
  110. (async () => {
  111. console.log(chatIdProp);
  112. if (chatIdProp && (await loadChat())) {
  113. await tick();
  114. loaded = true;
  115. window.setTimeout(() => scrollToBottom(), 0);
  116. const chatInput = document.getElementById('chat-textarea');
  117. chatInput?.focus();
  118. } else {
  119. await goto('/');
  120. }
  121. })();
  122. }
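// Handle socket 'chat-events' for the active chat: 'status' and 'citation' events are appended
// to the target message, 'message'/'replace' mutate its content, 'action: continue' clicks the
// continue-response button, and 'confirmation'/'input' open the event confirmation dialog.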
  123. const chatEventHandler = async (event, cb) => {
  124. if (event.chat_id === $chatId) {
  125. await tick();
  126. console.log(event);
  127. let message = history.messages[event.message_id];
  128. const type = event?.data?.type ?? null;
  129. const data = event?.data?.data ?? null;
  130. if (type === 'status') {
  131. if (message?.statusHistory) {
  132. message.statusHistory.push(data);
  133. } else {
  134. message.statusHistory = [data];
  135. }
  136. } else if (type === 'citation') {
  137. if (message?.citations) {
  138. message.citations.push(data);
  139. } else {
  140. message.citations = [data];
  141. }
  142. } else if (type === 'message') {
  143. message.content += data.content;
  144. } else if (type === 'replace') {
  145. message.content = data.content;
  146. } else if (type === 'action') {
  147. if (data.action === 'continue') {
  148. const continueButton = document.getElementById('continue-response-button');
  149. if (continueButton) {
  150. continueButton.click();
  151. }
  152. }
  153. } else if (type === 'confirmation') {
  154. eventCallback = cb;
  155. eventConfirmationInput = false;
  156. showEventConfirmation = true;
  157. eventConfirmationTitle = data.title;
  158. eventConfirmationMessage = data.message;
  159. } else if (type === 'input') {
  160. eventCallback = cb;
  161. eventConfirmationInput = true;
  162. showEventConfirmation = true;
  163. eventConfirmationTitle = data.title;
  164. eventConfirmationMessage = data.message;
  165. eventConfirmationInputPlaceholder = data.placeholder;
  166. eventConfirmationInputValue = data?.value ?? '';
  167. } else {
  168. console.log('Unknown message type', data);
  169. }
  170. messages = messages;
  171. }
  172. };
  173. const onMessageHandler = async (event: {
  174. origin: string;
  175. data: { type: string; text: string };
  176. }) => {
  177. if (event.origin !== window.origin) {
  178. return;
  179. }
// window.origin is checked above; if this component is embedded and should accept postMessage
// events from an iframe on a different origin, compare against that iframe's origin instead
  181. if (event.data.type === 'input:prompt') {
  182. console.debug(event.data.text);
  183. const inputElement = document.getElementById('chat-textarea');
  184. if (inputElement) {
  185. prompt = event.data.text;
  186. inputElement.focus();
  187. }
  188. }
  189. if (event.data.type === 'action:submit') {
  190. console.debug(event.data.text);
  191. if (prompt !== '') {
  192. await tick();
  193. submitPrompt(prompt);
  194. }
  195. }
  196. if (event.data.type === 'input:prompt:submit') {
  197. console.debug(event.data.text);
  198. if (prompt !== '') {
  199. await tick();
  200. submitPrompt(event.data.text);
  201. }
  202. }
  203. };
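// Wire up lifecycle: listen for window postMessage prompts, subscribe to socket chat events,
// and initialize a fresh chat whenever the chatId store is cleared.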
  204. onMount(async () => {
  205. window.addEventListener('message', onMessageHandler);
  206. $socket?.on('chat-events', chatEventHandler);
  207. if (!$chatId) {
  208. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  209. if (!value) {
  210. await initNewChat();
  211. }
  212. });
  213. } else {
  214. if ($temporaryChatEnabled) {
  215. await goto('/');
  216. }
  217. }
  218. });
  219. onDestroy(() => {
  220. chatIdUnsubscriber?.();
  221. window.removeEventListener('message', onMessageHandler);
  222. $socket?.off('chat-events');
  223. });
  224. //////////////////////////
  225. // Web functions
  226. //////////////////////////
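// initNewChat resets all per-chat state and chooses models from (in order) the `models` URL
// query param, the user's saved settings, or the server's default_models config. A `q` query
// param is auto-submitted as the first prompt.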
  227. const initNewChat = async () => {
  228. if ($page.url.pathname.includes('/c/')) {
window.history.replaceState(window.history.state, '', `/`); // window.history, not the local chat `history` object
  230. }
  231. await chatId.set('');
  232. autoScroll = true;
  233. title = '';
  234. messages = [];
  235. history = {
  236. messages: {},
  237. currentId: null
  238. };
  239. chatFiles = [];
  240. params = {};
  241. if ($page.url.searchParams.get('models')) {
  242. selectedModels = $page.url.searchParams.get('models')?.split(',');
  243. } else if ($settings?.models) {
  244. selectedModels = $settings?.models;
  245. } else if ($config?.default_models) {
  246. console.log($config?.default_models.split(',') ?? '');
  247. selectedModels = $config?.default_models.split(',');
  248. } else {
  249. selectedModels = [''];
  250. }
  251. if ($page.url.searchParams.get('q')) {
  252. prompt = $page.url.searchParams.get('q') ?? '';
  253. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  254. .split(',')
  255. .map((id) => id.trim())
  256. .filter((id) => id);
  257. if (prompt) {
  258. await tick();
  259. submitPrompt(prompt);
  260. }
  261. }
  262. if ($page.url.searchParams.get('call') === 'true') {
  263. showCallOverlay.set(true);
  264. }
  265. selectedModels = selectedModels.map((modelId) =>
  266. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  267. );
  268. const userSettings = await getUserSettings(localStorage.token);
  269. if (userSettings) {
  270. settings.set(userSettings.ui);
  271. } else {
  272. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  273. }
  274. const chatInput = document.getElementById('chat-textarea');
  275. setTimeout(() => chatInput?.focus(), 0);
  276. };
  277. const loadChat = async () => {
  278. chatId.set(chatIdProp);
  279. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  280. await goto('/');
  281. return null;
  282. });
  283. if (chat) {
  284. tags = await getTags();
  285. const chatContent = chat.chat;
  286. if (chatContent) {
  287. console.log(chatContent);
  288. selectedModels =
  289. (chatContent?.models ?? undefined) !== undefined
  290. ? chatContent.models
  291. : [chatContent.models ?? ''];
  292. history =
  293. (chatContent?.history ?? undefined) !== undefined
  294. ? chatContent.history
  295. : convertMessagesToHistory(chatContent.messages);
  296. title = chatContent.title;
  297. const userSettings = await getUserSettings(localStorage.token);
  298. if (userSettings) {
  299. await settings.set(userSettings.ui);
  300. } else {
  301. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  302. }
  303. params = chatContent?.params ?? {};
  304. chatFiles = chatContent?.files ?? [];
  305. autoScroll = true;
  306. await tick();
  307. if (messages.length > 0) {
  308. history.messages[messages.at(-1).id].done = true;
  309. }
  310. await tick();
  311. return true;
  312. } else {
  313. return null;
  314. }
  315. }
  316. };
  317. const scrollToBottom = async () => {
  318. await tick();
  319. if (messagesContainerElement) {
  320. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  321. }
  322. };
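// createMessagesList walks parentId links from a response message up to the root,
// returning that branch as an ordered array (oldest first).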
  323. const createMessagesList = (responseMessageId) => {
  324. const message = history.messages[responseMessageId];
  325. if (message.parentId) {
  326. return [...createMessagesList(message.parentId), message];
  327. } else {
  328. return [message];
  329. }
  330. };
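// chatCompletedHandler: render any mermaid blocks, notify the backend that the completion
// finished, merge server-side message edits back into history (preserving originalContent),
// and persist the chat unless temporary chat is enabled.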
  331. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  332. await mermaid.run({
  333. querySelector: '.mermaid'
  334. });
  335. const res = await chatCompleted(localStorage.token, {
  336. model: modelId,
  337. messages: messages.map((m) => ({
  338. id: m.id,
  339. role: m.role,
  340. content: m.content,
  341. info: m.info ? m.info : undefined,
  342. timestamp: m.timestamp
  343. })),
  344. chat_id: chatId,
  345. session_id: $socket?.id,
  346. id: responseMessageId
  347. }).catch((error) => {
  348. toast.error(error);
  349. messages.at(-1).error = { content: error };
  350. return null;
  351. });
  352. if (res !== null) {
  353. // Update chat history with the new messages
  354. for (const message of res.messages) {
  355. history.messages[message.id] = {
  356. ...history.messages[message.id],
  357. ...(history.messages[message.id].content !== message.content
  358. ? { originalContent: history.messages[message.id].content }
  359. : {}),
  360. ...message
  361. };
  362. }
  363. }
  364. if ($chatId == chatId) {
  365. if (!$temporaryChatEnabled) {
  366. chat = await updateChatById(localStorage.token, chatId, {
  367. models: selectedModels,
  368. messages: messages,
  369. history: history,
  370. params: params,
  371. files: chatFiles
  372. });
  373. currentChatPage.set(1);
  374. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  375. }
  376. }
  377. };
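// chatActionHandler mirrors chatCompletedHandler but invokes a named backend action (actionId)
// and merges the returned messages into history.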
  378. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  379. const res = await chatAction(localStorage.token, actionId, {
  380. model: modelId,
  381. messages: messages.map((m) => ({
  382. id: m.id,
  383. role: m.role,
  384. content: m.content,
  385. info: m.info ? m.info : undefined,
  386. timestamp: m.timestamp
  387. })),
  388. ...(event ? { event: event } : {}),
  389. chat_id: chatId,
  390. session_id: $socket?.id,
  391. id: responseMessageId
  392. }).catch((error) => {
  393. toast.error(error);
  394. messages.at(-1).error = { content: error };
  395. return null;
  396. });
  397. if (res !== null) {
  398. // Update chat history with the new messages
  399. for (const message of res.messages) {
  400. history.messages[message.id] = {
  401. ...history.messages[message.id],
  402. ...(history.messages[message.id].content !== message.content
  403. ? { originalContent: history.messages[message.id].content }
  404. : {}),
  405. ...message
  406. };
  407. }
  408. }
  409. if ($chatId == chatId) {
  410. if (!$temporaryChatEnabled) {
  411. chat = await updateChatById(localStorage.token, chatId, {
  412. models: selectedModels,
  413. messages: messages,
  414. history: history,
  415. params: params,
  416. files: chatFiles
  417. });
  418. currentChatPage.set(1);
  419. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  420. }
  421. }
  422. };
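// Emit a 'usage' heartbeat over the socket every second so the backend can track which
// model/chat is actively generating; the caller clears the interval once the response ends.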
  423. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  424. return setInterval(() => {
  425. $socket?.emit('usage', {
  426. action: 'chat',
  427. model: modelId,
  428. chat_id: chatId
  429. });
  430. }, 1000);
  431. };
  432. //////////////////////////
  433. // Chat functions
  434. //////////////////////////
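// submitPrompt validates the current state (model selected, previous response finished,
// uploads processed, file count within limits), appends the user message to the history tree,
// and hands off to sendPrompt.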
  435. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  436. let _responses = [];
  437. console.log('submitPrompt', $chatId);
  438. selectedModels = selectedModels.map((modelId) =>
  439. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  440. );
  441. if (selectedModels.includes('')) {
  442. toast.error($i18n.t('Model not selected'));
  443. } else if (messages.length != 0 && messages.at(-1).done != true) {
  444. // Response not done
  445. console.log('wait');
  446. } else if (messages.length != 0 && messages.at(-1).error) {
  447. // Error in response
  448. toast.error(
  449. $i18n.t(
  450. `Oops! There was an error in the previous response. Please try again or contact admin.`
  451. )
  452. );
  453. } else if (
  454. files.length > 0 &&
  455. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  456. ) {
  457. // Upload not done
  458. toast.error(
  459. $i18n.t(
  460. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  461. )
  462. );
  463. } else if (
  464. ($config?.file?.max_count ?? null) !== null &&
  465. files.length + chatFiles.length > $config?.file?.max_count
  466. ) {
  467. console.log(chatFiles.length, files.length);
  468. toast.error(
  469. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  470. maxCount: $config?.file?.max_count
  471. })
  472. );
  473. } else {
  474. // Reset chat input textarea
  475. const chatTextAreaElement = document.getElementById('chat-textarea');
  476. if (chatTextAreaElement) {
  477. chatTextAreaElement.value = '';
  478. chatTextAreaElement.style.height = '';
  479. }
  480. const _files = JSON.parse(JSON.stringify(files));
  481. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  482. chatFiles = chatFiles.filter(
  483. // Remove duplicates
  484. (item, index, array) =>
  485. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  486. );
  487. files = [];
  488. prompt = '';
  489. // Create user message
  490. let userMessageId = uuidv4();
  491. let userMessage = {
  492. id: userMessageId,
  493. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  494. childrenIds: [],
  495. role: 'user',
  496. content: userPrompt,
  497. files: _files.length > 0 ? _files : undefined,
  498. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  499. models: selectedModels
  500. };
  501. // Add message to history and Set currentId to messageId
  502. history.messages[userMessageId] = userMessage;
  503. history.currentId = userMessageId;
  504. // Append messageId to childrenIds of parent message
  505. if (messages.length !== 0) {
  506. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  507. }
  508. // Wait until history/message have been updated
  509. await tick();
  510. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  511. }
  512. return _responses;
  513. };
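// sendPrompt creates one assistant placeholder message per selected model, persists a new chat
// on the first exchange (unless temporary), optionally runs memory lookup and web search, then
// streams each model's response through the Ollama or OpenAI pipeline in parallel.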
  514. const sendPrompt = async (
  515. prompt: string,
  516. parentId: string,
  517. { modelId = null, modelIdx = null, newChat = false } = {}
  518. ) => {
  519. let _responses: string[] = [];
  520. // If modelId is provided, use it, else use selected model
  521. let selectedModelIds = modelId
  522. ? [modelId]
  523. : atSelectedModel !== undefined
  524. ? [atSelectedModel.id]
  525. : selectedModels;
  526. // Create response messages for each selected model
  527. const responseMessageIds: Record<PropertyKey, string> = {};
  528. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  529. const model = $models.filter((m) => m.id === modelId).at(0);
  530. if (model) {
  531. let responseMessageId = uuidv4();
  532. let responseMessage = {
  533. parentId: parentId,
  534. id: responseMessageId,
  535. childrenIds: [],
  536. role: 'assistant',
  537. content: '',
  538. model: model.id,
  539. modelName: model.name ?? model.id,
  540. modelIdx: modelIdx ? modelIdx : _modelIdx,
  541. userContext: null,
  542. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  543. };
  544. // Add message to history and Set currentId to messageId
  545. history.messages[responseMessageId] = responseMessage;
  546. history.currentId = responseMessageId;
  547. // Append messageId to childrenIds of parent message
  548. if (parentId !== null) {
  549. history.messages[parentId].childrenIds = [
  550. ...history.messages[parentId].childrenIds,
  551. responseMessageId
  552. ];
  553. }
  554. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  555. }
  556. }
  557. await tick();
  558. // Create new chat if only one message in messages
  559. if (newChat && messages.length == 2) {
  560. if (!$temporaryChatEnabled) {
  561. chat = await createNewChat(localStorage.token, {
  562. id: $chatId,
  563. title: $i18n.t('New Chat'),
  564. models: selectedModels,
  565. system: $settings.system ?? undefined,
  566. params: params,
  567. messages: messages,
  568. history: history,
  569. tags: [],
  570. timestamp: Date.now()
  571. });
  572. currentChatPage.set(1);
  573. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  574. await chatId.set(chat.id);
  575. } else {
  576. await chatId.set('local');
  577. }
  578. await tick();
  579. }
  580. const _chatId = JSON.parse(JSON.stringify($chatId));
  581. await Promise.all(
  582. selectedModelIds.map(async (modelId, _modelIdx) => {
  583. console.log('modelId', modelId);
  584. const model = $models.filter((m) => m.id === modelId).at(0);
  585. if (model) {
  586. // If there are image files, check if model is vision capable
  587. const hasImages = messages.some((message) =>
  588. message.files?.some((file) => file.type === 'image')
  589. );
  590. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  591. toast.error(
  592. $i18n.t('Model {{modelName}} is not vision capable', {
  593. modelName: model.name ?? model.id
  594. })
  595. );
  596. }
  597. let responseMessageId =
  598. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  599. let responseMessage = history.messages[responseMessageId];
  600. let userContext = null;
  601. if ($settings?.memory ?? false) {
  602. if (userContext === null) {
  603. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  604. toast.error(error);
  605. return null;
  606. });
  607. if (res) {
  608. if (res.documents[0].length > 0) {
  609. userContext = res.documents[0].reduce((acc, doc, index) => {
  610. const createdAtTimestamp = res.metadatas[0][index].created_at;
  611. const createdAtDate = new Date(createdAtTimestamp * 1000)
  612. .toISOString()
  613. .split('T')[0];
  614. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  615. }, '');
  616. }
  617. console.log(userContext);
  618. }
  619. }
  620. }
  621. responseMessage.userContext = userContext;
  622. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  623. if (webSearchEnabled) {
  624. await getWebSearchResults(model.id, parentId, responseMessageId);
  625. }
  626. let _response = null;
  627. if (model?.owned_by === 'openai') {
  628. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  629. } else if (model) {
  630. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  631. }
  632. _responses.push(_response);
  633. if (chatEventEmitter) clearInterval(chatEventEmitter);
  634. } else {
  635. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  636. }
  637. })
  638. );
  639. currentChatPage.set(1);
  640. chats.set(await getChatList(localStorage.token, $currentChatPage));
  641. return _responses;
  642. };
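// sendPromptOllama streams a completion from the Ollama backend as newline-delimited JSON.
// Each chunk appends to the response message; completed sentence parts are dispatched as
// 'chat' events (used for TTS), and the final chunk carries timing/eval metadata.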
  643. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  644. let _response: string | null = null;
  645. const responseMessage = history.messages[responseMessageId];
  646. const userMessage = history.messages[responseMessage.parentId];
  647. // Wait until history/message have been updated
  648. await tick();
  649. // Scroll down
  650. scrollToBottom();
  651. const messagesBody = [
  652. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  653. ? {
  654. role: 'system',
  655. content: `${promptTemplate(
  656. params?.system ?? $settings?.system ?? '',
  657. $user.name,
  658. $settings?.userLocation
  659. ? await getAndUpdateUserLocation(localStorage.token)
  660. : undefined
  661. )}${
  662. (responseMessage?.userContext ?? null)
  663. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  664. : ''
  665. }`
  666. }
  667. : undefined,
  668. ...messages
  669. ]
  670. .filter((message) => message?.content?.trim())
  671. .map((message) => {
  672. // Prepare the base message object
  673. const baseMessage = {
  674. role: message.role,
  675. content: message.content
  676. };
  677. // Extract and format image URLs if any exist
  678. const imageUrls = message.files
  679. ?.filter((file) => file.type === 'image')
  680. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  681. // Add images array only if it contains elements
  682. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  683. baseMessage.images = imageUrls;
  684. }
  685. return baseMessage;
  686. });
  687. let lastImageIndex = -1;
  688. // Find the index of the last object with images
  689. messagesBody.forEach((item, index) => {
  690. if (item.images) {
  691. lastImageIndex = index;
  692. }
  693. });
  694. // Remove images from all but the last one
  695. messagesBody.forEach((item, index) => {
  696. if (index !== lastImageIndex) {
  697. delete item.images;
  698. }
  699. });
  700. let files = JSON.parse(JSON.stringify(chatFiles));
  701. if (model?.info?.meta?.knowledge ?? false) {
  702. // Only initialize and add status if knowledge exists
  703. responseMessage.statusHistory = [
  704. {
  705. action: 'knowledge_search',
  706. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  707. searchQuery: userMessage.content
  708. }),
  709. done: false
  710. }
  711. ];
  712. files.push(...model.info.meta.knowledge);
  713. messages = messages; // Trigger Svelte update
  714. }
  715. files.push(
  716. ...(userMessage?.files ?? []).filter((item) =>
  717. ['doc', 'file', 'collection'].includes(item.type)
  718. ),
  719. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  720. );
  721. scrollToBottom();
  722. eventTarget.dispatchEvent(
  723. new CustomEvent('chat:start', {
  724. detail: {
  725. id: responseMessageId
  726. }
  727. })
  728. );
  729. await tick();
  730. const [res, controller] = await generateChatCompletion(localStorage.token, {
  731. stream: true,
  732. model: model.id,
  733. messages: messagesBody,
  734. options: {
  735. ...(params ?? $settings.params ?? {}),
stop:
    (params?.stop ?? $settings?.params?.stop ?? undefined)
        ? (params?.stop
                ? params.stop.split(',').map((token) => token.trim())
                : $settings.params.stop
            ).map((str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"')))
        : undefined,
  742. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  743. repeat_penalty:
  744. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  745. },
  746. format: $settings.requestFormat ?? undefined,
  747. keep_alive: $settings.keepAlive ?? undefined,
  748. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  749. files: files.length > 0 ? files : undefined,
  750. session_id: $socket?.id,
  751. chat_id: $chatId,
  752. id: responseMessageId
  753. });
  754. if (res && res.ok) {
  755. console.log('controller', controller);
  756. const reader = res.body
  757. .pipeThrough(new TextDecoderStream())
  758. .pipeThrough(splitStream('\n'))
  759. .getReader();
  760. while (true) {
  761. const { value, done } = await reader.read();
  762. if (done || stopResponseFlag || _chatId !== $chatId) {
  763. responseMessage.done = true;
  764. messages = messages;
  765. if (stopResponseFlag) {
  766. controller.abort('User: Stop Response');
  767. } else {
  768. const messages = createMessagesList(responseMessageId);
  769. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  770. }
  771. _response = responseMessage.content;
  772. break;
  773. }
  774. try {
  775. let lines = value.split('\n');
  776. for (const line of lines) {
  777. if (line !== '') {
  778. console.log(line);
  779. let data = JSON.parse(line);
  780. if ('citations' in data) {
  781. responseMessage.citations = data.citations;
  782. // Only remove status if it was initially set
  783. if (model?.info?.meta?.knowledge ?? false) {
  784. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  785. (status) => status.action !== 'knowledge_search'
  786. );
  787. }
  788. continue;
  789. }
  790. if ('detail' in data) {
  791. throw data;
  792. }
  793. if (data.done == false) {
  794. if (responseMessage.content == '' && data.message.content == '\n') {
  795. continue;
  796. } else {
  797. responseMessage.content += data.message.content;
  798. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  799. navigator.vibrate(5);
  800. }
  801. const messageContentParts = getMessageContentParts(
  802. responseMessage.content,
  803. $config?.audio?.tts?.split_on ?? 'punctuation'
  804. );
  805. messageContentParts.pop();
  806. // dispatch only last sentence and make sure it hasn't been dispatched before
  807. if (
  808. messageContentParts.length > 0 &&
  809. messageContentParts[messageContentParts.length - 1] !==
  810. responseMessage.lastSentence
  811. ) {
  812. responseMessage.lastSentence =
  813. messageContentParts[messageContentParts.length - 1];
  814. eventTarget.dispatchEvent(
  815. new CustomEvent('chat', {
  816. detail: {
  817. id: responseMessageId,
  818. content: messageContentParts[messageContentParts.length - 1]
  819. }
  820. })
  821. );
  822. }
  823. messages = messages;
  824. }
  825. } else {
  826. responseMessage.done = true;
  827. if (responseMessage.content == '') {
  828. responseMessage.error = {
  829. code: 400,
  830. content: `Oops! No text generated from Ollama, Please try again.`
  831. };
  832. }
  833. responseMessage.context = data.context ?? null;
  834. responseMessage.info = {
  835. total_duration: data.total_duration,
  836. load_duration: data.load_duration,
  837. sample_count: data.sample_count,
  838. sample_duration: data.sample_duration,
  839. prompt_eval_count: data.prompt_eval_count,
  840. prompt_eval_duration: data.prompt_eval_duration,
  841. eval_count: data.eval_count,
  842. eval_duration: data.eval_duration
  843. };
  844. messages = messages;
  845. if ($settings.notificationEnabled && !document.hasFocus()) {
  846. const notification = new Notification(`${model.id}`, {
  847. body: responseMessage.content,
  848. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  849. });
  850. }
  851. if ($settings?.responseAutoCopy ?? false) {
  852. copyToClipboard(responseMessage.content);
  853. }
  854. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  855. await tick();
  856. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  857. }
  858. }
  859. }
  860. }
  861. } catch (error) {
  862. console.log(error);
  863. if ('detail' in error) {
  864. toast.error(error.detail);
  865. }
  866. break;
  867. }
  868. if (autoScroll) {
  869. scrollToBottom();
  870. }
  871. }
  872. if ($chatId == _chatId) {
  873. if ($settings.saveChatHistory ?? true) {
  874. chat = await updateChatById(localStorage.token, _chatId, {
  875. messages: messages,
  876. history: history,
  877. models: selectedModels,
  878. params: params,
  879. files: chatFiles
  880. });
  881. currentChatPage.set(1);
  882. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  883. }
  884. }
  885. } else {
  886. if (res !== null) {
  887. const error = await res.json();
  888. console.log(error);
  889. if ('detail' in error) {
  890. toast.error(error.detail);
  891. responseMessage.error = { content: error.detail };
  892. } else {
  893. toast.error(error.error);
  894. responseMessage.error = { content: error.error };
  895. }
  896. } else {
  897. toast.error(
  898. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  899. );
  900. responseMessage.error = {
  901. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  902. provider: 'Ollama'
  903. })
  904. };
  905. }
  906. responseMessage.done = true;
  907. if (responseMessage.statusHistory) {
  908. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  909. (status) => status.action !== 'knowledge_search'
  910. );
  911. }
  912. messages = messages;
  913. }
  914. stopResponseFlag = false;
  915. await tick();
  916. let lastMessageContentPart =
  917. getMessageContentParts(
  918. responseMessage.content,
  919. $config?.audio?.tts?.split_on ?? 'punctuation'
  920. )?.at(-1) ?? '';
  921. if (lastMessageContentPart) {
  922. eventTarget.dispatchEvent(
  923. new CustomEvent('chat', {
  924. detail: { id: responseMessageId, content: lastMessageContentPart }
  925. })
  926. );
  927. }
  928. eventTarget.dispatchEvent(
  929. new CustomEvent('chat:finish', {
  930. detail: {
  931. id: responseMessageId,
  932. content: responseMessage.content
  933. }
  934. })
  935. );
  936. if (autoScroll) {
  937. scrollToBottom();
  938. }
  939. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
window.history.replaceState(window.history.state, '', `/c/${_chatId}`);
  941. const _title = await generateChatTitle(userPrompt);
  942. await setChatTitle(_chatId, _title);
  943. }
  944. return _response;
  945. };
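// sendPromptOpenAI streams an OpenAI-compatible completion through createOpenAITextStream,
// handling citations, usage info, sentence-level 'chat' events, and error propagation.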
  946. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  947. let _response = null;
  948. const responseMessage = history.messages[responseMessageId];
  949. const userMessage = history.messages[responseMessage.parentId];
  950. let files = JSON.parse(JSON.stringify(chatFiles));
  951. if (model?.info?.meta?.knowledge ?? false) {
  952. // Only initialize and add status if knowledge exists
  953. responseMessage.statusHistory = [
  954. {
  955. action: 'knowledge_search',
  956. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  957. searchQuery: userMessage.content
  958. }),
  959. done: false
  960. }
  961. ];
  962. files.push(...model.info.meta.knowledge);
  963. messages = messages; // Trigger Svelte update
  964. }
  965. files.push(
  966. ...(userMessage?.files ?? []).filter((item) =>
  967. ['doc', 'file', 'collection'].includes(item.type)
  968. ),
  969. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  970. );
  971. scrollToBottom();
  972. eventTarget.dispatchEvent(
  973. new CustomEvent('chat:start', {
  974. detail: {
  975. id: responseMessageId
  976. }
  977. })
  978. );
  979. await tick();
  980. try {
  981. const [res, controller] = await generateOpenAIChatCompletion(
  982. localStorage.token,
  983. {
  984. stream: true,
  985. model: model.id,
  986. stream_options:
  987. (model.info?.meta?.capabilities?.usage ?? false)
  988. ? {
  989. include_usage: true
  990. }
  991. : undefined,
  992. messages: [
  993. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  994. ? {
  995. role: 'system',
  996. content: `${promptTemplate(
  997. params?.system ?? $settings?.system ?? '',
  998. $user.name,
  999. $settings?.userLocation
  1000. ? await getAndUpdateUserLocation(localStorage.token)
  1001. : undefined
  1002. )}${
  1003. (responseMessage?.userContext ?? null)
  1004. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  1005. : ''
  1006. }`
  1007. }
  1008. : undefined,
  1009. ...messages
  1010. ]
  1011. .filter((message) => message?.content?.trim())
  1012. .map((message, idx, arr) => ({
  1013. role: message.role,
...((message.files?.some((file) => file.type === 'image') ?? false) &&
message.role === 'user'
  1016. ? {
  1017. content: [
  1018. {
  1019. type: 'text',
  1020. text:
  1021. arr.length - 1 !== idx
  1022. ? message.content
  1023. : (message?.raContent ?? message.content)
  1024. },
  1025. ...message.files
  1026. .filter((file) => file.type === 'image')
  1027. .map((file) => ({
  1028. type: 'image_url',
  1029. image_url: {
  1030. url: file.url
  1031. }
  1032. }))
  1033. ]
  1034. }
  1035. : {
  1036. content:
  1037. arr.length - 1 !== idx
  1038. ? message.content
  1039. : (message?.raContent ?? message.content)
  1040. })
  1041. })),
  1042. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
stop:
    (params?.stop ?? $settings?.params?.stop ?? undefined)
        ? (params?.stop
                ? params.stop.split(',').map((token) => token.trim())
                : $settings.params.stop
            ).map((str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"')))
        : undefined,
  1049. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1050. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1051. frequency_penalty:
  1052. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1053. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1054. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1055. files: files.length > 0 ? files : undefined,
  1056. session_id: $socket?.id,
  1057. chat_id: $chatId,
  1058. id: responseMessageId
  1059. },
  1060. `${WEBUI_BASE_URL}/api`
  1061. );
  1062. // Wait until history/message have been updated
  1063. await tick();
  1064. scrollToBottom();
  1065. if (res && res.ok && res.body) {
  1066. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1067. for await (const update of textStream) {
  1068. const { value, done, citations, error, usage } = update;
  1069. if (error) {
  1070. await handleOpenAIError(error, null, model, responseMessage);
  1071. break;
  1072. }
  1073. if (done || stopResponseFlag || _chatId !== $chatId) {
  1074. responseMessage.done = true;
  1075. messages = messages;
  1076. if (stopResponseFlag) {
  1077. controller.abort('User: Stop Response');
  1078. } else {
  1079. const messages = createMessagesList(responseMessageId);
  1080. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  1081. }
  1082. _response = responseMessage.content;
  1083. break;
  1084. }
  1085. if (usage) {
  1086. responseMessage.info = { ...usage, openai: true };
  1087. }
  1088. if (citations) {
  1089. responseMessage.citations = citations;
  1090. // Only remove status if it was initially set
  1091. if (model?.info?.meta?.knowledge ?? false) {
  1092. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1093. (status) => status.action !== 'knowledge_search'
  1094. );
  1095. }
  1096. continue;
  1097. }
  1098. if (responseMessage.content == '' && value == '\n') {
  1099. continue;
  1100. } else {
  1101. responseMessage.content += value;
  1102. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1103. navigator.vibrate(5);
  1104. }
  1105. const messageContentParts = getMessageContentParts(
  1106. responseMessage.content,
  1107. $config?.audio?.tts?.split_on ?? 'punctuation'
  1108. );
  1109. messageContentParts.pop();
  1110. // dispatch only last sentence and make sure it hasn't been dispatched before
  1111. if (
  1112. messageContentParts.length > 0 &&
  1113. messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
  1114. ) {
  1115. responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
  1116. eventTarget.dispatchEvent(
  1117. new CustomEvent('chat', {
  1118. detail: {
  1119. id: responseMessageId,
  1120. content: messageContentParts[messageContentParts.length - 1]
  1121. }
  1122. })
  1123. );
  1124. }
  1125. messages = messages;
  1126. }
  1127. if (autoScroll) {
  1128. scrollToBottom();
  1129. }
  1130. }
  1131. if ($settings.notificationEnabled && !document.hasFocus()) {
  1132. const notification = new Notification(`${model.id}`, {
  1133. body: responseMessage.content,
  1134. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1135. });
  1136. }
  1137. if ($settings.responseAutoCopy) {
  1138. copyToClipboard(responseMessage.content);
  1139. }
  1140. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1141. await tick();
  1142. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1143. }
  1144. if ($chatId == _chatId) {
  1145. if ($settings.saveChatHistory ?? true) {
  1146. chat = await updateChatById(localStorage.token, _chatId, {
  1147. models: selectedModels,
  1148. messages: messages,
  1149. history: history,
  1150. params: params,
  1151. files: chatFiles
  1152. });
  1153. currentChatPage.set(1);
  1154. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1155. }
  1156. }
  1157. } else {
  1158. await handleOpenAIError(null, res, model, responseMessage);
  1159. }
  1160. } catch (error) {
  1161. await handleOpenAIError(error, null, model, responseMessage);
  1162. }
  1163. messages = messages;
  1164. stopResponseFlag = false;
  1165. await tick();
  1166. let lastMessageContentPart =
  1167. getMessageContentParts(
  1168. responseMessage.content,
  1169. $config?.audio?.tts?.split_on ?? 'punctuation'
  1170. )?.at(-1) ?? '';
  1171. if (lastMessageContentPart) {
  1172. eventTarget.dispatchEvent(
  1173. new CustomEvent('chat', {
  1174. detail: { id: responseMessageId, content: lastMessageContentPart }
  1175. })
  1176. );
  1177. }
  1178. eventTarget.dispatchEvent(
  1179. new CustomEvent('chat:finish', {
  1180. detail: {
  1181. id: responseMessageId,
  1182. content: responseMessage.content
  1183. }
  1184. })
  1185. );
  1186. if (autoScroll) {
  1187. scrollToBottom();
  1188. }
  1189. if (messages.length == 2 && selectedModels[0] === model.id) {
window.history.replaceState(window.history.state, '', `/c/${_chatId}`);
  1191. const _title = await generateChatTitle(userPrompt);
  1192. await setChatTitle(_chatId, _title);
  1193. }
  1194. return _response;
  1195. };
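// handleOpenAIError normalizes the different error shapes ({detail}, {error}, {error: {message}},
// {message}) into a toast plus an inline error on the response message.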
  1196. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1197. let errorMessage = '';
  1198. let innerError;
  1199. if (error) {
  1200. innerError = error;
  1201. } else if (res !== null) {
  1202. innerError = await res.json();
  1203. }
  1204. console.error(innerError);
  1205. if ('detail' in innerError) {
  1206. toast.error(innerError.detail);
  1207. errorMessage = innerError.detail;
  1208. } else if ('error' in innerError) {
  1209. if ('message' in innerError.error) {
  1210. toast.error(innerError.error.message);
  1211. errorMessage = innerError.error.message;
  1212. } else {
  1213. toast.error(innerError.error);
  1214. errorMessage = innerError.error;
  1215. }
  1216. } else if ('message' in innerError) {
  1217. toast.error(innerError.message);
  1218. errorMessage = innerError.message;
  1219. }
  1220. responseMessage.error = {
  1221. content:
  1222. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1223. provider: model.name ?? model.id
  1224. }) +
  1225. '\n' +
  1226. errorMessage
  1227. };
  1228. responseMessage.done = true;
  1229. if (responseMessage.statusHistory) {
  1230. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1231. (status) => status.action !== 'knowledge_search'
  1232. );
  1233. }
  1234. messages = messages;
  1235. };
  1236. const stopResponse = () => {
  1237. stopResponseFlag = true;
  1238. console.log('stopResponse');
  1239. };
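// regenerateResponse re-sends the parent user message; with a single model it is re-selected
// automatically, otherwise the model of the response being regenerated is reused.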
  1240. const regenerateResponse = async (message) => {
  1241. console.log('regenerateResponse');
  1242. if (messages.length != 0) {
  1243. let userMessage = history.messages[message.parentId];
  1244. let userPrompt = userMessage.content;
  1245. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1246. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1247. await sendPrompt(userPrompt, userMessage.id);
  1248. } else {
  1249. // If there are multiple models selected, use the model of the response message for regeneration
  1250. // e.g. many model chat
  1251. await sendPrompt(userPrompt, userMessage.id, {
  1252. modelId: message.model,
  1253. modelIdx: message.modelIdx
  1254. });
  1255. }
  1256. }
  1257. };
  1258. const continueGeneration = async () => {
  1259. console.log('continueGeneration');
  1260. const _chatId = JSON.parse(JSON.stringify($chatId));
  1261. if (messages.length != 0 && messages.at(-1).done == true) {
  1262. const responseMessage = history.messages[history.currentId];
  1263. responseMessage.done = false;
  1264. await tick();
  1265. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
if (model) {
    if (model?.owned_by === 'openai') {
        await sendPromptOpenAI(
            model,
            history.messages[responseMessage.parentId].content,
            responseMessage.id,
            _chatId
        );
    } else {
        await sendPromptOllama(
            model,
            history.messages[responseMessage.parentId].content,
            responseMessage.id,
            _chatId
        );
    }
} else {
    // the model of the last response is no longer available
    toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
}
}
};
  1286. const generateChatTitle = async (userPrompt) => {
  1287. if ($settings?.title?.auto ?? true) {
  1288. const title = await generateTitle(
  1289. localStorage.token,
  1290. selectedModels[0],
  1291. userPrompt,
  1292. $chatId
  1293. ).catch((error) => {
  1294. console.error(error);
  1295. return 'New Chat';
  1296. });
  1297. return title;
  1298. } else {
  1299. return `${userPrompt}`;
  1300. }
  1301. };
  1302. const setChatTitle = async (_chatId, _title) => {
  1303. if (_chatId === $chatId) {
  1304. title = _title;
  1305. }
  1306. if (!$temporaryChatEnabled) {
  1307. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1308. currentChatPage.set(1);
  1309. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1310. }
  1311. };
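// getWebSearchResults generates a search query from the conversation, runs the web search,
// and attaches the resulting collection to the response message as a 'web_search_results'
// file while tracking progress in statusHistory.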
  1312. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1313. const responseMessage = history.messages[responseId];
  1314. const userMessage = history.messages[parentId];
  1315. responseMessage.statusHistory = [
  1316. {
  1317. done: false,
  1318. action: 'web_search',
  1319. description: $i18n.t('Generating search query')
  1320. }
  1321. ];
  1322. messages = messages;
  1323. const prompt = userMessage.content;
  1324. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1325. (error) => {
  1326. console.log(error);
  1327. return prompt;
  1328. }
  1329. );
  1330. if (!searchQuery) {
  1331. toast.warning($i18n.t('No search query generated'));
  1332. responseMessage.statusHistory.push({
  1333. done: true,
  1334. error: true,
  1335. action: 'web_search',
  1336. description: 'No search query generated'
  1337. });
messages = messages;
    return; // don't run a web search without a query
}
  1340. responseMessage.statusHistory.push({
  1341. done: false,
  1342. action: 'web_search',
  1343. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1344. });
  1345. messages = messages;
  1346. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1347. console.log(error);
  1348. toast.error(error);
  1349. return null;
  1350. });
  1351. if (results) {
  1352. responseMessage.statusHistory.push({
  1353. done: true,
  1354. action: 'web_search',
  1355. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1356. query: searchQuery,
  1357. urls: results.filenames
  1358. });
if ((responseMessage?.files ?? undefined) === undefined) {
    responseMessage.files = [];
}
  1362. responseMessage.files.push({
  1363. collection_name: results.collection_name,
  1364. name: searchQuery,
  1365. type: 'web_search_results',
  1366. urls: results.filenames
  1367. });
  1368. messages = messages;
  1369. } else {
  1370. responseMessage.statusHistory.push({
  1371. done: true,
  1372. error: true,
  1373. action: 'web_search',
  1374. description: 'No search results found'
  1375. });
  1376. messages = messages;
  1377. }
  1378. };
  1379. const getTags = async () => {
  1380. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1381. return [];
  1382. });
  1383. };
  1384. const saveChatHandler = async (_chatId) => {
  1385. if ($chatId == _chatId) {
  1386. if (!$temporaryChatEnabled) {
  1387. chat = await updateChatById(localStorage.token, _chatId, {
  1388. messages: messages,
  1389. history: history,
  1390. models: selectedModels,
  1391. params: params,
  1392. files: chatFiles
  1393. });
  1394. currentChatPage.set(1);
  1395. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1396. }
  1397. }
  1398. };
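// mergeResponses streams a mixture-of-agents (MoA) completion that merges multiple model
// responses to the same user message into message.merged.content.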
  1399. const mergeResponses = async (messageId, responses, _chatId) => {
  1400. console.log('mergeResponses', messageId, responses);
  1401. const message = history.messages[messageId];
  1402. const mergedResponse = {
  1403. status: true,
  1404. content: ''
  1405. };
  1406. message.merged = mergedResponse;
  1407. messages = messages;
  1408. try {
  1409. const [res, controller] = await generateMoACompletion(
  1410. localStorage.token,
  1411. message.model,
  1412. history.messages[message.parentId].content,
  1413. responses
  1414. );
  1415. if (res && res.ok && res.body) {
  1416. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1417. for await (const update of textStream) {
  1418. const { value, done, citations, error, usage } = update;
  1419. if (error || done) {
  1420. break;
  1421. }
  1422. if (mergedResponse.content == '' && value == '\n') {
  1423. continue;
  1424. } else {
  1425. mergedResponse.content += value;
  1426. messages = messages;
  1427. }
  1428. if (autoScroll) {
  1429. scrollToBottom();
  1430. }
  1431. }
  1432. await saveChatHandler(_chatId);
  1433. } else {
  1434. console.error(res);
  1435. }
  1436. } catch (e) {
  1437. console.error(e);
  1438. }
  1439. };
  1440. </script>
  1441. <svelte:head>
  1442. <title>
  1443. {title
  1444. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1445. : `${$WEBUI_NAME}`}
  1446. </title>
  1447. </svelte:head>
  1448. <audio id="audioElement" src="" style="display: none;" />
  1449. <EventConfirmDialog
  1450. bind:show={showEventConfirmation}
  1451. title={eventConfirmationTitle}
  1452. message={eventConfirmationMessage}
  1453. input={eventConfirmationInput}
  1454. inputPlaceholder={eventConfirmationInputPlaceholder}
  1455. inputValue={eventConfirmationInputValue}
  1456. on:confirm={(e) => {
  1457. if (e.detail) {
  1458. eventCallback(e.detail);
  1459. } else {
  1460. eventCallback(true);
  1461. }
  1462. }}
  1463. on:cancel={() => {
  1464. eventCallback(false);
  1465. }}
  1466. />
  1467. {#if !chatIdProp || (loaded && chatIdProp)}
  1468. <div
  1469. class="h-screen max-h-[100dvh] {$showSidebar
  1470. ? 'md:max-w-[calc(100%-260px)]'
  1471. : ''} w-full max-w-full flex flex-col"
  1472. >
  1473. {#if $settings?.backgroundImageUrl ?? null}
  1474. <div
  1475. class="absolute {$showSidebar
  1476. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1477. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1478. style="background-image: url({$settings.backgroundImageUrl}) "
  1479. />
  1480. <div
  1481. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1482. />
  1483. {/if}
  1484. <Navbar
  1485. {title}
  1486. bind:selectedModels
  1487. bind:showModelSelector
  1488. bind:showControls
  1489. shareEnabled={messages.length > 0}
  1490. {chat}
  1491. {initNewChat}
  1492. />
  1493. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1494. <div
  1495. class="absolute top-[4.25rem] w-full {$showSidebar
  1496. ? 'md:max-w-[calc(100%-260px)]'
  1497. : ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
  1498. >
  1499. <div class=" flex flex-col gap-1 w-full">
  1500. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1501. <Banner
  1502. {banner}
  1503. on:dismiss={(e) => {
  1504. const bannerId = e.detail;
  1505. localStorage.setItem(
  1506. 'dismissedBannerIds',
  1507. JSON.stringify(
  1508. [
  1509. bannerId,
  1510. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1511. ].filter((id) => $banners.find((b) => b.id === id))
  1512. )
  1513. );
  1514. }}
  1515. />
  1516. {/each}
  1517. </div>
  1518. </div>
  1519. {/if}
  1520. <div class="flex flex-col flex-auto z-10">
  1521. <div
  1522. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
  1523. ? 'lg:pr-[24rem]'
  1524. : ''}"
  1525. id="messages-container"
  1526. bind:this={messagesContainerElement}
  1527. on:scroll={(e) => {
  1528. autoScroll =
  1529. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1530. messagesContainerElement.clientHeight + 5;
  1531. }}
  1532. >
  1533. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1534. <Messages
  1535. chatId={$chatId}
  1536. {selectedModels}
  1537. {processing}
  1538. bind:history
  1539. bind:messages
  1540. bind:autoScroll
  1541. bind:prompt
  1542. bottomPadding={files.length > 0}
  1543. {sendPrompt}
  1544. {continueGeneration}
  1545. {regenerateResponse}
  1546. {mergeResponses}
  1547. {chatActionHandler}
  1548. />
  1549. </div>
  1550. </div>
  1551. <div class={showControls ? 'lg:pr-[24rem]' : ''}>
  1552. <MessageInput
  1553. bind:files
  1554. bind:prompt
  1555. bind:autoScroll
  1556. bind:selectedToolIds
  1557. bind:webSearchEnabled
  1558. bind:atSelectedModel
  1559. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1560. const model = $models.find((m) => m.id === e);
  1561. if (model?.info?.meta?.toolIds ?? false) {
  1562. return [...new Set([...a, ...model.info.meta.toolIds])];
  1563. }
  1564. return a;
  1565. }, [])}
  1566. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1567. {selectedModels}
  1568. {messages}
  1569. {submitPrompt}
  1570. {stopResponse}
  1571. on:call={() => {
  1572. showControls = true;
  1573. }}
  1574. />
  1575. </div>
  1576. </div>
  1577. </div>
  1578. {/if}
  1579. <ChatControls
  1580. models={selectedModelIds.reduce((a, e, i, arr) => {
  1581. const model = $models.find((m) => m.id === e);
  1582. if (model) {
  1583. return [...a, model];
  1584. }
  1585. return a;
  1586. }, [])}
  1587. bind:show={showControls}
  1588. bind:chatFiles
  1589. bind:params
  1590. bind:files
  1591. {submitPrompt}
  1592. {stopResponse}
  1593. modelId={selectedModelIds?.at(0) ?? null}
  1594. chatId={$chatId}
  1595. {eventTarget}
  1596. />