Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onDestroy, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Unsubscriber, Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. WEBUI_NAME,
  20. banners,
  21. user,
  22. socket,
  23. showCallOverlay,
  24. currentChatPage,
  25. temporaryChatEnabled
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. extractSentencesForAudio,
  31. promptTemplate,
  32. splitStream
  33. } from '$lib/utils';
  34. import { generateChatCompletion } from '$lib/apis/ollama';
  35. import {
  36. createNewChat,
  37. getChatById,
  38. getChatList,
  39. getTagsById,
  40. updateChatById
  41. } from '$lib/apis/chats';
  42. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  43. import { runWebSearch } from '$lib/apis/rag';
  44. import { createOpenAITextStream } from '$lib/apis/streaming';
  45. import { queryMemory } from '$lib/apis/memories';
  46. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  47. import {
  48. chatCompleted,
  49. generateTitle,
  50. generateSearchQuery,
  51. chatAction,
  52. generateMoACompletion
  53. } from '$lib/apis';
  54. import Banner from '../common/Banner.svelte';
  55. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  56. import Messages from '$lib/components/chat/Messages.svelte';
  57. import Navbar from '$lib/components/layout/Navbar.svelte';
  58. import ChatControls from './ChatControls.svelte';
  59. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  60. const i18n: Writable<i18nType> = getContext('i18n');
  61. export let chatIdProp = '';
  62. let loaded = false;
  63. const eventTarget = new EventTarget();
  64. let showControls = false;
  65. let stopResponseFlag = false;
  66. let autoScroll = true;
  67. let processing = '';
  68. let messagesContainerElement: HTMLDivElement;
  69. let showEventConfirmation = false;
  70. let eventConfirmationTitle = '';
  71. let eventConfirmationMessage = '';
  72. let eventConfirmationInput = false;
  73. let eventConfirmationInputPlaceholder = '';
  74. let eventConfirmationInputValue = '';
  75. let eventCallback = null;
  76. let showModelSelector = true;
  77. let selectedModels = [''];
  78. let atSelectedModel: Model | undefined;
  79. let selectedModelIds = [];
  80. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  81. let selectedToolIds = [];
  82. let webSearchEnabled = false;
  83. let chat = null;
  84. let tags = [];
  85. let title = '';
  86. let prompt = '';
  87. let chatFiles = [];
  88. let files = [];
  89. let messages = [];
  90. let history = {
  91. messages: {},
  92. currentId: null
  93. };
  94. let params = {};
  95. let chatIdUnsubscriber: Unsubscriber | undefined;
  96. $: if (history.currentId !== null) {
  97. let _messages = [];
  98. let currentMessage = history.messages[history.currentId];
  99. while (currentMessage !== null) {
  100. _messages.unshift({ ...currentMessage });
  101. currentMessage =
  102. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  103. }
  104. messages = _messages;
  105. } else {
  106. messages = [];
  107. }
  108. $: if (chatIdProp) {
  109. (async () => {
  110. console.log(chatIdProp);
  111. if (chatIdProp && (await loadChat())) {
  112. await tick();
  113. loaded = true;
  114. window.setTimeout(() => scrollToBottom(), 0);
  115. const chatInput = document.getElementById('chat-textarea');
  116. chatInput?.focus();
  117. } else {
  118. await goto('/');
  119. }
  120. })();
  121. }
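// Handles 'chat-events' pushed over the socket for the currently open chat:
// status and citation updates, streamed message deltas/replacements, backend-triggered
// actions, and confirmation/input dialogs requested by tools.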
  122. const chatEventHandler = async (event, cb) => {
  123. if (event.chat_id === $chatId) {
  124. await tick();
  125. console.log(event);
  126. let message = history.messages[event.message_id];
  127. const type = event?.data?.type ?? null;
  128. const data = event?.data?.data ?? null;
  129. if (type === 'status') {
  130. if (message?.statusHistory) {
  131. message.statusHistory.push(data);
  132. } else {
  133. message.statusHistory = [data];
  134. }
  135. } else if (type === 'citation') {
  136. if (message?.citations) {
  137. message.citations.push(data);
  138. } else {
  139. message.citations = [data];
  140. }
  141. } else if (type === 'message') {
  142. message.content += data.content;
  143. } else if (type === 'replace') {
  144. message.content = data.content;
  145. } else if (type === 'action') {
  146. if (data.action === 'continue') {
  147. const continueButton = document.getElementById('continue-response-button');
  148. if (continueButton) {
  149. continueButton.click();
  150. }
  151. }
  152. } else if (type === 'confirmation') {
  153. eventCallback = cb;
  154. eventConfirmationInput = false;
  155. showEventConfirmation = true;
  156. eventConfirmationTitle = data.title;
  157. eventConfirmationMessage = data.message;
  158. } else if (type === 'input') {
  159. eventCallback = cb;
  160. eventConfirmationInput = true;
  161. showEventConfirmation = true;
  162. eventConfirmationTitle = data.title;
  163. eventConfirmationMessage = data.message;
  164. eventConfirmationInputPlaceholder = data.placeholder;
  165. eventConfirmationInputValue = data?.value ?? '';
  166. } else {
  167. console.log('Unknown message type', data);
  168. }
  169. messages = messages;
  170. }
  171. };
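// Handles window postMessage events from the same origin (e.g. an embedding page),
// which can prefill the prompt, submit the current prompt, or submit supplied text directly.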
  172. const onMessageHandler = async (event: {
  173. origin: string;
  174. data: { type: string; text: string };
  175. }) => {
176. // Only handle messages posted from the same window origin (adjust this check if embedding from an iframe with a different origin)
177. if (event.origin !== window.origin) {
178. return;
179. }
  180. if (event.data.type === 'input:prompt') {
  181. console.debug(event.data.text);
  182. const inputElement = document.getElementById('chat-textarea');
  183. if (inputElement) {
  184. prompt = event.data.text;
  185. inputElement.focus();
  186. }
  187. }
  188. if (event.data.type === 'action:submit') {
  189. console.debug(event.data.text);
  190. if (prompt !== '') {
  191. await tick();
  192. submitPrompt(prompt);
  193. }
  194. }
  195. if (event.data.type === 'input:prompt:submit') {
  196. console.debug(event.data.text);
197. if (event.data.text !== '') {
  198. await tick();
  199. submitPrompt(event.data.text);
  200. }
  201. }
  202. };
  203. onMount(async () => {
  204. window.addEventListener('message', onMessageHandler);
  205. $socket?.on('chat-events', chatEventHandler);
  206. if (!$chatId) {
  207. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  208. if (!value) {
  209. await initNewChat();
  210. }
  211. });
  212. } else {
  213. if ($temporaryChatEnabled) {
  214. await goto('/');
  215. }
  216. }
  217. });
  218. onDestroy(() => {
  219. chatIdUnsubscriber?.();
  220. window.removeEventListener('message', onMessageHandler);
  221. $socket?.off('chat-events');
  222. });
  223. //////////////////////////
  224. // Web functions
  225. //////////////////////////
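// Resets per-chat state and selects the initial models from the URL query,
// the user's saved settings, or the server's default_models config, in that order.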
  226. const initNewChat = async () => {
  227. if ($page.url.pathname.includes('/c/')) {
  228. window.history.replaceState(history.state, '', `/`);
  229. }
  230. await chatId.set('');
  231. autoScroll = true;
  232. title = '';
  233. messages = [];
  234. history = {
  235. messages: {},
  236. currentId: null
  237. };
  238. chatFiles = [];
  239. params = {};
  240. if ($page.url.searchParams.get('models')) {
  241. selectedModels = $page.url.searchParams.get('models')?.split(',');
  242. } else if ($settings?.models) {
  243. selectedModels = $settings?.models;
  244. } else if ($config?.default_models) {
  245. console.log($config?.default_models.split(',') ?? '');
  246. selectedModels = $config?.default_models.split(',');
  247. } else {
  248. selectedModels = [''];
  249. }
  250. if ($page.url.searchParams.get('q')) {
  251. prompt = $page.url.searchParams.get('q') ?? '';
  252. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  253. .split(',')
  254. .map((id) => id.trim())
  255. .filter((id) => id);
  256. if (prompt) {
  257. await tick();
  258. submitPrompt(prompt);
  259. }
  260. }
  261. if ($page.url.searchParams.get('call') === 'true') {
  262. showCallOverlay.set(true);
  263. }
  264. selectedModels = selectedModels.map((modelId) =>
  265. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  266. );
  267. const userSettings = await getUserSettings(localStorage.token);
  268. if (userSettings) {
  269. settings.set(userSettings.ui);
  270. } else {
  271. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  272. }
  273. const chatInput = document.getElementById('chat-textarea');
  274. setTimeout(() => chatInput?.focus(), 0);
  275. };
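// Loads an existing chat by id and restores its models, history, title, params and files;
// navigates back to the home route if the chat cannot be fetched.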
  276. const loadChat = async () => {
  277. chatId.set(chatIdProp);
  278. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  279. await goto('/');
  280. return null;
  281. });
  282. if (chat) {
  283. tags = await getTags();
  284. const chatContent = chat.chat;
  285. if (chatContent) {
  286. console.log(chatContent);
  287. selectedModels =
  288. (chatContent?.models ?? undefined) !== undefined
  289. ? chatContent.models
  290. : [chatContent.models ?? ''];
  291. history =
  292. (chatContent?.history ?? undefined) !== undefined
  293. ? chatContent.history
  294. : convertMessagesToHistory(chatContent.messages);
  295. title = chatContent.title;
  296. const userSettings = await getUserSettings(localStorage.token);
  297. if (userSettings) {
  298. await settings.set(userSettings.ui);
  299. } else {
  300. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  301. }
  302. params = chatContent?.params ?? {};
  303. chatFiles = chatContent?.files ?? [];
  304. autoScroll = true;
  305. await tick();
  306. if (messages.length > 0) {
  307. history.messages[messages.at(-1).id].done = true;
  308. }
  309. await tick();
  310. return true;
  311. } else {
  312. return null;
  313. }
  314. }
  315. };
  316. const scrollToBottom = async () => {
  317. await tick();
  318. if (messagesContainerElement) {
  319. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  320. }
  321. };
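// Rebuilds the linear message thread for a response by walking parentId links
// back to the root of the history tree.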
  322. const createMessagesList = (responseMessageId) => {
  323. const message = history.messages[responseMessageId];
  324. if (message.parentId) {
  325. return [...createMessagesList(message.parentId), message];
  326. } else {
  327. return [message];
  328. }
  329. };
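// Reports a finished completion to the backend, merges any server-side message edits
// back into local history, and persists the chat unless temporary chat mode is enabled.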
  330. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  331. await mermaid.run({
  332. querySelector: '.mermaid'
  333. });
  334. const res = await chatCompleted(localStorage.token, {
  335. model: modelId,
  336. messages: messages.map((m) => ({
  337. id: m.id,
  338. role: m.role,
  339. content: m.content,
  340. info: m.info ? m.info : undefined,
  341. timestamp: m.timestamp
  342. })),
  343. chat_id: chatId,
  344. session_id: $socket?.id,
  345. id: responseMessageId
  346. }).catch((error) => {
  347. toast.error(error);
  348. messages.at(-1).error = { content: error };
  349. return null;
  350. });
  351. if (res !== null) {
  352. // Update chat history with the new messages
  353. for (const message of res.messages) {
  354. history.messages[message.id] = {
  355. ...history.messages[message.id],
  356. ...(history.messages[message.id].content !== message.content
  357. ? { originalContent: history.messages[message.id].content }
  358. : {}),
  359. ...message
  360. };
  361. }
  362. }
  363. if ($chatId == chatId) {
  364. if (!$temporaryChatEnabled) {
  365. chat = await updateChatById(localStorage.token, chatId, {
  366. models: selectedModels,
  367. messages: messages,
  368. history: history,
  369. params: params,
  370. files: chatFiles
  371. });
  372. currentChatPage.set(1);
  373. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  374. }
  375. }
  376. };
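// Runs a backend chat action for a response message, merges the returned
// message updates into local history, and persists the chat.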
  377. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  378. const res = await chatAction(localStorage.token, actionId, {
  379. model: modelId,
  380. messages: messages.map((m) => ({
  381. id: m.id,
  382. role: m.role,
  383. content: m.content,
  384. info: m.info ? m.info : undefined,
  385. timestamp: m.timestamp
  386. })),
  387. ...(event ? { event: event } : {}),
  388. chat_id: chatId,
  389. session_id: $socket?.id,
  390. id: responseMessageId
  391. }).catch((error) => {
  392. toast.error(error);
  393. messages.at(-1).error = { content: error };
  394. return null;
  395. });
  396. if (res !== null) {
  397. // Update chat history with the new messages
  398. for (const message of res.messages) {
  399. history.messages[message.id] = {
  400. ...history.messages[message.id],
  401. ...(history.messages[message.id].content !== message.content
  402. ? { originalContent: history.messages[message.id].content }
  403. : {}),
  404. ...message
  405. };
  406. }
  407. }
  408. if ($chatId == chatId) {
  409. if (!$temporaryChatEnabled) {
  410. chat = await updateChatById(localStorage.token, chatId, {
  411. models: selectedModels,
  412. messages: messages,
  413. history: history,
  414. params: params,
  415. files: chatFiles
  416. });
  417. currentChatPage.set(1);
  418. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  419. }
  420. }
  421. };
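// Emits a 'usage' event every second while a completion is running, presumably so the
// backend can track active model usage; callers clear the returned interval when done.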
  422. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  423. return setInterval(() => {
  424. $socket?.emit('usage', {
  425. action: 'chat',
  426. model: modelId,
  427. chat_id: chatId
  428. });
  429. }, 1000);
  430. };
  431. //////////////////////////
  432. // Chat functions
  433. //////////////////////////
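// Validates the input state (model selected, previous response finished, uploads processed),
// appends the user message to the history tree, and hands off to sendPrompt.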
  434. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  435. let _responses = [];
  436. console.log('submitPrompt', $chatId);
  437. selectedModels = selectedModels.map((modelId) =>
  438. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  439. );
  440. if (selectedModels.includes('')) {
  441. toast.error($i18n.t('Model not selected'));
  442. } else if (messages.length != 0 && messages.at(-1).done != true) {
  443. // Response not done
  444. console.log('wait');
  445. } else if (messages.length != 0 && messages.at(-1).error) {
  446. // Error in response
  447. toast.error(
  448. $i18n.t(
  449. `Oops! There was an error in the previous response. Please try again or contact admin.`
  450. )
  451. );
  452. } else if (
  453. files.length > 0 &&
  454. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  455. ) {
  456. // Upload not done
  457. toast.error(
  458. $i18n.t(
  459. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  460. )
  461. );
  462. } else {
  463. // Reset chat input textarea
  464. const chatTextAreaElement = document.getElementById('chat-textarea');
  465. if (chatTextAreaElement) {
  466. chatTextAreaElement.value = '';
  467. chatTextAreaElement.style.height = '';
  468. }
  469. const _files = JSON.parse(JSON.stringify(files));
  470. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  471. chatFiles = chatFiles.filter(
  472. // Remove duplicates
  473. (item, index, array) =>
  474. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  475. );
  476. files = [];
  477. prompt = '';
  478. // Create user message
  479. let userMessageId = uuidv4();
  480. let userMessage = {
  481. id: userMessageId,
  482. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  483. childrenIds: [],
  484. role: 'user',
  485. content: userPrompt,
  486. files: _files.length > 0 ? _files : undefined,
  487. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  488. models: selectedModels
  489. };
490. // Add message to history and set currentId to messageId
  491. history.messages[userMessageId] = userMessage;
  492. history.currentId = userMessageId;
  493. // Append messageId to childrenIds of parent message
  494. if (messages.length !== 0) {
  495. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  496. }
497. // Wait until history/messages have been updated
  498. await tick();
  499. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  500. }
  501. return _responses;
  502. };
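// Creates a pending assistant message per selected model, creates the chat record on the
// first exchange, then streams each model's response in parallel via the Ollama or
// OpenAI-compatible path.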
  503. const sendPrompt = async (
  504. prompt: string,
  505. parentId: string,
  506. { modelId = null, modelIdx = null, newChat = false } = {}
  507. ) => {
  508. let _responses: string[] = [];
509. // If modelId is provided, use it; otherwise use the selected models
  510. let selectedModelIds = modelId
  511. ? [modelId]
  512. : atSelectedModel !== undefined
  513. ? [atSelectedModel.id]
  514. : selectedModels;
  515. // Create response messages for each selected model
  516. const responseMessageIds: Record<PropertyKey, string> = {};
  517. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  518. const model = $models.filter((m) => m.id === modelId).at(0);
  519. if (model) {
  520. let responseMessageId = uuidv4();
  521. let responseMessage = {
  522. parentId: parentId,
  523. id: responseMessageId,
  524. childrenIds: [],
  525. role: 'assistant',
  526. content: '',
  527. model: model.id,
  528. modelName: model.name ?? model.id,
  529. modelIdx: modelIdx ? modelIdx : _modelIdx,
  530. userContext: null,
  531. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  532. };
533. // Add message to history and set currentId to messageId
  534. history.messages[responseMessageId] = responseMessage;
  535. history.currentId = responseMessageId;
  536. // Append messageId to childrenIds of parent message
  537. if (parentId !== null) {
  538. history.messages[parentId].childrenIds = [
  539. ...history.messages[parentId].childrenIds,
  540. responseMessageId
  541. ];
  542. }
  543. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  544. }
  545. }
  546. await tick();
547. // Create a new chat on the first exchange (one user message plus its pending response)
  548. if (newChat && messages.length == 2) {
  549. if (!$temporaryChatEnabled) {
  550. chat = await createNewChat(localStorage.token, {
  551. id: $chatId,
  552. title: $i18n.t('New Chat'),
  553. models: selectedModels,
  554. system: $settings.system ?? undefined,
  555. params: params,
  556. messages: messages,
  557. history: history,
  558. tags: [],
  559. timestamp: Date.now()
  560. });
  561. currentChatPage.set(1);
  562. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  563. await chatId.set(chat.id);
  564. } else {
  565. await chatId.set('local');
  566. }
  567. await tick();
  568. }
  569. const _chatId = JSON.parse(JSON.stringify($chatId));
  570. await Promise.all(
  571. selectedModelIds.map(async (modelId, _modelIdx) => {
  572. console.log('modelId', modelId);
  573. const model = $models.filter((m) => m.id === modelId).at(0);
  574. if (model) {
  575. // If there are image files, check if model is vision capable
  576. const hasImages = messages.some((message) =>
  577. message.files?.some((file) => file.type === 'image')
  578. );
  579. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  580. toast.error(
  581. $i18n.t('Model {{modelName}} is not vision capable', {
  582. modelName: model.name ?? model.id
  583. })
  584. );
  585. }
  586. let responseMessageId =
  587. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  588. let responseMessage = history.messages[responseMessageId];
  589. let userContext = null;
  590. if ($settings?.memory ?? false) {
  591. if (userContext === null) {
  592. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  593. toast.error(error);
  594. return null;
  595. });
  596. if (res) {
  597. if (res.documents[0].length > 0) {
  598. userContext = res.documents[0].reduce((acc, doc, index) => {
  599. const createdAtTimestamp = res.metadatas[0][index].created_at;
  600. const createdAtDate = new Date(createdAtTimestamp * 1000)
  601. .toISOString()
  602. .split('T')[0];
  603. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  604. }, '');
  605. }
  606. console.log(userContext);
  607. }
  608. }
  609. }
  610. responseMessage.userContext = userContext;
  611. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  612. if (webSearchEnabled) {
  613. await getWebSearchResults(model.id, parentId, responseMessageId);
  614. }
  615. let _response = null;
  616. if (model?.owned_by === 'openai') {
  617. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  618. } else if (model) {
  619. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  620. }
  621. _responses.push(_response);
  622. if (chatEventEmitter) clearInterval(chatEventEmitter);
  623. } else {
  624. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  625. }
  626. })
  627. );
  628. currentChatPage.set(1);
  629. chats.set(await getChatList(localStorage.token, $currentChatPage));
  630. return _responses;
  631. };
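// Streams a completion from the Ollama backend: builds the system prompt and message payload
// (keeping images only on the last message that has them), reads the newline-delimited JSON
// stream, and updates the response message as chunks arrive.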
  632. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  633. let _response: string | null = null;
  634. const responseMessage = history.messages[responseMessageId];
  635. const userMessage = history.messages[responseMessage.parentId];
636. // Wait until history/messages have been updated
  637. await tick();
  638. // Scroll down
  639. scrollToBottom();
  640. const messagesBody = [
  641. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  642. ? {
  643. role: 'system',
  644. content: `${promptTemplate(
  645. params?.system ?? $settings?.system ?? '',
  646. $user.name,
  647. $settings?.userLocation
  648. ? await getAndUpdateUserLocation(localStorage.token)
  649. : undefined
  650. )}${
  651. (responseMessage?.userContext ?? null)
  652. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  653. : ''
  654. }`
  655. }
  656. : undefined,
  657. ...messages
  658. ]
  659. .filter((message) => message?.content?.trim())
  660. .map((message) => {
  661. // Prepare the base message object
  662. const baseMessage = {
  663. role: message.role,
  664. content: message.content
  665. };
  666. // Extract and format image URLs if any exist
  667. const imageUrls = message.files
  668. ?.filter((file) => file.type === 'image')
  669. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  670. // Add images array only if it contains elements
  671. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  672. baseMessage.images = imageUrls;
  673. }
  674. return baseMessage;
  675. });
  676. let lastImageIndex = -1;
  677. // Find the index of the last object with images
  678. messagesBody.forEach((item, index) => {
  679. if (item.images) {
  680. lastImageIndex = index;
  681. }
  682. });
  683. // Remove images from all but the last one
  684. messagesBody.forEach((item, index) => {
  685. if (index !== lastImageIndex) {
  686. delete item.images;
  687. }
  688. });
  689. let files = JSON.parse(JSON.stringify(chatFiles));
  690. if (model?.info?.meta?.knowledge ?? false) {
  691. // Only initialize and add status if knowledge exists
  692. responseMessage.statusHistory = [
  693. {
  694. action: 'knowledge_search',
  695. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  696. searchQuery: userMessage.content
  697. }),
  698. done: false
  699. }
  700. ];
  701. files.push(...model.info.meta.knowledge);
  702. messages = messages; // Trigger Svelte update
  703. }
  704. files.push(
  705. ...(userMessage?.files ?? []).filter((item) =>
  706. ['doc', 'file', 'collection'].includes(item.type)
  707. ),
  708. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  709. );
  710. scrollToBottom();
  711. eventTarget.dispatchEvent(
  712. new CustomEvent('chat:start', {
  713. detail: {
  714. id: responseMessageId
  715. }
  716. })
  717. );
  718. await tick();
  719. const [res, controller] = await generateChatCompletion(localStorage.token, {
  720. stream: true,
  721. model: model.id,
  722. messages: messagesBody,
  723. options: {
  724. ...(params ?? $settings.params ?? {}),
  725. stop:
  726. (params?.stop ?? $settings?.params?.stop ?? undefined)
727. ? (params?.stop?.split(',')?.map((token) => token.trim()) ?? $settings.params.stop).map(
  728. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  729. )
  730. : undefined,
  731. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  732. repeat_penalty:
  733. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  734. },
  735. format: $settings.requestFormat ?? undefined,
  736. keep_alive: $settings.keepAlive ?? undefined,
  737. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  738. files: files.length > 0 ? files : undefined,
  739. session_id: $socket?.id,
  740. chat_id: $chatId,
  741. id: responseMessageId
  742. });
  743. if (res && res.ok) {
  744. console.log('controller', controller);
  745. const reader = res.body
  746. .pipeThrough(new TextDecoderStream())
  747. .pipeThrough(splitStream('\n'))
  748. .getReader();
  749. while (true) {
  750. const { value, done } = await reader.read();
  751. if (done || stopResponseFlag || _chatId !== $chatId) {
  752. responseMessage.done = true;
  753. messages = messages;
  754. if (stopResponseFlag) {
  755. controller.abort('User: Stop Response');
  756. } else {
  757. const messages = createMessagesList(responseMessageId);
  758. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  759. }
  760. _response = responseMessage.content;
  761. break;
  762. }
  763. try {
  764. let lines = value.split('\n');
  765. for (const line of lines) {
  766. if (line !== '') {
  767. console.log(line);
  768. let data = JSON.parse(line);
  769. if ('citations' in data) {
  770. responseMessage.citations = data.citations;
  771. // Only remove status if it was initially set
  772. if (model?.info?.meta?.knowledge ?? false) {
  773. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  774. (status) => status.action !== 'knowledge_search'
  775. );
  776. }
  777. continue;
  778. }
  779. if ('detail' in data) {
  780. throw data;
  781. }
  782. if (data.done == false) {
  783. if (responseMessage.content == '' && data.message.content == '\n') {
  784. continue;
  785. } else {
  786. responseMessage.content += data.message.content;
  787. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  788. navigator.vibrate(5);
  789. }
  790. const sentences = extractSentencesForAudio(responseMessage.content);
  791. sentences.pop();
792. // Dispatch only the last sentence and make sure it hasn't been dispatched before
  793. if (
  794. sentences.length > 0 &&
  795. sentences[sentences.length - 1] !== responseMessage.lastSentence
  796. ) {
  797. responseMessage.lastSentence = sentences[sentences.length - 1];
  798. eventTarget.dispatchEvent(
  799. new CustomEvent('chat', {
  800. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  801. })
  802. );
  803. }
  804. messages = messages;
  805. }
  806. } else {
  807. responseMessage.done = true;
  808. if (responseMessage.content == '') {
  809. responseMessage.error = {
  810. code: 400,
  811. content: `Oops! No text generated from Ollama, Please try again.`
  812. };
  813. }
  814. responseMessage.context = data.context ?? null;
  815. responseMessage.info = {
  816. total_duration: data.total_duration,
  817. load_duration: data.load_duration,
  818. sample_count: data.sample_count,
  819. sample_duration: data.sample_duration,
  820. prompt_eval_count: data.prompt_eval_count,
  821. prompt_eval_duration: data.prompt_eval_duration,
  822. eval_count: data.eval_count,
  823. eval_duration: data.eval_duration
  824. };
  825. messages = messages;
  826. if ($settings.notificationEnabled && !document.hasFocus()) {
  827. const notification = new Notification(`${model.id}`, {
  828. body: responseMessage.content,
  829. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  830. });
  831. }
  832. if ($settings?.responseAutoCopy ?? false) {
  833. copyToClipboard(responseMessage.content);
  834. }
  835. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  836. await tick();
  837. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  838. }
  839. }
  840. }
  841. }
  842. } catch (error) {
  843. console.log(error);
  844. if ('detail' in error) {
  845. toast.error(error.detail);
  846. }
  847. break;
  848. }
  849. if (autoScroll) {
  850. scrollToBottom();
  851. }
  852. }
  853. if ($chatId == _chatId) {
  854. if ($settings.saveChatHistory ?? true) {
  855. chat = await updateChatById(localStorage.token, _chatId, {
  856. messages: messages,
  857. history: history,
  858. models: selectedModels,
  859. params: params,
  860. files: chatFiles
  861. });
  862. currentChatPage.set(1);
  863. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  864. }
  865. }
  866. } else {
  867. if (res !== null) {
  868. const error = await res.json();
  869. console.log(error);
  870. if ('detail' in error) {
  871. toast.error(error.detail);
  872. responseMessage.error = { content: error.detail };
  873. } else {
  874. toast.error(error.error);
  875. responseMessage.error = { content: error.error };
  876. }
  877. } else {
  878. toast.error(
  879. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  880. );
  881. responseMessage.error = {
  882. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  883. provider: 'Ollama'
  884. })
  885. };
  886. }
  887. responseMessage.done = true;
  888. messages = messages;
  889. }
  890. stopResponseFlag = false;
  891. await tick();
  892. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  893. if (lastSentence) {
  894. eventTarget.dispatchEvent(
  895. new CustomEvent('chat', {
  896. detail: { id: responseMessageId, content: lastSentence }
  897. })
  898. );
  899. }
  900. eventTarget.dispatchEvent(
  901. new CustomEvent('chat:finish', {
  902. detail: {
  903. id: responseMessageId,
  904. content: responseMessage.content
  905. }
  906. })
  907. );
  908. if (autoScroll) {
  909. scrollToBottom();
  910. }
  911. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  912. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  913. const _title = await generateChatTitle(userPrompt);
  914. await setChatTitle(_chatId, _title);
  915. }
  916. return _response;
  917. };
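// Streams a completion from an OpenAI-compatible endpoint, mapping image attachments to
// image_url content parts and applying sampling parameters from chat params or user settings.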
  918. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  919. let _response = null;
  920. const responseMessage = history.messages[responseMessageId];
  921. const userMessage = history.messages[responseMessage.parentId];
  922. let files = JSON.parse(JSON.stringify(chatFiles));
  923. if (model?.info?.meta?.knowledge ?? false) {
  924. // Only initialize and add status if knowledge exists
  925. responseMessage.statusHistory = [
  926. {
  927. action: 'knowledge_search',
  928. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  929. searchQuery: userMessage.content
  930. }),
  931. done: false
  932. }
  933. ];
  934. files.push(...model.info.meta.knowledge);
  935. messages = messages; // Trigger Svelte update
  936. }
  937. files.push(
  938. ...(userMessage?.files ?? []).filter((item) =>
  939. ['doc', 'file', 'collection'].includes(item.type)
  940. ),
  941. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  942. );
  943. scrollToBottom();
  944. eventTarget.dispatchEvent(
  945. new CustomEvent('chat:start', {
  946. detail: {
  947. id: responseMessageId
  948. }
  949. })
  950. );
  951. await tick();
  952. try {
  953. const [res, controller] = await generateOpenAIChatCompletion(
  954. localStorage.token,
  955. {
  956. stream: true,
  957. model: model.id,
  958. stream_options:
  959. (model.info?.meta?.capabilities?.usage ?? false)
  960. ? {
  961. include_usage: true
  962. }
  963. : undefined,
  964. messages: [
  965. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  966. ? {
  967. role: 'system',
  968. content: `${promptTemplate(
  969. params?.system ?? $settings?.system ?? '',
  970. $user.name,
  971. $settings?.userLocation
  972. ? await getAndUpdateUserLocation(localStorage.token)
  973. : undefined
  974. )}${
  975. (responseMessage?.userContext ?? null)
  976. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  977. : ''
  978. }`
  979. }
  980. : undefined,
  981. ...messages
  982. ]
  983. .filter((message) => message?.content?.trim())
  984. .map((message, idx, arr) => ({
  985. role: message.role,
  986. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  987. message.role === 'user'
  988. ? {
  989. content: [
  990. {
  991. type: 'text',
  992. text:
  993. arr.length - 1 !== idx
  994. ? message.content
  995. : (message?.raContent ?? message.content)
  996. },
  997. ...message.files
  998. .filter((file) => file.type === 'image')
  999. .map((file) => ({
  1000. type: 'image_url',
  1001. image_url: {
  1002. url: file.url
  1003. }
  1004. }))
  1005. ]
  1006. }
  1007. : {
  1008. content:
  1009. arr.length - 1 !== idx
  1010. ? message.content
  1011. : (message?.raContent ?? message.content)
  1012. })
  1013. })),
  1014. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  1015. stop:
  1016. (params?.stop ?? $settings?.params?.stop ?? undefined)
1017. ? (params?.stop?.split(',')?.map((token) => token.trim()) ?? $settings.params.stop).map(
  1018. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1019. )
  1020. : undefined,
  1021. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1022. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1023. frequency_penalty:
  1024. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1025. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1026. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1027. files: files.length > 0 ? files : undefined,
  1028. session_id: $socket?.id,
  1029. chat_id: $chatId,
  1030. id: responseMessageId
  1031. },
  1032. `${WEBUI_BASE_URL}/api`
  1033. );
1034. // Wait until history/messages have been updated
  1035. await tick();
  1036. scrollToBottom();
  1037. if (res && res.ok && res.body) {
  1038. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1039. for await (const update of textStream) {
  1040. const { value, done, citations, error, usage } = update;
  1041. if (error) {
  1042. await handleOpenAIError(error, null, model, responseMessage);
  1043. break;
  1044. }
  1045. if (done || stopResponseFlag || _chatId !== $chatId) {
  1046. responseMessage.done = true;
  1047. messages = messages;
  1048. if (stopResponseFlag) {
  1049. controller.abort('User: Stop Response');
  1050. } else {
  1051. const messages = createMessagesList(responseMessageId);
  1052. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  1053. }
  1054. _response = responseMessage.content;
  1055. break;
  1056. }
  1057. if (usage) {
  1058. responseMessage.info = { ...usage, openai: true };
  1059. }
  1060. if (citations) {
  1061. responseMessage.citations = citations;
  1062. // Only remove status if it was initially set
  1063. if (model?.info?.meta?.knowledge ?? false) {
  1064. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1065. (status) => status.action !== 'knowledge_search'
  1066. );
  1067. }
  1068. continue;
  1069. }
  1070. if (responseMessage.content == '' && value == '\n') {
  1071. continue;
  1072. } else {
  1073. responseMessage.content += value;
  1074. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1075. navigator.vibrate(5);
  1076. }
  1077. const sentences = extractSentencesForAudio(responseMessage.content);
  1078. sentences.pop();
1079. // Dispatch only the last sentence and make sure it hasn't been dispatched before
  1080. if (
  1081. sentences.length > 0 &&
  1082. sentences[sentences.length - 1] !== responseMessage.lastSentence
  1083. ) {
  1084. responseMessage.lastSentence = sentences[sentences.length - 1];
  1085. eventTarget.dispatchEvent(
  1086. new CustomEvent('chat', {
  1087. detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
  1088. })
  1089. );
  1090. }
  1091. messages = messages;
  1092. }
  1093. if (autoScroll) {
  1094. scrollToBottom();
  1095. }
  1096. }
  1097. if ($settings.notificationEnabled && !document.hasFocus()) {
  1098. const notification = new Notification(`${model.id}`, {
  1099. body: responseMessage.content,
  1100. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1101. });
  1102. }
  1103. if ($settings.responseAutoCopy) {
  1104. copyToClipboard(responseMessage.content);
  1105. }
  1106. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1107. await tick();
  1108. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1109. }
  1110. if ($chatId == _chatId) {
  1111. if ($settings.saveChatHistory ?? true) {
  1112. chat = await updateChatById(localStorage.token, _chatId, {
  1113. models: selectedModels,
  1114. messages: messages,
  1115. history: history,
  1116. params: params,
  1117. files: chatFiles
  1118. });
  1119. currentChatPage.set(1);
  1120. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1121. }
  1122. }
  1123. } else {
  1124. await handleOpenAIError(null, res, model, responseMessage);
  1125. }
  1126. } catch (error) {
  1127. await handleOpenAIError(error, null, model, responseMessage);
  1128. }
  1129. messages = messages;
  1130. stopResponseFlag = false;
  1131. await tick();
  1132. let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
  1133. if (lastSentence) {
  1134. eventTarget.dispatchEvent(
  1135. new CustomEvent('chat', {
  1136. detail: { id: responseMessageId, content: lastSentence }
  1137. })
  1138. );
  1139. }
  1140. eventTarget.dispatchEvent(
  1141. new CustomEvent('chat:finish', {
  1142. detail: {
  1143. id: responseMessageId,
  1144. content: responseMessage.content
  1145. }
  1146. })
  1147. );
  1148. if (autoScroll) {
  1149. scrollToBottom();
  1150. }
  1151. if (messages.length == 2 && selectedModels[0] === model.id) {
  1152. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1153. const _title = await generateChatTitle(userPrompt);
  1154. await setChatTitle(_chatId, _title);
  1155. }
  1156. return _response;
  1157. };
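// Normalizes errors from the OpenAI path (thrown errors or non-OK responses) into a toast
// and an error marker on the response message.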
  1158. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1159. let errorMessage = '';
  1160. let innerError;
  1161. if (error) {
  1162. innerError = error;
  1163. } else if (res !== null) {
  1164. innerError = await res.json();
  1165. }
  1166. console.error(innerError);
  1167. if ('detail' in innerError) {
  1168. toast.error(innerError.detail);
  1169. errorMessage = innerError.detail;
  1170. } else if ('error' in innerError) {
  1171. if ('message' in innerError.error) {
  1172. toast.error(innerError.error.message);
  1173. errorMessage = innerError.error.message;
  1174. } else {
  1175. toast.error(innerError.error);
  1176. errorMessage = innerError.error;
  1177. }
  1178. } else if ('message' in innerError) {
  1179. toast.error(innerError.message);
  1180. errorMessage = innerError.message;
  1181. }
  1182. responseMessage.error = {
  1183. content:
  1184. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1185. provider: model.name ?? model.id
  1186. }) +
  1187. '\n' +
  1188. errorMessage
  1189. };
  1190. responseMessage.done = true;
  1191. messages = messages;
  1192. };
  1193. const stopResponse = () => {
  1194. stopResponseFlag = true;
  1195. console.log('stopResponse');
  1196. };
  1197. const regenerateResponse = async (message) => {
  1198. console.log('regenerateResponse');
  1199. if (messages.length != 0) {
  1200. let userMessage = history.messages[message.parentId];
  1201. let userPrompt = userMessage.content;
  1202. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1203. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1204. await sendPrompt(userPrompt, userMessage.id);
  1205. } else {
  1206. // If there are multiple models selected, use the model of the response message for regeneration
  1207. // e.g. many model chat
  1208. await sendPrompt(userPrompt, userMessage.id, {
  1209. modelId: message.model,
  1210. modelIdx: message.modelIdx
  1211. });
  1212. }
  1213. }
  1214. };
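// Re-opens the latest finished response message and asks its model to continue
// generating from where it stopped.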
  1215. const continueGeneration = async () => {
  1216. console.log('continueGeneration');
  1217. const _chatId = JSON.parse(JSON.stringify($chatId));
  1218. if (messages.length != 0 && messages.at(-1).done == true) {
  1219. const responseMessage = history.messages[history.currentId];
  1220. responseMessage.done = false;
  1221. await tick();
  1222. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1223. if (model) {
  1224. if (model?.owned_by === 'openai') {
  1225. await sendPromptOpenAI(
  1226. model,
  1227. history.messages[responseMessage.parentId].content,
  1228. responseMessage.id,
  1229. _chatId
  1230. );
  1231. } else
  1232. await sendPromptOllama(
  1233. model,
  1234. history.messages[responseMessage.parentId].content,
  1235. responseMessage.id,
  1236. _chatId
  1237. );
  1238. }
  1239. } else {
  1240. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1241. }
  1242. };
  1243. const generateChatTitle = async (userPrompt) => {
  1244. if ($settings?.title?.auto ?? true) {
  1245. const title = await generateTitle(
  1246. localStorage.token,
  1247. selectedModels[0],
  1248. userPrompt,
  1249. $chatId
  1250. ).catch((error) => {
  1251. console.error(error);
  1252. return 'New Chat';
  1253. });
  1254. return title;
  1255. } else {
  1256. return `${userPrompt}`;
  1257. }
  1258. };
  1259. const setChatTitle = async (_chatId, _title) => {
  1260. if (_chatId === $chatId) {
  1261. title = _title;
  1262. }
  1263. if (!$temporaryChatEnabled) {
  1264. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1265. currentChatPage.set(1);
  1266. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1267. }
  1268. };
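// Generates a search query from the latest user message, runs the web search, and attaches
// the results to the pending response message as a 'web_search_results' file.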
  1269. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1270. const responseMessage = history.messages[responseId];
  1271. const userMessage = history.messages[parentId];
  1272. responseMessage.statusHistory = [
  1273. {
  1274. done: false,
  1275. action: 'web_search',
  1276. description: $i18n.t('Generating search query')
  1277. }
  1278. ];
  1279. messages = messages;
  1280. const prompt = userMessage.content;
  1281. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1282. (error) => {
  1283. console.log(error);
  1284. return prompt;
  1285. }
  1286. );
  1287. if (!searchQuery) {
  1288. toast.warning($i18n.t('No search query generated'));
  1289. responseMessage.statusHistory.push({
  1290. done: true,
  1291. error: true,
  1292. action: 'web_search',
  1293. description: 'No search query generated'
  1294. });
1295. messages = messages;
return;
1296. }
  1297. responseMessage.statusHistory.push({
  1298. done: false,
  1299. action: 'web_search',
  1300. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1301. });
  1302. messages = messages;
  1303. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1304. console.log(error);
  1305. toast.error(error);
  1306. return null;
  1307. });
  1308. if (results) {
  1309. responseMessage.statusHistory.push({
  1310. done: true,
  1311. action: 'web_search',
  1312. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1313. query: searchQuery,
  1314. urls: results.filenames
  1315. });
1316. if ((responseMessage?.files ?? undefined) === undefined) {
  1317. responseMessage.files = [];
  1318. }
  1319. responseMessage.files.push({
  1320. collection_name: results.collection_name,
  1321. name: searchQuery,
  1322. type: 'web_search_results',
  1323. urls: results.filenames
  1324. });
  1325. messages = messages;
  1326. } else {
  1327. responseMessage.statusHistory.push({
  1328. done: true,
  1329. error: true,
  1330. action: 'web_search',
  1331. description: 'No search results found'
  1332. });
  1333. messages = messages;
  1334. }
  1335. };
  1336. const getTags = async () => {
  1337. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1338. return [];
  1339. });
  1340. };
  1341. const saveChatHandler = async (_chatId) => {
  1342. if ($chatId == _chatId) {
  1343. if (!$temporaryChatEnabled) {
  1344. chat = await updateChatById(localStorage.token, _chatId, {
  1345. messages: messages,
  1346. history: history,
  1347. models: selectedModels,
  1348. params: params,
  1349. files: chatFiles
  1350. });
  1351. currentChatPage.set(1);
  1352. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1353. }
  1354. }
  1355. };
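// Merges the responses from a multi-model message into one answer by streaming from the
// MoA (mixture-of-agents) completion endpoint, then saves the chat.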
  1356. const mergeResponses = async (messageId, responses, _chatId) => {
  1357. console.log('mergeResponses', messageId, responses);
  1358. const message = history.messages[messageId];
  1359. const mergedResponse = {
  1360. status: true,
  1361. content: ''
  1362. };
  1363. message.merged = mergedResponse;
  1364. messages = messages;
  1365. try {
  1366. const [res, controller] = await generateMoACompletion(
  1367. localStorage.token,
  1368. message.model,
  1369. history.messages[message.parentId].content,
  1370. responses
  1371. );
  1372. if (res && res.ok && res.body) {
  1373. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1374. for await (const update of textStream) {
  1375. const { value, done, citations, error, usage } = update;
  1376. if (error || done) {
  1377. break;
  1378. }
  1379. if (mergedResponse.content == '' && value == '\n') {
  1380. continue;
  1381. } else {
  1382. mergedResponse.content += value;
  1383. messages = messages;
  1384. }
  1385. if (autoScroll) {
  1386. scrollToBottom();
  1387. }
  1388. }
  1389. await saveChatHandler(_chatId);
  1390. } else {
  1391. console.error(res);
  1392. }
  1393. } catch (e) {
  1394. console.error(e);
  1395. }
  1396. };
  1397. </script>
  1398. <svelte:head>
  1399. <title>
  1400. {title
  1401. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1402. : `${$WEBUI_NAME}`}
  1403. </title>
  1404. </svelte:head>
  1405. <audio id="audioElement" src="" style="display: none;" />
  1406. <ChatControls
  1407. models={selectedModelIds.reduce((a, e, i, arr) => {
  1408. const model = $models.find((m) => m.id === e);
  1409. if (model) {
  1410. return [...a, model];
  1411. }
  1412. return a;
  1413. }, [])}
  1414. bind:show={showControls}
  1415. bind:chatFiles
  1416. bind:params
  1417. bind:files
  1418. {submitPrompt}
  1419. {stopResponse}
  1420. modelId={selectedModelIds?.at(0) ?? null}
  1421. chatId={$chatId}
  1422. {eventTarget}
  1423. />
  1424. <EventConfirmDialog
  1425. bind:show={showEventConfirmation}
  1426. title={eventConfirmationTitle}
  1427. message={eventConfirmationMessage}
  1428. input={eventConfirmationInput}
  1429. inputPlaceholder={eventConfirmationInputPlaceholder}
  1430. inputValue={eventConfirmationInputValue}
  1431. on:confirm={(e) => {
  1432. if (e.detail) {
  1433. eventCallback(e.detail);
  1434. } else {
  1435. eventCallback(true);
  1436. }
  1437. }}
  1438. on:cancel={() => {
  1439. eventCallback(false);
  1440. }}
  1441. />
  1442. {#if !chatIdProp || (loaded && chatIdProp)}
  1443. <div
  1444. class="h-screen max-h-[100dvh] {$showSidebar
  1445. ? 'md:max-w-[calc(100%-260px)]'
  1446. : ''} w-full max-w-full flex flex-col"
  1447. >
  1448. {#if $settings?.backgroundImageUrl ?? null}
  1449. <div
  1450. class="absolute {$showSidebar
  1451. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1452. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1453. style="background-image: url({$settings.backgroundImageUrl}) "
  1454. />
  1455. <div
  1456. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1457. />
  1458. {/if}
  1459. <Navbar
  1460. {title}
  1461. bind:selectedModels
  1462. bind:showModelSelector
  1463. bind:showControls
  1464. shareEnabled={messages.length > 0}
  1465. {chat}
  1466. {initNewChat}
  1467. />
  1468. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1469. <div
  1470. class="absolute top-[4.25rem] w-full {$showSidebar
  1471. ? 'md:max-w-[calc(100%-260px)]'
  1472. : ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
  1473. >
  1474. <div class=" flex flex-col gap-1 w-full">
  1475. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1476. <Banner
  1477. {banner}
  1478. on:dismiss={(e) => {
  1479. const bannerId = e.detail;
  1480. localStorage.setItem(
  1481. 'dismissedBannerIds',
  1482. JSON.stringify(
  1483. [
  1484. bannerId,
  1485. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1486. ].filter((id) => $banners.find((b) => b.id === id))
  1487. )
  1488. );
  1489. }}
  1490. />
  1491. {/each}
  1492. </div>
  1493. </div>
  1494. {/if}
  1495. <div class="flex flex-col flex-auto z-10">
  1496. <div
  1497. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
  1498. ? 'lg:pr-[24rem]'
  1499. : ''}"
  1500. id="messages-container"
  1501. bind:this={messagesContainerElement}
  1502. on:scroll={(e) => {
  1503. autoScroll =
  1504. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1505. messagesContainerElement.clientHeight + 5;
  1506. }}
  1507. >
  1508. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1509. <Messages
  1510. chatId={$chatId}
  1511. {selectedModels}
  1512. {processing}
  1513. bind:history
  1514. bind:messages
  1515. bind:autoScroll
  1516. bind:prompt
  1517. bottomPadding={files.length > 0}
  1518. {sendPrompt}
  1519. {continueGeneration}
  1520. {regenerateResponse}
  1521. {mergeResponses}
  1522. {chatActionHandler}
  1523. />
  1524. </div>
  1525. </div>
  1526. <div class={showControls ? 'lg:pr-[24rem]' : ''}>
  1527. <MessageInput
  1528. bind:files
  1529. bind:prompt
  1530. bind:autoScroll
  1531. bind:selectedToolIds
  1532. bind:webSearchEnabled
  1533. bind:atSelectedModel
  1534. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1535. const model = $models.find((m) => m.id === e);
  1536. if (model?.info?.meta?.toolIds ?? false) {
  1537. return [...new Set([...a, ...model.info.meta.toolIds])];
  1538. }
  1539. return a;
  1540. }, [])}
  1541. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1542. {selectedModels}
  1543. {messages}
  1544. {submitPrompt}
  1545. {stopResponse}
  1546. on:call={() => {
  1547. showControls = true;
  1548. }}
  1549. />
  1550. </div>
  1551. </div>
  1552. </div>
  1553. {/if}