Chat.svelte 50 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { PaneGroup, Pane, PaneResizer } from 'paneforge';
  6. import { getContext, onDestroy, onMount, tick } from 'svelte';
  7. import { goto } from '$app/navigation';
  8. import { page } from '$app/stores';
  9. import type { Unsubscriber, Writable } from 'svelte/store';
  10. import type { i18n as i18nType } from 'i18next';
  11. import { WEBUI_BASE_URL } from '$lib/constants';
  12. import {
  13. chatId,
  14. chats,
  15. config,
  16. type Model,
  17. models,
  18. settings,
  19. showSidebar,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showControls,
  25. showCallOverlay,
  26. currentChatPage,
  27. temporaryChatEnabled,
  28. mobile,
  29. showOverview
  30. } from '$lib/stores';
  31. import {
  32. convertMessagesToHistory,
  33. copyToClipboard,
  34. getMessageContentParts,
  35. extractSentencesForAudio,
  36. promptTemplate,
  37. splitStream
  38. } from '$lib/utils';
  39. import { generateChatCompletion } from '$lib/apis/ollama';
  40. import {
  41. createNewChat,
  42. getChatById,
  43. getChatList,
  44. getTagsById,
  45. updateChatById
  46. } from '$lib/apis/chats';
  47. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  48. import { runWebSearch } from '$lib/apis/rag';
  49. import { createOpenAITextStream } from '$lib/apis/streaming';
  50. import { queryMemory } from '$lib/apis/memories';
  51. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  52. import {
  53. chatCompleted,
  54. generateTitle,
  55. generateSearchQuery,
  56. chatAction,
  57. generateMoACompletion
  58. } from '$lib/apis';
  59. import Banner from '../common/Banner.svelte';
  60. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  61. import Messages from '$lib/components/chat/Messages.svelte';
  62. import Navbar from '$lib/components/layout/Navbar.svelte';
  63. import ChatControls from './ChatControls.svelte';
  64. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  65. import EllipsisVertical from '../icons/EllipsisVertical.svelte';
	const i18n: Writable<i18nType> = getContext('i18n');

	// Chat id supplied by the route (/c/[id]); empty string means a fresh, unsaved chat.
	export let chatIdProp = '';
	let loaded = false;

	// Local event bus for chat lifecycle events (e.g. 'chat:start') dispatched to children.
	const eventTarget = new EventTarget();

	// Handle to the resizable controls pane (paneforge); resized by the showControls subscription.
	let controlPane;

	let stopResponseFlag = false;
	let autoScroll = true;
	let processing = '';
	let messagesContainerElement: HTMLDivElement;

	// State for the server-driven confirmation/input dialog (populated by chatEventHandler).
	let showEventConfirmation = false;
	let eventConfirmationTitle = '';
	let eventConfirmationMessage = '';
	let eventConfirmationInput = false;
	let eventConfirmationInputPlaceholder = '';
	let eventConfirmationInputValue = '';
	// Socket callback stashed so the dialog result can be sent back to the server.
	let eventCallback = null;

	let showModelSelector = true;
	// Models chosen in the selector; [''] means "none selected yet".
	let selectedModels = [''];
	// Model picked inline via '@' in the prompt; overrides selectedModels when set.
	let atSelectedModel: Model | undefined;

	let selectedModelIds = [];
	$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

	let selectedToolIds = [];
	let webSearchEnabled = false;

	// Currently loaded chat record and its derived view state.
	let chat = null;
	let tags = [];
	let title = '';
	let prompt = '';
	// chatFiles: attachments persisted with the chat; files: attachments pending on the next message.
	let chatFiles = [];
	let files = [];

	// Flat list of messages along the currently displayed branch (derived from `history`).
	let messages = [];
	// Message tree: id -> message, plus the id of the branch tip being displayed.
	let history = {
		messages: {},
		currentId: null
	};
	let params = {};

	let chatIdUnsubscriber: Unsubscriber | undefined;
	// Rebuild the linear `messages` list whenever the branch tip changes: walk
	// parent links from history.currentId back to the root, prepending as we go.
	$: if (history.currentId !== null) {
		let _messages = [];
		let currentMessage = history.messages[history.currentId];
		while (currentMessage) {
			_messages.unshift({ ...currentMessage });
			currentMessage =
				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
		}
		// This is most likely causing the performance issue
		// (the entire chain is shallow-cloned on every history mutation).
		messages = _messages;
	} else {
		messages = [];
	}
	// When the route supplies a chat id, load that chat; on failure, bail to home.
	$: if (chatIdProp) {
		(async () => {
			console.log(chatIdProp);
			if (chatIdProp && (await loadChat())) {
				await tick();
				loaded = true;
				// Defer the scroll until after the loaded messages have rendered.
				window.setTimeout(() => scrollToBottom(), 0);
				const chatInput = document.getElementById('chat-textarea');
				chatInput?.focus();
			} else {
				await goto('/');
			}
		})();
	}
  129. const showMessage = async (message) => {
  130. let _messageId = JSON.parse(JSON.stringify(message.id));
  131. let messageChildrenIds = history.messages[_messageId].childrenIds;
  132. while (messageChildrenIds.length !== 0) {
  133. _messageId = messageChildrenIds.at(-1);
  134. messageChildrenIds = history.messages[_messageId].childrenIds;
  135. }
  136. history.currentId = _messageId;
  137. await tick();
  138. await tick();
  139. await tick();
  140. const messageElement = document.getElementById(`message-${message.id}`);
  141. if (messageElement) {
  142. messageElement.scrollIntoView({ behavior: 'smooth' });
  143. }
  144. };
  145. const chatEventHandler = async (event, cb) => {
  146. if (event.chat_id === $chatId) {
  147. await tick();
  148. console.log(event);
  149. let message = history.messages[event.message_id];
  150. const type = event?.data?.type ?? null;
  151. const data = event?.data?.data ?? null;
  152. if (type === 'status') {
  153. if (message?.statusHistory) {
  154. message.statusHistory.push(data);
  155. } else {
  156. message.statusHistory = [data];
  157. }
  158. } else if (type === 'citation') {
  159. if (message?.citations) {
  160. message.citations.push(data);
  161. } else {
  162. message.citations = [data];
  163. }
  164. } else if (type === 'message') {
  165. message.content += data.content;
  166. } else if (type === 'replace') {
  167. message.content = data.content;
  168. } else if (type === 'action') {
  169. if (data.action === 'continue') {
  170. const continueButton = document.getElementById('continue-response-button');
  171. if (continueButton) {
  172. continueButton.click();
  173. }
  174. }
  175. } else if (type === 'confirmation') {
  176. eventCallback = cb;
  177. eventConfirmationInput = false;
  178. showEventConfirmation = true;
  179. eventConfirmationTitle = data.title;
  180. eventConfirmationMessage = data.message;
  181. } else if (type === 'input') {
  182. eventCallback = cb;
  183. eventConfirmationInput = true;
  184. showEventConfirmation = true;
  185. eventConfirmationTitle = data.title;
  186. eventConfirmationMessage = data.message;
  187. eventConfirmationInputPlaceholder = data.placeholder;
  188. eventConfirmationInputValue = data?.value ?? '';
  189. } else {
  190. console.log('Unknown message type', data);
  191. }
  192. messages = messages;
  193. }
  194. };
	/**
	 * window 'message' handler for same-origin embeds:
	 *  - 'input:prompt'        fill the chat textarea with the given text
	 *  - 'action:submit'       submit whatever is currently in `prompt`
	 *  - 'input:prompt:submit' submit the supplied text directly
	 */
	const onMessageHandler = async (event: {
		origin: string;
		data: { type: string; text: string };
	}) => {
		// Ignore cross-origin messages.
		if (event.origin !== window.origin) {
			return;
		}

		// Replace with your iframe's origin
		if (event.data.type === 'input:prompt') {
			console.debug(event.data.text);

			const inputElement = document.getElementById('chat-textarea');

			if (inputElement) {
				prompt = event.data.text;
				inputElement.focus();
			}
		}

		if (event.data.type === 'action:submit') {
			console.debug(event.data.text);

			if (prompt !== '') {
				await tick();
				submitPrompt(prompt);
			}
		}

		if (event.data.type === 'input:prompt:submit') {
			console.debug(event.data.text);

			// NOTE(review): gates on the local `prompt` being non-empty but then
			// submits event.data.text — confirm the guard shouldn't be on event.data.text.
			if (prompt !== '') {
				await tick();
				submitPrompt(event.data.text);
			}
		}
	};
	onMount(async () => {
		// Listen for embed commands and server-pushed chat events.
		window.addEventListener('message', onMessageHandler);
		$socket?.on('chat-events', chatEventHandler);

		if (!$chatId) {
			// No active chat: (re)initialize whenever the chatId store is cleared.
			chatIdUnsubscriber = chatId.subscribe(async (value) => {
				if (!value) {
					await initNewChat();
				}
			});
		} else {
			if ($temporaryChatEnabled) {
				// Temporary chats are never addressable by id; go back home.
				await goto('/');
			}
		}

		// Expand/collapse the controls pane when toggled (desktop only); closing
		// it also dismisses the call overlay and the overview panel.
		// NOTE(review): this subscription's unsubscriber is discarded and never
		// called in onDestroy — potential leak across mounts; confirm intended.
		showControls.subscribe(async (value) => {
			if (controlPane && !$mobile) {
				if (value) {
					// Restore the user's last pane size; default to 35%.
					controlPane.resize(parseInt(localStorage.getItem('chat-controls-size') || '35'));
				} else {
					controlPane.resize(0);
				}
			}
			if (!value) {
				showCallOverlay.set(false);
				showOverview.set(false);
			}
		});
	});
  254. onDestroy(() => {
  255. chatIdUnsubscriber?.();
  256. window.removeEventListener('message', onMessageHandler);
  257. $socket?.off('chat-events');
  258. });
  259. //////////////////////////
  260. // Web functions
  261. //////////////////////////
	/**
	 * Resets all component state for a brand-new conversation: clears
	 * title/history/files, picks the model selection (URL param > user settings
	 * > server default), honors the ?q, ?web-search, ?call and ?tool_ids query
	 * params, refreshes settings, and focuses the input.
	 */
	const initNewChat = async () => {
		if ($page.url.pathname.includes('/c/')) {
			// NOTE(review): `history` here is the local chat-history object (it
			// shadows window.history), so history.state is undefined —
			// replaceState still works, but confirm this is intended.
			window.history.replaceState(history.state, '', `/`);
		}
		await chatId.set('');
		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};
		chatFiles = [];
		params = {};

		// Model selection precedence: URL ?models > saved settings > server default.
		if ($page.url.searchParams.get('models')) {
			selectedModels = $page.url.searchParams.get('models')?.split(',');
		} else if ($settings?.models) {
			selectedModels = $settings?.models;
		} else if ($config?.default_models) {
			console.log($config?.default_models.split(',') ?? '');
			selectedModels = $config?.default_models.split(',');
		} else {
			selectedModels = [''];
		}

		if ($page.url.searchParams.get('web-search') === 'true') {
			webSearchEnabled = true;
		}

		// ?q=<text> pre-fills and auto-submits the prompt; ?tool_ids pre-selects tools.
		if ($page.url.searchParams.get('q')) {
			prompt = $page.url.searchParams.get('q') ?? '';
			selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
				.split(',')
				.map((id) => id.trim())
				.filter((id) => id);

			if (prompt) {
				await tick();
				submitPrompt(prompt);
			}
		}

		if ($page.url.searchParams.get('call') === 'true') {
			showCallOverlay.set(true);
		}

		// Blank out any selected model id that is no longer available.
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		// Prefer server-stored UI settings; fall back to the localStorage copy.
		const userSettings = await getUserSettings(localStorage.token);
		if (userSettings) {
			settings.set(userSettings.ui);
		} else {
			settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
		}

		const chatInput = document.getElementById('chat-textarea');
		setTimeout(() => chatInput?.focus(), 0);
	};
	/**
	 * Loads the chat identified by `chatIdProp` into component state.
	 * Returns true on success; navigates home and/or returns a falsy value
	 * (null, or implicitly undefined) when the chat cannot be fetched or has
	 * no content — callers only test truthiness.
	 */
	const loadChat = async () => {
		chatId.set(chatIdProp);
		chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
			await goto('/');
			return null;
		});
		if (chat) {
			tags = await getTags();

			const chatContent = chat.chat;

			if (chatContent) {
				console.log(chatContent);

				// Older chats may store a single model (or none); normalize to an array.
				selectedModels =
					(chatContent?.models ?? undefined) !== undefined
						? chatContent.models
						: [chatContent.models ?? ''];
				// Older chats store a flat `messages` list instead of a history tree.
				history =
					(chatContent?.history ?? undefined) !== undefined
						? chatContent.history
						: convertMessagesToHistory(chatContent.messages);
				title = chatContent.title;

				const userSettings = await getUserSettings(localStorage.token);

				if (userSettings) {
					await settings.set(userSettings.ui);
				} else {
					await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
				}

				params = chatContent?.params ?? {};
				chatFiles = chatContent?.files ?? [];

				autoScroll = true;
				await tick();

				// Mark the last message done so a previously interrupted stream
				// doesn't leave the UI stuck in a "generating" state.
				if (messages.length > 0) {
					history.messages[messages.at(-1).id].done = true;
				}
				await tick();

				return true;
			} else {
				return null;
			}
		}
	};
  355. const scrollToBottom = async () => {
  356. await tick();
  357. if (messagesContainerElement) {
  358. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  359. }
  360. };
  361. const createMessagesList = (responseMessageId) => {
  362. const message = history.messages[responseMessageId];
  363. if (message.parentId) {
  364. return [...createMessagesList(message.parentId), message];
  365. } else {
  366. return [message];
  367. }
  368. };
	/**
	 * Post-stream hook: renders any mermaid diagrams in the finished response,
	 * notifies the backend that the completion ended (the backend may return
	 * patched messages to merge back into history), then persists the chat and
	 * refreshes the sidebar list.
	 *
	 * NOTE(review): the `chatId` parameter shadows the imported `chatId` store;
	 * `$chatId` below appears to resolve to the store — confirm compiler behavior.
	 */
	const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
		await mermaid.run({
			querySelector: '.mermaid'
		});

		const res = await chatCompleted(localStorage.token, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				info: m.info ? m.info : undefined,
				timestamp: m.timestamp
			})),
			chat_id: chatId,
			session_id: $socket?.id,
			id: responseMessageId
		}).catch((error) => {
			toast.error(error);
			messages.at(-1).error = { content: error };

			return null;
		});

		if (res !== null) {
			// Update chat history with the new messages
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					// Preserve the pre-edit content when the backend rewrote it.
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}

		// Persist only if the user is still viewing this chat and it isn't temporary.
		if ($chatId == chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, chatId, {
					models: selectedModels,
					messages: messages,
					history: history,
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};
	/**
	 * Invokes a registered backend action (`actionId`) for a response message
	 * and merges any messages the backend returns into local history.
	 * Near-duplicate of chatCompletedHandler except it targets the chatAction
	 * endpoint and can forward an optional `event` payload.
	 */
	const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
		const res = await chatAction(localStorage.token, actionId, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				info: m.info ? m.info : undefined,
				timestamp: m.timestamp
			})),
			...(event ? { event: event } : {}),
			chat_id: chatId,
			session_id: $socket?.id,
			id: responseMessageId
		}).catch((error) => {
			toast.error(error);
			messages.at(-1).error = { content: error };

			return null;
		});

		if (res !== null) {
			// Update chat history with the new messages
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					// Preserve the pre-edit content when the backend rewrote it.
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}

		// Persist only if still viewing this chat and it isn't a temporary one.
		if ($chatId == chatId) {
			if (!$temporaryChatEnabled) {
				chat = await updateChatById(localStorage.token, chatId, {
					models: selectedModels,
					messages: messages,
					history: history,
					params: params,
					files: chatFiles
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
			}
		}
	};
  461. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  462. return setInterval(() => {
  463. $socket?.emit('usage', {
  464. action: 'chat',
  465. model: modelId,
  466. chat_id: chatId
  467. });
  468. }, 1000);
  469. };
  470. //////////////////////////
  471. // Chat functions
  472. //////////////////////////
	/**
	 * Validates and submits the user's prompt. Rejects submission when no model
	 * is selected, a response is still streaming, the previous response errored,
	 * attachments are still processing, or the server file limit is exceeded;
	 * otherwise records the user message in history and delegates to sendPrompt().
	 * Returns the per-model responses from sendPrompt (or [] when rejected).
	 */
	const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
		let _responses = [];
		console.log('submitPrompt', $chatId);

		// Blank out any selected model id that no longer exists.
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		if (selectedModels.includes('')) {
			toast.error($i18n.t('Model not selected'));
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else if (messages.length != 0 && messages.at(-1).error) {
			// Error in response
			toast.error(
				$i18n.t(
					`Oops! There was an error in the previous response. Please try again or contact admin.`
				)
			);
		} else if (
			files.length > 0 &&
			files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
		) {
			// Upload not done
			toast.error(
				$i18n.t(
					`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
				)
			);
		} else if (
			($config?.file?.max_count ?? null) !== null &&
			files.length + chatFiles.length > $config?.file?.max_count
		) {
			console.log(chatFiles.length, files.length);
			toast.error(
				$i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
					maxCount: $config?.file?.max_count
				})
			);
		} else {
			// Reset chat input textarea
			const chatTextAreaElement = document.getElementById('chat-textarea');

			if (chatTextAreaElement) {
				chatTextAreaElement.value = '';
				chatTextAreaElement.style.height = '';
			}

			// Promote doc/file/collection attachments to chat-level files.
			const _files = JSON.parse(JSON.stringify(files));
			chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
			chatFiles = chatFiles.filter(
				// Remove duplicates
				(item, index, array) =>
					array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
			);

			files = [];
			prompt = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				content: userPrompt,
				files: _files.length > 0 ? _files : undefined,
				timestamp: Math.floor(Date.now() / 1000), // Unix epoch
				models: selectedModels
			};

			// Add message to history and Set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/message have been updated
			await tick();

			_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
		}

		return _responses;
	};
	/**
	 * Fans the prompt out to every selected model: creates one pending
	 * assistant message per model, persists a brand-new chat if this is the
	 * first exchange (unless temporary), then streams each model's completion
	 * concurrently via the OpenAI- or Ollama-specific sender.
	 * Returns the collected response strings (in completion order).
	 */
	const sendPrompt = async (
		prompt: string,
		parentId: string,
		{ modelId = null, modelIdx = null, newChat = false } = {}
	) => {
		let _responses: string[] = [];

		// If modelId is provided, use it, else use selected model
		let selectedModelIds = modelId
			? [modelId]
			: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels;

		// Create response messages for each selected model
		const responseMessageIds: Record<PropertyKey, string> = {};
		for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					modelIdx: modelIdx ? modelIdx : _modelIdx,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				// Keyed by "<modelId>-<index>" so the streaming phase below can
				// find the message slot created for each model.
				responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
			}
		}
		await tick();

		// Create new chat if only one message in messages
		if (newChat && messages.length == 2) {
			if (!$temporaryChatEnabled) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					params: params,
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});

				currentChatPage.set(1);
				await chats.set(await getChatList(localStorage.token, $currentChatPage));
				await chatId.set(chat.id);
			} else {
				// Temporary chats are kept client-side only.
				await chatId.set('local');
			}
			await tick();
		}

		// Snapshot the chat id so late-arriving chunks still save to the right chat
		// even if the user navigates away mid-stream.
		const _chatId = JSON.parse(JSON.stringify($chatId));

		await Promise.all(
			selectedModelIds.map(async (modelId, _modelIdx) => {
				console.log('modelId', modelId);
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					// If there are image files, check if model is vision capable
					const hasImages = messages.some((message) =>
						message.files?.some((file) => file.type === 'image')
					);

					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
						toast.error(
							$i18n.t('Model {{modelName}} is not vision capable', {
								modelName: model.name ?? model.id
							})
						);
					}

					let responseMessageId =
						responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
					let responseMessage = history.messages[responseMessageId];

					// Pull relevant memories (if enabled) into the system-prompt context.
					let userContext = null;
					if ($settings?.memory ?? false) {
						if (userContext === null) {
							const res = await queryMemory(localStorage.token, prompt).catch((error) => {
								toast.error(error);
								return null;
							});

							if (res) {
								if (res.documents[0].length > 0) {
									// Format each memory as "N. [YYYY-MM-DD]. <text>".
									userContext = res.documents[0].reduce((acc, doc, index) => {
										const createdAtTimestamp = res.metadatas[0][index].created_at;
										const createdAtDate = new Date(createdAtTimestamp * 1000)
											.toISOString()
											.split('T')[0];

										return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
									}, '');
								}

								console.log(userContext);
							}
						}
					}
					responseMessage.userContext = userContext;

					// Periodic 'usage' heartbeat while this model is generating.
					const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

					scrollToBottom();
					if (webSearchEnabled) {
						await getWebSearchResults(model.id, parentId, responseMessageId);
					}

					// Dispatch to the backend-specific sender.
					let _response = null;
					if (model?.owned_by === 'openai') {
						_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}
					_responses.push(_response);

					if (chatEventEmitter) clearInterval(chatEventEmitter);
				} else {
					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
				}
			})
		);

		// Refresh the sidebar chat list ordering.
		currentChatPage.set(1);
		chats.set(await getChatList(localStorage.token, $currentChatPage));

		return _responses;
	};
	// Streams a chat completion for `model` from the Ollama backend and mutates
	// `responseMessage` (content, citations, status, timing info) in place as
	// chunks arrive. Returns the final response text, or null when generation
	// did not run to completion.
	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response: string | null = null;
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		// Wait until history/message have been updated
		await tick();

		// Scroll down
		scrollToBottom();

		// Build the Ollama `messages` payload: an optional system message
		// (prompt template plus per-message user context) followed by the chat turns.
		const messagesBody = [
			params?.system || $settings.system || (responseMessage?.userContext ?? null)
				? {
						role: 'system',
						content: `${promptTemplate(
							params?.system ?? $settings?.system ?? '',
							$user.name,
							$settings?.userLocation
								? await getAndUpdateUserLocation(localStorage.token)
								: undefined
						)}${
							(responseMessage?.userContext ?? null)
								? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
								: ''
						}`
					}
				: undefined,
			...messages
		]
			.filter((message) => message?.content?.trim())
			.map((message) => {
				// Prepare the base message object
				const baseMessage = {
					role: message.role,
					content: message.content
				};

				// Extract and format image URLs if any exist
				// (everything after the first ',' — i.e. the data-URL prefix is stripped)
				const imageUrls = message.files
					?.filter((file) => file.type === 'image')
					.map((file) => file.url.slice(file.url.indexOf(',') + 1));

				// Add images array only if it contains elements
				if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
					baseMessage.images = imageUrls;
				}
				return baseMessage;
			});

		let lastImageIndex = -1;

		// Find the index of the last object with images
		messagesBody.forEach((item, index) => {
			if (item.images) {
				lastImageIndex = index;
			}
		});

		// Remove images from all but the last one
		messagesBody.forEach((item, index) => {
			if (index !== lastImageIndex) {
				delete item.images;
			}
		});

		// Start from the chat-level files; deep-clone so the pushes below do not
		// mutate `chatFiles` itself.
		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			// Only initialize and add status if knowledge exists
			responseMessage.statusHistory = [
				{
					action: 'knowledge_search',
					description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
						searchQuery: userMessage.content
					}),
					done: false
				}
			];
			files.push(...model.info.meta.knowledge);
			messages = messages; // Trigger Svelte update
		}
		// Attach user-provided documents and any web-search results gathered for this turn.
		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);

		await tick();

		const stream = $settings?.streamResponse ?? true;
		const [res, controller] = await generateChatCompletion(localStorage.token, {
			stream: stream,
			model: model.id,
			messages: messagesBody,
			options: {
				...{ ...($settings?.params ?? {}), ...params },
				// `stop` arrives as a comma-separated, URI-encoded string; split and decode.
				// NOTE(review): `params?.stop.split` throws if `params` exists without a
				// `stop` key while a settings-level stop is configured — TODO confirm
				// the intended precedence here.
				stop:
					(params?.stop ?? $settings?.params?.stop ?? undefined)
						? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
								(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
				repeat_penalty:
					params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
			},
			format: $settings.requestFormat ?? undefined,
			keep_alive: $settings.keepAlive ?? undefined,
			tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
			files: files.length > 0 ? files : undefined,
			session_id: $socket?.id,
			chat_id: $chatId,
			id: responseMessageId
		});
		if (res && res.ok) {
			if (!stream) {
				// Non-streaming: a single JSON body with the full message and timing stats.
				const response = await res.json();
				console.log(response);

				responseMessage.content = response.message.content;
				responseMessage.info = {
					eval_count: response.eval_count,
					eval_duration: response.eval_duration,
					load_duration: response.load_duration,
					prompt_eval_count: response.prompt_eval_count,
					prompt_eval_duration: response.prompt_eval_duration,
					total_duration: response.total_duration
				};
				responseMessage.done = true;
			} else {
				console.log('controller', controller);

				// Streaming: the body is newline-delimited JSON chunks.
				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();

				while (true) {
					const { value, done } = await reader.read();
					// Stop on stream end, user cancel, or navigation to another chat.
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;

						if (stopResponseFlag) {
							controller.abort('User: Stop Response');
						}

						_response = responseMessage.content;
						break;
					}

					try {
						let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								let data = JSON.parse(line);

								if ('citations' in data) {
									responseMessage.citations = data.citations;
									// Only remove status if it was initially set
									if (model?.info?.meta?.knowledge ?? false) {
										responseMessage.statusHistory = responseMessage.statusHistory.filter(
											(status) => status.action !== 'knowledge_search'
										);
									}
									continue;
								}

								if ('detail' in data) {
									// Backend error payload; re-thrown and surfaced as a toast below.
									throw data;
								}

								if (data.done == false) {
									// Skip a leading bare newline so the message doesn't start blank.
									if (responseMessage.content == '' && data.message.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.message.content;

										if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
											navigator.vibrate(5);
										}

										const messageContentParts = getMessageContentParts(
											responseMessage.content,
											$config?.audio?.tts?.split_on ?? 'punctuation'
										);
										// Drop the trailing (possibly still incomplete) part.
										messageContentParts.pop();

										// dispatch only last sentence and make sure it hasn't been dispatched before
										if (
											messageContentParts.length > 0 &&
											messageContentParts[messageContentParts.length - 1] !==
												responseMessage.lastSentence
										) {
											responseMessage.lastSentence =
												messageContentParts[messageContentParts.length - 1];
											eventTarget.dispatchEvent(
												new CustomEvent('chat', {
													detail: {
														id: responseMessageId,
														content: messageContentParts[messageContentParts.length - 1]
													}
												})
											);
										}

										messages = messages;
									}
								} else {
									// Final chunk: finish the message and record timing info.
									responseMessage.done = true;

									if (responseMessage.content == '') {
										responseMessage.error = {
											code: 400,
											content: `Oops! No text generated from Ollama, Please try again.`
										};
									}

									responseMessage.context = data.context ?? null;
									responseMessage.info = {
										total_duration: data.total_duration,
										load_duration: data.load_duration,
										sample_count: data.sample_count,
										sample_duration: data.sample_duration,
										prompt_eval_count: data.prompt_eval_count,
										prompt_eval_duration: data.prompt_eval_duration,
										eval_count: data.eval_count,
										eval_duration: data.eval_duration
									};

									messages = messages;

									if ($settings.notificationEnabled && !document.hasFocus()) {
										// Desktop notification when the tab is in the background.
										const notification = new Notification(`${model.id}`, {
											body: responseMessage.content,
											icon: `${WEBUI_BASE_URL}/static/favicon.png`
										});
									}

									if ($settings?.responseAutoCopy ?? false) {
										copyToClipboard(responseMessage.content);
									}

									if ($settings.responseAutoPlayback && !$showCallOverlay) {
										await tick();
										document.getElementById(`speak-button-${responseMessage.id}`)?.click();
									}
								}
							}
						}
					} catch (error) {
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
						}
						break;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}
			}

			await chatCompletedHandler(
				_chatId,
				model.id,
				responseMessageId,
				createMessagesList(responseMessageId)
			);
		} else {
			// The request failed before or while opening the stream.
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.error = { content: error.detail };
				} else {
					toast.error(error.error);
					responseMessage.error = { content: error.error };
				}
			} else {
				toast.error(
					$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
				);
				responseMessage.error = {
					content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
						provider: 'Ollama'
					})
				};
			}
			responseMessage.done = true;

			if (responseMessage.statusHistory) {
				responseMessage.statusHistory = responseMessage.statusHistory.filter(
					(status) => status.action !== 'knowledge_search'
				);
			}

			messages = messages;
		}

		await saveChatHandler(_chatId);

		stopResponseFlag = false;
		await tick();

		// Flush the last (not-yet-dispatched) sentence for TTS consumers.
		let lastMessageContentPart =
			getMessageContentParts(
				responseMessage.content,
				$config?.audio?.tts?.split_on ?? 'punctuation'
			)?.at(-1) ?? '';
		if (lastMessageContentPart) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastMessageContentPart }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		// First exchange in a new chat: set the URL and auto-generate a title.
		if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);

			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
	// Streams a chat completion for `model` from an OpenAI-compatible backend and
	// mutates `responseMessage` in place as chunks arrive. Returns the final
	// response text, or null when generation did not run to completion.
	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
		let _response = null;
		const responseMessage = history.messages[responseMessageId];
		const userMessage = history.messages[responseMessage.parentId];

		// Start from the chat-level files; deep-clone so the pushes below do not
		// mutate `chatFiles` itself.
		let files = JSON.parse(JSON.stringify(chatFiles));
		if (model?.info?.meta?.knowledge ?? false) {
			// Only initialize and add status if knowledge exists
			responseMessage.statusHistory = [
				{
					action: 'knowledge_search',
					description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
						searchQuery: userMessage.content
					}),
					done: false
				}
			];
			files.push(...model.info.meta.knowledge);
			messages = messages; // Trigger Svelte update
		}
		// Attach user-provided documents and any web-search results gathered for this turn.
		files.push(
			...(userMessage?.files ?? []).filter((item) =>
				['doc', 'file', 'collection'].includes(item.type)
			),
			...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
		);

		scrollToBottom();

		eventTarget.dispatchEvent(
			new CustomEvent('chat:start', {
				detail: {
					id: responseMessageId
				}
			})
		);
		await tick();

		try {
			const stream = $settings?.streamResponse ?? true;
			const [res, controller] = await generateOpenAIChatCompletion(
				localStorage.token,
				{
					stream: stream,
					// Request in-stream usage stats only when the model advertises support.
					model: model.id,
					...(stream && (model.info?.meta?.capabilities?.usage ?? false)
						? {
								stream_options: {
									include_usage: true
								}
							}
						: {}),
					// Optional system message (prompt template + user context) followed by
					// the chat turns; images are sent as OpenAI `image_url` content parts.
					messages: [
						params?.system || $settings.system || (responseMessage?.userContext ?? null)
							? {
									role: 'system',
									content: `${promptTemplate(
										params?.system ?? $settings?.system ?? '',
										$user.name,
										$settings?.userLocation
											? await getAndUpdateUserLocation(localStorage.token)
											: undefined
									)}${
										(responseMessage?.userContext ?? null)
											? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
											: ''
									}`
								}
							: undefined,
						...messages
					]
						.filter((message) => message?.content?.trim())
						.map((message, idx, arr) => ({
							role: message.role,
							...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
							message.role === 'user'
								? {
										content: [
											{
												type: 'text',
												text:
													// The last turn may carry retrieval-augmented content (`raContent`).
													arr.length - 1 !== idx
														? message.content
														: (message?.raContent ?? message.content)
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
									}
								: {
										content:
											arr.length - 1 !== idx
												? message.content
												: (message?.raContent ?? message.content)
									})
						})),
					seed: params?.seed ?? $settings?.params?.seed ?? undefined,
					// `stop` arrives as a comma-separated, URI-encoded string; split and decode.
					// NOTE(review): `params?.stop.split` throws if `params` exists without a
					// `stop` key while a settings-level stop is configured — TODO confirm
					// the intended precedence here.
					stop:
						(params?.stop ?? $settings?.params?.stop ?? undefined)
							? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
									(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
								)
							: undefined,
					temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
					top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
					frequency_penalty:
						params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
					max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
					tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
					files: files.length > 0 ? files : undefined,
					session_id: $socket?.id,
					chat_id: $chatId,
					id: responseMessageId
				},
				`${WEBUI_BASE_URL}/api`
			);

			// Wait until history/message have been updated
			await tick();

			scrollToBottom();

			if (res && res.ok && res.body) {
				if (!stream) {
					// Non-streaming: a single JSON body with the full completion and usage.
					const response = await res.json();
					console.log(response);

					responseMessage.content = response.choices[0].message.content;
					responseMessage.info = { ...response.usage, openai: true };
					responseMessage.done = true;
				} else {
					const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

					for await (const update of textStream) {
						const { value, done, citations, error, usage } = update;
						if (error) {
							await handleOpenAIError(error, null, model, responseMessage);
							break;
						}
						// Stop on stream end, user cancel, or navigation to another chat.
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							messages = messages;

							if (stopResponseFlag) {
								controller.abort('User: Stop Response');
							}

							_response = responseMessage.content;
							break;
						}

						if (usage) {
							responseMessage.info = { ...usage, openai: true };
						}

						if (citations) {
							responseMessage.citations = citations;
							// Only remove status if it was initially set
							if (model?.info?.meta?.knowledge ?? false) {
								responseMessage.statusHistory = responseMessage.statusHistory.filter(
									(status) => status.action !== 'knowledge_search'
								);
							}
							continue;
						}

						// Skip a leading bare newline so the message doesn't start blank.
						if (responseMessage.content == '' && value == '\n') {
							continue;
						} else {
							responseMessage.content += value;

							if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
								navigator.vibrate(5);
							}

							const messageContentParts = getMessageContentParts(
								responseMessage.content,
								$config?.audio?.tts?.split_on ?? 'punctuation'
							);
							// Drop the trailing (possibly still incomplete) part.
							messageContentParts.pop();

							// dispatch only last sentence and make sure it hasn't been dispatched before
							if (
								messageContentParts.length > 0 &&
								messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
							) {
								responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
								eventTarget.dispatchEvent(
									new CustomEvent('chat', {
										detail: {
											id: responseMessageId,
											content: messageContentParts[messageContentParts.length - 1]
										}
									})
								);
							}

							messages = messages;
						}

						if (autoScroll) {
							scrollToBottom();
						}
					}
				}

				await chatCompletedHandler(
					_chatId,
					model.id,
					responseMessageId,
					createMessagesList(responseMessageId)
				);

				if ($settings.notificationEnabled && !document.hasFocus()) {
					// Desktop notification when the tab is in the background.
					const notification = new Notification(`${model.id}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback && !$showCallOverlay) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}
			} else {
				await handleOpenAIError(null, res, model, responseMessage);
			}
		} catch (error) {
			await handleOpenAIError(error, null, model, responseMessage);
		}

		await saveChatHandler(_chatId);

		messages = messages;

		stopResponseFlag = false;
		await tick();

		// Flush the last (not-yet-dispatched) sentence for TTS consumers.
		let lastMessageContentPart =
			getMessageContentParts(
				responseMessage.content,
				$config?.audio?.tts?.split_on ?? 'punctuation'
			)?.at(-1) ?? '';
		if (lastMessageContentPart) {
			eventTarget.dispatchEvent(
				new CustomEvent('chat', {
					detail: { id: responseMessageId, content: lastMessageContentPart }
				})
			);
		}

		eventTarget.dispatchEvent(
			new CustomEvent('chat:finish', {
				detail: {
					id: responseMessageId,
					content: responseMessage.content
				}
			})
		);

		if (autoScroll) {
			scrollToBottom();
		}

		// First exchange in a new chat: set the URL and auto-generate a title.
		if (messages.length == 2 && selectedModels[0] === model.id) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);

			const _title = await generateChatTitle(userPrompt);
			await setChatTitle(_chatId, _title);
		}

		return _response;
	};
  1243. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1244. let errorMessage = '';
  1245. let innerError;
  1246. if (error) {
  1247. innerError = error;
  1248. } else if (res !== null) {
  1249. innerError = await res.json();
  1250. }
  1251. console.error(innerError);
  1252. if ('detail' in innerError) {
  1253. toast.error(innerError.detail);
  1254. errorMessage = innerError.detail;
  1255. } else if ('error' in innerError) {
  1256. if ('message' in innerError.error) {
  1257. toast.error(innerError.error.message);
  1258. errorMessage = innerError.error.message;
  1259. } else {
  1260. toast.error(innerError.error);
  1261. errorMessage = innerError.error;
  1262. }
  1263. } else if ('message' in innerError) {
  1264. toast.error(innerError.message);
  1265. errorMessage = innerError.message;
  1266. }
  1267. responseMessage.error = {
  1268. content:
  1269. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1270. provider: model.name ?? model.id
  1271. }) +
  1272. '\n' +
  1273. errorMessage
  1274. };
  1275. responseMessage.done = true;
  1276. if (responseMessage.statusHistory) {
  1277. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1278. (status) => status.action !== 'knowledge_search'
  1279. );
  1280. }
  1281. messages = messages;
  1282. };
  1283. const stopResponse = () => {
  1284. stopResponseFlag = true;
  1285. console.log('stopResponse');
  1286. };
  1287. const regenerateResponse = async (message) => {
  1288. console.log('regenerateResponse');
  1289. if (messages.length != 0) {
  1290. let userMessage = history.messages[message.parentId];
  1291. let userPrompt = userMessage.content;
  1292. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1293. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1294. await sendPrompt(userPrompt, userMessage.id);
  1295. } else {
  1296. // If there are multiple models selected, use the model of the response message for regeneration
  1297. // e.g. many model chat
  1298. await sendPrompt(userPrompt, userMessage.id, {
  1299. modelId: message.model,
  1300. modelIdx: message.modelIdx
  1301. });
  1302. }
  1303. }
  1304. };
  1305. const continueGeneration = async () => {
  1306. console.log('continueGeneration');
  1307. const _chatId = JSON.parse(JSON.stringify($chatId));
  1308. if (messages.length != 0 && messages.at(-1).done == true) {
  1309. const responseMessage = history.messages[history.currentId];
  1310. responseMessage.done = false;
  1311. await tick();
  1312. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1313. if (model) {
  1314. if (model?.owned_by === 'openai') {
  1315. await sendPromptOpenAI(
  1316. model,
  1317. history.messages[responseMessage.parentId].content,
  1318. responseMessage.id,
  1319. _chatId
  1320. );
  1321. } else
  1322. await sendPromptOllama(
  1323. model,
  1324. history.messages[responseMessage.parentId].content,
  1325. responseMessage.id,
  1326. _chatId
  1327. );
  1328. }
  1329. } else {
  1330. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1331. }
  1332. };
  1333. const generateChatTitle = async (userPrompt) => {
  1334. if ($settings?.title?.auto ?? true) {
  1335. const title = await generateTitle(
  1336. localStorage.token,
  1337. selectedModels[0],
  1338. userPrompt,
  1339. $chatId
  1340. ).catch((error) => {
  1341. console.error(error);
  1342. return 'New Chat';
  1343. });
  1344. return title;
  1345. } else {
  1346. return `${userPrompt}`;
  1347. }
  1348. };
  1349. const setChatTitle = async (_chatId, _title) => {
  1350. if (_chatId === $chatId) {
  1351. title = _title;
  1352. }
  1353. if (!$temporaryChatEnabled) {
  1354. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1355. currentChatPage.set(1);
  1356. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1357. }
  1358. };
  1359. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1360. const responseMessage = history.messages[responseId];
  1361. const userMessage = history.messages[parentId];
  1362. responseMessage.statusHistory = [
  1363. {
  1364. done: false,
  1365. action: 'web_search',
  1366. description: $i18n.t('Generating search query')
  1367. }
  1368. ];
  1369. messages = messages;
  1370. const prompt = userMessage.content;
  1371. let searchQuery = await generateSearchQuery(
  1372. localStorage.token,
  1373. model,
  1374. messages.filter((message) => message?.content?.trim()),
  1375. prompt
  1376. ).catch((error) => {
  1377. console.log(error);
  1378. return prompt;
  1379. });
  1380. if (!searchQuery || searchQuery == '') {
  1381. responseMessage.statusHistory.push({
  1382. done: true,
  1383. error: true,
  1384. action: 'web_search',
  1385. description: $i18n.t('No search query generated')
  1386. });
  1387. messages = messages;
  1388. return;
  1389. }
  1390. responseMessage.statusHistory.push({
  1391. done: false,
  1392. action: 'web_search',
  1393. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1394. });
  1395. messages = messages;
  1396. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1397. console.log(error);
  1398. toast.error(error);
  1399. return null;
  1400. });
  1401. if (results) {
  1402. responseMessage.statusHistory.push({
  1403. done: true,
  1404. action: 'web_search',
  1405. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1406. query: searchQuery,
  1407. urls: results.filenames
  1408. });
  1409. if (responseMessage?.files ?? undefined === undefined) {
  1410. responseMessage.files = [];
  1411. }
  1412. responseMessage.files.push({
  1413. collection_name: results.collection_name,
  1414. name: searchQuery,
  1415. type: 'web_search_results',
  1416. urls: results.filenames
  1417. });
  1418. messages = messages;
  1419. } else {
  1420. responseMessage.statusHistory.push({
  1421. done: true,
  1422. error: true,
  1423. action: 'web_search',
  1424. description: 'No search results found'
  1425. });
  1426. messages = messages;
  1427. }
  1428. };
  1429. const getTags = async () => {
  1430. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1431. return [];
  1432. });
  1433. };
  1434. const saveChatHandler = async (_chatId) => {
  1435. if ($chatId == _chatId) {
  1436. if (!$temporaryChatEnabled) {
  1437. chat = await updateChatById(localStorage.token, _chatId, {
  1438. messages: messages,
  1439. history: history,
  1440. models: selectedModels,
  1441. params: params,
  1442. files: chatFiles
  1443. });
  1444. currentChatPage.set(1);
  1445. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1446. }
  1447. }
  1448. };
	// Merges multiple model responses for `messageId` into a single completion
	// via generateMoACompletion, streaming the merged text into `message.merged`
	// as it arrives and persisting the chat on success.
	const mergeResponses = async (messageId, responses, _chatId) => {
		console.log('mergeResponses', messageId, responses);
		const message = history.messages[messageId];
		const mergedResponse = {
			status: true,
			content: ''
		};
		message.merged = mergedResponse;
		messages = messages;

		try {
			const [res, controller] = await generateMoACompletion(
				localStorage.token,
				message.model,
				history.messages[message.parentId].content,
				responses
			);

			if (res && res.ok && res.body) {
				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
				for await (const update of textStream) {
					const { value, done, citations, error, usage } = update;
					if (error || done) {
						break;
					}

					// Skip a leading bare newline so the merged text doesn't start blank.
					if (mergedResponse.content == '' && value == '\n') {
						continue;
					} else {
						mergedResponse.content += value;
						messages = messages;
					}

					if (autoScroll) {
						scrollToBottom();
					}
				}

				await saveChatHandler(_chatId);
			} else {
				console.error(res);
			}
		} catch (e) {
			// Merge is best-effort; leave whatever content was streamed so far.
			console.error(e);
		}
	};
  1490. </script>
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden audio element; its id is referenced elsewhere — presumably for audio playback (TODO confirm) -->
<audio id="audioElement" src="" style="display: none;" />

<EventConfirmDialog
	bind:show={showEventConfirmation}
	title={eventConfirmationTitle}
	message={eventConfirmationMessage}
	input={eventConfirmationInput}
	inputPlaceholder={eventConfirmationInputPlaceholder}
	inputValue={eventConfirmationInputValue}
	on:confirm={(e) => {
		if (e.detail) {
			eventCallback(e.detail);
		} else {
			eventCallback(true);
		}
	}}
	on:cancel={() => {
		eventCallback(false);
	}}
/>

<!-- Render once the chat is loaded (or immediately for a brand-new chat with no chatIdProp) -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<!-- Optional user-configured background image with a readability overlay -->
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<PaneGroup direction="horizontal" class="w-full h-full">
			<Pane defaultSize={50} class="h-full flex w-full relative">
				<!-- Announcement banners: only on an empty, unsaved chat; dismissed ids persist in localStorage -->
				{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
					<div class="absolute top-3 left-0 right-0 w-full z-20">
						<div class=" flex flex-col gap-1 w-full">
							{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
								<Banner
									{banner}
									on:dismiss={(e) => {
										const bannerId = e.detail;

										localStorage.setItem(
											'dismissedBannerIds',
											JSON.stringify(
												[
													bannerId,
													...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
												].filter((id) => $banners.find((b) => b.id === id))
											)
										);
									}}
								/>
							{/each}
						</div>
					</div>
				{/if}

				<div class="flex flex-col flex-auto z-10 w-full">
					<!-- Scrollable message list; scrolling away from the bottom disables auto-scroll -->
					<div
						class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
						id="messages-container"
						bind:this={messagesContainerElement}
						on:scroll={(e) => {
							autoScroll =
								messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
								messagesContainerElement.clientHeight + 5;
						}}
					>
						<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
							<Messages
								chatId={$chatId}
								{selectedModels}
								{processing}
								bind:history
								bind:messages
								bind:autoScroll
								bind:prompt
								bottomPadding={files.length > 0}
								{sendPrompt}
								{continueGeneration}
								{regenerateResponse}
								{mergeResponses}
								{chatActionHandler}
								{showMessage}
							/>
						</div>
					</div>

					<!-- Prompt input; available tool ids are the union across selected models -->
					<div class="">
						<MessageInput
							bind:files
							bind:prompt
							bind:autoScroll
							bind:selectedToolIds
							bind:webSearchEnabled
							bind:atSelectedModel
							availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
								const model = $models.find((m) => m.id === e);
								if (model?.info?.meta?.toolIds ?? false) {
									return [...new Set([...a, ...model.info.meta.toolIds])];
								}
								return a;
							}, [])}
							transparentBackground={$settings?.backgroundImageUrl ?? false}
							{selectedModels}
							{messages}
							{submitPrompt}
							{stopResponse}
							on:call={async () => {
								await showControls.set(true);
							}}
						/>
					</div>
				</div>
			</Pane>

			<!-- Side controls pane, fed the resolved model objects for the selection -->
			<ChatControls
				models={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model) {
						return [...a, model];
					}
					return a;
				}, [])}
				bind:history
				bind:chatFiles
				bind:params
				bind:files
				bind:pane={controlPane}
				{submitPrompt}
				{stopResponse}
				{showMessage}
				modelId={selectedModelIds?.at(0) ?? null}
				chatId={$chatId}
				{eventTarget}
			/>
		</PaneGroup>
	</div>
{/if}