Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { PaneGroup, Pane, PaneResizer } from 'paneforge';
  6. import { getContext, onDestroy, onMount, tick } from 'svelte';
  7. import { goto } from '$app/navigation';
  8. import { page } from '$app/stores';
  9. import type { Unsubscriber, Writable } from 'svelte/store';
  10. import type { i18n as i18nType } from 'i18next';
  11. import { WEBUI_BASE_URL } from '$lib/constants';
  12. import {
  13. chatId,
  14. chats,
  15. config,
  16. type Model,
  17. models,
  18. settings,
  19. showSidebar,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showControls,
  25. showCallOverlay,
  26. currentChatPage,
  27. temporaryChatEnabled,
  28. mobile
  29. } from '$lib/stores';
  30. import {
  31. convertMessagesToHistory,
  32. copyToClipboard,
  33. getMessageContentParts,
  34. extractSentencesForAudio,
  35. promptTemplate,
  36. splitStream
  37. } from '$lib/utils';
  38. import { generateChatCompletion } from '$lib/apis/ollama';
  39. import {
  40. createNewChat,
  41. getChatById,
  42. getChatList,
  43. getTagsById,
  44. updateChatById
  45. } from '$lib/apis/chats';
  46. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  47. import { runWebSearch } from '$lib/apis/rag';
  48. import { createOpenAITextStream } from '$lib/apis/streaming';
  49. import { queryMemory } from '$lib/apis/memories';
  50. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  51. import {
  52. chatCompleted,
  53. generateTitle,
  54. generateSearchQuery,
  55. chatAction,
  56. generateMoACompletion
  57. } from '$lib/apis';
  58. import Banner from '../common/Banner.svelte';
  59. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  60. import Messages from '$lib/components/chat/Messages.svelte';
  61. import Navbar from '$lib/components/layout/Navbar.svelte';
  62. import ChatControls from './ChatControls.svelte';
  63. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  64. import EllipsisVertical from '../icons/EllipsisVertical.svelte';
  65. const i18n: Writable<i18nType> = getContext('i18n');
  66. export let chatIdProp = '';
  67. let loaded = false;
  68. const eventTarget = new EventTarget();
  69. let controlPane;
  70. let stopResponseFlag = false;
  71. let autoScroll = true;
  72. let processing = '';
  73. let messagesContainerElement: HTMLDivElement;
  74. let showEventConfirmation = false;
  75. let eventConfirmationTitle = '';
  76. let eventConfirmationMessage = '';
  77. let eventConfirmationInput = false;
  78. let eventConfirmationInputPlaceholder = '';
  79. let eventConfirmationInputValue = '';
  80. let eventCallback = null;
  81. let showModelSelector = true;
  82. let selectedModels = [''];
  83. let atSelectedModel: Model | undefined;
  84. let selectedModelIds = [];
  85. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  86. let selectedToolIds = [];
  87. let webSearchEnabled = false;
  88. let chat = null;
  89. let tags = [];
  90. let title = '';
  91. let prompt = '';
  92. let chatFiles = [];
  93. let files = [];
  94. let messages = [];
  95. let history = {
  96. messages: {},
  97. currentId: null
  98. };
  99. let params = {};
  100. let chatIdUnsubscriber: Unsubscriber | undefined;
  101. $: if (history.currentId !== null) {
  102. let _messages = [];
  103. let currentMessage = history.messages[history.currentId];
  104. while (currentMessage) {
  105. _messages.unshift({ ...currentMessage });
  106. currentMessage =
  107. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  108. }
109. // Rebuilding the entire messages array on every history change is most likely the performance bottleneck
  110. messages = _messages;
  111. } else {
  112. messages = [];
  113. }
  114. $: if (chatIdProp) {
  115. (async () => {
  116. console.log(chatIdProp);
  117. if (chatIdProp && (await loadChat())) {
  118. await tick();
  119. loaded = true;
  120. window.setTimeout(() => scrollToBottom(), 0);
  121. const chatInput = document.getElementById('chat-textarea');
  122. chatInput?.focus();
  123. } else {
  124. await goto('/');
  125. }
  126. })();
  127. }
  128. const showMessage = async (message) => {
  129. let _messageId = JSON.parse(JSON.stringify(message.id));
  130. let messageChildrenIds = history.messages[_messageId].childrenIds;
  131. while (messageChildrenIds.length !== 0) {
  132. _messageId = messageChildrenIds.at(-1);
  133. messageChildrenIds = history.messages[_messageId].childrenIds;
  134. }
  135. history.currentId = _messageId;
  136. await tick();
  137. await tick();
  138. await tick();
  139. const messageElement = document.getElementById(`message-${message.id}`);
  140. if (messageElement) {
  141. messageElement.scrollIntoView({ behavior: 'smooth' });
  142. }
  143. };
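// Apply server-sent 'chat-events' for the active chat to the target message: status/citation updates, streamed or replaced content, continue actions, and confirmation/input dialogs.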
  144. const chatEventHandler = async (event, cb) => {
  145. if (event.chat_id === $chatId) {
  146. await tick();
  147. console.log(event);
  148. let message = history.messages[event.message_id];
  149. const type = event?.data?.type ?? null;
  150. const data = event?.data?.data ?? null;
  151. if (type === 'status') {
  152. if (message?.statusHistory) {
  153. message.statusHistory.push(data);
  154. } else {
  155. message.statusHistory = [data];
  156. }
  157. } else if (type === 'citation') {
  158. if (message?.citations) {
  159. message.citations.push(data);
  160. } else {
  161. message.citations = [data];
  162. }
  163. } else if (type === 'message') {
  164. message.content += data.content;
  165. } else if (type === 'replace') {
  166. message.content = data.content;
  167. } else if (type === 'action') {
  168. if (data.action === 'continue') {
  169. const continueButton = document.getElementById('continue-response-button');
  170. if (continueButton) {
  171. continueButton.click();
  172. }
  173. }
  174. } else if (type === 'confirmation') {
  175. eventCallback = cb;
  176. eventConfirmationInput = false;
  177. showEventConfirmation = true;
  178. eventConfirmationTitle = data.title;
  179. eventConfirmationMessage = data.message;
  180. } else if (type === 'input') {
  181. eventCallback = cb;
  182. eventConfirmationInput = true;
  183. showEventConfirmation = true;
  184. eventConfirmationTitle = data.title;
  185. eventConfirmationMessage = data.message;
  186. eventConfirmationInputPlaceholder = data.placeholder;
  187. eventConfirmationInputValue = data?.value ?? '';
  188. } else {
  189. console.log('Unknown message type', data);
  190. }
  191. messages = messages;
  192. }
  193. };
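// Handle same-origin window postMessage events: prefill the chat prompt ('input:prompt'), submit the current prompt ('action:submit'), or submit provided text ('input:prompt:submit').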
  194. const onMessageHandler = async (event: {
  195. origin: string;
  196. data: { type: string; text: string };
  197. }) => {
  198. if (event.origin !== window.origin) {
  199. return;
  200. }
201. // NOTE: replace window.origin above with your iframe's origin if messages are posted from an embedded frame
  202. if (event.data.type === 'input:prompt') {
  203. console.debug(event.data.text);
  204. const inputElement = document.getElementById('chat-textarea');
  205. if (inputElement) {
  206. prompt = event.data.text;
  207. inputElement.focus();
  208. }
  209. }
  210. if (event.data.type === 'action:submit') {
  211. console.debug(event.data.text);
  212. if (prompt !== '') {
  213. await tick();
  214. submitPrompt(prompt);
  215. }
  216. }
  217. if (event.data.type === 'input:prompt:submit') {
  218. console.debug(event.data.text);
  219. if (prompt !== '') {
  220. await tick();
  221. submitPrompt(event.data.text);
  222. }
  223. }
  224. };
  225. onMount(async () => {
  226. window.addEventListener('message', onMessageHandler);
  227. $socket?.on('chat-events', chatEventHandler);
  228. if (!$chatId) {
  229. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  230. if (!value) {
  231. await initNewChat();
  232. }
  233. });
  234. } else {
  235. if ($temporaryChatEnabled) {
  236. await goto('/');
  237. }
  238. }
  239. showControls.subscribe(async (value) => {
  240. if (controlPane) {
  241. if (value) {
  242. controlPane.resize(parseInt(localStorage.getItem('chat-controls-size') || '35'));
  243. } else {
  244. controlPane.resize(0);
  245. }
  246. }
  247. });
  248. });
  249. onDestroy(() => {
  250. chatIdUnsubscriber?.();
  251. window.removeEventListener('message', onMessageHandler);
  252. $socket?.off('chat-events');
  253. });
  254. //////////////////////////
  255. // Web functions
  256. //////////////////////////
  257. const initNewChat = async () => {
  258. if ($page.url.pathname.includes('/c/')) {
  259. window.history.replaceState(history.state, '', `/`);
  260. }
  261. await chatId.set('');
  262. autoScroll = true;
  263. title = '';
  264. messages = [];
  265. history = {
  266. messages: {},
  267. currentId: null
  268. };
  269. chatFiles = [];
  270. params = {};
  271. if ($page.url.searchParams.get('models')) {
  272. selectedModels = $page.url.searchParams.get('models')?.split(',');
  273. } else if ($settings?.models) {
  274. selectedModels = $settings?.models;
  275. } else if ($config?.default_models) {
  276. console.log($config?.default_models.split(',') ?? '');
  277. selectedModels = $config?.default_models.split(',');
  278. } else {
  279. selectedModels = [''];
  280. }
  281. if ($page.url.searchParams.get('web-search') === 'true') {
  282. webSearchEnabled = true;
  283. }
  284. if ($page.url.searchParams.get('q')) {
  285. prompt = $page.url.searchParams.get('q') ?? '';
  286. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  287. .split(',')
  288. .map((id) => id.trim())
  289. .filter((id) => id);
  290. if (prompt) {
  291. await tick();
  292. submitPrompt(prompt);
  293. }
  294. }
  295. if ($page.url.searchParams.get('call') === 'true') {
  296. showCallOverlay.set(true);
  297. }
  298. selectedModels = selectedModels.map((modelId) =>
  299. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  300. );
  301. const userSettings = await getUserSettings(localStorage.token);
  302. if (userSettings) {
  303. settings.set(userSettings.ui);
  304. } else {
  305. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  306. }
  307. const chatInput = document.getElementById('chat-textarea');
  308. setTimeout(() => chatInput?.focus(), 0);
  309. };
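// Load the chat referenced by chatIdProp: restore models, history, title, params, and files; returns true on success, null otherwise.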
  310. const loadChat = async () => {
  311. chatId.set(chatIdProp);
  312. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  313. await goto('/');
  314. return null;
  315. });
  316. if (chat) {
  317. tags = await getTags();
  318. const chatContent = chat.chat;
  319. if (chatContent) {
  320. console.log(chatContent);
  321. selectedModels =
  322. (chatContent?.models ?? undefined) !== undefined
  323. ? chatContent.models
  324. : [chatContent.models ?? ''];
  325. history =
  326. (chatContent?.history ?? undefined) !== undefined
  327. ? chatContent.history
  328. : convertMessagesToHistory(chatContent.messages);
  329. title = chatContent.title;
  330. const userSettings = await getUserSettings(localStorage.token);
  331. if (userSettings) {
  332. await settings.set(userSettings.ui);
  333. } else {
  334. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  335. }
  336. params = chatContent?.params ?? {};
  337. chatFiles = chatContent?.files ?? [];
  338. autoScroll = true;
  339. await tick();
  340. if (messages.length > 0) {
  341. history.messages[messages.at(-1).id].done = true;
  342. }
  343. await tick();
  344. return true;
  345. } else {
  346. return null;
  347. }
  348. }
  349. };
  350. const scrollToBottom = async () => {
  351. await tick();
  352. if (messagesContainerElement) {
  353. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  354. }
  355. };
  356. const createMessagesList = (responseMessageId) => {
  357. const message = history.messages[responseMessageId];
  358. if (message.parentId) {
  359. return [...createMessagesList(message.parentId), message];
  360. } else {
  361. return [message];
  362. }
  363. };
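// Render any mermaid diagrams, notify the backend that the response completed, merge returned message updates into history, and persist the chat.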
  364. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  365. await mermaid.run({
  366. querySelector: '.mermaid'
  367. });
  368. const res = await chatCompleted(localStorage.token, {
  369. model: modelId,
  370. messages: messages.map((m) => ({
  371. id: m.id,
  372. role: m.role,
  373. content: m.content,
  374. info: m.info ? m.info : undefined,
  375. timestamp: m.timestamp
  376. })),
  377. chat_id: chatId,
  378. session_id: $socket?.id,
  379. id: responseMessageId
  380. }).catch((error) => {
  381. toast.error(error);
  382. messages.at(-1).error = { content: error };
  383. return null;
  384. });
  385. if (res !== null) {
  386. // Update chat history with the new messages
  387. for (const message of res.messages) {
  388. history.messages[message.id] = {
  389. ...history.messages[message.id],
  390. ...(history.messages[message.id].content !== message.content
  391. ? { originalContent: history.messages[message.id].content }
  392. : {}),
  393. ...message
  394. };
  395. }
  396. }
  397. if ($chatId == chatId) {
  398. if (!$temporaryChatEnabled) {
  399. chat = await updateChatById(localStorage.token, chatId, {
  400. models: selectedModels,
  401. messages: messages,
  402. history: history,
  403. params: params,
  404. files: chatFiles
  405. });
  406. currentChatPage.set(1);
  407. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  408. }
  409. }
  410. };
  411. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  412. const res = await chatAction(localStorage.token, actionId, {
  413. model: modelId,
  414. messages: messages.map((m) => ({
  415. id: m.id,
  416. role: m.role,
  417. content: m.content,
  418. info: m.info ? m.info : undefined,
  419. timestamp: m.timestamp
  420. })),
  421. ...(event ? { event: event } : {}),
  422. chat_id: chatId,
  423. session_id: $socket?.id,
  424. id: responseMessageId
  425. }).catch((error) => {
  426. toast.error(error);
  427. messages.at(-1).error = { content: error };
  428. return null;
  429. });
  430. if (res !== null) {
  431. // Update chat history with the new messages
  432. for (const message of res.messages) {
  433. history.messages[message.id] = {
  434. ...history.messages[message.id],
  435. ...(history.messages[message.id].content !== message.content
  436. ? { originalContent: history.messages[message.id].content }
  437. : {}),
  438. ...message
  439. };
  440. }
  441. }
  442. if ($chatId == chatId) {
  443. if (!$temporaryChatEnabled) {
  444. chat = await updateChatById(localStorage.token, chatId, {
  445. models: selectedModels,
  446. messages: messages,
  447. history: history,
  448. params: params,
  449. files: chatFiles
  450. });
  451. currentChatPage.set(1);
  452. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  453. }
  454. }
  455. };
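// Emit a 'usage' event for this model/chat once per second; returns the interval id so callers can clear it.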
  456. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  457. return setInterval(() => {
  458. $socket?.emit('usage', {
  459. action: 'chat',
  460. model: modelId,
  461. chat_id: chatId
  462. });
  463. }, 1000);
  464. };
  465. //////////////////////////
  466. // Chat functions
  467. //////////////////////////
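// Validate the current input state (model selected, previous response finished, files processed, file limit), create the user message, and hand off to sendPrompt.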
  468. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  469. let _responses = [];
  470. console.log('submitPrompt', $chatId);
  471. selectedModels = selectedModels.map((modelId) =>
  472. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  473. );
  474. if (selectedModels.includes('')) {
  475. toast.error($i18n.t('Model not selected'));
  476. } else if (messages.length != 0 && messages.at(-1).done != true) {
  477. // Response not done
  478. console.log('wait');
  479. } else if (messages.length != 0 && messages.at(-1).error) {
  480. // Error in response
  481. toast.error(
  482. $i18n.t(
  483. `Oops! There was an error in the previous response. Please try again or contact admin.`
  484. )
  485. );
  486. } else if (
  487. files.length > 0 &&
  488. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  489. ) {
  490. // Upload not done
  491. toast.error(
  492. $i18n.t(
  493. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  494. )
  495. );
  496. } else if (
  497. ($config?.file?.max_count ?? null) !== null &&
  498. files.length + chatFiles.length > $config?.file?.max_count
  499. ) {
  500. console.log(chatFiles.length, files.length);
  501. toast.error(
  502. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  503. maxCount: $config?.file?.max_count
  504. })
  505. );
  506. } else {
  507. // Reset chat input textarea
  508. const chatTextAreaElement = document.getElementById('chat-textarea');
  509. if (chatTextAreaElement) {
  510. chatTextAreaElement.value = '';
  511. chatTextAreaElement.style.height = '';
  512. }
  513. const _files = JSON.parse(JSON.stringify(files));
  514. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  515. chatFiles = chatFiles.filter(
  516. // Remove duplicates
  517. (item, index, array) =>
  518. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  519. );
  520. files = [];
  521. prompt = '';
  522. // Create user message
  523. let userMessageId = uuidv4();
  524. let userMessage = {
  525. id: userMessageId,
  526. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  527. childrenIds: [],
  528. role: 'user',
  529. content: userPrompt,
  530. files: _files.length > 0 ? _files : undefined,
  531. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  532. models: selectedModels
  533. };
  534. // Add message to history and Set currentId to messageId
  535. history.messages[userMessageId] = userMessage;
  536. history.currentId = userMessageId;
  537. // Append messageId to childrenIds of parent message
  538. if (messages.length !== 0) {
  539. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  540. }
  541. // Wait until history/message have been updated
  542. await tick();
  543. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  544. }
  545. return _responses;
  546. };
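// Create an assistant placeholder message per selected model, create the chat on the first exchange, optionally run a web search, then stream each model's completion.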
  547. const sendPrompt = async (
  548. prompt: string,
  549. parentId: string,
  550. { modelId = null, modelIdx = null, newChat = false } = {}
  551. ) => {
  552. let _responses: string[] = [];
  553. // If modelId is provided, use it, else use selected model
  554. let selectedModelIds = modelId
  555. ? [modelId]
  556. : atSelectedModel !== undefined
  557. ? [atSelectedModel.id]
  558. : selectedModels;
  559. // Create response messages for each selected model
  560. const responseMessageIds: Record<PropertyKey, string> = {};
  561. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  562. const model = $models.filter((m) => m.id === modelId).at(0);
  563. if (model) {
  564. let responseMessageId = uuidv4();
  565. let responseMessage = {
  566. parentId: parentId,
  567. id: responseMessageId,
  568. childrenIds: [],
  569. role: 'assistant',
  570. content: '',
  571. model: model.id,
  572. modelName: model.name ?? model.id,
  573. modelIdx: modelIdx ? modelIdx : _modelIdx,
  574. userContext: null,
  575. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  576. };
  577. // Add message to history and Set currentId to messageId
  578. history.messages[responseMessageId] = responseMessage;
  579. history.currentId = responseMessageId;
  580. // Append messageId to childrenIds of parent message
  581. if (parentId !== null) {
  582. history.messages[parentId].childrenIds = [
  583. ...history.messages[parentId].childrenIds,
  584. responseMessageId
  585. ];
  586. }
  587. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  588. }
  589. }
  590. await tick();
  591. // Create new chat if only one message in messages
  592. if (newChat && messages.length == 2) {
  593. if (!$temporaryChatEnabled) {
  594. chat = await createNewChat(localStorage.token, {
  595. id: $chatId,
  596. title: $i18n.t('New Chat'),
  597. models: selectedModels,
  598. system: $settings.system ?? undefined,
  599. params: params,
  600. messages: messages,
  601. history: history,
  602. tags: [],
  603. timestamp: Date.now()
  604. });
  605. currentChatPage.set(1);
  606. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  607. await chatId.set(chat.id);
  608. } else {
  609. await chatId.set('local');
  610. }
  611. await tick();
  612. }
  613. const _chatId = JSON.parse(JSON.stringify($chatId));
  614. await Promise.all(
  615. selectedModelIds.map(async (modelId, _modelIdx) => {
  616. console.log('modelId', modelId);
  617. const model = $models.filter((m) => m.id === modelId).at(0);
  618. if (model) {
  619. // If there are image files, check if model is vision capable
  620. const hasImages = messages.some((message) =>
  621. message.files?.some((file) => file.type === 'image')
  622. );
  623. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  624. toast.error(
  625. $i18n.t('Model {{modelName}} is not vision capable', {
  626. modelName: model.name ?? model.id
  627. })
  628. );
  629. }
  630. let responseMessageId =
  631. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  632. let responseMessage = history.messages[responseMessageId];
  633. let userContext = null;
  634. if ($settings?.memory ?? false) {
  635. if (userContext === null) {
  636. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  637. toast.error(error);
  638. return null;
  639. });
  640. if (res) {
  641. if (res.documents[0].length > 0) {
  642. userContext = res.documents[0].reduce((acc, doc, index) => {
  643. const createdAtTimestamp = res.metadatas[0][index].created_at;
  644. const createdAtDate = new Date(createdAtTimestamp * 1000)
  645. .toISOString()
  646. .split('T')[0];
  647. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  648. }, '');
  649. }
  650. console.log(userContext);
  651. }
  652. }
  653. }
  654. responseMessage.userContext = userContext;
  655. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  656. scrollToBottom();
  657. if (webSearchEnabled) {
  658. await getWebSearchResults(model.id, parentId, responseMessageId);
  659. }
  660. let _response = null;
  661. if (model?.owned_by === 'openai') {
  662. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  663. } else if (model) {
  664. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  665. }
  666. _responses.push(_response);
  667. if (chatEventEmitter) clearInterval(chatEventEmitter);
  668. } else {
  669. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  670. }
  671. })
  672. );
  673. currentChatPage.set(1);
  674. chats.set(await getChatList(localStorage.token, $currentChatPage));
  675. return _responses;
  676. };
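// Build the Ollama request body (system prompt, user context, images, knowledge/attached files) and stream the completion into the response message.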
  677. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  678. let _response: string | null = null;
  679. const responseMessage = history.messages[responseMessageId];
  680. const userMessage = history.messages[responseMessage.parentId];
  681. // Wait until history/message have been updated
  682. await tick();
  683. // Scroll down
  684. scrollToBottom();
  685. const messagesBody = [
  686. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  687. ? {
  688. role: 'system',
  689. content: `${promptTemplate(
  690. params?.system ?? $settings?.system ?? '',
  691. $user.name,
  692. $settings?.userLocation
  693. ? await getAndUpdateUserLocation(localStorage.token)
  694. : undefined
  695. )}${
  696. (responseMessage?.userContext ?? null)
  697. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  698. : ''
  699. }`
  700. }
  701. : undefined,
  702. ...messages
  703. ]
  704. .filter((message) => message?.content?.trim())
  705. .map((message) => {
  706. // Prepare the base message object
  707. const baseMessage = {
  708. role: message.role,
  709. content: message.content
  710. };
  711. // Extract and format image URLs if any exist
  712. const imageUrls = message.files
  713. ?.filter((file) => file.type === 'image')
  714. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  715. // Add images array only if it contains elements
  716. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  717. baseMessage.images = imageUrls;
  718. }
  719. return baseMessage;
  720. });
  721. let lastImageIndex = -1;
  722. // Find the index of the last object with images
  723. messagesBody.forEach((item, index) => {
  724. if (item.images) {
  725. lastImageIndex = index;
  726. }
  727. });
  728. // Remove images from all but the last one
  729. messagesBody.forEach((item, index) => {
  730. if (index !== lastImageIndex) {
  731. delete item.images;
  732. }
  733. });
  734. let files = JSON.parse(JSON.stringify(chatFiles));
  735. if (model?.info?.meta?.knowledge ?? false) {
  736. // Only initialize and add status if knowledge exists
  737. responseMessage.statusHistory = [
  738. {
  739. action: 'knowledge_search',
  740. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  741. searchQuery: userMessage.content
  742. }),
  743. done: false
  744. }
  745. ];
  746. files.push(...model.info.meta.knowledge);
  747. messages = messages; // Trigger Svelte update
  748. }
  749. files.push(
  750. ...(userMessage?.files ?? []).filter((item) =>
  751. ['doc', 'file', 'collection'].includes(item.type)
  752. ),
  753. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  754. );
  755. scrollToBottom();
  756. eventTarget.dispatchEvent(
  757. new CustomEvent('chat:start', {
  758. detail: {
  759. id: responseMessageId
  760. }
  761. })
  762. );
  763. await tick();
  764. const stream = $settings?.streamResponse ?? true;
  765. const [res, controller] = await generateChatCompletion(localStorage.token, {
  766. stream: stream,
  767. model: model.id,
  768. messages: messagesBody,
  769. options: {
  770. ...{ ...($settings?.params ?? {}), ...params },
  771. stop:
  772. (params?.stop ?? $settings?.params?.stop ?? undefined)
773. ? (params?.stop?.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  774. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  775. )
  776. : undefined,
  777. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  778. repeat_penalty:
  779. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  780. },
  781. format: $settings.requestFormat ?? undefined,
  782. keep_alive: $settings.keepAlive ?? undefined,
  783. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  784. files: files.length > 0 ? files : undefined,
  785. session_id: $socket?.id,
  786. chat_id: $chatId,
  787. id: responseMessageId
  788. });
  789. if (res && res.ok) {
  790. if (!stream) {
  791. const response = await res.json();
  792. console.log(response);
  793. responseMessage.content = response.message.content;
  794. responseMessage.info = {
  795. eval_count: response.eval_count,
  796. eval_duration: response.eval_duration,
  797. load_duration: response.load_duration,
  798. prompt_eval_count: response.prompt_eval_count,
  799. prompt_eval_duration: response.prompt_eval_duration,
  800. total_duration: response.total_duration
  801. };
  802. responseMessage.done = true;
  803. } else {
  804. console.log('controller', controller);
  805. const reader = res.body
  806. .pipeThrough(new TextDecoderStream())
  807. .pipeThrough(splitStream('\n'))
  808. .getReader();
  809. while (true) {
  810. const { value, done } = await reader.read();
  811. if (done || stopResponseFlag || _chatId !== $chatId) {
  812. responseMessage.done = true;
  813. messages = messages;
  814. if (stopResponseFlag) {
  815. controller.abort('User: Stop Response');
  816. }
  817. _response = responseMessage.content;
  818. break;
  819. }
  820. try {
  821. let lines = value.split('\n');
  822. for (const line of lines) {
  823. if (line !== '') {
  824. console.log(line);
  825. let data = JSON.parse(line);
  826. if ('citations' in data) {
  827. responseMessage.citations = data.citations;
  828. // Only remove status if it was initially set
  829. if (model?.info?.meta?.knowledge ?? false) {
  830. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  831. (status) => status.action !== 'knowledge_search'
  832. );
  833. }
  834. continue;
  835. }
  836. if ('detail' in data) {
  837. throw data;
  838. }
  839. if (data.done == false) {
  840. if (responseMessage.content == '' && data.message.content == '\n') {
  841. continue;
  842. } else {
  843. responseMessage.content += data.message.content;
  844. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  845. navigator.vibrate(5);
  846. }
  847. const messageContentParts = getMessageContentParts(
  848. responseMessage.content,
  849. $config?.audio?.tts?.split_on ?? 'punctuation'
  850. );
  851. messageContentParts.pop();
  852. // dispatch only last sentence and make sure it hasn't been dispatched before
  853. if (
  854. messageContentParts.length > 0 &&
  855. messageContentParts[messageContentParts.length - 1] !==
  856. responseMessage.lastSentence
  857. ) {
  858. responseMessage.lastSentence =
  859. messageContentParts[messageContentParts.length - 1];
  860. eventTarget.dispatchEvent(
  861. new CustomEvent('chat', {
  862. detail: {
  863. id: responseMessageId,
  864. content: messageContentParts[messageContentParts.length - 1]
  865. }
  866. })
  867. );
  868. }
  869. messages = messages;
  870. }
  871. } else {
  872. responseMessage.done = true;
  873. if (responseMessage.content == '') {
  874. responseMessage.error = {
  875. code: 400,
876. content: `Oops! No text generated from Ollama. Please try again.`
  877. };
  878. }
  879. responseMessage.context = data.context ?? null;
  880. responseMessage.info = {
  881. total_duration: data.total_duration,
  882. load_duration: data.load_duration,
  883. sample_count: data.sample_count,
  884. sample_duration: data.sample_duration,
  885. prompt_eval_count: data.prompt_eval_count,
  886. prompt_eval_duration: data.prompt_eval_duration,
  887. eval_count: data.eval_count,
  888. eval_duration: data.eval_duration
  889. };
  890. messages = messages;
  891. if ($settings.notificationEnabled && !document.hasFocus()) {
  892. const notification = new Notification(`${model.id}`, {
  893. body: responseMessage.content,
  894. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  895. });
  896. }
  897. if ($settings?.responseAutoCopy ?? false) {
  898. copyToClipboard(responseMessage.content);
  899. }
  900. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  901. await tick();
  902. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  903. }
  904. }
  905. }
  906. }
  907. } catch (error) {
  908. console.log(error);
  909. if ('detail' in error) {
  910. toast.error(error.detail);
  911. }
  912. break;
  913. }
  914. if (autoScroll) {
  915. scrollToBottom();
  916. }
  917. }
  918. }
  919. await chatCompletedHandler(
  920. _chatId,
  921. model.id,
  922. responseMessageId,
  923. createMessagesList(responseMessageId)
  924. );
  925. } else {
  926. if (res !== null) {
  927. const error = await res.json();
  928. console.log(error);
  929. if ('detail' in error) {
  930. toast.error(error.detail);
  931. responseMessage.error = { content: error.detail };
  932. } else {
  933. toast.error(error.error);
  934. responseMessage.error = { content: error.error };
  935. }
  936. } else {
  937. toast.error(
  938. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  939. );
  940. responseMessage.error = {
  941. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  942. provider: 'Ollama'
  943. })
  944. };
  945. }
  946. responseMessage.done = true;
  947. if (responseMessage.statusHistory) {
  948. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  949. (status) => status.action !== 'knowledge_search'
  950. );
  951. }
  952. messages = messages;
  953. }
  954. await saveChatHandler(_chatId);
  955. stopResponseFlag = false;
  956. await tick();
  957. let lastMessageContentPart =
  958. getMessageContentParts(
  959. responseMessage.content,
  960. $config?.audio?.tts?.split_on ?? 'punctuation'
  961. )?.at(-1) ?? '';
  962. if (lastMessageContentPart) {
  963. eventTarget.dispatchEvent(
  964. new CustomEvent('chat', {
  965. detail: { id: responseMessageId, content: lastMessageContentPart }
  966. })
  967. );
  968. }
  969. eventTarget.dispatchEvent(
  970. new CustomEvent('chat:finish', {
  971. detail: {
  972. id: responseMessageId,
  973. content: responseMessage.content
  974. }
  975. })
  976. );
  977. if (autoScroll) {
  978. scrollToBottom();
  979. }
  980. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  981. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  982. const _title = await generateChatTitle(userPrompt);
  983. await setChatTitle(_chatId, _title);
  984. }
  985. return _response;
  986. };
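// Build the OpenAI-compatible request (system prompt, image content parts, sampling params) and stream the completion into the response message.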
  987. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  988. let _response = null;
  989. const responseMessage = history.messages[responseMessageId];
  990. const userMessage = history.messages[responseMessage.parentId];
  991. let files = JSON.parse(JSON.stringify(chatFiles));
  992. if (model?.info?.meta?.knowledge ?? false) {
  993. // Only initialize and add status if knowledge exists
  994. responseMessage.statusHistory = [
  995. {
  996. action: 'knowledge_search',
  997. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  998. searchQuery: userMessage.content
  999. }),
  1000. done: false
  1001. }
  1002. ];
  1003. files.push(...model.info.meta.knowledge);
  1004. messages = messages; // Trigger Svelte update
  1005. }
  1006. files.push(
  1007. ...(userMessage?.files ?? []).filter((item) =>
  1008. ['doc', 'file', 'collection'].includes(item.type)
  1009. ),
  1010. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  1011. );
  1012. scrollToBottom();
  1013. eventTarget.dispatchEvent(
  1014. new CustomEvent('chat:start', {
  1015. detail: {
  1016. id: responseMessageId
  1017. }
  1018. })
  1019. );
  1020. await tick();
  1021. try {
  1022. const stream = $settings?.streamResponse ?? true;
  1023. const [res, controller] = await generateOpenAIChatCompletion(
  1024. localStorage.token,
  1025. {
  1026. stream: stream,
  1027. model: model.id,
  1028. ...(stream && (model.info?.meta?.capabilities?.usage ?? false)
  1029. ? {
  1030. stream_options: {
  1031. include_usage: true
  1032. }
  1033. }
  1034. : {}),
  1035. messages: [
  1036. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  1037. ? {
  1038. role: 'system',
  1039. content: `${promptTemplate(
  1040. params?.system ?? $settings?.system ?? '',
  1041. $user.name,
  1042. $settings?.userLocation
  1043. ? await getAndUpdateUserLocation(localStorage.token)
  1044. : undefined
  1045. )}${
  1046. (responseMessage?.userContext ?? null)
  1047. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  1048. : ''
  1049. }`
  1050. }
  1051. : undefined,
  1052. ...messages
  1053. ]
  1054. .filter((message) => message?.content?.trim())
  1055. .map((message, idx, arr) => ({
  1056. role: message.role,
  1057. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  1058. message.role === 'user'
  1059. ? {
  1060. content: [
  1061. {
  1062. type: 'text',
  1063. text:
  1064. arr.length - 1 !== idx
  1065. ? message.content
  1066. : (message?.raContent ?? message.content)
  1067. },
  1068. ...message.files
  1069. .filter((file) => file.type === 'image')
  1070. .map((file) => ({
  1071. type: 'image_url',
  1072. image_url: {
  1073. url: file.url
  1074. }
  1075. }))
  1076. ]
  1077. }
  1078. : {
  1079. content:
  1080. arr.length - 1 !== idx
  1081. ? message.content
  1082. : (message?.raContent ?? message.content)
  1083. })
  1084. })),
  1085. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  1086. stop:
  1087. (params?.stop ?? $settings?.params?.stop ?? undefined)
1088. ? (params?.stop?.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  1089. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1090. )
  1091. : undefined,
  1092. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1093. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1094. frequency_penalty:
  1095. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1096. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1097. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1098. files: files.length > 0 ? files : undefined,
  1099. session_id: $socket?.id,
  1100. chat_id: $chatId,
  1101. id: responseMessageId
  1102. },
  1103. `${WEBUI_BASE_URL}/api`
  1104. );
  1105. // Wait until history/message have been updated
  1106. await tick();
  1107. scrollToBottom();
  1108. if (res && res.ok && res.body) {
  1109. if (!stream) {
  1110. const response = await res.json();
  1111. console.log(response);
  1112. responseMessage.content = response.choices[0].message.content;
  1113. responseMessage.info = { ...response.usage, openai: true };
  1114. responseMessage.done = true;
  1115. } else {
  1116. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1117. for await (const update of textStream) {
  1118. const { value, done, citations, error, usage } = update;
  1119. if (error) {
  1120. await handleOpenAIError(error, null, model, responseMessage);
  1121. break;
  1122. }
  1123. if (done || stopResponseFlag || _chatId !== $chatId) {
  1124. responseMessage.done = true;
  1125. messages = messages;
  1126. if (stopResponseFlag) {
  1127. controller.abort('User: Stop Response');
  1128. }
  1129. _response = responseMessage.content;
  1130. break;
  1131. }
  1132. if (usage) {
  1133. responseMessage.info = { ...usage, openai: true };
  1134. }
  1135. if (citations) {
  1136. responseMessage.citations = citations;
  1137. // Only remove status if it was initially set
  1138. if (model?.info?.meta?.knowledge ?? false) {
  1139. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1140. (status) => status.action !== 'knowledge_search'
  1141. );
  1142. }
  1143. continue;
  1144. }
  1145. if (responseMessage.content == '' && value == '\n') {
  1146. continue;
  1147. } else {
  1148. responseMessage.content += value;
  1149. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1150. navigator.vibrate(5);
  1151. }
  1152. const messageContentParts = getMessageContentParts(
  1153. responseMessage.content,
  1154. $config?.audio?.tts?.split_on ?? 'punctuation'
  1155. );
  1156. messageContentParts.pop();
  1157. // dispatch only last sentence and make sure it hasn't been dispatched before
  1158. if (
  1159. messageContentParts.length > 0 &&
  1160. messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
  1161. ) {
  1162. responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
  1163. eventTarget.dispatchEvent(
  1164. new CustomEvent('chat', {
  1165. detail: {
  1166. id: responseMessageId,
  1167. content: messageContentParts[messageContentParts.length - 1]
  1168. }
  1169. })
  1170. );
  1171. }
  1172. messages = messages;
  1173. }
  1174. if (autoScroll) {
  1175. scrollToBottom();
  1176. }
  1177. }
  1178. }
  1179. await chatCompletedHandler(
  1180. _chatId,
  1181. model.id,
  1182. responseMessageId,
  1183. createMessagesList(responseMessageId)
  1184. );
  1185. if ($settings.notificationEnabled && !document.hasFocus()) {
  1186. const notification = new Notification(`${model.id}`, {
  1187. body: responseMessage.content,
  1188. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1189. });
  1190. }
  1191. if ($settings.responseAutoCopy) {
  1192. copyToClipboard(responseMessage.content);
  1193. }
  1194. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1195. await tick();
  1196. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1197. }
  1198. } else {
  1199. await handleOpenAIError(null, res, model, responseMessage);
  1200. }
  1201. } catch (error) {
  1202. await handleOpenAIError(error, null, model, responseMessage);
  1203. }
  1204. await saveChatHandler(_chatId);
  1205. messages = messages;
  1206. stopResponseFlag = false;
  1207. await tick();
  1208. let lastMessageContentPart =
  1209. getMessageContentParts(
  1210. responseMessage.content,
  1211. $config?.audio?.tts?.split_on ?? 'punctuation'
  1212. )?.at(-1) ?? '';
  1213. if (lastMessageContentPart) {
  1214. eventTarget.dispatchEvent(
  1215. new CustomEvent('chat', {
  1216. detail: { id: responseMessageId, content: lastMessageContentPart }
  1217. })
  1218. );
  1219. }
  1220. eventTarget.dispatchEvent(
  1221. new CustomEvent('chat:finish', {
  1222. detail: {
  1223. id: responseMessageId,
  1224. content: responseMessage.content
  1225. }
  1226. })
  1227. );
  1228. if (autoScroll) {
  1229. scrollToBottom();
  1230. }
  1231. if (messages.length == 2 && selectedModels[0] === model.id) {
  1232. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1233. const _title = await generateChatTitle(userPrompt);
  1234. await setChatTitle(_chatId, _title);
  1235. }
  1236. return _response;
  1237. };
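// Normalize an error payload (detail / error.message / message) into a toast and an error entry on the response message.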
  1238. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1239. let errorMessage = '';
  1240. let innerError;
  1241. if (error) {
  1242. innerError = error;
  1243. } else if (res !== null) {
  1244. innerError = await res.json();
  1245. }
  1246. console.error(innerError);
  1247. if ('detail' in innerError) {
  1248. toast.error(innerError.detail);
  1249. errorMessage = innerError.detail;
  1250. } else if ('error' in innerError) {
  1251. if ('message' in innerError.error) {
  1252. toast.error(innerError.error.message);
  1253. errorMessage = innerError.error.message;
  1254. } else {
  1255. toast.error(innerError.error);
  1256. errorMessage = innerError.error;
  1257. }
  1258. } else if ('message' in innerError) {
  1259. toast.error(innerError.message);
  1260. errorMessage = innerError.message;
  1261. }
  1262. responseMessage.error = {
  1263. content:
  1264. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1265. provider: model.name ?? model.id
  1266. }) +
  1267. '\n' +
  1268. errorMessage
  1269. };
  1270. responseMessage.done = true;
  1271. if (responseMessage.statusHistory) {
  1272. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1273. (status) => status.action !== 'knowledge_search'
  1274. );
  1275. }
  1276. messages = messages;
  1277. };
  1278. const stopResponse = () => {
  1279. stopResponseFlag = true;
  1280. console.log('stopResponse');
  1281. };
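// Resend the parent user prompt; with multiple models, regenerate using the model of the selected response.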
  1282. const regenerateResponse = async (message) => {
  1283. console.log('regenerateResponse');
  1284. if (messages.length != 0) {
  1285. let userMessage = history.messages[message.parentId];
  1286. let userPrompt = userMessage.content;
  1287. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1288. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1289. await sendPrompt(userPrompt, userMessage.id);
  1290. } else {
  1291. // If there are multiple models selected, use the model of the response message for regeneration
  1292. // e.g. many model chat
  1293. await sendPrompt(userPrompt, userMessage.id, {
  1294. modelId: message.model,
  1295. modelIdx: message.modelIdx
  1296. });
  1297. }
  1298. }
  1299. };
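// Mark the last completed response as unfinished and resume generation with its original model.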
  1300. const continueGeneration = async () => {
  1301. console.log('continueGeneration');
  1302. const _chatId = JSON.parse(JSON.stringify($chatId));
  1303. if (messages.length != 0 && messages.at(-1).done == true) {
  1304. const responseMessage = history.messages[history.currentId];
  1305. responseMessage.done = false;
  1306. await tick();
  1307. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1308. if (model) {
  1309. if (model?.owned_by === 'openai') {
  1310. await sendPromptOpenAI(
  1311. model,
  1312. history.messages[responseMessage.parentId].content,
  1313. responseMessage.id,
  1314. _chatId
  1315. );
  1316. } else
  1317. await sendPromptOllama(
  1318. model,
  1319. history.messages[responseMessage.parentId].content,
  1320. responseMessage.id,
  1321. _chatId
  1322. );
1323. } else {
1324. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
1325. }
1326. }
  1327. };
  1328. const generateChatTitle = async (userPrompt) => {
  1329. if ($settings?.title?.auto ?? true) {
  1330. const title = await generateTitle(
  1331. localStorage.token,
  1332. selectedModels[0],
  1333. userPrompt,
  1334. $chatId
  1335. ).catch((error) => {
  1336. console.error(error);
  1337. return 'New Chat';
  1338. });
  1339. return title;
  1340. } else {
  1341. return `${userPrompt}`;
  1342. }
  1343. };
  1344. const setChatTitle = async (_chatId, _title) => {
  1345. if (_chatId === $chatId) {
  1346. title = _title;
  1347. }
  1348. if (!$temporaryChatEnabled) {
  1349. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1350. currentChatPage.set(1);
  1351. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1352. }
  1353. };
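// Generate a search query for the latest user message, run the web search, and attach the results and status updates to the response message.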
  1354. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1355. const responseMessage = history.messages[responseId];
  1356. const userMessage = history.messages[parentId];
  1357. responseMessage.statusHistory = [
  1358. {
  1359. done: false,
  1360. action: 'web_search',
  1361. description: $i18n.t('Generating search query')
  1362. }
  1363. ];
  1364. messages = messages;
  1365. const prompt = userMessage.content;
  1366. let searchQuery = await generateSearchQuery(
  1367. localStorage.token,
  1368. model,
  1369. messages.filter((message) => message?.content?.trim()),
  1370. prompt
  1371. ).catch((error) => {
  1372. console.log(error);
  1373. return prompt;
  1374. });
  1375. if (!searchQuery || searchQuery == '') {
  1376. responseMessage.statusHistory.push({
  1377. done: true,
  1378. error: true,
  1379. action: 'web_search',
  1380. description: $i18n.t('No search query generated')
  1381. });
  1382. messages = messages;
  1383. return;
  1384. }
  1385. responseMessage.statusHistory.push({
  1386. done: false,
  1387. action: 'web_search',
  1388. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1389. });
  1390. messages = messages;
  1391. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1392. console.log(error);
  1393. toast.error(error);
  1394. return null;
  1395. });
  1396. if (results) {
  1397. responseMessage.statusHistory.push({
  1398. done: true,
  1399. action: 'web_search',
  1400. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1401. query: searchQuery,
  1402. urls: results.filenames
  1403. });
1404. if ((responseMessage?.files ?? undefined) === undefined) {
  1405. responseMessage.files = [];
  1406. }
  1407. responseMessage.files.push({
  1408. collection_name: results.collection_name,
  1409. name: searchQuery,
  1410. type: 'web_search_results',
  1411. urls: results.filenames
  1412. });
  1413. messages = messages;
  1414. } else {
  1415. responseMessage.statusHistory.push({
  1416. done: true,
  1417. error: true,
  1418. action: 'web_search',
  1419. description: 'No search results found'
  1420. });
  1421. messages = messages;
  1422. }
  1423. };
  1424. const getTags = async () => {
  1425. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1426. return [];
  1427. });
  1428. };
  1429. const saveChatHandler = async (_chatId) => {
  1430. if ($chatId == _chatId) {
  1431. if (!$temporaryChatEnabled) {
  1432. chat = await updateChatById(localStorage.token, _chatId, {
  1433. messages: messages,
  1434. history: history,
  1435. models: selectedModels,
  1436. params: params,
  1437. files: chatFiles
  1438. });
  1439. currentChatPage.set(1);
  1440. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1441. }
  1442. }
  1443. };
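// Merge multiple model responses into one answer via the MoA (mixture-of-agents) completion endpoint, streaming the merged content into message.merged.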
  1444. const mergeResponses = async (messageId, responses, _chatId) => {
  1445. console.log('mergeResponses', messageId, responses);
  1446. const message = history.messages[messageId];
  1447. const mergedResponse = {
  1448. status: true,
  1449. content: ''
  1450. };
  1451. message.merged = mergedResponse;
  1452. messages = messages;
  1453. try {
  1454. const [res, controller] = await generateMoACompletion(
  1455. localStorage.token,
  1456. message.model,
  1457. history.messages[message.parentId].content,
  1458. responses
  1459. );
  1460. if (res && res.ok && res.body) {
  1461. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1462. for await (const update of textStream) {
  1463. const { value, done, citations, error, usage } = update;
  1464. if (error || done) {
  1465. break;
  1466. }
  1467. if (mergedResponse.content == '' && value == '\n') {
  1468. continue;
  1469. } else {
  1470. mergedResponse.content += value;
  1471. messages = messages;
  1472. }
  1473. if (autoScroll) {
  1474. scrollToBottom();
  1475. }
  1476. }
  1477. await saveChatHandler(_chatId);
  1478. } else {
  1479. console.error(res);
  1480. }
  1481. } catch (e) {
  1482. console.error(e);
  1483. }
  1484. };
  1485. </script>
  1486. <svelte:head>
  1487. <title>
  1488. {title
  1489. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1490. : `${$WEBUI_NAME}`}
  1491. </title>
  1492. </svelte:head>
  1493. <audio id="audioElement" src="" style="display: none;" />
  1494. <EventConfirmDialog
  1495. bind:show={showEventConfirmation}
  1496. title={eventConfirmationTitle}
  1497. message={eventConfirmationMessage}
  1498. input={eventConfirmationInput}
  1499. inputPlaceholder={eventConfirmationInputPlaceholder}
  1500. inputValue={eventConfirmationInputValue}
  1501. on:confirm={(e) => {
  1502. if (e.detail) {
  1503. eventCallback(e.detail);
  1504. } else {
  1505. eventCallback(true);
  1506. }
  1507. }}
  1508. on:cancel={() => {
  1509. eventCallback(false);
  1510. }}
  1511. />
  1512. {#if !chatIdProp || (loaded && chatIdProp)}
  1513. <div
  1514. class="h-screen max-h-[100dvh] {$showSidebar
  1515. ? 'md:max-w-[calc(100%-260px)]'
  1516. : ''} w-full max-w-full flex flex-col"
  1517. >
  1518. {#if $settings?.backgroundImageUrl ?? null}
  1519. <div
  1520. class="absolute {$showSidebar
  1521. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1522. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1523. style="background-image: url({$settings.backgroundImageUrl}) "
  1524. />
  1525. <div
  1526. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1527. />
  1528. {/if}
  1529. <Navbar
  1530. {title}
  1531. bind:selectedModels
  1532. bind:showModelSelector
  1533. shareEnabled={messages.length > 0}
  1534. {chat}
  1535. {initNewChat}
  1536. />
  1537. <PaneGroup direction="horizontal" class="w-full h-full">
  1538. <Pane defaultSize={50} class="h-full flex w-full relative">
  1539. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1540. <div class="absolute top-3 left-0 right-0 w-full z-20">
  1541. <div class=" flex flex-col gap-1 w-full">
  1542. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1543. <Banner
  1544. {banner}
  1545. on:dismiss={(e) => {
  1546. const bannerId = e.detail;
  1547. localStorage.setItem(
  1548. 'dismissedBannerIds',
  1549. JSON.stringify(
  1550. [
  1551. bannerId,
  1552. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1553. ].filter((id) => $banners.find((b) => b.id === id))
  1554. )
  1555. );
  1556. }}
  1557. />
  1558. {/each}
  1559. </div>
  1560. </div>
  1561. {/if}
  1562. <div class="flex flex-col flex-auto z-10 w-full">
  1563. <div
  1564. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
  1565. id="messages-container"
  1566. bind:this={messagesContainerElement}
  1567. on:scroll={(e) => {
  1568. autoScroll =
  1569. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1570. messagesContainerElement.clientHeight + 5;
  1571. }}
  1572. >
  1573. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1574. <Messages
  1575. chatId={$chatId}
  1576. {selectedModels}
  1577. {processing}
  1578. bind:history
  1579. bind:messages
  1580. bind:autoScroll
  1581. bind:prompt
  1582. bottomPadding={files.length > 0}
  1583. {sendPrompt}
  1584. {continueGeneration}
  1585. {regenerateResponse}
  1586. {mergeResponses}
  1587. {chatActionHandler}
  1588. {showMessage}
  1589. />
  1590. </div>
  1591. </div>
  1592. <div class="">
  1593. <MessageInput
  1594. bind:files
  1595. bind:prompt
  1596. bind:autoScroll
  1597. bind:selectedToolIds
  1598. bind:webSearchEnabled
  1599. bind:atSelectedModel
  1600. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1601. const model = $models.find((m) => m.id === e);
  1602. if (model?.info?.meta?.toolIds ?? false) {
  1603. return [...new Set([...a, ...model.info.meta.toolIds])];
  1604. }
  1605. return a;
  1606. }, [])}
  1607. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1608. {selectedModels}
  1609. {messages}
  1610. {submitPrompt}
  1611. {stopResponse}
  1612. on:call={async () => {
  1613. await showControls.set(true);
  1614. }}
  1615. />
  1616. </div>
  1617. </div>
  1618. </Pane>
  1619. <ChatControls
  1620. models={selectedModelIds.reduce((a, e, i, arr) => {
  1621. const model = $models.find((m) => m.id === e);
  1622. if (model) {
  1623. return [...a, model];
  1624. }
  1625. return a;
  1626. }, [])}
  1627. bind:history
  1628. bind:chatFiles
  1629. bind:params
  1630. bind:files
  1631. bind:pane={controlPane}
  1632. {submitPrompt}
  1633. {stopResponse}
  1634. {showMessage}
  1635. modelId={selectedModelIds?.at(0) ?? null}
  1636. chatId={$chatId}
  1637. {eventTarget}
  1638. />
  1639. </PaneGroup>
  1640. </div>
  1641. {/if}