Chat.svelte

  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onDestroy, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Unsubscriber, Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. WEBUI_NAME,
  20. banners,
  21. user,
  22. socket,
  23. showCallOverlay,
  24. currentChatPage,
  25. temporaryChatEnabled
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. getMessageContentParts,
  31. extractSentencesForAudio,
  32. promptTemplate,
  33. splitStream
  34. } from '$lib/utils';
  35. import { generateChatCompletion } from '$lib/apis/ollama';
  36. import {
  37. createNewChat,
  38. getChatById,
  39. getChatList,
  40. getTagsById,
  41. updateChatById
  42. } from '$lib/apis/chats';
  43. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import { createOpenAITextStream } from '$lib/apis/streaming';
  46. import { queryMemory } from '$lib/apis/memories';
  47. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  48. import {
  49. chatCompleted,
  50. generateTitle,
  51. generateSearchQuery,
  52. chatAction,
  53. generateMoACompletion
  54. } from '$lib/apis';
  55. import Banner from '../common/Banner.svelte';
  56. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  57. import Messages from '$lib/components/chat/Messages.svelte';
  58. import Navbar from '$lib/components/layout/Navbar.svelte';
  59. import ChatControls from './ChatControls.svelte';
  60. import EventConfirmDialog from '../common/ConfirmDialog.svelte';
  61. const i18n: Writable<i18nType> = getContext('i18n');
  62. export let chatIdProp = '';
  63. let loaded = false;
  64. const eventTarget = new EventTarget();
  65. let showControls = false;
  66. let stopResponseFlag = false;
  67. let autoScroll = true;
  68. let processing = '';
  69. let messagesContainerElement: HTMLDivElement;
  70. let showEventConfirmation = false;
  71. let eventConfirmationTitle = '';
  72. let eventConfirmationMessage = '';
  73. let eventConfirmationInput = false;
  74. let eventConfirmationInputPlaceholder = '';
  75. let eventConfirmationInputValue = '';
  76. let eventCallback = null;
  77. let showModelSelector = true;
  78. let selectedModels = [''];
  79. let atSelectedModel: Model | undefined;
  80. let selectedModelIds = [];
  81. $: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
  82. let selectedToolIds = [];
  83. let webSearchEnabled = false;
  84. let chat = null;
  85. let tags = [];
  86. let title = '';
  87. let prompt = '';
  88. let chatFiles = [];
  89. let files = [];
  90. let messages = [];
  91. let history = {
  92. messages: {},
  93. currentId: null
  94. };
  95. let params = {};
  96. let chatIdUnsubscriber: Unsubscriber | undefined;
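// Rebuild the flat `messages` array by walking parentId links from `history.currentId` back to the root.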
  97. $: if (history.currentId !== null) {
  98. let _messages = [];
  99. let currentMessage = history.messages[history.currentId];
  100. while (currentMessage !== null) {
  101. _messages.unshift({ ...currentMessage });
  102. currentMessage =
  103. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  104. }
  105. messages = _messages;
  106. } else {
  107. messages = [];
  108. }
  109. $: if (chatIdProp) {
  110. (async () => {
  111. console.log(chatIdProp);
  112. if (chatIdProp && (await loadChat())) {
  113. await tick();
  114. loaded = true;
  115. window.setTimeout(() => scrollToBottom(), 0);
  116. const chatInput = document.getElementById('chat-textarea');
  117. chatInput?.focus();
  118. } else {
  119. await goto('/');
  120. }
  121. })();
  122. }
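// Handle server-emitted chat events (status, citation, streamed content, actions, confirmations) for the active chat.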
  123. const chatEventHandler = async (event, cb) => {
  124. if (event.chat_id === $chatId) {
  125. await tick();
  126. console.log(event);
  127. let message = history.messages[event.message_id];
  128. const type = event?.data?.type ?? null;
  129. const data = event?.data?.data ?? null;
  130. if (type === 'status') {
  131. if (message?.statusHistory) {
  132. message.statusHistory.push(data);
  133. } else {
  134. message.statusHistory = [data];
  135. }
  136. } else if (type === 'citation') {
  137. if (message?.citations) {
  138. message.citations.push(data);
  139. } else {
  140. message.citations = [data];
  141. }
  142. } else if (type === 'message') {
  143. message.content += data.content;
  144. } else if (type === 'replace') {
  145. message.content = data.content;
  146. } else if (type === 'action') {
  147. if (data.action === 'continue') {
  148. const continueButton = document.getElementById('continue-response-button');
  149. if (continueButton) {
  150. continueButton.click();
  151. }
  152. }
  153. } else if (type === 'confirmation') {
  154. eventCallback = cb;
  155. eventConfirmationInput = false;
  156. showEventConfirmation = true;
  157. eventConfirmationTitle = data.title;
  158. eventConfirmationMessage = data.message;
  159. } else if (type === 'input') {
  160. eventCallback = cb;
  161. eventConfirmationInput = true;
  162. showEventConfirmation = true;
  163. eventConfirmationTitle = data.title;
  164. eventConfirmationMessage = data.message;
  165. eventConfirmationInputPlaceholder = data.placeholder;
  166. eventConfirmationInputValue = data?.value ?? '';
  167. } else {
  168. console.log('Unknown message type', data);
  169. }
  170. messages = messages;
  171. }
  172. };
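// Handle window `message` events from the embedding page: prefill the prompt or submit it directly.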
  173. const onMessageHandler = async (event: {
  174. origin: string;
  175. data: { type: string; text: string };
  176. }) => {
  177. if (event.origin !== window.origin) {
  178. return;
  179. }
  180. // NOTE: the origin check above uses window.origin; replace it with your iframe's origin when embedding cross-origin
  181. if (event.data.type === 'input:prompt') {
  182. console.debug(event.data.text);
  183. const inputElement = document.getElementById('chat-textarea');
  184. if (inputElement) {
  185. prompt = event.data.text;
  186. inputElement.focus();
  187. }
  188. }
  189. if (event.data.type === 'action:submit') {
  190. console.debug(event.data.text);
  191. if (prompt !== '') {
  192. await tick();
  193. submitPrompt(prompt);
  194. }
  195. }
  196. if (event.data.type === 'input:prompt:submit') {
  197. console.debug(event.data.text);
  198. if (prompt !== '') {
  199. await tick();
  200. submitPrompt(event.data.text);
  201. }
  202. }
  203. };
  204. onMount(async () => {
  205. window.addEventListener('message', onMessageHandler);
  206. $socket?.on('chat-events', chatEventHandler);
  207. if (!$chatId) {
  208. chatIdUnsubscriber = chatId.subscribe(async (value) => {
  209. if (!value) {
  210. await initNewChat();
  211. }
  212. });
  213. } else {
  214. if ($temporaryChatEnabled) {
  215. await goto('/');
  216. }
  217. }
  218. });
  219. onDestroy(() => {
  220. chatIdUnsubscriber?.();
  221. window.removeEventListener('message', onMessageHandler);
  222. $socket?.off('chat-events', chatEventHandler);
  223. });
  224. //////////////////////////
  225. // Web functions
  226. //////////////////////////
  227. const initNewChat = async () => {
  228. if ($page.url.pathname.includes('/c/')) {
  229. window.history.replaceState(history.state, '', `/`);
  230. }
  231. await chatId.set('');
  232. autoScroll = true;
  233. title = '';
  234. messages = [];
  235. history = {
  236. messages: {},
  237. currentId: null
  238. };
  239. chatFiles = [];
  240. params = {};
  241. if ($page.url.searchParams.get('models')) {
  242. selectedModels = $page.url.searchParams.get('models')?.split(',');
  243. } else if ($settings?.models) {
  244. selectedModels = $settings?.models;
  245. } else if ($config?.default_models) {
  246. console.log($config?.default_models.split(',') ?? '');
  247. selectedModels = $config?.default_models.split(',');
  248. } else {
  249. selectedModels = [''];
  250. }
  251. if ($page.url.searchParams.get('web-search') === 'true') {
  252. webSearchEnabled = true;
  253. }
  254. if ($page.url.searchParams.get('q')) {
  255. prompt = $page.url.searchParams.get('q') ?? '';
  256. selectedToolIds = ($page.url.searchParams.get('tool_ids') ?? '')
  257. .split(',')
  258. .map((id) => id.trim())
  259. .filter((id) => id);
  260. if (prompt) {
  261. await tick();
  262. submitPrompt(prompt);
  263. }
  264. }
  265. if ($page.url.searchParams.get('call') === 'true') {
  266. showCallOverlay.set(true);
  267. }
  268. selectedModels = selectedModels.map((modelId) =>
  269. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  270. );
  271. const userSettings = await getUserSettings(localStorage.token);
  272. if (userSettings) {
  273. settings.set(userSettings.ui);
  274. } else {
  275. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  276. }
  277. const chatInput = document.getElementById('chat-textarea');
  278. setTimeout(() => chatInput?.focus(), 0);
  279. };
  280. const loadChat = async () => {
  281. chatId.set(chatIdProp);
  282. chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
  283. await goto('/');
  284. return null;
  285. });
  286. if (chat) {
  287. tags = await getTags();
  288. const chatContent = chat.chat;
  289. if (chatContent) {
  290. console.log(chatContent);
  291. selectedModels =
  292. (chatContent?.models ?? undefined) !== undefined
  293. ? chatContent.models
  294. : [chatContent.models ?? ''];
  295. history =
  296. (chatContent?.history ?? undefined) !== undefined
  297. ? chatContent.history
  298. : convertMessagesToHistory(chatContent.messages);
  299. title = chatContent.title;
  300. const userSettings = await getUserSettings(localStorage.token);
  301. if (userSettings) {
  302. await settings.set(userSettings.ui);
  303. } else {
  304. await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  305. }
  306. params = chatContent?.params ?? {};
  307. chatFiles = chatContent?.files ?? [];
  308. autoScroll = true;
  309. await tick();
  310. if (messages.length > 0) {
  311. history.messages[messages.at(-1).id].done = true;
  312. }
  313. await tick();
  314. return true;
  315. } else {
  316. return null;
  317. }
  318. }
  319. };
  320. const scrollToBottom = async () => {
  321. await tick();
  322. if (messagesContainerElement) {
  323. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  324. }
  325. };
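// Walk parentId links upward to build the ordered message chain ending at the given response message.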
  326. const createMessagesList = (responseMessageId) => {
  327. const message = history.messages[responseMessageId];
  328. if (message.parentId) {
  329. return [...createMessagesList(message.parentId), message];
  330. } else {
  331. return [message];
  332. }
  333. };
  334. const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
  335. await mermaid.run({
  336. querySelector: '.mermaid'
  337. });
  338. const res = await chatCompleted(localStorage.token, {
  339. model: modelId,
  340. messages: messages.map((m) => ({
  341. id: m.id,
  342. role: m.role,
  343. content: m.content,
  344. info: m.info ? m.info : undefined,
  345. timestamp: m.timestamp
  346. })),
  347. chat_id: chatId,
  348. session_id: $socket?.id,
  349. id: responseMessageId
  350. }).catch((error) => {
  351. toast.error(error);
  352. messages.at(-1).error = { content: error };
  353. return null;
  354. });
  355. if (res !== null) {
  356. // Update chat history with the new messages
  357. for (const message of res.messages) {
  358. history.messages[message.id] = {
  359. ...history.messages[message.id],
  360. ...(history.messages[message.id].content !== message.content
  361. ? { originalContent: history.messages[message.id].content }
  362. : {}),
  363. ...message
  364. };
  365. }
  366. }
  367. if ($chatId == chatId) {
  368. if (!$temporaryChatEnabled) {
  369. chat = await updateChatById(localStorage.token, chatId, {
  370. models: selectedModels,
  371. messages: messages,
  372. history: history,
  373. params: params,
  374. files: chatFiles
  375. });
  376. currentChatPage.set(1);
  377. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  378. }
  379. }
  380. };
  381. const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
  382. const res = await chatAction(localStorage.token, actionId, {
  383. model: modelId,
  384. messages: messages.map((m) => ({
  385. id: m.id,
  386. role: m.role,
  387. content: m.content,
  388. info: m.info ? m.info : undefined,
  389. timestamp: m.timestamp
  390. })),
  391. ...(event ? { event: event } : {}),
  392. chat_id: chatId,
  393. session_id: $socket?.id,
  394. id: responseMessageId
  395. }).catch((error) => {
  396. toast.error(error);
  397. messages.at(-1).error = { content: error };
  398. return null;
  399. });
  400. if (res !== null) {
  401. // Update chat history with the new messages
  402. for (const message of res.messages) {
  403. history.messages[message.id] = {
  404. ...history.messages[message.id],
  405. ...(history.messages[message.id].content !== message.content
  406. ? { originalContent: history.messages[message.id].content }
  407. : {}),
  408. ...message
  409. };
  410. }
  411. }
  412. if ($chatId == chatId) {
  413. if (!$temporaryChatEnabled) {
  414. chat = await updateChatById(localStorage.token, chatId, {
  415. models: selectedModels,
  416. messages: messages,
  417. history: history,
  418. params: params,
  419. files: chatFiles
  420. });
  421. currentChatPage.set(1);
  422. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  423. }
  424. }
  425. };
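// Emit a periodic `usage` event over the socket while a completion for this model/chat is in flight.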
  426. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  427. return setInterval(() => {
  428. $socket?.emit('usage', {
  429. action: 'chat',
  430. model: modelId,
  431. chat_id: chatId
  432. });
  433. }, 1000);
  434. };
  435. //////////////////////////
  436. // Chat functions
  437. //////////////////////////
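// Validate the prompt and selected models, append the user message to history, then fan out via sendPrompt.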
  438. const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
  439. let _responses = [];
  440. console.log('submitPrompt', $chatId);
  441. selectedModels = selectedModels.map((modelId) =>
  442. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  443. );
  444. if (selectedModels.includes('')) {
  445. toast.error($i18n.t('Model not selected'));
  446. } else if (messages.length != 0 && messages.at(-1).done != true) {
  447. // Response not done
  448. console.log('wait');
  449. } else if (messages.length != 0 && messages.at(-1).error) {
  450. // Error in response
  451. toast.error(
  452. $i18n.t(
  453. `Oops! There was an error in the previous response. Please try again or contact admin.`
  454. )
  455. );
  456. } else if (
  457. files.length > 0 &&
  458. files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
  459. ) {
  460. // Upload not done
  461. toast.error(
  462. $i18n.t(
  463. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  464. )
  465. );
  466. } else if (
  467. ($config?.file?.max_count ?? null) !== null &&
  468. files.length + chatFiles.length > $config?.file?.max_count
  469. ) {
  470. console.log(chatFiles.length, files.length);
  471. toast.error(
  472. $i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
  473. maxCount: $config?.file?.max_count
  474. })
  475. );
  476. } else {
  477. // Reset chat input textarea
  478. const chatTextAreaElement = document.getElementById('chat-textarea');
  479. if (chatTextAreaElement) {
  480. chatTextAreaElement.value = '';
  481. chatTextAreaElement.style.height = '';
  482. }
  483. const _files = JSON.parse(JSON.stringify(files));
  484. chatFiles.push(..._files.filter((item) => ['doc', 'file', 'collection'].includes(item.type)));
  485. chatFiles = chatFiles.filter(
  486. // Remove duplicates
  487. (item, index, array) =>
  488. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  489. );
  490. files = [];
  491. prompt = '';
  492. // Create user message
  493. let userMessageId = uuidv4();
  494. let userMessage = {
  495. id: userMessageId,
  496. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  497. childrenIds: [],
  498. role: 'user',
  499. content: userPrompt,
  500. files: _files.length > 0 ? _files : undefined,
  501. timestamp: Math.floor(Date.now() / 1000), // Unix epoch
  502. models: selectedModels
  503. };
  504. // Add message to history and Set currentId to messageId
  505. history.messages[userMessageId] = userMessage;
  506. history.currentId = userMessageId;
  507. // Append messageId to childrenIds of parent message
  508. if (messages.length !== 0) {
  509. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  510. }
  511. // Wait until history/message have been updated
  512. await tick();
  513. _responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
  514. }
  515. return _responses;
  516. };
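// Create a response message per selected model, persist the new chat on the first exchange, then dispatch to the Ollama or OpenAI sender.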
  517. const sendPrompt = async (
  518. prompt: string,
  519. parentId: string,
  520. { modelId = null, modelIdx = null, newChat = false } = {}
  521. ) => {
  522. let _responses: string[] = [];
  523. // If modelId is provided, use it, else use selected model
  524. let selectedModelIds = modelId
  525. ? [modelId]
  526. : atSelectedModel !== undefined
  527. ? [atSelectedModel.id]
  528. : selectedModels;
  529. // Create response messages for each selected model
  530. const responseMessageIds: Record<PropertyKey, string> = {};
  531. for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
  532. const model = $models.filter((m) => m.id === modelId).at(0);
  533. if (model) {
  534. let responseMessageId = uuidv4();
  535. let responseMessage = {
  536. parentId: parentId,
  537. id: responseMessageId,
  538. childrenIds: [],
  539. role: 'assistant',
  540. content: '',
  541. model: model.id,
  542. modelName: model.name ?? model.id,
  543. modelIdx: modelIdx ? modelIdx : _modelIdx,
  544. userContext: null,
  545. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  546. };
  547. // Add message to history and Set currentId to messageId
  548. history.messages[responseMessageId] = responseMessage;
  549. history.currentId = responseMessageId;
  550. // Append messageId to childrenIds of parent message
  551. if (parentId !== null) {
  552. history.messages[parentId].childrenIds = [
  553. ...history.messages[parentId].childrenIds,
  554. responseMessageId
  555. ];
  556. }
  557. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
  558. }
  559. }
  560. await tick();
  561. // Create new chat if only one message in messages
  562. if (newChat && messages.length == 2) {
  563. if (!$temporaryChatEnabled) {
  564. chat = await createNewChat(localStorage.token, {
  565. id: $chatId,
  566. title: $i18n.t('New Chat'),
  567. models: selectedModels,
  568. system: $settings.system ?? undefined,
  569. params: params,
  570. messages: messages,
  571. history: history,
  572. tags: [],
  573. timestamp: Date.now()
  574. });
  575. currentChatPage.set(1);
  576. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  577. await chatId.set(chat.id);
  578. } else {
  579. await chatId.set('local');
  580. }
  581. await tick();
  582. }
  583. const _chatId = JSON.parse(JSON.stringify($chatId));
  584. await Promise.all(
  585. selectedModelIds.map(async (modelId, _modelIdx) => {
  586. console.log('modelId', modelId);
  587. const model = $models.filter((m) => m.id === modelId).at(0);
  588. if (model) {
  589. // If there are image files, check if model is vision capable
  590. const hasImages = messages.some((message) =>
  591. message.files?.some((file) => file.type === 'image')
  592. );
  593. if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
  594. toast.error(
  595. $i18n.t('Model {{modelName}} is not vision capable', {
  596. modelName: model.name ?? model.id
  597. })
  598. );
  599. }
  600. let responseMessageId =
  601. responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
  602. let responseMessage = history.messages[responseMessageId];
  603. let userContext = null;
  604. if ($settings?.memory ?? false) {
  605. if (userContext === null) {
  606. const res = await queryMemory(localStorage.token, prompt).catch((error) => {
  607. toast.error(error);
  608. return null;
  609. });
  610. if (res) {
  611. if (res.documents[0].length > 0) {
  612. userContext = res.documents[0].reduce((acc, doc, index) => {
  613. const createdAtTimestamp = res.metadatas[0][index].created_at;
  614. const createdAtDate = new Date(createdAtTimestamp * 1000)
  615. .toISOString()
  616. .split('T')[0];
  617. return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
  618. }, '');
  619. }
  620. console.log(userContext);
  621. }
  622. }
  623. }
  624. responseMessage.userContext = userContext;
  625. const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
  626. if (webSearchEnabled) {
  627. await getWebSearchResults(model.id, parentId, responseMessageId);
  628. }
  629. let _response = null;
  630. if (model?.owned_by === 'openai') {
  631. _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  632. } else if (model) {
  633. _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  634. }
  635. _responses.push(_response);
  636. if (chatEventEmitter) clearInterval(chatEventEmitter);
  637. } else {
  638. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  639. }
  640. })
  641. );
  642. currentChatPage.set(1);
  643. chats.set(await getChatList(localStorage.token, $currentChatPage));
  644. return _responses;
  645. };
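// Stream a completion from the Ollama backend, appending chunks to the response message and saving the chat when done.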
  646. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  647. let _response: string | null = null;
  648. const responseMessage = history.messages[responseMessageId];
  649. const userMessage = history.messages[responseMessage.parentId];
  650. // Wait until history/message have been updated
  651. await tick();
  652. // Scroll down
  653. scrollToBottom();
  654. const messagesBody = [
  655. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  656. ? {
  657. role: 'system',
  658. content: `${promptTemplate(
  659. params?.system ?? $settings?.system ?? '',
  660. $user.name,
  661. $settings?.userLocation
  662. ? await getAndUpdateUserLocation(localStorage.token)
  663. : undefined
  664. )}${
  665. (responseMessage?.userContext ?? null)
  666. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  667. : ''
  668. }`
  669. }
  670. : undefined,
  671. ...messages
  672. ]
  673. .filter((message) => message?.content?.trim())
  674. .map((message) => {
  675. // Prepare the base message object
  676. const baseMessage = {
  677. role: message.role,
  678. content: message.content
  679. };
  680. // Extract and format image URLs if any exist
  681. const imageUrls = message.files
  682. ?.filter((file) => file.type === 'image')
  683. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  684. // Add images array only if it contains elements
  685. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  686. baseMessage.images = imageUrls;
  687. }
  688. return baseMessage;
  689. });
  690. let lastImageIndex = -1;
  691. // Find the index of the last object with images
  692. messagesBody.forEach((item, index) => {
  693. if (item.images) {
  694. lastImageIndex = index;
  695. }
  696. });
  697. // Remove images from all but the last one
  698. messagesBody.forEach((item, index) => {
  699. if (index !== lastImageIndex) {
  700. delete item.images;
  701. }
  702. });
  703. let files = JSON.parse(JSON.stringify(chatFiles));
  704. if (model?.info?.meta?.knowledge ?? false) {
  705. // Only initialize and add status if knowledge exists
  706. responseMessage.statusHistory = [
  707. {
  708. action: 'knowledge_search',
  709. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  710. searchQuery: userMessage.content
  711. }),
  712. done: false
  713. }
  714. ];
  715. files.push(...model.info.meta.knowledge);
  716. messages = messages; // Trigger Svelte update
  717. }
  718. files.push(
  719. ...(userMessage?.files ?? []).filter((item) =>
  720. ['doc', 'file', 'collection'].includes(item.type)
  721. ),
  722. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  723. );
  724. scrollToBottom();
  725. eventTarget.dispatchEvent(
  726. new CustomEvent('chat:start', {
  727. detail: {
  728. id: responseMessageId
  729. }
  730. })
  731. );
  732. await tick();
  733. const [res, controller] = await generateChatCompletion(localStorage.token, {
  734. stream: true,
  735. model: model.id,
  736. messages: messagesBody,
  737. options: {
  738. ...{ ...($settings?.params ?? {}), ...params },
  739. stop:
  740. (params?.stop ?? $settings?.params?.stop ?? undefined)
  741. ? (params?.stop?.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  742. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  743. )
  744. : undefined,
  745. num_predict: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  746. repeat_penalty:
  747. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined
  748. },
  749. format: $settings.requestFormat ?? undefined,
  750. keep_alive: $settings.keepAlive ?? undefined,
  751. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  752. files: files.length > 0 ? files : undefined,
  753. session_id: $socket?.id,
  754. chat_id: $chatId,
  755. id: responseMessageId
  756. });
  757. if (res && res.ok) {
  758. console.log('controller', controller);
  759. const reader = res.body
  760. .pipeThrough(new TextDecoderStream())
  761. .pipeThrough(splitStream('\n'))
  762. .getReader();
  763. while (true) {
  764. const { value, done } = await reader.read();
  765. if (done || stopResponseFlag || _chatId !== $chatId) {
  766. responseMessage.done = true;
  767. messages = messages;
  768. if (stopResponseFlag) {
  769. controller.abort('User: Stop Response');
  770. } else {
  771. const messages = createMessagesList(responseMessageId);
  772. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  773. }
  774. _response = responseMessage.content;
  775. break;
  776. }
  777. try {
  778. let lines = value.split('\n');
  779. for (const line of lines) {
  780. if (line !== '') {
  781. console.log(line);
  782. let data = JSON.parse(line);
  783. if ('citations' in data) {
  784. responseMessage.citations = data.citations;
  785. // Only remove status if it was initially set
  786. if (model?.info?.meta?.knowledge ?? false) {
  787. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  788. (status) => status.action !== 'knowledge_search'
  789. );
  790. }
  791. continue;
  792. }
  793. if ('detail' in data) {
  794. throw data;
  795. }
  796. if (data.done == false) {
  797. if (responseMessage.content == '' && data.message.content == '\n') {
  798. continue;
  799. } else {
  800. responseMessage.content += data.message.content;
  801. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  802. navigator.vibrate(5);
  803. }
  804. const messageContentParts = getMessageContentParts(
  805. responseMessage.content,
  806. $config?.audio?.tts?.split_on ?? 'punctuation'
  807. );
  808. messageContentParts.pop();
  809. // dispatch only last sentence and make sure it hasn't been dispatched before
  810. if (
  811. messageContentParts.length > 0 &&
  812. messageContentParts[messageContentParts.length - 1] !==
  813. responseMessage.lastSentence
  814. ) {
  815. responseMessage.lastSentence =
  816. messageContentParts[messageContentParts.length - 1];
  817. eventTarget.dispatchEvent(
  818. new CustomEvent('chat', {
  819. detail: {
  820. id: responseMessageId,
  821. content: messageContentParts[messageContentParts.length - 1]
  822. }
  823. })
  824. );
  825. }
  826. messages = messages;
  827. }
  828. } else {
  829. responseMessage.done = true;
  830. if (responseMessage.content == '') {
  831. responseMessage.error = {
  832. code: 400,
  833. content: `Oops! No text generated from Ollama. Please try again.`
  834. };
  835. }
  836. responseMessage.context = data.context ?? null;
  837. responseMessage.info = {
  838. total_duration: data.total_duration,
  839. load_duration: data.load_duration,
  840. sample_count: data.sample_count,
  841. sample_duration: data.sample_duration,
  842. prompt_eval_count: data.prompt_eval_count,
  843. prompt_eval_duration: data.prompt_eval_duration,
  844. eval_count: data.eval_count,
  845. eval_duration: data.eval_duration
  846. };
  847. messages = messages;
  848. if ($settings.notificationEnabled && !document.hasFocus()) {
  849. const notification = new Notification(`${model.id}`, {
  850. body: responseMessage.content,
  851. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  852. });
  853. }
  854. if ($settings?.responseAutoCopy ?? false) {
  855. copyToClipboard(responseMessage.content);
  856. }
  857. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  858. await tick();
  859. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  860. }
  861. }
  862. }
  863. }
  864. } catch (error) {
  865. console.log(error);
  866. if ('detail' in error) {
  867. toast.error(error.detail);
  868. }
  869. break;
  870. }
  871. if (autoScroll) {
  872. scrollToBottom();
  873. }
  874. }
  875. } else {
  876. if (res !== null) {
  877. const error = await res.json();
  878. console.log(error);
  879. if ('detail' in error) {
  880. toast.error(error.detail);
  881. responseMessage.error = { content: error.detail };
  882. } else {
  883. toast.error(error.error);
  884. responseMessage.error = { content: error.error };
  885. }
  886. } else {
  887. toast.error(
  888. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  889. );
  890. responseMessage.error = {
  891. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  892. provider: 'Ollama'
  893. })
  894. };
  895. }
  896. responseMessage.done = true;
  897. if (responseMessage.statusHistory) {
  898. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  899. (status) => status.action !== 'knowledge_search'
  900. );
  901. }
  902. messages = messages;
  903. }
  904. await saveChatHandler(_chatId);
  905. stopResponseFlag = false;
  906. await tick();
  907. let lastMessageContentPart =
  908. getMessageContentParts(
  909. responseMessage.content,
  910. $config?.audio?.tts?.split_on ?? 'punctuation'
  911. )?.at(-1) ?? '';
  912. if (lastMessageContentPart) {
  913. eventTarget.dispatchEvent(
  914. new CustomEvent('chat', {
  915. detail: { id: responseMessageId, content: lastMessageContentPart }
  916. })
  917. );
  918. }
  919. eventTarget.dispatchEvent(
  920. new CustomEvent('chat:finish', {
  921. detail: {
  922. id: responseMessageId,
  923. content: responseMessage.content
  924. }
  925. })
  926. );
  927. if (autoScroll) {
  928. scrollToBottom();
  929. }
  930. if (messages.length == 2 && messages.at(1).content !== '' && selectedModels[0] === model.id) {
  931. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  932. const _title = await generateChatTitle(userPrompt);
  933. await setChatTitle(_chatId, _title);
  934. }
  935. return _response;
  936. };
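// Stream a completion from an OpenAI-compatible backend, handling usage info, citations and errors as they arrive.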
  937. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  938. let _response = null;
  939. const responseMessage = history.messages[responseMessageId];
  940. const userMessage = history.messages[responseMessage.parentId];
  941. let files = JSON.parse(JSON.stringify(chatFiles));
  942. if (model?.info?.meta?.knowledge ?? false) {
  943. // Only initialize and add status if knowledge exists
  944. responseMessage.statusHistory = [
  945. {
  946. action: 'knowledge_search',
  947. description: $i18n.t(`Searching Knowledge for "{{searchQuery}}"`, {
  948. searchQuery: userMessage.content
  949. }),
  950. done: false
  951. }
  952. ];
  953. files.push(...model.info.meta.knowledge);
  954. messages = messages; // Trigger Svelte update
  955. }
  956. files.push(
  957. ...(userMessage?.files ?? []).filter((item) =>
  958. ['doc', 'file', 'collection'].includes(item.type)
  959. ),
  960. ...(responseMessage?.files ?? []).filter((item) => ['web_search_results'].includes(item.type))
  961. );
  962. scrollToBottom();
  963. eventTarget.dispatchEvent(
  964. new CustomEvent('chat:start', {
  965. detail: {
  966. id: responseMessageId
  967. }
  968. })
  969. );
  970. await tick();
  971. try {
  972. const [res, controller] = await generateOpenAIChatCompletion(
  973. localStorage.token,
  974. {
  975. stream: true,
  976. model: model.id,
  977. stream_options:
  978. (model.info?.meta?.capabilities?.usage ?? false)
  979. ? {
  980. include_usage: true
  981. }
  982. : undefined,
  983. messages: [
  984. params?.system || $settings.system || (responseMessage?.userContext ?? null)
  985. ? {
  986. role: 'system',
  987. content: `${promptTemplate(
  988. params?.system ?? $settings?.system ?? '',
  989. $user.name,
  990. $settings?.userLocation
  991. ? await getAndUpdateUserLocation(localStorage.token)
  992. : undefined
  993. )}${
  994. (responseMessage?.userContext ?? null)
  995. ? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
  996. : ''
  997. }`
  998. }
  999. : undefined,
  1000. ...messages
  1001. ]
  1002. .filter((message) => message?.content?.trim())
  1003. .map((message, idx, arr) => ({
  1004. role: message.role,
  1005. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  1006. message.role === 'user'
  1007. ? {
  1008. content: [
  1009. {
  1010. type: 'text',
  1011. text:
  1012. arr.length - 1 !== idx
  1013. ? message.content
  1014. : (message?.raContent ?? message.content)
  1015. },
  1016. ...message.files
  1017. .filter((file) => file.type === 'image')
  1018. .map((file) => ({
  1019. type: 'image_url',
  1020. image_url: {
  1021. url: file.url
  1022. }
  1023. }))
  1024. ]
  1025. }
  1026. : {
  1027. content:
  1028. arr.length - 1 !== idx
  1029. ? message.content
  1030. : (message?.raContent ?? message.content)
  1031. })
  1032. })),
  1033. seed: params?.seed ?? $settings?.params?.seed ?? undefined,
  1034. stop:
  1035. (params?.stop ?? $settings?.params?.stop ?? undefined)
  1036. ? (params?.stop?.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
  1037. (str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  1038. )
  1039. : undefined,
  1040. temperature: params?.temperature ?? $settings?.params?.temperature ?? undefined,
  1041. top_p: params?.top_p ?? $settings?.params?.top_p ?? undefined,
  1042. frequency_penalty:
  1043. params?.frequency_penalty ?? $settings?.params?.frequency_penalty ?? undefined,
  1044. max_tokens: params?.max_tokens ?? $settings?.params?.max_tokens ?? undefined,
  1045. tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
  1046. files: files.length > 0 ? files : undefined,
  1047. session_id: $socket?.id,
  1048. chat_id: $chatId,
  1049. id: responseMessageId
  1050. },
  1051. `${WEBUI_BASE_URL}/api`
  1052. );
  1053. // Wait until history/message have been updated
  1054. await tick();
  1055. scrollToBottom();
  1056. if (res && res.ok && res.body) {
  1057. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1058. for await (const update of textStream) {
  1059. const { value, done, citations, error, usage } = update;
  1060. if (error) {
  1061. await handleOpenAIError(error, null, model, responseMessage);
  1062. break;
  1063. }
  1064. if (done || stopResponseFlag || _chatId !== $chatId) {
  1065. responseMessage.done = true;
  1066. messages = messages;
  1067. if (stopResponseFlag) {
  1068. controller.abort('User: Stop Response');
  1069. } else {
  1070. const messages = createMessagesList(responseMessageId);
  1071. await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
  1072. }
  1073. _response = responseMessage.content;
  1074. break;
  1075. }
  1076. if (usage) {
  1077. responseMessage.info = { ...usage, openai: true };
  1078. }
  1079. if (citations) {
  1080. responseMessage.citations = citations;
  1081. // Only remove status if it was initially set
  1082. if (model?.info?.meta?.knowledge ?? false) {
  1083. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1084. (status) => status.action !== 'knowledge_search'
  1085. );
  1086. }
  1087. continue;
  1088. }
  1089. if (responseMessage.content == '' && value == '\n') {
  1090. continue;
  1091. } else {
  1092. responseMessage.content += value;
  1093. if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
  1094. navigator.vibrate(5);
  1095. }
  1096. const messageContentParts = getMessageContentParts(
  1097. responseMessage.content,
  1098. $config?.audio?.tts?.split_on ?? 'punctuation'
  1099. );
  1100. messageContentParts.pop();
  1101. // dispatch only last sentence and make sure it hasn't been dispatched before
  1102. if (
  1103. messageContentParts.length > 0 &&
  1104. messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
  1105. ) {
  1106. responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
  1107. eventTarget.dispatchEvent(
  1108. new CustomEvent('chat', {
  1109. detail: {
  1110. id: responseMessageId,
  1111. content: messageContentParts[messageContentParts.length - 1]
  1112. }
  1113. })
  1114. );
  1115. }
  1116. messages = messages;
  1117. }
  1118. if (autoScroll) {
  1119. scrollToBottom();
  1120. }
  1121. }
  1122. if ($settings.notificationEnabled && !document.hasFocus()) {
  1123. const notification = new Notification(`${model.id}`, {
  1124. body: responseMessage.content,
  1125. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  1126. });
  1127. }
  1128. if ($settings.responseAutoCopy) {
  1129. copyToClipboard(responseMessage.content);
  1130. }
  1131. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  1132. await tick();
  1133. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  1134. }
  1135. } else {
  1136. await handleOpenAIError(null, res, model, responseMessage);
  1137. }
  1138. } catch (error) {
  1139. await handleOpenAIError(error, null, model, responseMessage);
  1140. }
  1141. await saveChatHandler(_chatId);
  1142. messages = messages;
  1143. stopResponseFlag = false;
  1144. await tick();
  1145. let lastMessageContentPart =
  1146. getMessageContentParts(
  1147. responseMessage.content,
  1148. $config?.audio?.tts?.split_on ?? 'punctuation'
  1149. )?.at(-1) ?? '';
  1150. if (lastMessageContentPart) {
  1151. eventTarget.dispatchEvent(
  1152. new CustomEvent('chat', {
  1153. detail: { id: responseMessageId, content: lastMessageContentPart }
  1154. })
  1155. );
  1156. }
  1157. eventTarget.dispatchEvent(
  1158. new CustomEvent('chat:finish', {
  1159. detail: {
  1160. id: responseMessageId,
  1161. content: responseMessage.content
  1162. }
  1163. })
  1164. );
  1165. if (autoScroll) {
  1166. scrollToBottom();
  1167. }
  1168. if (messages.length == 2 && selectedModels[0] === model.id) {
  1169. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  1170. const _title = await generateChatTitle(userPrompt);
  1171. await setChatTitle(_chatId, _title);
  1172. }
  1173. return _response;
  1174. };
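// Normalize the different OpenAI error shapes, show a toast, and mark the response message as failed.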
  1175. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1176. let errorMessage = '';
  1177. let innerError;
  1178. if (error) {
  1179. innerError = error;
  1180. } else if (res !== null) {
  1181. innerError = await res.json();
  1182. }
  1183. console.error(innerError);
  1184. if ('detail' in innerError) {
  1185. toast.error(innerError.detail);
  1186. errorMessage = innerError.detail;
  1187. } else if ('error' in innerError) {
  1188. if ('message' in innerError.error) {
  1189. toast.error(innerError.error.message);
  1190. errorMessage = innerError.error.message;
  1191. } else {
  1192. toast.error(innerError.error);
  1193. errorMessage = innerError.error;
  1194. }
  1195. } else if ('message' in innerError) {
  1196. toast.error(innerError.message);
  1197. errorMessage = innerError.message;
  1198. }
  1199. responseMessage.error = {
  1200. content:
  1201. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1202. provider: model.name ?? model.id
  1203. }) +
  1204. '\n' +
  1205. errorMessage
  1206. };
  1207. responseMessage.done = true;
  1208. if (responseMessage.statusHistory) {
  1209. responseMessage.statusHistory = responseMessage.statusHistory.filter(
  1210. (status) => status.action !== 'knowledge_search'
  1211. );
  1212. }
  1213. messages = messages;
  1214. };
  1215. const stopResponse = () => {
  1216. stopResponseFlag = true;
  1217. console.log('stopResponse');
  1218. };
  1219. const regenerateResponse = async (message) => {
  1220. console.log('regenerateResponse');
  1221. if (messages.length != 0) {
  1222. let userMessage = history.messages[message.parentId];
  1223. let userPrompt = userMessage.content;
  1224. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1225. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1226. await sendPrompt(userPrompt, userMessage.id);
  1227. } else {
  1228. // If there are multiple models selected, use the model of the response message for regeneration
  1229. // e.g. many model chat
  1230. await sendPrompt(userPrompt, userMessage.id, {
  1231. modelId: message.model,
  1232. modelIdx: message.modelIdx
  1233. });
  1234. }
  1235. }
  1236. };
  1237. const continueGeneration = async () => {
  1238. console.log('continueGeneration');
  1239. const _chatId = JSON.parse(JSON.stringify($chatId));
  1240. if (messages.length != 0 && messages.at(-1).done == true) {
  1241. const responseMessage = history.messages[history.currentId];
  1242. responseMessage.done = false;
  1243. await tick();
  1244. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1245. if (model) {
  1246. if (model?.owned_by === 'openai') {
  1247. await sendPromptOpenAI(
  1248. model,
  1249. history.messages[responseMessage.parentId].content,
  1250. responseMessage.id,
  1251. _chatId
  1252. );
  1253. } else
  1254. await sendPromptOllama(
  1255. model,
  1256. history.messages[responseMessage.parentId].content,
  1257. responseMessage.id,
  1258. _chatId
  1259. );
  1260. } else {
  1261. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId: responseMessage.model }));
  1262. }
  1263. }
  1264. };
  1265. const generateChatTitle = async (userPrompt) => {
  1266. if ($settings?.title?.auto ?? true) {
  1267. const title = await generateTitle(
  1268. localStorage.token,
  1269. selectedModels[0],
  1270. userPrompt,
  1271. $chatId
  1272. ).catch((error) => {
  1273. console.error(error);
  1274. return 'New Chat';
  1275. });
  1276. return title;
  1277. } else {
  1278. return `${userPrompt}`;
  1279. }
  1280. };
  1281. const setChatTitle = async (_chatId, _title) => {
  1282. if (_chatId === $chatId) {
  1283. title = _title;
  1284. }
  1285. if (!$temporaryChatEnabled) {
  1286. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1287. currentChatPage.set(1);
  1288. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1289. }
  1290. };
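// Generate a search query for the latest user message, run the web search, and attach the results to the response message.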
  1291. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1292. const responseMessage = history.messages[responseId];
  1293. const userMessage = history.messages[parentId];
  1294. responseMessage.statusHistory = [
  1295. {
  1296. done: false,
  1297. action: 'web_search',
  1298. description: $i18n.t('Generating search query')
  1299. }
  1300. ];
  1301. messages = messages;
  1302. const prompt = userMessage.content;
  1303. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1304. (error) => {
  1305. console.log(error);
  1306. return prompt;
  1307. }
  1308. );
  1309. if (!searchQuery) {
  1310. toast.warning($i18n.t('No search query generated'));
  1311. responseMessage.statusHistory.push({
  1312. done: true,
  1313. error: true,
  1314. action: 'web_search',
  1315. description: 'No search query generated'
  1316. });
  1317. messages = messages;
  1318. }
  1319. responseMessage.statusHistory.push({
  1320. done: false,
  1321. action: 'web_search',
  1322. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1323. });
  1324. messages = messages;
  1325. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1326. console.log(error);
  1327. toast.error(error);
  1328. return null;
  1329. });
  1330. if (results) {
  1331. responseMessage.statusHistory.push({
  1332. done: true,
  1333. action: 'web_search',
  1334. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1335. query: searchQuery,
  1336. urls: results.filenames
  1337. });
  1338. if ((responseMessage?.files ?? undefined) === undefined) {
  1339. responseMessage.files = [];
  1340. }
  1341. responseMessage.files.push({
  1342. collection_name: results.collection_name,
  1343. name: searchQuery,
  1344. type: 'web_search_results',
  1345. urls: results.filenames
  1346. });
  1347. messages = messages;
  1348. } else {
  1349. responseMessage.statusHistory.push({
  1350. done: true,
  1351. error: true,
  1352. action: 'web_search',
  1353. description: 'No search results found'
  1354. });
  1355. messages = messages;
  1356. }
  1357. };
  1358. const getTags = async () => {
  1359. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1360. return [];
  1361. });
  1362. };
  1363. const saveChatHandler = async (_chatId) => {
  1364. if ($chatId == _chatId) {
  1365. if (!$temporaryChatEnabled) {
  1366. chat = await updateChatById(localStorage.token, _chatId, {
  1367. messages: messages,
  1368. history: history,
  1369. models: selectedModels,
  1370. params: params,
  1371. files: chatFiles
  1372. });
  1373. currentChatPage.set(1);
  1374. await chats.set(await getChatList(localStorage.token, $currentChatPage));
  1375. }
  1376. }
  1377. };
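// Merge multiple model responses into a single answer by streaming a mixture-of-agents (MoA) completion.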
  1378. const mergeResponses = async (messageId, responses, _chatId) => {
  1379. console.log('mergeResponses', messageId, responses);
  1380. const message = history.messages[messageId];
  1381. const mergedResponse = {
  1382. status: true,
  1383. content: ''
  1384. };
  1385. message.merged = mergedResponse;
  1386. messages = messages;
  1387. try {
  1388. const [res, controller] = await generateMoACompletion(
  1389. localStorage.token,
  1390. message.model,
  1391. history.messages[message.parentId].content,
  1392. responses
  1393. );
  1394. if (res && res.ok && res.body) {
  1395. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  1396. for await (const update of textStream) {
  1397. const { value, done, citations, error, usage } = update;
  1398. if (error || done) {
  1399. break;
  1400. }
  1401. if (mergedResponse.content == '' && value == '\n') {
  1402. continue;
  1403. } else {
  1404. mergedResponse.content += value;
  1405. messages = messages;
  1406. }
  1407. if (autoScroll) {
  1408. scrollToBottom();
  1409. }
  1410. }
  1411. await saveChatHandler(_chatId);
  1412. } else {
  1413. console.error(res);
  1414. }
  1415. } catch (e) {
  1416. console.error(e);
  1417. }
  1418. };
  1419. </script>
  1420. <svelte:head>
  1421. <title>
  1422. {title
  1423. ? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
  1424. : `${$WEBUI_NAME}`}
  1425. </title>
  1426. </svelte:head>
  1427. <audio id="audioElement" src="" style="display: none;" />
  1428. <EventConfirmDialog
  1429. bind:show={showEventConfirmation}
  1430. title={eventConfirmationTitle}
  1431. message={eventConfirmationMessage}
  1432. input={eventConfirmationInput}
  1433. inputPlaceholder={eventConfirmationInputPlaceholder}
  1434. inputValue={eventConfirmationInputValue}
  1435. on:confirm={(e) => {
  1436. if (e.detail) {
  1437. eventCallback(e.detail);
  1438. } else {
  1439. eventCallback(true);
  1440. }
  1441. }}
  1442. on:cancel={() => {
  1443. eventCallback(false);
  1444. }}
  1445. />
  1446. {#if !chatIdProp || (loaded && chatIdProp)}
  1447. <div
  1448. class="h-screen max-h-[100dvh] {$showSidebar
  1449. ? 'md:max-w-[calc(100%-260px)]'
  1450. : ''} w-full max-w-full flex flex-col"
  1451. >
  1452. {#if $settings?.backgroundImageUrl ?? null}
  1453. <div
  1454. class="absolute {$showSidebar
  1455. ? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
  1456. : ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
  1457. style="background-image: url({$settings.backgroundImageUrl}) "
  1458. />
  1459. <div
  1460. class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
  1461. />
  1462. {/if}
  1463. <Navbar
  1464. {title}
  1465. bind:selectedModels
  1466. bind:showModelSelector
  1467. bind:showControls
  1468. shareEnabled={messages.length > 0}
  1469. {chat}
  1470. {initNewChat}
  1471. />
  1472. {#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
  1473. <div
  1474. class="absolute top-[4.25rem] w-full {$showSidebar
  1475. ? 'md:max-w-[calc(100%-260px)]'
  1476. : ''} {showControls ? 'lg:pr-[24rem]' : ''} z-20"
  1477. >
  1478. <div class=" flex flex-col gap-1 w-full">
  1479. {#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
  1480. <Banner
  1481. {banner}
  1482. on:dismiss={(e) => {
  1483. const bannerId = e.detail;
  1484. localStorage.setItem(
  1485. 'dismissedBannerIds',
  1486. JSON.stringify(
  1487. [
  1488. bannerId,
  1489. ...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
  1490. ].filter((id) => $banners.find((b) => b.id === id))
  1491. )
  1492. );
  1493. }}
  1494. />
  1495. {/each}
  1496. </div>
  1497. </div>
  1498. {/if}
  1499. <div class="flex flex-col flex-auto z-10">
  1500. <div
  1501. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden {showControls
  1502. ? 'lg:pr-[24rem]'
  1503. : ''}"
  1504. id="messages-container"
  1505. bind:this={messagesContainerElement}
  1506. on:scroll={(e) => {
  1507. autoScroll =
  1508. messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
  1509. messagesContainerElement.clientHeight + 5;
  1510. }}
  1511. >
  1512. <div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
  1513. <Messages
  1514. chatId={$chatId}
  1515. {selectedModels}
  1516. {processing}
  1517. bind:history
  1518. bind:messages
  1519. bind:autoScroll
  1520. bind:prompt
  1521. bottomPadding={files.length > 0}
  1522. {sendPrompt}
  1523. {continueGeneration}
  1524. {regenerateResponse}
  1525. {mergeResponses}
  1526. {chatActionHandler}
  1527. />
  1528. </div>
  1529. </div>
  1530. <div class={showControls ? 'lg:pr-[24rem]' : ''}>
  1531. <MessageInput
  1532. bind:files
  1533. bind:prompt
  1534. bind:autoScroll
  1535. bind:selectedToolIds
  1536. bind:webSearchEnabled
  1537. bind:atSelectedModel
  1538. availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
  1539. const model = $models.find((m) => m.id === e);
  1540. if (model?.info?.meta?.toolIds ?? false) {
  1541. return [...new Set([...a, ...model.info.meta.toolIds])];
  1542. }
  1543. return a;
  1544. }, [])}
  1545. transparentBackground={$settings?.backgroundImageUrl ?? false}
  1546. {selectedModels}
  1547. {messages}
  1548. {submitPrompt}
  1549. {stopResponse}
  1550. on:call={() => {
  1551. showControls = true;
  1552. }}
  1553. />
  1554. </div>
  1555. </div>
  1556. </div>
  1557. {/if}
  1558. <ChatControls
  1559. models={selectedModelIds.reduce((a, e, i, arr) => {
  1560. const model = $models.find((m) => m.id === e);
  1561. if (model) {
  1562. return [...a, model];
  1563. }
  1564. return a;
  1565. }, [])}
  1566. bind:show={showControls}
  1567. bind:chatFiles
  1568. bind:params
  1569. bind:files
  1570. {submitPrompt}
  1571. {stopResponse}
  1572. modelId={selectedModelIds?.at(0) ?? null}
  1573. chatId={$chatId}
  1574. {eventTarget}
  1575. />