<!-- Chat.svelte -->
  1. <script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import mermaid from 'mermaid';

import { getContext, onDestroy, onMount, tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import type { Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';

import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import {
	chatId,
	chats,
	config,
	type Model,
	models,
	settings,
	showSidebar,
	tags as _tags,
	WEBUI_NAME,
	banners,
	user,
	socket,
	showCallOverlay,
	tools
} from '$lib/stores';
import {
	convertMessagesToHistory,
	copyToClipboard,
	extractSentencesForAudio,
	getUserPosition,
	promptTemplate,
	splitStream
} from '$lib/utils';

import { generateChatCompletion } from '$lib/apis/ollama';
import {
	addTagById,
	createNewChat,
	deleteTagById,
	getAllChatTags,
	getChatById,
	getChatList,
	getTagsById,
	updateChatById
} from '$lib/apis/chats';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { runWebSearch } from '$lib/apis/rag';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';

import Banner from '../common/Banner.svelte';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import CallOverlay from './MessageInput/CallOverlay.svelte';

import { error } from '@sveltejs/kit';
// i18n instance provided via Svelte context by a parent layout component.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route; empty string means "start a new chat".
export let chatIdProp = '';
let loaded = false;

// Local event bus for chat lifecycle events ('chat:start', 'chat', 'chat:finish'),
// consumed e.g. by the call overlay for audio playback.
const eventTarget = new EventTarget();

let stopResponseFlag = false; // set by the UI to abort an in-flight generation
let autoScroll = true; // follow the stream unless the user scrolled up
let processing = '';
let messagesContainerElement: HTMLDivElement;
let showModelSelector = true;

let selectedModels = [''];
let atSelectedModel: Model | undefined; // model chosen via the "@model" mention, overrides selectedModels

let selectedModelIds = [];
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null;
let tags = [];

let title = '';
let prompt = '';
let files = [];
let messages = [];

// Message tree: id -> message (each message holds parentId/childrenIds),
// plus the id of the leaf currently displayed. NOTE: this variable shadows
// window.history inside this component's scope.
let history = {
	messages: {},
	currentId: null
};

// Derive the linear `messages` list shown in the UI by walking parent links
// from the current leaf up to the root (unshift keeps oldest-first order).
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// Whenever the route supplies a chat id, load that chat; redirect home on failure.
$: if (chatIdProp) {
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;

			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  109. const chatEventHandler = async (event) => {
  110. if (event.chat_id === $chatId) {
  111. await tick();
  112. console.log(event);
  113. let message = history.messages[event.message_id];
  114. const type = event?.data?.type ?? null;
  115. const data = event?.data?.data ?? null;
  116. if (type === 'status') {
  117. if (message.statusHistory) {
  118. message.statusHistory.push(data);
  119. } else {
  120. message.statusHistory = [data];
  121. }
  122. } else if (type === 'citation') {
  123. if (message.citations) {
  124. message.citations.push(data);
  125. } else {
  126. message.citations = [data];
  127. }
  128. } else {
  129. console.log('Unknown message type', data);
  130. }
  131. messages = messages;
  132. }
  133. };
  134. onMount(async () => {
  135. const onMessageHandler = async (event) => {
  136. if (event.origin === window.origin) {
  137. // Replace with your iframe's origin
  138. console.log('Message received from iframe:', event.data);
  139. if (event.data.type === 'input:prompt') {
  140. console.log(event.data.text);
  141. const inputElement = document.getElementById('chat-textarea');
  142. if (inputElement) {
  143. prompt = event.data.text;
  144. inputElement.focus();
  145. }
  146. }
  147. if (event.data.type === 'action:submit') {
  148. console.log(event.data.text);
  149. if (prompt !== '') {
  150. await tick();
  151. submitPrompt(prompt);
  152. }
  153. }
  154. if (event.data.type === 'input:prompt:submit') {
  155. console.log(event.data.text);
  156. if (prompt !== '') {
  157. await tick();
  158. submitPrompt(event.data.text);
  159. }
  160. }
  161. }
  162. };
  163. window.addEventListener('message', onMessageHandler);
  164. $socket.on('chat-events', chatEventHandler);
  165. if (!$chatId) {
  166. chatId.subscribe(async (value) => {
  167. if (!value) {
  168. await initNewChat();
  169. }
  170. });
  171. } else {
  172. if (!($settings.saveChatHistory ?? true)) {
  173. await goto('/');
  174. }
  175. }
  176. return () => {
  177. window.removeEventListener('message', onMessageHandler);
  178. $socket.off('chat-events');
  179. };
  180. });
  181. //////////////////////////
  182. // Web functions
  183. //////////////////////////
  184. const initNewChat = async () => {
  185. window.history.replaceState(history.state, '', `/`);
  186. await chatId.set('');
  187. autoScroll = true;
  188. title = '';
  189. messages = [];
  190. history = {
  191. messages: {},
  192. currentId: null
  193. };
  194. if ($page.url.searchParams.get('models')) {
  195. selectedModels = $page.url.searchParams.get('models')?.split(',');
  196. } else if ($settings?.models) {
  197. selectedModels = $settings?.models;
  198. } else if ($config?.default_models) {
  199. console.log($config?.default_models.split(',') ?? '');
  200. selectedModels = $config?.default_models.split(',');
  201. } else {
  202. selectedModels = [''];
  203. }
  204. if ($page.url.searchParams.get('q')) {
  205. prompt = $page.url.searchParams.get('q') ?? '';
  206. if (prompt) {
  207. await tick();
  208. submitPrompt(prompt);
  209. }
  210. }
  211. selectedModels = selectedModels.map((modelId) =>
  212. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  213. );
  214. const userSettings = await getUserSettings(localStorage.token);
  215. if (userSettings) {
  216. settings.set(userSettings.ui);
  217. } else {
  218. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  219. }
  220. const chatInput = document.getElementById('chat-textarea');
  221. setTimeout(() => chatInput?.focus(), 0);
  222. };
/**
 * Load the chat identified by `chatIdProp` into component state.
 * Returns true on success; returns null/undefined on failure, in which case
 * the reactive caller redirects to '/'.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	// NOTE(review): the catch parameter shadows the `error` import from '@sveltejs/kit'.
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();

		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Legacy chats may store a single model value instead of a list.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Legacy chats may lack a history tree; rebuild one from the flat message list.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			// Load base UI settings (server copy preferred, localStorage fallback)...
			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// ...then overlay this chat's own system prompt and generation params.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the last message done so the UI doesn't render it as still streaming.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  266. const scrollToBottom = async () => {
  267. await tick();
  268. if (messagesContainerElement) {
  269. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  270. }
  271. };
  272. const createMessagesList = (responseMessageId) => {
  273. const message = history.messages[responseMessageId];
  274. if (message.parentId) {
  275. return [...createMessagesList(message.parentId), message];
  276. } else {
  277. return [message];
  278. }
  279. };
/**
 * Post-stream hook: render any mermaid diagrams in the finished response and
 * notify the backend that the completion finished, merging any server-side
 * message edits back into the local history tree.
 */
const chatCompletedHandler = async (modelId, responseMessageId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});

	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			info: m.info ? m.info : undefined,
			timestamp: m.timestamp
		})),
		chat_id: $chatId,
		session_id: $socket?.id,
		id: responseMessageId
	}).catch((error) => {
		toast.error(error);
		// NOTE(review): assumes `messages` is non-empty — confirm callers guarantee it.
		messages.at(-1).error = { content: error };

		return null;
	});

	if (res !== null) {
		// Update chat history with the new messages. If the backend changed a
		// message's content, keep the local text around as `originalContent`.
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
  314. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  315. return setInterval(() => {
  316. $socket?.emit('usage', {
  317. action: 'chat',
  318. model: modelId,
  319. chat_id: chatId
  320. });
  321. }, 1000);
  322. };
  323. //////////////////////////
  324. // Chat functions
  325. //////////////////////////
/**
 * Validate input state and, if OK, append the user's message to the history
 * tree and fan generation out to every selected model via sendPrompt.
 * Returns the per-model responses (empty array when validation fails).
 */
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Blank out any selected model ids that are no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (messages.length != 0 && messages.at(-1).error) {
		// Error in response
		toast.error(
			$i18n.t(
				`Oops! There was an error in the previous response. Please try again or contact admin.`
			)
		);
	} else if (
		files.length > 0 &&
		files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');

		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		// Snapshot attachments before clearing the input state.
		const _files = JSON.parse(JSON.stringify(files));
		files = [];

		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicate the model list while preserving order.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}

	return _responses;
};
/**
 * Fan a prompt out to one or more models: create a placeholder assistant
 * message per model, persist a brand-new chat on the first exchange, then run
 * the model-specific completion calls in parallel.
 * Returns one response (content string or null) per model.
 */
const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
		? [atSelectedModel.id]
		: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// Unsaved mode: use a sentinel id so the rest of the flow still works.
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id so late stream chunks can detect navigation away.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					// Warn only — the request is still sent.
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];

				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						// Pull relevant memories and format them as a dated, numbered list.
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents[0].reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[0][index].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
								}, '');
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat usage reporting for the duration of this generation.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));

	return _responses;
};
/**
 * Stream a completion from the Ollama backend into `responseMessage`.
 *
 * Builds the request body (optional system prompt with user context, image
 * attachments only on the latest image-bearing user message, knowledge/file
 * attachments), then consumes the newline-delimited JSON stream, updating the
 * message content, citations and final stats as chunks arrive. Persists the
 * chat and auto-generates a title after the first exchange.
 * Returns the final response content, or null if the request failed.
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate(
						$settings?.system ?? '',
						$user.name,
						$settings?.userLocation
							? await getAndUpdateUserLocation(localStorage.token)
							: undefined
					)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist
			// (strip the data-URL prefix up to and including the comma).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect file attachments: model knowledge plus doc/file/collection/web
	// results from the last user message and the response placeholder.
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}

	const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
	files = [
		...files,
		...(lastUserMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? []),
		...(responseMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? [])
	].filter(
		// Remove duplicates
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		stream: true,
		model: model.id,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop sequences are stored URI-encoded with escapes; decode each one.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			// NOTE(review): maps the OpenAI-style frequency_penalty setting onto
			// Ollama's repeat_penalty — confirm this is the intended translation.
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
		files: files.length > 0 ? files : undefined,
		session_id: $socket?.id,
		chat_id: $chatId,
		id: responseMessageId
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// Decode the byte stream and re-chunk it on newlines (NDJSON).
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user aborted, or the user navigated
			// to a different chat while streaming.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model.id, responseMessageId, messages);
				}

				_response = responseMessage.content;
				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						if ('detail' in data) {
							// Backend signalled an error payload — surface it via catch below.
							throw data;
						}

						if (data.done == false) {
							// Skip a leading bare newline so responses don't start blank.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;

								const sentences = extractSentencesForAudio(responseMessage.content);
								sentences.pop();

								// dispatch only last sentence and make sure it hasn't been dispatched before
								if (
									sentences.length > 0 &&
									sentences[sentences.length - 1] !== responseMessage.lastSentence
								) {
									responseMessage.lastSentence = sentences[sentences.length - 1];
									eventTarget.dispatchEvent(
										new CustomEvent('chat', {
											detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
										})
									);
								}

								messages = messages;
							}
						} else {
							// Final chunk: record completion stats and run end-of-response UX.
							responseMessage.done = true;

							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}

							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;

							if ($settings.notificationEnabled && !document.hasFocus()) {
								// Constructing the Notification displays it; the variable is unused.
								const notification = new Notification(`${model.id}`, {
									body: responseMessage.content,
									icon: `${WEBUI_BASE_URL}/static/favicon.png`
								});
							}

							if ($settings?.responseAutoCopy ?? false) {
								copyToClipboard(responseMessage.content);
							}

							if ($settings.responseAutoPlayback && !$showCallOverlay) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		// Persist the updated conversation if the user is still on this chat.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed before a stream was established.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	// Flush any trailing partial sentence to audio listeners, then signal finish.
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}
	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange complete: move the URL to /c/<id> and auto-title the chat.
	if (messages.length == 2 && messages.at(1).content !== '') {
		// NOTE(review): `history` here is the component's chat-history object
		// (it shadows window.history), so `history.state` is undefined —
		// this likely meant window.history.state; confirm before changing.
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
/**
 * Streams a chat completion for `userPrompt` from an OpenAI-compatible backend
 * into the response message identified by `responseMessageId`.
 *
 * Side effects: mutates `history.messages[responseMessageId]` in place,
 * reassigns `messages` to trigger Svelte reactivity, dispatches
 * 'chat:start' / 'chat' / 'chat:finish' events on `eventTarget`, may persist
 * the chat and set a title. Returns the final response text (or null).
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Start from the knowledge collections attached to the model itself.
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}

	const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);

	// Merge in files from the latest user message and from the response
	// message (e.g. web-search results), then drop duplicates by deep
	// (JSON) equality.
	files = [
		...files,
		...(lastUserMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? []),
		...(responseMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? [])
	].filter(
		// Remove duplicates
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	scrollToBottom();

	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				stream: true,
				model: model.id,
				// Only request usage stats when the model declares the capability.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
							}
						: undefined,
				messages: [
					// Optional system message built from the user's configured
					// system prompt and/or per-message user context.
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate(
									$settings?.system ?? '',
									$user.name,
									$settings?.userLocation
										? await getAndUpdateUserLocation(localStorage.token)
										: undefined
								)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
										: ''
								}`
							}
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages carrying images use the multi-part
						// content format; the last message prefers raContent
						// (RAG-augmented content) when present.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
								}
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
								})
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored URI/JSON-encoded in settings;
				// decode each back to its literal form.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
				files: files.length > 0 ? files : undefined,
				session_id: $socket?.id,
				chat_id: $chatId,
				id: responseMessageId
			},
			`${WEBUI_BASE_URL}/api`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop consuming when the stream finishes, the user aborts,
				// or the user has navigated to a different chat.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						// NOTE: this local `messages` intentionally shadows the
						// component-level `messages` for the completed-handler call.
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, responseMessageId, messages);
					}

					_response = responseMessage.content;
					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline so responses don't start blank.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;

					const sentences = extractSentencesForAudio(responseMessage.content);
					sentences.pop();

					// dispatch only last sentence and make sure it hasn't been dispatched before
					if (
						sentences.length > 0 &&
						sentences[sentences.length - 1] !== responseMessage.lastSentence
					) {
						responseMessage.lastSentence = sentences[sentences.length - 1];
						eventTarget.dispatchEvent(
							new CustomEvent('chat', {
								detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
							})
						);
					}

					// Self-assignment triggers Svelte reactivity for the mutated message.
					messages = messages;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			// Desktop notification when the tab is not focused.
			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`${model.id}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			if ($settings.responseAutoPlayback && !$showCallOverlay) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			// Persist only if the user is still on this chat.
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	// Flush the trailing (possibly incomplete) sentence for audio playback.
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}
	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange in a new chat: update the URL and auto-title it.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
  1007. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1008. let errorMessage = '';
  1009. let innerError;
  1010. if (error) {
  1011. innerError = error;
  1012. } else if (res !== null) {
  1013. innerError = await res.json();
  1014. }
  1015. console.error(innerError);
  1016. if ('detail' in innerError) {
  1017. toast.error(innerError.detail);
  1018. errorMessage = innerError.detail;
  1019. } else if ('error' in innerError) {
  1020. if ('message' in innerError.error) {
  1021. toast.error(innerError.error.message);
  1022. errorMessage = innerError.error.message;
  1023. } else {
  1024. toast.error(innerError.error);
  1025. errorMessage = innerError.error;
  1026. }
  1027. } else if ('message' in innerError) {
  1028. toast.error(innerError.message);
  1029. errorMessage = innerError.message;
  1030. }
  1031. responseMessage.error = {
  1032. content:
  1033. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1034. provider: model.name ?? model.id
  1035. }) +
  1036. '\n' +
  1037. errorMessage
  1038. };
  1039. responseMessage.done = true;
  1040. messages = messages;
  1041. };
  1042. const stopResponse = () => {
  1043. stopResponseFlag = true;
  1044. console.log('stopResponse');
  1045. };
  1046. const regenerateResponse = async (message) => {
  1047. console.log('regenerateResponse');
  1048. if (messages.length != 0) {
  1049. let userMessage = history.messages[message.parentId];
  1050. let userPrompt = userMessage.content;
  1051. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1052. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1053. await sendPrompt(userPrompt, userMessage.id);
  1054. } else {
  1055. // If there are multiple models selected, use the model of the response message for regeneration
  1056. // e.g. many model chat
  1057. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  1058. }
  1059. }
  1060. };
  1061. const continueGeneration = async () => {
  1062. console.log('continueGeneration');
  1063. const _chatId = JSON.parse(JSON.stringify($chatId));
  1064. if (messages.length != 0 && messages.at(-1).done == true) {
  1065. const responseMessage = history.messages[history.currentId];
  1066. responseMessage.done = false;
  1067. await tick();
  1068. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1069. if (model) {
  1070. if (model?.owned_by === 'openai') {
  1071. await sendPromptOpenAI(
  1072. model,
  1073. history.messages[responseMessage.parentId].content,
  1074. responseMessage.id,
  1075. _chatId
  1076. );
  1077. } else
  1078. await sendPromptOllama(
  1079. model,
  1080. history.messages[responseMessage.parentId].content,
  1081. responseMessage.id,
  1082. _chatId
  1083. );
  1084. }
  1085. } else {
  1086. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1087. }
  1088. };
  1089. const generateChatTitle = async (userPrompt) => {
  1090. if ($settings?.title?.auto ?? true) {
  1091. const title = await generateTitle(
  1092. localStorage.token,
  1093. selectedModels[0],
  1094. userPrompt,
  1095. $chatId
  1096. ).catch((error) => {
  1097. console.error(error);
  1098. return 'New Chat';
  1099. });
  1100. return title;
  1101. } else {
  1102. return `${userPrompt}`;
  1103. }
  1104. };
  1105. const setChatTitle = async (_chatId, _title) => {
  1106. if (_chatId === $chatId) {
  1107. title = _title;
  1108. }
  1109. if ($settings.saveChatHistory ?? true) {
  1110. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1111. await chats.set(await getChatList(localStorage.token));
  1112. }
  1113. };
  1114. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1115. const responseMessage = history.messages[responseId];
  1116. const userMessage = history.messages[parentId];
  1117. responseMessage.statusHistory = [
  1118. {
  1119. done: false,
  1120. action: 'web_search',
  1121. description: $i18n.t('Generating search query')
  1122. }
  1123. ];
  1124. messages = messages;
  1125. const prompt = userMessage.content;
  1126. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1127. (error) => {
  1128. console.log(error);
  1129. return prompt;
  1130. }
  1131. );
  1132. if (!searchQuery) {
  1133. toast.warning($i18n.t('No search query generated'));
  1134. responseMessage.statusHistory.push({
  1135. done: true,
  1136. error: true,
  1137. action: 'web_search',
  1138. description: 'No search query generated'
  1139. });
  1140. messages = messages;
  1141. }
  1142. responseMessage.statusHistory.push({
  1143. done: false,
  1144. action: 'web_search',
  1145. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1146. });
  1147. messages = messages;
  1148. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1149. console.log(error);
  1150. toast.error(error);
  1151. return null;
  1152. });
  1153. if (results) {
  1154. responseMessage.statusHistory.push({
  1155. done: true,
  1156. action: 'web_search',
  1157. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1158. query: searchQuery,
  1159. urls: results.filenames
  1160. });
  1161. if (responseMessage?.files ?? undefined === undefined) {
  1162. responseMessage.files = [];
  1163. }
  1164. responseMessage.files.push({
  1165. collection_name: results.collection_name,
  1166. name: searchQuery,
  1167. type: 'web_search_results',
  1168. urls: results.filenames
  1169. });
  1170. messages = messages;
  1171. } else {
  1172. responseMessage.statusHistory.push({
  1173. done: true,
  1174. error: true,
  1175. action: 'web_search',
  1176. description: 'No search results found'
  1177. });
  1178. messages = messages;
  1179. }
  1180. };
  1181. const getTags = async () => {
  1182. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1183. return [];
  1184. });
  1185. };
  1186. </script>
<!-- Document title: chat title (truncated to 30 chars) suffixed with the app name -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Hidden audio element used for TTS playback -->
<audio id="audioElement" src="" style="display: none;" />

{#if $showCallOverlay}
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}

<!-- Render either a brand-new chat (no chatIdProp) or an existing chat once loaded -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<!-- Optional user-configured background image with a readability gradient overlay -->
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>

			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Announcement banners: only on an empty, unsaved chat; dismissed ids persist in localStorage -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<!-- Scrollable message area: autoScroll stays on while the user is within 5px of the bottom -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:selectedToolIds
				bind:webSearchEnabled
				bind:atSelectedModel
				availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model?.info?.meta?.toolIds ?? false) {
						return [...new Set([...a, ...model.info.meta.toolIds])];
					}
					return a;
				}, [])}
				transparentBackground={$settings?.backgroundImageUrl ?? false}
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}