Chat.svelte 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay,
  25. tools
  26. } from '$lib/stores';
  27. import {
  28. convertMessagesToHistory,
  29. copyToClipboard,
  30. extractSentencesForAudio,
  31. getUserPosition,
  32. promptTemplate,
  33. splitStream
  34. } from '$lib/utils';
  35. import { generateChatCompletion } from '$lib/apis/ollama';
  36. import {
  37. addTagById,
  38. createNewChat,
  39. deleteTagById,
  40. getAllChatTags,
  41. getChatById,
  42. getChatList,
  43. getTagsById,
  44. updateChatById
  45. } from '$lib/apis/chats';
  46. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  47. import { runWebSearch } from '$lib/apis/rag';
  48. import { createOpenAITextStream } from '$lib/apis/streaming';
  49. import { queryMemory } from '$lib/apis/memories';
  50. import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
  51. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  52. import Banner from '../common/Banner.svelte';
  53. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  54. import Messages from '$lib/components/chat/Messages.svelte';
  55. import Navbar from '$lib/components/layout/Navbar.svelte';
  56. import CallOverlay from './MessageInput/CallOverlay.svelte';
  57. import { error } from '@sveltejs/kit';
// i18n instance provided by the app layout via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (`/c/[id]`); empty string means "new chat".
export let chatIdProp = '';
let loaded = false;

// Local event bus used to notify children (e.g. CallOverlay) about streaming
// progress: 'chat:start', 'chat', 'chat:finish'.
const eventTarget = new EventTarget();

let stopResponseFlag = false; // set by the UI to abort an in-flight generation
let autoScroll = true; // keep the view pinned to the newest message
let processing = '';
let messagesContainerElement: HTMLDivElement;

let showModelSelector = true;

// Models chosen in the selector; '' is the "nothing selected" placeholder.
let selectedModels = [''];
// Model temporarily targeted via the "@model" mention syntax, if any.
let atSelectedModel: Model | undefined;

let selectedModelIds = [];
// The @-mention, when present, overrides the selector choice.
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;

let selectedToolIds = [];
let webSearchEnabled = false;

let chat = null;
let tags = [];
let title = '';

let prompt = '';
let files = [];
// Linear view of the active conversation branch, derived from `history` below.
let messages = [];
// Message tree: `messages` maps id -> message (with parentId/childrenIds),
// `currentId` is the leaf of the currently displayed branch.
let history = {
	messages: {},
	currentId: null
};

// Rebuild the linear `messages` list by walking parent links from the current
// leaf up to the root, then reversing (via unshift) into display order.
// NOTE(review): the loop tests `!== null`; a dangling parentId would yield
// `undefined` and crash on `.parentId` — assumes history is always consistent.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// Whenever the route supplies a chat id, load that chat. On success, scroll
// to the bottom and focus the input; on failure, bail out to the home page.
$: if (chatIdProp) {
	// Async IIFE because reactive statements themselves cannot be awaited.
	(async () => {
		console.log(chatIdProp);
		if (chatIdProp && (await loadChat())) {
			await tick();
			loaded = true;
			// Defer so the messages have rendered before measuring scroll height.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  109. const chatEventHandler = async (data) => {
  110. if (data.chat_id === $chatId) {
  111. await tick();
  112. console.log(data);
  113. let message = history.messages[data.message_id];
  114. const status = {
  115. done: data?.data?.done ?? null,
  116. description: data?.data?.status ?? null
  117. };
  118. if (message.statusHistory) {
  119. message.statusHistory.push(status);
  120. } else {
  121. message.statusHistory = [status];
  122. }
  123. messages = messages;
  124. }
  125. };
  126. onMount(async () => {
  127. const onMessageHandler = async (event) => {
  128. if (event.origin === window.origin) {
  129. // Replace with your iframe's origin
  130. console.log('Message received from iframe:', event.data);
  131. if (event.data.type === 'input:prompt') {
  132. console.log(event.data.text);
  133. const inputElement = document.getElementById('chat-textarea');
  134. if (inputElement) {
  135. prompt = event.data.text;
  136. inputElement.focus();
  137. }
  138. }
  139. if (event.data.type === 'action:submit') {
  140. console.log(event.data.text);
  141. if (prompt !== '') {
  142. await tick();
  143. submitPrompt(prompt);
  144. }
  145. }
  146. if (event.data.type === 'input:prompt:submit') {
  147. console.log(event.data.text);
  148. if (prompt !== '') {
  149. await tick();
  150. submitPrompt(event.data.text);
  151. }
  152. }
  153. }
  154. };
  155. window.addEventListener('message', onMessageHandler);
  156. $socket.on('chat-events', chatEventHandler);
  157. if (!$chatId) {
  158. chatId.subscribe(async (value) => {
  159. if (!value) {
  160. await initNewChat();
  161. }
  162. });
  163. } else {
  164. if (!($settings.saveChatHistory ?? true)) {
  165. await goto('/');
  166. }
  167. }
  168. return () => {
  169. window.removeEventListener('message', onMessageHandler);
  170. $socket.off('chat-events');
  171. };
  172. });
  173. //////////////////////////
  174. // Web functions
  175. //////////////////////////
/**
 * Reset all chat state for a brand-new conversation: clears the URL, the
 * title/messages/history, picks default models (URL param > user settings >
 * server config), optionally auto-submits a `?q=` prompt, and reloads user
 * settings. Finally focuses the input textarea.
 */
const initNewChat = async () => {
	// NOTE(review): the local `history` (message tree) shadows window.history
	// here, so `history.state` is the tree object's (undefined) `state` —
	// presumably harmless, but confirm this was intentional.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Model selection priority: ?models= URL param, then saved user settings,
	// then server-configured defaults, else the empty placeholder.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Deep-link support: ?q= pre-fills and immediately submits a prompt.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Drop any selected model id that is no longer available on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		// Fall back to locally cached settings when the server has none.
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	// Defer focus until after the current render pass.
	setTimeout(() => chatInput?.focus(), 0);
};
/**
 * Load the chat identified by `chatIdProp` from the backend and hydrate
 * component state (models, history, title, settings overrides).
 *
 * Returns true on success; null when the chat record has no content; and
 * (implicitly) undefined when the chat itself could not be fetched — the
 * caller only checks truthiness, so all failure shapes route to '/'.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		// Fetch failed (missing chat / auth): leave the page.
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chat records may store a single model (or none); normalize
			// to an array either way.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older records have a flat `messages` list instead of a history
			// tree; convert on the fly.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / params override the global settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the displayed leaf as done so the UI does not treat a
			// previously interrupted response as still streaming.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  258. const scrollToBottom = async () => {
  259. await tick();
  260. if (messagesContainerElement) {
  261. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  262. }
  263. };
  264. const createMessagesList = (responseMessageId) => {
  265. const message = history.messages[responseMessageId];
  266. if (message.parentId) {
  267. return [...createMessagesList(message.parentId), message];
  268. } else {
  269. return [message];
  270. }
  271. };
/**
 * Post-completion hook: renders any mermaid diagrams in the finished
 * response, then notifies the backend (`chatCompleted`) with the final
 * message list so server-side pipelines can post-process it. Server-modified
 * messages are merged back into local history, preserving the pre-edit text
 * as `originalContent`.
 */
const chatCompletedHandler = async (modelId, responseMessageId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});

	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			info: m.info ? m.info : undefined,
			timestamp: m.timestamp
		})),
		chat_id: $chatId,
		session_id: $socket?.id,
		id: responseMessageId
	}).catch((error) => {
		// Surface the failure on the last message instead of throwing.
		toast.error(error);
		messages.at(-1).error = { content: error };
		return null;
	});

	if (res !== null) {
		// Update chat history with the new messages
		// NOTE(review): assumes every id the server returns already exists in
		// `history.messages` — a new id would crash on `.content` below.
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				// Keep the original text only when the server actually changed it.
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
  306. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  307. return setInterval(() => {
  308. $socket?.emit('usage', {
  309. action: 'chat',
  310. model: modelId,
  311. chat_id: chatId
  312. });
  313. }, 1000);
  314. };
  315. //////////////////////////
  316. // Chat functions
  317. //////////////////////////
/**
 * Entry point for user-submitted prompts. Validates preconditions (model
 * selected, previous response finished and error-free, file uploads done),
 * then creates the user message in the history tree and delegates to
 * sendPrompt. Returns the list of model responses (empty if rejected).
 * NOTE(review): the `_raw` option is accepted but never read here — presumably
 * consumed by a caller convention; verify before removing.
 */
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Invalidate any selected model that no longer exists on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (messages.length != 0 && messages.at(-1).error) {
		// Error in response
		toast.error(
			$i18n.t(
				`Oops! There was an error in the previous response. Please try again or contact admin.`
			)
		);
	} else if (
		files.length > 0 &&
		files.filter((file) => file.type !== 'image' && file.status !== 'processed').length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');
		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		// Snapshot attachments before clearing the input state.
		const _files = JSON.parse(JSON.stringify(files));
		files = [];
		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicated list of target models for this message.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		_responses = await sendPrompt(userPrompt, userMessageId, { newChat: true });
	}

	return _responses;
};
/**
 * Fan a prompt out to one or more models. Creates a placeholder assistant
 * message per model, persists a brand-new chat if this is the first
 * exchange, then runs each model's completion in parallel (OpenAI- or
 * Ollama-style backends). Returns the per-model response contents.
 *
 * @param prompt    user text to send
 * @param parentId  id of the user message these responses branch from
 * @param modelId   optional single-model override (used for regeneration)
 * @param newChat   when true, persist the chat after the first exchange
 */
const sendPrompt = async (prompt, parentId, { modelId = null, newChat = false } = {}) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '', // filled in incrementally while streaming
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	// (length == 2: the first user message plus the placeholder response).
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			// History saving disabled: keep the chat client-side only.
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id: the store may change if the user navigates away
	// while responses are still streaming.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				// Warn (but still proceed) when the model declares no vision support.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];

				// Optional memory feature: prepend dated memory snippets as context.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents[0].reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[0][index].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									return `${acc}${index + 1}. [${createdAtDate}]. ${doc}\n`;
								}, '');
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat so the backend knows this model/chat is in use.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}

				_responses.push(_response);

				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	// Refresh the sidebar chat list (titles/order may have changed).
	await chats.set(await getChatList(localStorage.token));

	return _responses;
};
/**
 * Stream a completion from an Ollama-backed model into `responseMessage`.
 *
 * Builds the request body (optional system prompt with user context, image
 * attachments on the latest image-bearing message only, knowledge/RAG files),
 * then consumes the newline-delimited JSON stream: appending content chunks,
 * emitting sentence-level 'chat' events for audio, and finalizing stats /
 * notifications on the terminal chunk. Persists the chat afterwards and, for
 * the first exchange, generates and sets the chat title.
 *
 * @returns the final response text, or null if the request failed outright
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	const messagesBody = [
		// Optional leading system message (template-expanded, plus memory context).
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate(
						$settings?.system ?? '',
						$user.name,
						$settings?.userLocation
							? await getAndUpdateUserLocation(localStorage.token)
							: undefined
					)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
							: ''
					}`
				}
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist
			// (strip the data-URL prefix; Ollama expects raw base64).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect RAG file references: model knowledge plus any doc/collection
	// attachments on the latest user message or the response itself.
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}

	const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
	files = [
		...files,
		...(lastUserMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? []),
		...(responseMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? [])
	].filter(
		// Remove duplicates
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	// Notify listeners (e.g. call overlay) that streaming is starting.
	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		stream: true,
		model: model.id,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop sequences are stored URI/JSON-escaped; decode before sending.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						)
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
		files: files.length > 0 ? files : undefined,
		session_id: $socket?.id,
		chat_id: $chatId,
		id: responseMessageId
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// Decode the byte stream into newline-delimited JSON lines.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Terminate on stream end, user stop, or navigation to another chat.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					// Normal completion: run post-completion pipeline.
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model.id, responseMessageId, messages);
				}

				_response = responseMessage.content;
				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						// Backend signalled an error payload mid-stream.
						if ('detail' in data) {
							throw data;
						}

						if (data.done == false) {
							// Skip a leading bare-newline chunk.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;

								const sentences = extractSentencesForAudio(responseMessage.content);
								// Drop the (possibly incomplete) trailing sentence.
								sentences.pop();

								// dispatch only last sentence and make sure it hasn't been dispatched before
								if (
									sentences.length > 0 &&
									sentences[sentences.length - 1] !== responseMessage.lastSentence
								) {
									responseMessage.lastSentence = sentences[sentences.length - 1];
									eventTarget.dispatchEvent(
										new CustomEvent('chat', {
											detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
										})
									);
								}

								messages = messages;
							}
						} else {
							// Terminal chunk: finalize message, stats, and UX side effects.
							responseMessage.done = true;

							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}

							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;

							// Desktop notification when the tab is unfocused.
							if ($settings.notificationEnabled && !document.hasFocus()) {
								const notification = new Notification(`${model.id}`, {
									body: responseMessage.content,
									icon: `${WEBUI_BASE_URL}/static/favicon.png`
								});
							}

							if ($settings?.responseAutoCopy ?? false) {
								copyToClipboard(responseMessage.content);
							}

							if ($settings.responseAutoPlayback && !$showCallOverlay) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		// Persist only if the user is still on this chat.
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed before streaming: surface the backend error if any.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	// Flush the final (possibly partial) sentence to audio listeners.
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}

	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange of a new chat: switch the URL to /c/<id> and auto-title.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
// Streams a chat completion from an OpenAI-compatible backend into the
// placeholder assistant message identified by `responseMessageId`.
//   model             - model entry from $models
//   userPrompt        - the user's prompt text (used for auto title generation)
//   responseMessageId - id of the assistant message in `history` to fill
//   _chatId           - chat id captured at send time; compared against $chatId
//                       so a stale stream stops writing after a chat switch
// Returns the final response content, or null if the stream never completed.
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Start with the model's attached knowledge files, if any.
	let files = [];
	if (model?.info?.meta?.knowledge ?? false) {
		files = model.info.meta.knowledge;
	}
	// Merge in doc/file/collection/web-search attachments from the latest user
	// message and from the response message itself, de-duplicated by JSON identity.
	const lastUserMessage = messages.filter((message) => message.role === 'user').at(-1);
	files = [
		...files,
		...(lastUserMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? []),
		...(responseMessage?.files?.filter((item) =>
			['doc', 'file', 'collection', 'web_search_results'].includes(item.type)
		) ?? [])
	].filter(
		// Remove duplicates
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	scrollToBottom();

	eventTarget.dispatchEvent(
		new CustomEvent('chat:start', {
			detail: {
				id: responseMessageId
			}
		})
	);
	await tick();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				stream: true,
				model: model.id,
				// Only request usage stats when the model advertises the capability.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
							}
						: undefined,
				messages: [
					// Prepend a system message when a system prompt or per-message
					// user context exists; otherwise the `undefined` entry is dropped
					// by the content filter below.
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate(
									$settings?.system ?? '',
									$user.name,
									$settings?.userLocation
										? await getAndUpdateUserLocation(localStorage.token)
										: undefined
								)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${responseMessage?.userContext ?? ''}`
										: ''
								}`
							}
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages carrying images use the multi-part content format.
						// The last message prefers `raContent` when present (presumably the
						// retrieval-augmented text — TODO confirm against the RAG pipeline).
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
								}
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
								})
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Decode each stop string (they appear to be stored URI-encoded with
				// escaped quotes; the JSON.parse round-trip unescapes them).
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				tool_ids: selectedToolIds.length > 0 ? selectedToolIds : undefined,
				files: files.length > 0 ? files : undefined,
				session_id: $socket?.id,
				chat_id: $chatId,
				id: responseMessageId
			},
			`${WEBUI_BASE_URL}/api`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Finish when the stream ends, the user pressed stop, or the user
				// navigated to a different chat while this stream was running.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						// Local `messages` shadows the component state on purpose here.
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, responseMessageId, messages);
					}

					_response = responseMessage.content;
					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Drop a leading bare newline; otherwise append the streamed chunk.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;

					const sentences = extractSentencesForAudio(responseMessage.content);
					sentences.pop();

					// dispatch only last sentence and make sure it hasn't been dispatched before
					if (
						sentences.length > 0 &&
						sentences[sentences.length - 1] !== responseMessage.lastSentence
					) {
						responseMessage.lastSentence = sentences[sentences.length - 1];
						eventTarget.dispatchEvent(
							new CustomEvent('chat', {
								detail: { id: responseMessageId, content: sentences[sentences.length - 1] }
							})
						);
					}

					messages = messages;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			// Desktop notification when the window is unfocused; the constructed
			// Notification object is used only for its side effect.
			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`${model.id}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			// Auto-playback clicks the message's speak button (skipped in call mode).
			if ($settings.responseAutoPlayback && !$showCallOverlay) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			// Persist only if the user is still on this chat and saving is enabled.
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	// Flush any trailing sentence to audio listeners, then signal completion.
	let lastSentence = extractSentencesForAudio(responseMessage.content)?.at(-1) ?? '';
	if (lastSentence) {
		eventTarget.dispatchEvent(
			new CustomEvent('chat', {
				detail: { id: responseMessageId, content: lastSentence }
			})
		);
	}
	eventTarget.dispatchEvent(
		new CustomEvent('chat:finish', {
			detail: {
				id: responseMessageId,
				content: responseMessage.content
			}
		})
	);

	if (autoScroll) {
		scrollToBottom();
	}

	// First exchange (user + assistant): set the URL and auto-generate a title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);

		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
  999. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  1000. let errorMessage = '';
  1001. let innerError;
  1002. if (error) {
  1003. innerError = error;
  1004. } else if (res !== null) {
  1005. innerError = await res.json();
  1006. }
  1007. console.error(innerError);
  1008. if ('detail' in innerError) {
  1009. toast.error(innerError.detail);
  1010. errorMessage = innerError.detail;
  1011. } else if ('error' in innerError) {
  1012. if ('message' in innerError.error) {
  1013. toast.error(innerError.error.message);
  1014. errorMessage = innerError.error.message;
  1015. } else {
  1016. toast.error(innerError.error);
  1017. errorMessage = innerError.error;
  1018. }
  1019. } else if ('message' in innerError) {
  1020. toast.error(innerError.message);
  1021. errorMessage = innerError.message;
  1022. }
  1023. responseMessage.error = {
  1024. content:
  1025. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  1026. provider: model.name ?? model.id
  1027. }) +
  1028. '\n' +
  1029. errorMessage
  1030. };
  1031. responseMessage.done = true;
  1032. messages = messages;
  1033. };
// Signals the active generation loop(s) to stop streaming; the flag is
// polled inside sendPromptOllama / sendPromptOpenAI and reset there once
// the stream has been aborted.
const stopResponse = () => {
	stopResponseFlag = true;
	console.log('stopResponse');
};
  1038. const regenerateResponse = async (message) => {
  1039. console.log('regenerateResponse');
  1040. if (messages.length != 0) {
  1041. let userMessage = history.messages[message.parentId];
  1042. let userPrompt = userMessage.content;
  1043. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  1044. // If user message has only one model selected, sendPrompt automatically selects it for regeneration
  1045. await sendPrompt(userPrompt, userMessage.id);
  1046. } else {
  1047. // If there are multiple models selected, use the model of the response message for regeneration
  1048. // e.g. many model chat
  1049. await sendPrompt(userPrompt, userMessage.id, { modelId: message.model });
  1050. }
  1051. }
  1052. };
  1053. const continueGeneration = async () => {
  1054. console.log('continueGeneration');
  1055. const _chatId = JSON.parse(JSON.stringify($chatId));
  1056. if (messages.length != 0 && messages.at(-1).done == true) {
  1057. const responseMessage = history.messages[history.currentId];
  1058. responseMessage.done = false;
  1059. await tick();
  1060. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  1061. if (model) {
  1062. if (model?.owned_by === 'openai') {
  1063. await sendPromptOpenAI(
  1064. model,
  1065. history.messages[responseMessage.parentId].content,
  1066. responseMessage.id,
  1067. _chatId
  1068. );
  1069. } else
  1070. await sendPromptOllama(
  1071. model,
  1072. history.messages[responseMessage.parentId].content,
  1073. responseMessage.id,
  1074. _chatId
  1075. );
  1076. }
  1077. } else {
  1078. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  1079. }
  1080. };
  1081. const generateChatTitle = async (userPrompt) => {
  1082. if ($settings?.title?.auto ?? true) {
  1083. const title = await generateTitle(
  1084. localStorage.token,
  1085. selectedModels[0],
  1086. userPrompt,
  1087. $chatId
  1088. ).catch((error) => {
  1089. console.error(error);
  1090. return 'New Chat';
  1091. });
  1092. return title;
  1093. } else {
  1094. return `${userPrompt}`;
  1095. }
  1096. };
  1097. const setChatTitle = async (_chatId, _title) => {
  1098. if (_chatId === $chatId) {
  1099. title = _title;
  1100. }
  1101. if ($settings.saveChatHistory ?? true) {
  1102. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  1103. await chats.set(await getChatList(localStorage.token));
  1104. }
  1105. };
  1106. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  1107. const responseMessage = history.messages[responseId];
  1108. const userMessage = history.messages[parentId];
  1109. responseMessage.statusHistory = [
  1110. {
  1111. done: false,
  1112. action: 'web_search',
  1113. description: $i18n.t('Generating search query')
  1114. }
  1115. ];
  1116. messages = messages;
  1117. const prompt = userMessage.content;
  1118. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  1119. (error) => {
  1120. console.log(error);
  1121. return prompt;
  1122. }
  1123. );
  1124. if (!searchQuery) {
  1125. toast.warning($i18n.t('No search query generated'));
  1126. responseMessage.statusHistory.push({
  1127. done: true,
  1128. error: true,
  1129. action: 'web_search',
  1130. description: 'No search query generated'
  1131. });
  1132. messages = messages;
  1133. }
  1134. responseMessage.statusHistory.push({
  1135. done: false,
  1136. action: 'web_search',
  1137. description: $i18n.t(`Searching "{{searchQuery}}"`, { searchQuery })
  1138. });
  1139. messages = messages;
  1140. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  1141. console.log(error);
  1142. toast.error(error);
  1143. return null;
  1144. });
  1145. if (results) {
  1146. responseMessage.statusHistory.push({
  1147. done: true,
  1148. action: 'web_search',
  1149. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  1150. query: searchQuery,
  1151. urls: results.filenames
  1152. });
  1153. if (responseMessage?.files ?? undefined === undefined) {
  1154. responseMessage.files = [];
  1155. }
  1156. responseMessage.files.push({
  1157. collection_name: results.collection_name,
  1158. name: searchQuery,
  1159. type: 'web_search_results',
  1160. urls: results.filenames
  1161. });
  1162. messages = messages;
  1163. } else {
  1164. responseMessage.statusHistory.push({
  1165. done: true,
  1166. error: true,
  1167. action: 'web_search',
  1168. description: 'No search results found'
  1169. });
  1170. messages = messages;
  1171. }
  1172. };
  1173. const getTags = async () => {
  1174. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1175. return [];
  1176. });
  1177. };
  1178. </script>
<!-- Document title: the chat title truncated to 30 chars, plus the app name. -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>
<!-- Hidden audio element; presumably the playback target for response audio — confirm against the TTS handlers. -->
<audio id="audioElement" src="" style="display: none;" />
{#if $showCallOverlay}
	<!-- Voice-call overlay; reuses the same submit/stop handlers as the chat. -->
	<CallOverlay
		{submitPrompt}
		{stopResponse}
		bind:files
		modelId={selectedModelIds?.at(0) ?? null}
		chatId={$chatId}
		{eventTarget}
	/>
{/if}
<!-- Main chat layout: rendered for a new chat immediately, or for an existing
     chat once it has loaded. Width shrinks when the sidebar is open. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<!-- Optional background image with a readability gradient on top. -->
		{#if $settings?.backgroundImageUrl ?? null}
			<div
				class="absolute {$showSidebar
					? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
					: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
				style="background-image: url({$settings.backgroundImageUrl}) "
			/>
			<div
				class="absolute top-0 left-0 w-full h-full bg-gradient-to-t from-white to-white/85 dark:from-gray-900 dark:to-[#171717]/90 z-0"
			/>
		{/if}

		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Banners: shown only on a fresh, empty chat; dismissed ids persist in localStorage. -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar
					? 'md:max-w-[calc(100%-260px)]'
					: ''} z-20"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- Persist the dismissal, pruning ids of banners that no longer exist. -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto z-10">
			<!-- Scrollable message area; autoScroll stays on while the user is
			     within ~5px of the bottom and turns off when they scroll up. -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<!-- Prompt input; tools offered are the union of toolIds across the selected models. -->
			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:selectedToolIds
				bind:webSearchEnabled
				bind:atSelectedModel
				availableToolIds={selectedModelIds.reduce((a, e, i, arr) => {
					const model = $models.find((m) => m.id === e);
					if (model?.info?.meta?.toolIds ?? false) {
						return [...new Set([...a, ...model.info.meta.toolIds])];
					}
					return a;
				}, [])}
				transparentBackground={$settings?.backgroundImageUrl ?? false}
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}