Chat.svelte 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay
  25. } from '$lib/stores';
  26. import {
  27. convertMessagesToHistory,
  28. copyToClipboard,
  29. promptTemplate,
  30. splitStream
  31. } from '$lib/utils';
  32. import { generateChatCompletion } from '$lib/apis/ollama';
  33. import {
  34. addTagById,
  35. createNewChat,
  36. deleteTagById,
  37. getAllChatTags,
  38. getChatById,
  39. getChatList,
  40. getTagsById,
  41. updateChatById
  42. } from '$lib/apis/chats';
  43. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import { createOpenAITextStream } from '$lib/apis/streaming';
  46. import { queryMemory } from '$lib/apis/memories';
  47. import { getUserSettings } from '$lib/apis/users';
  48. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  49. import Banner from '../common/Banner.svelte';
  50. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  51. import Messages from '$lib/components/chat/Messages.svelte';
  52. import Navbar from '$lib/components/layout/Navbar.svelte';
  53. import CallOverlay from './MessageInput/CallOverlay.svelte';
  54. import { error } from '@sveltejs/kit';
// i18next instance provided by the root layout via Svelte context.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (/c/[id]); empty string for a fresh chat.
export let chatIdProp = '';

let loaded = false; // true once the chat (new or loaded) is ready to render
let stopResponseFlag = false; // set by the UI to abort an in-flight generation
let autoScroll = true; // follow the stream to the bottom unless the user scrolled up
let processing = '';
let messagesContainerElement: HTMLDivElement;

let showModelSelector = true;
let selectedModels = ['']; // '' acts as the "no model selected" sentinel
let atSelectedModel: Model | undefined; // model picked via "@" mention; overrides selectedModels
let webSearchEnabled = false;

let chat = null; // chat record returned by the backend (null until loaded/created)
let tags = [];
let title = '';
let prompt = '';
let files = [];

let messages = []; // linear view of the current branch, derived from `history`
// Message tree: every message stored by id; `currentId` points at the leaf
// of the branch currently displayed. NOTE: this shadows window.history.
let history = {
	messages: {},
	currentId: null
};
// Rebuild the linear `messages` list whenever the branch leaf changes: walk
// parent links from `history.currentId` up to the root, prepending as we go.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When the route supplies a chat id, load that chat; on failure go home.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
			// Defer the scroll until the DOM has rendered the messages.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	if (!$chatId) {
		await initNewChat();
	} else {
		// An existing chat id with history-saving disabled cannot be resumed;
		// send the user back to a fresh chat.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
  110. //////////////////////////
  111. // Web functions
  112. //////////////////////////
  113. const initNewChat = async () => {
  114. window.history.replaceState(history.state, '', `/`);
  115. await chatId.set('');
  116. autoScroll = true;
  117. title = '';
  118. messages = [];
  119. history = {
  120. messages: {},
  121. currentId: null
  122. };
  123. if ($page.url.searchParams.get('models')) {
  124. selectedModels = $page.url.searchParams.get('models')?.split(',');
  125. } else if ($settings?.models) {
  126. selectedModels = $settings?.models;
  127. } else if ($config?.default_models) {
  128. console.log($config?.default_models.split(',') ?? '');
  129. selectedModels = $config?.default_models.split(',');
  130. } else {
  131. selectedModels = [''];
  132. }
  133. if ($page.url.searchParams.get('q')) {
  134. prompt = $page.url.searchParams.get('q') ?? '';
  135. if (prompt) {
  136. await tick();
  137. submitPrompt(prompt);
  138. }
  139. }
  140. selectedModels = selectedModels.map((modelId) =>
  141. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  142. );
  143. const userSettings = await getUserSettings(localStorage.token);
  144. if (userSettings) {
  145. settings.set(userSettings.ui);
  146. } else {
  147. settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
  148. }
  149. const chatInput = document.getElementById('chat-textarea');
  150. setTimeout(() => chatInput?.focus(), 0);
  151. };
/**
 * Load the chat identified by `chatIdProp` from the backend and hydrate the
 * local state (models, history tree, title, per-chat settings overrides).
 * Returns true on success; a falsy value when the chat is missing or empty,
 * which sends the caller back to '/'.
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single model value; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat `messages` list instead of a history tree.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / options override the global settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});

			autoScroll = true;
			await tick();

			// Mark the branch leaf as done so a previously interrupted stream
			// does not render as still generating.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
	// NOTE(review): when `chat` is null this falls through and returns
	// undefined, which callers treat the same as null (falsy).
};
  195. const scrollToBottom = async () => {
  196. await tick();
  197. if (messagesContainerElement) {
  198. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  199. }
  200. };
  201. const createMessagesList = (responseMessageId) => {
  202. const message = history.messages[responseMessageId];
  203. if (message.parentId) {
  204. return [...createMessagesList(message.parentId), message];
  205. } else {
  206. return [message];
  207. }
  208. };
/**
 * Notify the backend that a completion finished (after rendering any mermaid
 * diagrams) and merge the server's possibly post-processed messages back into
 * the local history, preserving the pre-edit text under `originalContent`.
 */
const chatCompletedHandler = async (modelId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});

	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			timestamp: m.timestamp
		})),
		chat_id: $chatId
	}).catch((error) => {
		console.error(error);
		return null;
	});

	if (res !== null) {
		// Update chat history with the new messages
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				// Keep the original text if the server altered the content.
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
  239. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  240. return setInterval(() => {
  241. $socket?.emit('usage', {
  242. action: 'chat',
  243. model: modelId,
  244. chat_id: chatId
  245. });
  246. }, 1000);
  247. };
//////////////////////////
// Chat functions
//////////////////////////

/**
 * Validate and submit the user's prompt: appends a user message to the
 * history tree, creates the chat record on the first message, then fans out
 * to the selected models via sendPrompt. Returns the per-model responses.
 */
const submitPrompt = async (userPrompt, _user = null) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Replace model ids that no longer exist with the empty sentinel so the
	// "Model not selected" guard below catches stale selections.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');

		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// Deduplicated list of target models for this turn.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// 'local' marks an unsaved, in-memory-only chat.
				await chatId.set('local');
			}
			await tick();
		}

		files = [];

		// Send prompt
		_responses = await sendPrompt(userPrompt, userMessageId);
	}

	return _responses;
};
/**
 * Fan a prompt out to every target model in parallel: create an assistant
 * placeholder message per model, optionally gather memory context and web
 * search results, then stream via the Ollama or OpenAI pipeline.
 */
const sendPrompt = async (prompt, parentId, modelId = null) => {
	let _responses = [];

	// Snapshot the chat id so navigating away mid-stream can be detected.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		// Target precedence: explicit modelId > "@" mention > selected models.
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				// Warn (but still proceed) when images are sent to a model not
				// flagged as vision-capable.
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				let userContext = null;
				// When the memory feature is enabled, pull relevant memories to
				// inject into the system prompt (formatted "N. [date]. text").
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Heartbeat usage events while this model streams.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				console.log('chatEventEmitter', chatEventEmitter);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));

	return _responses;
};
  423. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  424. const responseMessage = history.messages[responseId];
  425. responseMessage.status = {
  426. done: false,
  427. action: 'web_search',
  428. description: $i18n.t('Generating search query')
  429. };
  430. messages = messages;
  431. const prompt = history.messages[parentId].content;
  432. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
  433. (error) => {
  434. console.log(error);
  435. return prompt;
  436. }
  437. );
  438. if (!searchQuery) {
  439. toast.warning($i18n.t('No search query generated'));
  440. responseMessage.status = {
  441. ...responseMessage.status,
  442. done: true,
  443. error: true,
  444. description: 'No search query generated'
  445. };
  446. messages = messages;
  447. }
  448. responseMessage.status = {
  449. ...responseMessage.status,
  450. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  451. };
  452. messages = messages;
  453. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  454. console.log(error);
  455. toast.error(error);
  456. return null;
  457. });
  458. if (results) {
  459. responseMessage.status = {
  460. ...responseMessage.status,
  461. done: true,
  462. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  463. urls: results.filenames
  464. };
  465. if (responseMessage?.files ?? undefined === undefined) {
  466. responseMessage.files = [];
  467. }
  468. responseMessage.files.push({
  469. collection_name: results.collection_name,
  470. name: searchQuery,
  471. type: 'web_search_results',
  472. urls: results.filenames
  473. });
  474. messages = messages;
  475. } else {
  476. responseMessage.status = {
  477. ...responseMessage.status,
  478. done: true,
  479. error: true,
  480. description: 'No search results found'
  481. };
  482. messages = messages;
  483. }
  484. };
  485. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  486. let _response = null;
  487. const responseMessage = history.messages[responseMessageId];
  488. // Wait until history/message have been updated
  489. await tick();
  490. // Scroll down
  491. scrollToBottom();
  492. const messagesBody = [
  493. $settings.system || (responseMessage?.userContext ?? null)
  494. ? {
  495. role: 'system',
  496. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  497. responseMessage?.userContext ?? null
  498. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  499. : ''
  500. }`
  501. }
  502. : undefined,
  503. ...messages
  504. ]
  505. .filter((message) => message?.content?.trim())
  506. .map((message, idx, arr) => {
  507. // Prepare the base message object
  508. const baseMessage = {
  509. role: message.role,
  510. content: message.content
  511. };
  512. // Extract and format image URLs if any exist
  513. const imageUrls = message.files
  514. ?.filter((file) => file.type === 'image')
  515. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  516. // Add images array only if it contains elements
  517. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  518. baseMessage.images = imageUrls;
  519. }
  520. return baseMessage;
  521. });
  522. let lastImageIndex = -1;
  523. // Find the index of the last object with images
  524. messagesBody.forEach((item, index) => {
  525. if (item.images) {
  526. lastImageIndex = index;
  527. }
  528. });
  529. // Remove images from all but the last one
  530. messagesBody.forEach((item, index) => {
  531. if (index !== lastImageIndex) {
  532. delete item.images;
  533. }
  534. });
  535. let docs = [];
  536. if (model?.info?.meta?.knowledge ?? false) {
  537. docs = model.info.meta.knowledge;
  538. }
  539. docs = [
  540. ...docs,
  541. ...messages
  542. .filter((message) => message?.files ?? null)
  543. .map((message) =>
  544. message.files.filter((item) =>
  545. ['doc', 'collection', 'web_search_results'].includes(item.type)
  546. )
  547. )
  548. .flat(1)
  549. ].filter(
  550. (item, index, array) =>
  551. array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
  552. );
  553. const [res, controller] = await generateChatCompletion(localStorage.token, {
  554. model: model.id,
  555. messages: messagesBody,
  556. options: {
  557. ...($settings.params ?? {}),
  558. stop:
  559. $settings?.params?.stop ?? undefined
  560. ? $settings.params.stop.map((str) =>
  561. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  562. )
  563. : undefined,
  564. num_predict: $settings?.params?.max_tokens ?? undefined,
  565. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  566. },
  567. format: $settings.requestFormat ?? undefined,
  568. keep_alive: $settings.keepAlive ?? undefined,
  569. docs: docs.length > 0 ? docs : undefined,
  570. citations: docs.length > 0,
  571. chat_id: $chatId
  572. });
  573. if (res && res.ok) {
  574. console.log('controller', controller);
  575. const reader = res.body
  576. .pipeThrough(new TextDecoderStream())
  577. .pipeThrough(splitStream('\n'))
  578. .getReader();
  579. while (true) {
  580. const { value, done } = await reader.read();
  581. if (done || stopResponseFlag || _chatId !== $chatId) {
  582. responseMessage.done = true;
  583. messages = messages;
  584. if (stopResponseFlag) {
  585. controller.abort('User: Stop Response');
  586. } else {
  587. const messages = createMessagesList(responseMessageId);
  588. await chatCompletedHandler(model.id, messages);
  589. }
  590. _response = responseMessage.content;
  591. break;
  592. }
  593. try {
  594. let lines = value.split('\n');
  595. for (const line of lines) {
  596. if (line !== '') {
  597. console.log(line);
  598. let data = JSON.parse(line);
  599. if ('citations' in data) {
  600. responseMessage.citations = data.citations;
  601. continue;
  602. }
  603. if ('detail' in data) {
  604. throw data;
  605. }
  606. if (data.done == false) {
  607. if (responseMessage.content == '' && data.message.content == '\n') {
  608. continue;
  609. } else {
  610. responseMessage.content += data.message.content;
  611. messages = messages;
  612. }
  613. } else {
  614. responseMessage.done = true;
  615. if (responseMessage.content == '') {
  616. responseMessage.error = {
  617. code: 400,
  618. content: `Oops! No text generated from Ollama, Please try again.`
  619. };
  620. }
  621. responseMessage.context = data.context ?? null;
  622. responseMessage.info = {
  623. total_duration: data.total_duration,
  624. load_duration: data.load_duration,
  625. sample_count: data.sample_count,
  626. sample_duration: data.sample_duration,
  627. prompt_eval_count: data.prompt_eval_count,
  628. prompt_eval_duration: data.prompt_eval_duration,
  629. eval_count: data.eval_count,
  630. eval_duration: data.eval_duration
  631. };
  632. messages = messages;
  633. if ($settings.notificationEnabled && !document.hasFocus()) {
  634. const notification = new Notification(
  635. selectedModelfile
  636. ? `${
  637. selectedModelfile.title.charAt(0).toUpperCase() +
  638. selectedModelfile.title.slice(1)
  639. }`
  640. : `${model.id}`,
  641. {
  642. body: responseMessage.content,
  643. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  644. }
  645. );
  646. }
  647. if ($settings.responseAutoCopy) {
  648. copyToClipboard(responseMessage.content);
  649. }
  650. if ($settings.responseAutoPlayback && !$showCallOverlay) {
  651. await tick();
  652. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  653. }
  654. }
  655. }
  656. }
  657. } catch (error) {
  658. console.log(error);
  659. if ('detail' in error) {
  660. toast.error(error.detail);
  661. }
  662. break;
  663. }
  664. if (autoScroll) {
  665. scrollToBottom();
  666. }
  667. }
  668. if ($chatId == _chatId) {
  669. if ($settings.saveChatHistory ?? true) {
  670. chat = await updateChatById(localStorage.token, _chatId, {
  671. messages: messages,
  672. history: history,
  673. models: selectedModels
  674. });
  675. await chats.set(await getChatList(localStorage.token));
  676. }
  677. }
  678. } else {
  679. if (res !== null) {
  680. const error = await res.json();
  681. console.log(error);
  682. if ('detail' in error) {
  683. toast.error(error.detail);
  684. responseMessage.error = { content: error.detail };
  685. } else {
  686. toast.error(error.error);
  687. responseMessage.error = { content: error.error };
  688. }
  689. } else {
  690. toast.error(
  691. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  692. );
  693. responseMessage.error = {
  694. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  695. provider: 'Ollama'
  696. })
  697. };
  698. }
  699. responseMessage.done = true;
  700. messages = messages;
  701. }
  702. stopResponseFlag = false;
  703. await tick();
  704. if (autoScroll) {
  705. scrollToBottom();
  706. }
  707. if (messages.length == 2 && messages.at(1).content !== '') {
  708. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  709. const _title = await generateChatTitle(userPrompt);
  710. await setChatTitle(_chatId, _title);
  711. }
  712. return _response;
  713. };
/**
 * Stream a completion from an OpenAI-compatible backend into
 * `responseMessage`: builds the (optionally multimodal) message payload,
 * consumes the text stream, then persists the chat and titles brand-new
 * conversations.
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Collect the model's knowledge docs plus any doc/collection/web-search
	// files referenced by messages, deduplicated structurally via JSON.
	let docs = [];

	if (model?.info?.meta?.knowledge ?? false) {
		docs = model.info.meta.knowledge;
	}

	docs = [
		...docs,
		...messages
			.filter((message) => message?.files ?? null)
			.map((message) =>
				message.files.filter((item) =>
					['doc', 'collection', 'web_search_results'].includes(item.type)
				)
			)
			.flat(1)
	].filter(
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Request token-usage reporting only for models that support it.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
						  }
						: undefined,
				messages: [
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate($settings?.system ?? '', $user.name)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					// User messages with images become content-part arrays; the last
					// message prefers RAG-augmented `raContent` when present.
					// NOTE(review): in `length > 0 ?? false` the `??` applies to the
					// boolean comparison, which is never nullish — dead fallback.
					.map((message, idx, arr) => ({
						role: message.role,
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user aborts, or the user has
				// navigated to a different chat mid-stream.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, messages);
					}

					_response = responseMessage.content;
					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline so responses don't start blank.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`${model.id}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			if ($settings.responseAutoPlayback && !$showCallOverlay) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// NOTE(review): unlike the Ollama path, this does not check that the
	// response content is non-empty before titling; also the local `history`
	// variable shadows window.history, so `history.state` here is undefined —
	// confirm whether window.history.state was intended.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
  895. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  896. let errorMessage = '';
  897. let innerError;
  898. if (error) {
  899. innerError = error;
  900. } else if (res !== null) {
  901. innerError = await res.json();
  902. }
  903. console.error(innerError);
  904. if ('detail' in innerError) {
  905. toast.error(innerError.detail);
  906. errorMessage = innerError.detail;
  907. } else if ('error' in innerError) {
  908. if ('message' in innerError.error) {
  909. toast.error(innerError.error.message);
  910. errorMessage = innerError.error.message;
  911. } else {
  912. toast.error(innerError.error);
  913. errorMessage = innerError.error;
  914. }
  915. } else if ('message' in innerError) {
  916. toast.error(innerError.message);
  917. errorMessage = innerError.message;
  918. }
  919. responseMessage.error = {
  920. content:
  921. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  922. provider: model.name ?? model.id
  923. }) +
  924. '\n' +
  925. errorMessage
  926. };
  927. responseMessage.done = true;
  928. messages = messages;
  929. };
  930. const stopResponse = () => {
  931. stopResponseFlag = true;
  932. console.log('stopResponse');
  933. };
  934. const regenerateResponse = async (message) => {
  935. console.log('regenerateResponse');
  936. if (messages.length != 0) {
  937. let userMessage = history.messages[message.parentId];
  938. let userPrompt = userMessage.content;
  939. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  940. await sendPrompt(userPrompt, userMessage.id);
  941. } else {
  942. await sendPrompt(userPrompt, userMessage.id, message.model);
  943. }
  944. }
  945. };
  946. const continueGeneration = async () => {
  947. console.log('continueGeneration');
  948. const _chatId = JSON.parse(JSON.stringify($chatId));
  949. if (messages.length != 0 && messages.at(-1).done == true) {
  950. const responseMessage = history.messages[history.currentId];
  951. responseMessage.done = false;
  952. await tick();
  953. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  954. if (model) {
  955. if (model?.owned_by === 'openai') {
  956. await sendPromptOpenAI(
  957. model,
  958. history.messages[responseMessage.parentId].content,
  959. responseMessage.id,
  960. _chatId
  961. );
  962. } else
  963. await sendPromptOllama(
  964. model,
  965. history.messages[responseMessage.parentId].content,
  966. responseMessage.id,
  967. _chatId
  968. );
  969. }
  970. } else {
  971. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  972. }
  973. };
  974. const generateChatTitle = async (userPrompt) => {
  975. if ($settings?.title?.auto ?? true) {
  976. const title = await generateTitle(
  977. localStorage.token,
  978. selectedModels[0],
  979. userPrompt,
  980. $chatId
  981. ).catch((error) => {
  982. console.error(error);
  983. return 'New Chat';
  984. });
  985. return title;
  986. } else {
  987. return `${userPrompt}`;
  988. }
  989. };
  990. const setChatTitle = async (_chatId, _title) => {
  991. if (_chatId === $chatId) {
  992. title = _title;
  993. }
  994. if ($settings.saveChatHistory ?? true) {
  995. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  996. await chats.set(await getChatList(localStorage.token));
  997. }
  998. };
  999. const getTags = async () => {
  1000. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1001. return [];
  1002. });
  1003. };
  1004. </script>
<svelte:head>
	<!-- Browser tab title: chat title truncated to 30 chars, suffixed with the app name -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<CallOverlay {submitPrompt} bind:files />

<!-- Render immediately for a new chat; for an existing chat wait until it has loaded -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Banners: shown only on a brand-new, empty chat with at most one model selected -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								// Persist the dismissed id in localStorage, pruning ids of
								// banners that no longer exist in the current banner set.
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<!-- Scrollable message list; autoScroll remains on only while the view
			     is pinned within 5px of the bottom -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:webSearchEnabled
				bind:atSelectedModel
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}