Chat.svelte 33 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import { getContext, onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. chatId,
  9. chats,
  10. config,
  11. type Model,
  12. models,
  13. settings,
  14. showSidebar,
  15. tags as _tags,
  16. WEBUI_NAME,
  17. banners
  18. } from '$lib/stores';
  19. import { convertMessagesToHistory, copyToClipboard, splitStream } from '$lib/utils';
  20. import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
  21. import {
  22. addTagById,
  23. createNewChat,
  24. deleteTagById,
  25. getAllChatTags,
  26. getChatById,
  27. getChatList,
  28. getTagsById,
  29. updateChatById
  30. } from '$lib/apis/chats';
  31. import {
  32. generateOpenAIChatCompletion,
  33. generateSearchQuery,
  34. generateTitle
  35. } from '$lib/apis/openai';
  36. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  37. import Messages from '$lib/components/chat/Messages.svelte';
  38. import Navbar from '$lib/components/layout/Navbar.svelte';
  39. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  40. import { createOpenAITextStream } from '$lib/apis/streaming';
  41. import { queryMemory } from '$lib/apis/memories';
  42. import type { Writable } from 'svelte/store';
  43. import type { i18n as i18nType } from 'i18next';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import Banner from '../common/Banner.svelte';
  46. import { getUserSettings } from '$lib/apis/users';
  47. import { chatCompleted } from '$lib/apis';
// i18n instance shared through Svelte context by the root layout.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id from the route (`/c/[id]`); empty string for a brand-new, unsaved chat.
export let chatIdProp = '';

let loaded = false; // true once an existing chat has been fetched and rendered
let stopResponseFlag = false; // set by stopResponse(); polled inside the streaming loops
let autoScroll = true; // keep the view pinned to the newest message while streaming
let processing = '';
let messagesContainerElement: HTMLDivElement;
let currentRequestId = null; // id of the in-flight Ollama request, used for cancellation

let showModelSelector = true;
let selectedModels = ['']; // '' acts as the "no model selected" sentinel
let atSelectedModel: Model | undefined; // model picked via an "@model" mention in the input
let webSearchEnabled = false;

let chat = null; // server-side chat record (null until created/loaded)
let tags = [];
let title = '';
let prompt = '';
let files = [];

// Conversation state: `history` is the authoritative message tree
// (id -> node with parentId/childrenIds links); `messages` is the
// linear root-to-leaf path derived from it reactively below.
let messages = [];
let history = {
	messages: {},
	currentId: null // leaf message currently displayed
};
// Derive the linear `messages` array from the tree-shaped `history`:
// walk parent links from the current leaf up to the root, prepending a
// shallow copy of each node so the result is ordered root -> leaf.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// When the route supplies a chat id, load that chat; on failure fall back home.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
			// Defer so the DOM has rendered before scrolling/focusing.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	if (!$chatId) {
		// No active chat: set up a brand-new one.
		await initNewChat();
	} else {
		// An id exists but history saving is disabled, so the chat cannot be
		// resolved from the server — bail out to the home route.
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
//////////////////////////
// Web functions
//////////////////////////

/**
 * Reset all per-chat state for a fresh conversation: cancel any in-flight
 * Ollama request, clear the URL back to `/`, pick the model list
 * (URL ?models= > saved user settings > server default), optionally
 * auto-submit a prompt passed via `?q=`, reload user settings, and focus
 * the chat input.
 */
const initNewChat = async () => {
	if (currentRequestId !== null) {
		await cancelOllamaRequest(localStorage.token, currentRequestId);
		currentRequestId = null;
	}
	// NOTE(review): bare `history` here is the module-level chat tree (it
	// shadows window.history), so `history.state` is undefined — confirm
	// `window.history.state` was intended.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Model selection priority: URL query > saved user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Auto-submit a prompt passed via `?q=` (submitPrompt re-validates models).
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';

		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Replace any model ids that no longer exist with the '' sentinel.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	// Prefer server-stored settings; fall back to the local copy.
	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
/**
 * Fetch the chat identified by `chatIdProp` and hydrate local state
 * (models, history tree, title, per-chat system/params overrides).
 * Returns true on success; returns null/undefined otherwise, which the
 * reactive loader above treats as "navigate home".
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();

		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			// Older chats stored a single model string; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert it to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			// Load base settings first (server copy preferred), then layer the
			// per-chat system prompt / params overrides on top.
			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();

			// Mark the leaf message done so a chat that was interrupted
			// mid-stream does not stay stuck in the "generating" state.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
  193. const scrollToBottom = async () => {
  194. await tick();
  195. if (messagesContainerElement) {
  196. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  197. }
  198. };
  199. const createMessagesList = (responseMessageId) => {
  200. const message = history.messages[responseMessageId];
  201. if (message.parentId) {
  202. return [...createMessagesList(message.parentId), message];
  203. } else {
  204. return [message];
  205. }
  206. };
//////////////////////////
// Ollama functions
//////////////////////////

/**
 * Validate and enqueue a user prompt: builds the user message node, creates
 * the server-side chat on the very first message, clears the input, then
 * hands off to sendPrompt() to stream model responses.
 * `_user` optionally attributes the message to another author.
 */
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);

	// Replace model ids that no longer exist with the '' sentinel
	// (which triggers the "Model not selected" error below).
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done — ignore the submit until streaming finishes.
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicated copy of the selected model ids.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: keep the chat client-side only.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		document.getElementById('chat-textarea').style.height = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
/**
 * Fan the prompt out to every target model in parallel.
 * Target selection: explicit `modelId` arg > "@model" mention > selectedModels.
 * For each model this creates the assistant placeholder message, optionally
 * pulls memory context and web-search results, then streams the completion
 * through the provider-specific sender.
 */
const sendPrompt = async (prompt, parentId, modelId = null) => {
	// Snapshot the chat id so a mid-stream chat switch can be detected.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					// Warn only — the request is still sent below.
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};

				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();

				// Pull relevant long-term memories to inject as system context:
				// each hit becomes a "N. [date]. text" line.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				// Route to the provider-specific streaming implementation.
				if (model?.owned_by === 'openai') {
					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
};
  371. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  372. const responseMessage = history.messages[responseId];
  373. responseMessage.status = {
  374. done: false,
  375. action: 'web_search',
  376. description: $i18n.t('Generating search query')
  377. };
  378. messages = messages;
  379. const prompt = history.messages[parentId].content;
  380. let searchQuery = prompt;
  381. if (prompt.length > 100) {
  382. searchQuery = await generateChatSearchQuery(model, prompt);
  383. if (!searchQuery) {
  384. toast.warning($i18n.t('No search query generated'));
  385. responseMessage.status = {
  386. ...responseMessage.status,
  387. done: true,
  388. error: true,
  389. description: 'No search query generated'
  390. };
  391. messages = messages;
  392. return;
  393. }
  394. }
  395. responseMessage.status = {
  396. ...responseMessage.status,
  397. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  398. };
  399. messages = messages;
  400. const results = await runWebSearch(localStorage.token, searchQuery);
  401. if (results === undefined) {
  402. toast.warning($i18n.t('No search results found'));
  403. responseMessage.status = {
  404. ...responseMessage.status,
  405. done: true,
  406. error: true,
  407. description: 'No search results found'
  408. };
  409. messages = messages;
  410. return;
  411. }
  412. responseMessage.status = {
  413. ...responseMessage.status,
  414. done: true,
  415. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  416. urls: results.filenames
  417. };
  418. if (responseMessage?.files ?? undefined === undefined) {
  419. responseMessage.files = [];
  420. }
  421. responseMessage.files.push({
  422. collection_name: results.collection_name,
  423. name: searchQuery,
  424. type: 'web_search_results',
  425. urls: results.filenames
  426. });
  427. messages = messages;
  428. };
/**
 * Stream a completion for `userPrompt` from an Ollama-backed model into the
 * message identified by `responseMessageId`, parsing the newline-delimited
 * JSON stream, then persist the chat and (for the first exchange) generate
 * a title. `_chatId` is the chat id snapshot taken when streaming started;
 * a mismatch with `$chatId` means the user switched chats mid-stream.
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	model = model.id; // only the model id is needed from here on
	const responseMessage = history.messages[responseMessageId];

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	scrollToBottom();

	// Optional system message combining the configured system prompt with
	// any memory-derived user context gathered in sendPrompt().
	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${$settings?.system ?? ''}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};

			// Extract and format image URLs if any exist
			// (slice past the comma strips the data-URL header, keeping base64 only).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));

			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});

	let lastImageIndex = -1;

	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});

	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});

	// Collect doc/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);

	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored URI/JSON-escaped; decode before sending.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		docs: docs.length > 0 ? docs : undefined,
		citations: docs.length > 0,
		chat_id: $chatId
	});

	if (res && res.ok) {
		console.log('controller', controller);

		// Decode the byte stream and re-chunk it on newlines.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user aborted, or the user switched chats.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;

				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
					await cancelOllamaRequest(localStorage.token, currentRequestId);
				} else {
					// Notify the backend the exchange completed; it may return
					// post-processed messages to merge back into history.
					const messages = createMessagesList(responseMessageId);
					const res = await chatCompleted(localStorage.token, {
						model: model,
						messages: messages.map((m) => ({
							id: m.id,
							role: m.role,
							content: m.content,
							timestamp: m.timestamp
						})),
						chat_id: $chatId
					}).catch((error) => {
						console.error(error);
						return null;
					});

					if (res !== null) {
						// Update chat history with the new messages
						for (const message of res.messages) {
							history.messages[message.id] = { ...history.messages[message.id], ...message };
						}
					}
				}

				currentRequestId = null;
				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}

						if ('detail' in data) {
							// Server-reported error mid-stream; handled by the catch below.
							throw data;
						}

						if ('id' in data) {
							// First event carries the request id (used for cancellation).
							console.log(data);
							currentRequestId = data.id;
						} else {
							if (data.done == false) {
								// Skip a leading bare newline, otherwise append the token.
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;
									messages = messages;
								}
							} else {
								// Final event: mark done and record generation stats.
								responseMessage.done = true;

								if (responseMessage.content == '') {
									responseMessage.error = true;
									responseMessage.content =
										'Oops! No text generated from Ollama, Please try again.';
								}

								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									// NOTE(review): `selectedModelfile` is not defined anywhere in
									// this file's visible scope — this branch looks like it would
									// throw a ReferenceError when notifications are enabled;
									// confirm against the rest of the file.
									const notification = new Notification(
										selectedModelfile
											? `${
													selectedModelfile.title.charAt(0).toUpperCase() +
													selectedModelfile.title.slice(1)
											  }`
											: `${model}`,
										{
											body: responseMessage.content,
											icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
										}
									);
								}

								if ($settings.responseAutoCopy) {
									copyToClipboard(responseMessage.content);
								}

								if ($settings.responseAutoPlayback) {
									await tick();
									document.getElementById(`speak-button-${responseMessage.id}`)?.click();
								}
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				scrollToBottom();
			}
		}

		if ($chatId == _chatId) {
			// Persist the updated conversation.
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed outright: surface the server error (or a generic one).
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.content = error.detail;
			} else {
				toast.error(error.error);
				responseMessage.content = error.error;
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
				provider: 'Ollama'
			});
		}

		responseMessage.error = true;
		// NOTE(review): this unconditionally overwrites the more specific error
		// text assigned just above — confirm whether that is intended.
		responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
			provider: 'Ollama'
		});
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First completed exchange: move the URL to /c/<id> and auto-generate a title.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
/**
 * Stream a completion from an OpenAI-compatible backend into the message
 * identified by `responseMessageId` via createOpenAITextStream, then persist
 * the chat and (for the first exchange) generate a title. `_chatId` is the
 * chat id snapshot taken when streaming started; a mismatch with `$chatId`
 * means the user switched chats mid-stream.
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	const responseMessage = history.messages[responseMessageId];

	// Collect doc/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);
	console.log(docs);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Request token usage in the stream only when the model reports support.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
						  }
						: undefined,
				messages: [
					// Optional system message combining the configured system prompt
					// with any memory-derived user context gathered in sendPrompt().
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${$settings?.system ?? ''}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images use the multi-part content form;
						// the final message prefers its RAG-augmented `raContent`.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored URI/JSON-escaped; decode before sending.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user aborted, or the user switched chats.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						// Notify the backend the exchange completed; it may return
						// post-processed messages to merge back into history.
						const messages = createMessagesList(responseMessageId);
						const res = await chatCompleted(localStorage.token, {
							model: model,
							messages: messages.map((m) => ({
								id: m.id,
								role: m.role,
								content: m.content,
								timestamp: m.timestamp
							})),
							chat_id: $chatId
						}).catch((error) => {
							console.error(error);
							return null;
						});

						if (res !== null) {
							// Update chat history with the new messages
							for (const message of res.messages) {
								history.messages[message.id] = { ...history.messages[message.id], ...message };
							}
						}
					}

					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline, otherwise append the chunk.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: `${WEBUI_BASE_URL}/static/favicon.png`
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if ($settings.responseAutoPlayback) {
					await tick();
					document.getElementById(`speak-button-${responseMessage.id}`)?.click();
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			// Attach the last reported token usage to the finished message.
			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			if ($chatId == _chatId) {
				// Persist the updated conversation.
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// First completed exchange: move the URL to /c/<id> and auto-generate a title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
};
  859. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  860. let errorMessage = '';
  861. let innerError;
  862. if (error) {
  863. innerError = error;
  864. } else if (res !== null) {
  865. innerError = await res.json();
  866. }
  867. console.error(innerError);
  868. if ('detail' in innerError) {
  869. toast.error(innerError.detail);
  870. errorMessage = innerError.detail;
  871. } else if ('error' in innerError) {
  872. if ('message' in innerError.error) {
  873. toast.error(innerError.error.message);
  874. errorMessage = innerError.error.message;
  875. } else {
  876. toast.error(innerError.error);
  877. errorMessage = innerError.error;
  878. }
  879. } else if ('message' in innerError) {
  880. toast.error(innerError.message);
  881. errorMessage = innerError.message;
  882. }
  883. responseMessage.error = true;
  884. responseMessage.content =
  885. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  886. provider: model.name ?? model.id
  887. }) +
  888. '\n' +
  889. errorMessage;
  890. responseMessage.done = true;
  891. messages = messages;
  892. };
  893. const stopResponse = () => {
  894. stopResponseFlag = true;
  895. console.log('stopResponse');
  896. };
  897. const regenerateResponse = async (message) => {
  898. console.log('regenerateResponse');
  899. if (messages.length != 0) {
  900. let userMessage = history.messages[message.parentId];
  901. let userPrompt = userMessage.content;
  902. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  903. await sendPrompt(userPrompt, userMessage.id);
  904. } else {
  905. await sendPrompt(userPrompt, userMessage.id, message.model);
  906. }
  907. }
  908. };
  909. const continueGeneration = async () => {
  910. console.log('continueGeneration');
  911. const _chatId = JSON.parse(JSON.stringify($chatId));
  912. if (messages.length != 0 && messages.at(-1).done == true) {
  913. const responseMessage = history.messages[history.currentId];
  914. responseMessage.done = false;
  915. await tick();
  916. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  917. if (model) {
  918. if (model?.owned_by === 'openai') {
  919. await sendPromptOpenAI(
  920. model,
  921. history.messages[responseMessage.parentId].content,
  922. responseMessage.id,
  923. _chatId
  924. );
  925. } else
  926. await sendPromptOllama(
  927. model,
  928. history.messages[responseMessage.parentId].content,
  929. responseMessage.id,
  930. _chatId
  931. );
  932. }
  933. } else {
  934. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  935. }
  936. };
  937. const generateChatTitle = async (userPrompt) => {
  938. if ($settings?.title?.auto ?? true) {
  939. const model = $models.find((model) => model.id === selectedModels[0]);
  940. const titleModelId =
  941. model?.owned_by === 'openai' ?? false
  942. ? $settings?.title?.modelExternal ?? selectedModels[0]
  943. : $settings?.title?.model ?? selectedModels[0];
  944. const titleModel = $models.find((model) => model.id === titleModelId);
  945. console.log(titleModel);
  946. const title = await generateTitle(
  947. localStorage.token,
  948. $settings?.title?.prompt ??
  949. $i18n.t(
  950. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  951. ) + ' {{prompt}}',
  952. titleModelId,
  953. userPrompt,
  954. titleModel?.owned_by === 'openai' ?? false
  955. ? `${OPENAI_API_BASE_URL}`
  956. : `${OLLAMA_API_BASE_URL}/v1`
  957. );
  958. return title;
  959. } else {
  960. return `${userPrompt}`;
  961. }
  962. };
  963. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  964. const model = $models.find((model) => model.id === modelId);
  965. const taskModelId =
  966. model?.owned_by === 'openai' ?? false
  967. ? $settings?.title?.modelExternal ?? modelId
  968. : $settings?.title?.model ?? modelId;
  969. const taskModel = $models.find((model) => model.id === taskModelId);
  970. const previousMessages = messages
  971. .filter((message) => message.role === 'user')
  972. .map((message) => message.content);
  973. return await generateSearchQuery(
  974. localStorage.token,
  975. taskModelId,
  976. previousMessages,
  977. prompt,
  978. taskModel?.owned_by === 'openai' ?? false
  979. ? `${OPENAI_API_BASE_URL}`
  980. : `${OLLAMA_API_BASE_URL}/v1`
  981. );
  982. };
  983. const setChatTitle = async (_chatId, _title) => {
  984. if (_chatId === $chatId) {
  985. title = _title;
  986. }
  987. if ($settings.saveChatHistory ?? true) {
  988. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  989. await chats.set(await getChatList(localStorage.token));
  990. }
  991. };
  992. const getTags = async () => {
  993. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  994. return [];
  995. });
  996. };
  997. const addTag = async (tagName) => {
  998. const res = await addTagById(localStorage.token, $chatId, tagName);
  999. tags = await getTags();
  1000. chat = await updateChatById(localStorage.token, $chatId, {
  1001. tags: tags
  1002. });
  1003. _tags.set(await getAllChatTags(localStorage.token));
  1004. };
  1005. const deleteTag = async (tagName) => {
  1006. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1007. tags = await getTags();
  1008. chat = await updateChatById(localStorage.token, $chatId, {
  1009. tags: tags
  1010. });
  1011. _tags.set(await getAllChatTags(localStorage.token));
  1012. };
  1013. </script>
<!-- Browser tab title: truncate chat titles longer than 30 chars, always suffix the app name. -->
<svelte:head>
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<!-- Render immediately for a brand-new chat; for an existing chat wait until it has loaded. -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="min-h-screen max-h-screen {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Admin banners: shown only on the new-chat screen; dismissed IDs are kept in localStorage. -->
		{#if $banners.length > 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;

								<!-- Persist the dismissal; prune IDs whose banners no longer exist. -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<!-- Scroll container: autoScroll stays on while the user is within ~5px of the bottom. -->
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		bind:webSearchEnabled
		bind:atSelectedModel
		{selectedModels}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
{/if}