Chat.svelte 32 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import type { Writable } from 'svelte/store';
  9. import type { i18n as i18nType } from 'i18next';
  10. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  11. import {
  12. chatId,
  13. chats,
  14. config,
  15. type Model,
  16. models,
  17. settings,
  18. showSidebar,
  19. tags as _tags,
  20. WEBUI_NAME,
  21. banners,
  22. user,
  23. socket,
  24. showCallOverlay
  25. } from '$lib/stores';
  26. import {
  27. convertMessagesToHistory,
  28. copyToClipboard,
  29. promptTemplate,
  30. splitStream
  31. } from '$lib/utils';
  32. import { generateChatCompletion } from '$lib/apis/ollama';
  33. import {
  34. addTagById,
  35. createNewChat,
  36. deleteTagById,
  37. getAllChatTags,
  38. getChatById,
  39. getChatList,
  40. getTagsById,
  41. updateChatById
  42. } from '$lib/apis/chats';
  43. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  44. import { runWebSearch } from '$lib/apis/rag';
  45. import { createOpenAITextStream } from '$lib/apis/streaming';
  46. import { queryMemory } from '$lib/apis/memories';
  47. import { getUserSettings } from '$lib/apis/users';
  48. import { chatCompleted, generateTitle, generateSearchQuery } from '$lib/apis';
  49. import Banner from '../common/Banner.svelte';
  50. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  51. import Messages from '$lib/components/chat/Messages.svelte';
  52. import Navbar from '$lib/components/layout/Navbar.svelte';
  53. import CallOverlay from './MessageInput/CallOverlay.svelte';
// i18n instance injected by the root layout; used for all user-facing strings.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id supplied by the /c/[id] route; empty string means a brand-new chat.
export let chatIdProp = '';

let loaded = false; // true once the chat (new or loaded) is ready to render
let stopResponseFlag = false; // set by stopResponse(); polled by the streaming loops
let autoScroll = true; // stick to the bottom of the message list while streaming
let processing = ''; // NOTE(review): never written in this chunk — confirm usage elsewhere in the file
let messagesContainerElement: HTMLDivElement; // bound scroll container for the messages
let showModelSelector = true;
let selectedModels = ['']; // '' entries mean "no model selected" (validated before sending)
let atSelectedModel: Model | undefined; // model targeted via an @-mention, if any
let webSearchEnabled = false;
let chat = null; // server-side chat record, once created/loaded
let tags = [];
let title = '';
let prompt = ''; // bound to the chat input textarea
let files = []; // pending attachments for the next user message
let messages = []; // linear view of the active branch (derived reactively from `history`)
// Message tree: messages is a map of id -> message (linked via parentId /
// childrenIds); currentId is the leaf of the branch currently displayed.
let history = {
	messages: {},
	currentId: null
};
// Reactive: rebuild the linear `messages` array whenever the message tree
// (`history`) or its cursor (`currentId`) changes. Walks parent links from
// the current leaf up to the root so `messages` is the active branch in
// chronological order (shallow copies, so list items are detached from the tree).
// NOTE(review): assumes the tree is consistent — a dangling currentId/parentId
// would make history.messages[...] undefined and throw on `.parentId`.
$: if (history.currentId !== null) {
	let _messages = [];
	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}
// Reactive: when the route supplies a chat id, load that chat and focus the
// input; on failure fall back to the home route. Re-runs whenever chatIdProp
// changes (e.g. client-side navigation between chats).
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;
			// Defer one macrotask so layout settles before jumping to the bottom.
			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
  100. onMount(async () => {
  101. if (!$chatId) {
  102. await initNewChat();
  103. } else {
  104. if (!($settings.saveChatHistory ?? true)) {
  105. await goto('/');
  106. }
  107. }
  108. });
  109. //////////////////////////
  110. // Web functions
  111. //////////////////////////
/**
 * Reset all chat state for a brand-new conversation: clears the URL back to
 * '/', empties messages/history, picks the initial model selection, reloads
 * user settings, and focuses the input. Auto-submits a prompt when a ?q=
 * query parameter is present (deep-link support).
 */
const initNewChat = async () => {
	// NOTE(review): the local `history` (message tree) shadows window.history,
	// so `history.state` is undefined here — replaceState still works.
	window.history.replaceState(history.state, '', `/`);
	await chatId.set('');
	autoScroll = true;
	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};
	// Model selection priority: ?models= URL param > user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}
	// ?q= pre-fills and immediately submits a prompt.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';
		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}
	// Blank out any selected model id that is no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	// Prefer server-side settings; fall back to the locally cached copy.
	const userSettings = await getUserSettings(localStorage.token);
	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}
	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
/**
 * Load the chat identified by `chatIdProp` from the backend and hydrate
 * component state (selected models, message history, title, per-chat
 * settings). Returns true on success; returns null — or falls through to
 * undefined when the fetch yields no chat — so the reactive caller treats
 * any falsy result as "redirect home".
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});
	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;
		if (chatContent) {
			console.log(chatContent);
			// Older chats stored a single model value; normalize to an array.
			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat message list; convert it to the tree form.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;
			// Base settings: server-side copy first, local cache as fallback...
			const userSettings = await getUserSettings(localStorage.token);
			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}
			// ...then layer the per-chat system prompt / params on top.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});
			autoScroll = true;
			await tick();
			// Ensure the last message renders as finished (a stream may have been
			// interrupted before `done` was persisted).
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();
			return true;
		} else {
			return null;
		}
	}
};
  194. const scrollToBottom = async () => {
  195. await tick();
  196. if (messagesContainerElement) {
  197. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  198. }
  199. };
  200. const createMessagesList = (responseMessageId) => {
  201. const message = history.messages[responseMessageId];
  202. if (message.parentId) {
  203. return [...createMessagesList(message.parentId), message];
  204. } else {
  205. return [message];
  206. }
  207. };
/**
 * Post-stream hook: renders any mermaid diagrams in the freshly streamed
 * content and notifies the backend that the chat turn completed. When the
 * backend returns (possibly rewritten) messages, merge them back into
 * `history`, preserving the pre-rewrite text under `originalContent`.
 */
const chatCompletedHandler = async (modelId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});
	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			timestamp: m.timestamp
		})),
		chat_id: $chatId
	}).catch((error) => {
		console.error(error);
		return null;
	});
	if (res !== null) {
		// Update chat history with the new messages
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				// Keep the original text around if the server changed it.
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
  238. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  239. return setInterval(() => {
  240. $socket?.emit('usage', {
  241. action: 'chat',
  242. model: modelId,
  243. chat_id: chatId
  244. });
  245. }, 1000);
  246. };
  247. //////////////////////////
  248. // Chat functions
  249. //////////////////////////
/**
 * Validate and enqueue a user prompt: builds the user message node, creates
 * the server-side chat on the very first message, then fans the prompt out
 * to the selected model(s) via sendPrompt(). Validation rejects: no model
 * selected, previous response still streaming, or attachments still
 * uploading. Returns the array of model responses (empty on rejection).
 */
const submitPrompt = async (userPrompt, _user = null) => {
	// Reset chat input textarea
	prompt = '';
	document.getElementById('chat-textarea').style.height = '';
	let _responses = [];
	console.log('submitPrompt', $chatId);
	// Blank out any selected model id that is no longer available.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);
	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat message textarea height
		document.getElementById('chat-textarea').style.height = '';
		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicate the model ids while preserving order.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};
		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;
		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}
		// Wait until history/message have been updated
		await tick();
		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: $i18n.t('New Chat'),
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.params ?? {})
					},
					messages: messages,
					history: history,
					tags: [],
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// Persistence disabled: use a sentinel id for this session only.
				await chatId.set('local');
			}
			await tick();
		}
		files = [];
		// Send prompt
		_responses = await sendPrompt(userPrompt, userMessageId);
	}
	return _responses;
};
/**
 * Fan a prompt out to each target model in parallel. For each model this
 * creates an empty assistant message, optionally gathers memory context and
 * web-search results, then streams the completion through the Ollama or
 * OpenAI pipeline. Returns one response string (or null) per model.
 *
 * Target models, in priority order: explicit `modelId` argument,
 * an @-mentioned model, otherwise the full current selection.
 */
const sendPrompt = async (prompt, parentId, modelId = null) => {
	let _responses = [];
	// Snapshot the chat id so a mid-stream chat switch can be detected.
	const _chatId = JSON.parse(JSON.stringify($chatId));
	await Promise.all(
		(modelId
			? [modelId]
			: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels
		).map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);
			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);
				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					// Warn but proceed anyway — the request is still sent.
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}
				// Create response message
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model.id,
					modelName: model.name ?? model.id,
					userContext: null,
					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
				};
				// Add message to history and Set currentId to messageId
				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;
				// Append messageId to childrenIds of parent message
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}
				await tick();
				// Pull relevant memories into the system context when enabled.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});
						if (res) {
							if (res.documents[0].length > 0) {
								// Format each memory as "N. [YYYY-MM-DD]. text".
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}
							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;
				// Heartbeat that reports usage while this model streams.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}
				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);
				console.log('chatEventEmitter', chatEventEmitter);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);
	await chats.set(await getChatList(localStorage.token));
	return _responses;
};
  420. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  421. const responseMessage = history.messages[responseId];
  422. responseMessage.status = {
  423. done: false,
  424. action: 'web_search',
  425. description: $i18n.t('Generating search query')
  426. };
  427. messages = messages;
  428. const prompt = history.messages[parentId].content;
  429. let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt);
  430. if (!searchQuery) {
  431. toast.warning($i18n.t('No search query generated'));
  432. responseMessage.status = {
  433. ...responseMessage.status,
  434. done: true,
  435. error: true,
  436. description: 'No search query generated'
  437. };
  438. messages = messages;
  439. searchQuery = prompt;
  440. }
  441. responseMessage.status = {
  442. ...responseMessage.status,
  443. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  444. };
  445. messages = messages;
  446. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  447. console.log(error);
  448. toast.error(error);
  449. return null;
  450. });
  451. if (results) {
  452. responseMessage.status = {
  453. ...responseMessage.status,
  454. done: true,
  455. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  456. urls: results.filenames
  457. };
  458. if (responseMessage?.files ?? undefined === undefined) {
  459. responseMessage.files = [];
  460. }
  461. responseMessage.files.push({
  462. collection_name: results.collection_name,
  463. name: searchQuery,
  464. type: 'web_search_results',
  465. urls: results.filenames
  466. });
  467. messages = messages;
  468. } else {
  469. responseMessage.status = {
  470. ...responseMessage.status,
  471. done: true,
  472. error: true,
  473. description: 'No search results found'
  474. };
  475. messages = messages;
  476. }
  477. };
/**
 * Stream a completion for `userPrompt` from an Ollama-backed model.
 * Builds the request body (optional system/memory context message, prior
 * messages with base64 images kept only on the latest image-bearing
 * message, attached docs for RAG), consumes the newline-delimited JSON
 * stream into `responseMessage`, then persists the chat and — on the first
 * exchange — generates a title. Returns the final text, or null on failure.
 */
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	// From here on `model` is just the model id string.
	model = model.id;
	const responseMessage = history.messages[responseMessageId];
	// Wait until history/message have been updated
	await tick();
	// Scroll down
	scrollToBottom();
	// Optional system message combining the configured system prompt with any
	// memory-derived user context, followed by the conversation so far.
	const messagesBody = [
		$settings.system || (responseMessage?.userContext ?? null)
			? {
					role: 'system',
					content: `${promptTemplate($settings?.system ?? '', $user.name)}${
						responseMessage?.userContext ?? null
							? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
							: ''
					}`
			  }
			: undefined,
		...messages
	]
		.filter((message) => message?.content?.trim())
		.map((message, idx, arr) => {
			// Prepare the base message object
			const baseMessage = {
				role: message.role,
				content: message.content
			};
			// Extract and format image URLs if any exist
			// (strip the data-URL prefix; Ollama expects bare base64).
			const imageUrls = message.files
				?.filter((file) => file.type === 'image')
				.map((file) => file.url.slice(file.url.indexOf(',') + 1));
			// Add images array only if it contains elements
			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
				baseMessage.images = imageUrls;
			}
			return baseMessage;
		});
	let lastImageIndex = -1;
	// Find the index of the last object with images
	messagesBody.forEach((item, index) => {
		if (item.images) {
			lastImageIndex = index;
		}
	});
	// Remove images from all but the last one
	messagesBody.forEach((item, index) => {
		if (index !== lastImageIndex) {
			delete item.images;
		}
	});
	// Collect doc/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);
	const [res, controller] = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: messagesBody,
		options: {
			...($settings.params ?? {}),
			// Stop strings are stored URI-encoded with escaped quotes; decode them.
			stop:
				$settings?.params?.stop ?? undefined
					? $settings.params.stop.map((str) =>
							decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
					  )
					: undefined,
			num_predict: $settings?.params?.max_tokens ?? undefined,
			repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
		},
		format: $settings.requestFormat ?? undefined,
		keep_alive: $settings.keepAlive ?? undefined,
		docs: docs.length > 0 ? docs : undefined,
		citations: docs.length > 0,
		chat_id: $chatId
	});
	if (res && res.ok) {
		console.log('controller', controller);
		// Newline-delimited JSON stream from the Ollama endpoint.
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();
		while (true) {
			const { value, done } = await reader.read();
			// Stop when the stream ends, the user aborted, or the user navigated
			// to a different chat mid-stream.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;
				if (stopResponseFlag) {
					controller.abort('User: Stop Response');
				} else {
					const messages = createMessagesList(responseMessageId);
					await chatCompletedHandler(model, messages);
				}
				_response = responseMessage.content;
				break;
			}
			try {
				let lines = value.split('\n');
				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);
						if ('citations' in data) {
							responseMessage.citations = data.citations;
							continue;
						}
						if ('detail' in data) {
							// Server-side error payload; handled by the catch below.
							throw data;
						}
						if (data.done == false) {
							// Skip a leading bare newline; otherwise append the token.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;
								messages = messages;
							}
						} else {
							responseMessage.done = true;
							if (responseMessage.content == '') {
								responseMessage.error = {
									code: 400,
									content: `Oops! No text generated from Ollama, Please try again.`
								};
							}
							responseMessage.context = data.context ?? null;
							// Timing/token stats from Ollama's final chunk.
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;
							if ($settings.notificationEnabled && !document.hasFocus()) {
								// NOTE(review): `selectedModelfile` is not declared anywhere in
								// this component's visible scope — if this branch runs it will
								// throw a ReferenceError; confirm where it is (or was) defined.
								const notification = new Notification(
									selectedModelfile
										? `${
												selectedModelfile.title.charAt(0).toUpperCase() +
												selectedModelfile.title.slice(1)
										  }`
										: `${model}`,
									{
										body: responseMessage.content,
										icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
									}
								);
							}
							if ($settings.responseAutoCopy) {
								copyToClipboard(responseMessage.content);
							}
							if ($settings.responseAutoPlayback && !$showCallOverlay) {
								await tick();
								document.getElementById(`speak-button-${responseMessage.id}`)?.click();
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}
			if (autoScroll) {
				scrollToBottom();
			}
		}
		// Persist the finished exchange (unless the user switched chats).
		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history,
					models: selectedModels
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// Request failed outright: surface the error payload if there is one.
		if (res !== null) {
			const error = await res.json();
			console.log(error);
			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.error = { content: error.detail };
			} else {
				toast.error(error.error);
				responseMessage.error = { content: error.error };
			}
		} else {
			toast.error(
				$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
			);
			responseMessage.error = {
				content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
					provider: 'Ollama'
				})
			};
		}
		responseMessage.done = true;
		messages = messages;
	}
	stopResponseFlag = false;
	await tick();
	if (autoScroll) {
		scrollToBottom();
	}
	// First exchange complete: move the URL to /c/<id> and title the chat.
	// NOTE(review): the local `history` (message tree) shadows window.history,
	// so `history.state` is undefined here — replaceState still works.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
	return _response;
};
/**
 * Stream a completion for `userPrompt` from an OpenAI-compatible model.
 * Builds a multimodal message list (text + image_url parts for image
 * attachments on user messages), consumes the text stream into
 * `responseMessage`, records usage stats when the server reports them,
 * persists the chat, and titles the first exchange.
 * Returns the final text, or null on failure.
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];
	// Collect doc/collection/web-search attachments for RAG citations.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) =>
			message.files.filter((item) =>
				['doc', 'collection', 'web_search_results'].includes(item.type)
			)
		)
		.flat(1);
	console.log(docs);
	scrollToBottom();
	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Only request usage reporting when the model advertises support.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
						  }
						: undefined,
				messages: [
					// Optional system message combining the configured system prompt
					// with any memory-derived user context.
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate($settings?.system ?? '', $user.name)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
						  }
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with image attachments become multimodal content
						// arrays; everything else stays a plain string. The final message
						// prefers its RAG-augmented text (raContent) when available.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
							  }
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
							  })
					})),
				seed: $settings?.params?.seed ?? undefined,
				// Stop strings are stored URI-encoded with escaped quotes; decode them.
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
						  )
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);
		// Wait until history/message have been updated
		await tick();
		scrollToBottom();
		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;
			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop when the stream ends, the user aborted, or the user
				// navigated to a different chat mid-stream.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;
					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, messages);
					}
					_response = responseMessage.content;
					break;
				}
				if (usage) {
					lastUsage = usage;
				}
				if (citations) {
					responseMessage.citations = citations;
					continue;
				}
				// Skip a leading bare newline; otherwise append the token.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}
				if (autoScroll) {
					scrollToBottom();
				}
			}
			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`OpenAI ${model}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}
			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}
			if ($settings.responseAutoPlayback && !$showCallOverlay) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}
			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}
			// Persist the finished exchange (unless the user switched chats).
			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;
	stopResponseFlag = false;
	await tick();
	if (autoScroll) {
		scrollToBottom();
	}
	// First exchange complete: move the URL to /c/<id> and title the chat.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}
	return _response;
};
  870. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  871. let errorMessage = '';
  872. let innerError;
  873. if (error) {
  874. innerError = error;
  875. } else if (res !== null) {
  876. innerError = await res.json();
  877. }
  878. console.error(innerError);
  879. if ('detail' in innerError) {
  880. toast.error(innerError.detail);
  881. errorMessage = innerError.detail;
  882. } else if ('error' in innerError) {
  883. if ('message' in innerError.error) {
  884. toast.error(innerError.error.message);
  885. errorMessage = innerError.error.message;
  886. } else {
  887. toast.error(innerError.error);
  888. errorMessage = innerError.error;
  889. }
  890. } else if ('message' in innerError) {
  891. toast.error(innerError.message);
  892. errorMessage = innerError.message;
  893. }
  894. responseMessage.error = {
  895. content:
  896. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  897. provider: model.name ?? model.id
  898. }) +
  899. '\n' +
  900. errorMessage
  901. };
  902. responseMessage.done = true;
  903. messages = messages;
  904. };
  905. const stopResponse = () => {
  906. stopResponseFlag = true;
  907. console.log('stopResponse');
  908. };
  909. const regenerateResponse = async (message) => {
  910. console.log('regenerateResponse');
  911. if (messages.length != 0) {
  912. let userMessage = history.messages[message.parentId];
  913. let userPrompt = userMessage.content;
  914. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  915. await sendPrompt(userPrompt, userMessage.id);
  916. } else {
  917. await sendPrompt(userPrompt, userMessage.id, message.model);
  918. }
  919. }
  920. };
  921. const continueGeneration = async () => {
  922. console.log('continueGeneration');
  923. const _chatId = JSON.parse(JSON.stringify($chatId));
  924. if (messages.length != 0 && messages.at(-1).done == true) {
  925. const responseMessage = history.messages[history.currentId];
  926. responseMessage.done = false;
  927. await tick();
  928. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  929. if (model) {
  930. if (model?.owned_by === 'openai') {
  931. await sendPromptOpenAI(
  932. model,
  933. history.messages[responseMessage.parentId].content,
  934. responseMessage.id,
  935. _chatId
  936. );
  937. } else
  938. await sendPromptOllama(
  939. model,
  940. history.messages[responseMessage.parentId].content,
  941. responseMessage.id,
  942. _chatId
  943. );
  944. }
  945. } else {
  946. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  947. }
  948. };
  949. const generateChatTitle = async (userPrompt) => {
  950. if ($settings?.title?.auto ?? true) {
  951. const title = await generateTitle(
  952. localStorage.token,
  953. selectedModels[0],
  954. userPrompt,
  955. $chatId
  956. ).catch((error) => {
  957. console.error(error);
  958. return 'New Chat';
  959. });
  960. return title;
  961. } else {
  962. return `${userPrompt}`;
  963. }
  964. };
  965. const setChatTitle = async (_chatId, _title) => {
  966. if (_chatId === $chatId) {
  967. title = _title;
  968. }
  969. if ($settings.saveChatHistory ?? true) {
  970. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  971. await chats.set(await getChatList(localStorage.token));
  972. }
  973. };
  974. const getTags = async () => {
  975. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  976. return [];
  977. });
  978. };
  979. </script>
<svelte:head>
	<!-- Browser tab title: truncate chat titles longer than 30 chars, always suffix the app name -->
	<title>
		{title
			? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
			: `${$WEBUI_NAME}`}
	</title>
</svelte:head>

<CallOverlay {submitPrompt} bind:files />

<!-- Render once the chat is loaded (or immediately for a brand-new chat with no chatIdProp) -->
{#if !chatIdProp || (loaded && chatIdProp)}
	<div
		class="h-screen max-h-[100dvh] {$showSidebar
			? 'md:max-w-[calc(100%-260px)]'
			: ''} w-full max-w-full flex flex-col"
	>
		<Navbar
			{title}
			bind:selectedModels
			bind:showModelSelector
			shareEnabled={messages.length > 0}
			{chat}
			{initNewChat}
		/>

		<!-- Admin banners: only on an empty, unsaved chat; dismissed ids are kept in localStorage -->
		{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
			<div
				class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
			>
				<div class=" flex flex-col gap-1 w-full">
					{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
						<Banner
							{banner}
							on:dismiss={(e) => {
								const bannerId = e.detail;
								<!-- prune ids of banners that no longer exist while recording the new dismissal -->
								localStorage.setItem(
									'dismissedBannerIds',
									JSON.stringify(
										[
											bannerId,
											...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
										].filter((id) => $banners.find((b) => b.id === id))
									)
								);
							}}
						/>
					{/each}
				</div>
			</div>
		{/if}

		<div class="flex flex-col flex-auto">
			<div
				class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
				id="messages-container"
				bind:this={messagesContainerElement}
				on:scroll={(e) => {
					autoScroll =
						messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
						messagesContainerElement.clientHeight + 5;
				}}
			>
				<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
					<Messages
						chatId={$chatId}
						{selectedModels}
						{processing}
						bind:history
						bind:messages
						bind:autoScroll
						bind:prompt
						bottomPadding={files.length > 0}
						{sendPrompt}
						{continueGeneration}
						{regenerateResponse}
					/>
				</div>
			</div>

			<MessageInput
				bind:files
				bind:prompt
				bind:autoScroll
				bind:webSearchEnabled
				bind:atSelectedModel
				{selectedModels}
				{messages}
				{submitPrompt}
				{stopResponse}
			/>
		</div>
	</div>
{/if}