<!-- Chat.svelte -->
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import { toast } from 'svelte-sonner';
  4. import mermaid from 'mermaid';
  5. import { getContext, onMount, tick } from 'svelte';
  6. import { goto } from '$app/navigation';
  7. import { page } from '$app/stores';
  8. import {
  9. chatId,
  10. chats,
  11. config,
  12. type Model,
  13. models,
  14. settings,
  15. showSidebar,
  16. tags as _tags,
  17. WEBUI_NAME,
  18. banners,
  19. user,
  20. socket,
  21. showCallOverlay
  22. } from '$lib/stores';
  23. import {
  24. convertMessagesToHistory,
  25. copyToClipboard,
  26. promptTemplate,
  27. splitStream
  28. } from '$lib/utils';
  29. import { generateChatCompletion } from '$lib/apis/ollama';
  30. import {
  31. addTagById,
  32. createNewChat,
  33. deleteTagById,
  34. getAllChatTags,
  35. getChatById,
  36. getChatList,
  37. getTagsById,
  38. updateChatById
  39. } from '$lib/apis/chats';
  40. import {
  41. generateOpenAIChatCompletion,
  42. generateSearchQuery,
  43. generateTitle
  44. } from '$lib/apis/openai';
  45. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  46. import Messages from '$lib/components/chat/Messages.svelte';
  47. import Navbar from '$lib/components/layout/Navbar.svelte';
  48. import { OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
  49. import { createOpenAITextStream } from '$lib/apis/streaming';
  50. import { queryMemory } from '$lib/apis/memories';
  51. import type { Writable } from 'svelte/store';
  52. import type { i18n as i18nType } from 'i18next';
  53. import { runWebSearch } from '$lib/apis/rag';
  54. import Banner from '../common/Banner.svelte';
  55. import { getUserSettings } from '$lib/apis/users';
  56. import { chatCompleted } from '$lib/apis';
  57. import CallOverlay from './MessageInput/CallOverlay.svelte';
	// Localized translator store injected by the app's root layout via context.
	const i18n: Writable<i18nType> = getContext('i18n');

	// Route-provided chat id; an empty string indicates a brand-new chat.
	export let chatIdProp = '';

	let loaded = false; // true once an existing chat has been fetched and rendered
	let stopResponseFlag = false; // set by stopResponse() to abort an in-flight generation
	let autoScroll = true; // stick to the bottom while a response streams in
	let processing = '';
	let messagesContainerElement: HTMLDivElement;
	let showModelSelector = true;
	let selectedModels = ['']; // model ids chosen in the selector; '' marks "none selected"
	let atSelectedModel: Model | undefined; // model targeted via an "@model" mention, if any
	let webSearchEnabled = false;
	let chat = null; // server-side chat record (null until created/loaded)
	let tags = [];
	let title = '';
	let prompt = ''; // current contents of the chat textarea
	let files = []; // attachments queued for the next user message
	let messages = []; // linear branch of the conversation, derived from `history`
	// Full conversation tree: every message keyed by id, plus the id of the leaf
	// currently displayed (null when the chat is empty).
	// NOTE(review): this local `history` shadows window.history inside the component.
	let history = {
		messages: {},
		currentId: null
	};
  79. $: if (history.currentId !== null) {
  80. let _messages = [];
  81. let currentMessage = history.messages[history.currentId];
  82. while (currentMessage !== null) {
  83. _messages.unshift({ ...currentMessage });
  84. currentMessage =
  85. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  86. }
  87. messages = _messages;
  88. } else {
  89. messages = [];
  90. }
	// When the route supplies a chat id, load that chat; on failure fall back to
	// the home route. Re-runs whenever `chatIdProp` changes.
	$: if (chatIdProp) {
		(async () => {
			if (await loadChat()) {
				await tick();
				loaded = true;

				// Defer so the scroll happens after the messages have rendered.
				window.setTimeout(() => scrollToBottom(), 0);
				const chatInput = document.getElementById('chat-textarea');
				chatInput?.focus();
			} else {
				await goto('/');
			}
		})();
	}
  104. onMount(async () => {
  105. if (!$chatId) {
  106. await initNewChat();
  107. } else {
  108. if (!($settings.saveChatHistory ?? true)) {
  109. await goto('/');
  110. }
  111. }
  112. });
  113. //////////////////////////
  114. // Web functions
  115. //////////////////////////
	// Reset all chat state for a brand-new conversation and apply model/prompt
	// presets from the URL, user settings, or server defaults.
	const initNewChat = async () => {
		// NOTE(review): `history` here is the local chat tree (it shadows
		// window.history), so `history.state` is undefined — likely intended to be
		// window.history.state; passing undefined state is harmless but confirm.
		window.history.replaceState(history.state, '', `/`);
		await chatId.set('');

		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};

		// Model selection priority: ?models= query param > user settings > server default.
		if ($page.url.searchParams.get('models')) {
			selectedModels = $page.url.searchParams.get('models')?.split(',');
		} else if ($settings?.models) {
			selectedModels = $settings?.models;
		} else if ($config?.default_models) {
			console.log($config?.default_models.split(',') ?? '');
			selectedModels = $config?.default_models.split(',');
		} else {
			selectedModels = [''];
		}

		// A ?q= query param pre-fills the prompt and submits it immediately.
		if ($page.url.searchParams.get('q')) {
			prompt = $page.url.searchParams.get('q') ?? '';

			if (prompt) {
				await tick();
				submitPrompt(prompt);
			}
		}

		// Blank out any selected model id that is not actually available.
		// NOTE(review): this runs after submitPrompt above — confirm the
		// auto-submitted prompt is meant to use the unfiltered selection.
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		// Prefer server-stored UI settings; fall back to the locally cached copy.
		const userSettings = await getUserSettings(localStorage.token);

		if (userSettings) {
			settings.set(userSettings.ui);
		} else {
			settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
		}

		const chatInput = document.getElementById('chat-textarea');
		setTimeout(() => chatInput?.focus(), 0);
	};
	// Fetch the chat identified by `chatIdProp`, normalize legacy formats, and
	// hydrate component state. Resolves truthy on success, falsy on failure.
	const loadChat = async () => {
		chatId.set(chatIdProp);
		// On API failure, navigate home and resolve the fetch to null.
		chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
			await goto('/');
			return null;
		});

		if (chat) {
			tags = await getTags();

			const chatContent = chat.chat;

			if (chatContent) {
				console.log(chatContent);

				// Older chats may store a single model value (or none); normalize to an array.
				selectedModels =
					(chatContent?.models ?? undefined) !== undefined
						? chatContent.models
						: [chatContent.models ?? ''];
				// Older chats stored a flat `messages` list instead of a history tree.
				history =
					(chatContent?.history ?? undefined) !== undefined
						? chatContent.history
						: convertMessagesToHistory(chatContent.messages);
				title = chatContent.title;

				// Load UI settings (server copy preferred), then overlay this chat's
				// own system prompt and generation params.
				const userSettings = await getUserSettings(localStorage.token);

				if (userSettings) {
					await settings.set(userSettings.ui);
				} else {
					await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
				}

				await settings.set({
					...$settings,
					system: chatContent.system ?? $settings.system,
					params: chatContent.options ?? $settings.params
				});
				autoScroll = true;
				await tick();

				// A loaded chat can never have a response still streaming, so force
				// the displayed leaf message into the "done" state.
				if (messages.length > 0) {
					history.messages[messages.at(-1).id].done = true;
				}
				await tick();

				return true;
			} else {
				return null;
			}
		}
		// NOTE(review): falls through returning undefined when the fetch failed;
		// the reactive caller treats any falsy value as failure.
	};
  198. const scrollToBottom = async () => {
  199. await tick();
  200. if (messagesContainerElement) {
  201. messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
  202. }
  203. };
  204. const createMessagesList = (responseMessageId) => {
  205. const message = history.messages[responseMessageId];
  206. if (message.parentId) {
  207. return [...createMessagesList(message.parentId), message];
  208. } else {
  209. return [message];
  210. }
  211. };
	// Finalize a completed response turn: render mermaid diagrams and notify the
	// backend, which may return post-processed versions of the messages.
	const chatCompletedHandler = async (modelId, messages) => {
		// Render any mermaid diagrams contained in the finished response.
		await mermaid.run({
			querySelector: '.mermaid'
		});

		const res = await chatCompleted(localStorage.token, {
			model: modelId,
			messages: messages.map((m) => ({
				id: m.id,
				role: m.role,
				content: m.content,
				timestamp: m.timestamp
			})),
			chat_id: $chatId
		}).catch((error) => {
			// Best-effort: a failed completion callback is logged, not surfaced.
			console.error(error);
			return null;
		});

		if (res !== null) {
			// Merge server-side edits back into the local history, preserving the
			// pre-edit text under `originalContent` when the content changed.
			for (const message of res.messages) {
				history.messages[message.id] = {
					...history.messages[message.id],
					...(history.messages[message.id].content !== message.content
						? { originalContent: history.messages[message.id].content }
						: {}),
					...message
				};
			}
		}
	};
  242. const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
  243. return setInterval(() => {
  244. $socket?.emit('usage', {
  245. action: 'chat',
  246. model: modelId,
  247. chat_id: chatId
  248. });
  249. }, 1000);
  250. };
  251. //////////////////////////
  252. // Ollama functions
  253. //////////////////////////
	// Validate the current state, append the user's message to the history tree,
	// create the server-side chat on the first message, then dispatch generation.
	const submitPrompt = async (userPrompt, _user = null) => {
		console.log('submitPrompt', $chatId);

		// Blank out any selected model id that is no longer available.
		selectedModels = selectedModels.map((modelId) =>
			$models.map((m) => m.id).includes(modelId) ? modelId : ''
		);

		if (selectedModels.includes('')) {
			toast.error($i18n.t('Model not selected'));
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else if (
			files.length > 0 &&
			files.filter((file) => file.upload_status === false).length > 0
		) {
			// Upload not done
			toast.error(
				$i18n.t(
					`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
				)
			);
		} else {
			// Reset chat message textarea height
			document.getElementById('chat-textarea').style.height = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				user: _user ?? undefined,
				content: userPrompt,
				files: files.length > 0 ? files : undefined,
				timestamp: Math.floor(Date.now() / 1000), // Unix epoch
				// Deduplicated copy of the current model selection.
				models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
			};

			// Add message to history and Set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/message have been updated
			await tick();

			// Create new chat if only one message in messages
			if (messages.length == 1) {
				if ($settings.saveChatHistory ?? true) {
					chat = await createNewChat(localStorage.token, {
						id: $chatId,
						title: $i18n.t('New Chat'),
						models: selectedModels,
						system: $settings.system ?? undefined,
						options: {
							...($settings.params ?? {})
						},
						messages: messages,
						history: history,
						tags: [],
						timestamp: Date.now()
					});
					await chats.set(await getChatList(localStorage.token));
					await chatId.set(chat.id);
				} else {
					// History persistence disabled: keep the chat client-side only.
					await chatId.set('local');
				}
				await tick();
			}

			// Reset chat input textarea
			prompt = '';
			document.getElementById('chat-textarea').style.height = '';
			files = [];

			// Send prompt
			await sendPrompt(userPrompt, userMessageId);
		}
	};
	// Fan generation out to one request per target model, creating a placeholder
	// assistant message for each, then refresh the chat list when all finish.
	const sendPrompt = async (prompt, parentId, modelId = null) => {
		// Snapshot the chat id so streamed updates can detect a chat switch.
		const _chatId = JSON.parse(JSON.stringify($chatId));
		// Target priority: explicit modelId > "@model" mention > current selection.
		await Promise.all(
			(modelId
				? [modelId]
				: atSelectedModel !== undefined
				? [atSelectedModel.id]
				: selectedModels
			).map(async (modelId) => {
				console.log('modelId', modelId);
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					// If there are image files, check if model is vision capable
					const hasImages = messages.some((message) =>
						message.files?.some((file) => file.type === 'image')
					);

					// Warn only — the request is still sent.
					if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
						toast.error(
							$i18n.t('Model {{modelName}} is not vision capable', {
								modelName: model.name ?? model.id
							})
						);
					}

					// Create response message
					let responseMessageId = uuidv4();
					let responseMessage = {
						parentId: parentId,
						id: responseMessageId,
						childrenIds: [],
						role: 'assistant',
						content: '',
						model: model.id,
						modelName: model.name ?? model.id,
						userContext: null,
						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
					};

					// Add message to history and Set currentId to messageId
					history.messages[responseMessageId] = responseMessage;
					history.currentId = responseMessageId;

					// Append messageId to childrenIds of parent message
					if (parentId !== null) {
						history.messages[parentId].childrenIds = [
							...history.messages[parentId].childrenIds,
							responseMessageId
						];
					}

					await tick();

					// Optionally attach relevant stored memories as "user context",
					// formatted as numbered, dated lines.
					let userContext = null;
					if ($settings?.memory ?? false) {
						if (userContext === null) {
							const res = await queryMemory(localStorage.token, prompt).catch((error) => {
								toast.error(error);
								return null;
							});

							if (res) {
								if (res.documents[0].length > 0) {
									userContext = res.documents.reduce((acc, doc, index) => {
										const createdAtTimestamp = res.metadatas[index][0].created_at;
										const createdAtDate = new Date(createdAtTimestamp * 1000)
											.toISOString()
											.split('T')[0];
										acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
										return acc;
									}, []);
								}

								console.log(userContext);
							}
						}
					}
					responseMessage.userContext = userContext;

					// Heartbeat that reports usage while this response is generated.
					const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

					if (webSearchEnabled) {
						await getWebSearchResults(model.id, parentId, responseMessageId);
					}

					if (model?.owned_by === 'openai') {
						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}

					console.log('chatEventEmitter', chatEventEmitter);
					if (chatEventEmitter) clearInterval(chatEventEmitter);
				} else {
					toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
				}
			})
		);

		await chats.set(await getChatList(localStorage.token));
	};
  418. const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
  419. const responseMessage = history.messages[responseId];
  420. responseMessage.status = {
  421. done: false,
  422. action: 'web_search',
  423. description: $i18n.t('Generating search query')
  424. };
  425. messages = messages;
  426. const prompt = history.messages[parentId].content;
  427. let searchQuery = prompt;
  428. if (prompt.length > 100) {
  429. searchQuery = await generateChatSearchQuery(model, prompt);
  430. if (!searchQuery) {
  431. toast.warning($i18n.t('No search query generated'));
  432. responseMessage.status = {
  433. ...responseMessage.status,
  434. done: true,
  435. error: true,
  436. description: 'No search query generated'
  437. };
  438. messages = messages;
  439. return;
  440. }
  441. }
  442. responseMessage.status = {
  443. ...responseMessage.status,
  444. description: $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery })
  445. };
  446. messages = messages;
  447. const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
  448. console.log(error);
  449. toast.error(error);
  450. return null;
  451. });
  452. if (results) {
  453. responseMessage.status = {
  454. ...responseMessage.status,
  455. done: true,
  456. description: $i18n.t('Searched {{count}} sites', { count: results.filenames.length }),
  457. urls: results.filenames
  458. };
  459. if (responseMessage?.files ?? undefined === undefined) {
  460. responseMessage.files = [];
  461. }
  462. responseMessage.files.push({
  463. collection_name: results.collection_name,
  464. name: searchQuery,
  465. type: 'web_search_results',
  466. urls: results.filenames
  467. });
  468. messages = messages;
  469. } else {
  470. responseMessage.status = {
  471. ...responseMessage.status,
  472. done: true,
  473. error: true,
  474. description: 'No search results found'
  475. };
  476. messages = messages;
  477. }
  478. };
  479. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  480. model = model.id;
  481. const responseMessage = history.messages[responseMessageId];
  482. // Wait until history/message have been updated
  483. await tick();
  484. // Scroll down
  485. scrollToBottom();
  486. const messagesBody = [
  487. $settings.system || (responseMessage?.userContext ?? null)
  488. ? {
  489. role: 'system',
  490. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  491. responseMessage?.userContext ?? null
  492. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  493. : ''
  494. }`
  495. }
  496. : undefined,
  497. ...messages
  498. ]
  499. .filter((message) => message?.content?.trim())
  500. .map((message, idx, arr) => {
  501. // Prepare the base message object
  502. const baseMessage = {
  503. role: message.role,
  504. content: message.content
  505. };
  506. // Extract and format image URLs if any exist
  507. const imageUrls = message.files
  508. ?.filter((file) => file.type === 'image')
  509. .map((file) => file.url.slice(file.url.indexOf(',') + 1));
  510. // Add images array only if it contains elements
  511. if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
  512. baseMessage.images = imageUrls;
  513. }
  514. return baseMessage;
  515. });
  516. let lastImageIndex = -1;
  517. // Find the index of the last object with images
  518. messagesBody.forEach((item, index) => {
  519. if (item.images) {
  520. lastImageIndex = index;
  521. }
  522. });
  523. // Remove images from all but the last one
  524. messagesBody.forEach((item, index) => {
  525. if (index !== lastImageIndex) {
  526. delete item.images;
  527. }
  528. });
  529. const docs = messages
  530. .filter((message) => message?.files ?? null)
  531. .map((message) =>
  532. message.files.filter((item) =>
  533. ['doc', 'collection', 'web_search_results'].includes(item.type)
  534. )
  535. )
  536. .flat(1);
  537. const [res, controller] = await generateChatCompletion(localStorage.token, {
  538. model: model,
  539. messages: messagesBody,
  540. options: {
  541. ...($settings.params ?? {}),
  542. stop:
  543. $settings?.params?.stop ?? undefined
  544. ? $settings.params.stop.map((str) =>
  545. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  546. )
  547. : undefined,
  548. num_predict: $settings?.params?.max_tokens ?? undefined,
  549. repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
  550. },
  551. format: $settings.requestFormat ?? undefined,
  552. keep_alive: $settings.keepAlive ?? undefined,
  553. docs: docs.length > 0 ? docs : undefined,
  554. citations: docs.length > 0,
  555. chat_id: $chatId
  556. });
  557. if (res && res.ok) {
  558. console.log('controller', controller);
  559. const reader = res.body
  560. .pipeThrough(new TextDecoderStream())
  561. .pipeThrough(splitStream('\n'))
  562. .getReader();
  563. while (true) {
  564. const { value, done } = await reader.read();
  565. if (done || stopResponseFlag || _chatId !== $chatId) {
  566. responseMessage.done = true;
  567. messages = messages;
  568. if (stopResponseFlag) {
  569. controller.abort('User: Stop Response');
  570. } else {
  571. const messages = createMessagesList(responseMessageId);
  572. await chatCompletedHandler(model, messages);
  573. }
  574. break;
  575. }
  576. try {
  577. let lines = value.split('\n');
  578. for (const line of lines) {
  579. if (line !== '') {
  580. console.log(line);
  581. let data = JSON.parse(line);
  582. if ('citations' in data) {
  583. responseMessage.citations = data.citations;
  584. continue;
  585. }
  586. if ('detail' in data) {
  587. throw data;
  588. }
  589. if (data.done == false) {
  590. if (responseMessage.content == '' && data.message.content == '\n') {
  591. continue;
  592. } else {
  593. responseMessage.content += data.message.content;
  594. messages = messages;
  595. }
  596. } else {
  597. responseMessage.done = true;
  598. if (responseMessage.content == '') {
  599. responseMessage.error = {
  600. code: 400,
  601. content: `Oops! No text generated from Ollama, Please try again.`
  602. };
  603. }
  604. responseMessage.context = data.context ?? null;
  605. responseMessage.info = {
  606. total_duration: data.total_duration,
  607. load_duration: data.load_duration,
  608. sample_count: data.sample_count,
  609. sample_duration: data.sample_duration,
  610. prompt_eval_count: data.prompt_eval_count,
  611. prompt_eval_duration: data.prompt_eval_duration,
  612. eval_count: data.eval_count,
  613. eval_duration: data.eval_duration
  614. };
  615. messages = messages;
  616. if ($settings.notificationEnabled && !document.hasFocus()) {
  617. const notification = new Notification(
  618. selectedModelfile
  619. ? `${
  620. selectedModelfile.title.charAt(0).toUpperCase() +
  621. selectedModelfile.title.slice(1)
  622. }`
  623. : `${model}`,
  624. {
  625. body: responseMessage.content,
  626. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  627. }
  628. );
  629. }
  630. if ($settings.responseAutoCopy) {
  631. copyToClipboard(responseMessage.content);
  632. }
  633. if ($settings.responseAutoPlayback) {
  634. await tick();
  635. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  636. }
  637. }
  638. }
  639. }
  640. } catch (error) {
  641. console.log(error);
  642. if ('detail' in error) {
  643. toast.error(error.detail);
  644. }
  645. break;
  646. }
  647. if (autoScroll) {
  648. scrollToBottom();
  649. }
  650. }
  651. if ($chatId == _chatId) {
  652. if ($settings.saveChatHistory ?? true) {
  653. chat = await updateChatById(localStorage.token, _chatId, {
  654. messages: messages,
  655. history: history,
  656. models: selectedModels
  657. });
  658. await chats.set(await getChatList(localStorage.token));
  659. }
  660. }
  661. } else {
  662. if (res !== null) {
  663. const error = await res.json();
  664. console.log(error);
  665. if ('detail' in error) {
  666. toast.error(error.detail);
  667. responseMessage.error = { content: error.detail };
  668. } else {
  669. toast.error(error.error);
  670. responseMessage.error = { content: error.error };
  671. }
  672. } else {
  673. toast.error(
  674. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
  675. );
  676. responseMessage.error = {
  677. content: $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  678. provider: 'Ollama'
  679. })
  680. };
  681. }
  682. responseMessage.done = true;
  683. messages = messages;
  684. }
  685. stopResponseFlag = false;
  686. await tick();
  687. if (autoScroll) {
  688. scrollToBottom();
  689. }
  690. if (messages.length == 2 && messages.at(1).content !== '') {
  691. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  692. const _title = await generateChatTitle(userPrompt);
  693. await setChatTitle(_chatId, _title);
  694. }
  695. };
  696. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  697. const responseMessage = history.messages[responseMessageId];
  698. const docs = messages
  699. .filter((message) => message?.files ?? null)
  700. .map((message) =>
  701. message.files.filter((item) =>
  702. ['doc', 'collection', 'web_search_results'].includes(item.type)
  703. )
  704. )
  705. .flat(1);
  706. console.log(docs);
  707. scrollToBottom();
  708. try {
  709. const [res, controller] = await generateOpenAIChatCompletion(
  710. localStorage.token,
  711. {
  712. model: model.id,
  713. stream: true,
  714. stream_options:
  715. model.info?.meta?.capabilities?.usage ?? false
  716. ? {
  717. include_usage: true
  718. }
  719. : undefined,
  720. messages: [
  721. $settings.system || (responseMessage?.userContext ?? null)
  722. ? {
  723. role: 'system',
  724. content: `${promptTemplate($settings?.system ?? '', $user.name)}${
  725. responseMessage?.userContext ?? null
  726. ? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
  727. : ''
  728. }`
  729. }
  730. : undefined,
  731. ...messages
  732. ]
  733. .filter((message) => message?.content?.trim())
  734. .map((message, idx, arr) => ({
  735. role: message.role,
  736. ...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
  737. message.role === 'user'
  738. ? {
  739. content: [
  740. {
  741. type: 'text',
  742. text:
  743. arr.length - 1 !== idx
  744. ? message.content
  745. : message?.raContent ?? message.content
  746. },
  747. ...message.files
  748. .filter((file) => file.type === 'image')
  749. .map((file) => ({
  750. type: 'image_url',
  751. image_url: {
  752. url: file.url
  753. }
  754. }))
  755. ]
  756. }
  757. : {
  758. content:
  759. arr.length - 1 !== idx
  760. ? message.content
  761. : message?.raContent ?? message.content
  762. })
  763. })),
  764. seed: $settings?.params?.seed ?? undefined,
  765. stop:
  766. $settings?.params?.stop ?? undefined
  767. ? $settings.params.stop.map((str) =>
  768. decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
  769. )
  770. : undefined,
  771. temperature: $settings?.params?.temperature ?? undefined,
  772. top_p: $settings?.params?.top_p ?? undefined,
  773. frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
  774. max_tokens: $settings?.params?.max_tokens ?? undefined,
  775. docs: docs.length > 0 ? docs : undefined,
  776. citations: docs.length > 0,
  777. chat_id: $chatId
  778. },
  779. `${OPENAI_API_BASE_URL}`
  780. );
  781. // Wait until history/message have been updated
  782. await tick();
  783. scrollToBottom();
  784. if (res && res.ok && res.body) {
  785. const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
  786. let lastUsage = null;
  787. for await (const update of textStream) {
  788. const { value, done, citations, error, usage } = update;
  789. if (error) {
  790. await handleOpenAIError(error, null, model, responseMessage);
  791. break;
  792. }
  793. if (done || stopResponseFlag || _chatId !== $chatId) {
  794. responseMessage.done = true;
  795. messages = messages;
  796. if (stopResponseFlag) {
  797. controller.abort('User: Stop Response');
  798. } else {
  799. const messages = createMessagesList(responseMessageId);
  800. await chatCompletedHandler(model.id, messages);
  801. }
  802. break;
  803. }
  804. if (usage) {
  805. lastUsage = usage;
  806. }
  807. if (citations) {
  808. responseMessage.citations = citations;
  809. continue;
  810. }
  811. if (responseMessage.content == '' && value == '\n') {
  812. continue;
  813. } else {
  814. responseMessage.content += value;
  815. messages = messages;
  816. }
  817. if (autoScroll) {
  818. scrollToBottom();
  819. }
  820. }
  821. if ($settings.notificationEnabled && !document.hasFocus()) {
  822. const notification = new Notification(`OpenAI ${model}`, {
  823. body: responseMessage.content,
  824. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  825. });
  826. }
  827. if ($settings.responseAutoCopy) {
  828. copyToClipboard(responseMessage.content);
  829. }
  830. if ($settings.responseAutoPlayback) {
  831. await tick();
  832. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  833. }
  834. if (lastUsage) {
  835. responseMessage.info = { ...lastUsage, openai: true };
  836. }
  837. if ($chatId == _chatId) {
  838. if ($settings.saveChatHistory ?? true) {
  839. chat = await updateChatById(localStorage.token, _chatId, {
  840. models: selectedModels,
  841. messages: messages,
  842. history: history
  843. });
  844. await chats.set(await getChatList(localStorage.token));
  845. }
  846. }
  847. } else {
  848. await handleOpenAIError(null, res, model, responseMessage);
  849. }
  850. } catch (error) {
  851. await handleOpenAIError(error, null, model, responseMessage);
  852. }
  853. messages = messages;
  854. stopResponseFlag = false;
  855. await tick();
  856. if (autoScroll) {
  857. scrollToBottom();
  858. }
  859. if (messages.length == 2) {
  860. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  861. const _title = await generateChatTitle(userPrompt);
  862. await setChatTitle(_chatId, _title);
  863. }
  864. };
  865. const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
  866. let errorMessage = '';
  867. let innerError;
  868. if (error) {
  869. innerError = error;
  870. } else if (res !== null) {
  871. innerError = await res.json();
  872. }
  873. console.error(innerError);
  874. if ('detail' in innerError) {
  875. toast.error(innerError.detail);
  876. errorMessage = innerError.detail;
  877. } else if ('error' in innerError) {
  878. if ('message' in innerError.error) {
  879. toast.error(innerError.error.message);
  880. errorMessage = innerError.error.message;
  881. } else {
  882. toast.error(innerError.error);
  883. errorMessage = innerError.error;
  884. }
  885. } else if ('message' in innerError) {
  886. toast.error(innerError.message);
  887. errorMessage = innerError.message;
  888. }
  889. responseMessage.error = {
  890. content:
  891. $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
  892. provider: model.name ?? model.id
  893. }) +
  894. '\n' +
  895. errorMessage
  896. };
  897. responseMessage.done = true;
  898. messages = messages;
  899. };
  900. const stopResponse = () => {
  901. stopResponseFlag = true;
  902. console.log('stopResponse');
  903. };
  904. const regenerateResponse = async (message) => {
  905. console.log('regenerateResponse');
  906. if (messages.length != 0) {
  907. let userMessage = history.messages[message.parentId];
  908. let userPrompt = userMessage.content;
  909. if ((userMessage?.models ?? [...selectedModels]).length == 1) {
  910. await sendPrompt(userPrompt, userMessage.id);
  911. } else {
  912. await sendPrompt(userPrompt, userMessage.id, message.model);
  913. }
  914. }
  915. };
  916. const continueGeneration = async () => {
  917. console.log('continueGeneration');
  918. const _chatId = JSON.parse(JSON.stringify($chatId));
  919. if (messages.length != 0 && messages.at(-1).done == true) {
  920. const responseMessage = history.messages[history.currentId];
  921. responseMessage.done = false;
  922. await tick();
  923. const model = $models.filter((m) => m.id === responseMessage.model).at(0);
  924. if (model) {
  925. if (model?.owned_by === 'openai') {
  926. await sendPromptOpenAI(
  927. model,
  928. history.messages[responseMessage.parentId].content,
  929. responseMessage.id,
  930. _chatId
  931. );
  932. } else
  933. await sendPromptOllama(
  934. model,
  935. history.messages[responseMessage.parentId].content,
  936. responseMessage.id,
  937. _chatId
  938. );
  939. }
  940. } else {
  941. toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
  942. }
  943. };
  944. const generateChatTitle = async (userPrompt) => {
  945. if ($settings?.title?.auto ?? true) {
  946. const model = $models.find((model) => model.id === selectedModels[0]);
  947. const titleModelId =
  948. model?.owned_by === 'openai' ?? false
  949. ? $settings?.title?.modelExternal ?? selectedModels[0]
  950. : $settings?.title?.model ?? selectedModels[0];
  951. const titleModel = $models.find((model) => model.id === titleModelId);
  952. console.log(titleModel);
  953. const title = await generateTitle(
  954. localStorage.token,
  955. $settings?.title?.prompt ??
  956. $i18n.t(
  957. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
  958. ) + ' {{prompt}}',
  959. titleModelId,
  960. userPrompt,
  961. $chatId,
  962. titleModel?.owned_by === 'openai' ?? false
  963. ? `${OPENAI_API_BASE_URL}`
  964. : `${OLLAMA_API_BASE_URL}/v1`
  965. );
  966. return title;
  967. } else {
  968. return `${userPrompt}`;
  969. }
  970. };
  971. const generateChatSearchQuery = async (modelId: string, prompt: string) => {
  972. const model = $models.find((model) => model.id === modelId);
  973. const taskModelId =
  974. model?.owned_by === 'openai' ?? false
  975. ? $settings?.title?.modelExternal ?? modelId
  976. : $settings?.title?.model ?? modelId;
  977. const taskModel = $models.find((model) => model.id === taskModelId);
  978. const previousMessages = messages
  979. .filter((message) => message.role === 'user')
  980. .map((message) => message.content);
  981. return await generateSearchQuery(
  982. localStorage.token,
  983. taskModelId,
  984. previousMessages,
  985. prompt,
  986. taskModel?.owned_by === 'openai' ?? false
  987. ? `${OPENAI_API_BASE_URL}`
  988. : `${OLLAMA_API_BASE_URL}/v1`
  989. );
  990. };
  991. const setChatTitle = async (_chatId, _title) => {
  992. if (_chatId === $chatId) {
  993. title = _title;
  994. }
  995. if ($settings.saveChatHistory ?? true) {
  996. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  997. await chats.set(await getChatList(localStorage.token));
  998. }
  999. };
  1000. const getTags = async () => {
  1001. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  1002. return [];
  1003. });
  1004. };
  1005. const addTag = async (tagName) => {
  1006. const res = await addTagById(localStorage.token, $chatId, tagName);
  1007. tags = await getTags();
  1008. chat = await updateChatById(localStorage.token, $chatId, {
  1009. tags: tags
  1010. });
  1011. _tags.set(await getAllChatTags(localStorage.token));
  1012. };
  1013. const deleteTag = async (tagName) => {
  1014. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  1015. tags = await getTags();
  1016. chat = await updateChatById(localStorage.token, $chatId, {
  1017. tags: tags
  1018. });
  1019. _tags.set(await getAllChatTags(localStorage.token));
  1020. };
  1021. </script>
<!-- Document title: truncated chat title (30 chars max) plus app name, or just the app name. -->
<svelte:head>
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
<CallOverlay />
<!-- Render once loaded (for an existing chat) or immediately (no chatIdProp = new chat). -->
{#if !chatIdProp || (loaded && chatIdProp)}
<div
class="h-screen max-h-[100dvh] {$showSidebar
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<Navbar
{title}
bind:selectedModels
bind:showModelSelector
shareEnabled={messages.length > 0}
{chat}
{initNewChat}
/>
<!-- Admin banners: shown only on a fresh, empty chat; dismissible banners are
     filtered against the dismissed-IDs list persisted in localStorage. -->
{#if $banners.length > 0 && messages.length === 0 && !$chatId && selectedModels.length <= 1}
<div
class="absolute top-[4.25rem] w-full {$showSidebar ? 'md:max-w-[calc(100%-260px)]' : ''}"
>
<div class=" flex flex-col gap-1 w-full">
{#each $banners.filter( (b) => (b.dismissible ? !JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]').includes(b.id) : true) ) as banner}
<Banner
{banner}
on:dismiss={(e) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{/each}
</div>
</div>
{/if}
<!-- Scrollable message area; auto-scroll stays on only while the user is within
     ~5px of the bottom, so manual scroll-up pauses it. -->
<div class="flex flex-col flex-auto">
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col {chatIdProp ? 'py-4' : 'pt-2 pb-4'}">
<Messages
chatId={$chatId}
{selectedModels}
{processing}
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
</div>
<!-- Prompt composer: file attachments, web-search toggle, @-model mentions. -->
<MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:webSearchEnabled
bind:atSelectedModel
{selectedModels}
{messages}
{submitPrompt}
{stopResponse}
/>
</div>
</div>
{/if}