<!-- +page.svelte -->
  1. <script lang="ts">
  2. import { v4 as uuidv4 } from 'uuid';
  3. import toast from 'svelte-french-toast';
  4. import { onMount, tick } from 'svelte';
  5. import { goto } from '$app/navigation';
  6. import { page } from '$app/stores';
  7. import {
  8. models,
  9. modelfiles,
  10. user,
  11. settings,
  12. chats,
  13. chatId,
  14. config,
  15. tags as _tags
  16. } from '$lib/stores';
  17. import { copyToClipboard, splitStream } from '$lib/utils';
  18. import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
  19. import {
  20. addTagById,
  21. createNewChat,
  22. deleteTagById,
  23. getAllChatTags,
  24. getChatList,
  25. getTagsById,
  26. updateChatById
  27. } from '$lib/apis/chats';
  28. import { queryCollection, queryDoc } from '$lib/apis/rag';
  29. import { generateOpenAIChatCompletion } from '$lib/apis/openai';
  30. import MessageInput from '$lib/components/chat/MessageInput.svelte';
  31. import Messages from '$lib/components/chat/Messages.svelte';
  32. import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
  33. import Navbar from '$lib/components/layout/Navbar.svelte';
  34. import { RAGTemplate } from '$lib/utils/rag';
  35. import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
  36. import { WEBUI_BASE_URL } from '$lib/constants';
  37. let stopResponseFlag = false;
  38. let autoScroll = true;
  39. let processing = '';
  40. let currentRequestId = null;
  41. let selectedModels = [''];
  42. let selectedModelfile = null;
  43. $: selectedModelfile =
  44. selectedModels.length === 1 &&
  45. $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
  46. ? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
  47. : null;
  48. let selectedModelfiles = {};
  49. $: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
  50. const modelfile =
  51. $modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;
  52. return {
  53. ...a,
  54. ...(modelfile && { [tagName]: modelfile })
  55. };
  56. }, {});
  57. let chat = null;
  58. let tags = [];
  59. let title = '';
  60. let prompt = '';
  61. let files = [];
  62. let messages = [];
  63. let history = {
  64. messages: {},
  65. currentId: null
  66. };
  67. $: if (history.currentId !== null) {
  68. let _messages = [];
  69. let currentMessage = history.messages[history.currentId];
  70. while (currentMessage !== null) {
  71. _messages.unshift({ ...currentMessage });
  72. currentMessage =
  73. currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
  74. }
  75. messages = _messages;
  76. } else {
  77. messages = [];
  78. }
  79. onMount(async () => {
  80. await initNewChat();
  81. });
  82. //////////////////////////
  83. // Web functions
  84. //////////////////////////
  85. const initNewChat = async () => {
  86. if (currentRequestId !== null) {
  87. await cancelChatCompletion(localStorage.token, currentRequestId);
  88. currentRequestId = null;
  89. }
  90. window.history.replaceState(history.state, '', `/`);
  91. console.log('initNewChat');
  92. await chatId.set('');
  93. console.log($chatId);
  94. autoScroll = true;
  95. title = '';
  96. messages = [];
  97. history = {
  98. messages: {},
  99. currentId: null
  100. };
  101. console.log($config);
  102. if ($page.url.searchParams.get('models')) {
  103. selectedModels = $page.url.searchParams.get('models')?.split(',');
  104. } else if ($settings?.models) {
  105. selectedModels = $settings?.models;
  106. } else if ($config?.default_models) {
  107. selectedModels = $config?.default_models.split(',');
  108. } else {
  109. selectedModels = [''];
  110. }
  111. selectedModels = selectedModels.map((modelId) =>
  112. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  113. );
  114. let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
  115. settings.set({
  116. ..._settings
  117. });
  118. };
  119. const scrollToBottom = () => {
  120. const element = document.getElementById('messages-container');
  121. element.scrollTop = element.scrollHeight;
  122. };
  123. //////////////////////////
  124. // Ollama functions
  125. //////////////////////////
  126. const submitPrompt = async (userPrompt, _user = null) => {
  127. console.log('submitPrompt', $chatId);
  128. selectedModels = selectedModels.map((modelId) =>
  129. $models.map((m) => m.id).includes(modelId) ? modelId : ''
  130. );
  131. if (selectedModels.includes('')) {
  132. toast.error('Model not selected');
  133. } else if (messages.length != 0 && messages.at(-1).done != true) {
  134. // Response not done
  135. console.log('wait');
  136. } else if (
  137. files.length > 0 &&
  138. files.filter((file) => file.upload_status === false).length > 0
  139. ) {
  140. // Upload not done
  141. toast.error(
  142. `Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
  143. );
  144. } else {
  145. // Reset chat message textarea height
  146. document.getElementById('chat-textarea').style.height = '';
  147. // Create user message
  148. let userMessageId = uuidv4();
  149. let userMessage = {
  150. id: userMessageId,
  151. parentId: messages.length !== 0 ? messages.at(-1).id : null,
  152. childrenIds: [],
  153. role: 'user',
  154. user: _user ?? undefined,
  155. content: userPrompt,
  156. files: files.length > 0 ? files : undefined,
  157. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  158. };
  159. // Add message to history and Set currentId to messageId
  160. history.messages[userMessageId] = userMessage;
  161. history.currentId = userMessageId;
  162. // Append messageId to childrenIds of parent message
  163. if (messages.length !== 0) {
  164. history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
  165. }
  166. // Wait until history/message have been updated
  167. await tick();
  168. // Create new chat if only one message in messages
  169. if (messages.length == 1) {
  170. if ($settings.saveChatHistory ?? true) {
  171. chat = await createNewChat(localStorage.token, {
  172. id: $chatId,
  173. title: 'New Chat',
  174. models: selectedModels,
  175. system: $settings.system ?? undefined,
  176. options: {
  177. ...($settings.options ?? {})
  178. },
  179. messages: messages,
  180. history: history,
  181. tags: [],
  182. timestamp: Date.now()
  183. });
  184. await chats.set(await getChatList(localStorage.token));
  185. await chatId.set(chat.id);
  186. } else {
  187. await chatId.set('local');
  188. }
  189. await tick();
  190. }
  191. // Reset chat input textarea
  192. prompt = '';
  193. files = [];
  194. // Send prompt
  195. await sendPrompt(userPrompt, userMessageId);
  196. }
  197. };
  198. const sendPrompt = async (prompt, parentId) => {
  199. const _chatId = JSON.parse(JSON.stringify($chatId));
  200. const docs = messages
  201. .filter((message) => message?.files ?? null)
  202. .map((message) =>
  203. message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
  204. )
  205. .flat(1);
  206. console.log(docs);
  207. if (docs.length > 0) {
  208. processing = 'Reading';
  209. const query = history.messages[parentId].content;
  210. let relevantContexts = await Promise.all(
  211. docs.map(async (doc) => {
  212. if (doc.type === 'collection') {
  213. return await queryCollection(localStorage.token, doc.collection_names, query, 4).catch(
  214. (error) => {
  215. console.log(error);
  216. return null;
  217. }
  218. );
  219. } else {
  220. return await queryDoc(localStorage.token, doc.collection_name, query, 4).catch(
  221. (error) => {
  222. console.log(error);
  223. return null;
  224. }
  225. );
  226. }
  227. })
  228. );
  229. relevantContexts = relevantContexts.filter((context) => context);
  230. const contextString = relevantContexts.reduce((a, context, i, arr) => {
  231. return `${a}${context.documents.join(' ')}\n`;
  232. }, '');
  233. console.log(contextString);
  234. history.messages[parentId].raContent = await RAGTemplate(
  235. localStorage.token,
  236. contextString,
  237. query
  238. );
  239. history.messages[parentId].contexts = relevantContexts;
  240. await tick();
  241. processing = '';
  242. }
  243. await Promise.all(
  244. selectedModels.map(async (modelId) => {
  245. const model = $models.filter((m) => m.id === modelId).at(0);
  246. if (model) {
  247. // Create response message
  248. let responseMessageId = uuidv4();
  249. let responseMessage = {
  250. parentId: parentId,
  251. id: responseMessageId,
  252. childrenIds: [],
  253. role: 'assistant',
  254. content: '',
  255. model: model.id,
  256. timestamp: Math.floor(Date.now() / 1000) // Unix epoch
  257. };
  258. // Add message to history and Set currentId to messageId
  259. history.messages[responseMessageId] = responseMessage;
  260. history.currentId = responseMessageId;
  261. // Append messageId to childrenIds of parent message
  262. if (parentId !== null) {
  263. history.messages[parentId].childrenIds = [
  264. ...history.messages[parentId].childrenIds,
  265. responseMessageId
  266. ];
  267. }
  268. if (model?.external) {
  269. await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
  270. } else if (model) {
  271. await sendPromptOllama(model, prompt, responseMessageId, _chatId);
  272. }
  273. } else {
  274. toast.error(`Model ${modelId} not found`);
  275. }
  276. })
  277. );
  278. await chats.set(await getChatList(localStorage.token));
  279. };
  280. const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
  281. model = model.id;
  282. const responseMessage = history.messages[responseMessageId];
  283. // Wait until history/message have been updated
  284. await tick();
  285. // Scroll down
  286. scrollToBottom();
  287. const messagesBody = [
  288. $settings.system
  289. ? {
  290. role: 'system',
  291. content: $settings.system
  292. }
  293. : undefined,
  294. ...messages.filter((message) => !message.deleted)
  295. ]
  296. .filter((message) => message)
  297. .map((message, idx, arr) => ({
  298. role: message.role,
  299. content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content,
  300. ...(message.files && {
  301. images: message.files
  302. .filter((file) => file.type === 'image')
  303. .map((file) => file.url.slice(file.url.indexOf(',') + 1))
  304. })
  305. }));
  306. let lastImageIndex = -1;
  307. // Find the index of the last object with images
  308. messagesBody.forEach((item, index) => {
  309. if (item.images) {
  310. lastImageIndex = index;
  311. }
  312. });
  313. // Remove images from all but the last one
  314. messagesBody.forEach((item, index) => {
  315. if (index !== lastImageIndex) {
  316. delete item.images;
  317. }
  318. });
  319. const [res, controller] = await generateChatCompletion(localStorage.token, {
  320. model: model,
  321. messages: messagesBody,
  322. options: {
  323. ...($settings.options ?? {})
  324. },
  325. format: $settings.requestFormat ?? undefined,
  326. keep_alive: $settings.keepAlive ?? undefined
  327. });
  328. if (res && res.ok) {
  329. console.log('controller', controller);
  330. const reader = res.body
  331. .pipeThrough(new TextDecoderStream())
  332. .pipeThrough(splitStream('\n'))
  333. .getReader();
  334. while (true) {
  335. const { value, done } = await reader.read();
  336. if (done || stopResponseFlag || _chatId !== $chatId) {
  337. responseMessage.done = true;
  338. messages = messages;
  339. if (stopResponseFlag) {
  340. controller.abort('User: Stop Response');
  341. await cancelChatCompletion(localStorage.token, currentRequestId);
  342. }
  343. currentRequestId = null;
  344. break;
  345. }
  346. try {
  347. let lines = value.split('\n');
  348. for (const line of lines) {
  349. if (line !== '') {
  350. console.log(line);
  351. let data = JSON.parse(line);
  352. if ('detail' in data) {
  353. throw data;
  354. }
  355. if ('id' in data) {
  356. console.log(data);
  357. currentRequestId = data.id;
  358. } else {
  359. if (data.done == false) {
  360. if (responseMessage.content == '' && data.message.content == '\n') {
  361. continue;
  362. } else {
  363. responseMessage.content += data.message.content;
  364. messages = messages;
  365. }
  366. } else {
  367. responseMessage.done = true;
  368. if (responseMessage.content == '') {
  369. responseMessage.error = true;
  370. responseMessage.content =
  371. 'Oops! No text generated from Ollama, Please try again.';
  372. }
  373. responseMessage.context = data.context ?? null;
  374. responseMessage.info = {
  375. total_duration: data.total_duration,
  376. load_duration: data.load_duration,
  377. sample_count: data.sample_count,
  378. sample_duration: data.sample_duration,
  379. prompt_eval_count: data.prompt_eval_count,
  380. prompt_eval_duration: data.prompt_eval_duration,
  381. eval_count: data.eval_count,
  382. eval_duration: data.eval_duration
  383. };
  384. messages = messages;
  385. if ($settings.notificationEnabled && !document.hasFocus()) {
  386. const notification = new Notification(
  387. selectedModelfile
  388. ? `${
  389. selectedModelfile.title.charAt(0).toUpperCase() +
  390. selectedModelfile.title.slice(1)
  391. }`
  392. : `${model}`,
  393. {
  394. body: responseMessage.content,
  395. icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
  396. }
  397. );
  398. }
  399. if ($settings.responseAutoCopy) {
  400. copyToClipboard(responseMessage.content);
  401. }
  402. if ($settings.responseAutoPlayback) {
  403. await tick();
  404. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  405. }
  406. }
  407. }
  408. }
  409. }
  410. } catch (error) {
  411. console.log(error);
  412. if ('detail' in error) {
  413. toast.error(error.detail);
  414. }
  415. break;
  416. }
  417. if (autoScroll) {
  418. scrollToBottom();
  419. }
  420. }
  421. if ($chatId == _chatId) {
  422. if ($settings.saveChatHistory ?? true) {
  423. chat = await updateChatById(localStorage.token, _chatId, {
  424. messages: messages,
  425. history: history
  426. });
  427. await chats.set(await getChatList(localStorage.token));
  428. }
  429. }
  430. } else {
  431. if (res !== null) {
  432. const error = await res.json();
  433. console.log(error);
  434. if ('detail' in error) {
  435. toast.error(error.detail);
  436. responseMessage.content = error.detail;
  437. } else {
  438. toast.error(error.error);
  439. responseMessage.content = error.error;
  440. }
  441. } else {
  442. toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
  443. responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
  444. }
  445. responseMessage.error = true;
  446. responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
  447. responseMessage.done = true;
  448. messages = messages;
  449. }
  450. stopResponseFlag = false;
  451. await tick();
  452. if (autoScroll) {
  453. scrollToBottom();
  454. }
  455. if (messages.length == 2 && messages.at(1).content !== '') {
  456. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  457. await generateChatTitle(_chatId, userPrompt);
  458. }
  459. };
  460. const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
  461. const responseMessage = history.messages[responseMessageId];
  462. scrollToBottom();
  463. const res = await generateOpenAIChatCompletion(
  464. localStorage.token,
  465. {
  466. model: model.id,
  467. stream: true,
  468. messages: [
  469. $settings.system
  470. ? {
  471. role: 'system',
  472. content: $settings.system
  473. }
  474. : undefined,
  475. ...messages.filter((message) => !message.deleted)
  476. ]
  477. .filter((message) => message)
  478. .map((message, idx, arr) => ({
  479. role: message.role,
  480. ...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
  481. ? {
  482. content: [
  483. {
  484. type: 'text',
  485. text:
  486. arr.length - 1 !== idx
  487. ? message.content
  488. : message?.raContent ?? message.content
  489. },
  490. ...message.files
  491. .filter((file) => file.type === 'image')
  492. .map((file) => ({
  493. type: 'image_url',
  494. image_url: {
  495. url: file.url
  496. }
  497. }))
  498. ]
  499. }
  500. : {
  501. content:
  502. arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
  503. })
  504. })),
  505. seed: $settings?.options?.seed ?? undefined,
  506. stop: $settings?.options?.stop ?? undefined,
  507. temperature: $settings?.options?.temperature ?? undefined,
  508. top_p: $settings?.options?.top_p ?? undefined,
  509. num_ctx: $settings?.options?.num_ctx ?? undefined,
  510. frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
  511. max_tokens: $settings?.options?.num_predict ?? undefined
  512. },
  513. model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
  514. );
  515. if (res && res.ok) {
  516. const reader = res.body
  517. .pipeThrough(new TextDecoderStream())
  518. .pipeThrough(splitStream('\n'))
  519. .getReader();
  520. while (true) {
  521. const { value, done } = await reader.read();
  522. if (done || stopResponseFlag || _chatId !== $chatId) {
  523. responseMessage.done = true;
  524. messages = messages;
  525. break;
  526. }
  527. try {
  528. let lines = value.split('\n');
  529. for (const line of lines) {
  530. if (line !== '') {
  531. console.log(line);
  532. if (line === 'data: [DONE]') {
  533. responseMessage.done = true;
  534. messages = messages;
  535. } else {
  536. let data = JSON.parse(line.replace(/^data: /, ''));
  537. console.log(data);
  538. if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
  539. continue;
  540. } else {
  541. responseMessage.content += data.choices[0].delta.content ?? '';
  542. messages = messages;
  543. }
  544. }
  545. }
  546. }
  547. } catch (error) {
  548. console.log(error);
  549. }
  550. if ($settings.notificationEnabled && !document.hasFocus()) {
  551. const notification = new Notification(`OpenAI ${model}`, {
  552. body: responseMessage.content,
  553. icon: `${WEBUI_BASE_URL}/static/favicon.png`
  554. });
  555. }
  556. if ($settings.responseAutoCopy) {
  557. copyToClipboard(responseMessage.content);
  558. }
  559. if ($settings.responseAutoPlayback) {
  560. await tick();
  561. document.getElementById(`speak-button-${responseMessage.id}`)?.click();
  562. }
  563. if (autoScroll) {
  564. scrollToBottom();
  565. }
  566. }
  567. if ($chatId == _chatId) {
  568. if ($settings.saveChatHistory ?? true) {
  569. chat = await updateChatById(localStorage.token, _chatId, {
  570. messages: messages,
  571. history: history
  572. });
  573. await chats.set(await getChatList(localStorage.token));
  574. }
  575. }
  576. } else {
  577. if (res !== null) {
  578. const error = await res.json();
  579. console.log(error);
  580. if ('detail' in error) {
  581. toast.error(error.detail);
  582. responseMessage.content = error.detail;
  583. } else {
  584. if ('message' in error.error) {
  585. toast.error(error.error.message);
  586. responseMessage.content = error.error.message;
  587. } else {
  588. toast.error(error.error);
  589. responseMessage.content = error.error;
  590. }
  591. }
  592. } else {
  593. toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
  594. responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
  595. }
  596. responseMessage.error = true;
  597. responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
  598. responseMessage.done = true;
  599. messages = messages;
  600. }
  601. stopResponseFlag = false;
  602. await tick();
  603. if (autoScroll) {
  604. scrollToBottom();
  605. }
  606. if (messages.length == 2) {
  607. window.history.replaceState(history.state, '', `/c/${_chatId}`);
  608. await setChatTitle(_chatId, userPrompt);
  609. }
  610. };
  611. const stopResponse = () => {
  612. stopResponseFlag = true;
  613. console.log('stopResponse');
  614. };
  615. const regenerateResponse = async () => {
  616. console.log('regenerateResponse');
  617. if (messages.length != 0 && messages.at(-1).done == true) {
  618. messages.splice(messages.length - 1, 1);
  619. messages = messages;
  620. let userMessage = messages.at(-1);
  621. let userPrompt = userMessage.content;
  622. await sendPrompt(userPrompt, userMessage.id);
  623. }
  624. };
  625. const continueGeneration = async () => {
  626. console.log('continueGeneration');
  627. const _chatId = JSON.parse(JSON.stringify($chatId));
  628. if (messages.length != 0 && messages.at(-1).done == true) {
  629. const responseMessage = history.messages[history.currentId];
  630. responseMessage.done = false;
  631. await tick();
  632. const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
  633. if (modelTag?.external) {
  634. await sendPromptOpenAI(
  635. responseMessage.model,
  636. history.messages[responseMessage.parentId].content,
  637. responseMessage.id,
  638. _chatId
  639. );
  640. } else if (modelTag) {
  641. await sendPromptOllama(
  642. responseMessage.model,
  643. history.messages[responseMessage.parentId].content,
  644. responseMessage.id,
  645. _chatId
  646. );
  647. } else {
  648. toast.error(`Model ${model} not found`);
  649. }
  650. }
  651. };
  652. const generateChatTitle = async (_chatId, userPrompt) => {
  653. if ($settings.titleAutoGenerate ?? true) {
  654. const title = await generateTitle(
  655. localStorage.token,
  656. $settings?.titleGenerationPrompt ??
  657. "Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': {{prompt}}",
  658. $settings?.titleAutoGenerateModel ?? selectedModels[0],
  659. userPrompt
  660. );
  661. if (title) {
  662. await setChatTitle(_chatId, title);
  663. }
  664. } else {
  665. await setChatTitle(_chatId, `${userPrompt}`);
  666. }
  667. };
  668. const getTags = async () => {
  669. return await getTagsById(localStorage.token, $chatId).catch(async (error) => {
  670. return [];
  671. });
  672. };
  673. const addTag = async (tagName) => {
  674. const res = await addTagById(localStorage.token, $chatId, tagName);
  675. tags = await getTags();
  676. chat = await updateChatById(localStorage.token, $chatId, {
  677. tags: tags
  678. });
  679. _tags.set(await getAllChatTags(localStorage.token));
  680. };
  681. const deleteTag = async (tagName) => {
  682. const res = await deleteTagById(localStorage.token, $chatId, tagName);
  683. tags = await getTags();
  684. chat = await updateChatById(localStorage.token, $chatId, {
  685. tags: tags
  686. });
  687. _tags.set(await getAllChatTags(localStorage.token));
  688. };
  689. const setChatTitle = async (_chatId, _title) => {
  690. if (_chatId === $chatId) {
  691. title = _title;
  692. }
  693. if ($settings.saveChatHistory ?? true) {
  694. chat = await updateChatById(localStorage.token, _chatId, { title: _title });
  695. await chats.set(await getChatList(localStorage.token));
  696. }
  697. };
  698. </script>
  699. <div class="h-screen max-h-[100dvh] w-full flex flex-col">
  700. <Navbar {title} shareEnabled={messages.length > 0} {initNewChat} {tags} {addTag} {deleteTag} />
  701. <div class="flex flex-col flex-auto">
  702. <div
  703. class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
  704. id="messages-container"
  705. on:scroll={(e) => {
  706. autoScroll = e.target.scrollHeight - e.target.scrollTop <= e.target.clientHeight + 50;
  707. }}
  708. >
  709. <div
  710. class="{$settings?.fullScreenMode ?? null
  711. ? 'max-w-full'
  712. : 'max-w-2xl md:px-0'} mx-auto w-full px-4"
  713. >
  714. <ModelSelector
  715. bind:selectedModels
  716. disabled={messages.length > 0 && !selectedModels.includes('')}
  717. />
  718. </div>
  719. <div class=" h-full w-full flex flex-col py-8">
  720. <Messages
  721. chatId={$chatId}
  722. {selectedModels}
  723. {selectedModelfiles}
  724. {processing}
  725. bind:history
  726. bind:messages
  727. bind:autoScroll
  728. bottomPadding={files.length > 0}
  729. {sendPrompt}
  730. {continueGeneration}
  731. {regenerateResponse}
  732. />
  733. </div>
  734. </div>
  735. <MessageInput
  736. bind:files
  737. bind:prompt
  738. bind:autoScroll
  739. suggestionPrompts={selectedModelfile?.suggestionPrompts ?? $config.default_prompt_suggestions}
  740. {messages}
  741. {submitPrompt}
  742. {stopResponse}
  743. />
  744. </div>
  745. </div>