+page.svelte

<script lang="ts">
  import { v4 as uuidv4 } from 'uuid';
  import toast from 'svelte-french-toast';

  import { onMount, tick } from 'svelte';
  import { goto } from '$app/navigation';
  import { page } from '$app/stores';

  import { models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
  import { OLLAMA_API_BASE_URL } from '$lib/constants';

  import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
  import { copyToClipboard, splitStream } from '$lib/utils';

  import MessageInput from '$lib/components/chat/MessageInput.svelte';
  import Messages from '$lib/components/chat/Messages.svelte';
  import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
  import Navbar from '$lib/components/layout/Navbar.svelte';

  import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
  let stopResponseFlag = false;
  let autoScroll = true;

  let selectedModels = [''];

  let selectedModelfile = null;
  $: selectedModelfile =
    selectedModels.length === 1 &&
    $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
      ? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
      : null;

  let selectedModelfiles = {};
  $: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
    const modelfile =
      $modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;

    return {
      ...a,
      ...(modelfile && { [tagName]: modelfile })
    };
  }, {});
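  // Chat state. `history` keeps every message in a tree keyed by message id (each node
  // carries `parentId` and `childrenIds`), so regenerated responses can branch;
  // `currentId` points at the active leaf, and the reactive block below walks the
  // parent chain from that leaf to rebuild the flat `messages` array.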
  let chat = null;

  let title = '';
  let prompt = '';
  let files = [];

  let messages = [];
  let history = {
    messages: {},
    currentId: null
  };

  $: if (history.currentId !== null) {
    let _messages = [];

    let currentMessage = history.messages[history.currentId];
    while (currentMessage !== null) {
      _messages.unshift({ ...currentMessage });
      currentMessage =
        currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
    }
    messages = _messages;
  } else {
    messages = [];
  }
  onMount(async () => {
    await initNewChat();
  });

  //////////////////////////
  // Web functions
  //////////////////////////

  const initNewChat = async () => {
    window.history.replaceState(history.state, '', `/`);
    console.log('initNewChat');

    await chatId.set('');
    console.log($chatId);

    autoScroll = true;

    title = '';
    messages = [];
    history = {
      messages: {},
      currentId: null
    };

    selectedModels = $page.url.searchParams.get('models')
      ? $page.url.searchParams.get('models')?.split(',')
      : $settings.models ?? [''];

    let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
    settings.set({
      ..._settings
    });
  };
  //////////////////////////
  // Ollama functions
  //////////////////////////

  const sendPrompt = async (prompt, parentId) => {
    const _chatId = JSON.parse(JSON.stringify($chatId));

    await Promise.all(
      selectedModels.map(async (model) => {
        console.log(model);
        if ($models.filter((m) => m.name === model)[0].external) {
          await sendPromptOpenAI(model, prompt, parentId, _chatId);
        } else {
          await sendPromptOllama(model, prompt, parentId, _chatId);
        }
      })
    );

    await chats.set(await getChatList(localStorage.token));
  };
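  // Streams an assistant reply from the Ollama backend. The response body is
  // newline-delimited JSON: it is decoded with TextDecoderStream, split into lines
  // via splitStream('\n'), and each chunk's content is appended to the response
  // message until a chunk with `done: true` arrives carrying context and timing stats.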
  const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
    // Create response message
    let responseMessageId = uuidv4();
    let responseMessage = {
      parentId: parentId,
      id: responseMessageId,
      childrenIds: [],
      role: 'assistant',
      content: '',
      model: model
    };

    // Add message to history and Set currentId to messageId
    history.messages[responseMessageId] = responseMessage;
    history.currentId = responseMessageId;

    // Append messageId to childrenIds of parent message
    if (parentId !== null) {
      history.messages[parentId].childrenIds = [
        ...history.messages[parentId].childrenIds,
        responseMessageId
      ];
    }

    // Wait until history/message have been updated
    await tick();

    // Scroll down
    window.scrollTo({ top: document.body.scrollHeight });

    const res = await generateChatCompletion(
      $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
      localStorage.token,
      {
        model: model,
        messages: [
          $settings.system
            ? {
                role: 'system',
                content: $settings.system
              }
            : undefined,
          ...messages
        ]
          .filter((message) => message)
          .map((message) => ({
            role: message.role,
            content: message.content,
            ...(message.files && {
              images: message.files
                .filter((file) => file.type === 'image')
                .map((file) => file.url.slice(file.url.indexOf(',') + 1))
            })
          })),
        options: {
          ...($settings.options ?? {})
        },
        format: $settings.requestFormat ?? undefined
      }
    );

    if (res && res.ok) {
      const reader = res.body
        .pipeThrough(new TextDecoderStream())
        .pipeThrough(splitStream('\n'))
        .getReader();

      while (true) {
        const { value, done } = await reader.read();
        if (done || stopResponseFlag || _chatId !== $chatId) {
          responseMessage.done = true;
          messages = messages;
          break;
        }

        try {
          let lines = value.split('\n');

          for (const line of lines) {
            if (line !== '') {
              console.log(line);
              let data = JSON.parse(line);

              if ('detail' in data) {
                throw data;
              }

              if (data.done == false) {
                if (responseMessage.content == '' && data.message.content == '\n') {
                  continue;
                } else {
                  responseMessage.content += data.message.content;
                  messages = messages;
                }
              } else {
                responseMessage.done = true;

                if (responseMessage.content == '') {
                  responseMessage.error = true;
                  responseMessage.content = 'Oops! No text generated from Ollama. Please try again.';
                }

                responseMessage.context = data.context ?? null;
                responseMessage.info = {
                  total_duration: data.total_duration,
                  load_duration: data.load_duration,
                  sample_count: data.sample_count,
                  sample_duration: data.sample_duration,
                  prompt_eval_count: data.prompt_eval_count,
                  prompt_eval_duration: data.prompt_eval_duration,
                  eval_count: data.eval_count,
                  eval_duration: data.eval_duration
                };
                messages = messages;

                if ($settings.notificationEnabled && !document.hasFocus()) {
                  const notification = new Notification(
                    selectedModelfile
                      ? `${
                          selectedModelfile.title.charAt(0).toUpperCase() +
                          selectedModelfile.title.slice(1)
                        }`
                      : `Ollama - ${model}`,
                    {
                      body: responseMessage.content,
                      icon: selectedModelfile?.imageUrl ?? '/favicon.png'
                    }
                  );
                }

                if ($settings.responseAutoCopy) {
                  copyToClipboard(responseMessage.content);
                }
              }
            }
          }
        } catch (error) {
          console.log(error);
          if ('detail' in error) {
            toast.error(error.detail);
          }
          break;
        }

        if (autoScroll) {
          window.scrollTo({ top: document.body.scrollHeight });
        }
      }

      if ($chatId == _chatId) {
        chat = await updateChatById(localStorage.token, _chatId, {
          messages: messages,
          history: history
        });
        await chats.set(await getChatList(localStorage.token));
      }
    } else {
      if (res !== null) {
        const error = await res.json();
        console.log(error);
        if ('detail' in error) {
          toast.error(error.detail);
          responseMessage.content = error.detail;
        } else {
          toast.error(error.error);
          responseMessage.content = error.error;
        }
      } else {
        toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
        responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
      }

      responseMessage.error = true;
      responseMessage.done = true;
      messages = messages;
    }

    stopResponseFlag = false;
    await tick();

    if (autoScroll) {
      window.scrollTo({ top: document.body.scrollHeight });
    }

    if (messages.length == 2 && messages.at(1).content !== '') {
      window.history.replaceState(history.state, '', `/c/${_chatId}`);
      await generateChatTitle(_chatId, userPrompt);
    }
  };
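  // Streams an assistant reply from an OpenAI-compatible endpoint. The request sets
  // `stream: true`, so the body arrives as server-sent events; each `data: ...` line is
  // parsed and its delta content appended, and the stream ends on `data: [DONE]`.
  // Attached image files are sent as `image_url` content parts alongside the text.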
  const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
    if ($settings.OPENAI_API_KEY) {
      if (models) {
        let responseMessageId = uuidv4();
        let responseMessage = {
          parentId: parentId,
          id: responseMessageId,
          childrenIds: [],
          role: 'assistant',
          content: '',
          model: model
        };

        history.messages[responseMessageId] = responseMessage;
        history.currentId = responseMessageId;

        if (parentId !== null) {
          history.messages[parentId].childrenIds = [
            ...history.messages[parentId].childrenIds,
            responseMessageId
          ];
        }

        window.scrollTo({ top: document.body.scrollHeight });

        const res = await fetch(
          `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
          {
            method: 'POST',
            headers: {
              Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
              'Content-Type': 'application/json'
            },
            body: JSON.stringify({
              model: model,
              stream: true,
              messages: [
                $settings.system
                  ? {
                      role: 'system',
                      content: $settings.system
                    }
                  : undefined,
                ...messages
              ]
                .filter((message) => message)
                .map((message) => ({
                  role: message.role,
                  ...(message.files
                    ? {
                        content: [
                          {
                            type: 'text',
                            text: message.content
                          },
                          ...message.files
                            .filter((file) => file.type === 'image')
                            .map((file) => ({
                              type: 'image_url',
                              image_url: {
                                url: file.url
                              }
                            }))
                        ]
                      }
                    : { content: message.content })
                })),
              seed: $settings.options.seed ?? undefined,
              stop: $settings.options.stop ?? undefined,
              temperature: $settings.options.temperature ?? undefined,
              top_p: $settings.options.top_p ?? undefined,
              num_ctx: $settings.options.num_ctx ?? undefined,
              frequency_penalty: $settings.options.repeat_penalty ?? undefined,
              max_tokens: $settings.options.num_predict ?? undefined
            })
          }
        ).catch((err) => {
          console.log(err);
          return null;
        });

        if (res && res.ok) {
          const reader = res.body
            .pipeThrough(new TextDecoderStream())
            .pipeThrough(splitStream('\n'))
            .getReader();

          while (true) {
            const { value, done } = await reader.read();
            if (done || stopResponseFlag || _chatId !== $chatId) {
              responseMessage.done = true;
              messages = messages;
              break;
            }

            try {
              let lines = value.split('\n');

              for (const line of lines) {
                if (line !== '') {
                  console.log(line);
                  if (line === 'data: [DONE]') {
                    responseMessage.done = true;
                    messages = messages;
                  } else {
                    let data = JSON.parse(line.replace(/^data: /, ''));
                    console.log(data);

                    if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
                      continue;
                    } else {
                      responseMessage.content += data.choices[0].delta.content ?? '';
                      messages = messages;
                    }
                  }
                }
              }
            } catch (error) {
              console.log(error);
            }

            if ($settings.notificationEnabled && !document.hasFocus()) {
              const notification = new Notification(`OpenAI ${model}`, {
                body: responseMessage.content,
                icon: '/favicon.png'
              });
            }

            if ($settings.responseAutoCopy) {
              copyToClipboard(responseMessage.content);
            }

            if (autoScroll) {
              window.scrollTo({ top: document.body.scrollHeight });
            }
          }

          if ($chatId == _chatId) {
            chat = await updateChatById(localStorage.token, _chatId, {
              messages: messages,
              history: history
            });
            await chats.set(await getChatList(localStorage.token));
          }
        } else {
          if (res !== null) {
            const error = await res.json();
            console.log(error);
            if ('detail' in error) {
              toast.error(error.detail);
              responseMessage.content = error.detail;
            } else {
              if ('message' in error.error) {
                toast.error(error.error.message);
                responseMessage.content = error.error.message;
              } else {
                toast.error(error.error);
                responseMessage.content = error.error;
              }
            }
          } else {
            toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
            responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
          }

          responseMessage.error = true;
          responseMessage.done = true;
          messages = messages;
        }

        stopResponseFlag = false;
        await tick();

        if (autoScroll) {
          window.scrollTo({ top: document.body.scrollHeight });
        }

        if (messages.length == 2) {
          window.history.replaceState(history.state, '', `/c/${_chatId}`);
          await setChatTitle(_chatId, userPrompt);
        }
      }
    }
  };
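  // Handles a user submission: appends the user message to the history tree, creates
  // the chat record on the backend when this is the first message, clears the input,
  // and then fans the prompt out to every selected model via sendPrompt.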
  const submitPrompt = async (userPrompt) => {
    console.log('submitPrompt', $chatId);

    if (selectedModels.includes('')) {
      toast.error('Model not selected');
    } else if (messages.length != 0 && messages.at(-1).done != true) {
      // Response not done
      console.log('wait');
    } else {
      // Reset chat message textarea height
      document.getElementById('chat-textarea').style.height = '';

      // Create user message
      let userMessageId = uuidv4();
      let userMessage = {
        id: userMessageId,
        parentId: messages.length !== 0 ? messages.at(-1).id : null,
        childrenIds: [],
        role: 'user',
        content: userPrompt,
        files: files.length > 0 ? files : undefined
      };

      // Add message to history and Set currentId to messageId
      history.messages[userMessageId] = userMessage;
      history.currentId = userMessageId;

      // Append messageId to childrenIds of parent message
      if (messages.length !== 0) {
        history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
      }

      // Wait until history/message have been updated
      await tick();

      // Create new chat if only one message in messages
      if (messages.length == 1) {
        chat = await createNewChat(localStorage.token, {
          id: $chatId,
          title: 'New Chat',
          models: selectedModels,
          system: $settings.system ?? undefined,
          options: {
            ...($settings.options ?? {})
          },
          messages: messages,
          history: history,
          timestamp: Date.now()
        });
        await chats.set(await getChatList(localStorage.token));
        await chatId.set(chat.id);
        await tick();
      }

      // Reset chat input textarea
      prompt = '';
      files = [];

      // Send prompt
      await sendPrompt(userPrompt, userMessageId);
    }
  };
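  // stopResponse only raises a flag; the streaming loops above check stopResponseFlag
  // on every chunk and close out the response message when it is set.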
  const stopResponse = () => {
    stopResponseFlag = true;
    console.log('stopResponse');
  };

  const regenerateResponse = async () => {
    console.log('regenerateResponse');

    if (messages.length != 0 && messages.at(-1).done == true) {
      messages.splice(messages.length - 1, 1);
      messages = messages;

      let userMessage = messages.at(-1);
      let userPrompt = userMessage.content;

      await sendPrompt(userPrompt, userMessage.id);
    }
  };

  const generateChatTitle = async (_chatId, userPrompt) => {
    if ($settings.titleAutoGenerate ?? true) {
      const title = await generateTitle(
        $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
        localStorage.token,
        selectedModels[0],
        userPrompt
      );

      if (title) {
        await setChatTitle(_chatId, title);
      }
    } else {
      await setChatTitle(_chatId, `${userPrompt}`);
    }
  };

  const setChatTitle = async (_chatId, _title) => {
    if (_chatId === $chatId) {
      title = _title;
    }

    chat = await updateChatById(localStorage.token, _chatId, { title: _title });
    await chats.set(await getChatList(localStorage.token));
  };
</script>
<svelte:window
  on:scroll={(e) => {
    autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
  }}
/>

<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} />

<div class="min-h-screen w-full flex justify-center">
  <div class=" py-2.5 flex flex-col justify-between w-full">
    <div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
      <ModelSelector bind:selectedModels disabled={messages.length > 0} />
    </div>

    <div class=" h-full mt-10 mb-32 w-full flex flex-col">
      <Messages
        chatId={$chatId}
        {selectedModels}
        {selectedModelfiles}
        bind:history
        bind:messages
        bind:autoScroll
        bottomPadding={files.length > 0}
        {sendPrompt}
        {regenerateResponse}
      />
    </div>
  </div>

  <MessageInput
    bind:files
    bind:prompt
    bind:autoScroll
    suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
      {
        title: ['Help me study', 'vocabulary for a college entrance exam'],
        content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
      },
      {
        title: ['Give me ideas', `for what to do with my kids' art`],
        content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
      },
      {
        title: ['Tell me a fun fact', 'about the Roman Empire'],
        content: 'Tell me a random fun fact about the Roman Empire'
      },
      {
        title: ['Show me a code snippet', `of a website's sticky header`],
        content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
      }
    ]}
    {messages}
    {submitPrompt}
    {stopResponse}
  />
</div>