+page.svelte

<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';
	import toast from 'svelte-french-toast';

	import { onMount, tick } from 'svelte';
	import { goto } from '$app/navigation';
	import { page } from '$app/stores';

	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
	import { OLLAMA_API_BASE_URL } from '$lib/constants';

	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
	import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
	import { copyToClipboard, splitStream } from '$lib/utils';

	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
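
	// Component state: streaming/stop flag, scroll tracking, selected models, and the
	// data for the chat that is currently on screen.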
	let stopResponseFlag = false;
	let autoScroll = true;

	let selectedModels = [''];

	let selectedModelfile = null;
	$: selectedModelfile =
		selectedModels.length === 1 &&
		$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
			? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
			: null;

	let selectedModelfiles = {};
	$: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
		const modelfile =
			$modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;

		return {
			...a,
			...(modelfile && { [tagName]: modelfile })
		};
	}, {});

	let chat = null;
	let title = '';

	let prompt = '';
	let files = [];
	let messages = [];
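
	// Chat history is a message tree: messages are keyed by id and linked through
	// parentId/childrenIds, while currentId points at the leaf of the active branch.
	// The reactive block below flattens that branch into the linear `messages` array.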
	let history = {
		messages: {},
		currentId: null
	};

	$: if (history.currentId !== null) {
		let _messages = [];

		let currentMessage = history.messages[history.currentId];
		while (currentMessage !== null) {
			_messages.unshift({ ...currentMessage });
			currentMessage =
				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
		}
		messages = _messages;
	} else {
		messages = [];
	}

	onMount(async () => {
		await initNewChat();
	});

	//////////////////////////
	// Web functions
	//////////////////////////

	const initNewChat = async () => {
		window.history.replaceState(history.state, '', `/`);
		console.log('initNewChat');

		await chatId.set('');
		console.log($chatId);

		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};

		selectedModels = $page.url.searchParams.get('models')
			? $page.url.searchParams.get('models')?.split(',')
			: $settings.models ?? [''];

		let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
		settings.set({
			..._settings
		});
	};

	//////////////////////////
	// Ollama functions
	//////////////////////////

	const sendPrompt = async (prompt, parentId) => {
		const _chatId = JSON.parse(JSON.stringify($chatId));

		await Promise.all(
			selectedModels.map(async (model) => {
				console.log(model);
				const modelTag = $models.filter((m) => m.name === model).at(0);

				if (modelTag?.external) {
					await sendPromptOpenAI(model, prompt, parentId, _chatId);
				} else if (modelTag) {
					await sendPromptOllama(model, prompt, parentId, _chatId);
				} else {
					toast.error(`Model ${model} not found`);
				}
			})
		);

		await chats.set(await getChatList(localStorage.token));
	};
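
	// Ollama streams newline-delimited JSON: each chunk from the reader is split on
	// newlines, parsed, and appended to the response message until `done` is reported.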
	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
		// Create response message
		let responseMessageId = uuidv4();
		let responseMessage = {
			parentId: parentId,
			id: responseMessageId,
			childrenIds: [],
			role: 'assistant',
			content: '',
			model: model
		};

		// Add message to history and set currentId to messageId
		history.messages[responseMessageId] = responseMessage;
		history.currentId = responseMessageId;

		// Append messageId to childrenIds of parent message
		if (parentId !== null) {
			history.messages[parentId].childrenIds = [
				...history.messages[parentId].childrenIds,
				responseMessageId
			];
		}

		// Wait until history/messages have been updated
		await tick();

		// Scroll down
		window.scrollTo({ top: document.body.scrollHeight });

		const res = await generateChatCompletion(
			$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
			localStorage.token,
			{
				model: model,
				messages: [
					$settings.system
						? {
								role: 'system',
								content: $settings.system
							}
						: undefined,
					...messages
				]
					.filter((message) => message)
					.map((message) => ({
						role: message.role,
						content: message.content,
						...(message.files && {
							images: message.files
								.filter((file) => file.type === 'image')
								.map((file) => file.url.slice(file.url.indexOf(',') + 1))
						})
					})),
				options: {
					...($settings.options ?? {})
				},
				format: $settings.requestFormat ?? undefined
			}
		);

		if (res && res.ok) {
			const reader = res.body
				.pipeThrough(new TextDecoderStream())
				.pipeThrough(splitStream('\n'))
				.getReader();

			while (true) {
				const { value, done } = await reader.read();
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;
					break;
				}

				try {
					let lines = value.split('\n');

					for (const line of lines) {
						if (line !== '') {
							console.log(line);
							let data = JSON.parse(line);

							if ('detail' in data) {
								throw data;
							}

							if (data.done == false) {
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;
									messages = messages;
								}
							} else {
								responseMessage.done = true;

								if (responseMessage.content == '') {
									responseMessage.error = true;
									responseMessage.content =
										'Oops! No text generated from Ollama. Please try again.';
								}

								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									const notification = new Notification(
										selectedModelfile
											? `${
													selectedModelfile.title.charAt(0).toUpperCase() +
													selectedModelfile.title.slice(1)
												}`
											: `Ollama - ${model}`,
										{
											body: responseMessage.content,
											icon: selectedModelfile?.imageUrl ?? '/favicon.png'
										}
									);
								}

								if ($settings.responseAutoCopy) {
									copyToClipboard(responseMessage.content);
								}
							}
						}
					}
				} catch (error) {
					console.log(error);
					if ('detail' in error) {
						toast.error(error.detail);
					}
					break;
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}
			}

			if ($chatId == _chatId) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history
				});
				await chats.set(await getChatList(localStorage.token));
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.content = error.detail;
				} else {
					toast.error(error.error);
					responseMessage.content = error.error;
				}
			} else {
				toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
				responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
			}

			responseMessage.error = true;
			responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
			responseMessage.done = true;
			messages = messages;
		}

		stopResponseFlag = false;
		await tick();

		if (autoScroll) {
			window.scrollTo({ top: document.body.scrollHeight });
		}

		if (messages.length == 2 && messages.at(1).content !== '') {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			await generateChatTitle(_chatId, userPrompt);
		}
	};
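
	// OpenAI-compatible endpoints stream Server-Sent Events instead: every payload line
	// is prefixed with "data: " and the stream ends with a "data: [DONE]" sentinel.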
	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
		if ($settings.OPENAI_API_KEY) {
			if (models) {
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model
				};

				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;

				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				window.scrollTo({ top: document.body.scrollHeight });

				const res = await fetch(
					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
					{
						method: 'POST',
						headers: {
							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
							'Content-Type': 'application/json'
						},
						body: JSON.stringify({
							model: model,
							stream: true,
							messages: [
								$settings.system
									? {
											role: 'system',
											content: $settings.system
										}
									: undefined,
								...messages
							]
								.filter((message) => message)
								.map((message) => ({
									role: message.role,
									...(message.files
										? {
												content: [
													{
														type: 'text',
														text: message.content
													},
													...message.files
														.filter((file) => file.type === 'image')
														.map((file) => ({
															type: 'image_url',
															image_url: {
																url: file.url
															}
														}))
												]
											}
										: { content: message.content })
								})),
							seed: $settings.options.seed ?? undefined,
							stop: $settings.options.stop ?? undefined,
							temperature: $settings.options.temperature ?? undefined,
							top_p: $settings.options.top_p ?? undefined,
							num_ctx: $settings.options.num_ctx ?? undefined,
							frequency_penalty: $settings.options.repeat_penalty ?? undefined,
							max_tokens: $settings.options.num_predict ?? undefined
						})
					}
				).catch((err) => {
					console.log(err);
					return null;
				});

				if (res && res.ok) {
					const reader = res.body
						.pipeThrough(new TextDecoderStream())
						.pipeThrough(splitStream('\n'))
						.getReader();

					while (true) {
						const { value, done } = await reader.read();
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							messages = messages;
							break;
						}

						try {
							let lines = value.split('\n');

							for (const line of lines) {
								if (line !== '') {
									console.log(line);
									if (line === 'data: [DONE]') {
										responseMessage.done = true;
										messages = messages;
									} else {
										let data = JSON.parse(line.replace(/^data: /, ''));
										console.log(data);

										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
											continue;
										} else {
											responseMessage.content += data.choices[0].delta.content ?? '';
											messages = messages;
										}
									}
								}
							}
						} catch (error) {
							console.log(error);
						}

						if ($settings.notificationEnabled && !document.hasFocus()) {
							const notification = new Notification(`OpenAI ${model}`, {
								body: responseMessage.content,
								icon: '/favicon.png'
							});
						}

						if ($settings.responseAutoCopy) {
							copyToClipboard(responseMessage.content);
						}

						if (autoScroll) {
							window.scrollTo({ top: document.body.scrollHeight });
						}
					}

					if ($chatId == _chatId) {
						chat = await updateChatById(localStorage.token, _chatId, {
							messages: messages,
							history: history
						});
						await chats.set(await getChatList(localStorage.token));
					}
				} else {
					if (res !== null) {
						const error = await res.json();
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
							responseMessage.content = error.detail;
						} else {
							if ('message' in error.error) {
								toast.error(error.error.message);
								responseMessage.content = error.error.message;
							} else {
								toast.error(error.error);
								responseMessage.content = error.error;
							}
						}
					} else {
						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					}

					responseMessage.error = true;
					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					responseMessage.done = true;
					messages = messages;
				}

				stopResponseFlag = false;
				await tick();

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

				if (messages.length == 2) {
					window.history.replaceState(history.state, '', `/c/${_chatId}`);
					await setChatTitle(_chatId, userPrompt);
				}
			}
		}
	};
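
	// Prompt submission: build the user message, create the chat record on the first
	// message, then dispatch the prompt to every selected model via sendPrompt.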
	const submitPrompt = async (userPrompt) => {
		console.log('submitPrompt', $chatId);

		if (selectedModels.includes('')) {
			toast.error('Model not selected');
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			// Response not done
			console.log('wait');
		} else {
			// Reset chat message textarea height
			document.getElementById('chat-textarea').style.height = '';

			// Create user message
			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				content: userPrompt,
				files: files.length > 0 ? files : undefined
			};

			// Add message to history and set currentId to messageId
			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			// Append messageId to childrenIds of parent message
			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			// Wait until history/messages have been updated
			await tick();

			// Create new chat if only one message in messages
			if (messages.length == 1) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: 'New Chat',
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.options ?? {})
					},
					messages: messages,
					history: history,
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);

				await tick();
			}

			// Reset chat input textarea
			prompt = '';
			files = [];

			// Send prompt
			await sendPrompt(userPrompt, userMessageId);
		}
	};

	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};

	const regenerateResponse = async () => {
		console.log('regenerateResponse');
		if (messages.length != 0 && messages.at(-1).done == true) {
			messages.splice(messages.length - 1, 1);
			messages = messages;

			let userMessage = messages.at(-1);
			let userPrompt = userMessage.content;

			await sendPrompt(userPrompt, userMessage.id);
		}
	};

	const generateChatTitle = async (_chatId, userPrompt) => {
		if ($settings.titleAutoGenerate ?? true) {
			const title = await generateTitle(
				$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
				localStorage.token,
				selectedModels[0],
				userPrompt
			);

			if (title) {
				await setChatTitle(_chatId, title);
			}
		} else {
			await setChatTitle(_chatId, `${userPrompt}`);
		}
	};

	const setChatTitle = async (_chatId, _title) => {
		if (_chatId === $chatId) {
			title = _title;
		}

		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
		await chats.set(await getChatList(localStorage.token));
	};
</script>
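
<!-- Page layout: navbar, model selector, the message thread, and the prompt input -->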
<svelte:window
	on:scroll={(e) => {
		autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
	}}
/>

<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} />

<div class="min-h-screen w-full flex justify-center">
	<div class=" py-2.5 flex flex-col justify-between w-full">
		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
			<ModelSelector bind:selectedModels disabled={messages.length > 0} />
		</div>

		<div class=" h-full mt-10 mb-32 w-full flex flex-col">
			<Messages
				chatId={$chatId}
				{selectedModels}
				{selectedModelfiles}
				bind:history
				bind:messages
				bind:autoScroll
				bottomPadding={files.length > 0}
				{sendPrompt}
				{regenerateResponse}
			/>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
			{
				title: ['Help me study', 'vocabulary for a college entrance exam'],
				content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
			},
			{
				title: ['Give me ideas', `for what to do with my kids' art`],
				content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
			},
			{
				title: ['Tell me a fun fact', 'about the Roman Empire'],
				content: 'Tell me a random fun fact about the Roman Empire'
			},
			{
				title: ['Show me a code snippet', `of a website's sticky header`],
				content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
			}
		]}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
</div>