+page.svelte

<script lang="ts">
	import { v4 as uuidv4 } from 'uuid';
	import toast from 'svelte-french-toast';

	import { OLLAMA_API_BASE_URL } from '$lib/constants';
	import { onMount, tick } from 'svelte';
	import { splitStream } from '$lib/utils';
	import { goto } from '$app/navigation';
	import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';

	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
	import { page } from '$app/stores';
	let stopResponseFlag = false;
	let autoScroll = true;

	let selectedModels = [''];
	let selectedModelfile = null;

	$: selectedModelfile =
		selectedModels.length === 1 &&
		$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
			? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
			: null;

	let title = '';
	let prompt = '';
	let files = [];

	let messages = [];
	let history = {
		messages: {},
		currentId: null
	};
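	// Flatten the branched history into the linear `messages` array by walking
	// parent links from the current leaf message back up to the root.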
	$: if (history.currentId !== null) {
		let _messages = [];

		let currentMessage = history.messages[history.currentId];
		while (currentMessage !== null) {
			_messages.unshift({ ...currentMessage });
			currentMessage =
				currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
		}
		messages = _messages;
	} else {
		messages = [];
	}

	$: if (files) {
		console.log(files);
	}
	onMount(async () => {
		await chatId.set(uuidv4());

		chatId.subscribe(async () => {
			await initNewChat();
		});
	});
	//////////////////////////
	// Web functions
	//////////////////////////

	const initNewChat = async () => {
		console.log($chatId);

		autoScroll = true;

		title = '';
		messages = [];
		history = {
			messages: {},
			currentId: null
		};

		selectedModels = $page.url.searchParams.get('models')
			? $page.url.searchParams.get('models')?.split(',')
			: $settings.models ?? [''];

		let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
		console.log(_settings);
		settings.set({
			..._settings
		});
	};
	const copyToClipboard = (text) => {
		if (!navigator.clipboard) {
			const textArea = document.createElement('textarea');
			textArea.value = text;

			// Avoid scrolling to bottom
			textArea.style.top = '0';
			textArea.style.left = '0';
			textArea.style.position = 'fixed';

			document.body.appendChild(textArea);
			textArea.focus();
			textArea.select();

			try {
				const successful = document.execCommand('copy');
				const msg = successful ? 'successful' : 'unsuccessful';
				console.log('Fallback: Copying text command was ' + msg);
			} catch (err) {
				console.error('Fallback: Oops, unable to copy', err);
			}

			document.body.removeChild(textArea);
			return;
		}
		navigator.clipboard.writeText(text).then(
			function () {
				console.log('Async: Copying to clipboard was successful!');
			},
			function (err) {
				console.error('Async: Could not copy text: ', err);
			}
		);
	};
	//////////////////////////
	// Ollama functions
	//////////////////////////

	const sendPrompt = async (userPrompt, parentId, _chatId) => {
		await Promise.all(
			selectedModels.map(async (model) => {
				if (model.includes('gpt-')) {
					await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
				} else {
					await sendPromptOllama(model, userPrompt, parentId, _chatId);
				}
			})
		);

		await chats.set(await $db.getChats());
	};
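	// Stream a completion from the Ollama chat endpoint. The response body is
	// newline-delimited JSON: intermediate chunks carry a partial `message.content`,
	// and the final chunk has `done: true` plus context and timing stats.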
	const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
		console.log('sendPromptOllama');

		let responseMessageId = uuidv4();
		let responseMessage = {
			parentId: parentId,
			id: responseMessageId,
			childrenIds: [],
			role: 'assistant',
			content: '',
			model: model
		};

		history.messages[responseMessageId] = responseMessage;
		history.currentId = responseMessageId;
		if (parentId !== null) {
			history.messages[parentId].childrenIds = [
				...history.messages[parentId].childrenIds,
				responseMessageId
			];
		}

		await tick();
		window.scrollTo({ top: document.body.scrollHeight });
		const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
			method: 'POST',
			headers: {
				'Content-Type': 'text/event-stream',
				...($settings.authHeader && { Authorization: $settings.authHeader }),
				...($user && { Authorization: `Bearer ${localStorage.token}` })
			},
			body: JSON.stringify({
				model: model,
				messages: [
					$settings.system ? { role: 'system', content: $settings.system } : undefined,
					...messages
				]
					.filter((message) => message)
					.map((message) => ({
						role: message.role,
						content: message.content,
						...(message.files && {
							images: message.files
								.filter((file) => file.type === 'image')
								.map((file) => file.url.slice(file.url.indexOf(',') + 1))
						})
					})),
				options: {
					seed: $settings.seed ?? undefined,
					temperature: $settings.temperature ?? undefined,
					repeat_penalty: $settings.repeat_penalty ?? undefined,
					top_k: $settings.top_k ?? undefined,
					top_p: $settings.top_p ?? undefined,
					num_ctx: $settings.num_ctx ?? undefined,
					...($settings.options ?? {})
				},
				format: $settings.requestFormat ?? undefined
			})
		}).catch((err) => {
			console.log(err);
			return null;
		});
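		// Read the stream chunk by chunk; stop early if the user pressed "stop"
		// or has navigated away to a different chat.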
		if (res && res.ok) {
			const reader = res.body
				.pipeThrough(new TextDecoderStream())
				.pipeThrough(splitStream('\n'))
				.getReader();

			while (true) {
				const { value, done } = await reader.read();
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;
					break;
				}

				try {
					let lines = value.split('\n');

					for (const line of lines) {
						if (line !== '') {
							console.log(line);
							let data = JSON.parse(line);

							if ('detail' in data) {
								throw data;
							}

							if (data.done == false) {
								if (responseMessage.content == '' && data.message.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.message.content;
									messages = messages;
								}
							} else {
								responseMessage.done = true;
								responseMessage.context = data.context ?? null;
								responseMessage.info = {
									total_duration: data.total_duration,
									load_duration: data.load_duration,
									sample_count: data.sample_count,
									sample_duration: data.sample_duration,
									prompt_eval_count: data.prompt_eval_count,
									prompt_eval_duration: data.prompt_eval_duration,
									eval_count: data.eval_count,
									eval_duration: data.eval_duration
								};
								messages = messages;

								if ($settings.notificationEnabled && !document.hasFocus()) {
									const notification = new Notification(
										selectedModelfile
											? selectedModelfile.title.charAt(0).toUpperCase() +
											  selectedModelfile.title.slice(1)
											: `Ollama - ${model}`,
										{
											body: responseMessage.content,
											icon: selectedModelfile?.imageUrl ?? '/favicon.png'
										}
									);
								}

								if ($settings.responseAutoCopy) {
									copyToClipboard(responseMessage.content);
								}
							}
						}
					}
				} catch (error) {
					console.log(error);
					if ('detail' in error) {
						toast.error(error.detail);
					}
					break;
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

				await $db.updateChatById(_chatId, {
					title: title === '' ? 'New Chat' : title,
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						seed: $settings.seed ?? undefined,
						temperature: $settings.temperature ?? undefined,
						repeat_penalty: $settings.repeat_penalty ?? undefined,
						top_k: $settings.top_k ?? undefined,
						top_p: $settings.top_p ?? undefined,
						num_ctx: $settings.num_ctx ?? undefined,
						...($settings.options ?? {})
					},
					messages: messages,
					history: history
				});
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.content = error.detail;
				} else {
					toast.error(error.error);
					responseMessage.content = error.error;
				}
			} else {
				toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
				responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
			}

			responseMessage.error = true;
			responseMessage.done = true;
			messages = messages;
		}

		stopResponseFlag = false;

		await tick();
		if (autoScroll) {
			window.scrollTo({ top: document.body.scrollHeight });
		}

		if (messages.length == 2 && messages.at(1).content !== '') {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			await generateChatTitle(_chatId, userPrompt);
		}
	};
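	// Stream a completion from the OpenAI chat completions API. The response is
	// server-sent events: "data: "-prefixed JSON delta lines, terminated by a
	// literal "data: [DONE]" line.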
	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
		if ($settings.OPENAI_API_KEY) {
			if (model) {
				let responseMessageId = uuidv4();
				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model
				};

				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}

				await tick();
				window.scrollTo({ top: document.body.scrollHeight });
				const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
					method: 'POST',
					headers: {
						'Content-Type': 'application/json',
						Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
					},
					body: JSON.stringify({
						model: model,
						stream: true,
						messages: [
							$settings.system ? { role: 'system', content: $settings.system } : undefined,
							...messages
						]
							.filter((message) => message)
							.map((message) => ({
								role: message.role,
								...(message.files
									? {
											content: [
												{ type: 'text', text: message.content },
												...message.files
													.filter((file) => file.type === 'image')
													.map((file) => ({
														type: 'image_url',
														image_url: { url: file.url }
													}))
											]
									  }
									: { content: message.content })
							})),
						temperature: $settings.temperature ?? undefined,
						top_p: $settings.top_p ?? undefined,
						num_ctx: $settings.num_ctx ?? undefined,
						frequency_penalty: $settings.repeat_penalty ?? undefined
					})
				});
				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();

				while (true) {
					const { value, done } = await reader.read();
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;
						break;
					}

					try {
						let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								if (line === 'data: [DONE]') {
									responseMessage.done = true;
									messages = messages;
								} else {
									let data = JSON.parse(line.replace(/^data: /, ''));
									console.log(data);

									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.choices[0].delta.content ?? '';
										messages = messages;
									}
								}
							}
						}
					} catch (error) {
						console.log(error);
					}

					if (autoScroll) {
						window.scrollTo({ top: document.body.scrollHeight });
					}

					await $db.updateChatById(_chatId, {
						title: title === '' ? 'New Chat' : title,
						models: selectedModels,
						system: $settings.system ?? undefined,
						options: {
							seed: $settings.seed ?? undefined,
							temperature: $settings.temperature ?? undefined,
							repeat_penalty: $settings.repeat_penalty ?? undefined,
							top_k: $settings.top_k ?? undefined,
							top_p: $settings.top_p ?? undefined,
							num_ctx: $settings.num_ctx ?? undefined,
							...($settings.options ?? {})
						},
						messages: messages,
						history: history
					});
				}

				stopResponseFlag = false;
				await tick();

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: '/favicon.png'
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

				if (messages.length == 2) {
					window.history.replaceState(history.state, '', `/c/${_chatId}`);
					await setChatTitle(_chatId, userPrompt);
				}
			}
		}
	};
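	// Append the user's message to the history tree, persist a new chat record on
	// the first message, then fan the prompt out to every selected model.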
	const submitPrompt = async (userPrompt) => {
		const _chatId = JSON.parse(JSON.stringify($chatId));
		console.log('submitPrompt', _chatId);

		if (selectedModels.includes('')) {
			toast.error('Model not selected');
		} else if (messages.length != 0 && messages.at(-1).done != true) {
			console.log('wait');
		} else {
			document.getElementById('chat-textarea').style.height = '';

			let userMessageId = uuidv4();
			let userMessage = {
				id: userMessageId,
				parentId: messages.length !== 0 ? messages.at(-1).id : null,
				childrenIds: [],
				role: 'user',
				content: userPrompt,
				files: files.length > 0 ? files : undefined
			};

			if (messages.length !== 0) {
				history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
			}

			history.messages[userMessageId] = userMessage;
			history.currentId = userMessageId;

			await tick();

			if (messages.length == 1) {
				await $db.createNewChat({
					id: _chatId,
					title: 'New Chat',
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						seed: $settings.seed ?? undefined,
						temperature: $settings.temperature ?? undefined,
						repeat_penalty: $settings.repeat_penalty ?? undefined,
						top_k: $settings.top_k ?? undefined,
						top_p: $settings.top_p ?? undefined,
						num_ctx: $settings.num_ctx ?? undefined,
						...($settings.options ?? {})
					},
					messages: messages,
					history: history
				});
			}

			prompt = '';
			files = [];

			setTimeout(() => {
				window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
			}, 50);

			await sendPrompt(userPrompt, userMessageId, _chatId);
		}
	};
	const stopResponse = () => {
		stopResponseFlag = true;
		console.log('stopResponse');
	};

	const regenerateResponse = async () => {
		const _chatId = JSON.parse(JSON.stringify($chatId));
		console.log('regenerateResponse', _chatId);

		if (messages.length != 0 && messages.at(-1).done == true) {
			messages.splice(messages.length - 1, 1);
			messages = messages;

			let userMessage = messages.at(-1);
			let userPrompt = userMessage.content;

			await sendPrompt(userPrompt, userMessage.id, _chatId);
		}
	};
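	// Ask the first selected model (via the non-streaming generate endpoint) for a
	// short chat title; fall back to the raw prompt when auto-generation is off.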
	const generateChatTitle = async (_chatId, userPrompt) => {
		if ($settings.titleAutoGenerate ?? true) {
			console.log('generateChatTitle');

			const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/generate`, {
				method: 'POST',
				headers: {
					'Content-Type': 'text/event-stream',
					...($settings.authHeader && { Authorization: $settings.authHeader }),
					...($user && { Authorization: `Bearer ${localStorage.token}` })
				},
				body: JSON.stringify({
					model: selectedModels[0],
					prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${userPrompt}`,
					stream: false
				})
			})
				.then(async (res) => {
					if (!res.ok) throw await res.json();
					return res.json();
				})
				.catch((error) => {
					if ('detail' in error) {
						toast.error(error.detail);
					}
					console.log(error);
					return null;
				});

			if (res) {
				await setChatTitle(_chatId, res.response === '' ? 'New Chat' : res.response);
			}
		} else {
			await setChatTitle(_chatId, `${userPrompt}`);
		}
	};
	const setChatTitle = async (_chatId, _title) => {
		await $db.updateChatById(_chatId, { title: _title });
		if (_chatId === $chatId) {
			title = _title;
		}
	};
</script>
<svelte:window
	on:scroll={(e) => {
		autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
	}}
/>

<Navbar {title} shareEnabled={messages.length > 0} />
<div class="min-h-screen w-full flex justify-center">
	<div class=" py-2.5 flex flex-col justify-between w-full">
		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
			<ModelSelector bind:selectedModels disabled={messages.length > 0} />
		</div>

		<div class=" h-full mt-10 mb-32 w-full flex flex-col">
			<Messages
				{selectedModels}
				{selectedModelfile}
				bind:history
				bind:messages
				bind:autoScroll
				bottomPadding={files.length > 0}
				{sendPrompt}
				{regenerateResponse}
			/>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
			{
				title: ['Help me study', 'vocabulary for a college entrance exam'],
				content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
			},
			{
				title: ['Give me ideas', `for what to do with my kids' art`],
				content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
			},
			{
				title: ['Tell me a fun fact', 'about the Roman Empire'],
				content: 'Tell me a random fun fact about the Roman Empire'
			},
			{
				title: ['Show me a code snippet', `of a website's sticky header`],
				content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
			}
		]}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
</div>