@@ -31,7 +31,11 @@
 		updateChatById
 	} from '$lib/apis/chats';
 	import { queryCollection, queryDoc, runWebSearch } from '$lib/apis/rag';
-	import { generateOpenAIChatCompletion, generateSearchQuery, generateTitle } from '$lib/apis/openai';
+	import {
+		generateOpenAIChatCompletion,
+		generateSearchQuery,
+		generateTitle
+	} from '$lib/apis/openai';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';
 	import Messages from '$lib/components/chat/Messages.svelte';
@@ -286,36 +290,7 @@
 		}
 
 		if (useWebSearch) {
-			// TODO: Toasts are temporary indicators for web search
-			toast.info($i18n.t('Generating search query'));
-			const searchQuery = await generateChatSearchQuery(prompt);
-			if (searchQuery) {
-				toast.info($i18n.t('Searching the web for \'{{searchQuery}}\'', { searchQuery }));
-				const searchDocUuid = uuidv4();
-				const searchDocument = await runWebSearch(localStorage.token, searchQuery, searchDocUuid);
-				if (searchDocument) {
-					const parentMessage = history.messages[parentId];
-					if (!parentMessage.files) {
-						parentMessage.files = [];
-					}
-					parentMessage.files.push({
-						collection_name: searchDocument.collection_name,
-						name: searchQuery,
-						type: 'doc',
-						upload_status: true,
-						error: ""
-					});
-					// Find message in messages and update it
-					const messageIndex = messages.findIndex((message) => message.id === parentId);
-					if (messageIndex !== -1) {
-						messages[messageIndex] = parentMessage;
-					}
-				} else {
-					toast.warning($i18n.t('No search results found'));
-				}
-			} else {
-				toast.warning($i18n.t('No search query generated'));
-			}
+			await runWebSearchForPrompt(parentId, responseMessageId, prompt);
 		}
 
 		if (model?.external) {
@@ -332,6 +307,41 @@
 		await chats.set(await getChatList(localStorage.token));
 	};
 
+	const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => {
+		const responseMessage = history.messages[responseId];
+		responseMessage.progress = $i18n.t('Generating search query');
+		messages = messages;
+		const searchQuery = await generateChatSearchQuery(prompt);
+		if (!searchQuery) {
+			toast.warning($i18n.t('No search query generated'));
+			responseMessage.progress = undefined;
+			messages = messages;
+			return;
+		}
+		responseMessage.progress = $i18n.t("Searching the web for '{{searchQuery}}'", { searchQuery });
+		messages = messages;
+		const searchDocument = await runWebSearch(localStorage.token, searchQuery);
+		if (!searchDocument) {
+			toast.warning($i18n.t('No search results found'));
+			responseMessage.progress = undefined;
+			messages = messages;
+			return;
+		}
+		const parentMessage = history.messages[parentId];
+		if (!parentMessage.files) {
+			parentMessage.files = [];
+		}
+		parentMessage.files.push({
+			collection_name: searchDocument!.collection_name,
+			name: searchQuery,
+			type: 'doc',
+			upload_status: true,
+			error: ''
+		});
+		responseMessage.progress = undefined;
+		messages = messages;
+	};
+
 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
 		model = model.id;
 		const responseMessage = history.messages[responseMessageId];
|