Browse Source

Merge branch 'dev' into add_reset_button_mermaidjs_renderer

cvaz1306 6 months ago
parent
commit
95598e5435

+ 5 - 0
backend/open_webui/env.py

@@ -311,6 +311,11 @@ RESET_CONFIG_ON_START = (
     os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
 )
 
+
+ENABLE_REALTIME_CHAT_SAVE = (
+    os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "True").lower() == "true"
+)
+
 ####################################
 # REDIS
 ####################################

+ 37 - 11
backend/open_webui/utils/middleware.py

@@ -65,6 +65,7 @@ from open_webui.env import (
     SRC_LOG_LEVELS,
     GLOBAL_LOG_LEVEL,
     BYPASS_MODEL_ACCESS_CONTROL,
+    ENABLE_REALTIME_CHAT_SAVE,
 )
 from open_webui.constants import TASKS
 
@@ -928,6 +929,10 @@ async def process_chat_response(
 
         # Handle as a background task
         async def post_response_handler(response, events):
+
+            assistant_message = get_last_assistant_message(form_data["messages"])
+            content = assistant_message if assistant_message else ""
+
             try:
                 for event in events:
                     await event_emitter(
@@ -946,9 +951,6 @@ async def process_chat_response(
                         },
                     )
 
-                assistant_message = get_last_assistant_message(form_data["messages"])
-                content = assistant_message if assistant_message else ""
-
                 async for line in response.body_iterator:
                     line = line.decode("utf-8") if isinstance(line, bytes) else line
                     data = line
@@ -977,7 +979,6 @@ async def process_chat_response(
                             )
 
                         else:
-
                             value = (
                                 data.get("choices", [])[0]
                                 .get("delta", {})
@@ -987,14 +988,19 @@ async def process_chat_response(
                             if value:
                                 content = f"{content}{value}"
 
-                                # Save message in the database
-                                Chats.upsert_message_to_chat_by_id_and_message_id(
-                                    metadata["chat_id"],
-                                    metadata["message_id"],
-                                    {
+                                if ENABLE_REALTIME_CHAT_SAVE:
+                                    # Save message in the database
+                                    Chats.upsert_message_to_chat_by_id_and_message_id(
+                                        metadata["chat_id"],
+                                        metadata["message_id"],
+                                        {
+                                            "content": content,
+                                        },
+                                    )
+                                else:
+                                    data = {
                                         "content": content,
-                                    },
-                                )
+                                    }
 
                     except Exception as e:
                         done = "data: [DONE]" in line
@@ -1003,6 +1009,16 @@ async def process_chat_response(
                         if done:
                             data = {"done": True, "content": content, "title": title}
 
+                            if not ENABLE_REALTIME_CHAT_SAVE:
+                                # Save message in the database
+                                Chats.upsert_message_to_chat_by_id_and_message_id(
+                                    metadata["chat_id"],
+                                    metadata["message_id"],
+                                    {
+                                        "content": content,
+                                    },
+                                )
+
                             # Send a webhook notification if the user is not active
                             if (
                                 get_user_id_from_session_pool(metadata["session_id"])
@@ -1036,6 +1052,16 @@ async def process_chat_response(
                 print("Task was cancelled!")
                 await event_emitter({"type": "task-cancelled"})
 
+                if not ENABLE_REALTIME_CHAT_SAVE:
+                    # Save message in the database
+                    Chats.upsert_message_to_chat_by_id_and_message_id(
+                        metadata["chat_id"],
+                        metadata["message_id"],
+                        {
+                            "content": content,
+                        },
+                    )
+
             if response.background is not None:
                 await response.background()
 

+ 1 - 1
src/lib/components/channel/Messages.svelte

@@ -66,7 +66,7 @@
 			{($settings?.widescreenMode ?? null) ? 'max-w-full' : 'max-w-5xl'} mx-auto"
 			>
 				{#if channel}
-					<div class="flex flex-col gap-1.5 py-5">
+					<div class="flex flex-col gap-1.5 pb-5 pt-10">
 						<div class="text-2xl font-medium capitalize">{channel.name}</div>
 
 						<div class=" text-gray-500">

+ 33 - 1
src/lib/components/chat/Chat.svelte

@@ -1053,7 +1053,7 @@
 	};
 
 	const chatCompletionEventHandler = async (data, message, chatId) => {
-		const { id, done, choices, sources, selected_model_id, error, usage } = data;
+		const { id, done, choices, content, sources, selected_model_id, error, usage } = data;
 
 		if (error) {
 			await handleOpenAIError(error, message);
@@ -1105,6 +1105,38 @@
 			}
 		}
 
+		if (content) {
+			// REALTIME_CHAT_SAVE is disabled
+			message.content = content;
+
+			if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
+				navigator.vibrate(5);
+			}
+
+			// Emit chat event for TTS
+			const messageContentParts = getMessageContentParts(
+				message.content,
+				$config?.audio?.tts?.split_on ?? 'punctuation'
+			);
+			messageContentParts.pop();
+
+			// dispatch only last sentence and make sure it hasn't been dispatched before
+			if (
+				messageContentParts.length > 0 &&
+				messageContentParts[messageContentParts.length - 1] !== message.lastSentence
+			) {
+				message.lastSentence = messageContentParts[messageContentParts.length - 1];
+				eventTarget.dispatchEvent(
+					new CustomEvent('chat', {
+						detail: {
+							id: message.id,
+							content: messageContentParts[messageContentParts.length - 1]
+						}
+					})
+				);
+			}
+		}
+
 		if (selected_model_id) {
 			message.selectedModelId = selected_model_id;
 			message.arena = true;

+ 6 - 6
src/lib/components/common/SVGPanZoom.svelte

@@ -29,12 +29,12 @@
 			zoomSpeed: 0.065
 		});
 	}
-	function resetPanZoomViewport() {
-		console.log('Reset View');
-		instance.moveTo(0, 0);
-		instance.zoomAbs(0, 0, 1);
-		console.log(instance.getTransform());
-	}
+	function resetPanZoomViewport() {
+		console.log('Reset View');
+		instance.moveTo(0, 0);
+		instance.zoomAbs(0, 0, 1);
+		console.log(instance.getTransform());
+	}
 </script>
 
 <div bind:this={sceneParentElement} class="relative {className}">

+ 5 - 5
src/lib/components/icons/Reset.svelte

@@ -2,8 +2,8 @@
 	export let className = 'size-4';
 </script>
 
-<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" fill="currentColor" class={className}>
-	<path
-		d="M463.5 224l8.5 0c13.3 0 24-10.7 24-24l0-128c0-9.7-5.8-18.5-14.8-22.2s-19.3-1.7-26.2 5.2L413.4 96.6c-87.6-86.5-228.7-86.2-315.8 1c-87.5 87.5-87.5 229.3 0 316.8s229.3 87.5 316.8 0c12.5-12.5 12.5-32.8 0-45.3s-32.8-12.5-45.3 0c-62.5 62.5-163.8 62.5-226.3 0s-62.5-163.8 0-226.3c62.2-62.2 162.7-62.5 225.3-1L327 183c-6.9 6.9-8.9 17.2-5.2 26.2s12.5 14.8 22.2 14.8l119.5 0z"
-	/>
-</svg>
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512" fill="currentColor" class={className}>
+	<path
+		d="M463.5 224l8.5 0c13.3 0 24-10.7 24-24l0-128c0-9.7-5.8-18.5-14.8-22.2s-19.3-1.7-26.2 5.2L413.4 96.6c-87.6-86.5-228.7-86.2-315.8 1c-87.5 87.5-87.5 229.3 0 316.8s229.3 87.5 316.8 0c12.5-12.5 12.5-32.8 0-45.3s-32.8-12.5-45.3 0c-62.5 62.5-163.8 62.5-226.3 0s-62.5-163.8 0-226.3c62.2-62.2 162.7-62.5 225.3-1L327 183c-6.9 6.9-8.9 17.2-5.2 26.2s12.5 14.8 22.2 14.8l119.5 0z"
+	/>
+</svg>

+ 0 - 0
update_ollama_models.sh