Browse Source

feat: add llm sessions and update related logic

0xJacky 1 month ago
parent
commit
c355cb8e65

+ 1 - 1
.github/workflows/weblate-pull.yml

@@ -29,4 +29,4 @@ jobs:
       - name: Update Repository
         env:
           WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
-        run: wlc --key $WEBLATE_TOKEN pull
+        run: wlc --key $WEBLATE_TOKEN reset nginx-ui

+ 4 - 4
api/config/rename.go

@@ -76,7 +76,7 @@ func Rename(c *gin.Context) {
 	}
 
 	// update LLM records
-	g := query.LLMMessages
+	g := query.LLMSession
 	q := query.Config
 	cfg, err := q.Where(q.Filepath.Eq(origFullPath)).FirstOrInit()
 	if err != nil {
@@ -84,13 +84,13 @@ func Rename(c *gin.Context) {
 		return
 	}
 	if !stat.IsDir() {
-		_, _ = g.Where(g.Name.Eq(newFullPath)).Delete()
-		_, _ = g.Where(g.Name.Eq(origFullPath)).Update(g.Name, newFullPath)
+		_, _ = g.Where(g.Path.Eq(newFullPath)).Delete()
+		_, _ = g.Where(g.Path.Eq(origFullPath)).Update(g.Path, newFullPath)
 		// for file, the sync policy for this file is used
 		json.SyncNodeIds = cfg.SyncNodeIds
 	} else {
 		// is directory, update all records under the directory
-		_, _ = g.Where(g.Name.Like(origFullPath+"%")).Update(g.Name, g.Name.Replace(origFullPath, newFullPath))
+		_, _ = g.Where(g.Path.Like(origFullPath+"%")).Update(g.Path, g.Path.Replace(origFullPath, newFullPath))
 	}
 
 	_, err = q.Where(q.Filepath.Eq(origFullPath)).Updates(&model.Config{

+ 0 - 31
api/llm/record.go

@@ -1,31 +0,0 @@
-package llm
-
-import (
-	"net/http"
-
-	"github.com/0xJacky/Nginx-UI/internal/helper"
-	"github.com/0xJacky/Nginx-UI/internal/nginx"
-	"github.com/0xJacky/Nginx-UI/query"
-	"github.com/gin-gonic/gin"
-	"github.com/uozi-tech/cosy"
-)
-
-func GetLLMRecord(c *gin.Context) {
-	absPath := c.Query("path")
-
-	if !helper.IsUnderDirectory(absPath, nginx.GetConfPath()) {
-		c.JSON(http.StatusForbidden, gin.H{
-			"message": "path is not under the nginx conf path",
-		})
-		return
-	}
-
-	g := query.LLMMessages
-	llmMsg, err := g.Where(g.Name.Eq(absPath)).FirstOrCreate()
-	if err != nil {
-		cosy.ErrHandler(c, err)
-		return
-	}
-
-	c.JSON(http.StatusOK, llmMsg)
-}

+ 12 - 2
api/llm/router.go

@@ -3,8 +3,18 @@ package llm
 import "github.com/gin-gonic/gin"
 
 func InitRouter(r *gin.RouterGroup) {
-	r.GET("llm_messages", GetLLMRecord)
-	r.POST("llm_messages", StoreLLMRecord)
+	// LLM Session endpoints
+	r.GET("llm_sessions", GetLLMSessions)
+	r.GET("llm_sessions/:session_id", GetLLMSession)
+	r.POST("llm_sessions", CreateLLMSession)
+	r.PUT("llm_sessions/:session_id", UpdateLLMSession)
+	r.DELETE("llm_sessions/:session_id", DeleteLLMSession)
+	r.POST("llm_sessions/:session_id/duplicate", DuplicateLLMSession)
+	r.POST("llm_sessions/:session_id/generate_title", GenerateSessionTitle)
+	
+	// Compatibility endpoints for legacy file-based sessions
+	r.GET("llm_messages", GetLLMSessionByPath)
+	r.POST("llm_messages", CreateOrUpdateLLMSessionByPath)
 }
 
 // InitLocalRouter for main node only (no proxy)

+ 331 - 0
api/llm/session.go

@@ -0,0 +1,331 @@
+package llm
+
+import (
+	"net/http"
+	"path/filepath"
+
+	"github.com/0xJacky/Nginx-UI/internal/helper"
+	"github.com/0xJacky/Nginx-UI/internal/llm"
+	"github.com/0xJacky/Nginx-UI/internal/nginx"
+	"github.com/0xJacky/Nginx-UI/model"
+	"github.com/0xJacky/Nginx-UI/query"
+	"github.com/gin-gonic/gin"
+	"github.com/sashabaranov/go-openai"
+	"github.com/uozi-tech/cosy"
+	"github.com/uozi-tech/cosy/logger"
+)
+
+// GetLLMSessions returns LLM sessions with optional filtering
+func GetLLMSessions(c *gin.Context) {
+	g := query.LLMSession
+	query := g.Order(g.UpdatedAt.Desc())
+	
+	// Filter by path if provided
+	if path := c.Query("path"); path != "" {
+		if !helper.IsUnderDirectory(path, nginx.GetConfPath()) {
+			c.JSON(http.StatusForbidden, gin.H{
+				"message": "path is not under the nginx conf path",
+			})
+			return
+		}
+		query = query.Where(g.Path.Eq(path))
+	}
+	
+	sessions, err := query.Find()
+	if err != nil {
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, sessions)
+}
+
+// GetLLMSession returns a single session by session_id
+func GetLLMSession(c *gin.Context) {
+	sessionID := c.Param("session_id")
+	
+	g := query.LLMSession
+	session, err := g.Where(g.SessionID.Eq(sessionID)).First()
+	if err != nil {
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, session)
+}
+
+// CreateLLMSession creates a new LLM session
+func CreateLLMSession(c *gin.Context) {
+	var json struct {
+		Title string `json:"title" binding:"required"`
+		Path  string `json:"path"`
+	}
+
+	if !cosy.BindAndValid(c, &json) {
+		return
+	}
+
+	// Validate path if provided
+	if json.Path != "" && !helper.IsUnderDirectory(json.Path, nginx.GetConfPath()) {
+		c.JSON(http.StatusForbidden, gin.H{
+			"message": "path is not under the nginx conf path",
+		})
+		return
+	}
+
+	session := &model.LLMSession{
+		Title:        json.Title,
+		Path:         json.Path,
+		Messages:     []openai.ChatCompletionMessage{},
+		MessageCount: 0,
+		IsActive:     true,
+	}
+
+	g := query.LLMSession
+	err := g.Create(session)
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, session)
+}
+
+// UpdateLLMSession updates an existing session
+func UpdateLLMSession(c *gin.Context) {
+	sessionID := c.Param("session_id")
+	
+	var json struct {
+		Title    string                         `json:"title,omitempty"`
+		Messages []openai.ChatCompletionMessage `json:"messages,omitempty"`
+		IsActive *bool                          `json:"is_active,omitempty"`
+	}
+
+	if !cosy.BindAndValid(c, &json) {
+		return
+	}
+
+	g := query.LLMSession
+	session, err := g.Where(g.SessionID.Eq(sessionID)).First()
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	// Update fields
+	if json.Title != "" {
+		session.Title = json.Title
+	}
+	
+	if json.Messages != nil {
+		session.Messages = json.Messages
+		session.MessageCount = len(json.Messages)
+	}
+	
+	if json.IsActive != nil {
+		session.IsActive = *json.IsActive
+	}
+
+	// Save the updated session
+	err = g.Save(session)
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, session)
+}
+
+// DeleteLLMSession deletes a session by session_id
+func DeleteLLMSession(c *gin.Context) {
+	sessionID := c.Param("session_id")
+	
+	g := query.LLMSession
+	result, err := g.Where(g.SessionID.Eq(sessionID)).Delete()
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	if result.RowsAffected == 0 {
+		c.JSON(http.StatusNotFound, gin.H{
+			"message": "Session not found",
+		})
+		return
+	}
+
+	c.JSON(http.StatusOK, gin.H{
+		"message": "Session deleted successfully",
+	})
+}
+
+// DuplicateLLMSession duplicates an existing session
+func DuplicateLLMSession(c *gin.Context) {
+	sessionID := c.Param("session_id")
+	
+	g := query.LLMSession
+	originalSession, err := g.Where(g.SessionID.Eq(sessionID)).First()
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	// Create a new session with the same content
+	newSession := &model.LLMSession{
+		Title:        originalSession.Title + " (Copy)",
+		Path:         originalSession.Path,
+		Messages:     originalSession.Messages,
+		MessageCount: originalSession.MessageCount,
+	}
+
+	err = g.Create(newSession)
+	if err != nil {
+		logger.Error(err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, newSession)
+}
+
+// GetLLMSessionByPath - 兼容性端点,基于路径获取或创建会话
+func GetLLMSessionByPath(c *gin.Context) {
+	path := c.Query("path")
+
+	if !helper.IsUnderDirectory(path, nginx.GetConfPath()) {
+		c.JSON(http.StatusForbidden, gin.H{
+			"message": "path is not under the nginx conf path",
+		})
+		return
+	}
+
+	g := query.LLMSession
+	
+	// 查找基于该路径的会话
+	session, err := g.Where(g.Path.Eq(path)).First()
+	if err != nil {
+		// 如果没找到,创建一个新的会话
+		title := "Chat for " + filepath.Base(path)
+		session = &model.LLMSession{
+			Title:        title,
+			Path:         path,
+			Messages:     []openai.ChatCompletionMessage{},
+			MessageCount: 0,
+			IsActive:     true,
+		}
+		
+		err = g.Create(session)
+		if err != nil {
+			logger.Error(err)
+			cosy.ErrHandler(c, err)
+			return
+		}
+	}
+
+	// 返回兼容格式
+	response := struct {
+		Name    string                         `json:"name"`
+		Content []openai.ChatCompletionMessage `json:"content"`
+	}{
+		Name:    session.Path,
+		Content: session.Messages,
+	}
+
+	c.JSON(http.StatusOK, response)
+}
+
+// CreateOrUpdateLLMSessionByPath - 兼容性端点,基于路径创建或更新会话
+func CreateOrUpdateLLMSessionByPath(c *gin.Context) {
+	var json struct {
+		FileName string                         `json:"file_name"`
+		Messages []openai.ChatCompletionMessage `json:"messages"`
+	}
+
+	if !cosy.BindAndValid(c, &json) {
+		return
+	}
+
+	if !helper.IsUnderDirectory(json.FileName, nginx.GetConfPath()) {
+		c.JSON(http.StatusForbidden, gin.H{
+			"message": "path is not under the nginx conf path",
+		})
+		return
+	}
+
+	g := query.LLMSession
+	
+	// 查找或创建基于该路径的会话
+	session, err := g.Where(g.Path.Eq(json.FileName)).First()
+	if err != nil {
+		// 创建新会话
+		title := "Chat for " + filepath.Base(json.FileName)
+		session = &model.LLMSession{
+			Title:        title,
+			Path:         json.FileName,
+			Messages:     json.Messages,
+			MessageCount: len(json.Messages),
+			IsActive:     true,
+		}
+		
+		err = g.Create(session)
+		if err != nil {
+			logger.Error(err)
+			cosy.ErrHandler(c, err)
+			return
+		}
+	} else {
+		// 更新现有会话
+		session.Messages = json.Messages
+		session.MessageCount = len(json.Messages)
+		
+		err = g.Save(session)
+		if err != nil {
+			logger.Error(err)
+			cosy.ErrHandler(c, err)
+			return
+		}
+	}
+
+	c.JSON(http.StatusOK, gin.H{
+		"message": "ok",
+	})
+}
+
+// GenerateSessionTitle generates a title for a session based on its context
+func GenerateSessionTitle(c *gin.Context) {
+	sessionID := c.Param("session_id")
+	
+	g := query.LLMSession
+	session, err := g.Where(g.SessionID.Eq(sessionID)).First()
+	if err != nil {
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	// Generate title based on session messages
+	title, err := llm.GenerateSessionTitle(session.Messages)
+	if err != nil {
+		logger.Error("Failed to generate session title:", err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	// Update the session with the new title
+	session.Title = title
+	err = g.Save(session)
+	if err != nil {
+		logger.Error("Failed to save session with new title:", err)
+		cosy.ErrHandler(c, err)
+		return
+	}
+
+	c.JSON(http.StatusOK, gin.H{
+		"title": title,
+		"message": "Title generated successfully",
+	})
+}

+ 0 - 45
api/llm/store.go

@@ -1,45 +0,0 @@
-package llm
-
-import (
-	"net/http"
-
-	"github.com/0xJacky/Nginx-UI/model"
-	"github.com/0xJacky/Nginx-UI/query"
-	"github.com/gin-gonic/gin"
-	"github.com/sashabaranov/go-openai"
-	"github.com/uozi-tech/cosy"
-)
-
-func StoreLLMRecord(c *gin.Context) {
-	var json struct {
-		FileName string                         `json:"file_name"`
-		Messages []openai.ChatCompletionMessage `json:"messages"`
-	}
-
-	if !cosy.BindAndValid(c, &json) {
-		return
-	}
-
-	name := json.FileName
-	g := query.LLMMessages
-	_, err := g.Where(g.Name.Eq(name)).FirstOrCreate()
-
-	if err != nil {
-		cosy.ErrHandler(c, err)
-		return
-	}
-
-	_, err = g.Where(g.Name.Eq(name)).Updates(&model.LLMMessages{
-		Name:    name,
-		Content: json.Messages,
-	})
-
-	if err != nil {
-		cosy.ErrHandler(c, err)
-		return
-	}
-
-	c.JSON(http.StatusOK, gin.H{
-		"message": "ok",
-	})
-}

+ 5 - 0
app/components.d.ts

@@ -42,6 +42,7 @@ declare module 'vue' {
     AListItem: typeof import('ant-design-vue/es')['ListItem']
     AListItemMeta: typeof import('ant-design-vue/es')['ListItemMeta']
     AMenu: typeof import('ant-design-vue/es')['Menu']
+    AMenuDivider: typeof import('ant-design-vue/es')['MenuDivider']
     AMenuItem: typeof import('ant-design-vue/es')['MenuItem']
     AModal: typeof import('ant-design-vue/es')['Modal']
     APopconfirm: typeof import('ant-design-vue/es')['Popconfirm']
@@ -89,6 +90,9 @@ declare module 'vue' {
     LLMChatMessageInput: typeof import('./src/components/LLM/ChatMessageInput.vue')['default']
     LLMChatMessageList: typeof import('./src/components/LLM/ChatMessageList.vue')['default']
     LLMLLM: typeof import('./src/components/LLM/LLM.vue')['default']
+    LLMLLMSessionSelector: typeof import('./src/components/LLM/LLMSessionSelector.vue')['default']
+    LLMLLMSessionSidebar: typeof import('./src/components/LLM/LLMSessionSidebar.vue')['default']
+    LLMLLMSessionTabs: typeof import('./src/components/LLM/LLMSessionTabs.vue')['default']
     LogoLogo: typeof import('./src/components/Logo/Logo.vue')['default']
     NamespaceRenderNamespaceRender: typeof import('./src/components/NamespaceRender/NamespaceRender.vue')['default']
     NamespaceTabsNamespaceTabs: typeof import('./src/components/NamespaceTabs/NamespaceTabs.vue')['default']
@@ -125,6 +129,7 @@ declare module 'vue' {
     SyncNodesPreviewSyncNodesPreview: typeof import('./src/components/SyncNodesPreview/SyncNodesPreview.vue')['default']
     SystemRestoreSystemRestoreContent: typeof import('./src/components/SystemRestore/SystemRestoreContent.vue')['default']
     TabFilterTabFilter: typeof import('./src/components/TabFilter/TabFilter.vue')['default']
+    TerminalTerminalStatusBar: typeof import('./src/components/Terminal/TerminalStatusBar.vue')['default']
     TwoFAAuthorization: typeof import('./src/components/TwoFA/Authorization.vue')['default']
     UpstreamCardsUpstreamCards: typeof import('./src/components/UpstreamCards/UpstreamCards.vue')['default']
     UpstreamDetailModalUpstreamDetailModal: typeof import('./src/components/UpstreamDetailModal/UpstreamDetailModal.vue')['default']

+ 36 - 0
app/src/api/llm.ts

@@ -23,6 +23,17 @@ export interface CodeCompletionResponse {
   code: string // Completed code
 }
 
+export interface LLMSessionResponse {
+  session_id: string
+  title: string
+  path: string
+  messages: ChatComplicationMessage[]
+  message_count: number
+  is_active: boolean
+  created_at: string
+  updated_at: string
+}
+
 const llm = {
   get_messages(path: string) {
     return http.get(`/llm_messages`, { params: { path } })
@@ -36,6 +47,31 @@ const llm = {
   get_code_completion_enabled_status() {
     return http.get<{ enabled: boolean }>('/code_completion/enabled')
   },
+
+  // Session APIs
+  get_sessions(path?: string) {
+    return http.get<LLMSessionResponse[]>('/llm_sessions', {
+      params: path ? { path } : undefined,
+    })
+  },
+  get_session(sessionId: string) {
+    return http.get<LLMSessionResponse>(`/llm_sessions/${sessionId}`)
+  },
+  create_session(data: { title: string, path?: string }) {
+    return http.post<LLMSessionResponse>('/llm_sessions', data)
+  },
+  update_session(sessionId: string, data: { title?: string, messages?: ChatComplicationMessage[] }) {
+    return http.put<LLMSessionResponse>(`/llm_sessions/${sessionId}`, data)
+  },
+  delete_session(sessionId: string) {
+    return http.delete(`/llm_sessions/${sessionId}`)
+  },
+  duplicate_session(sessionId: string) {
+    return http.post<LLMSessionResponse>(`/llm_sessions/${sessionId}/duplicate`)
+  },
+  generate_session_title(sessionId: string) {
+    return http.post<{ title: string, message: string }>(`/llm_sessions/${sessionId}/generate_title`)
+  },
 }
 
 export default llm

+ 65 - 36
app/src/components/LLM/ChatMessage.vue

@@ -1,5 +1,6 @@
 <script setup lang="ts">
 import type { ChatComplicationMessage } from '@/api/llm'
+import { useAnimationCoordinator } from './animationCoordinator'
 import { useLLMStore } from './llm'
 import { marked } from './markdown'
 import { transformText } from './utils'
@@ -23,6 +24,7 @@ defineEmits<{
 
 const llmStore = useLLMStore()
 const { streamingMessageIndex } = storeToRefs(llmStore)
+const { coordinator } = useAnimationCoordinator()
 
 function updateEditValue(value: string) {
   llmStore.editValue = value
@@ -51,20 +53,26 @@ function getTransformedContent(content: string): string {
 const shouldUseTypewriter = computed(() => {
   return props.message.role === 'assistant'
     && !props.isEditing
-    && streamingMessageIndex.value === props.index
+    && (streamingMessageIndex.value === props.index || isTyping.value)
 })
 
 // High-performance typewriter animation using RAF
 function startTypewriterAnimation(targetContent: string) {
-  if (animationFrame.value) {
-    cancelAnimationFrame(animationFrame.value)
-  }
-
   const transformedContent = getTransformedContent(targetContent)
 
   // Skip if content hasn't changed
   if (displayText.value === transformedContent) {
-    isTyping.value = false
+    if (isTyping.value) {
+      isTyping.value = false
+      coordinator.setMessageTyping(false)
+    }
+    return
+  }
+
+  // For streaming content, just update the target without restarting animation
+  if (isTyping.value && animationFrame.value) {
+    // Animation is already running, just update the target content
+    // The animation will automatically pick up the new content
     return
   }
 
@@ -75,44 +83,58 @@ function startTypewriterAnimation(targetContent: string) {
   // If content is shorter (like editing), immediately set to target
   if (targetLength < startLength) {
     displayText.value = transformedContent
-    isTyping.value = false
+    if (isTyping.value) {
+      isTyping.value = false
+      coordinator.setMessageTyping(false)
+    }
     return
   }
 
-  isTyping.value = true
-  let currentIndex = startLength
-  let lastTime = performance.now()
+  // Only start new animation if not already typing
+  if (!isTyping.value) {
+    isTyping.value = true
+    coordinator.setMessageTyping(true)
 
-  // Characters per second (adjustable for speed)
-  const charactersPerSecond = 120 // Similar to VScode speed
-  const msPerCharacter = 1000 / charactersPerSecond
+    let currentIndex = startLength
+    let lastTime = performance.now()
 
-  function animate(currentTime: number) {
-    const deltaTime = currentTime - lastTime
+    // Characters per second (adjustable for speed)
+    const charactersPerSecond = 120 // Similar to VScode speed
+    const msPerCharacter = 1000 / charactersPerSecond
 
-    // Check if enough time has passed to show next character(s)
-    if (deltaTime >= msPerCharacter) {
-      // Calculate how many characters to show based on elapsed time
-      const charactersToAdd = Math.floor(deltaTime / msPerCharacter)
-      currentIndex = Math.min(currentIndex + charactersToAdd, targetLength)
+    function animate(currentTime: number) {
+      // Get the latest transformed content (in case it changed during animation)
+      const latestContent = getTransformedContent(props.message.content)
+      const latestLength = latestContent.length
 
-      displayText.value = transformedContent.substring(0, currentIndex)
-      lastTime = currentTime
+      const deltaTime = currentTime - lastTime
 
-      // Check if we've reached the end
-      if (currentIndex >= targetLength) {
-        isTyping.value = false
-        animationFrame.value = null
-        return
+      // Check if enough time has passed to show next character(s)
+      if (deltaTime >= msPerCharacter) {
+        // Calculate how many characters to show based on elapsed time
+        const charactersToAdd = Math.floor(deltaTime / msPerCharacter)
+        currentIndex = Math.min(currentIndex + charactersToAdd, latestLength)
+
+        displayText.value = latestContent.substring(0, currentIndex)
+        lastTime = currentTime
+
+        // Check if we've reached the end
+        if (currentIndex >= latestLength) {
+          isTyping.value = false
+          coordinator.setMessageTyping(false)
+          coordinator.setMessageStreaming(false) // End streaming when typing completes
+          animationFrame.value = null
+          return
+        }
       }
+
+      // Continue animation
+      animationFrame.value = requestAnimationFrame(animate)
     }
 
-    // Continue animation
+    // Start the animation
     animationFrame.value = requestAnimationFrame(animate)
   }
-
-  // Start the animation
-  animationFrame.value = requestAnimationFrame(animate)
 }
 
 // Stop animation when component unmounts
@@ -133,7 +155,10 @@ watch(
     else {
       // For user messages, non-streaming messages, or when editing, show immediately
       displayText.value = getTransformedContent(newContent)
-      isTyping.value = false
+      if (isTyping.value) {
+        isTyping.value = false
+        coordinator.setMessageTyping(false)
+      }
     }
   },
   { immediate: true },
@@ -142,11 +167,15 @@ watch(
 // Watch for streaming state changes
 watch(
   shouldUseTypewriter,
-  newValue => {
-    if (!newValue) {
-      // If no longer streaming, immediately show full content
+  (newValue, oldValue) => {
+    if (!newValue && oldValue) {
+      // Don't interrupt if typewriter is still animating
+      if (isTyping.value) {
+        return
+      }
+
+      // If no longer streaming and not typing, immediately show full content
       displayText.value = getTransformedContent(props.message.content)
-      isTyping.value = false
       if (animationFrame.value) {
         cancelAnimationFrame(animationFrame.value)
         animationFrame.value = null

+ 6 - 5
app/src/components/LLM/ChatMessageInput.vue

@@ -1,5 +1,5 @@
 <script setup lang="ts">
-import { SendOutlined } from '@ant-design/icons-vue'
+import { LoadingOutlined, SendOutlined } from '@ant-design/icons-vue'
 import { storeToRefs } from 'pinia'
 import { useLLMStore } from './llm'
 
@@ -40,10 +40,11 @@ const messagesLength = computed(() => messages.value?.length ?? 0)
       <AButton
         size="small"
         type="text"
-        :loading="loading"
+        :disabled="loading"
         @click="llmStore.send(askBuffer)"
       >
-        <SendOutlined />
+        <LoadingOutlined v-if="loading" spin />
+        <SendOutlined v-else />
       </AButton>
     </div>
   </div>
@@ -53,13 +54,13 @@ const messagesLength = computed(() => messages.value?.length ?? 0)
 .input-msg {
   position: sticky;
   bottom: 0;
-  left: 0;
-  right: 0;
   background: rgba(255, 255, 255, 0.8);
   backdrop-filter: blur(10px);
   -webkit-backdrop-filter: blur(10px);
   padding: 16px;
   border-radius: 0 0 8px 8px;
+  width: 100%;
+  box-sizing: border-box;
 
   .control-btn {
     display: flex;

+ 1 - 2
app/src/components/LLM/ChatMessageList.vue

@@ -51,8 +51,7 @@ async function handleRegenerate(index: number) {
 
 <style lang="less" scoped>
 .message-list-container {
-  overflow-y: auto;
-  height: 100%;
+  width: 100%;
 
   .llm-log {
     :deep(.ant-list-item) {

+ 125 - 39
app/src/components/LLM/LLM.vue

@@ -6,6 +6,8 @@ import ChatMessageInput from './ChatMessageInput.vue'
 import ChatMessageList from './ChatMessageList.vue'
 import { buildLLMContext } from './contextBuilder'
 import { useLLMStore } from './llm'
+import LLMSessionTabs from './LLMSessionTabs.vue'
+import { useLLMSessionStore } from './sessionStore'
 
 const props = defineProps<{
   content: string
@@ -14,48 +16,46 @@ const props = defineProps<{
 
 const { language: current } = storeToRefs(useSettingsStore())
 
-// Use LLM store
+// Use LLM store and session store
 const llmStore = useLLMStore()
+const sessionStore = useLLMSessionStore()
 const { messageContainerRef } = storeToRefs(llmStore)
+const { activeSessionId, sortedSessions } = storeToRefs(sessionStore)
 
-// Initialize messages when path changes
+// Initialize sessions and handle path changes
 watch(() => props.path, async () => {
-  await llmStore.initMessages(props.path)
-  await nextTick()
+  // Load sessions for current path
+  await sessionStore.loadSessions(props.path)
 
-  // Auto-send first message if no messages exist
-  if (llmStore.messages.length === 0) {
-    await sendFirstMessage()
+  // Check if we have sessions available
+  if (sortedSessions.value.length > 0 && !activeSessionId.value) {
+    // Use the most recent session
+    const latestSession = sortedSessions.value[0]
+    await llmStore.switchSession(latestSession.session_id)
+    sessionStore.setActiveSession(latestSession.session_id)
   }
-  else {
-    // Check if we need to enhance the first message with include context
-    checkAndEnhanceFirstMessage()
-  }
-}, { immediate: true })
+  else if (sortedSessions.value.length === 0) {
+    // No sessions exist for this path, create a new one automatically
+    const title = props.path ? `Chat for ${props.path.split('/').pop()}` : 'New Chat'
+    try {
+      const session = await sessionStore.createSession(title, props.path)
+      await llmStore.switchSession(session.session_id)
 
-// Check if first message needs context enhancement
-async function checkAndEnhanceFirstMessage() {
-  if (llmStore.messages.length > 0 && props.path) {
-    const firstMessage = llmStore.messages[0]
-    // Check if the first message already contains included files info
-    if (firstMessage.role === 'user' && !firstMessage.content.includes('--- INCLUDED FILES ---')) {
-      try {
-        // Build complete context including included files
-        const context = await buildLLMContext(props.path, props.content)
-
-        if (context.includedFiles.length > 0) {
-          // Update the first message with enhanced context
-          const enhancedContent = `${context.contextText}\n\nCurrent Language Code: ${current.value}`
-          llmStore.messages[0].content = enhancedContent
-          await llmStore.storeRecord()
-        }
-      }
-      catch (error) {
-        console.error('Failed to enhance first message:', error)
+      // Initialize with first message
+      await nextTick()
+      await sendFirstMessage()
+    }
+    catch (error) {
+      console.error('Failed to create initial session:', error)
+      // Fallback to legacy mode
+      await llmStore.initMessages(props.path)
+      await nextTick()
+      if (llmStore.messages.length === 0) {
+        await sendFirstMessage()
       }
     }
   }
-}
+}, { immediate: true })
 
 // Build context and send first message
 async function sendFirstMessage() {
@@ -79,6 +79,23 @@ async function sendFirstMessage() {
   }
 }
 
+// Handle new session creation
+async function handleNewSessionCreated() {
+  // Reload sessions to update the list
+  await sessionStore.loadSessions(props.path)
+  await nextTick()
+
+  // Auto-send first message if no messages exist
+  if (llmStore.messages.length === 0) {
+    await sendFirstMessage()
+  }
+}
+
+// Handle when all sessions are cleared
+function handleSessionCleared() {
+  // Reset to initial state - could create a welcome message or just stay empty
+}
+
 const isVisible = useElementVisibility(messageContainerRef)
 
 watch(isVisible, visible => {
@@ -89,21 +106,90 @@ watch(isVisible, visible => {
 </script>
 
 <template>
-  <div
-    ref="messageContainerRef"
-    class="message-container"
-  >
-    <ChatMessageList />
+  <div class="llm-container">
+    <div class="session-header">
+      <LLMSessionTabs
+        :content="props.content"
+        :path="props.path"
+        @new-session-created="handleNewSessionCreated"
+        @session-cleared="handleSessionCleared"
+      />
+    </div>
 
-    <ChatMessageInput />
+    <div
+      ref="messageContainerRef"
+      class="message-container"
+    >
+      <ChatMessageList />
+      <ChatMessageInput />
+    </div>
   </div>
 </template>
 
 <style lang="less" scoped>
+.llm-container {
+  display: flex;
+  flex-direction: column;
+  height: 100%;
+  width: 100%;
+  position: relative;
+
+  // 为 backdrop-filter 提供背景内容
+  &::before {
+    content: '';
+    position: absolute;
+    top: 0;
+    left: 0;
+    right: 0;
+    bottom: 0;
+    background: linear-gradient(135deg,
+      rgba(0, 0, 0, 0.02) 0%,
+      rgba(255, 255, 255, 0.01) 50%,
+      rgba(0, 0, 0, 0.02) 100%);
+    pointer-events: none;
+    z-index: 0;
+  }
+}
+
+.session-header {
+  flex-shrink: 0;
+  position: relative;
+  z-index: 1;
+}
+
 .message-container {
+  flex: 1;
   margin: 0 auto;
+  width: 100%;
   max-width: 800px;
-  max-height: calc(100vh - 260px);
+  max-height: calc(100vh - 332px);
   overflow-y: auto;
+  overflow-x: hidden;
+  position: relative;
+  z-index: 1;
+  background: linear-gradient(to bottom,
+    rgba(0, 0, 0, 0.01) 0%,
+    rgba(255, 255, 255, 0.005) 30%,
+    rgba(0, 0, 0, 0.01) 60%,
+    rgba(255, 255, 255, 0.01) 100%);
+}
+
+.dark {
+  .llm-container {
+    &::before {
+      background: linear-gradient(135deg,
+        rgba(255, 255, 255, 0.02) 0%,
+        rgba(0, 0, 0, 0.01) 50%,
+        rgba(255, 255, 255, 0.02) 100%);
+    }
+  }
+
+  .message-container {
+    background: linear-gradient(to bottom,
+      rgba(255, 255, 255, 0.01) 0%,
+      rgba(0, 0, 0, 0.005) 30%,
+      rgba(255, 255, 255, 0.01) 60%,
+      rgba(0, 0, 0, 0.01) 100%);
+  }
 }
 </style>

+ 804 - 0
app/src/components/LLM/LLMSessionTabs.vue

@@ -0,0 +1,804 @@
+<script setup lang="ts">
+import {
+  ClockCircleOutlined,
+  CloseOutlined,
+  CopyOutlined,
+  DeleteOutlined,
+  EditOutlined,
+  MoreOutlined,
+  PlusOutlined,
+} from '@ant-design/icons-vue'
+import { storeToRefs } from 'pinia'
+import { useLLMStore } from './llm'
+import { useLLMSessionStore } from './sessionStore'
+
+const props = defineProps<{
+  content?: string
+  path?: string
+}>()
+
+const emit = defineEmits<{
+  newSessionCreated: []
+  sessionCleared: []
+}>()
+
+const sessionStore = useLLMSessionStore()
+const llmStore = useLLMStore()
+const { sortedSessions, activeSessionId } = storeToRefs(sessionStore)
+const { loading: llmLoading } = storeToRefs(llmStore)
+
+const editingSessionId = ref<string | null>(null)
+const editingTitle = ref('')
+const historyDrawerVisible = ref(false)
+
+// Only show first 3 sessions in tabs, rest in history
+const visibleSessions = computed(() => sortedSessions.value.slice(0, 3))
+const historySessions = computed(() => sortedSessions.value.slice(3))
+
+async function createNewSession() {
+  if (llmLoading.value) {
+    return // Don't create new session while LLM is generating output
+  }
+
+  const title = `New Chat`
+  try {
+    const session = await sessionStore.createSession(title, props.path)
+    await llmStore.switchSession(session.session_id)
+    emit('newSessionCreated')
+  }
+  catch (error) {
+    console.error('Failed to create session:', error)
+  }
+}
+
+async function selectSession(sessionId: string) {
+  if (sessionId === activeSessionId.value)
+    return
+
+  if (llmLoading.value) {
+    return // Don't switch sessions while LLM is generating output
+  }
+
+  await llmStore.switchSession(sessionId)
+  sessionStore.setActiveSession(sessionId)
+  historyDrawerVisible.value = false
+}
+
+async function closeSession(sessionId: string, event: Event) {
+  event.stopPropagation()
+
+  if (llmLoading.value) {
+    return // Don't delete sessions while LLM is generating output
+  }
+
+  const sessionIndex = sortedSessions.value.findIndex(s => s.session_id === sessionId)
+
+  try {
+    await sessionStore.deleteSession(sessionId)
+
+    // If we deleted the active session, switch to another one
+    if (sessionId === activeSessionId.value && sortedSessions.value.length > 0) {
+      // Try to select the next tab, or the previous one if it was the last
+      const newIndex = Math.min(sessionIndex, sortedSessions.value.length - 1)
+      if (newIndex >= 0) {
+        await selectSession(sortedSessions.value[newIndex].session_id)
+      }
+      else {
+        sessionStore.setActiveSession(null)
+        llmStore.clearMessages()
+        if (llmStore.currentSessionId) {
+          llmStore.currentSessionId = null
+        }
+        emit('sessionCleared')
+      }
+    }
+  }
+  catch (error) {
+    console.error('Failed to delete session:', error)
+  }
+}
+
+async function duplicateSession(sessionId: string, event: Event) {
+  event.stopPropagation()
+
+  if (llmLoading.value) {
+    return // Don't duplicate sessions while LLM is generating output
+  }
+
+  try {
+    const newSession = await sessionStore.duplicateSession(sessionId)
+    await selectSession(newSession.session_id)
+  }
+  catch (error) {
+    console.error('Failed to duplicate session:', error)
+  }
+}
+
+function startEditingTitle(sessionId: string, currentTitle: string, event: Event) {
+  event.stopPropagation()
+  editingSessionId.value = sessionId
+  editingTitle.value = currentTitle
+
+  nextTick(() => {
+    const input = document.querySelector('.tab-title-input input') as HTMLInputElement
+    if (input) {
+      input.focus()
+      input.select()
+    }
+  })
+}
+
+async function saveTitle() {
+  if (!editingSessionId.value || !editingTitle.value.trim())
+    return
+
+  try {
+    await sessionStore.updateSession(editingSessionId.value, {
+      title: editingTitle.value.trim(),
+    })
+    editingSessionId.value = null
+    editingTitle.value = ''
+  }
+  catch (error) {
+    console.error('Failed to update session title:', error)
+  }
+}
+
+function cancelEditing(event?: Event) {
+  if (event) {
+    event.stopPropagation()
+  }
+  editingSessionId.value = null
+  editingTitle.value = ''
+}
+
+function handleKeyDown(event: KeyboardEvent) {
+  if (event.key === 'Escape') {
+    cancelEditing()
+  }
+}
+
+function formatDate(dateStr: string) {
+  const date = new Date(dateStr)
+  const now = new Date()
+  const diffMs = now.getTime() - date.getTime()
+  const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24))
+
+  if (diffDays === 0) {
+    return date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' })
+  }
+  else if (diffDays === 1) {
+    return 'Yesterday'
+  }
+  else if (diffDays < 7) {
+    return `${diffDays} days ago`
+  }
+  else {
+    return date.toLocaleDateString()
+  }
+}
+
+function showHistoryDrawer() {
+  historyDrawerVisible.value = true
+}
+</script>
+
+<template>
+  <div class="llm-session-tabs">
+    <div class="tabs-container">
+      <div class="tabs-scroll">
+        <!-- Visible session tabs -->
+        <div
+          v-for="session in visibleSessions"
+          :key="session.session_id"
+          class="tab-item" :class="[
+            {
+              active: session.session_id === activeSessionId,
+              disabled: llmLoading,
+            },
+          ]"
+          @click="selectSession(session.session_id)"
+        >
+          <div class="tab-content">
+            <div v-if="editingSessionId === session.session_id" class="tab-title-input">
+              <AInput
+                v-model:value="editingTitle"
+                size="small"
+                :bordered="false"
+                @press-enter="saveTitle"
+                @blur="saveTitle"
+                @keydown="handleKeyDown"
+                @click.stop
+              />
+            </div>
+            <span v-else class="tab-title" @dblclick="startEditingTitle(session.session_id, session.title, $event)">
+              {{ session.title }}
+            </span>
+
+            <div class="tab-actions">
+              <ADropdown :trigger="['click']" placement="bottomRight">
+                <AButton
+                  type="text"
+                  size="small"
+                  class="tab-action-btn"
+                  @click.stop
+                >
+                  <template #icon>
+                    <MoreOutlined />
+                  </template>
+                </AButton>
+                <template #overlay>
+                  <AMenu>
+                    <AMenuItem @click="startEditingTitle(session.session_id, session.title, $event)">
+                      <EditOutlined />
+                      {{ $gettext('Rename') }}
+                    </AMenuItem>
+                    <AMenuItem @click="duplicateSession(session.session_id, $event)">
+                      <CopyOutlined />
+                      {{ $gettext('Duplicate') }}
+                    </AMenuItem>
+                    <AMenuDivider />
+                    <AMenuItem danger @click="closeSession(session.session_id, $event)">
+                      <DeleteOutlined />
+                      {{ $gettext('Delete') }}
+                    </AMenuItem>
+                  </AMenu>
+                </template>
+              </ADropdown>
+
+              <AButton
+                type="text"
+                size="small"
+                class="tab-close-btn"
+                @click="closeSession(session.session_id, $event)"
+              >
+                <CloseOutlined />
+              </AButton>
+            </div>
+          </div>
+        </div>
+      </div>
+
+      <!-- Actions -->
+      <div class="tab-actions-group">
+        <!-- History button (only show if there are sessions beyond the visible ones) -->
+        <AButton
+          v-if="historySessions.length > 0"
+          type="text"
+          size="small"
+          class="history-btn"
+          @click="showHistoryDrawer"
+        >
+          <ClockCircleOutlined />
+        </AButton>
+
+        <!-- Add new session button -->
+        <AButton
+          type="text"
+          size="small"
+          class="add-btn"
+          :disabled="llmLoading"
+          @click="createNewSession"
+        >
+          <PlusOutlined />
+        </AButton>
+      </div>
+    </div>
+
+    <!-- History Drawer -->
+    <ADrawer
+      v-model:open="historyDrawerVisible"
+      title="Chat History"
+      placement="right"
+      :width="320"
+    >
+      <div class="history-list">
+        <div
+          v-for="session in historySessions"
+          :key="session.session_id"
+          class="history-item" :class="[
+            {
+              active: session.session_id === activeSessionId,
+              disabled: llmLoading,
+            },
+          ]"
+          @click="selectSession(session.session_id)"
+        >
+          <div class="history-content">
+            <div class="history-main">
+              <div class="history-title">
+                {{ session.title }}
+              </div>
+              <div class="history-meta">
+                <span class="history-date">{{ formatDate(session.updated_at) }}</span>
+                <span v-if="session.message_count > 0" class="history-count">
+                  {{ session.message_count }} messages
+                </span>
+              </div>
+            </div>
+
+            <div class="history-actions">
+              <ADropdown :trigger="['click']" placement="bottomLeft">
+                <AButton
+                  type="text"
+                  size="small"
+                  @click.stop
+                >
+                  <MoreOutlined />
+                </AButton>
+                <template #overlay>
+                  <AMenu>
+                    <AMenuItem @click="startEditingTitle(session.session_id, session.title, $event)">
+                      <EditOutlined />
+                      {{ $gettext('Rename') }}
+                    </AMenuItem>
+                    <AMenuItem @click="duplicateSession(session.session_id, $event)">
+                      <CopyOutlined />
+                      {{ $gettext('Duplicate') }}
+                    </AMenuItem>
+                    <AMenuDivider />
+                    <AMenuItem danger @click="closeSession(session.session_id, $event)">
+                      <DeleteOutlined />
+                      {{ $gettext('Delete') }}
+                    </AMenuItem>
+                  </AMenu>
+                </template>
+              </ADropdown>
+            </div>
+          </div>
+        </div>
+
+        <AEmpty
+          v-if="historySessions.length === 0"
+          :description="$gettext('No more sessions')"
+          size="small"
+        />
+      </div>
+    </ADrawer>
+  </div>
+</template>
+
+<style lang="less" scoped>
+.llm-session-tabs {
+  background: rgba(255, 255, 255, 0.3);
+  backdrop-filter: blur(10px);
+  -webkit-backdrop-filter: blur(10px);
+  width: 100%;
+  position: sticky;
+  top: 0;
+  z-index: 10;
+
+  .tabs-container {
+    display: flex;
+    align-items: flex-end;
+    height: 48px;
+    padding: 6px 12px 0;
+    width: 100%;
+    box-sizing: border-box;
+  }
+
+  .tabs-scroll {
+    flex: 1;
+    display: flex;
+    overflow-x: auto;
+    overflow-y: hidden;
+    gap: 0;
+    min-width: 0;
+    border: 1px solid var(--color-border);
+    border-bottom: none;
+    border-radius: 8px 8px 0 0;
+    background: transparent;
+    position: relative;
+
+    &::-webkit-scrollbar {
+      height: 0;
+    }
+  }
+
+  .tab-item {
+    flex-shrink: 0;
+    display: flex;
+    align-items: center;
+    padding: 8px 12px;
+    cursor: pointer;
+    transition: all 0.15s ease;
+    background: transparent;
+    border-right: 1px solid var(--color-border);
+    max-width: 240px;
+    min-width: 140px;
+    position: relative;
+    height: 34px;
+    box-sizing: border-box;
+
+    &:first-child {
+      border-top-left-radius: 7px;
+    }
+
+    &:last-child {
+      border-right: none;
+      border-top-right-radius: 7px;
+    }
+
+    &:hover:not(.disabled):not(.active) {
+      background: var(--color-fill-2);
+
+      .tab-title {
+        color: var(--color-text-1);
+      }
+
+      .tab-actions {
+        opacity: 1;
+      }
+    }
+
+    &.active {
+      background: var(--color-bg-container);
+      color: var(--color-text-1);
+      margin-bottom: -1px;
+      z-index: 2;
+      position: relative;
+
+      .tab-title {
+        font-weight: 500;
+        color: var(--color-text-1);
+      }
+
+      .tab-actions {
+        opacity: 1;
+      }
+    }
+
+    &.disabled {
+      opacity: 0.5;
+      cursor: not-allowed;
+    }
+  }
+
+  .tab-content {
+    display: flex;
+    align-items: center;
+    gap: 8px;
+    width: 100%;
+  }
+
+  .tab-title {
+    flex: 1;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    font-size: 14px;
+    color: var(--color-text-2);
+    transition: color 0.15s ease;
+  }
+
+  .tab-title-input {
+    flex: 1;
+
+    :deep(.ant-input) {
+      padding: 4px 0;
+      font-size: 14px;
+      background: transparent;
+      border: none;
+      color: var(--color-text-1);
+
+      &:focus {
+        box-shadow: none;
+        border: none;
+        background: var(--color-fill-1);
+        border-radius: 4px;
+        padding: 4px 8px;
+      }
+    }
+  }
+
+  .tab-actions {
+    display: flex;
+    align-items: center;
+    gap: 4px;
+    opacity: 0;
+    transition: opacity 0.15s ease;
+  }
+
+  .tab-action-btn,
+  .tab-close-btn {
+    width: 22px;
+    height: 22px;
+    padding: 0;
+    display: flex;
+    align-items: center;
+    justify-content: center;
+    border-radius: 4px;
+    border: none;
+    background: transparent;
+    color: var(--color-text-3);
+    transition: all 0.15s ease;
+
+    &:hover {
+      background: var(--color-fill-3);
+      color: var(--color-text-1);
+    }
+
+    :deep(.anticon) {
+      font-size: 12px;
+    }
+  }
+
+  .tab-close-btn:hover {
+    background: var(--color-danger-light-1);
+    color: var(--color-danger);
+  }
+
+  .tab-actions-group {
+    flex-shrink: 0;
+    display: flex;
+    align-items: center;
+    height: 34px;
+    padding: 0 8px;
+    background: transparent;
+    border: 1px solid var(--color-border);
+    border-left: none;
+    border-bottom: none;
+    border-radius: 0 8px 0 0;
+    margin-left: 8px;
+    position: relative;
+
+    .history-btn,
+    .add-btn {
+      width: 24px;
+      height: 24px;
+      padding: 0;
+      display: flex;
+      align-items: center;
+      justify-content: center;
+      border-radius: 4px;
+      border: none;
+      background: transparent;
+      color: var(--color-text-3);
+      transition: all 0.15s ease;
+      margin: 0 2px;
+
+      &:hover:not(:disabled) {
+        background: var(--color-fill-2);
+        color: var(--color-text-1);
+      }
+
+      &:disabled {
+        opacity: 0.5;
+        cursor: not-allowed;
+      }
+
+      :deep(.anticon) {
+        font-size: 12px;
+      }
+    }
+  }
+}
+
+.history-list {
+  padding: 8px;
+
+  .history-item {
+    padding: 14px 16px;
+    cursor: pointer;
+    border-radius: 8px;
+    margin-bottom: 6px;
+    transition: all 0.15s ease;
+    border: 1px solid transparent;
+    background: var(--color-bg-container);
+
+    &:hover:not(.disabled) {
+      background: var(--color-fill-1);
+      border-color: var(--color-border-2);
+      transform: translateY(-1px);
+      box-shadow: 0 2px 8px rgba(0, 0, 0, 0.06);
+    }
+
+    &.active {
+      background: var(--color-primary-light-1);
+      border-color: var(--color-primary-border);
+
+      .history-title {
+        color: var(--color-primary);
+        font-weight: 500;
+      }
+    }
+
+    &.disabled {
+      opacity: 0.5;
+      cursor: not-allowed;
+
+      &:hover {
+        transform: none;
+        box-shadow: none;
+      }
+    }
+
+    &:last-child {
+      margin-bottom: 0;
+    }
+  }
+
+  .history-content {
+    display: flex;
+    align-items: flex-start;
+    justify-content: space-between;
+    gap: 12px;
+  }
+
+  .history-main {
+    flex: 1;
+    min-width: 0;
+  }
+
+  .history-title {
+    font-size: 14px;
+    font-weight: 450;
+    margin-bottom: 6px;
+    overflow: hidden;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    color: var(--color-text-1);
+    transition: color 0.15s ease;
+  }
+
+  .history-meta {
+    display: flex;
+    align-items: center;
+    gap: 8px;
+    font-size: 12px;
+    color: var(--color-text-3);
+
+    .history-date {
+      font-weight: 400;
+    }
+
+    .history-count {
+      padding: 2px 6px;
+      background: var(--color-fill-2);
+      border-radius: 10px;
+      font-size: 11px;
+      color: var(--color-text-2);
+    }
+  }
+
+  .history-actions {
+    flex-shrink: 0;
+    opacity: 0;
+    transition: opacity 0.15s ease;
+
+    .history-item:hover & {
+      opacity: 1;
+    }
+
+    .ant-btn {
+      border: none;
+      background: transparent;
+      color: var(--color-text-3);
+      border-radius: 4px;
+
+      &:hover {
+        background: var(--color-fill-2);
+        color: var(--color-text-1);
+      }
+    }
+  }
+}
+
+.dark {
+  .llm-session-tabs {
+    background: rgba(30, 30, 30, 0.8);
+
+    .tabs-scroll {
+      background: transparent;
+      border-color: rgba(255, 255, 255, 0.1);
+    }
+
+    .tab-item {
+      border-right-color: rgba(255, 255, 255, 0.08);
+
+      &:hover:not(.disabled):not(.active) {
+        background: rgba(255, 255, 255, 0.08);
+      }
+
+      &.active {
+        background: rgba(30, 30, 30, 0.8);
+        color: #ffffff;
+      }
+    }
+
+    .tab-title {
+      color: rgba(255, 255, 255, 0.7);
+    }
+
+    .tab-item:hover:not(.disabled):not(.active) .tab-title {
+      color: rgba(255, 255, 255, 0.9);
+    }
+
+    .tab-item.active .tab-title {
+      color: #ffffff;
+      font-weight: 500;
+    }
+
+    .tab-title-input :deep(.ant-input) {
+      color: #ffffff;
+
+      &:focus {
+        background: rgba(255, 255, 255, 0.1);
+      }
+    }
+
+    .tab-action-btn,
+    .tab-close-btn {
+      color: rgba(255, 255, 255, 0.6);
+
+      &:hover {
+        background: rgba(255, 255, 255, 0.1);
+        color: rgba(255, 255, 255, 0.9);
+      }
+    }
+
+    .tab-close-btn:hover {
+      background: rgba(239, 68, 68, 0.2);
+      color: #ef4444;
+    }
+
+    .tab-actions-group {
+      background: transparent;
+      border-color: rgba(255, 255, 255, 0.1);
+
+      .history-btn,
+      .add-btn {
+        color: rgba(255, 255, 255, 0.6);
+
+        &:hover:not(:disabled) {
+          background: rgba(255, 255, 255, 0.1);
+          color: rgba(255, 255, 255, 0.9);
+        }
+      }
+    }
+  }
+
+  .history-list {
+    .history-item {
+      background: #1a1a1a;
+
+      &:hover:not(.disabled) {
+        background: #2a2a2a;
+        border-color: #404040;
+        box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
+      }
+
+      &.active {
+        background: rgba(var(--primary-6), 0.15);
+        border-color: var(--color-primary);
+
+        .history-title {
+          color: var(--color-primary);
+        }
+      }
+    }
+
+    .history-title {
+      color: #e8e8e8;
+    }
+
+    .history-meta {
+      color: #888888;
+
+      .history-count {
+        background: #333333;
+        color: #aaaaaa;
+      }
+    }
+
+    .history-actions .ant-btn {
+      color: #888888;
+
+      &:hover {
+        background: #404040;
+        color: #e8e8e8;
+      }
+    }
+  }
+}
+</style>

+ 189 - 0
app/src/components/LLM/animationCoordinator.ts

@@ -0,0 +1,189 @@
+// Animation Coordinator - Centralized state management for all animations and scrolling
+import { readonly, ref, watch } from 'vue'
+
+export interface AnimationState {
+  messageStreaming: boolean
+  messageTyping: boolean
+  titleAnimating: boolean
+  scrolling: boolean
+}
+
+class AnimationCoordinator {
+  private state = ref<AnimationState>({
+    messageStreaming: false,
+    messageTyping: false,
+    titleAnimating: false,
+    scrolling: false,
+  })
+
+  private callbacks: {
+    onMessageTypingComplete?: () => void
+    onTitleAnimationComplete?: () => void
+    onAllAnimationsComplete?: () => void
+  } = {}
+
+  // Get current state (readonly)
+  getState() {
+    return readonly(this.state)
+  }
+
+  // Check if any animation is in progress
+  isAnyAnimationActive() {
+    const s = this.state.value
+    return s.messageStreaming || s.messageTyping || s.titleAnimating || s.scrolling
+  }
+
+  // Check if message-related animations are complete
+  isMessageAnimationComplete() {
+    const s = this.state.value
+    return !s.messageStreaming && !s.messageTyping
+  }
+
+  // Set message streaming state
+  setMessageStreaming(streaming: boolean) {
+    if (this.state.value.messageStreaming === streaming)
+      return
+
+    this.state.value.messageStreaming = streaming
+
+    if (!streaming) {
+      // When streaming stops, message typing might still be active
+      this.checkTransitions()
+    }
+  }
+
+  // Set message typing state
+  setMessageTyping(typing: boolean) {
+    // Prevent redundant state changes
+    if (this.state.value.messageTyping === typing)
+      return
+
+    this.state.value.messageTyping = typing
+
+    if (!typing) {
+      this.callbacks.onMessageTypingComplete?.()
+      this.checkTransitions()
+    }
+  }
+
+  // Set title animation state
+  setTitleAnimating(animating: boolean) {
+    if (this.state.value.titleAnimating === animating)
+      return
+
+    this.state.value.titleAnimating = animating
+
+    if (!animating) {
+      this.callbacks.onTitleAnimationComplete?.()
+      this.checkTransitions()
+    }
+  }
+
+  // Set scrolling state
+  setScrolling(scrolling: boolean) {
+    this.state.value.scrolling = scrolling
+
+    if (!scrolling) {
+      this.checkTransitions()
+    }
+  }
+
+  // Set callbacks
+  setCallbacks(callbacks: Partial<typeof this.callbacks>) {
+    Object.assign(this.callbacks, callbacks)
+  }
+
+  private titleAnimationTriggered = false
+
+  // Check for state transitions and trigger appropriate actions
+  private checkTransitions() {
+    const s = this.state.value
+
+    // If message animation is complete and title is not animating, we can start title animation
+    if (this.isMessageAnimationComplete() && !s.titleAnimating && !this.titleAnimationTriggered) {
+      this.titleAnimationTriggered = true
+
+      // Small delay before starting title animation
+      setTimeout(() => {
+        if (this.isMessageAnimationComplete() && !this.state.value.titleAnimating) {
+          this.triggerTitleAnimation()
+        }
+      }, 200)
+    }
+
+    // If all animations are complete
+    if (!this.isAnyAnimationActive()) {
+      this.callbacks.onAllAnimationsComplete?.()
+    }
+  }
+
+  // Trigger title animation (to be called by external code)
+  private triggerTitleAnimation() {
+    // This will be handled by the LLM store
+    window.dispatchEvent(new CustomEvent('startTitleAnimation'))
+  }
+
+  // Reset all states (useful when starting a new conversation)
+  reset() {
+    this.state.value = {
+      messageStreaming: false,
+      messageTyping: false,
+      titleAnimating: false,
+      scrolling: false,
+    }
+    this.titleAnimationTriggered = false
+  }
+
+  // Wait for message animation to complete
+  async waitForMessageAnimationComplete(): Promise<void> {
+    return new Promise(resolve => {
+      if (this.isMessageAnimationComplete()) {
+        resolve()
+        return
+      }
+
+      const unwatch = watch(
+        () => this.isMessageAnimationComplete(),
+        complete => {
+          if (complete) {
+            unwatch()
+            resolve()
+          }
+        },
+      )
+    })
+  }
+
+  // Wait for all animations to complete
+  async waitForAllAnimationsComplete(): Promise<void> {
+    return new Promise(resolve => {
+      if (!this.isAnyAnimationActive()) {
+        resolve()
+        return
+      }
+
+      const unwatch = watch(
+        () => this.isAnyAnimationActive(),
+        active => {
+          if (!active) {
+            unwatch()
+            resolve()
+          }
+        },
+      )
+    })
+  }
+}
+
+// Global singleton instance
+export const animationCoordinator = new AnimationCoordinator()
+
+// Composable for using in components
+export function useAnimationCoordinator() {
+  return {
+    coordinator: animationCoordinator,
+    state: animationCoordinator.getState(),
+    isAnyAnimationActive: () => animationCoordinator.isAnyAnimationActive(),
+    isMessageAnimationComplete: () => animationCoordinator.isMessageAnimationComplete(),
+  }
+}

+ 267 - 39
app/src/components/LLM/llm.ts

@@ -1,17 +1,22 @@
 import type { ChatComplicationMessage } from '@/api/llm'
 import llm from '@/api/llm'
+import { animationCoordinator } from './animationCoordinator'
 import { ChatService } from './chatService'
+import { useLLMSessionStore } from './sessionStore'
 
 export const useLLMStore = defineStore('llm', () => {
   // State
-  const path = ref<string>('') // Path to the chat record file
+  const path = ref('')
+  const currentSessionId = ref<string | null>(null)
   const messages = ref<ChatComplicationMessage[]>([])
   const messageContainerRef = ref<HTMLDivElement>()
   const loading = ref(false)
   const editingIdx = ref(-1)
   const editValue = ref('')
   const askBuffer = ref('')
-  const streamingMessageIndex = ref(-1) // Track which message is currently streaming
+  const streamingMessageIndex = ref(-1)
+  const userScrolledUp = ref(false)
+  const messageTypingCompleted = ref(false)
 
   // Getters
   const isEditing = computed(() => editingIdx.value !== -1)
@@ -39,6 +44,34 @@ export const useLLMStore = defineStore('llm', () => {
     }
   }
 
+  // Switch to a specific session
+  async function switchSession(sessionId: string) {
+    try {
+      currentSessionId.value = sessionId
+      const session = await llm.get_session(sessionId)
+      messages.value = session.messages || []
+      path.value = session.path || ''
+      cancelEdit()
+    }
+    catch (error) {
+      console.error('Failed to switch session:', error)
+    }
+  }
+
+  // Save current messages to session
+  async function saveSession() {
+    if (!currentSessionId.value)
+      return
+
+    try {
+      const validMessages = messages.value.filter(msg => msg.content.trim() !== '')
+      await llm.update_session(currentSessionId.value, { messages: validMessages })
+    }
+    catch (error) {
+      console.error('Failed to save session:', error)
+    }
+  }
+
   // Start editing a message at the specified index
   function startEdit(index: number) {
     if (index >= 0 && index < messages.value.length) {
@@ -100,19 +133,21 @@ export const useLLMStore = defineStore('llm', () => {
 
   // Store chat record to server
   async function storeRecord() {
-    if (!path.value)
-      return
-
-    try {
-      // Filter out empty messages before storing
-      const validMessages = messages.value.filter(msg => msg.content.trim() !== '')
-      await llm.store_messages({
-        file_name: path.value,
-        messages: validMessages,
-      })
+    // Prefer session storage over legacy file storage
+    if (currentSessionId.value) {
+      await saveSession()
     }
-    catch (error) {
-      console.error('Failed to store chat record:', error)
+    else if (path.value) {
+      try {
+        const validMessages = messages.value.filter(msg => msg.content.trim() !== '')
+        await llm.store_messages({
+          file_name: path.value,
+          messages: validMessages,
+        })
+      }
+      catch (error) {
+        console.error('Failed to store chat record:', error)
+      }
     }
   }
 
@@ -148,15 +183,125 @@ export const useLLMStore = defineStore('llm', () => {
     askBuffer.value = ''
   }
 
-  // scroll to bottom
-  function scrollToBottom() {
-    if (messageContainerRef.value) {
-      // Use setTimeout to ensure DOM is updated
-      setTimeout(() => {
-        if (messageContainerRef.value) {
-          messageContainerRef.value.scrollTop = messageContainerRef.value.scrollHeight
+  // Auto-scroll state management
+  const isAutoScrolling = ref(false)
+  const scrollObserver = ref<ResizeObserver | null>(null)
+  const mutationObserver = ref<MutationObserver | null>(null)
+  const lastScrollHeight = ref(0)
+
+  // Check if container is at bottom
+  function isAtBottom() {
+    if (!messageContainerRef.value)
+      return true
+
+    const container = messageContainerRef.value
+    const threshold = 10 // Very strict threshold for precision
+    const scrollBottom = container.scrollHeight - container.scrollTop - container.clientHeight
+    return scrollBottom <= threshold
+  }
+
+  // Smooth scroll to bottom with high precision
+  function scrollToBottom(force = false) {
+    if (!messageContainerRef.value)
+      return
+
+    const container = messageContainerRef.value
+
+    // Always scroll if forced, or if user hasn't scrolled up
+    if (!force && userScrolledUp.value) {
+      return
+    }
+
+    // Mark as auto-scrolling to prevent user scroll detection
+    isAutoScrolling.value = true
+
+    // Immediate scroll
+    container.scrollTop = container.scrollHeight
+
+    // Reset auto-scroll flag after a short delay
+    setTimeout(() => {
+      isAutoScrolling.value = false
+      userScrolledUp.value = false // Reset user scroll state
+    }, 50)
+  }
+
+  // Enhanced scroll position detection
+  function checkScrollPosition() {
+    if (!messageContainerRef.value || isAutoScrolling.value)
+      return
+
+    const wasAtBottom = isAtBottom()
+    userScrolledUp.value = !wasAtBottom
+  }
+
+  // Start real-time scroll tracking for typewriter animations
+  function startScrollTracking() {
+    if (!messageContainerRef.value)
+      return
+
+    const container = messageContainerRef.value
+
+    // Stop any existing observers
+    stopScrollTracking()
+
+    // Track size changes using ResizeObserver for real-time response
+    scrollObserver.value = new ResizeObserver(entries => {
+      for (const entry of entries) {
+        const newHeight = entry.target.scrollHeight
+        if (newHeight !== lastScrollHeight.value) {
+          lastScrollHeight.value = newHeight
+
+          // Only auto-scroll if user hasn't scrolled up
+          if (!userScrolledUp.value) {
+            scrollToBottom()
+          }
         }
-      }, 10)
+      }
+    })
+
+    // Start observing the container
+    scrollObserver.value.observe(container)
+
+    // Also observe content changes using MutationObserver
+    mutationObserver.value = new MutationObserver(mutations => {
+      let shouldScroll = false
+
+      for (const mutation of mutations) {
+        if (mutation.type === 'childList' || mutation.type === 'characterData') {
+          shouldScroll = true
+          break
+        }
+      }
+
+      if (shouldScroll && !userScrolledUp.value) {
+        // Use RAF for smooth scrolling
+        requestAnimationFrame(() => {
+          scrollToBottom()
+        })
+      }
+    })
+
+    // Observe all content changes
+    mutationObserver.value.observe(container, {
+      childList: true,
+      subtree: true,
+      characterData: true,
+    })
+
+    // Initial scroll to bottom
+    scrollToBottom(true)
+  }
+
+  // Stop scroll tracking
+  function stopScrollTracking() {
+    if (scrollObserver.value) {
+      scrollObserver.value.disconnect()
+      scrollObserver.value = null
+    }
+
+    if (mutationObserver.value) {
+      mutationObserver.value.disconnect()
+      mutationObserver.value = null
     }
   }
 
@@ -173,10 +318,15 @@ export const useLLMStore = defineStore('llm', () => {
   // Request: Send messages to server using chat service
   async function request() {
     setLoading(true)
+    animationCoordinator.reset() // Reset all animation states
+    animationCoordinator.setMessageStreaming(true)
 
     // Set the streaming message index to the last message (assistant message)
     setStreamingMessageIndex(messages.value.length - 1)
 
+    // Start real-time scroll tracking for typewriter animation
+    startScrollTracking()
+
     try {
       const chatService = new ChatService()
       const assistantMessage = await chatService.request(
@@ -190,10 +340,19 @@ export const useLLMStore = defineStore('llm', () => {
 
       // Update the final content
       updateLastAssistantMessage(assistantMessage.content)
+      
+      // If no typing animation starts within a reasonable time, end streaming
+      // This handles cases where content is too short for typewriter effect
+      setTimeout(() => {
+        if (animationCoordinator.getState().value.messageStreaming) {
+          animationCoordinator.setMessageStreaming(false)
+        }
+      }, 200)
 
-      // Auto scroll to bottom after response
+      // Ensure content is rendered before scrolling
       await nextTick()
-      scrollToBottom()
+      await nextTick() // Double nextTick for complex content
+      scrollToBottom(true) // Force scroll when message is complete
     }
     catch (error) {
       console.error('Chat request failed:', error)
@@ -203,16 +362,32 @@ export const useLLMStore = defineStore('llm', () => {
       }
     }
     finally {
-      setLoading(false)
-      clearStreamingMessageIndex() // Clear streaming state
-
-      // Force scroll to bottom one more time after everything is done
+      // Don't clear streaming index immediately - let typewriter animation complete first
+      
+      // Ensure all DOM updates are complete before final scroll
       await nextTick()
-      setTimeout(() => {
-        scrollToBottom()
-      }, 100)
+      await nextTick()
+      scrollToBottom(true) // Force scroll after loading completes
 
       await storeRecord()
+
+      // Title animation will be triggered by coordinator when message animation completes
+
+      // Wait for all animations to complete before ending loading state
+      setTimeout(async () => {
+        await animationCoordinator.waitForAllAnimationsComplete()
+
+        // Now clear streaming state after all animations are done
+        clearStreamingMessageIndex()
+
+        // Stop scroll tracking when all animations are done
+        stopScrollTracking()
+
+        setLoading(false)
+
+        // Final scroll after everything is truly complete
+        scrollToBottom(true)
+      }, 100)
     }
   }
 
@@ -247,22 +422,67 @@ export const useLLMStore = defineStore('llm', () => {
     await request()
   }
 
-  // Watch for streaming messages to auto-scroll during typing
-  watch(streamingMessageIndex, newIndex => {
-    if (newIndex !== -1) {
-      scrollToBottom()
+  // Auto-generate title for sessions with user messages
+  async function tryGenerateSessionTitle() {
+    if (!currentSessionId.value) {
+      return
     }
+
+    // Check if there are user messages in the conversation
+    const hasUserMessages = messages.value.some(msg => msg.role === 'user')
+    if (!hasUserMessages) {
+      return
+    }
+    // Wait for message animation to complete before starting title animation
+    await animationCoordinator.waitForMessageAnimationComplete()
+
+    try {
+      const sessionStore = useLLMSessionStore()
+      await sessionStore.generateSessionTitle(currentSessionId.value)
+    }
+    catch (error) {
+      console.error('Failed to auto-generate session title:', error)
+    }
+  }
+
+  // Listen for title animation trigger from coordinator
+  onMounted(() => {
+    window.addEventListener('startTitleAnimation', () => {
+      tryGenerateSessionTitle()
+    })
+  })
+
+  onUnmounted(() => {
+    window.removeEventListener('startTitleAnimation', () => {
+      tryGenerateSessionTitle()
+    })
+
+    // Clean up observers when component unmounts
+    stopScrollTracking()
   })
 
-  // Auto-scroll when messages are updated during streaming
-  watch(() => messages.value.length > 0 ? messages.value[messages.value.length - 1]?.content : '', newContent => {
-    if (streamingMessageIndex.value !== -1 && newContent) {
-      scrollToBottom()
+  // Set up manual scroll detection when container ref changes
+  watch(messageContainerRef, newContainer => {
+    if (newContainer) {
+      // Simple scroll listener for detecting user manual scrolls
+      const handleScroll = () => {
+        // Only detect scroll if not auto-scrolling
+        if (!isAutoScrolling.value) {
+          checkScrollPosition()
+        }
+      }
+
+      newContainer.addEventListener('scroll', handleScroll, { passive: true })
+
+      // Initial check
+      checkScrollPosition()
     }
   })
   // Return all state, getters, and actions
   return {
     // State
+    path,
+    currentSessionId,
     messages,
     loading,
     editingIdx,
@@ -270,6 +490,8 @@ export const useLLMStore = defineStore('llm', () => {
     askBuffer,
     messageContainerRef,
     streamingMessageIndex,
+    userScrolledUp,
+    messageTypingCompleted,
 
     // Getters
     isEditing,
@@ -278,6 +500,8 @@ export const useLLMStore = defineStore('llm', () => {
 
     // Actions
     initMessages,
+    switchSession,
+    saveSession,
     startEdit,
     saveEdit,
     cancelEdit,
@@ -297,5 +521,9 @@ export const useLLMStore = defineStore('llm', () => {
     send,
     regenerate,
     scrollToBottom,
+    tryGenerateSessionTitle,
+    checkScrollPosition,
+    startScrollTracking,
+    stopScrollTracking,
   }
 })

+ 226 - 0
app/src/components/LLM/sessionStore.ts

@@ -0,0 +1,226 @@
+import type { ChatComplicationMessage, LLMSessionResponse } from '@/api/llm'
+import llm from '@/api/llm'
+import { animationCoordinator } from './animationCoordinator'
+
export const useLLMSessionStore = defineStore('llm-session', () => {
  // State
  const sessions = ref<LLMSessionResponse[]>([])
  const activeSessionId = ref<string | null>(null)
  const loading = ref(false)
  const sessionDrawerVisible = ref(false)
  // session_ids whose title is currently being typed out by
  // typewriterEffect; guards against concurrent updates clobbering the
  // animation mid-flight.
  const typewriterInProgress = ref(new Set<string>())

  // Getters
  // The session object matching activeSessionId, or null when none is active.
  const activeSession = computed(() => {
    if (!activeSessionId.value)
      return null
    return sessions.value.find(s => s.session_id === activeSessionId.value) || null
  })

  // Sessions ordered most-recently-updated first (for the drawer list).
  const sortedSessions = computed(() => {
    return [...sessions.value].sort((a, b) => {
      return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()
    })
  })

  const hasActiveSession = computed(() => activeSessionId.value !== null)

  // Actions
  // Fetch all sessions from the backend, optionally scoped to a config path.
  // Errors are logged but not rethrown — the list simply stays as-is.
  async function loadSessions(path?: string) {
    loading.value = true
    try {
      const response = await llm.get_sessions(path)
      sessions.value = response
    }
    catch (error) {
      console.error('Failed to load sessions:', error)
    }
    finally {
      loading.value = false
    }
  }

  // Create a new session, prepend it to the list and make it active.
  async function createSession(title: string, path?: string) {
    try {
      const response = await llm.create_session({ title, path })
      sessions.value.unshift(response)
      activeSessionId.value = response.session_id
      return response
    }
    catch (error) {
      console.error('Failed to create session:', error)
      throw error
    }
  }

  // Persist title and/or message changes for a session, then sync the
  // local copy with the server response.
  async function updateSession(sessionId: string, data: { title?: string, messages?: ChatComplicationMessage[] }) {
    try {
      const response = await llm.update_session(sessionId, data)
      const index = sessions.value.findIndex(s => s.session_id === sessionId)
      if (index !== -1) {
        // If typewriter is in progress for this session, preserve the current title
        // — the animation owns the visible title text until it finishes.
        if (typewriterInProgress.value.has(sessionId) && data.title) {
          const currentTitle = sessions.value[index].title
          sessions.value[index] = { ...response, title: currentTitle }
        }
        else {
          sessions.value[index] = response
        }
      }
      return response
    }
    catch (error) {
      console.error('Failed to update session:', error)
      throw error
    }
  }

  // Delete a session on the server and remove it locally; clears the
  // active selection if it pointed at the deleted session.
  async function deleteSession(sessionId: string) {
    try {
      await llm.delete_session(sessionId)
      sessions.value = sessions.value.filter(s => s.session_id !== sessionId)

      // If deleting active session, clear it
      if (activeSessionId.value === sessionId) {
        activeSessionId.value = null
      }
    }
    catch (error) {
      console.error('Failed to delete session:', error)
      throw error
    }
  }

  // Clone an existing session server-side; the copy becomes active.
  async function duplicateSession(sessionId: string) {
    try {
      const response = await llm.duplicate_session(sessionId)
      sessions.value.unshift(response)
      activeSessionId.value = response.session_id
      return response
    }
    catch (error) {
      console.error('Failed to duplicate session:', error)
      throw error
    }
  }

  // Ask the backend to generate a title for a session and reveal it with
  // a typewriter animation. No-ops while an animation is already running
  // for the same session.
  async function generateSessionTitle(sessionId: string) {
    try {
      // Skip if typewriter is already in progress for this session
      if (typewriterInProgress.value.has(sessionId)) {
        return
      }

      const response = await llm.generate_session_title(sessionId)

      // Update the session in the local store with typewriter effect
      const index = sessions.value.findIndex(s => s.session_id === sessionId)
      if (index !== -1) {
        await typewriterEffect(sessionId, response.title)
      }

      return response
    }
    catch (error) {
      console.error('Failed to generate session title:', error)
      throw error
    }
  }

  // Typewriter effect for session title: clears the title, then reveals
  // newTitle one character per ~20ms tick. Marks the session in
  // typewriterInProgress and notifies the animation coordinator so other
  // updates don't overwrite the title mid-animation.
  async function typewriterEffect(sessionId: string, newTitle: string) {
    const index = sessions.value.findIndex(s => s.session_id === sessionId)
    if (index === -1) {
      return
    }

    const session = sessions.value[index]

    // Mark typewriter as in progress
    typewriterInProgress.value.add(sessionId)
    animationCoordinator.setTitleAnimating(true)

    try {
      let currentText = ''

      // Clear the current title first
      session.title = ''

      // Type out the new title character by character
      for (let i = 0; i <= newTitle.length; i++) {
        // Double-check session still exists (in case of concurrent operations);
        // the index is re-resolved every tick because deletions can happen
        // while we're awaiting.
        const currentIndex = sessions.value.findIndex(s => s.session_id === sessionId)
        if (currentIndex === -1) {
          break
        }

        currentText = newTitle.substring(0, i)
        sessions.value[currentIndex].title = currentText

        // Ensure Vue updates the DOM
        await nextTick()

        // Wait between each character (adjust speed as needed)
        await new Promise(resolve => setTimeout(resolve, 20))
      }

      // Ensure final title is set correctly
      const finalIndex = sessions.value.findIndex(s => s.session_id === sessionId)
      if (finalIndex !== -1) {
        sessions.value[finalIndex].title = newTitle
      }
    }
    finally {
      // Always remove from progress tracking when done
      typewriterInProgress.value.delete(sessionId)
      animationCoordinator.setTitleAnimating(false)
    }
  }

  function setActiveSession(sessionId: string | null) {
    activeSessionId.value = sessionId
  }

  function toggleSessionDrawer() {
    sessionDrawerVisible.value = !sessionDrawerVisible.value
  }

  function showSessionDrawer() {
    sessionDrawerVisible.value = true
  }

  function hideSessionDrawer() {
    sessionDrawerVisible.value = false
  }

  // Initialize (will be called with path from parent component)
  // onMounted(() => {
  //   loadSessions()
  // })

  return {
    // State
    sessions,
    activeSessionId,
    loading,
    sessionDrawerVisible,
    typewriterInProgress,

    // Getters
    activeSession,
    sortedSessions,
    hasActiveSession,

    // Actions
    loadSessions,
    createSession,
    updateSession,
    deleteSession,
    duplicateSession,
    generateSessionTitle,
    setActiveSession,
    toggleSessionDrawer,
    showSessionDrawer,
    hideSessionDrawer,
  }
})

+ 14 - 0
app/src/components/LLM/types.ts

@@ -7,3 +7,17 @@ export interface LLMProps {
   content: string
   path?: string
 }
+
// A single persisted LLM chat session.
export interface LLMSession {
  id: string
  title: string // display title; may be auto-generated from the conversation
  path?: string // nginx config file path this session is attached to, if any
  createdAt: Date
  updatedAt: Date
  messageCount: number
}

// Aggregate session state: the full list plus the currently selected one.
export interface LLMSessionState {
  sessions: LLMSession[]
  activeSessionId: string | null
}

+ 43 - 10
app/src/views/terminal/Terminal.vue

@@ -1,10 +1,12 @@
 <script setup lang="ts">
 import type ReconnectingWebSocket from 'reconnecting-websocket'
+import { ReloadOutlined } from '@ant-design/icons-vue'
 import { FitAddon } from '@xterm/addon-fit'
 import { Terminal } from '@xterm/xterm'
 import { throttle } from 'lodash'
 import use2FAModal from '@/components/TwoFA/use2FAModal'
 import ws from '@/lib/websocket'
+import TerminalStatusBar from './components/TerminalStatusBar.vue'
 import '@xterm/xterm/css/xterm.css'
 
 let term: Terminal | null
@@ -130,6 +132,10 @@ onUnmounted(() => {
   term?.dispose()
   websocket.value?.close()
 })
+
// Reload the whole page to re-establish the terminal WebSocket
// connection after it has been lost (triggered by the alert's button).
function refreshTerminal() {
  window.location.reload()
}
 </script>
 
 <template>
@@ -147,27 +153,54 @@ onUnmounted(() => {
       type="error"
       show-icon
       :message="$gettext('Connection lost, please refresh the page.')"
-    />
-    <div
-      id="terminal"
-      class="console"
-    />
+      action
+    >
+      <template #action>
+        <AButton
+          size="small"
+          type="text"
+          @click="refreshTerminal"
+        >
+          <template #icon>
+            <ReloadOutlined />
+          </template>
+        </AButton>
+      </template>
+    </AAlert>
+    <div class="terminal-container">
+      <div
+        id="terminal"
+        class="console"
+      />
+      <TerminalStatusBar />
+    </div>
   </div>
 </template>
 
 <style lang="less" scoped>
-.console {
+.terminal-container {
+  display: flex;
+  flex-direction: column;
   min-height: calc(100vh - 200px);
+  border-radius: 5px;
+  overflow: hidden;
+  background: #000;
+
+  @media (max-width: 512px) {
+    border-radius: 0;
+  }
+}
+
+.console {
+  flex: 1;
 
   :deep(.terminal) {
     padding: 10px;
+    height: 100%;
   }
 
   :deep(.xterm-viewport) {
-    border-radius: 5px;
-    @media (max-width: 512px) {
-      border-radius: 0;
-    }
+    border-radius: 0;
   }
 }
 </style>

+ 359 - 0
app/src/views/terminal/components/TerminalStatusBar.vue

@@ -0,0 +1,359 @@
+<script setup lang="ts">
+import type { DiskStat, LoadStat, MemStat } from '@/api/analytic'
+import analytic from '@/api/analytic'
+import upgrade from '@/api/upgrade'
+import { formatDateTime } from '@/lib/helper'
+
// Snapshot of the system metrics rendered in the status bar.
interface StatusData {
  version: string // app version; falls back to host platform version
  uptime: number // host uptime in seconds
  loadAvg: LoadStat | null
  cpuFreq: number // frequency of the first CPU core, in MHz
  cpuCount: number
  memory: MemStat | null
  disk: DiskStat | null
  timestamp: string // formatted "now", refreshed every second
}

const statusData = ref<StatusData>({
  version: '',
  uptime: 0,
  loadAvg: null,
  cpuFreq: 0,
  cpuCount: 0,
  memory: null,
  disk: null,
  timestamp: '',
})

// Live analytics socket; null when not connected.
const websocket = ref<WebSocket | null>(null)
+
+// Format uptime as days and hours
+function formatUptime(uptime: number) {
+  const days = Math.floor(uptime / (24 * 3600))
+  const hours = Math.floor((uptime % (24 * 3600)) / 3600)
+  return `${days}d${hours}h`
+}
+
+// Format memory usage as "used percentage"
+function formatMemoryUsage(memory: MemStat | null) {
+  if (!memory)
+    return '0B0%'
+
+  // Use the pressure value as percentage (since you said we can get it directly)
+  const percentage = memory.pressure.toFixed(1)
+
+  // Remove space from used size and combine without space
+  const usedSize = memory.used.replace(' ', '')
+
+  return `${usedSize}${percentage}%`
+}
+
+// Format disk usage as "used percentage"
+function formatDiskUsage(disk: DiskStat | null) {
+  if (!disk)
+    return '0B0%'
+
+  // Value is already formatted string like "39 GiB"
+  // Use the pre-calculated percentage from the API
+  const percentage = disk.percentage.toFixed(1)
+
+  // Remove space from used size and combine without space
+  const usedSize = disk.used.replace(' ', '')
+
+  return `${usedSize}${percentage}%`
+}
+
+// Format CPU frequency
+function formatCpuFreq(freq: number) {
+  if (!freq || freq === 0) {
+    return 'N/A'
+  }
+  if (freq >= 1000) {
+    return `${(freq / 1000).toFixed(2)}GHz`
+  }
+  return `${freq.toFixed(0)}MHz`
+}
+
// Update current timestamp: refresh the displayed clock with the
// current local time, formatted via the shared helper.
function updateTimestamp() {
  statusData.value.timestamp = formatDateTime(new Date().toISOString())
}
+
// Initialize data from analytic init API.
// Populates statusData once on mount; live updates afterwards come from
// the WebSocket (see connectWebSocket). All failures degrade to
// placeholder values instead of breaking the status bar.
async function initializeData() {
  try {
    const analyticData = await analytic.init()

    // Set system info with fallbacks
    statusData.value.uptime = analyticData?.host?.uptime || 0
    statusData.value.loadAvg = analyticData?.loadavg || null
    statusData.value.memory = analyticData?.memory || null
    statusData.value.disk = analyticData?.disk || null

    // Set CPU info with fallbacks
    const cpuInfo = analyticData?.cpu?.info || []
    statusData.value.cpuCount = cpuInfo.length || 0

    // Get CPU frequency from first CPU info with fallback
    // (assumes all cores report a similar frequency — only core 0 is shown)
    if (cpuInfo.length > 0 && cpuInfo[0].mhz) {
      statusData.value.cpuFreq = cpuInfo[0].mhz
    }
    else {
      statusData.value.cpuFreq = 0
    }

    // Try to get version from upgrade API, fallback to host platform version
    try {
      const versionData = await upgrade.current_version()
      statusData.value.version = versionData?.cur_version?.version || analyticData?.host?.platformVersion || 'unknown'
    }
    catch (versionError) {
      console.warn('Failed to get app version, using platform version:', versionError)
      statusData.value.version = analyticData?.host?.platformVersion || 'unknown'
    }

    updateTimestamp()
  }
  catch (error) {
    console.error('Failed to initialize terminal status bar:', error)
    // Set default values on error
    statusData.value.version = 'error'
    updateTimestamp()
  }
}
+
// Connect to WebSocket for real-time updates.
// NOTE(review): analytic.server() is cast to a plain WebSocket here;
// confirm the returned object (possibly a ReconnectingWebSocket) is
// compatible with the onmessage/onerror/close usage below.
function connectWebSocket() {
  try {
    const ws = analytic.server()
    websocket.value = ws as WebSocket

    if (websocket.value) {
      websocket.value.onmessage = event => {
        try {
          const data = JSON.parse(event.data)
          // Only the live metrics are pushed over the socket; version
          // and CPU info keep their initializeData() values.
          statusData.value.uptime = data.uptime
          statusData.value.loadAvg = data.loadavg
          statusData.value.memory = data.memory
          statusData.value.disk = data.disk
          updateTimestamp()
        }
        catch (error) {
          console.error('Failed to parse WebSocket data:', error)
        }
      }

      websocket.value.onerror = error => {
        console.error('WebSocket error:', error)
      }
    }
  }
  catch (error) {
    console.error('Failed to connect WebSocket:', error)
  }
}
+
+// Cleanup WebSocket connection
+function disconnectWebSocket() {
+  if (websocket.value) {
+    websocket.value.close()
+    websocket.value = null
+  }
+}
+
onMounted(() => {
  // One-shot snapshot, then live updates over the socket.
  initializeData()
  connectWebSocket()

  // Update timestamp every second
  const timestampInterval = setInterval(updateTimestamp, 1000)

  // onUnmounted is registered inside onMounted so the interval handle
  // stays in scope for cleanup — presumably intentional; verify Vue
  // associates the hook with this component instance in all code paths.
  onUnmounted(() => {
    clearInterval(timestampInterval)
    disconnectWebSocket()
  })
})
+</script>
+
+<template>
+  <div class="terminal-status-bar">
+    <!-- Left side: Version only -->
+    <div class="left-section">
+      <div class="status-item version">
+        <span class="icon i-tabler-package" />
+        <span class="value">{{ statusData.version }}</span>
+      </div>
+    </div>
+
+    <!-- Right side: All system info -->
+    <div class="right-section">
+      <div class="status-item uptime">
+        <span class="icon i-tabler-clock-up" />
+        <span class="value">{{ formatUptime(statusData.uptime) }}</span>
+      </div>
+
+      <div class="status-item load">
+        <span class="icon i-tabler-activity" />
+        <span class="value">{{ statusData.loadAvg?.load1.toFixed(2) || '0.00' }}</span>
+      </div>
+
+      <div class="status-item cpu">
+        <span class="icon i-tabler-cpu" />
+        <span class="value">{{ statusData.cpuCount || 0 }}x{{ formatCpuFreq(statusData.cpuFreq || 0) }}</span>
+      </div>
+
+      <div class="status-item memory">
+        <span class="icon i-tabler-chart-pie" />
+        <span class="value">{{ formatMemoryUsage(statusData.memory) }}</span>
+      </div>
+
+      <div class="status-item disk">
+        <span class="icon i-tabler-database" />
+        <span class="value">{{ formatDiskUsage(statusData.disk) }}</span>
+      </div>
+
+      <div class="status-item timestamp">
+        <span class="icon i-tabler-calendar-time" />
+        <span class="value">{{ statusData.timestamp }}</span>
+      </div>
+    </div>
+  </div>
+</template>
+
+<style lang="less" scoped>
+.terminal-status-bar {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  background: #1a1a1a;
+  border-top: 1px solid #333;
+  padding: 4px 12px;
+  font-family: 'Monaco', 'Menlo', 'Ubuntu Mono', monospace;
+  font-size: 12px;
+  height: 28px;
+  color: #e0e0e0;
+  white-space: nowrap;
+  overflow: hidden;
+
+  .left-section,
+  .right-section {
+    display: flex;
+    align-items: center;
+    gap: 12px;
+  }
+
+  .left-section {
+    flex-shrink: 0;
+  }
+
+  .right-section {
+    flex-shrink: 1;
+    overflow: hidden;
+  }
+
+  .status-item {
+    display: flex;
+    align-items: center;
+    gap: 4px;
+    flex-shrink: 0;
+
+    .icon {
+      font-size: 14px;
+      opacity: 0.8;
+      transition: opacity 0.2s;
+    }
+
+    .value {
+      color: #e0e0e0;
+      font-weight: 500;
+    }
+
+    &:hover .icon {
+      opacity: 1;
+    }
+
+    &.version {
+      .icon { color: #4a9eff; }
+      .value { color: #4a9eff; }
+    }
+
+    &.uptime {
+      .icon { color: #00d4aa; }
+      .value { color: #00d4aa; }
+    }
+
+    &.load {
+      .icon { color: #ff6b6b; }
+      .value { color: #ff6b6b; }
+    }
+
+    &.cpu {
+      .icon { color: #4ecdc4; }
+      .value { color: #4ecdc4; }
+    }
+
+    &.memory {
+      .icon { color: #ffe66d; }
+      .value { color: #ffe66d; }
+    }
+
+    &.disk {
+      .icon { color: #ff8a65; }
+      .value { color: #ff8a65; }
+    }
+
+    &.timestamp {
+      .icon { color: #b0b0b0; }
+      .value {
+        color: #b0b0b0;
+        font-size: 11px;
+      }
+    }
+  }
+
+  @media (max-width: 768px) {
+    padding: 3px 8px;
+    font-size: 11px;
+
+    .left-section,
+    .right-section {
+      gap: 8px;
+    }
+
+    .status-item {
+      gap: 2px;
+
+      .icon {
+        font-size: 12px;
+      }
+
+      &.timestamp .value {
+        font-size: 10px;
+      }
+    }
+  }
+
+  @media (max-width: 512px) {
+    padding: 2px 6px;
+    font-size: 10px;
+
+    .left-section,
+    .right-section {
+      gap: 6px;
+    }
+
+    .status-item {
+      gap: 1px;
+
+      .icon {
+        font-size: 11px;
+      }
+
+      &.timestamp .value {
+        font-size: 9px;
+      }
+    }
+  }
+}
+</style>

BIN
cmd/generate_licenses/internal/license/licenses.xz


+ 3 - 3
internal/config/delete.go

@@ -11,7 +11,7 @@ import (
 // CleanupDatabaseRecords removes related database records after deletion
 func CleanupDatabaseRecords(fullPath string, isDir bool) error {
 	q := query.Config
-	g := query.LLMMessages
+	g := query.LLMSession
 	b := query.ConfigBackup
 
 	if isDir {
@@ -19,7 +19,7 @@ func CleanupDatabaseRecords(fullPath string, isDir bool) error {
 		pathPattern := fullPath + "%"
 
 		// Delete ChatGPT logs
-		_, err := g.Where(g.Name.Like(pathPattern)).Delete()
+		_, err := g.Where(g.Path.Like(pathPattern)).Delete()
 		if err != nil {
 			return err
 		}
@@ -37,7 +37,7 @@ func CleanupDatabaseRecords(fullPath string, isDir bool) error {
 		}
 	} else {
 		// For files, delete specific records
-		_, err := g.Where(g.Name.Eq(fullPath)).Delete()
+		_, err := g.Where(g.Path.Eq(fullPath)).Delete()
 		if err != nil {
 			return err
 		}

+ 148 - 0
internal/llm/title_generator.go

@@ -0,0 +1,148 @@
+package llm
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	"github.com/0xJacky/Nginx-UI/settings"
+	"github.com/sashabaranov/go-openai"
+	"github.com/uozi-tech/cosy/logger"
+)
+
+// GenerateSessionTitle generates a concise title for an LLM session based on the conversation context
+func GenerateSessionTitle(messages []openai.ChatCompletionMessage) (string, error) {
+	client, err := GetClient()
+	if err != nil {
+		return "", fmt.Errorf("failed to get LLM client: %w", err)
+	}
+
+	// Create a summarized context from the first few messages
+	messageContext := extractContextForTitleGeneration(messages)
+	if messageContext == "" {
+		return "New Session", nil
+	}
+
+	// Prepare the system message for title generation
+	systemMessage := openai.ChatCompletionMessage{
+		Role: openai.ChatMessageRoleSystem,
+		Content: `You are a helpful assistant that generates concise, descriptive titles for chat sessions.
+Based on the conversation context provided, generate a short title (2-6 words) that captures the main topic or purpose.
+The title should be clear, specific, and professional.
+Respond only with the title, no additional text or formatting.`,
+	}
+
+	userMessage := openai.ChatCompletionMessage{
+		Role:    openai.ChatMessageRoleUser,
+		Content: fmt.Sprintf("Generate a title for this conversation:\n\n%s", messageContext),
+	}
+
+	req := openai.ChatCompletionRequest{
+		Model:       settings.OpenAISettings.Model,
+		Messages:    []openai.ChatCompletionMessage{systemMessage, userMessage},
+		MaxTokens:   20, // Keep it short
+		Temperature: 0.3, // Lower temperature for more consistent titles
+	}
+
+	resp, err := client.CreateChatCompletion(context.Background(), req)
+	if err != nil {
+		logger.Error("Failed to generate session title:", err)
+		return "", fmt.Errorf("failed to generate title: %w", err)
+	}
+
+	if len(resp.Choices) == 0 {
+		return "New Session", nil
+	}
+
+	title := strings.TrimSpace(resp.Choices[0].Message.Content)
+	
+	// Sanitize the title
+	title = sanitizeTitle(title)
+	
+	if title == "" {
+		return "New Session", nil
+	}
+
+	return title, nil
+}
+
+// extractContextForTitleGeneration extracts relevant context from messages for title generation
+func extractContextForTitleGeneration(messages []openai.ChatCompletionMessage) string {
+	if len(messages) == 0 {
+		return ""
+	}
+
+	var contextBuilder strings.Builder
+	messageCount := 0
+	maxMessages := 3 // Only use the first few messages for context
+	maxLength := 800  // Limit total context length
+
+	for _, message := range messages {
+		if messageCount >= maxMessages {
+			break
+		}
+
+		// Skip system messages for title generation
+		if message.Role == openai.ChatMessageRoleSystem {
+			continue
+		}
+
+		content := strings.TrimSpace(message.Content)
+		if content == "" {
+			continue
+		}
+
+		// Add role prefix for clarity
+		rolePrefix := ""
+		switch message.Role {
+		case openai.ChatMessageRoleUser:
+			rolePrefix = "User: "
+		case openai.ChatMessageRoleAssistant:
+			rolePrefix = "Assistant: "
+		}
+
+		// Truncate very long messages
+		if len(content) > 200 {
+			content = content[:200] + "..."
+		}
+
+		newContent := fmt.Sprintf("%s%s\n", rolePrefix, content)
+		
+		// Check if adding this message would exceed the max length
+		if contextBuilder.Len()+len(newContent) > maxLength {
+			break
+		}
+
+		contextBuilder.WriteString(newContent)
+		messageCount++
+	}
+
+	return contextBuilder.String()
+}
+
// sanitizeTitle cleans up a model-generated title: strips wrapping
// quotes, drops a leading "Title:" label, limits the length, and
// collapses all whitespace runs into single spaces.
func sanitizeTitle(title string) string {
	// Remove quotes if present
	title = strings.Trim(title, `"'`)

	// Remove any prefix like "Title: " if present. EqualFold on the raw
	// byte prefix avoids mismatches between a lowercased copy and the
	// byte offset used for slicing.
	if len(title) >= 6 && strings.EqualFold(title[:6], "title:") {
		title = strings.TrimSpace(title[6:])
	}

	// Limit length. Slice by runes, not bytes, so multi-byte characters
	// (e.g. CJK) are never cut in half into invalid UTF-8.
	if runes := []rune(title); len(runes) > 50 {
		title = string(runes[:47]) + "..."
	}

	// Replace any problematic characters
	title = strings.ReplaceAll(title, "\n", " ")
	title = strings.ReplaceAll(title, "\r", " ")

	// Collapse all whitespace runs into single spaces and trim the ends
	// (strings.Fields splits on any whitespace, including tabs).
	return strings.Join(strings.Fields(title), " ")
}

+ 9 - 17
internal/migrate/7.rename_chatgpt_logs_to_llm_messages.go

@@ -5,7 +5,7 @@ import (
 	"gorm.io/gorm"
 )
 
-var RenameChatGPTLogsToLLMMessages = &gormigrate.Migration{
+var RenameChatGPTLogsToLLMSessions = &gormigrate.Migration{
 	ID: "20250831000001",
 	Migrate: func(tx *gorm.DB) error {
 		// 检查 chatgpt_logs 表是否存在
@@ -13,23 +13,15 @@ var RenameChatGPTLogsToLLMMessages = &gormigrate.Migration{
 			return nil
 		}
 
-		// 检查 llm_messages 表是否存在
-		if !tx.Migrator().HasTable("llm_messages") {
-			// llm_messages 表不存在,直接重命名
-			if err := tx.Exec("ALTER TABLE chat_gpt_logs RENAME TO llm_messages").Error; err != nil {
-				return err
-			}
-		} else {
-			// llm_messages 表已存在,迁移数据后删除旧表
-			// 使用原生 SQL 迁移数据,因为两个表结构相同
-			if err := tx.Exec("INSERT INTO llm_messages (name, content) SELECT name, content FROM chat_gpt_logs WHERE NOT EXISTS (SELECT 1 FROM llm_messages WHERE llm_messages.name = chat_gpt_logs.name)").Error; err != nil {
-				return err
-			}
+		// llm_messages 表已存在,迁移数据后删除旧表
+		// 使用原生 SQL 迁移数据,因为两个表结构相同
+		if err := tx.Exec("INSERT INTO llm_messages (path, content) SELECT name, content FROM chat_gpt_logs WHERE NOT EXISTS (SELECT 1 FROM llm_messages WHERE llm_messages.name = chat_gpt_logs.name)").Error; err != nil {
+			return err
+		}
 
-			// 删除旧表
-			if err := tx.Migrator().DropTable("chat_gpt_logs"); err != nil {
-				return err
-			}
+		// 删除旧表
+		if err := tx.Migrator().DropTable("chat_gpt_logs"); err != nil {
+			return err
 		}
 
 		return nil

+ 1 - 1
internal/migrate/migrate.go

@@ -10,7 +10,7 @@ var Migrations = []*gormigrate.Migration{
 	UpdateCertDomains,
 	RenameEnvGroupsToNamespaces,
 	RenameEnvironmentsToNodes,
-	RenameChatGPTLogsToLLMMessages,
+	RenameChatGPTLogsToLLMSessions,
 }
 
 var BeforeAutoMigrate = []*gormigrate.Migration{

+ 2 - 2
internal/site/rename.go

@@ -60,8 +60,8 @@ func Rename(oldName string, newName string) (err error) {
 	}
 
 	// update ChatGPT history
-	g := query.LLMMessages
-	_, _ = g.Where(g.Name.Eq(oldName)).Update(g.Name, newName)
+	g := query.LLMSession
+	_, _ = g.Where(g.Path.Eq(oldName)).Update(g.Path, newName)
 
 	// update config history
 	b := query.ConfigBackup

+ 2 - 2
internal/stream/rename.go

@@ -60,8 +60,8 @@ func Rename(oldName string, newName string) (err error) {
 	}
 
 	// update LLM history
-	g := query.LLMMessages
-	_, _ = g.Where(g.Name.Eq(oldName)).Update(g.Name, newName)
+	g := query.LLMSession
+	_, _ = g.Where(g.Path.Eq(oldPath)).Update(g.Path, newPath)
 
 	// update config history
 	b := query.ConfigBackup

+ 7 - 14
mcp/config/config_get.go

@@ -41,26 +41,19 @@ func handleNginxConfigGet(ctx context.Context, request mcp.CallToolRequest) (*mc
 	}
 
 	q := query.Config
-	g := query.LLMMessages
-	llmMsg, err := g.Where(g.Name.Eq(absPath)).FirstOrCreate()
-	if err != nil {
-		return nil, err
-	}
-
 	cfg, err := q.Where(q.Filepath.Eq(absPath)).FirstOrInit()
 	if err != nil {
 		return nil, err
 	}
 
 	result := map[string]interface{}{
-		"name":              stat.Name(),
-		"content":           string(content),
-		"llm_messages": llmMsg.Content,
-		"file_path":         absPath,
-		"modified_at":       stat.ModTime(),
-		"dir":               filepath.Dir(relativePath),
-		"sync_node_ids":     cfg.SyncNodeIds,
-		"sync_overwrite":    cfg.SyncOverwrite,
+		"name":           stat.Name(),
+		"content":        string(content),
+		"file_path":      absPath,
+		"modified_at":    stat.ModTime(),
+		"dir":            filepath.Dir(relativePath),
+		"sync_node_ids":  cfg.SyncNodeIds,
+		"sync_overwrite": cfg.SyncOverwrite,
 	}
 
 	jsonResult, _ := json.Marshal(result)

+ 4 - 4
mcp/config/config_rename.go

@@ -73,7 +73,7 @@ func handleNginxConfigRename(ctx context.Context, request mcp.CallToolRequest) (
 	}
 
 	// update LLM records
-	g := query.LLMMessages
+	g := query.LLMSession
 	q := query.Config
 	cfg, err := q.Where(q.Filepath.Eq(origFullPath)).FirstOrInit()
 	if err != nil {
@@ -81,13 +81,13 @@ func handleNginxConfigRename(ctx context.Context, request mcp.CallToolRequest) (
 	}
 
 	if !stat.IsDir() {
-		_, _ = g.Where(g.Name.Eq(newFullPath)).Delete()
-		_, _ = g.Where(g.Name.Eq(origFullPath)).Update(g.Name, newFullPath)
+		_, _ = g.Where(g.Path.Eq(newFullPath)).Delete()
+		_, _ = g.Where(g.Path.Eq(origFullPath)).Update(g.Path, newFullPath)
 		// for file, the sync policy for this file is used
 		syncNodeIds = cfg.SyncNodeIds
 	} else {
 		// is directory, update all records under the directory
-		_, _ = g.Where(g.Name.Like(origFullPath+"%")).Update(g.Name, g.Name.Replace(origFullPath, newFullPath))
+		_, _ = g.Where(g.Path.Like(origFullPath+"%")).Update(g.Path, g.Path.Replace(origFullPath, newFullPath))
 	}
 
 	_, err = q.Where(q.Filepath.Eq(origFullPath)).Updates(&model.Config{

+ 0 - 36
model/llm_messages.go

@@ -1,36 +0,0 @@
-package model
-
-import (
-	"database/sql/driver"
-	"encoding/json"
-	"errors"
-	"fmt"
-
-	"github.com/sashabaranov/go-openai"
-)
-
-type LLMCompletionMessages []openai.ChatCompletionMessage
-
-// Scan value into Jsonb, implements sql.Scanner interface
-func (j *LLMCompletionMessages) Scan(value interface{}) error {
-	bytes, ok := value.([]byte)
-	if !ok {
-		return errors.New(fmt.Sprint("Failed to unmarshal JSONB value:", value))
-	}
-
-	result := make([]openai.ChatCompletionMessage, 0)
-	err := json.Unmarshal(bytes, &result)
-	*j = result
-
-	return err
-}
-
-// Value return json value, implement driver.Valuer interface
-func (j *LLMCompletionMessages) Value() (driver.Value, error) {
-	return json.Marshal(*j)
-}
-
-type LLMMessages struct {
-	Name    string                `json:"name"`
-	Content LLMCompletionMessages `json:"content" gorm:"serializer:json"`
-}

+ 36 - 0
model/llm_session.go

@@ -0,0 +1,36 @@
+package model
+
+import (
+	"time"
+
+	"github.com/google/uuid"
+	"github.com/sashabaranov/go-openai"
+	"gorm.io/gorm"
+)
+
+type LLMCompletionMessages []openai.ChatCompletionMessage
+
+type LLMSession struct {
+	ID           int                   `json:"id" gorm:"primaryKey"`
+	SessionID    string                `json:"session_id" gorm:"uniqueIndex;not null"`
+	Title        string                `json:"title"`
+	Path         string                `json:"path" gorm:"index"` // file path; may be empty
+	SessionType  string                `json:"session_type" gorm:"index"` // column bound by generated query code (query/llm_sessions.gen.go)
+	Messages     LLMCompletionMessages `json:"messages" gorm:"serializer:json"`
+	MessageCount int                   `json:"message_count"`
+	IsActive     bool                  `json:"is_active" gorm:"default:true"`
+	CreatedAt    time.Time             `json:"created_at"`
+	UpdatedAt    time.Time             `json:"updated_at"`
+	DeletedAt    gorm.DeletedAt        `json:"-" gorm:"index"`
+}
+
+func (LLMSession) TableName() string {
+	return "llm_sessions"
+}
+
+func (s *LLMSession) BeforeCreate(tx *gorm.DB) error {
+	if s.SessionID == "" {
+		s.SessionID = uuid.New().String()
+	}
+	return nil
+}

+ 1 - 1
model/model.go

@@ -38,7 +38,7 @@ func GenerateAllModel() []any {
 		User{},
 		AuthToken{},
 		Cert{},
-		LLMMessages{},
+		LLMSession{},
 		Site{},
 		Stream{},
 		DnsCredential{},

+ 8 - 8
query/gen.go

@@ -26,7 +26,7 @@ var (
 	ConfigBackup   *configBackup
 	DnsCredential  *dnsCredential
 	ExternalNotify *externalNotify
-	LLMMessages    *lLMMessages
+	LLMSession     *lLMSession
 	Namespace      *namespace
 	NginxLogIndex  *nginxLogIndex
 	Node           *node
@@ -49,7 +49,7 @@ func SetDefault(db *gorm.DB, opts ...gen.DOOption) {
 	ConfigBackup = &Q.ConfigBackup
 	DnsCredential = &Q.DnsCredential
 	ExternalNotify = &Q.ExternalNotify
-	LLMMessages = &Q.LLMMessages
+	LLMSession = &Q.LLMSession
 	Namespace = &Q.Namespace
 	NginxLogIndex = &Q.NginxLogIndex
 	Node = &Q.Node
@@ -73,7 +73,7 @@ func Use(db *gorm.DB, opts ...gen.DOOption) *Query {
 		ConfigBackup:   newConfigBackup(db, opts...),
 		DnsCredential:  newDnsCredential(db, opts...),
 		ExternalNotify: newExternalNotify(db, opts...),
-		LLMMessages:    newLLMMessages(db, opts...),
+		LLMSession:     newLLMSession(db, opts...),
 		Namespace:      newNamespace(db, opts...),
 		NginxLogIndex:  newNginxLogIndex(db, opts...),
 		Node:           newNode(db, opts...),
@@ -98,7 +98,7 @@ type Query struct {
 	ConfigBackup   configBackup
 	DnsCredential  dnsCredential
 	ExternalNotify externalNotify
-	LLMMessages    lLMMessages
+	LLMSession     lLMSession
 	Namespace      namespace
 	NginxLogIndex  nginxLogIndex
 	Node           node
@@ -124,7 +124,7 @@ func (q *Query) clone(db *gorm.DB) *Query {
 		ConfigBackup:   q.ConfigBackup.clone(db),
 		DnsCredential:  q.DnsCredential.clone(db),
 		ExternalNotify: q.ExternalNotify.clone(db),
-		LLMMessages:    q.LLMMessages.clone(db),
+		LLMSession:     q.LLMSession.clone(db),
 		Namespace:      q.Namespace.clone(db),
 		NginxLogIndex:  q.NginxLogIndex.clone(db),
 		Node:           q.Node.clone(db),
@@ -157,7 +157,7 @@ func (q *Query) ReplaceDB(db *gorm.DB) *Query {
 		ConfigBackup:   q.ConfigBackup.replaceDB(db),
 		DnsCredential:  q.DnsCredential.replaceDB(db),
 		ExternalNotify: q.ExternalNotify.replaceDB(db),
-		LLMMessages:    q.LLMMessages.replaceDB(db),
+		LLMSession:     q.LLMSession.replaceDB(db),
 		Namespace:      q.Namespace.replaceDB(db),
 		NginxLogIndex:  q.NginxLogIndex.replaceDB(db),
 		Node:           q.Node.replaceDB(db),
@@ -180,7 +180,7 @@ type queryCtx struct {
 	ConfigBackup   *configBackupDo
 	DnsCredential  *dnsCredentialDo
 	ExternalNotify *externalNotifyDo
-	LLMMessages    *lLMMessagesDo
+	LLMSession     *lLMSessionDo
 	Namespace      *namespaceDo
 	NginxLogIndex  *nginxLogIndexDo
 	Node           *nodeDo
@@ -203,7 +203,7 @@ func (q *Query) WithContext(ctx context.Context) *queryCtx {
 		ConfigBackup:   q.ConfigBackup.WithContext(ctx),
 		DnsCredential:  q.DnsCredential.WithContext(ctx),
 		ExternalNotify: q.ExternalNotify.WithContext(ctx),
-		LLMMessages:    q.LLMMessages.WithContext(ctx),
+		LLMSession:     q.LLMSession.WithContext(ctx),
 		Namespace:      q.Namespace.WithContext(ctx),
 		NginxLogIndex:  q.NginxLogIndex.WithContext(ctx),
 		Node:           q.Node.WithContext(ctx),

+ 0 - 354
query/llm_messages.gen.go

@@ -1,354 +0,0 @@
-// Code generated by gorm.io/gen. DO NOT EDIT.
-// Code generated by gorm.io/gen. DO NOT EDIT.
-// Code generated by gorm.io/gen. DO NOT EDIT.
-
-package query
-
-import (
-	"context"
-	"strings"
-
-	"gorm.io/gorm"
-	"gorm.io/gorm/clause"
-	"gorm.io/gorm/schema"
-
-	"gorm.io/gen"
-	"gorm.io/gen/field"
-
-	"gorm.io/plugin/dbresolver"
-
-	"github.com/0xJacky/Nginx-UI/model"
-)
-
-func newLLMMessages(db *gorm.DB, opts ...gen.DOOption) lLMMessages {
-	_lLMMessages := lLMMessages{}
-
-	_lLMMessages.lLMMessagesDo.UseDB(db, opts...)
-	_lLMMessages.lLMMessagesDo.UseModel(&model.LLMMessages{})
-
-	tableName := _lLMMessages.lLMMessagesDo.TableName()
-	_lLMMessages.ALL = field.NewAsterisk(tableName)
-	_lLMMessages.Name = field.NewString(tableName, "name")
-	_lLMMessages.Content = field.NewField(tableName, "content")
-
-	_lLMMessages.fillFieldMap()
-
-	return _lLMMessages
-}
-
-type lLMMessages struct {
-	lLMMessagesDo
-
-	ALL     field.Asterisk
-	Name    field.String
-	Content field.Field
-
-	fieldMap map[string]field.Expr
-}
-
-func (l lLMMessages) Table(newTableName string) *lLMMessages {
-	l.lLMMessagesDo.UseTable(newTableName)
-	return l.updateTableName(newTableName)
-}
-
-func (l lLMMessages) As(alias string) *lLMMessages {
-	l.lLMMessagesDo.DO = *(l.lLMMessagesDo.As(alias).(*gen.DO))
-	return l.updateTableName(alias)
-}
-
-func (l *lLMMessages) updateTableName(table string) *lLMMessages {
-	l.ALL = field.NewAsterisk(table)
-	l.Name = field.NewString(table, "name")
-	l.Content = field.NewField(table, "content")
-
-	l.fillFieldMap()
-
-	return l
-}
-
-func (l *lLMMessages) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
-	_f, ok := l.fieldMap[fieldName]
-	if !ok || _f == nil {
-		return nil, false
-	}
-	_oe, ok := _f.(field.OrderExpr)
-	return _oe, ok
-}
-
-func (l *lLMMessages) fillFieldMap() {
-	l.fieldMap = make(map[string]field.Expr, 2)
-	l.fieldMap["name"] = l.Name
-	l.fieldMap["content"] = l.Content
-}
-
-func (l lLMMessages) clone(db *gorm.DB) lLMMessages {
-	l.lLMMessagesDo.ReplaceConnPool(db.Statement.ConnPool)
-	return l
-}
-
-func (l lLMMessages) replaceDB(db *gorm.DB) lLMMessages {
-	l.lLMMessagesDo.ReplaceDB(db)
-	return l
-}
-
-type lLMMessagesDo struct{ gen.DO }
-
-// FirstByID Where("id=@id")
-func (l lLMMessagesDo) FirstByID(id uint64) (result *model.LLMMessages, err error) {
-	var params []interface{}
-
-	var generateSQL strings.Builder
-	params = append(params, id)
-	generateSQL.WriteString("id=? ")
-
-	var executeSQL *gorm.DB
-	executeSQL = l.UnderlyingDB().Where(generateSQL.String(), params...).Take(&result) // ignore_security_alert
-	err = executeSQL.Error
-
-	return
-}
-
-// DeleteByID update @@table set deleted_at=strftime('%Y-%m-%d %H:%M:%S','now') where id=@id
-func (l lLMMessagesDo) DeleteByID(id uint64) (err error) {
-	var params []interface{}
-
-	var generateSQL strings.Builder
-	params = append(params, id)
-	generateSQL.WriteString("update llm_messages set deleted_at=strftime('%Y-%m-%d %H:%M:%S','now') where id=? ")
-
-	var executeSQL *gorm.DB
-	executeSQL = l.UnderlyingDB().Exec(generateSQL.String(), params...) // ignore_security_alert
-	err = executeSQL.Error
-
-	return
-}
-
-func (l lLMMessagesDo) Debug() *lLMMessagesDo {
-	return l.withDO(l.DO.Debug())
-}
-
-func (l lLMMessagesDo) WithContext(ctx context.Context) *lLMMessagesDo {
-	return l.withDO(l.DO.WithContext(ctx))
-}
-
-func (l lLMMessagesDo) ReadDB() *lLMMessagesDo {
-	return l.Clauses(dbresolver.Read)
-}
-
-func (l lLMMessagesDo) WriteDB() *lLMMessagesDo {
-	return l.Clauses(dbresolver.Write)
-}
-
-func (l lLMMessagesDo) Session(config *gorm.Session) *lLMMessagesDo {
-	return l.withDO(l.DO.Session(config))
-}
-
-func (l lLMMessagesDo) Clauses(conds ...clause.Expression) *lLMMessagesDo {
-	return l.withDO(l.DO.Clauses(conds...))
-}
-
-func (l lLMMessagesDo) Returning(value interface{}, columns ...string) *lLMMessagesDo {
-	return l.withDO(l.DO.Returning(value, columns...))
-}
-
-func (l lLMMessagesDo) Not(conds ...gen.Condition) *lLMMessagesDo {
-	return l.withDO(l.DO.Not(conds...))
-}
-
-func (l lLMMessagesDo) Or(conds ...gen.Condition) *lLMMessagesDo {
-	return l.withDO(l.DO.Or(conds...))
-}
-
-func (l lLMMessagesDo) Select(conds ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Select(conds...))
-}
-
-func (l lLMMessagesDo) Where(conds ...gen.Condition) *lLMMessagesDo {
-	return l.withDO(l.DO.Where(conds...))
-}
-
-func (l lLMMessagesDo) Order(conds ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Order(conds...))
-}
-
-func (l lLMMessagesDo) Distinct(cols ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Distinct(cols...))
-}
-
-func (l lLMMessagesDo) Omit(cols ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Omit(cols...))
-}
-
-func (l lLMMessagesDo) Join(table schema.Tabler, on ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Join(table, on...))
-}
-
-func (l lLMMessagesDo) LeftJoin(table schema.Tabler, on ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.LeftJoin(table, on...))
-}
-
-func (l lLMMessagesDo) RightJoin(table schema.Tabler, on ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.RightJoin(table, on...))
-}
-
-func (l lLMMessagesDo) Group(cols ...field.Expr) *lLMMessagesDo {
-	return l.withDO(l.DO.Group(cols...))
-}
-
-func (l lLMMessagesDo) Having(conds ...gen.Condition) *lLMMessagesDo {
-	return l.withDO(l.DO.Having(conds...))
-}
-
-func (l lLMMessagesDo) Limit(limit int) *lLMMessagesDo {
-	return l.withDO(l.DO.Limit(limit))
-}
-
-func (l lLMMessagesDo) Offset(offset int) *lLMMessagesDo {
-	return l.withDO(l.DO.Offset(offset))
-}
-
-func (l lLMMessagesDo) Scopes(funcs ...func(gen.Dao) gen.Dao) *lLMMessagesDo {
-	return l.withDO(l.DO.Scopes(funcs...))
-}
-
-func (l lLMMessagesDo) Unscoped() *lLMMessagesDo {
-	return l.withDO(l.DO.Unscoped())
-}
-
-func (l lLMMessagesDo) Create(values ...*model.LLMMessages) error {
-	if len(values) == 0 {
-		return nil
-	}
-	return l.DO.Create(values)
-}
-
-func (l lLMMessagesDo) CreateInBatches(values []*model.LLMMessages, batchSize int) error {
-	return l.DO.CreateInBatches(values, batchSize)
-}
-
-// Save : !!! underlying implementation is different with GORM
-// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
-func (l lLMMessagesDo) Save(values ...*model.LLMMessages) error {
-	if len(values) == 0 {
-		return nil
-	}
-	return l.DO.Save(values)
-}
-
-func (l lLMMessagesDo) First() (*model.LLMMessages, error) {
-	if result, err := l.DO.First(); err != nil {
-		return nil, err
-	} else {
-		return result.(*model.LLMMessages), nil
-	}
-}
-
-func (l lLMMessagesDo) Take() (*model.LLMMessages, error) {
-	if result, err := l.DO.Take(); err != nil {
-		return nil, err
-	} else {
-		return result.(*model.LLMMessages), nil
-	}
-}
-
-func (l lLMMessagesDo) Last() (*model.LLMMessages, error) {
-	if result, err := l.DO.Last(); err != nil {
-		return nil, err
-	} else {
-		return result.(*model.LLMMessages), nil
-	}
-}
-
-func (l lLMMessagesDo) Find() ([]*model.LLMMessages, error) {
-	result, err := l.DO.Find()
-	return result.([]*model.LLMMessages), err
-}
-
-func (l lLMMessagesDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.LLMMessages, err error) {
-	buf := make([]*model.LLMMessages, 0, batchSize)
-	err = l.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
-		defer func() { results = append(results, buf...) }()
-		return fc(tx, batch)
-	})
-	return results, err
-}
-
-func (l lLMMessagesDo) FindInBatches(result *[]*model.LLMMessages, batchSize int, fc func(tx gen.Dao, batch int) error) error {
-	return l.DO.FindInBatches(result, batchSize, fc)
-}
-
-func (l lLMMessagesDo) Attrs(attrs ...field.AssignExpr) *lLMMessagesDo {
-	return l.withDO(l.DO.Attrs(attrs...))
-}
-
-func (l lLMMessagesDo) Assign(attrs ...field.AssignExpr) *lLMMessagesDo {
-	return l.withDO(l.DO.Assign(attrs...))
-}
-
-func (l lLMMessagesDo) Joins(fields ...field.RelationField) *lLMMessagesDo {
-	for _, _f := range fields {
-		l = *l.withDO(l.DO.Joins(_f))
-	}
-	return &l
-}
-
-func (l lLMMessagesDo) Preload(fields ...field.RelationField) *lLMMessagesDo {
-	for _, _f := range fields {
-		l = *l.withDO(l.DO.Preload(_f))
-	}
-	return &l
-}
-
-func (l lLMMessagesDo) FirstOrInit() (*model.LLMMessages, error) {
-	if result, err := l.DO.FirstOrInit(); err != nil {
-		return nil, err
-	} else {
-		return result.(*model.LLMMessages), nil
-	}
-}
-
-func (l lLMMessagesDo) FirstOrCreate() (*model.LLMMessages, error) {
-	if result, err := l.DO.FirstOrCreate(); err != nil {
-		return nil, err
-	} else {
-		return result.(*model.LLMMessages), nil
-	}
-}
-
-func (l lLMMessagesDo) FindByPage(offset int, limit int) (result []*model.LLMMessages, count int64, err error) {
-	result, err = l.Offset(offset).Limit(limit).Find()
-	if err != nil {
-		return
-	}
-
-	if size := len(result); 0 < limit && 0 < size && size < limit {
-		count = int64(size + offset)
-		return
-	}
-
-	count, err = l.Offset(-1).Limit(-1).Count()
-	return
-}
-
-func (l lLMMessagesDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
-	count, err = l.Count()
-	if err != nil {
-		return
-	}
-
-	err = l.Offset(offset).Limit(limit).Scan(result)
-	return
-}
-
-func (l lLMMessagesDo) Scan(result interface{}) (err error) {
-	return l.DO.Scan(result)
-}
-
-func (l lLMMessagesDo) Delete(models ...*model.LLMMessages) (result gen.ResultInfo, err error) {
-	return l.DO.Delete(models)
-}
-
-func (l *lLMMessagesDo) withDO(do gen.Dao) *lLMMessagesDo {
-	l.DO = *do.(*gen.DO)
-	return l
-}

+ 390 - 0
query/llm_sessions.gen.go

@@ -0,0 +1,390 @@
+// Code generated by gorm.io/gen. DO NOT EDIT.
+// Code generated by gorm.io/gen. DO NOT EDIT.
+// Code generated by gorm.io/gen. DO NOT EDIT.
+
+package query
+
+import (
+	"context"
+	"strings"
+
+	"gorm.io/gorm"
+	"gorm.io/gorm/clause"
+	"gorm.io/gorm/schema"
+
+	"gorm.io/gen"
+	"gorm.io/gen/field"
+
+	"gorm.io/plugin/dbresolver"
+
+	"github.com/0xJacky/Nginx-UI/model"
+)
+
+func newLLMSession(db *gorm.DB, opts ...gen.DOOption) lLMSession {
+	_lLMSession := lLMSession{}
+
+	_lLMSession.lLMSessionDo.UseDB(db, opts...)
+	_lLMSession.lLMSessionDo.UseModel(&model.LLMSession{})
+
+	tableName := _lLMSession.lLMSessionDo.TableName()
+	_lLMSession.ALL = field.NewAsterisk(tableName)
+	_lLMSession.ID = field.NewInt(tableName, "id")
+	_lLMSession.SessionID = field.NewString(tableName, "session_id")
+	_lLMSession.Title = field.NewString(tableName, "title")
+	_lLMSession.Path = field.NewString(tableName, "path")
+	_lLMSession.SessionType = field.NewString(tableName, "session_type")
+	_lLMSession.Messages = field.NewField(tableName, "messages")
+	_lLMSession.MessageCount = field.NewInt(tableName, "message_count")
+	_lLMSession.IsActive = field.NewBool(tableName, "is_active")
+	_lLMSession.CreatedAt = field.NewTime(tableName, "created_at")
+	_lLMSession.UpdatedAt = field.NewTime(tableName, "updated_at")
+	_lLMSession.DeletedAt = field.NewField(tableName, "deleted_at")
+
+	_lLMSession.fillFieldMap()
+
+	return _lLMSession
+}
+
+type lLMSession struct {
+	lLMSessionDo
+
+	ALL          field.Asterisk
+	ID           field.Int
+	SessionID    field.String
+	Title        field.String
+	Path         field.String
+	SessionType  field.String
+	Messages     field.Field
+	MessageCount field.Int
+	IsActive     field.Bool
+	CreatedAt    field.Time
+	UpdatedAt    field.Time
+	DeletedAt    field.Field
+
+	fieldMap map[string]field.Expr
+}
+
+func (l lLMSession) Table(newTableName string) *lLMSession {
+	l.lLMSessionDo.UseTable(newTableName)
+	return l.updateTableName(newTableName)
+}
+
+func (l lLMSession) As(alias string) *lLMSession {
+	l.lLMSessionDo.DO = *(l.lLMSessionDo.As(alias).(*gen.DO))
+	return l.updateTableName(alias)
+}
+
+func (l *lLMSession) updateTableName(table string) *lLMSession {
+	l.ALL = field.NewAsterisk(table)
+	l.ID = field.NewInt(table, "id")
+	l.SessionID = field.NewString(table, "session_id")
+	l.Title = field.NewString(table, "title")
+	l.Path = field.NewString(table, "path")
+	l.SessionType = field.NewString(table, "session_type")
+	l.Messages = field.NewField(table, "messages")
+	l.MessageCount = field.NewInt(table, "message_count")
+	l.IsActive = field.NewBool(table, "is_active")
+	l.CreatedAt = field.NewTime(table, "created_at")
+	l.UpdatedAt = field.NewTime(table, "updated_at")
+	l.DeletedAt = field.NewField(table, "deleted_at")
+
+	l.fillFieldMap()
+
+	return l
+}
+
+func (l *lLMSession) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
+	_f, ok := l.fieldMap[fieldName]
+	if !ok || _f == nil {
+		return nil, false
+	}
+	_oe, ok := _f.(field.OrderExpr)
+	return _oe, ok
+}
+
+func (l *lLMSession) fillFieldMap() {
+	l.fieldMap = make(map[string]field.Expr, 11)
+	l.fieldMap["id"] = l.ID
+	l.fieldMap["session_id"] = l.SessionID
+	l.fieldMap["title"] = l.Title
+	l.fieldMap["path"] = l.Path
+	l.fieldMap["session_type"] = l.SessionType
+	l.fieldMap["messages"] = l.Messages
+	l.fieldMap["message_count"] = l.MessageCount
+	l.fieldMap["is_active"] = l.IsActive
+	l.fieldMap["created_at"] = l.CreatedAt
+	l.fieldMap["updated_at"] = l.UpdatedAt
+	l.fieldMap["deleted_at"] = l.DeletedAt
+}
+
+func (l lLMSession) clone(db *gorm.DB) lLMSession {
+	l.lLMSessionDo.ReplaceConnPool(db.Statement.ConnPool)
+	return l
+}
+
+func (l lLMSession) replaceDB(db *gorm.DB) lLMSession {
+	l.lLMSessionDo.ReplaceDB(db)
+	return l
+}
+
+type lLMSessionDo struct{ gen.DO }
+
+// FirstByID Where("id=@id")
+func (l lLMSessionDo) FirstByID(id uint64) (result *model.LLMSession, err error) {
+	var params []interface{}
+
+	var generateSQL strings.Builder
+	params = append(params, id)
+	generateSQL.WriteString("id=? ")
+
+	var executeSQL *gorm.DB
+	executeSQL = l.UnderlyingDB().Where(generateSQL.String(), params...).Take(&result) // ignore_security_alert
+	err = executeSQL.Error
+
+	return
+}
+
+// DeleteByID update @@table set deleted_at=strftime('%Y-%m-%d %H:%M:%S','now') where id=@id
+func (l lLMSessionDo) DeleteByID(id uint64) (err error) {
+	var params []interface{}
+
+	var generateSQL strings.Builder
+	params = append(params, id)
+	generateSQL.WriteString("update llm_sessions set deleted_at=strftime('%Y-%m-%d %H:%M:%S','now') where id=? ")
+
+	var executeSQL *gorm.DB
+	executeSQL = l.UnderlyingDB().Exec(generateSQL.String(), params...) // ignore_security_alert
+	err = executeSQL.Error
+
+	return
+}
+
+func (l lLMSessionDo) Debug() *lLMSessionDo {
+	return l.withDO(l.DO.Debug())
+}
+
+func (l lLMSessionDo) WithContext(ctx context.Context) *lLMSessionDo {
+	return l.withDO(l.DO.WithContext(ctx))
+}
+
+func (l lLMSessionDo) ReadDB() *lLMSessionDo {
+	return l.Clauses(dbresolver.Read)
+}
+
+func (l lLMSessionDo) WriteDB() *lLMSessionDo {
+	return l.Clauses(dbresolver.Write)
+}
+
+func (l lLMSessionDo) Session(config *gorm.Session) *lLMSessionDo {
+	return l.withDO(l.DO.Session(config))
+}
+
+func (l lLMSessionDo) Clauses(conds ...clause.Expression) *lLMSessionDo {
+	return l.withDO(l.DO.Clauses(conds...))
+}
+
+func (l lLMSessionDo) Returning(value interface{}, columns ...string) *lLMSessionDo {
+	return l.withDO(l.DO.Returning(value, columns...))
+}
+
+func (l lLMSessionDo) Not(conds ...gen.Condition) *lLMSessionDo {
+	return l.withDO(l.DO.Not(conds...))
+}
+
+func (l lLMSessionDo) Or(conds ...gen.Condition) *lLMSessionDo {
+	return l.withDO(l.DO.Or(conds...))
+}
+
+func (l lLMSessionDo) Select(conds ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Select(conds...))
+}
+
+func (l lLMSessionDo) Where(conds ...gen.Condition) *lLMSessionDo {
+	return l.withDO(l.DO.Where(conds...))
+}
+
+func (l lLMSessionDo) Order(conds ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Order(conds...))
+}
+
+func (l lLMSessionDo) Distinct(cols ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Distinct(cols...))
+}
+
+func (l lLMSessionDo) Omit(cols ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Omit(cols...))
+}
+
+func (l lLMSessionDo) Join(table schema.Tabler, on ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Join(table, on...))
+}
+
+func (l lLMSessionDo) LeftJoin(table schema.Tabler, on ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.LeftJoin(table, on...))
+}
+
+func (l lLMSessionDo) RightJoin(table schema.Tabler, on ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.RightJoin(table, on...))
+}
+
+func (l lLMSessionDo) Group(cols ...field.Expr) *lLMSessionDo {
+	return l.withDO(l.DO.Group(cols...))
+}
+
+func (l lLMSessionDo) Having(conds ...gen.Condition) *lLMSessionDo {
+	return l.withDO(l.DO.Having(conds...))
+}
+
+func (l lLMSessionDo) Limit(limit int) *lLMSessionDo {
+	return l.withDO(l.DO.Limit(limit))
+}
+
+func (l lLMSessionDo) Offset(offset int) *lLMSessionDo {
+	return l.withDO(l.DO.Offset(offset))
+}
+
+func (l lLMSessionDo) Scopes(funcs ...func(gen.Dao) gen.Dao) *lLMSessionDo {
+	return l.withDO(l.DO.Scopes(funcs...))
+}
+
+func (l lLMSessionDo) Unscoped() *lLMSessionDo {
+	return l.withDO(l.DO.Unscoped())
+}
+
+func (l lLMSessionDo) Create(values ...*model.LLMSession) error {
+	if len(values) == 0 {
+		return nil
+	}
+	return l.DO.Create(values)
+}
+
+func (l lLMSessionDo) CreateInBatches(values []*model.LLMSession, batchSize int) error {
+	return l.DO.CreateInBatches(values, batchSize)
+}
+
+// Save : !!! underlying implementation is different with GORM
+// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
+func (l lLMSessionDo) Save(values ...*model.LLMSession) error {
+	if len(values) == 0 {
+		return nil
+	}
+	return l.DO.Save(values)
+}
+
+func (l lLMSessionDo) First() (*model.LLMSession, error) {
+	if result, err := l.DO.First(); err != nil {
+		return nil, err
+	} else {
+		return result.(*model.LLMSession), nil
+	}
+}
+
+func (l lLMSessionDo) Take() (*model.LLMSession, error) {
+	if result, err := l.DO.Take(); err != nil {
+		return nil, err
+	} else {
+		return result.(*model.LLMSession), nil
+	}
+}
+
+func (l lLMSessionDo) Last() (*model.LLMSession, error) {
+	if result, err := l.DO.Last(); err != nil {
+		return nil, err
+	} else {
+		return result.(*model.LLMSession), nil
+	}
+}
+
+func (l lLMSessionDo) Find() ([]*model.LLMSession, error) {
+	result, err := l.DO.Find()
+	return result.([]*model.LLMSession), err
+}
+
+func (l lLMSessionDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.LLMSession, err error) {
+	buf := make([]*model.LLMSession, 0, batchSize)
+	err = l.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
+		defer func() { results = append(results, buf...) }()
+		return fc(tx, batch)
+	})
+	return results, err
+}
+
+func (l lLMSessionDo) FindInBatches(result *[]*model.LLMSession, batchSize int, fc func(tx gen.Dao, batch int) error) error {
+	return l.DO.FindInBatches(result, batchSize, fc)
+}
+
+func (l lLMSessionDo) Attrs(attrs ...field.AssignExpr) *lLMSessionDo {
+	return l.withDO(l.DO.Attrs(attrs...))
+}
+
+func (l lLMSessionDo) Assign(attrs ...field.AssignExpr) *lLMSessionDo {
+	return l.withDO(l.DO.Assign(attrs...))
+}
+
+func (l lLMSessionDo) Joins(fields ...field.RelationField) *lLMSessionDo {
+	for _, _f := range fields {
+		l = *l.withDO(l.DO.Joins(_f))
+	}
+	return &l
+}
+
+func (l lLMSessionDo) Preload(fields ...field.RelationField) *lLMSessionDo {
+	for _, _f := range fields {
+		l = *l.withDO(l.DO.Preload(_f))
+	}
+	return &l
+}
+
+func (l lLMSessionDo) FirstOrInit() (*model.LLMSession, error) {
+	if result, err := l.DO.FirstOrInit(); err != nil {
+		return nil, err
+	} else {
+		return result.(*model.LLMSession), nil
+	}
+}
+
+func (l lLMSessionDo) FirstOrCreate() (*model.LLMSession, error) {
+	if result, err := l.DO.FirstOrCreate(); err != nil {
+		return nil, err
+	} else {
+		return result.(*model.LLMSession), nil
+	}
+}
+
+func (l lLMSessionDo) FindByPage(offset int, limit int) (result []*model.LLMSession, count int64, err error) {
+	result, err = l.Offset(offset).Limit(limit).Find()
+	if err != nil {
+		return
+	}
+
+	if size := len(result); 0 < limit && 0 < size && size < limit {
+		count = int64(size + offset)
+		return
+	}
+
+	count, err = l.Offset(-1).Limit(-1).Count()
+	return
+}
+
+func (l lLMSessionDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
+	count, err = l.Count()
+	if err != nil {
+		return
+	}
+
+	err = l.Offset(offset).Limit(limit).Scan(result)
+	return
+}
+
+func (l lLMSessionDo) Scan(result interface{}) (err error) {
+	return l.DO.Scan(result)
+}
+
+func (l lLMSessionDo) Delete(models ...*model.LLMSession) (result gen.ResultInfo, err error) {
+	return l.DO.Delete(models)
+}
+
+func (l *lLMSessionDo) withDO(do gen.Dao) *lLMSessionDo {
+	l.DO = *do.(*gen.DO)
+	return l
+}