package llm

import (
	"context"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/0xJacky/Nginx-UI/api"
	"github.com/0xJacky/Nginx-UI/internal/llm"
	"github.com/0xJacky/Nginx-UI/settings"
	"github.com/gin-gonic/gin"
	"github.com/sashabaranov/go-openai"
	"github.com/uozi-tech/cosy"
	"github.com/uozi-tech/cosy/logger"
)
  17. func MakeChatCompletionRequest(c *gin.Context) {
  18. var json struct {
  19. Type string `json:"type"`
  20. Messages []openai.ChatCompletionMessage `json:"messages"`
  21. Language string `json:"language,omitempty"`
  22. NginxConfig string `json:"nginx_config,omitempty"` // Separate field for nginx configuration content
  23. OSInfo string `json:"os_info,omitempty"` // Operating system information
  24. }
  25. if !cosy.BindAndValid(c, &json) {
  26. return
  27. }
  28. // Choose appropriate system prompt based on the type
  29. var systemPrompt string
  30. if json.Type == "terminal" {
  31. systemPrompt = llm.TerminalAssistantPrompt
  32. // Add OS context for terminal assistant
  33. if json.OSInfo != "" {
  34. systemPrompt += fmt.Sprintf("\n\nSystem Information: %s", json.OSInfo)
  35. }
  36. } else {
  37. systemPrompt = llm.NginxConfigPrompt
  38. }
  39. // Append language instruction if language is provided
  40. if json.Language != "" {
  41. systemPrompt += fmt.Sprintf("\n\nIMPORTANT: Please respond in the language corresponding to this language code: %s", json.Language)
  42. }
  43. messages := []openai.ChatCompletionMessage{
  44. {
  45. Role: openai.ChatMessageRoleSystem,
  46. Content: systemPrompt,
  47. },
  48. }
  49. // Add nginx configuration context if provided
  50. if json.Type != "terminal" && json.NginxConfig != "" {
  51. // Add nginx configuration as context to the first user message
  52. if len(json.Messages) > 0 && json.Messages[0].Role == openai.ChatMessageRoleUser {
  53. // Prepend the nginx configuration to the first user message
  54. contextualContent := fmt.Sprintf("Nginx Configuration:\n```nginx\n%s\n```\n\n%s", json.NginxConfig, json.Messages[0].Content)
  55. json.Messages[0].Content = contextualContent
  56. }
  57. }
  58. messages = append(messages, json.Messages...)
  59. // SSE server
  60. api.SetSSEHeaders(c)
  61. openaiClient, err := llm.GetClient()
  62. if err != nil {
  63. c.Stream(func(w io.Writer) bool {
  64. c.SSEvent("message", gin.H{
  65. "type": "error",
  66. "content": err.Error(),
  67. })
  68. return false
  69. })
  70. return
  71. }
  72. ctx := context.Background()
  73. req := openai.ChatCompletionRequest{
  74. Model: settings.OpenAISettings.Model,
  75. Messages: messages,
  76. Stream: true,
  77. }
  78. stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
  79. if err != nil {
  80. logger.Errorf("CompletionStream error: %v\n", err)
  81. c.Stream(func(w io.Writer) bool {
  82. c.SSEvent("message", gin.H{
  83. "type": "error",
  84. "content": err.Error(),
  85. })
  86. return false
  87. })
  88. return
  89. }
  90. defer stream.Close()
  91. msgChan := make(chan string)
  92. go func() {
  93. defer close(msgChan)
  94. messageCh := make(chan string)
  95. // 消息接收协程
  96. go func() {
  97. defer close(messageCh)
  98. for {
  99. response, err := stream.Recv()
  100. if errors.Is(err, io.EOF) {
  101. return
  102. }
  103. if err != nil {
  104. messageCh <- fmt.Sprintf("error: %v", err)
  105. logger.Errorf("Stream error: %v\n", err)
  106. return
  107. }
  108. messageCh <- response.Choices[0].Delta.Content
  109. }
  110. }()
  111. ticker := time.NewTicker(500 * time.Millisecond)
  112. defer ticker.Stop()
  113. var buffer strings.Builder
  114. for {
  115. select {
  116. case msg, ok := <-messageCh:
  117. if !ok {
  118. if buffer.Len() > 0 {
  119. msgChan <- buffer.String()
  120. }
  121. return
  122. }
  123. if strings.HasPrefix(msg, "error: ") {
  124. msgChan <- msg
  125. return
  126. }
  127. buffer.WriteString(msg)
  128. case <-ticker.C:
  129. if buffer.Len() > 0 {
  130. msgChan <- buffer.String()
  131. buffer.Reset()
  132. }
  133. }
  134. }
  135. }()
  136. c.Stream(func(w io.Writer) bool {
  137. m, ok := <-msgChan
  138. if !ok {
  139. return false
  140. }
  141. if strings.HasPrefix(m, "error: ") {
  142. c.SSEvent("message", gin.H{
  143. "type": "error",
  144. "content": strings.TrimPrefix(m, "error: "),
  145. })
  146. return false
  147. }
  148. c.SSEvent("message", gin.H{
  149. "type": "message",
  150. "content": m,
  151. })
  152. return true
  153. })
  154. }