  1. package openai
  2. import (
  3. "context"
  4. "errors"
  5. "fmt"
  6. "io"
  7. "strings"
  8. "time"
  9. "github.com/0xJacky/Nginx-UI/api"
  10. "github.com/0xJacky/Nginx-UI/internal/llm"
  11. "github.com/0xJacky/Nginx-UI/settings"
  12. "github.com/gin-gonic/gin"
  13. "github.com/sashabaranov/go-openai"
  14. "github.com/uozi-tech/cosy"
  15. "github.com/uozi-tech/cosy/logger"
  16. )
  17. const ChatGPTInitPrompt = `You are a assistant who can help users write and optimise the configurations of Nginx,
  18. the first user message contains the content of the configuration file which is currently opened by the user and
  19. the current language code(CLC). You suppose to use the language corresponding to the CLC to give the first reply.
  20. Later the language environment depends on the user message.
  21. The first reply should involve the key information of the file and ask user what can you help them.`
  22. func MakeChatCompletionRequest(c *gin.Context) {
  23. var json struct {
  24. Filepath string `json:"filepath"`
  25. Messages []openai.ChatCompletionMessage `json:"messages"`
  26. }
  27. if !cosy.BindAndValid(c, &json) {
  28. return
  29. }
  30. messages := []openai.ChatCompletionMessage{
  31. {
  32. Role: openai.ChatMessageRoleSystem,
  33. Content: ChatGPTInitPrompt,
  34. },
  35. }
  36. messages = append(messages, json.Messages...)
  37. if json.Filepath != "" {
  38. messages = llm.ChatCompletionWithContext(json.Filepath, messages)
  39. }
  40. // SSE server
  41. api.SetSSEHeaders(c)
  42. openaiClient, err := llm.GetClient()
  43. if err != nil {
  44. c.Stream(func(w io.Writer) bool {
  45. c.SSEvent("message", gin.H{
  46. "type": "error",
  47. "content": err.Error(),
  48. })
  49. return false
  50. })
  51. return
  52. }
  53. ctx := context.Background()
  54. req := openai.ChatCompletionRequest{
  55. Model: settings.OpenAISettings.Model,
  56. Messages: messages,
  57. Stream: true,
  58. }
  59. stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
  60. if err != nil {
  61. logger.Errorf("CompletionStream error: %v\n", err)
  62. c.Stream(func(w io.Writer) bool {
  63. c.SSEvent("message", gin.H{
  64. "type": "error",
  65. "content": err.Error(),
  66. })
  67. return false
  68. })
  69. return
  70. }
  71. defer stream.Close()
  72. msgChan := make(chan string)
  73. go func() {
  74. defer close(msgChan)
  75. messageCh := make(chan string)
  76. // 消息接收协程
  77. go func() {
  78. defer close(messageCh)
  79. for {
  80. response, err := stream.Recv()
  81. if errors.Is(err, io.EOF) {
  82. return
  83. }
  84. if err != nil {
  85. messageCh <- fmt.Sprintf("error: %v", err)
  86. logger.Errorf("Stream error: %v\n", err)
  87. return
  88. }
  89. messageCh <- response.Choices[0].Delta.Content
  90. }
  91. }()
  92. ticker := time.NewTicker(500 * time.Millisecond)
  93. defer ticker.Stop()
  94. var buffer strings.Builder
  95. for {
  96. select {
  97. case msg, ok := <-messageCh:
  98. if !ok {
  99. if buffer.Len() > 0 {
  100. msgChan <- buffer.String()
  101. }
  102. return
  103. }
  104. if strings.HasPrefix(msg, "error: ") {
  105. msgChan <- msg
  106. return
  107. }
  108. buffer.WriteString(msg)
  109. case <-ticker.C:
  110. if buffer.Len() > 0 {
  111. msgChan <- buffer.String()
  112. buffer.Reset()
  113. }
  114. }
  115. }
  116. }()
  117. c.Stream(func(w io.Writer) bool {
  118. m, ok := <-msgChan
  119. if !ok {
  120. return false
  121. }
  122. if strings.HasPrefix(m, "error: ") {
  123. c.SSEvent("message", gin.H{
  124. "type": "error",
  125. "content": strings.TrimPrefix(m, "error: "),
  126. })
  127. return false
  128. }
  129. c.SSEvent("message", gin.H{
  130. "type": "message",
  131. "content": m,
  132. })
  133. return true
  134. })
  135. }