1
0

openai.go 3.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168
  1. package api
  2. import (
  3. "context"
  4. "fmt"
  5. "github.com/0xJacky/Nginx-UI/server/model"
  6. "github.com/0xJacky/Nginx-UI/server/query"
  7. "github.com/0xJacky/Nginx-UI/server/settings"
  8. "github.com/gin-gonic/gin"
  9. "github.com/pkg/errors"
  10. "github.com/sashabaranov/go-openai"
  11. "io"
  12. "net/http"
  13. "net/url"
  14. "os"
  15. )
// ChatGPTInitPrompt is the system prompt prepended to every chat session.
// It instructs the model to act as an Nginx configuration assistant and to
// reply in the language indicated by the current language code (CLC) carried
// in the first user message.
const ChatGPTInitPrompt = "You are a assistant who can help users write and optimise the configurations of Nginx, the first user message contains the content of the configuration file which is currently opened by the user and the current language code(CLC). You suppose to use the language corresponding to the CLC to give the first reply. Later the language environment depends on the user message. The first reply should involve the key information of the file and ask user what can you help them."
  17. func MakeChatCompletionRequest(c *gin.Context) {
  18. var json struct {
  19. Messages []openai.ChatCompletionMessage `json:"messages"`
  20. }
  21. if !BindAndValid(c, &json) {
  22. return
  23. }
  24. messages := []openai.ChatCompletionMessage{
  25. {
  26. Role: openai.ChatMessageRoleSystem,
  27. Content: ChatGPTInitPrompt,
  28. },
  29. }
  30. messages = append(messages, json.Messages...)
  31. // sse server
  32. c.Writer.Header().Set("Content-Type", "text/event-stream")
  33. c.Writer.Header().Set("Cache-Control", "no-cache")
  34. c.Writer.Header().Set("Connection", "keep-alive")
  35. c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
  36. if settings.OpenAISettings.Token == "" {
  37. c.Stream(func(w io.Writer) bool {
  38. c.SSEvent("message", gin.H{
  39. "type": "error",
  40. "content": "[Error] OpenAI token is empty",
  41. })
  42. return false
  43. })
  44. return
  45. }
  46. config := openai.DefaultConfig(settings.OpenAISettings.Token)
  47. if settings.OpenAISettings.Proxy != "" {
  48. proxyUrl, err := url.Parse(settings.OpenAISettings.Proxy)
  49. if err != nil {
  50. c.Stream(func(w io.Writer) bool {
  51. c.SSEvent("message", gin.H{
  52. "type": "error",
  53. "content": err.Error(),
  54. })
  55. return false
  56. })
  57. return
  58. }
  59. transport := &http.Transport{
  60. Proxy: http.ProxyURL(proxyUrl),
  61. }
  62. config.HTTPClient = &http.Client{
  63. Transport: transport,
  64. }
  65. }
  66. if settings.OpenAISettings.BaseUrl != "" {
  67. config.BaseURL = settings.OpenAISettings.BaseUrl
  68. }
  69. openaiClient := openai.NewClientWithConfig(config)
  70. ctx := context.Background()
  71. req := openai.ChatCompletionRequest{
  72. Model: settings.OpenAISettings.Model,
  73. Messages: messages,
  74. Stream: true,
  75. }
  76. stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
  77. if err != nil {
  78. fmt.Printf("CompletionStream error: %v\n", err)
  79. c.Stream(func(w io.Writer) bool {
  80. c.SSEvent("message", gin.H{
  81. "type": "error",
  82. "content": err.Error(),
  83. })
  84. return false
  85. })
  86. return
  87. }
  88. defer stream.Close()
  89. msgChan := make(chan string)
  90. go func() {
  91. for {
  92. response, err := stream.Recv()
  93. if errors.Is(err, io.EOF) {
  94. close(msgChan)
  95. fmt.Println()
  96. return
  97. }
  98. if err != nil {
  99. fmt.Printf("Stream error: %v\n", err)
  100. close(msgChan)
  101. return
  102. }
  103. message := fmt.Sprintf("%s", response.Choices[0].Delta.Content)
  104. fmt.Printf("%s", message)
  105. _ = os.Stdout.Sync()
  106. msgChan <- message
  107. }
  108. }()
  109. c.Stream(func(w io.Writer) bool {
  110. if m, ok := <-msgChan; ok {
  111. c.SSEvent("message", gin.H{
  112. "type": "message",
  113. "content": m,
  114. })
  115. return true
  116. }
  117. return false
  118. })
  119. }
  120. func StoreChatGPTRecord(c *gin.Context) {
  121. var json struct {
  122. FileName string `json:"file_name"`
  123. Messages []openai.ChatCompletionMessage `json:"messages"`
  124. }
  125. if !BindAndValid(c, &json) {
  126. return
  127. }
  128. name := json.FileName
  129. g := query.ChatGPTLog
  130. _, err := g.Where(g.Name.Eq(name)).FirstOrCreate()
  131. if err != nil {
  132. ErrHandler(c, err)
  133. return
  134. }
  135. _, err = g.Where(g.Name.Eq(name)).Updates(&model.ChatGPTLog{
  136. Name: name,
  137. Content: json.Messages,
  138. })
  139. if err != nil {
  140. ErrHandler(c, err)
  141. return
  142. }
  143. c.JSON(http.StatusOK, gin.H{
  144. "message": "ok",
  145. })
  146. }