package openai

import (
	"context"
	"errors"
	"fmt"
	"io"
	"strings"
	"time"

	"github.com/0xJacky/Nginx-UI/internal/llm"
	"github.com/0xJacky/Nginx-UI/settings"
	"github.com/gin-gonic/gin"
	"github.com/sashabaranov/go-openai"
	"github.com/uozi-tech/cosy"
	"github.com/uozi-tech/cosy/logger"
)
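
// ChatGPTInitPrompt is the system prompt that seeds every chat completion session.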
const ChatGPTInitPrompt = `You are an assistant who helps users write and optimise Nginx configurations.
The first user message contains the content of the configuration file currently opened by the user,
along with the current language code (CLC). You should write the first reply in the language corresponding to the CLC.
After that, follow the language of the user's messages.
The first reply should summarise the key information of the file and ask the user what you can help them with.`
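
// MakeChatCompletionRequest binds the chat request, prepends the system
// prompt, optionally attaches file context, and streams the completion
// back to the client via Server-Sent Events.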
func MakeChatCompletionRequest(c *gin.Context) {
	var json struct {
		Filepath string                         `json:"filepath"`
		Messages []openai.ChatCompletionMessage `json:"messages"`
	}

	if !cosy.BindAndValid(c, &json) {
		return
	}
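
	// Seed the conversation with the system prompt, then append the
	// client-supplied message history.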
	messages := []openai.ChatCompletionMessage{
		{
			Role:    openai.ChatMessageRoleSystem,
			Content: ChatGPTInitPrompt,
		},
	}
	messages = append(messages, json.Messages...)
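
	// When a file path is provided, let the llm package inject the file
	// content into the conversation as additional context.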
	if json.Filepath != "" {
		messages = llm.ChatCompletionWithContext(json.Filepath, messages)
	}

	// Set the response headers required for Server-Sent Events (SSE).
	c.Writer.Header().Set("Content-Type", "text/event-stream; charset=utf-8")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
	openaiClient, err := llm.GetClient()
	if err != nil {
		c.Stream(func(w io.Writer) bool {
			c.SSEvent("message", gin.H{
				"type":    "error",
				"content": err.Error(),
			})
			return false
		})
		return
	}
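
	// Build a streaming chat completion request against the configured model.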
	ctx := context.Background()
	req := openai.ChatCompletionRequest{
		Model:    settings.OpenAISettings.Model,
		Messages: messages,
		Stream:   true,
	}

	stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
	if err != nil {
		logger.Errorf("CompletionStream error: %v\n", err)
		c.Stream(func(w io.Writer) bool {
			c.SSEvent("message", gin.H{
				"type":    "error",
				"content": err.Error(),
			})
			return false
		})
		return
	}
	defer stream.Close()
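
	// Producer: read deltas from the completion stream, accumulate them in a
	// buffer, and flush the buffer to msgChan at most every 500ms so the
	// client receives batched chunks instead of one SSE event per token.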
	msgChan := make(chan string)
	go func() {
		defer close(msgChan)

		messageCh := make(chan string)
		// Receiver goroutine: forward each delta from the stream to messageCh.
		go func() {
			defer close(messageCh)
			for {
				response, err := stream.Recv()
				if errors.Is(err, io.EOF) {
					return
				}
				if err != nil {
					messageCh <- fmt.Sprintf("error: %v", err)
					logger.Errorf("Stream error: %v\n", err)
					return
				}
				messageCh <- response.Choices[0].Delta.Content
			}
		}()
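
		// Flush the buffer on every tick; on channel close, emit any
		// remaining content; on an "error: " message, forward it immediately.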
		ticker := time.NewTicker(500 * time.Millisecond)
		defer ticker.Stop()

		var buffer strings.Builder
		for {
			select {
			case msg, ok := <-messageCh:
				if !ok {
					if buffer.Len() > 0 {
						msgChan <- buffer.String()
					}
					return
				}
				if strings.HasPrefix(msg, "error: ") {
					msgChan <- msg
					return
				}
				buffer.WriteString(msg)
			case <-ticker.C:
				if buffer.Len() > 0 {
					msgChan <- buffer.String()
					buffer.Reset()
				}
			}
		}
	}()
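
	// Consumer: relay each batched chunk to the client as an SSE event,
	// stopping when msgChan closes or an error message arrives.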
	c.Stream(func(w io.Writer) bool {
		m, ok := <-msgChan
		if !ok {
			return false
		}

		if strings.HasPrefix(m, "error: ") {
			c.SSEvent("message", gin.H{
				"type":    "error",
				"content": strings.TrimPrefix(m, "error: "),
			})
			return false
		}

		c.SSEvent("message", gin.H{
			"type":    "message",
			"content": m,
		})
		return true
	})
}