openai.go 4.5 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160
  1. package api
  2. import (
  3. "context"
  4. "fmt"
  5. "github.com/0xJacky/Nginx-UI/server/model"
  6. "github.com/0xJacky/Nginx-UI/server/query"
  7. "github.com/0xJacky/Nginx-UI/server/settings"
  8. "github.com/gin-gonic/gin"
  9. "github.com/pkg/errors"
  10. "github.com/sashabaranov/go-openai"
  11. "io"
  12. "log"
  13. "net/http"
  14. "net/url"
  15. "os"
  16. )
// ChatGPTInitPrompt is the system prompt prepended to every conversation
// forwarded to the OpenAI API. It tells the model to act as an Nginx
// configuration assistant and to answer in the language indicated by the
// current language code (CLC) carried in the first user message.
// NOTE(review): the prompt text contains grammar slips ("a assistant",
// "You suppose to"); it is runtime behavior sent to the model, so it is
// deliberately left byte-identical here.
const ChatGPTInitPrompt = "You are a assistant who can help users write and optimise the configurations of Nginx, the first user message contains the content of the configuration file which is currently opened by the user and the current language code(CLC). You suppose to use the language corresponding to the CLC to give the first reply. Later the language environment depends on the user message. The first reply should involve the key information of the file and ask user what can you help them."
  18. func MakeChatCompletionRequest(c *gin.Context) {
  19. var json struct {
  20. Messages []openai.ChatCompletionMessage `json:"messages"`
  21. }
  22. if !BindAndValid(c, &json) {
  23. return
  24. }
  25. messages := []openai.ChatCompletionMessage{
  26. {
  27. Role: openai.ChatMessageRoleSystem,
  28. Content: ChatGPTInitPrompt,
  29. },
  30. }
  31. messages = append(messages, json.Messages...)
  32. // sse server
  33. c.Writer.Header().Set("Content-Type", "text/event-stream")
  34. c.Writer.Header().Set("Cache-Control", "no-cache")
  35. c.Writer.Header().Set("Connection", "keep-alive")
  36. c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
  37. log.Println(settings.OpenAISettings.Token)
  38. config := openai.DefaultConfig(settings.OpenAISettings.Token)
  39. if settings.OpenAISettings.Proxy != "" {
  40. proxyUrl, err := url.Parse(settings.OpenAISettings.Proxy)
  41. if err != nil {
  42. c.Stream(func(w io.Writer) bool {
  43. c.SSEvent("message", gin.H{
  44. "type": "error",
  45. "content": err.Error(),
  46. })
  47. return false
  48. })
  49. return
  50. }
  51. transport := &http.Transport{
  52. Proxy: http.ProxyURL(proxyUrl),
  53. }
  54. config.HTTPClient = &http.Client{
  55. Transport: transport,
  56. }
  57. }
  58. if settings.OpenAISettings.BaseUrl != "" {
  59. config.BaseURL = settings.OpenAISettings.BaseUrl
  60. }
  61. openaiClient := openai.NewClientWithConfig(config)
  62. ctx := context.Background()
  63. req := openai.ChatCompletionRequest{
  64. Model: openai.GPT3Dot5Turbo0301,
  65. Messages: messages,
  66. Stream: true,
  67. }
  68. stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
  69. if err != nil {
  70. fmt.Printf("CompletionStream error: %v\n", err)
  71. c.Stream(func(w io.Writer) bool {
  72. c.SSEvent("message", gin.H{
  73. "type": "error",
  74. "content": err.Error(),
  75. })
  76. return false
  77. })
  78. return
  79. }
  80. defer stream.Close()
  81. msgChan := make(chan string)
  82. go func() {
  83. for {
  84. response, err := stream.Recv()
  85. if errors.Is(err, io.EOF) {
  86. close(msgChan)
  87. fmt.Println()
  88. return
  89. }
  90. if err != nil {
  91. fmt.Printf("Stream error: %v\n", err)
  92. close(msgChan)
  93. return
  94. }
  95. // Send SSE to client
  96. message := fmt.Sprintf("%s", response.Choices[0].Delta.Content)
  97. fmt.Printf("%s", response.Choices[0].Delta.Content)
  98. _ = os.Stdout.Sync()
  99. msgChan <- message
  100. }
  101. }()
  102. c.Stream(func(w io.Writer) bool {
  103. if m, ok := <-msgChan; ok {
  104. c.SSEvent("message", gin.H{
  105. "type": "message",
  106. "content": m,
  107. })
  108. return true
  109. }
  110. return false
  111. })
  112. }
  113. func StoreChatGPTRecord(c *gin.Context) {
  114. var json struct {
  115. FileName string `json:"file_name"`
  116. Messages []openai.ChatCompletionMessage `json:"messages"`
  117. }
  118. if !BindAndValid(c, &json) {
  119. return
  120. }
  121. name := json.FileName
  122. g := query.ChatGPTLog
  123. _, err := g.Where(g.Name.Eq(name)).FirstOrCreate()
  124. if err != nil {
  125. ErrHandler(c, err)
  126. return
  127. }
  128. _, err = g.Where(g.Name.Eq(name)).Updates(&model.ChatGPTLog{
  129. Name: name,
  130. Content: json.Messages,
  131. })
  132. if err != nil {
  133. ErrHandler(c, err)
  134. return
  135. }
  136. c.JSON(http.StatusOK, gin.H{
  137. "message": "ok",
  138. })
  139. }