enhance: chat with LLM

Jacky 2024-05-02 12:49:01 +08:00
parent e84ea98be9
commit 642e21a260
40 changed files with 544 additions and 250 deletions

View file

@@ -23,7 +23,7 @@ func GetEnvironment(c *gin.Context) {
 		return
 	}
-	c.JSON(http.StatusOK, env)
+	c.JSON(http.StatusOK, analytic.GetNode(env))
 }

 func GetEnvironmentList(c *gin.Context) {
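
The environment detail endpoint no longer returns the raw database record; it wraps it with analytic.GetNode first. The diff does not show what GetNode adds, so the following is only a hypothetical sketch — the Node type, its Status field, and the placeholder health check are all assumptions for illustration:

package analytic

import "github.com/0xJacky/Nginx-UI/model"

// Node is an assumed shape: the diff only shows that the handler wraps the
// environment before serializing it, not which fields the wrapper adds.
type Node struct {
	*model.Environment
	Status bool `json:"status"` // assumed: whether the remote node is reachable
}

// GetNode (sketch) attaches runtime information to the stored environment.
func GetNode(env *model.Environment) *Node {
	return &Node{
		Environment: env,
		Status:      env != nil, // placeholder for a real health probe
	}
}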

View file

@@ -1,52 +1,52 @@
 package config

 import (
 	"github.com/0xJacky/Nginx-UI/api"
 	"github.com/0xJacky/Nginx-UI/internal/config"
 	"github.com/0xJacky/Nginx-UI/internal/nginx"
 	"github.com/0xJacky/Nginx-UI/query"
 	"github.com/gin-gonic/gin"
 	"github.com/sashabaranov/go-openai"
 	"net/http"
 	"os"
 )

 func GetConfig(c *gin.Context) {
 	name := c.Param("name")
 	path := nginx.GetConfPath("/", name)

 	stat, err := os.Stat(path)
 	if err != nil {
 		api.ErrHandler(c, err)
 		return
 	}

 	content, err := os.ReadFile(path)
 	if err != nil {
 		api.ErrHandler(c, err)
 		return
 	}

 	g := query.ChatGPTLog
 	chatgpt, err := g.Where(g.Name.Eq(path)).FirstOrCreate()
 	if err != nil {
 		api.ErrHandler(c, err)
 		return
 	}

 	if chatgpt.Content == nil {
 		chatgpt.Content = make([]openai.ChatCompletionMessage, 0)
 	}

 	c.JSON(http.StatusOK, config.Config{
-		Name:            name,
+		Name:            stat.Name(),
 		Content:         string(content),
 		ChatGPTMessages: chatgpt.Content,
 		FilePath:        path,
 		ModifiedAt:      stat.ModTime(),
 	})
 }
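
GetConfig keys the chat history on the config file path via query.ChatGPTLog, creating an empty record on first access so the response always carries a message slice. A plausible shape for the backing model, assuming the messages are stored as serialized JSON — the actual definition is not part of this diff:

package model

import "github.com/sashabaranov/go-openai"

// ChatGPTLog (assumed shape): one chat transcript per configuration file.
type ChatGPTLog struct {
	Name    string                         `json:"name"` // the config file path, used as the key
	Content []openai.ChatCompletionMessage `json:"content" gorm:"serializer:json"`
}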

View file

@@ -1,123 +1,132 @@
 package openai

 import (
 	"context"
 	"crypto/tls"
 	"fmt"
 	"github.com/0xJacky/Nginx-UI/api"
+	"github.com/0xJacky/Nginx-UI/internal/chatbot"
 	"github.com/0xJacky/Nginx-UI/settings"
 	"github.com/gin-gonic/gin"
 	"github.com/pkg/errors"
 	"github.com/sashabaranov/go-openai"
 	"io"
 	"net/http"
 	"net/url"
-	"os"
 )

-const ChatGPTInitPrompt = "You are an assistant who can help users write and optimise the configurations of Nginx. The first user message contains the content of the configuration file which is currently opened by the user and the current language code (CLC). You are supposed to use the language corresponding to the CLC to give the first reply. Later the language environment depends on the user message. The first reply should involve the key information of the file and ask the user how you can help them."
+const ChatGPTInitPrompt = `You are an assistant who can help users write and optimise the configurations of Nginx.
+The first user message contains the content of the configuration file which is currently opened by the user and
+the current language code (CLC). You are supposed to use the language corresponding to the CLC to give the first reply.
+Later the language environment depends on the user message.
+The first reply should involve the key information of the file and ask the user how you can help them.`

 func MakeChatCompletionRequest(c *gin.Context) {
 	var json struct {
+		Filepath string                         `json:"filepath"`
 		Messages []openai.ChatCompletionMessage `json:"messages"`
 	}

 	if !api.BindAndValid(c, &json) {
 		return
 	}

 	messages := []openai.ChatCompletionMessage{
 		{
 			Role:    openai.ChatMessageRoleSystem,
 			Content: ChatGPTInitPrompt,
 		},
 	}

 	messages = append(messages, json.Messages...)

-	// sse server
+	if json.Filepath != "" {
+		messages = chatbot.ChatCompletionWithContext(json.Filepath, messages)
+	}
+
+	// SSE server
 	c.Writer.Header().Set("Content-Type", "text/event-stream; charset=utf-8")
 	c.Writer.Header().Set("Cache-Control", "no-cache")
 	c.Writer.Header().Set("Connection", "keep-alive")
 	c.Writer.Header().Set("Access-Control-Allow-Origin", "*")

 	config := openai.DefaultConfig(settings.OpenAISettings.Token)

 	if settings.OpenAISettings.Proxy != "" {
 		proxyUrl, err := url.Parse(settings.OpenAISettings.Proxy)
 		if err != nil {
 			c.Stream(func(w io.Writer) bool {
 				c.SSEvent("message", gin.H{
 					"type":    "error",
 					"content": err.Error(),
 				})
 				return false
 			})
 			return
 		}
 		transport := &http.Transport{
 			Proxy:           http.ProxyURL(proxyUrl),
 			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
 		}
 		config.HTTPClient = &http.Client{
 			Transport: transport,
 		}
 	}

 	if settings.OpenAISettings.BaseUrl != "" {
 		config.BaseURL = settings.OpenAISettings.BaseUrl
 	}

 	openaiClient := openai.NewClientWithConfig(config)
 	ctx := context.Background()

 	req := openai.ChatCompletionRequest{
 		Model:    settings.OpenAISettings.Model,
 		Messages: messages,
 		Stream:   true,
 	}

 	stream, err := openaiClient.CreateChatCompletionStream(ctx, req)
 	if err != nil {
 		fmt.Printf("CompletionStream error: %v\n", err)
 		c.Stream(func(w io.Writer) bool {
 			c.SSEvent("message", gin.H{
 				"type":    "error",
 				"content": err.Error(),
 			})
 			return false
 		})
 		return
 	}
 	defer stream.Close()

 	msgChan := make(chan string)
 	go func() {
 		defer close(msgChan)
 		for {
 			response, err := stream.Recv()
 			if errors.Is(err, io.EOF) {
 				fmt.Println()
 				return
 			}
 			if err != nil {
 				fmt.Printf("Stream error: %v\n", err)
 				return
 			}

 			message := fmt.Sprintf("%s", response.Choices[0].Delta.Content)
-			fmt.Printf("%s", message)
-			_ = os.Stdout.Sync()

 			msgChan <- message
 		}
 	}()

 	c.Stream(func(w io.Writer) bool {
 		if m, ok := <-msgChan; ok {
 			c.SSEvent("message", gin.H{
 				"type":    "message",
 				"content": m,
 			})
 			return true
 		}
 		return false
 	})
 }
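
The only contract this diff reveals for the new helper is its call site: chatbot.ChatCompletionWithContext takes the file path and the message slice and returns an augmented slice. A minimal sketch under that contract — reading the referenced file and appending it as a system message is an assumption; the real body lives in internal/chatbot and is not shown here:

package chatbot

import (
	"os"

	"github.com/sashabaranov/go-openai"
)

// ChatCompletionWithContext (sketch): ground the conversation in the file
// the user is currently editing.
func ChatCompletionWithContext(filepath string, messages []openai.ChatCompletionMessage) []openai.ChatCompletionMessage {
	content, err := os.ReadFile(filepath)
	if err != nil {
		return messages // fall back to the plain conversation on read errors
	}
	return append(messages, openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleSystem,
		Content: "Content of the currently opened configuration file:\n" + string(content),
	})
}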

View file

@@ -4,6 +4,6 @@ import "github.com/gin-gonic/gin"

 func InitRouter(r *gin.RouterGroup) {
 	// ChatGPT
-	r.POST("chat_gpt", MakeChatCompletionRequest)
-	r.POST("chat_gpt_record", StoreChatGPTRecord)
+	r.POST("chatgpt", MakeChatCompletionRequest)
+	r.POST("chatgpt_record", StoreChatGPTRecord)
 }
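
Since the handler streams through gin's c.SSEvent, the renamed chatgpt route can be consumed as plain server-sent events. A hedged example of a Go client — the base URL, port, and API prefix are deployment assumptions rather than part of this diff, and the real endpoint sits behind authentication:

package main

import (
	"bufio"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// Assumed deployment address; the filepath field feeds ChatCompletionWithContext.
	payload := strings.NewReader(`{"filepath":"/etc/nginx/nginx.conf","messages":[{"role":"user","content":"Explain this config. CLC: en"}]}`)
	resp, err := http.Post("http://127.0.0.1:9000/api/chatgpt", "application/json", payload)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// gin's SSEvent emits "event:message" and "data:{...}" frames; print the data lines.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		if line := scanner.Text(); strings.HasPrefix(line, "data:") {
			fmt.Println(strings.TrimPrefix(line, "data:"))
		}
	}
}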

View file

@@ -132,6 +132,7 @@ func GetDomain(c *gin.Context) {
 			Config:          string(origContent),
 			AutoCert:        certModel.AutoCert == model.AutoCertEnabled,
 			ChatGPTMessages: chatgpt.Content,
+			Filepath:        path,
 		})
 		return
 	}
@@ -173,6 +174,7 @@ func GetDomain(c *gin.Context) {
 		AutoCert:        certModel.AutoCert == model.AutoCertEnabled,
 		CertInfo:        certInfoMap,
 		ChatGPTMessages: chatgpt.Content,
+		Filepath:        path,
 	})
 }

View file

@@ -17,4 +17,5 @@ type Site struct {
 	ChatGPTMessages []openai.ChatCompletionMessage `json:"chatgpt_messages,omitempty"`
 	Tokenized       *nginx.NgxConfig               `json:"tokenized,omitempty"`
 	CertInfo        map[int]*cert.Info             `json:"cert_info,omitempty"`
+	Filepath        string                         `json:"filepath"`
 }

View file

@@ -22,6 +22,7 @@ type Stream struct {
 	Config          string                         `json:"config"`
 	ChatGPTMessages []openai.ChatCompletionMessage `json:"chatgpt_messages,omitempty"`
 	Tokenized       *nginx.NgxConfig               `json:"tokenized,omitempty"`
+	Filepath        string                         `json:"filepath"`
 }

 func GetStreams(c *gin.Context) {
@@ -133,6 +134,7 @@ func GetStream(c *gin.Context) {
 		Name:            name,
 		Config:          string(origContent),
 		ChatGPTMessages: chatgpt.Content,
+		Filepath:        path,
 	})
 	return
 }
@@ -152,6 +154,7 @@ func GetStream(c *gin.Context) {
 		Config:          nginxConfig.FmtCode(),
 		Tokenized:       nginxConfig,
 		ChatGPTMessages: chatgpt.Content,
+		Filepath:        path,
 	})
 }
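
Taken together, the Filepath field added to the config, site, and stream responses is the value the front end echoes back as filepath in the chat request, which is how MakeChatCompletionRequest knows which file to hand to chatbot.ChatCompletionWithContext.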