feat(wip): code completion with llm

This commit is contained in:
Jacky 2025-04-15 21:54:51 +08:00
parent 63fb823344
commit a57748a432
No known key found for this signature in database
GPG key ID: 215C21B10DF38B4D
22 changed files with 623 additions and 31 deletions

33
internal/llm/client.go Normal file
View file

@ -0,0 +1,33 @@
package llm
import (
"github.com/0xJacky/Nginx-UI/internal/transport"
"github.com/0xJacky/Nginx-UI/settings"
"github.com/sashabaranov/go-openai"
"net/http"
)
// GetClient constructs an OpenAI client according to the configured
// API type, optional HTTP proxy and optional custom base URL taken
// from the application settings.
func GetClient() (*openai.Client, error) {
	var config openai.ClientConfig
	switch openai.APIType(settings.OpenAISettings.APIType) {
	case openai.APITypeAzure:
		config = openai.DefaultAzureConfig(settings.OpenAISettings.Token, settings.OpenAISettings.BaseUrl)
	default:
		config = openai.DefaultConfig(settings.OpenAISettings.Token)
	}

	// Route requests through a proxy when one is configured.
	if proxy := settings.OpenAISettings.Proxy; proxy != "" {
		t, err := transport.NewTransport(transport.WithProxy(proxy))
		if err != nil {
			return nil, err
		}
		config.HTTPClient = &http.Client{Transport: t}
	}

	// Override the default endpoint when a custom base URL is set.
	if baseURL := settings.OpenAISettings.BaseUrl; baseURL != "" {
		config.BaseURL = baseURL
	}

	return openai.NewClientWithConfig(config), nil
}

View file

@ -0,0 +1,156 @@
package llm
import (
"context"
"regexp"
"strconv"
"strings"
"sync"
"github.com/0xJacky/Nginx-UI/settings"
"github.com/sashabaranov/go-openai"
"github.com/uozi-tech/cosy/logger"
)
const (
	// MaxTokens caps the number of tokens the model may generate for a
	// single completion.
	MaxTokens = 100
	// Temperature is the sampling temperature sent with every completion
	// request.
	Temperature = 1
	// SystemPrompt is the system-role instruction sent with every code
	// completion request.
	SystemPrompt = "You are a code completion assistant. " +
		"Complete the provided code snippet based on the context and instruction." +
		"[IMPORTANT] Keep the original code indentation."
)
// Position is the cursor position within the edited file.
type Position struct {
	Row    int `json:"row"`    // row index of the cursor
	Column int `json:"column"` // column index of the cursor
}
// CodeCompletionRequest describes a single code completion request
// issued from the editor.
type CodeCompletionRequest struct {
	RequestID string   `json:"request_id"` // client-supplied identifier for this request
	UserID    uint64   `json:"user_id"`    // issuing user; used to cancel a superseded in-flight request
	Context   string   `json:"context"`    // content of the file being edited (sent as surrounding context)
	Code      string   `json:"code"`       // code before the cursor
	Suffix    string   `json:"suffix"`     // code after the cursor; may be empty
	Language  string   `json:"language"`   // language of the file being edited
	Position  Position `json:"position"`   // cursor position within the file
}
var (
	// requestContext maps a user ID to the cancel func of that user's
	// in-flight completion request, so a newer request can cancel it.
	requestContext = make(map[uint64]context.CancelFunc)
	// mutex guards requestContext.
	mutex sync.Mutex
)
// Send performs a single (non-streaming) chat-completion call asking the
// model to complete the code at the cursor position described by c. A new
// request from the same user cancels any request still in flight.
func (c *CodeCompletionRequest) Send() (completedCode string, err error) {
	// Look up, cancel and replace the per-user cancel func atomically.
	// The original code read requestContext before taking the lock, which
	// is a data race with concurrent Send calls mutating the map.
	mutex.Lock()
	if cancel, ok := requestContext[c.UserID]; ok {
		logger.Infof("Code completion request cancelled for user %d", c.UserID)
		cancel()
	}
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	requestContext[c.UserID] = cancel
	mutex.Unlock()

	// Drop this request's cancel func from the registry once it finishes.
	defer func() {
		mutex.Lock()
		delete(requestContext, c.UserID)
		mutex.Unlock()
	}()

	openaiClient, err := GetClient()
	if err != nil {
		return
	}

	// Assemble the user prompt: file context, cursor location, code
	// before/after the cursor, then the completion instruction.
	var prompt strings.Builder
	prompt.WriteString("Here is a file written in " + c.Language + ":\n```\n" + c.Context + "\n```\n")
	prompt.WriteString("I'm editing at row " + strconv.Itoa(c.Position.Row) + ", column " + strconv.Itoa(c.Position.Column) + ".\n")
	prompt.WriteString("Code before cursor:\n```\n" + c.Code + "\n```\n")
	if c.Suffix != "" {
		prompt.WriteString("Code after cursor:\n```\n" + c.Suffix + "\n```\n")
	}
	prompt.WriteString("Instruction: Only provide the completed code that should be inserted at the cursor position without explanations. " +
		"The code should be syntactically correct and follow best practices for " + c.Language + ".")

	messages := []openai.ChatCompletionMessage{
		{
			Role:    openai.ChatMessageRoleSystem,
			Content: SystemPrompt,
		},
		{
			Role:    openai.ChatMessageRoleUser,
			Content: prompt.String(),
		},
	}

	req := openai.ChatCompletionRequest{
		Model:       settings.OpenAISettings.GetCodeCompletionModel(),
		Messages:    messages,
		MaxTokens:   MaxTokens,
		Temperature: Temperature,
	}

	// Make a direct (non-streaming) call to the API.
	response, err := openaiClient.CreateChatCompletion(ctx, req)
	if err != nil {
		return
	}
	// Guard against an empty choice list to avoid an index-out-of-range
	// panic on an unexpected API response.
	if len(response.Choices) == 0 {
		return
	}

	completedCode = response.Choices[0].Message.Content
	// Strip think-tags/markdown fences and the fragment the user already
	// typed (the last word before the cursor).
	lastWord := extractLastWord(c.Code)
	completedCode = cleanCompletionResponse(completedCode, lastWord)
	logger.Infof("Code completion response: %s", completedCode)
	return
}
// lastWordRe matches a trailing run of word characters (letters, digits,
// underscores). Compiled once at package scope instead of on every call,
// since extractLastWord runs on each completion request.
var lastWordRe = regexp.MustCompile(`[a-zA-Z0-9_]+$`)

// extractLastWord returns the trailing word of code, or "" when code is
// empty or ends with a non-word character.
func extractLastWord(code string) string {
	if code == "" {
		return ""
	}
	return lastWordRe.FindString(code)
}
var (
	// thinkTagRe strips <think>...</think> blocks (reasoning output some
	// models prepend to their answer).
	thinkTagRe = regexp.MustCompile(`<think>[\s\S]*?</think>`)
	// codeBlockRe captures the body of a fenced markdown code block with
	// an optional language tag.
	codeBlockRe = regexp.MustCompile("```(?:[a-zA-Z]+)?\n((?:.|\n)*?)\n```")
)

// cleanCompletionResponse removes any <think></think> tags and their content
// from the completion response, unwraps a markdown code fence if present,
// and strips the already-entered lastWord prefix from the completion.
// The regexes are compiled once at package scope rather than per call.
func cleanCompletionResponse(response string, lastWord string) (cleanResp string) {
	// Remove <think></think> tags and their content.
	cleanResp = thinkTagRe.ReplaceAllString(response, "")

	// Unwrap a fenced code block when one is present; otherwise just trim.
	if matches := codeBlockRe.FindStringSubmatch(cleanResp); len(matches) > 1 {
		cleanResp = strings.TrimSpace(matches[1])
	} else {
		cleanResp = strings.TrimSpace(cleanResp)
	}

	// Remove stray markdown backticks around the snippet.
	cleanResp = strings.Trim(cleanResp, "`")

	// If the completion repeats the word the user already typed, drop it.
	if lastWord != "" && strings.HasPrefix(cleanResp, lastWord) {
		cleanResp = cleanResp[len(lastWord):]
	}
	return
}

107
internal/llm/context.go Normal file
View file

@ -0,0 +1,107 @@
package llm
import (
"github.com/0xJacky/Nginx-UI/internal/helper"
"github.com/0xJacky/Nginx-UI/internal/nginx"
"github.com/sashabaranov/go-openai"
"github.com/uozi-tech/cosy/logger"
"os"
"regexp"
"strings"
)
// includeContext accumulates include paths discovered while walking an
// nginx configuration file.
type includeContext struct {
	Paths    []string        // include paths in discovery order
	PathsMap map[string]bool // visited set used to break include cycles
}
// IncludeContext returns every path referenced (directly or transitively)
// by `include` directives starting from filename, in discovery order.
func IncludeContext(filename string) (includes []string) {
	walker := &includeContext{
		Paths:    make([]string, 0),
		PathsMap: make(map[string]bool),
	}
	walker.extractIncludes(filename)
	return walker.Paths
}
// includeRe matches `include <path>;` directives at the start of a line.
// Compiled once at package scope instead of on every (recursive) call.
var includeRe = regexp.MustCompile(`(?m)^\s*include\s+([^;]+);`)

// extractIncludes extracts all include statements from the given nginx
// configuration file and recurses into each included file. Visited paths
// are tracked in c.PathsMap to avoid infinite loops on circular includes.
func (c *includeContext) extractIncludes(filename string) {
	if !helper.FileExists(filename) {
		logger.Error("File does not exist: ", filename)
		return
	}
	// Refuse to read anything outside the nginx configuration directory.
	if !helper.IsUnderDirectory(filename, nginx.GetConfPath()) {
		logger.Error("File is not under the nginx conf path: ", filename)
		return
	}

	// Read the file content
	content, err := os.ReadFile(filename)
	if err != nil {
		logger.Error(err)
		return
	}

	// Find all include statements
	matches := includeRe.FindAllStringSubmatch(string(content), -1)
	for _, match := range matches {
		if len(match) > 1 {
			includePath := match[1]
			// Skip paths already recorded, to avoid infinite loops.
			if c.PathsMap[includePath] {
				continue
			}
			c.push(includePath)
			// NOTE(review): includePath is recursed on as-is; a relative
			// include path may need resolving via nginx.GetConfPath first
			// (getConfigIncludeContext resolves it that way) — confirm.
			c.extractIncludes(includePath)
		}
	}
}
// push records path in both the ordered result list and the visited set.
func (c *includeContext) push(path string) {
	c.PathsMap[path] = true
	c.Paths = append(c.Paths, path)
}
// getConfigIncludeContext collects the content of every file included by
// filename and wraps each one in a text chat-message part for the model.
func getConfigIncludeContext(filename string) (multiContent []openai.ChatMessagePart) {
	multiContent = make([]openai.ChatMessagePart, 0)
	if !helper.IsUnderDirectory(filename, nginx.GetConfPath()) {
		return
	}

	includes := IncludeContext(filename)
	logger.Debug(includes)

	var b strings.Builder
	for _, include := range includes {
		data, _ := os.ReadFile(nginx.GetConfPath(include))
		if len(data) == 0 {
			// Unreadable or empty includes contribute no context.
			continue
		}
		b.Reset()
		b.WriteString("The Content of ")
		b.WriteString(include)
		b.WriteString(",")
		b.Write(data)
		multiContent = append(multiContent, openai.ChatMessagePart{
			Type: openai.ChatMessagePartTypeText,
			Text: b.String(),
		})
	}
	return
}

View file

@ -0,0 +1,23 @@
package llm
import (
"github.com/stretchr/testify/assert"
"regexp"
"testing"
)
// TestRegex verifies that the include-directive pattern (the same one used
// by extractIncludes) captures exactly the bare path argument of a single
// `include` directive inside a server block.
func TestRegex(t *testing.T) {
	content := `
server {
listen 80;
listen [::]:80;
server_name _;
include error_json;
}
`
	pattern := regexp.MustCompile(`(?m)^\s*include\s+([^;]+);`)
	matches := pattern.FindAllStringSubmatch(content, -1)
	// One directive matched; capture group 1 holds the include target.
	assert.Equal(t, 1, len(matches))
	assert.Equal(t, "error_json", matches[0][1])
}

10
internal/llm/errors.go Normal file
View file

@ -0,0 +1,10 @@
package llm
import (
"github.com/uozi-tech/cosy"
)
var (
	// e is the error scope shared by all errors of the llm package.
	e = cosy.NewErrorScope("llm")
	// ErrCodeCompletionNotEnabled is returned when code completion is not
	// enabled.
	ErrCodeCompletionNotEnabled = e.New(400, "code completion is not enabled")
)

21
internal/llm/messages.go Normal file
View file

@ -0,0 +1,21 @@
package llm
import (
"github.com/sashabaranov/go-openai"
)
// ChatCompletionWithContext rewrites user messages so that the content of
// files included by filename is sent alongside the user's own text as
// multi-part content.
//
// NOTE(review): the loop has no break, so EVERY user message in the
// history is expanded with the same include context — not just the most
// recent one. Confirm whether that is intended.
func ChatCompletionWithContext(filename string, messages []openai.ChatCompletionMessage) []openai.ChatCompletionMessage {
	for i := len(messages) - 1; i >= 0; i-- {
		if messages[i].Role == openai.ChatMessageRoleUser {
			// openai.ChatCompletionMessage: can't use both Content and MultiContent properties simultaneously
			multiContent := getConfigIncludeContext(filename)
			multiContent = append(multiContent, openai.ChatMessagePart{
				Type: openai.ChatMessagePartTypeText,
				Text: messages[i].Content,
			})
			// Content must be emptied because the API rejects a message
			// with both Content and MultiContent set.
			messages[i].Content = ""
			messages[i].MultiContent = multiContent
		}
	}
	return messages
}

View file

@ -0,0 +1,26 @@
package llm
import (
"github.com/sashabaranov/go-openai"
"github.com/stretchr/testify/assert"
"testing"
)
// TestChatCompletionWithContext checks that the user message in the history
// is converted to multi-part content.
func TestChatCompletionWithContext(t *testing.T) {
	history := []openai.ChatCompletionMessage{
		{Role: openai.ChatMessageRoleSystem},
		{Role: openai.ChatMessageRoleUser},
		{Role: openai.ChatMessageRoleAssistant},
	}
	result := ChatCompletionWithContext("test", history)
	assert.NotNil(t, result[1].MultiContent)
}