Mirror of https://github.com/0xJacky/nginx-ui.git, synced 2025-05-11 18:35:51 +02:00
feat(env_group): migrate site_category to env_group
parent de1860718e
commit a379211e3c
66 changed files with 4837 additions and 4251 deletions
@@ -1,9 +1,10 @@
 package config
 
 import (
+    "time"
+
     "github.com/0xJacky/Nginx-UI/model"
     "github.com/sashabaranov/go-openai"
-    "time"
 )
 
 type Config struct {
@@ -14,8 +15,8 @@ type Config struct {
     ModifiedAt time.Time `json:"modified_at"`
     Size       int64     `json:"size,omitempty"`
     IsDir      bool      `json:"is_dir"`
-    SiteCategoryID uint64              `json:"site_category_id"`
-    SiteCategory   *model.SiteCategory `json:"site_category,omitempty"`
+    EnvGroupID uint64          `json:"env_group_id"`
+    EnvGroup   *model.EnvGroup `json:"env_group,omitempty"`
     Enabled    bool   `json:"enabled"`
     Dir        string `json:"dir"`
 }

@@ -33,8 +33,8 @@ func (c ConfigsSort) Less(i, j int) bool {
         flag = boolToInt(c.ConfigList[i].IsDir) > boolToInt(c.ConfigList[j].IsDir)
     case "enabled":
         flag = boolToInt(c.ConfigList[i].Enabled) > boolToInt(c.ConfigList[j].Enabled)
-    case "site_category_id":
-        flag = c.ConfigList[i].SiteCategoryID > c.ConfigList[j].SiteCategoryID
+    case "env_group_id":
+        flag = c.ConfigList[i].EnvGroupID > c.ConfigList[j].EnvGroupID
     }
 
     if c.Order == "asc" {
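For context, the new "env_group_id" sort key follows the existing ConfigsSort pattern: compute a greater-than flag for the chosen column, then (presumably, since the branch body lies outside this hunk) invert it when Order is "asc". Below is a standalone sketch of that comparator pattern using hypothetical types, not the repository's actual struct:

package main

import (
    "fmt"
    "sort"
)

type item struct {
    Name       string
    EnvGroupID uint64
}

// byEnvGroup mirrors the ConfigsSort idea: compute a "greater than" flag,
// then invert it when ascending order is requested.
type byEnvGroup struct {
    items []item
    order string // "asc" or "desc"
}

func (s byEnvGroup) Len() int      { return len(s.items) }
func (s byEnvGroup) Swap(i, j int) { s.items[i], s.items[j] = s.items[j], s.items[i] }
func (s byEnvGroup) Less(i, j int) bool {
    flag := s.items[i].EnvGroupID > s.items[j].EnvGroupID
    if s.order == "asc" {
        flag = !flag
    }
    return flag
}

func main() {
    list := []item{{"a", 2}, {"b", 1}, {"c", 3}}
    sort.Sort(byEnvGroup{items: list, order: "asc"})
    fmt.Println(list) // [{b 1} {a 2} {c 3}]
}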
internal/migrate/1.site_category_to_env_group.go (new file, 47 lines)
@@ -0,0 +1,47 @@
package migrate

import (
    "github.com/0xJacky/Nginx-UI/model"
    "github.com/go-gormigrate/gormigrate/v2"
    "gorm.io/gorm"
)

var SiteCategoryToEnvGroup = &gormigrate.Migration{
    ID: "20250405000001",
    Migrate: func(tx *gorm.DB) error {
        // Step 1: Create new env_groups table
        if err := tx.Migrator().AutoMigrate(&model.EnvGroup{}); err != nil {
            return err
        }

        // Step 2: Copy data from site_categories to env_groups
        if tx.Migrator().HasTable("site_categories") {
            var siteCategories []map[string]interface{}
            if err := tx.Table("site_categories").Find(&siteCategories).Error; err != nil {
                return err
            }

            for _, sc := range siteCategories {
                if err := tx.Table("env_groups").Create(sc).Error; err != nil {
                    return err
                }
            }

            // Step 3: Update sites table to use env_group_id instead of site_category_id
            if tx.Migrator().HasColumn("sites", "site_category_id") {
                // First add the new column if it doesn't exist
                if !tx.Migrator().HasColumn("sites", "env_group_id") {
                    if err := tx.Exec("ALTER TABLE sites ADD COLUMN env_group_id bigint").Error; err != nil {
                        return err
                    }
                }

                // Copy the values from site_category_id to env_group_id
                if err := tx.Exec("UPDATE sites SET env_group_id = site_category_id").Error; err != nil {
                    return err
                }
            }
        }
        return nil
    },
}
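The migration references model.EnvGroup, which is not included in this excerpt. Judging from how it is used elsewhere in the diff (AutoMigrate above, Preload(s.EnvGroup), and EnvGroup.SyncNodeIds in the sync code below), a plausible shape is sketched here; apart from SyncNodeIds and a primary key, every field and tag is an assumption, not the repository's actual definition:

package model

// Hypothetical sketch of the EnvGroup model used by the migration above.
// Only SyncNodeIds (read by the sync code) is visible in this diff; the
// Name field and the serializer tag are assumptions.
type EnvGroup struct {
    ID          uint64   `gorm:"primaryKey" json:"id"`
    Name        string   `json:"name"`
    SyncNodeIds []uint64 `gorm:"serializer:json" json:"sync_node_ids"`
}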
internal/migrate/2.fix_site_and_stream_unique.go (new file, 64 lines)
@@ -0,0 +1,64 @@
package migrate

import (
    "github.com/0xJacky/Nginx-UI/model"
    "github.com/go-gormigrate/gormigrate/v2"
    "gorm.io/gorm"
)

var FixSiteAndStreamPathUnique = &gormigrate.Migration{
    ID: "20250405000003",
    Migrate: func(tx *gorm.DB) error {
        // Check if sites table exists
        if tx.Migrator().HasTable(&model.Site{}) {
            // Find duplicated paths in sites table
            var siteDuplicates []struct {
                Path  string
                Count int
            }

            if err := tx.Model(&model.Site{}).
                Select("path, count(*) as count").
                Group("path").
                Having("count(*) > 1").
                Find(&siteDuplicates).Error; err != nil {
                return err
            }

            // For each duplicated path, delete all but the one with max id
            for _, dup := range siteDuplicates {
                if err := tx.Exec(`DELETE FROM sites WHERE path = ? AND id NOT IN
                    (SELECT max(id) FROM sites WHERE path = ?)`, dup.Path, dup.Path).Error; err != nil {
                    return err
                }
            }
        }

        // Check if streams table exists
        if tx.Migrator().HasTable(&model.Stream{}) {
            // Find duplicated paths in streams table
            var streamDuplicates []struct {
                Path  string
                Count int
            }

            if err := tx.Model(&model.Stream{}).
                Select("path, count(*) as count").
                Group("path").
                Having("count(*) > 1").
                Find(&streamDuplicates).Error; err != nil {
                return err
            }

            // For each duplicated path, delete all but the one with max id
            for _, dup := range streamDuplicates {
                if err := tx.Exec(`DELETE FROM streams WHERE path = ? AND id NOT IN
                    (SELECT max(id) FROM streams WHERE path = ?)`, dup.Path, dup.Path).Error; err != nil {
                    return err
                }
            }
        }

        return nil
    },
}
internal/migrate/3.rename_auths_to_users.go (new file, 44 lines)
@@ -0,0 +1,44 @@
package migrate

import (
    "github.com/go-gormigrate/gormigrate/v2"
    "gorm.io/gorm"
)

var RenameAuthsToUsers = &gormigrate.Migration{
    ID: "20250405000002",
    Migrate: func(tx *gorm.DB) error {
        // Check if both tables exist
        hasAuthsTable := tx.Migrator().HasTable("auths")
        hasUsersTable := tx.Migrator().HasTable("users")

        if hasAuthsTable {
            if hasUsersTable {
                // Both tables exist - we need to check if users table is empty
                var count int64
                if err := tx.Table("users").Count(&count).Error; err != nil {
                    return err
                }

                if count > 0 {
                    // Users table has data - drop auths table as users table is now the source of truth
                    return tx.Migrator().DropTable("auths")
                } else {
                    // Users table is empty - drop it and rename auths to users
                    return tx.Transaction(func(ttx *gorm.DB) error {
                        if err := ttx.Migrator().DropTable("users"); err != nil {
                            return err
                        }
                        return ttx.Migrator().RenameTable("auths", "users")
                    })
                }
            } else {
                // Only auths table exists - simply rename it
                return tx.Migrator().RenameTable("auths", "users")
            }
        }

        // If auths table doesn't exist, nothing to do
        return nil
    },
}
internal/migrate/migrate.go (new file, 14 lines)
@@ -0,0 +1,14 @@
package migrate

import (
    "github.com/go-gormigrate/gormigrate/v2"
)

var Migrations = []*gormigrate.Migration{
    SiteCategoryToEnvGroup,
    RenameAuthsToUsers,
}

var BeforeAutoMigrate = []*gormigrate.Migration{
    FixSiteAndStreamPathUnique,
}
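migrate.go only declares the two migration lists; the code that applies them is not part of this diff. Below is a rough sketch of how they could be wired up with gormigrate. The helper, its package, and the ordering around GORM's AutoMigrate are assumptions implied by the names BeforeAutoMigrate and Migrations, not code from this commit:

package database

import (
    "github.com/0xJacky/Nginx-UI/internal/migrate"
    "github.com/go-gormigrate/gormigrate/v2"
    "gorm.io/gorm"
)

// applyMigrations is a hypothetical helper, not code from this commit.
func applyMigrations(db *gorm.DB, models ...interface{}) error {
    // Deduplicate site/stream paths before AutoMigrate (re)creates unique indexes.
    if err := gormigrate.New(db, gormigrate.DefaultOptions, migrate.BeforeAutoMigrate).Migrate(); err != nil {
        return err
    }
    if err := db.AutoMigrate(models...); err != nil {
        return err
    }
    // Schema/data migrations such as SiteCategoryToEnvGroup run afterwards.
    return gormigrate.New(db, gormigrate.DefaultOptions, migrate.Migrations).Migrate()
}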
@@ -24,11 +24,6 @@ func init() {
 
 // scanForLogDirectives scans and parses configuration files for log directives
 func scanForLogDirectives(configPath string, content []byte) error {
-    // Clear previous scan results when scanning the main config
-    if configPath == nginx.GetConfPath("", "nginx.conf") {
-        ClearLogCache()
-    }
-
     // Find log directives using regex
     matches := logDirectiveRegex.FindAllSubmatch(content, -1)
 
@@ -17,7 +17,7 @@ import (
 )
 
 // Save saves a site configuration file
-func Save(name string, content string, overwrite bool, siteCategoryId uint64, syncNodeIds []uint64) (err error) {
+func Save(name string, content string, overwrite bool, envGroupId uint64, syncNodeIds []uint64) (err error) {
     path := nginx.GetConfPath("sites-available", name)
     if !overwrite && helper.FileExists(path) {
         return ErrDstFileExists
@@ -46,10 +46,10 @@ func Save(name string, content string, overwrite bool, siteCategoryId uint64, syncNodeIds []uint64) (err error) {
 
     s := query.Site
     _, err = s.Where(s.Path.Eq(path)).
-        Select(s.SiteCategoryID, s.SyncNodeIDs).
+        Select(s.EnvGroupID, s.SyncNodeIDs).
         Updates(&model.Site{
-            SiteCategoryID: siteCategoryId,
-            SyncNodeIDs:    syncNodeIds,
+            EnvGroupID:  envGroupId,
+            SyncNodeIDs: syncNodeIds,
         })
     if err != nil {
         return
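Callers of Save now pass an env group ID where they previously passed a site category ID. A hypothetical call site follows; the package path and values are illustrative, only the parameter order comes from the diff above:

package api

import "github.com/0xJacky/Nginx-UI/internal/site"

// saveExample shows the updated argument list of site.Save.
func saveExample(content string, envGroupID uint64) error {
    // envGroupID replaces the former siteCategoryId argument.
    return site.Save("example.com.conf", content, true, envGroupID, []uint64{1, 2})
}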
@@ -16,7 +16,7 @@ func getSyncNodes(name string) (nodes []*model.Environment) {
     configFilePath := nginx.GetConfPath("sites-available", name)
     s := query.Site
     site, err := s.Where(s.Path.Eq(configFilePath)).
-        Preload(s.SiteCategory).First()
+        Preload(s.EnvGroup).First()
     if err != nil {
         logger.Error(err)
         return
@@ -24,8 +24,8 @@ func getSyncNodes(name string) (nodes []*model.Environment) {
 
     syncNodeIds := site.SyncNodeIDs
     // inherit sync node ids from site category
-    if site.SiteCategory != nil {
-        syncNodeIds = append(syncNodeIds, site.SiteCategory.SyncNodeIds...)
+    if site.EnvGroup != nil {
+        syncNodeIds = append(syncNodeIds, site.EnvGroup.SyncNodeIds...)
     }
     syncNodeIds = lo.Uniq(syncNodeIds)
 
@@ -15,13 +15,18 @@ import (
 func getSyncNodes(name string) (nodes []*model.Environment) {
     configFilePath := nginx.GetConfPath("streams-available", name)
     s := query.Stream
-    stream, err := s.Where(s.Path.Eq(configFilePath)).First()
+    stream, err := s.Where(s.Path.Eq(configFilePath)).
+        Preload(s.EnvGroup).First()
     if err != nil {
         logger.Error(err)
         return
     }
 
     syncNodeIds := stream.SyncNodeIDs
+    // inherit sync node ids from site category
+    if stream.EnvGroup != nil {
+        syncNodeIds = append(syncNodeIds, stream.EnvGroup.SyncNodeIds...)
+    }
 
     e := query.Environment
     nodes, err = e.Where(e.ID.In(syncNodeIds...)).Find()
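Both getSyncNodes variants now merge the node IDs stored on the record with those inherited from its env group. A standalone sketch of that merge-and-dedup step (values are made up; only the append pattern and lo.Uniq come from the diff):

package main

import (
    "fmt"

    "github.com/samber/lo"
)

func main() {
    syncNodeIds := []uint64{1, 3}  // IDs stored on the site/stream record
    groupNodeIds := []uint64{3, 5} // IDs inherited from the attached env group

    syncNodeIds = append(syncNodeIds, groupNodeIds...)
    syncNodeIds = lo.Uniq(syncNodeIds) // drop the duplicate 3, keep first-seen order

    fmt.Println(syncNodeIds) // [1 3 5]
}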