first commit
This commit is contained in:
commit
95e6c3439f
39
gen/main.go
Normal file
39
gen/main.go
Normal file
@ -0,0 +1,39 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/models"
|
||||
"log"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// Dynamic SQL
//
// Querier declares dynamic-SQL methods for gorm.io/gen. The comment
// directly above each method is a SQL *template* that gen parses and
// compiles into a generated, type-safe implementation — it is not
// ordinary documentation and must not be reworded.
type Querier interface {
	// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
	FilterWithNameAndRole(name, role string) ([]gen.T, error)
}
|
||||
|
||||
// main configures gorm.io/gen against the local SQLite database and
// emits the type-safe DAO/query code into ../query.
func main() {
	// Generator configuration: context-free API with a package-level
	// default query object plus per-model query interfaces.
	g := gen.NewGenerator(gen.Config{
		OutPath: "../query",
		Mode: gen.WithoutContext|gen.WithDefaultQuery|gen.WithQueryInterface, // generate mode
	})
	// SQLite file the generator introspects for schema information.
	dbName:="shelfly_db.db"

	db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})

	if err != nil {
		log.Fatalf("Erreur de connexion à la base de données : %v", err)
	}
	g.UseDB(db) // reuse your gorm db

	// Generate basic type-safe DAO API for struct `model.User` following conventions
	g.ApplyBasic(models.User{},models.Files{})

	// Generate Type Safe API with Dynamic SQL defined on Querier interface for `model.User` and `model.Company`
	g.ApplyInterface(func(Querier){},models.User{},models.Files{},models.LibrarySection{},models.MediaItem{},models.MediaPart{},models.MetadataItem{},models.SectionLocation{},models.Tag{},models.Tagging{},models.PathDownload{})

	// Generate the code
	g.Execute()
}
|
||||
0
gen/shelfly_db.db
Normal file
0
gen/shelfly_db.db
Normal file
33
go.mod
Normal file
33
go.mod
Normal file
@ -0,0 +1,33 @@
|
||||
module app/shelfly
|
||||
|
||||
go 1.23.4
|
||||
|
||||
require (
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
||||
github.com/gorilla/mux v1.8.1
|
||||
gorm.io/gorm v1.25.12
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/go-sql-driver/mysql v1.8.1 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.22 // indirect
|
||||
golang.org/x/mod v0.22.0 // indirect
|
||||
golang.org/x/sync v0.10.0 // indirect
|
||||
golang.org/x/tools v0.28.0 // indirect
|
||||
gorm.io/datatypes v1.2.5 // indirect
|
||||
gorm.io/hints v1.1.2 // indirect
|
||||
gorm.io/plugin/dbresolver v1.5.3 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
golang.org/x/crypto v0.31.0
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
gorm.io/driver/mysql v1.5.7
|
||||
gorm.io/driver/sqlite v1.5.7
|
||||
gorm.io/gen v0.3.26
|
||||
)
|
||||
109
go.sum
Normal file
109
go.sum
Normal file
@ -0,0 +1,109 @@
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc=
|
||||
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
|
||||
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
|
||||
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.1.2/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/mattn/go-sqlite3 v1.14.8/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
||||
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
||||
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
|
||||
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
|
||||
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
|
||||
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
|
||||
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c h1:jWdr7cHgl8c/ua5vYbR2WhSp+NQmzhsj0xoY3foTzW8=
|
||||
gorm.io/datatypes v1.1.1-0.20230130040222-c43177d3cf8c/go.mod h1:SH2K9R+2RMjuX1CkCONrPwoe9JzVv2hkQvEu4bXGojE=
|
||||
gorm.io/datatypes v1.2.5 h1:9UogU3jkydFVW1bIVVeoYsTpLRgwDVW3rHfJG6/Ek9I=
|
||||
gorm.io/datatypes v1.2.5/go.mod h1:I5FUdlKpLb5PMqeMQhm30CQ6jXP8Rj89xkTeCSAaAD4=
|
||||
gorm.io/driver/mysql v1.4.3/go.mod h1:sSIebwZAVPiT+27jK9HIwvsqOGKx3YMPmrA3mBJR10c=
|
||||
gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo=
|
||||
gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM=
|
||||
gorm.io/driver/sqlite v1.1.6/go.mod h1:W8LmC/6UvVbHKah0+QOC7Ja66EaZXHwUTjgXY8YNWX8=
|
||||
gorm.io/driver/sqlite v1.5.0/go.mod h1:kDMDfntV9u/vuMmz8APHtHF0b4nyBB7sfCieC6G8k8I=
|
||||
gorm.io/driver/sqlite v1.5.7 h1:8NvsrhP0ifM7LX9G4zPB97NwovUakUxc+2V2uuf3Z1I=
|
||||
gorm.io/driver/sqlite v1.5.7/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4=
|
||||
gorm.io/gen v0.3.26 h1:sFf1j7vNStimPRRAtH4zz5NiHM+1dr6eA9aaRdplyhY=
|
||||
gorm.io/gen v0.3.26/go.mod h1:a5lq5y3w4g5LMxBcw0wnO6tYUCdNutWODq5LrIt75LE=
|
||||
gorm.io/gorm v1.21.15/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0=
|
||||
gorm.io/gorm v1.22.2/go.mod h1:F+OptMscr0P2F2qU97WT1WimdH9GaQPoDW7AYd5i2Y0=
|
||||
gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
|
||||
gorm.io/gorm v1.24.7-0.20230306060331-85eaf9eeda11/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k=
|
||||
gorm.io/gorm v1.25.0/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k=
|
||||
gorm.io/gorm v1.25.2/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k=
|
||||
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
|
||||
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
|
||||
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
|
||||
gorm.io/hints v1.1.0 h1:Lp4z3rxREufSdxn4qmkK3TLDltrM10FLTHiuqwDPvXw=
|
||||
gorm.io/hints v1.1.0/go.mod h1:lKQ0JjySsPBj3uslFzY3JhYDtqEwzm+G1hv8rWujB6Y=
|
||||
gorm.io/hints v1.1.2 h1:b5j0kwk5p4+3BtDtYqqfY+ATSxjj+6ptPgVveuynn9o=
|
||||
gorm.io/hints v1.1.2/go.mod h1:/ARdpUHAtyEMCh5NNi3tI7FsGh+Cj/MIUlvNxCNCFWg=
|
||||
gorm.io/plugin/dbresolver v1.5.0 h1:XVHLxh775eP0CqVh3vcfJtYqja3uFl5Wr3cKlY8jgDY=
|
||||
gorm.io/plugin/dbresolver v1.5.0/go.mod h1:l4Cn87EHLEYuqUncpEeTC2tTJQkjngPSD+lo8hIvcT0=
|
||||
gorm.io/plugin/dbresolver v1.5.3 h1:wFwINGZZmttuu9h7XpvbDHd8Lf9bb8GNzp/NpAMV2wU=
|
||||
gorm.io/plugin/dbresolver v1.5.3/go.mod h1:TSrVhaUg2DZAWP3PrHlDlITEJmNOkL0tFTjvTEsQ4XE=
|
||||
85
handlers/main.go
Normal file
85
handlers/main.go
Normal file
@ -0,0 +1,85 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
"github.com/golang-jwt/jwt"
|
||||
)
|
||||
// secretKey signs/verifies the session JWTs.
// NOTE(review): hard-coded and duplicated in internal/jwt — move to
// configuration and share a single definition.
var secretKey = []byte("secret-key")

// AuthMiddleware wraps next and rejects any request that does not carry
// a valid, unexpired HMAC-signed JWT in the "token" cookie. On success
// the token's "username" claim is stored in the request context under
// the key "username" before next runs.
func AuthMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {

		// Fetch the session cookie.
		cookie, err := r.Cookie("token")
		if err != nil {
			// Missing cookie or it could not be read.
			fmt.Println("Erreur : cookie 'token' manquant ou illisible :", err)
			http.Error(w, "Token manquant ou invalide", http.StatusUnauthorized)
			return
		}

		tokenString := cookie.Value
		// fmt.Println("Token reçu :", tokenString)

		// Parse and verify the token signature.
		token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) {
			// Only accept HMAC-signed tokens (rejects e.g. alg=none).
			if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
				return nil, fmt.Errorf("méthode de signature inattendue : %v", token.Header["alg"])
			}
			return []byte(secretKey), nil // Use the shared secret key.
		})

		if err != nil {
			fmt.Println("Erreur lors de la validation du token :", err)
			http.Error(w, "Token invalide", http.StatusUnauthorized)
			return
		}

		if !token.Valid {
			fmt.Println("Token non valide")
			http.Error(w, "Token invalide", http.StatusUnauthorized)
			return
		}

		// Extract the claims map.
		claims, ok := token.Claims.(jwt.MapClaims)
		if !ok {
			fmt.Println("Erreur : claims introuvables ou invalides")
			http.Error(w, "Token invalide", http.StatusUnauthorized)
			return
		}

		// Explicit expiry check. NOTE(review): jwt.Parse normally
		// validates "exp" already; this re-check additionally rejects
		// tokens that carry no "exp" claim at all.
		if exp, ok := claims["exp"].(float64); ok {
			if time.Now().Unix() > int64(exp) {
				fmt.Println("Erreur : token expiré")
				http.Error(w, "Token expiré", http.StatusUnauthorized)
				return
			}
		} else {
			fmt.Println("Erreur : claim 'exp' introuvable ou invalide")
			http.Error(w, "Token invalide", http.StatusUnauthorized)
			return
		}

		// Extract the 'username' claim (used as the user identifier).
		userID, ok := claims["username"].(string)
		if !ok {
			fmt.Println("Erreur : claim 'username' introuvable ou invalide")
			http.Error(w, "Token invalide", http.StatusUnauthorized)
			return
		}

		fmt.Println("Utilisateur authentifié :", userID)

		// Inject the username into the request context.
		// NOTE(review): a plain string context key can collide with
		// other packages; a private key type is the usual fix (SA1029).
		ctx := context.WithValue(r.Context(), "username", userID)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}
|
||||
|
||||
// streamHandler écrit des messages en boucle
|
||||
BIN
internal/.DS_Store
vendored
Normal file
BIN
internal/.DS_Store
vendored
Normal file
Binary file not shown.
50
internal/crypter/crypter.go
Normal file
50
internal/crypter/crypter.go
Normal file
@ -0,0 +1,50 @@
|
||||
package crypter
|
||||
|
||||
import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/hex"
	"errors"
	"io"
)
|
||||
|
||||
// Encrypt seals plaintext with AES-GCM under key and returns the
// hex-encoded nonce||ciphertext||tag. The key length must be a valid
// AES key size (16, 24 or 32 bytes).
func Encrypt(plaintext string, key string) (string, error) {
	block, err := aes.NewCipher([]byte(key))
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	// A fresh random nonce is prepended so Decrypt can recover it.
	nonce := make([]byte, gcm.NonceSize())
	if _, err := io.ReadFull(rand.Reader, nonce); err != nil {
		return "", err
	}
	sealed := gcm.Seal(nonce, nonce, []byte(plaintext), nil)
	return hex.EncodeToString(sealed), nil
}
|
||||
|
||||
// Decrypt reverses Encrypt: it hex-decodes encryptedData, splits off the
// leading AES-GCM nonce, and authenticates+decrypts the remaining
// ciphertext with key, returning the original plaintext.
func Decrypt(encryptedData string, key string) (string, error) {
	data, err := hex.DecodeString(encryptedData)
	if err != nil {
		return "", err
	}
	block, err := aes.NewCipher([]byte(key))
	if err != nil {
		return "", err
	}
	aesGCM, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonceSize := aesGCM.NonceSize()
	// Guard against malformed input: without this check the slice
	// expression below panics on data shorter than the nonce.
	if len(data) < nonceSize {
		return "", errors.New("ciphertext too short")
	}
	nonce, ciphertext := data[:nonceSize], data[nonceSize:]
	plaintext, err := aesGCM.Open(nil, nonce, ciphertext, nil)
	if err != nil {
		return "", err
	}
	return string(plaintext), nil
}
|
||||
28
internal/db/db.go
Normal file
28
internal/db/db.go
Normal file
@ -0,0 +1,28 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"app/shelfly/internal/models"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func InitDB()*gorm.DB {
|
||||
|
||||
dbName:="shelfly_db.db"
|
||||
// Ouvre une connexion à la base de données
|
||||
db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})
|
||||
if err != nil {
|
||||
panic("failed to connect database")
|
||||
}
|
||||
|
||||
// Migrate the schema
|
||||
db.AutoMigrate(&models.User{},&models.Files{},&models.LibrarySection{},&models.MediaItem{},&models.MediaPart{},&models.MetadataItem{},&models.SectionLocation{},&models.Tag{},&models.Tagging{},&models.PathDownload{})
|
||||
|
||||
fmt.Println("Connexion réussie à MySQL !")
|
||||
fmt.Println("Auto migration completed")
|
||||
return db
|
||||
|
||||
}
|
||||
|
||||
|
||||
284
internal/download/download.go
Normal file
284
internal/download/download.go
Normal file
@ -0,0 +1,284 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/models"
|
||||
"app/shelfly/query"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"github.com/gorilla/mux"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func CreateSavePath(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
var pathDownload models.PathDownload
|
||||
// Décoder les données de la requête
|
||||
|
||||
if err := json.NewDecoder(r.Body).Decode(&pathDownload); err != nil {
|
||||
http.Error(w, `{"error": "Invalid JSON format"}`, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
q := query.Use(db)
|
||||
if err := q.PathDownload.Create(&pathDownload); err != nil {
|
||||
http.Error(w, `{"error": "Failed to create path"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Retourner un élément HTML dynamique pour HTMX
|
||||
|
||||
// Récupérer tous les chemins
|
||||
var paths []models.PathDownload
|
||||
if err := db.Find(&paths).Error; err != nil {
|
||||
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
// Construire les lignes HTML
|
||||
var response strings.Builder
|
||||
for _, path := range paths {
|
||||
log.Println(path)
|
||||
response.WriteString(fmt.Sprintf(`
|
||||
<div class="column">
|
||||
|
||||
<form>
|
||||
|
||||
<div id="path-%d" class="path-update grid is-col-min-1">
|
||||
<input class="input is-primary cell" name="id" disabled value="%d"></input>
|
||||
<input class="input is-primary fff cell" name="path" value="%s" disabled></span>
|
||||
<input class="input is-primary fff cell" name="pathName" value="%s" disabled></span>
|
||||
<button class="button is-primary type="button" is-dark" id="btn-path-edit-%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" onclick="enableAllInputPath(%d)">Edit</button>
|
||||
<button class="button is-danger" type="button" id="btn-path-annuler-%d" onclick="disableAllInputPath(%d)" style="display:none" >Annuler</button>
|
||||
<button class="button is-primary is-dark" id="btn-path-valider-%d" hx-put="/api/pathDownload/update/%d" hx-trigger="click[checkGlobalState()]" hx-target="#path-%d" hx-swap="outerHTML" hx-ext="json-enc" style="display:none">Valider</button>
|
||||
<button class="button is-danger" hx-delete="/api/pathDownload/delete/%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" hx-confirm="Are you sure?" hx-swap="outerHTML swap:1s">Delete</button>
|
||||
</div>
|
||||
</form></div>
|
||||
|
||||
`, path.ID, path.ID, path.Path, path.PathName,path.ID,path.ID,path.ID,path.ID ,path.ID,path.ID,path.ID,path.ID, path.ID,path.ID))
|
||||
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
fmt.Fprint(w, response.String())
|
||||
}
|
||||
}
|
||||
|
||||
// ReadAllSavePath returns a handler that lists every PathDownload row,
// rendered as one editable HTML fragment per row for HTMX to display.
func ReadAllSavePath(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")

		// Load every saved path.
		var paths []models.PathDownload
		if err := db.Find(&paths).Error; err != nil {
			http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
			return
		}

		// Build one HTML row per path. The %d placeholders all receive
		// the row ID so each button targets its own fragment.
		var response strings.Builder
		for _, path := range paths {
			response.WriteString(fmt.Sprintf(`
		<div class="column">
			<form>

				<div id="path-%d" class="path-update grid is-col-min-1">
					<input class="input is-primary cell" name="id" disabled value="%d"></input>
					<input class="input is-primary fff cell" name="path" value="%s" disabled></span>
					<input class="input is-primary fff cell" name="pathName" value="%s" disabled></span>
					<button class="button is-primary is-dark" id="btn-path-edit-%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" onclick="enableAllInputPath(%d)">Edit</button>
					<button class="button is-danger" type="button" id="btn-path-annuler-%d" onclick="disableAllInputPath(%d)" style="display:none" >Annuler</button>
					<button class="button is-primary hx-trigger="click[checkGlobalState()]" type="button" is-dark" id="btn-path-valider-%d" hx-put="/api/pathDownload/update/%d" hx-target="#path-%d" hx-swap="outerHTML" hx-ext="json-enc" style="display:none">Valider</button>
					<button class="button is-danger" hx-delete="/api/pathDownload/delete/%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" hx-confirm="Are you sure?" hx-swap="outerHTML swap:1s">Delete</button>
				</div>
			</form></div>

		`, path.ID, path.ID, path.Path, path.PathName,path.ID,path.ID,path.ID,path.ID, path.ID,path.ID,path.ID,path.ID, path.ID,path.ID))
		}

		// Return the assembled HTML rows.
		w.WriteHeader(http.StatusOK)
		fmt.Fprint(w, response.String())
	}
}
|
||||
|
||||
func UpdateSavePath(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
// Récupérer l'ID depuis les paramètres de l'URL
|
||||
id := mux.Vars(r)["id"]
|
||||
|
||||
var pathDownload models.PathDownload
|
||||
log.Println(pathDownload);
|
||||
// Décoder les données de la requête
|
||||
if err := json.NewDecoder(r.Body).Decode(&pathDownload); err != nil {
|
||||
http.Error(w, `{"error": "Invalid JSON format"}`, http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Vérifier si la ressource existe
|
||||
var existingPath models.PathDownload
|
||||
if err := db.First(&existingPath, "id = ?", id).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
http.Error(w, `{"error": "Path not found"}`, http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.Error(w, `{"error": "Failed to retrieve path"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Mettre à jour les champs souhaités
|
||||
existingPath.Path = pathDownload.Path
|
||||
existingPath.PathName = pathDownload.PathName
|
||||
|
||||
if err := db.Save(&existingPath).Error; err != nil {
|
||||
http.Error(w, `{"error": "Failed to update path"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
response := fmt.Sprintf(`
|
||||
|
||||
<form>
|
||||
|
||||
<div id="path-%d" class="path-update grid is-col-min-1">
|
||||
<input class="input is-primary cell" name="id" disabled value="%d"></input>
|
||||
<input class="input is-primary fff cell" name="path" value="%s" disabled></span>
|
||||
<input class="input is-primary fff cell" name="pathName" value="%s" disabled></span>
|
||||
<button class="button is-primary is-dark" id="btn-path-edit-%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" onclick="enableAllInputPath(%d)">Edit</button>
|
||||
<button class="button is-danger" type="button" id="btn-path-annuler-%d" onclick="disableAllInputPath(%d)" style="display:none" >Annuler</button>
|
||||
<button class="button is-primary type="button" is-dark" id="btn-path-valider-%d" hx-put="/api/pathDownload/update/%d" hx-trigger="click[checkGlobalState()]" hx-target="#path-%d" hx-swap="outerHTML" hx-ext="json-enc" style="display:none">Valider</button>
|
||||
<button class="button is-danger" hx-delete="/api/pathDownload/delete/%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" hx-confirm="Are you sure?" hx-swap="outerHTML swap:1s">Delete</button>
|
||||
</div>
|
||||
</form>
|
||||
`, existingPath.ID, existingPath.ID, existingPath.Path, existingPath.PathName, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID, existingPath.ID)
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
fmt.Fprint(w, response)
|
||||
// // Retourner le nouvel élément HTML
|
||||
// var paths []models.PathDownload
|
||||
// if err := db.Find(&paths).Error; err != nil {
|
||||
// http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
|
||||
// return
|
||||
// }
|
||||
|
||||
|
||||
// // Construire les lignes HTML
|
||||
// var response strings.Builder
|
||||
// for _, path := range paths {
|
||||
// log.Println(path)
|
||||
// response.WriteString(fmt.Sprintf(`
|
||||
// <form>
|
||||
|
||||
// <div id="path-%d" class="path-update grid is-col-min-1">
|
||||
// <input class="input is-primary cell" name="id" disabled value="%d"></input>
|
||||
// <input class="input is-primary fff cell" name="path" value="%s" disabled></span>
|
||||
// <input class="input is-primary fff cell" name="pathName" value="%s" disabled></span>
|
||||
// <button class="button is-primary is-dark" type="button" id="btn-path-edit-%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" onclick="enableAllInputPath(%d)">Edit</button>
|
||||
// <button class="button is-danger" id="btn-path-annuler-%d" type="button" onclick="disableAllInputPath(%d)" style="display:none" >Annuler</button>
|
||||
// <button class="button is-primary is-dark" id="btn-path-valider-%d" hx-put="/api/pathDownload/update/%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML" hx-ext="json-enc" style="display:none">Valider</button>
|
||||
// <button class="button is-danger" hx-delete="/api/pathDownload/delete/%d" hx-trigger="click" hx-target="#path-%d" hx-swap="outerHTML">Delete</button>
|
||||
// </div>
|
||||
// </form>
|
||||
|
||||
// `, path.ID, path.ID, path.Path, path.PathName,path.ID,path.ID,path.ID, path.ID,path.ID ,path.ID,path.ID,path.ID, path.ID,path.ID))
|
||||
|
||||
// }
|
||||
w.WriteHeader(http.StatusOK)
|
||||
//fmt.Fprint(w, response.String())
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func DeleteSavePath(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
// Récupérer l'ID depuis les paramètres de l'URL
|
||||
id := mux.Vars(r)["id"]
|
||||
|
||||
// Vérifier si la ressource existe
|
||||
var pathDownload models.PathDownload
|
||||
if err := db.First(&pathDownload, "id = ?", id).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
http.Error(w, `{"error": "Path not found"}`, http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.Error(w, `{"error": "Failed to retrieve path"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Supprimer la ressource
|
||||
if err := db.Delete(&pathDownload).Error; err != nil {
|
||||
http.Error(w, `{"error": "Failed to delete path"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Répondre avec succès
|
||||
w.WriteHeader(http.StatusOK)
|
||||
fmt.Fprintf(w, ` <div class="notification is-primary" id="notificationPath" style="display:block">
|
||||
<button class="delete" type="button" onclick="hide('notificationPath')"></button>
|
||||
Delete ok
|
||||
</div>`)
|
||||
}
|
||||
}
|
||||
func IsPathValid(path string) error {
|
||||
if path == "" {
|
||||
return errors.New("path is empty")
|
||||
}
|
||||
|
||||
info, err := os.Stat(path)
|
||||
if os.IsNotExist(err) {
|
||||
return errors.New("path does not exist")
|
||||
}
|
||||
if err != nil {
|
||||
return errors.New("unable to access path: " + err.Error())
|
||||
}
|
||||
|
||||
if !info.IsDir() && !info.Mode().IsRegular() {
|
||||
return errors.New("path is neither a file nor a directory")
|
||||
}
|
||||
|
||||
return nil // Path is valid
|
||||
}
|
||||
|
||||
// PathValidationHandler handles HTTP requests to validate a path
|
||||
func PathValidationHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Error(w, "Invalid request method", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
var requestBody struct {
|
||||
Path string `json:"path"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(r.Body).Decode(&requestBody); err != nil {
|
||||
http.Error(w, "Invalid request body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
err := IsPathValid(requestBody.Path)
|
||||
response := map[string]string{
|
||||
"path": requestBody.Path,
|
||||
"status": "valid",
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
response["status"] = "invalid"
|
||||
response["error"] = err.Error()
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if err := json.NewEncoder(w).Encode(response); err != nil {
|
||||
http.Error(w, "Failed to encode response", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
79
internal/jwt/jwtFunction.go
Normal file
79
internal/jwt/jwtFunction.go
Normal file
@ -0,0 +1,79 @@
|
||||
package jwt
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
"github.com/golang-jwt/jwt/v5"
|
||||
)
|
||||
|
||||
/*
|
||||
Dans le snippet de code ci-dessus, nous importons les colis nécessaires,
|
||||
y compris github.com/golang-jwt/jwt/v5.
|
||||
Nous créons un nouveau jeton JWT en utilisant le jwt.NewWithClaims()fonction.
|
||||
Nous spécifions la méthode de signature comme HS256 et des informations pertinentes telles que le nom d'utilisateur
|
||||
et la date d'expiration du jeton. Ensuite, nous signons le jeton avec une clé secrète et retournons
|
||||
le jeton généré comme une chaîne.
|
||||
*/
|
||||
var secretKey = []byte("secret-key")
|
||||
|
||||
func CreateToken(username string) (string, error) {
|
||||
token := jwt.NewWithClaims(jwt.SigningMethodHS256,
|
||||
jwt.MapClaims{
|
||||
"username": username,
|
||||
"exp": time.Now().Add(time.Hour * 24).Unix(),
|
||||
})
|
||||
|
||||
tokenString, err := token.SignedString(secretKey)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
fmt.Sprintf(tokenString)
|
||||
return tokenString, nil
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
Dans le snippet de code ci-dessus, nous utilisons le jwt.Parse()fonctionner pour
|
||||
analyser et vérifier le jeton. Nous fournissons une fonction de rappel pour récupérer
|
||||
la clé secrète utilisée pour signer le jeton. Si le jeton est valide, nous continuons à traiter la demande;
|
||||
sinon, nous renvoyons une erreur indiquant que le jeton est invalide.
|
||||
*/
|
||||
func verifyToken(tokenString string) error {
|
||||
token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) {
|
||||
return secretKey, nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if !token.Valid {
|
||||
return fmt.Errorf("invalid token")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
|
||||
func ProtectedHandler(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
tokenString := r.Header.Get("Authorization")
|
||||
if tokenString == "" {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
fmt.Fprint(w, "Missing authorization header")
|
||||
return
|
||||
}
|
||||
tokenString = tokenString[len("Bearer "):]
|
||||
|
||||
err := verifyToken(tokenString)
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
fmt.Fprint(w, "Invalid token")
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Fprint(w, "Welcome to the the protected area")
|
||||
|
||||
}
|
||||
184
internal/library/library.go
Normal file
184
internal/library/library.go
Normal file
@ -0,0 +1,184 @@
|
||||
package library
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/models"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
"github.com/gorilla/mux"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
|
||||
|
||||
// ScanFolder returns a handler that walks the directory referenced by the
// PathDownload row whose id comes from the {id} URL parameter, looks each
// regular file up on the TMDB movie-search API, and (in currently
// unreachable code, see below) inserts library/metadata/media rows.
//
// NOTE(review): this handler still contains debugging scaffolding — it calls
// log.Fatalf("Lancement api") after the first API lookup, which terminates
// the entire server process, making every db.Create below it unreachable.
// It also embeds a TMDB bearer token in source, and ignores the errors of
// http.NewRequest, http.DefaultClient.Do and io.ReadAll (res may be nil if
// Do fails, which would panic at res.Body). Left byte-identical pending a
// decision on the intended behavior.
func ScanFolder(db *gorm.DB)http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")

		// Fetch the PathDownload row for the {id} URL parameter.
		id := mux.Vars(r)["id"]
		var existingPath models.PathDownload
		if err := db.First(&existingPath, "id = ?", id).Error; err != nil {
			if err == gorm.ErrRecordNotFound {
				http.Error(w, `{"error": "Path not found"}`, http.StatusNotFound)
				return
			}
			http.Error(w, `{"error": "Failed to retrieve path"}`, http.StatusInternalServerError)
			return
		}
		// fmt.Println(existingPath.Path)
		// info, _ := os.Stat(existingPath.Path)
		// fmt.Println(info)

		// Walk the stored path; the callback runs once per file/directory.
		err := filepath.Walk(existingPath.Path, func(path string, info os.FileInfo, err error) error {
			if err != nil {
				fmt.Println(err)
				return err
			}

			fmt.Printf("dir: %v: name: %s\n", info.IsDir(), path)
			if(!info.IsDir()){
				//log.Fatalf("Lancement get info")

				fmt.Printf("%s", info.Name()+"\n")
				// Derive a search query from the file name: strip release
				// metadata, then encode spaces as '+' for the query string.
				str := info.Name()
				str =cleanMovieName(str)
				str1 := strings.Replace(str, " ", "+", -1)
				url := "https://api.themoviedb.org/3/search/movie?query="+str1+"&language=fr"
				fmt.Println(string(url))
				// NOTE(review): error ignored; req is nil only on a bad
				// method/URL, but the Do error below is the dangerous one.
				req, _ := http.NewRequest("GET", url, nil)

				req.Header.Add("accept", "application/json")
				// NOTE(review): hard-coded API credential — move to config.
				req.Header.Add("Authorization", "Bearer eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOiIzM2VlNTBlOGJlNGQ4YjFkMTYwOTgyMjFhMmEyMjgxOSIsIm5iZiI6MTYwMDc1OTQzOS41NjksInN1YiI6IjVmNjlhNjhmYTZlMmQyMDAzODU5OTVlYiIsInNjb3BlcyI6WyJhcGlfcmVhZCJdLCJ2ZXJzaW9uIjoxfQ.wJUlv5oiJhyB7uwb6mcYEnmKh_bh6vC0u0kBuz6ZsGk")

				// NOTE(review): error ignored — res is nil if the request
				// fails, and the deferred Close below would then panic.
				res, _ := http.DefaultClient.Do(req)

				// defer inside the walk callback closure: fires when this
				// callback invocation returns (i.e. once per file).
				defer res.Body.Close()
				body, _ := io.ReadAll(res.Body)

				// Decode the TMDB search response generically.
				var result map[string]interface{}
				err = json.Unmarshal(body, &result)
				if err != nil {
					log.Fatalf("Erreur lors du décodage de la réponse JSON: %v", err)
				}

				// Print the raw result content.
				//fmt.Printf("Résultat brut : %+v\n", result)
				// Only the first entry of "results" is inspected.
				if results, ok := result["results"].([]interface{}); ok {
					for _, item := range results {
						if movie, ok := item.(map[string]interface{}); ok {
							fmt.Println(movie)
							// title := movie["title"]
							// releaseDate := movie["release_date"]
							// overview := movie["overview"]

							//fmt.Printf("Titre: %v, Date de sortie: %v, Résumé: %v\n", title, releaseDate, overview)
							break;
						}
					}
				}
				// NOTE(review): debug leftover — log.Fatalf exits the whole
				// process here; everything below in this branch is dead code.
				log.Fatalf("Lancement api")

				// Hard-coded demo rows (unreachable while the Fatalf above
				// remains).
				section := models.LibrarySection{
					Name: "Films",
					SectionType: 1,
					Language: "fr",
					UUID: "section-uuid-123",
					CreatedAt: time.Now().String(),
					UpdatedAt: time.Now().String(),
				}
				if err := db.Create(&section).Error; err != nil {
					log.Fatalf("Failed to insert LibrarySection: %v", err)
				}

				metadata := models.MetadataItem{
					LibrarySectionID: section.ID,
					MetadataType: 1,
					GUID: "film-guid-123",
					Title: "Inception",
					TitleSort: "Inception",
					OriginalTitle: "Inception",
					Studio: "Warner Bros",
					Rating: 8.8,
					ContentRating: "PG-13",
					Tagline: "Your mind is the scene of the crime.",
					Summary: "A skilled thief is offered a chance to erase his criminal past.",
					Index: 1,
					Duration: 8880,
					ReleaseDate: time.Date(2010, 7, 16, 0, 0, 0, 0, time.UTC).String(),
					CreatedAt: time.Now().String(),
					UpdatedAt: time.Now().String(),
				}
				if err := db.Create(&metadata).Error; err != nil {
					log.Fatalf("Failed to insert MetadataItem: %v", err)
				}

				mediaItem := models.MediaItem{
					MetadataItemID: metadata.ID,
					Duration: 8880,
					Bitrate: 3000,
					Width: 1920,
					Height: 1080,
					AspectRatio: 16.0 / 9.0,
					AudioCodec: "AAC",
					VideoCodec: "H264",
					Container: "MP4",
					CreatedAt: time.Now().String(),
					UpdatedAt: time.Now().String(),
				}
				if err := db.Create(&mediaItem).Error; err != nil {
					log.Fatalf("Failed to insert MediaItem: %v", err)
				}

				mediaPart := models.MediaPart{
					MediaItemID: mediaItem.ID,
					File: "/path/to/inception.mp4",
					Duration: 8880,
					Size: 1500000000,
					Indexes: "1",
					CreatedAt: time.Now().String(),
					UpdatedAt: time.Now().String(),
				}
				if err := db.Create(&mediaPart).Error; err != nil {
					log.Fatalf("Failed to insert MediaPart: %v", err)
				}
				fmt.Println("Film inserted successfully!")

			}
			// api

			return nil
		})
		if err != nil {
			fmt.Println(err)
		}
	}
}
|
||||
// Patterns used by cleanMovieName, compiled once at package init instead of
// on every call (the original recompiled all three regexps per invocation,
// which is needless work when scanning large folders).
var (
	// Trailing file extension, e.g. ".mkv".
	movieExtPattern = regexp.MustCompile(`\.[^.]+$`)
	// Release metadata commonly embedded in file names: years, codecs,
	// resolutions, containers, release-group tags (case-insensitive).
	movieInfoPattern = regexp.MustCompile(`(?i)(\b19[0-9]{2}\b|\b20[0-9]{2}\b|multi|truefrench|french|1080p|720p|4k|bluray|hdlight|x265|x264|h264|wawacity|blue|mkv|avi|mp4|m4v|Vff|WEBRIP|ING|-)`)
	// Runs of whitespace left behind after stripping metadata.
	movieSpacePattern = regexp.MustCompile(`\s+`)
)

// cleanMovieName turns a raw media file name such as
// "Inception.2010.1080p.BluRay.x264.mkv" into a plain title ("Inception")
// suitable for a movie-database search query.
func cleanMovieName(filename string) string {
	// Step 1: drop the file extension.
	filename = movieExtPattern.ReplaceAllString(filename, "")

	// Step 2: release names use dots as word separators.
	filename = strings.ReplaceAll(filename, ".", " ")

	// Step 3: strip technical/release metadata.
	filename = movieInfoPattern.ReplaceAllString(filename, "")

	// Step 4: collapse leftover whitespace.
	filename = strings.TrimSpace(filename)
	filename = movieSpacePattern.ReplaceAllString(filename, " ")

	return filename
}
|
||||
69
internal/login/login.go
Normal file
69
internal/login/login.go
Normal file
@ -0,0 +1,69 @@
|
||||
package login
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/jwt"
|
||||
"app/shelfly/internal/models"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func LoginHandler(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
var u =models.User{}
|
||||
var user =models.User{}
|
||||
json.NewDecoder(r.Body).Decode(&u)
|
||||
fmt.Printf("The user request value %v", u)
|
||||
fmt.Println(u.Email)
|
||||
user=u;
|
||||
|
||||
d :=db.Where("Email = ?", u.Email).First(&user)
|
||||
if d.Error != nil {
|
||||
fmt.Println("Erreur lors de la requête :", d.Error)
|
||||
} else {
|
||||
// Afficher les données récupérées
|
||||
fmt.Printf("Utilisateur trouvé : %+v\n", user)
|
||||
}
|
||||
|
||||
if u.Email != user.Email {
|
||||
// Handle email mismatch
|
||||
fmt.Fprint(w, "Invalid credentials")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if err := bcrypt.CompareHashAndPassword([]byte(user.Password), []byte(u.Password)); err != nil {
|
||||
fmt.Fprint(w, "Invalid credentials")
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
return
|
||||
}
|
||||
tokenString, err := jwt.CreateToken(user.Username)
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
fmt.Errorf("No username found")
|
||||
}
|
||||
|
||||
// 4. Configurer un cookie HTTP-only
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: "token",
|
||||
Value: tokenString,
|
||||
Path: "/",
|
||||
HttpOnly: true, // Empêche l’accès via JS (mitige XSS)
|
||||
Secure: false, // Passez à true en HTTPS
|
||||
// SameSite et Domain peuvent être précisés selon votre config
|
||||
})
|
||||
|
||||
// 5. Réponse JSON (optionnel)
|
||||
w.Header().Add("Hx-Redirect", "/dashboard")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte(`{"message": "Connected"}`))
|
||||
|
||||
return
|
||||
|
||||
}}
|
||||
98
internal/models/models.go
Normal file
98
internal/models/models.go
Normal file
@ -0,0 +1,98 @@
|
||||
package models
|
||||
|
||||
|
||||
|
||||
// User is an application account. Password holds a bcrypt hash — the user
// and login handlers hash before storing and compare with
// bcrypt.CompareHashAndPassword — never the clear-text value.
type User struct {
	ID       uint   `json:"id" gorm:"primaryKey"`
	Username string `json:"username" gorm:"size:255"`
	Name     string `json:"name" gorm:"size:100"`
	Email    string `json:"email" gorm:"unique"`
	Password string `json:"password" gorm:"size:255"`
}

// Files is a stored file entry with a free-form comment and its path.
type Files struct {
	ID      uint   `gorm:"primaryKey"`
	Name    string `gorm:"size:255"`
	Comment string `gorm:"type:text"`
	Path    string `gorm:"type:text"`
}

// LibrarySection is a top-level media library (ScanFolder creates one with
// SectionType 1 and Name "Films" for movies).
// NOTE(review): CreatedAt/UpdatedAt are stored as strings built from
// time.Now().String() — consider time.Time columns instead.
type LibrarySection struct {
	ID          int64  `db:"id"`
	Name        string `db:"name"`
	SectionType int64  `db:"section_type"`
	Language    string `db:"language"`
	UUID        string `db:"uuid"`
	CreatedAt   string `db:"created_at"`
	UpdatedAt   string `db:"updated_at"`
}

// SectionLocation maps a LibrarySection to a root directory on disk.
type SectionLocation struct {
	ID               int64  `db:"id"`
	LibrarySectionID int64  `db:"library_section_id"`
	RootPath         string `db:"root_path"`
	CreatedAt        string `db:"created_at"`
	UpdatedAt        string `db:"updated_at"`
}

// MetadataItem describes one catalogued work (title, studio, rating, …)
// inside a LibrarySection. ParentID allows hierarchies; its exact semantics
// are not established by the code in view — confirm before relying on it.
type MetadataItem struct {
	ID               int64   `db:"id"`
	LibrarySectionID int64   `db:"library_section_id"`
	ParentID         int64   `db:"parent_id"`
	MetadataType     int64   `db:"metadata_type"`
	GUID             string  `db:"guid"`
	Title            string  `db:"title"`
	TitleSort        string  `db:"title_sort"`
	OriginalTitle    string  `db:"original_title"`
	Studio           string  `db:"studio"`
	Rating           float64 `db:"rating"`
	ContentRating    string  `db:"content_rating"`
	Tagline          string  `db:"tagline"`
	Summary          string  `db:"summary"`
	Index            int64   `db:"index"`
	Duration         int64   `db:"duration"`
	ReleaseDate      string  `db:"release_date"`
	CreatedAt        string  `db:"created_at"`
	UpdatedAt        string  `db:"updated_at"`
	UserThumbURL     string  `db:"user_thumb_url"`
}

// MediaItem holds the technical properties (codecs, resolution, bitrate) of
// one encoded version of a MetadataItem.
type MediaItem struct {
	ID             int64   `db:"id"`
	MetadataItemID int64   `db:"metadata_item_id"`
	Duration       int64   `db:"duration"`
	Bitrate        int64   `db:"bitrate"`
	Width          int64   `db:"width"`
	Height         int64   `db:"height"`
	AspectRatio    float64 `db:"aspect_ratio"`
	AudioCodec     string  `db:"audio_codec"`
	VideoCodec     string  `db:"video_codec"`
	Container      string  `db:"container"`
	CreatedAt      string  `db:"created_at"`
	UpdatedAt      string  `db:"updated_at"`
}

// MediaPart is one physical file backing a MediaItem.
type MediaPart struct {
	ID          int64  `db:"id"`
	MediaItemID int64  `db:"media_item_id"`
	File        string `db:"file"`
	Duration    int64  `db:"duration"`
	Size        int64  `db:"size"`
	Indexes     string `db:"indexes"`
	CreatedAt   string `db:"created_at"`
	UpdatedAt   string `db:"updated_at"`
}

// Tag is a label that can be attached to metadata items via Tagging.
type Tag struct {
	ID      int64  `db:"id"`
	Tag     string `db:"tag"`
	TagType int64  `db:"tag_type"`
}

// Tagging is the join row linking a Tag to a MetadataItem.
type Tagging struct {
	ID             int64 `db:"id"`
	MetadataItemID int64 `db:"metadata_item_id"`
	TagID          int64 `db:"tag_id"`
	Index          int64 `db:"index"`
}

// PathDownload is a user-configured download directory; ScanFolder walks the
// directory stored in Path.
type PathDownload struct {
	ID       int64  `db:"id"`
	Path     string `db:"path"`
	PathName string `db:"path_name"`
}
|
||||
100
internal/route/main.go
Normal file
100
internal/route/main.go
Normal file
@ -0,0 +1,100 @@
|
||||
package route
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/download"
|
||||
"app/shelfly/internal/library"
|
||||
"app/shelfly/internal/login"
|
||||
"app/shelfly/internal/users"
|
||||
"app/shelfly/renders"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
// spaHandler groups the paths needed to serve a single-page application:
// the static asset directory and the index file to fall back to.
// NOTE(review): not referenced by any route in this file — confirm it is
// used elsewhere before keeping (or delete it).
type spaHandler struct {
	staticPath string
	indexPath  string
}
|
||||
|
||||
// Routes non protégées
|
||||
// RoutesPublic registers the routes that do not require authentication:
// static assets, the login page and the login/scan API endpoints.
func RoutesPublic(r *mux.Router, bd *gorm.DB) {

	// Static files (CSS, JS, etc.).
	staticDir := "./templates/assets/"
	r.PathPrefix("/templates/assets/").Handler(
		http.StripPrefix("/templates/assets/", http.FileServer(http.Dir(staticDir))),
	)

	// Login page.
	r.HandleFunc("/login", renders.Login)

	// API endpoint used to log in.
	r.HandleFunc("/api/login", login.LoginHandler(bd)).Methods("POST")
	// NOTE(review): the folder-scan endpoint is registered here as PUBLIC —
	// confirm it should not live behind the JWT-protected sub-router.
	r.HandleFunc("/api/scan/{id}", library.ScanFolder(bd)).Methods("GET")

}
|
||||
|
||||
// Routes protégées
|
||||
// RoutesProtected registers the routes that must sit behind the JWT
// middleware (the caller applies it on the sub-router passed as r): views,
// the user CRUD API, the download-path CRUD API and path validation.
func RoutesProtected(r *mux.Router, bd *gorm.DB) {

	// Views and APIs that must be protected.
	r.HandleFunc("/stream", StreamHandler)
	r.HandleFunc("/dashboard", renders.Dashboard(bd))
	r.HandleFunc("/settings", renders.Settings)
	r.HandleFunc("/library", renders.Library)
	r.HandleFunc("/menuLibary", renders.Library)
	r.HandleFunc("/godownloader/downloads", renders.GoDownload)
	r.HandleFunc("/godownloader/linkcollectors", renders.GoDownloadLinkCollectors)
	r.HandleFunc("/godownloader/settings", renders.GoDownloadSetting)
	// User API.
	r.HandleFunc("/api/user/create", users.CreateUser(bd)).Methods("POST")
	r.HandleFunc("/api/user/update/{id}", users.UpdateUser(bd)).Methods("PUT")
	r.HandleFunc("/api/user/delete/{id}", users.DeleteUser(bd)).Methods("DELETE")
	r.HandleFunc("/api/user/all/", users.ReadAllUser(bd)).Methods("GET")
	r.HandleFunc("/api/user/{id}", users.FindUserById(bd)).Methods("GET")

	// Download-path API.
	r.HandleFunc("/api/pathDownload/create", download.CreateSavePath(bd)).Methods("POST")
	r.HandleFunc("/api/pathDownload/update/{id}", download.UpdateSavePath(bd)).Methods("PUT")
	r.HandleFunc("/api/pathDownload/delete/{id}", download.DeleteSavePath(bd)).Methods("DELETE")
	r.HandleFunc("/api/pathDownload/all/", download.ReadAllSavePath(bd)).Methods("GET")

	// Path-check API.
	r.HandleFunc("/validate-path", download.PathValidationHandler)

	// Folder-scan API: no protected route registered yet.

}
|
||||
// StreamHandler serves a Server-Sent Events (SSE) stream that pushes one
// message per second until the client disconnects.
func StreamHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")

	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "Le streaming n’est pas supporté par ce serveur", http.StatusInternalServerError)
		return
	}

	ticker := time.NewTicker(1 * time.Second)
	defer ticker.Stop()

	// Message counter. The original Fprintf used the %d verb with no
	// matching argument — a go vet error that sent "%!d(MISSING)" to every
	// client instead of a number.
	n := 0

	// Loop until the client goes away.
	for {
		select {
		case <-ticker.C:
			n++
			fmt.Fprintf(w, "data: <p>Message #%d</p>\n\n", n)
			flusher.Flush()
		case <-r.Context().Done():
			// The client most likely closed the connection.
			log.Println("Client déconnecté")
			return
		}
	}
}
|
||||
231
internal/users/users.go
Normal file
231
internal/users/users.go
Normal file
@ -0,0 +1,231 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"app/shelfly/internal/models"
|
||||
"app/shelfly/query"
|
||||
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func CreateUser(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
// Vérifier si le corps de la requête est vide
|
||||
if r.Body == nil {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
log.Println("Le corps de la requête est vide")
|
||||
fmt.Fprintf(w, `{"error": "Request body is empty"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Vérifier le Content-Type
|
||||
if r.Header.Get("Content-Type") != "application/json" {
|
||||
w.WriteHeader(http.StatusUnsupportedMediaType)
|
||||
log.Println("Content-Type invalide. Requis: application/json")
|
||||
fmt.Fprintf(w, `{"error": "Content-Type must be application/json"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Lire et décoder le JSON
|
||||
var u models.User
|
||||
if err := json.NewDecoder(r.Body).Decode(&u); err != nil {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
log.Printf("Erreur lors du décodage du JSON : %v", err)
|
||||
fmt.Fprintf(w, `{"error": "Invalid JSON format"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Hacher le mot de passe de l'utilisateur
|
||||
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(u.Password), bcrypt.DefaultCost)
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
log.Printf("Erreur lors du hachage du mot de passe : %v", err)
|
||||
fmt.Fprintf(w, `{"error": "Failed to hash password"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Remplacer le mot de passe par le hachage
|
||||
u.Password = string(hashedPassword)
|
||||
|
||||
// Initialiser le query builder
|
||||
q := query.Use(db)
|
||||
|
||||
// Créer l'utilisateur
|
||||
if err := q.User.Create(&u); err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
log.Printf("Erreur lors de la création de l'utilisateur : %v", err)
|
||||
fmt.Fprintf(w, `{"error": "Failed to create user"}`)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Utilisateur créé avec succès : %v", u)
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
fmt.Fprintf(w, `{"message": "User created successfully"}`)
|
||||
}
|
||||
}
|
||||
|
||||
func UpdateUser(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
// Vérifier si le corps de la requête est vide
|
||||
if r.Body == nil {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
log.Println("Le corps de la requête est vide")
|
||||
fmt.Fprintf(w, `{"error": "Request body is empty"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Vérifier le Content-Type
|
||||
if r.Header.Get("Content-Type") != "application/json" {
|
||||
w.WriteHeader(http.StatusUnsupportedMediaType)
|
||||
log.Println("Content-Type invalide. Requis: application/json")
|
||||
fmt.Fprintf(w, `{"error": "Content-Type must be application/json"}`)
|
||||
return
|
||||
}
|
||||
|
||||
params := mux.Vars(r)
|
||||
q := query.Use(db)
|
||||
id, err := strconv.Atoi(params["id"])
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
u := q.User
|
||||
ctx := context.Background()
|
||||
|
||||
user, err := u.WithContext(ctx).Where(u.ID.Eq(uint(id))).First()
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
log.Printf("Utilisateur update avec succès : %v", user)
|
||||
|
||||
// Lire et décoder le JSON
|
||||
var usr models.User
|
||||
if err := json.NewDecoder(r.Body).Decode(&usr); err != nil {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
log.Printf("Erreur lors du décodage du JSON : %v", err)
|
||||
fmt.Fprintf(w, `{"error": "Invalid JSON format"}`)
|
||||
return
|
||||
}
|
||||
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(usr.Password), bcrypt.DefaultCost)
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
user.Name = usr.Name
|
||||
user.Username = usr.Username
|
||||
user.Email = usr.Email
|
||||
user.Password = string(hashedPassword)
|
||||
|
||||
u.Save(user)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
||||
}
|
||||
}
|
||||
func DeleteUser(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
// Vérifier si le corps de la requête est vide
|
||||
if r.Body == nil {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
log.Println("Le corps de la requête est vide")
|
||||
fmt.Fprintf(w, `{"error": "Request body is empty"}`)
|
||||
return
|
||||
}
|
||||
|
||||
// Vérifier le Content-Type
|
||||
if r.Header.Get("Content-Type") != "application/json" {
|
||||
w.WriteHeader(http.StatusUnsupportedMediaType)
|
||||
log.Println("Content-Type invalide. Requis: application/json")
|
||||
fmt.Fprintf(w, `{"error": "Content-Type must be application/json"}`)
|
||||
return
|
||||
}
|
||||
|
||||
params := mux.Vars(r)
|
||||
q := query.Use(db)
|
||||
id, err := strconv.Atoi(params["id"])
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
u := q.User
|
||||
ctx := context.Background()
|
||||
|
||||
user, err := u.WithContext(ctx).Where(u.ID.Eq(uint(id))).First()
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
u.Delete(user)
|
||||
log.Printf("Utilisateur delete avec succès : %v", user)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
||||
}
|
||||
}
|
||||
func ReadAllUser(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
|
||||
q := query.Use(db)
|
||||
|
||||
data,err :=q.User.Find()
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
log.Printf("Utilisateur read avec succès : %v", data)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
json.NewEncoder(w).Encode(data)
|
||||
|
||||
}
|
||||
}
|
||||
func FindUserById (db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
|
||||
params := mux.Vars(r)
|
||||
q := query.Use(db)
|
||||
id, err := strconv.Atoi(params["id"])
|
||||
log.Printf("Utilisateur ID avec succès : %v", id)
|
||||
|
||||
if err != nil {
|
||||
//panic(err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
u := q.User
|
||||
ctx := context.Background()
|
||||
|
||||
user, err := u.WithContext(ctx).Where(u.ID.Eq(uint(id))).First()
|
||||
|
||||
log.Printf("Utilisateur read avec succès : %v", user)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
json.NewEncoder(w).Encode(user)
|
||||
|
||||
}}
|
||||
34
main.go
Normal file
34
main.go
Normal file
@ -0,0 +1,34 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"app/shelfly/handlers"
|
||||
"app/shelfly/internal/db"
|
||||
"app/shelfly/internal/route"
|
||||
"log"
|
||||
"net/http"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
// main wires the HTTP router, the database connection and the JWT-protected
// sub-router, then starts the server on port 4000.
func main() {
	// 1. Start the main router.
	r := mux.NewRouter()

	// 2. Initialise the database.
	bd := db.InitDB()

	// 3. Public (unauthenticated) routes are mounted on the main router.
	route.RoutesPublic(r, bd)

	// 4. Create a sub-router for the protected routes.
	protected := r.PathPrefix("/").Subrouter()

	// 5. Apply the JWT middleware to that sub-router only.
	protected.Use(handlers.AuthMiddleware)
	// 6. Register the protected routes on that sub-router.
	route.RoutesProtected(protected, bd)

	// 7. Serve on port 4000.
	// NOTE(review): http.ListenAndServe has no timeouts; consider an
	// explicit http.Server with ReadHeaderTimeout. (A blanket WriteTimeout
	// would break the long-lived SSE /stream endpoint — set it carefully.)
	log.Fatal(http.ListenAndServe(":4000", r))
}
|
||||
414
query/files.gen.go
Normal file
414
query/files.gen.go
Normal file
@ -0,0 +1,414 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
func newFiles(db *gorm.DB, opts ...gen.DOOption) files {
|
||||
_files := files{}
|
||||
|
||||
_files.filesDo.UseDB(db, opts...)
|
||||
_files.filesDo.UseModel(&models.Files{})
|
||||
|
||||
tableName := _files.filesDo.TableName()
|
||||
_files.ALL = field.NewAsterisk(tableName)
|
||||
_files.ID = field.NewUint(tableName, "id")
|
||||
_files.Name = field.NewString(tableName, "name")
|
||||
_files.Comment = field.NewString(tableName, "comment")
|
||||
_files.Path = field.NewString(tableName, "path")
|
||||
|
||||
_files.fillFieldMap()
|
||||
|
||||
return _files
|
||||
}
|
||||
|
||||
type files struct {
|
||||
filesDo
|
||||
|
||||
ALL field.Asterisk
|
||||
ID field.Uint
|
||||
Name field.String
|
||||
Comment field.String
|
||||
Path field.String
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
|
||||
func (f files) Table(newTableName string) *files {
|
||||
f.filesDo.UseTable(newTableName)
|
||||
return f.updateTableName(newTableName)
|
||||
}
|
||||
|
||||
func (f files) As(alias string) *files {
|
||||
f.filesDo.DO = *(f.filesDo.As(alias).(*gen.DO))
|
||||
return f.updateTableName(alias)
|
||||
}
|
||||
|
||||
func (f *files) updateTableName(table string) *files {
|
||||
f.ALL = field.NewAsterisk(table)
|
||||
f.ID = field.NewUint(table, "id")
|
||||
f.Name = field.NewString(table, "name")
|
||||
f.Comment = field.NewString(table, "comment")
|
||||
f.Path = field.NewString(table, "path")
|
||||
|
||||
f.fillFieldMap()
|
||||
|
||||
return f
|
||||
}
|
||||
|
||||
func (f *files) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
|
||||
_f, ok := f.fieldMap[fieldName]
|
||||
if !ok || _f == nil {
|
||||
return nil, false
|
||||
}
|
||||
_oe, ok := _f.(field.OrderExpr)
|
||||
return _oe, ok
|
||||
}
|
||||
|
||||
func (f *files) fillFieldMap() {
|
||||
f.fieldMap = make(map[string]field.Expr, 4)
|
||||
f.fieldMap["id"] = f.ID
|
||||
f.fieldMap["name"] = f.Name
|
||||
f.fieldMap["comment"] = f.Comment
|
||||
f.fieldMap["path"] = f.Path
|
||||
}
|
||||
|
||||
func (f files) clone(db *gorm.DB) files {
|
||||
f.filesDo.ReplaceConnPool(db.Statement.ConnPool)
|
||||
return f
|
||||
}
|
||||
|
||||
func (f files) replaceDB(db *gorm.DB) files {
|
||||
f.filesDo.ReplaceDB(db)
|
||||
return f
|
||||
}
|
||||
|
||||
type filesDo struct{ gen.DO }
|
||||
|
||||
type IFilesDo interface {
|
||||
gen.SubQuery
|
||||
Debug() IFilesDo
|
||||
WithContext(ctx context.Context) IFilesDo
|
||||
WithResult(fc func(tx gen.Dao)) gen.ResultInfo
|
||||
ReplaceDB(db *gorm.DB)
|
||||
ReadDB() IFilesDo
|
||||
WriteDB() IFilesDo
|
||||
As(alias string) gen.Dao
|
||||
Session(config *gorm.Session) IFilesDo
|
||||
Columns(cols ...field.Expr) gen.Columns
|
||||
Clauses(conds ...clause.Expression) IFilesDo
|
||||
Not(conds ...gen.Condition) IFilesDo
|
||||
Or(conds ...gen.Condition) IFilesDo
|
||||
Select(conds ...field.Expr) IFilesDo
|
||||
Where(conds ...gen.Condition) IFilesDo
|
||||
Order(conds ...field.Expr) IFilesDo
|
||||
Distinct(cols ...field.Expr) IFilesDo
|
||||
Omit(cols ...field.Expr) IFilesDo
|
||||
Join(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
LeftJoin(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
RightJoin(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
Group(cols ...field.Expr) IFilesDo
|
||||
Having(conds ...gen.Condition) IFilesDo
|
||||
Limit(limit int) IFilesDo
|
||||
Offset(offset int) IFilesDo
|
||||
Count() (count int64, err error)
|
||||
Scopes(funcs ...func(gen.Dao) gen.Dao) IFilesDo
|
||||
Unscoped() IFilesDo
|
||||
Create(values ...*models.Files) error
|
||||
CreateInBatches(values []*models.Files, batchSize int) error
|
||||
Save(values ...*models.Files) error
|
||||
First() (*models.Files, error)
|
||||
Take() (*models.Files, error)
|
||||
Last() (*models.Files, error)
|
||||
Find() ([]*models.Files, error)
|
||||
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Files, err error)
|
||||
FindInBatches(result *[]*models.Files, batchSize int, fc func(tx gen.Dao, batch int) error) error
|
||||
Pluck(column field.Expr, dest interface{}) error
|
||||
Delete(...*models.Files) (info gen.ResultInfo, err error)
|
||||
Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
Updates(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateFrom(q gen.SubQuery) gen.Dao
|
||||
Attrs(attrs ...field.AssignExpr) IFilesDo
|
||||
Assign(attrs ...field.AssignExpr) IFilesDo
|
||||
Joins(fields ...field.RelationField) IFilesDo
|
||||
Preload(fields ...field.RelationField) IFilesDo
|
||||
FirstOrInit() (*models.Files, error)
|
||||
FirstOrCreate() (*models.Files, error)
|
||||
FindByPage(offset int, limit int) (result []*models.Files, count int64, err error)
|
||||
ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
|
||||
Scan(result interface{}) (err error)
|
||||
Returning(value interface{}, columns ...string) IFilesDo
|
||||
UnderlyingDB() *gorm.DB
|
||||
schema.Tabler
|
||||
|
||||
FilterWithNameAndRole(name string, role string) (result []models.Files, err error)
|
||||
}
|
||||
|
||||
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
|
||||
func (f filesDo) FilterWithNameAndRole(name string, role string) (result []models.Files, err error) {
|
||||
var params []interface{}
|
||||
|
||||
var generateSQL strings.Builder
|
||||
params = append(params, name)
|
||||
generateSQL.WriteString("SELECT * FROM files WHERE name = ? ")
|
||||
if role != "" {
|
||||
params = append(params, role)
|
||||
generateSQL.WriteString("AND role = ? ")
|
||||
}
|
||||
|
||||
var executeSQL *gorm.DB
|
||||
executeSQL = f.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
|
||||
err = executeSQL.Error
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (f filesDo) Debug() IFilesDo {
|
||||
return f.withDO(f.DO.Debug())
|
||||
}
|
||||
|
||||
func (f filesDo) WithContext(ctx context.Context) IFilesDo {
|
||||
return f.withDO(f.DO.WithContext(ctx))
|
||||
}
|
||||
|
||||
func (f filesDo) ReadDB() IFilesDo {
|
||||
return f.Clauses(dbresolver.Read)
|
||||
}
|
||||
|
||||
func (f filesDo) WriteDB() IFilesDo {
|
||||
return f.Clauses(dbresolver.Write)
|
||||
}
|
||||
|
||||
func (f filesDo) Session(config *gorm.Session) IFilesDo {
|
||||
return f.withDO(f.DO.Session(config))
|
||||
}
|
||||
|
||||
func (f filesDo) Clauses(conds ...clause.Expression) IFilesDo {
|
||||
return f.withDO(f.DO.Clauses(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Returning(value interface{}, columns ...string) IFilesDo {
|
||||
return f.withDO(f.DO.Returning(value, columns...))
|
||||
}
|
||||
|
||||
func (f filesDo) Not(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Not(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Or(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Or(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Select(conds ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Select(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Where(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Where(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Order(conds ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Order(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Distinct(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Distinct(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Omit(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Omit(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Join(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Join(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) LeftJoin(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.LeftJoin(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) RightJoin(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.RightJoin(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) Group(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Group(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Having(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Having(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Limit(limit int) IFilesDo {
|
||||
return f.withDO(f.DO.Limit(limit))
|
||||
}
|
||||
|
||||
func (f filesDo) Offset(offset int) IFilesDo {
|
||||
return f.withDO(f.DO.Offset(offset))
|
||||
}
|
||||
|
||||
func (f filesDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IFilesDo {
|
||||
return f.withDO(f.DO.Scopes(funcs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Unscoped() IFilesDo {
|
||||
return f.withDO(f.DO.Unscoped())
|
||||
}
|
||||
|
||||
func (f filesDo) Create(values ...*models.Files) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return f.DO.Create(values)
|
||||
}
|
||||
|
||||
func (f filesDo) CreateInBatches(values []*models.Files, batchSize int) error {
|
||||
return f.DO.CreateInBatches(values, batchSize)
|
||||
}
|
||||
|
||||
// Save : !!! underlying implementation is different with GORM
|
||||
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
|
||||
func (f filesDo) Save(values ...*models.Files) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return f.DO.Save(values)
|
||||
}
|
||||
|
||||
func (f filesDo) First() (*models.Files, error) {
|
||||
if result, err := f.DO.First(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Take() (*models.Files, error) {
|
||||
if result, err := f.DO.Take(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Last() (*models.Files, error) {
|
||||
if result, err := f.DO.Last(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Find() ([]*models.Files, error) {
|
||||
result, err := f.DO.Find()
|
||||
return result.([]*models.Files), err
|
||||
}
|
||||
|
||||
func (f filesDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Files, err error) {
|
||||
buf := make([]*models.Files, 0, batchSize)
|
||||
err = f.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
|
||||
defer func() { results = append(results, buf...) }()
|
||||
return fc(tx, batch)
|
||||
})
|
||||
return results, err
|
||||
}
|
||||
|
||||
func (f filesDo) FindInBatches(result *[]*models.Files, batchSize int, fc func(tx gen.Dao, batch int) error) error {
|
||||
return f.DO.FindInBatches(result, batchSize, fc)
|
||||
}
|
||||
|
||||
func (f filesDo) Attrs(attrs ...field.AssignExpr) IFilesDo {
|
||||
return f.withDO(f.DO.Attrs(attrs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Assign(attrs ...field.AssignExpr) IFilesDo {
|
||||
return f.withDO(f.DO.Assign(attrs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Joins(fields ...field.RelationField) IFilesDo {
|
||||
for _, _f := range fields {
|
||||
f = *f.withDO(f.DO.Joins(_f))
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
func (f filesDo) Preload(fields ...field.RelationField) IFilesDo {
|
||||
for _, _f := range fields {
|
||||
f = *f.withDO(f.DO.Preload(_f))
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
func (f filesDo) FirstOrInit() (*models.Files, error) {
|
||||
if result, err := f.DO.FirstOrInit(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) FirstOrCreate() (*models.Files, error) {
|
||||
if result, err := f.DO.FirstOrCreate(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) FindByPage(offset int, limit int) (result []*models.Files, count int64, err error) {
|
||||
result, err = f.Offset(offset).Limit(limit).Find()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if size := len(result); 0 < limit && 0 < size && size < limit {
|
||||
count = int64(size + offset)
|
||||
return
|
||||
}
|
||||
|
||||
count, err = f.Offset(-1).Limit(-1).Count()
|
||||
return
|
||||
}
|
||||
|
||||
func (f filesDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
|
||||
count, err = f.Count()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = f.Offset(offset).Limit(limit).Scan(result)
|
||||
return
|
||||
}
|
||||
|
||||
func (f filesDo) Scan(result interface{}) (err error) {
|
||||
return f.DO.Scan(result)
|
||||
}
|
||||
|
||||
func (f filesDo) Delete(models ...*models.Files) (result gen.ResultInfo, err error) {
|
||||
return f.DO.Delete(models)
|
||||
}
|
||||
|
||||
func (f *filesDo) withDO(do gen.Dao) *filesDo {
|
||||
f.DO = *do.(*gen.DO)
|
||||
return f
|
||||
}
|
||||
175
query/gen.go
Normal file
@ -0,0 +1,175 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
"gorm.io/gen"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
)
|
||||
|
||||
var (
|
||||
Q = new(Query)
|
||||
Files *files
|
||||
LibrarySection *librarySection
|
||||
MediaItem *mediaItem
|
||||
MediaPart *mediaPart
|
||||
MetadataItem *metadataItem
|
||||
PathDownload *pathDownload
|
||||
SectionLocation *sectionLocation
|
||||
Tag *tag
|
||||
Tagging *tagging
|
||||
User *user
|
||||
)
|
||||
|
||||
func SetDefault(db *gorm.DB, opts ...gen.DOOption) {
|
||||
*Q = *Use(db, opts...)
|
||||
Files = &Q.Files
|
||||
LibrarySection = &Q.LibrarySection
|
||||
MediaItem = &Q.MediaItem
|
||||
MediaPart = &Q.MediaPart
|
||||
MetadataItem = &Q.MetadataItem
|
||||
PathDownload = &Q.PathDownload
|
||||
SectionLocation = &Q.SectionLocation
|
||||
Tag = &Q.Tag
|
||||
Tagging = &Q.Tagging
|
||||
User = &Q.User
|
||||
}
|
||||
|
||||
func Use(db *gorm.DB, opts ...gen.DOOption) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: newFiles(db, opts...),
|
||||
LibrarySection: newLibrarySection(db, opts...),
|
||||
MediaItem: newMediaItem(db, opts...),
|
||||
MediaPart: newMediaPart(db, opts...),
|
||||
MetadataItem: newMetadataItem(db, opts...),
|
||||
PathDownload: newPathDownload(db, opts...),
|
||||
SectionLocation: newSectionLocation(db, opts...),
|
||||
Tag: newTag(db, opts...),
|
||||
Tagging: newTagging(db, opts...),
|
||||
User: newUser(db, opts...),
|
||||
}
|
||||
}
|
||||
|
||||
type Query struct {
|
||||
db *gorm.DB
|
||||
|
||||
Files files
|
||||
LibrarySection librarySection
|
||||
MediaItem mediaItem
|
||||
MediaPart mediaPart
|
||||
MetadataItem metadataItem
|
||||
PathDownload pathDownload
|
||||
SectionLocation sectionLocation
|
||||
Tag tag
|
||||
Tagging tagging
|
||||
User user
|
||||
}
|
||||
|
||||
func (q *Query) Available() bool { return q.db != nil }
|
||||
|
||||
func (q *Query) clone(db *gorm.DB) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: q.Files.clone(db),
|
||||
LibrarySection: q.LibrarySection.clone(db),
|
||||
MediaItem: q.MediaItem.clone(db),
|
||||
MediaPart: q.MediaPart.clone(db),
|
||||
MetadataItem: q.MetadataItem.clone(db),
|
||||
PathDownload: q.PathDownload.clone(db),
|
||||
SectionLocation: q.SectionLocation.clone(db),
|
||||
Tag: q.Tag.clone(db),
|
||||
Tagging: q.Tagging.clone(db),
|
||||
User: q.User.clone(db),
|
||||
}
|
||||
}
|
||||
|
||||
func (q *Query) ReadDB() *Query {
|
||||
return q.ReplaceDB(q.db.Clauses(dbresolver.Read))
|
||||
}
|
||||
|
||||
func (q *Query) WriteDB() *Query {
|
||||
return q.ReplaceDB(q.db.Clauses(dbresolver.Write))
|
||||
}
|
||||
|
||||
func (q *Query) ReplaceDB(db *gorm.DB) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: q.Files.replaceDB(db),
|
||||
LibrarySection: q.LibrarySection.replaceDB(db),
|
||||
MediaItem: q.MediaItem.replaceDB(db),
|
||||
MediaPart: q.MediaPart.replaceDB(db),
|
||||
MetadataItem: q.MetadataItem.replaceDB(db),
|
||||
PathDownload: q.PathDownload.replaceDB(db),
|
||||
SectionLocation: q.SectionLocation.replaceDB(db),
|
||||
Tag: q.Tag.replaceDB(db),
|
||||
Tagging: q.Tagging.replaceDB(db),
|
||||
User: q.User.replaceDB(db),
|
||||
}
|
||||
}
|
||||
|
||||
type queryCtx struct {
|
||||
Files IFilesDo
|
||||
LibrarySection ILibrarySectionDo
|
||||
MediaItem IMediaItemDo
|
||||
MediaPart IMediaPartDo
|
||||
MetadataItem IMetadataItemDo
|
||||
PathDownload IPathDownloadDo
|
||||
SectionLocation ISectionLocationDo
|
||||
Tag ITagDo
|
||||
Tagging ITaggingDo
|
||||
User IUserDo
|
||||
}
|
||||
|
||||
func (q *Query) WithContext(ctx context.Context) *queryCtx {
|
||||
return &queryCtx{
|
||||
Files: q.Files.WithContext(ctx),
|
||||
LibrarySection: q.LibrarySection.WithContext(ctx),
|
||||
MediaItem: q.MediaItem.WithContext(ctx),
|
||||
MediaPart: q.MediaPart.WithContext(ctx),
|
||||
MetadataItem: q.MetadataItem.WithContext(ctx),
|
||||
PathDownload: q.PathDownload.WithContext(ctx),
|
||||
SectionLocation: q.SectionLocation.WithContext(ctx),
|
||||
Tag: q.Tag.WithContext(ctx),
|
||||
Tagging: q.Tagging.WithContext(ctx),
|
||||
User: q.User.WithContext(ctx),
|
||||
}
|
||||
}
|
||||
|
||||
func (q *Query) Transaction(fc func(tx *Query) error, opts ...*sql.TxOptions) error {
|
||||
return q.db.Transaction(func(tx *gorm.DB) error { return fc(q.clone(tx)) }, opts...)
|
||||
}
|
||||
|
||||
func (q *Query) Begin(opts ...*sql.TxOptions) *QueryTx {
|
||||
tx := q.db.Begin(opts...)
|
||||
return &QueryTx{Query: q.clone(tx), Error: tx.Error}
|
||||
}
|
||||
|
||||
type QueryTx struct {
|
||||
*Query
|
||||
Error error
|
||||
}
|
||||
|
||||
func (q *QueryTx) Commit() error {
|
||||
return q.db.Commit().Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) Rollback() error {
|
||||
return q.db.Rollback().Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) SavePoint(name string) error {
|
||||
return q.db.SavePoint(name).Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) RollbackTo(name string) error {
|
||||
return q.db.RollbackTo(name).Error
|
||||
}
|
||||
426
query/library_sections.gen.go
Normal file
426
query/library_sections.gen.go
Normal file
@ -0,0 +1,426 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
func newLibrarySection(db *gorm.DB, opts ...gen.DOOption) librarySection {
|
||||
_librarySection := librarySection{}
|
||||
|
||||
_librarySection.librarySectionDo.UseDB(db, opts...)
|
||||
_librarySection.librarySectionDo.UseModel(&models.LibrarySection{})
|
||||
|
||||
tableName := _librarySection.librarySectionDo.TableName()
|
||||
_librarySection.ALL = field.NewAsterisk(tableName)
|
||||
_librarySection.ID = field.NewInt64(tableName, "id")
|
||||
_librarySection.Name = field.NewString(tableName, "name")
|
||||
_librarySection.SectionType = field.NewInt64(tableName, "section_type")
|
||||
_librarySection.Language = field.NewString(tableName, "language")
|
||||
_librarySection.UUID = field.NewString(tableName, "uuid")
|
||||
_librarySection.CreatedAt = field.NewString(tableName, "created_at")
|
||||
_librarySection.UpdatedAt = field.NewString(tableName, "updated_at")
|
||||
|
||||
_librarySection.fillFieldMap()
|
||||
|
||||
return _librarySection
|
||||
}
|
||||
|
||||
type librarySection struct {
|
||||
librarySectionDo
|
||||
|
||||
ALL field.Asterisk
|
||||
ID field.Int64
|
||||
Name field.String
|
||||
SectionType field.Int64
|
||||
Language field.String
|
||||
UUID field.String
|
||||
CreatedAt field.String
|
||||
UpdatedAt field.String
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
|
||||
func (l librarySection) Table(newTableName string) *librarySection {
|
||||
l.librarySectionDo.UseTable(newTableName)
|
||||
return l.updateTableName(newTableName)
|
||||
}
|
||||
|
||||
func (l librarySection) As(alias string) *librarySection {
|
||||
l.librarySectionDo.DO = *(l.librarySectionDo.As(alias).(*gen.DO))
|
||||
return l.updateTableName(alias)
|
||||
}
|
||||
|
||||
func (l *librarySection) updateTableName(table string) *librarySection {
|
||||
l.ALL = field.NewAsterisk(table)
|
||||
l.ID = field.NewInt64(table, "id")
|
||||
l.Name = field.NewString(table, "name")
|
||||
l.SectionType = field.NewInt64(table, "section_type")
|
||||
l.Language = field.NewString(table, "language")
|
||||
l.UUID = field.NewString(table, "uuid")
|
||||
l.CreatedAt = field.NewString(table, "created_at")
|
||||
l.UpdatedAt = field.NewString(table, "updated_at")
|
||||
|
||||
l.fillFieldMap()
|
||||
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *librarySection) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
|
||||
_f, ok := l.fieldMap[fieldName]
|
||||
if !ok || _f == nil {
|
||||
return nil, false
|
||||
}
|
||||
_oe, ok := _f.(field.OrderExpr)
|
||||
return _oe, ok
|
||||
}
|
||||
|
||||
func (l *librarySection) fillFieldMap() {
|
||||
l.fieldMap = make(map[string]field.Expr, 7)
|
||||
l.fieldMap["id"] = l.ID
|
||||
l.fieldMap["name"] = l.Name
|
||||
l.fieldMap["section_type"] = l.SectionType
|
||||
l.fieldMap["language"] = l.Language
|
||||
l.fieldMap["uuid"] = l.UUID
|
||||
l.fieldMap["created_at"] = l.CreatedAt
|
||||
l.fieldMap["updated_at"] = l.UpdatedAt
|
||||
}
|
||||
|
||||
func (l librarySection) clone(db *gorm.DB) librarySection {
|
||||
l.librarySectionDo.ReplaceConnPool(db.Statement.ConnPool)
|
||||
return l
|
||||
}
|
||||
|
||||
func (l librarySection) replaceDB(db *gorm.DB) librarySection {
|
||||
l.librarySectionDo.ReplaceDB(db)
|
||||
return l
|
||||
}
|
||||
|
||||
type librarySectionDo struct{ gen.DO }
|
||||
|
||||
type ILibrarySectionDo interface {
|
||||
gen.SubQuery
|
||||
Debug() ILibrarySectionDo
|
||||
WithContext(ctx context.Context) ILibrarySectionDo
|
||||
WithResult(fc func(tx gen.Dao)) gen.ResultInfo
|
||||
ReplaceDB(db *gorm.DB)
|
||||
ReadDB() ILibrarySectionDo
|
||||
WriteDB() ILibrarySectionDo
|
||||
As(alias string) gen.Dao
|
||||
Session(config *gorm.Session) ILibrarySectionDo
|
||||
Columns(cols ...field.Expr) gen.Columns
|
||||
Clauses(conds ...clause.Expression) ILibrarySectionDo
|
||||
Not(conds ...gen.Condition) ILibrarySectionDo
|
||||
Or(conds ...gen.Condition) ILibrarySectionDo
|
||||
Select(conds ...field.Expr) ILibrarySectionDo
|
||||
Where(conds ...gen.Condition) ILibrarySectionDo
|
||||
Order(conds ...field.Expr) ILibrarySectionDo
|
||||
Distinct(cols ...field.Expr) ILibrarySectionDo
|
||||
Omit(cols ...field.Expr) ILibrarySectionDo
|
||||
Join(table schema.Tabler, on ...field.Expr) ILibrarySectionDo
|
||||
LeftJoin(table schema.Tabler, on ...field.Expr) ILibrarySectionDo
|
||||
RightJoin(table schema.Tabler, on ...field.Expr) ILibrarySectionDo
|
||||
Group(cols ...field.Expr) ILibrarySectionDo
|
||||
Having(conds ...gen.Condition) ILibrarySectionDo
|
||||
Limit(limit int) ILibrarySectionDo
|
||||
Offset(offset int) ILibrarySectionDo
|
||||
Count() (count int64, err error)
|
||||
Scopes(funcs ...func(gen.Dao) gen.Dao) ILibrarySectionDo
|
||||
Unscoped() ILibrarySectionDo
|
||||
Create(values ...*models.LibrarySection) error
|
||||
CreateInBatches(values []*models.LibrarySection, batchSize int) error
|
||||
Save(values ...*models.LibrarySection) error
|
||||
First() (*models.LibrarySection, error)
|
||||
Take() (*models.LibrarySection, error)
|
||||
Last() (*models.LibrarySection, error)
|
||||
Find() ([]*models.LibrarySection, error)
|
||||
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.LibrarySection, err error)
|
||||
FindInBatches(result *[]*models.LibrarySection, batchSize int, fc func(tx gen.Dao, batch int) error) error
|
||||
Pluck(column field.Expr, dest interface{}) error
|
||||
Delete(...*models.LibrarySection) (info gen.ResultInfo, err error)
|
||||
Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
Updates(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateFrom(q gen.SubQuery) gen.Dao
|
||||
Attrs(attrs ...field.AssignExpr) ILibrarySectionDo
|
||||
Assign(attrs ...field.AssignExpr) ILibrarySectionDo
|
||||
Joins(fields ...field.RelationField) ILibrarySectionDo
|
||||
Preload(fields ...field.RelationField) ILibrarySectionDo
|
||||
FirstOrInit() (*models.LibrarySection, error)
|
||||
FirstOrCreate() (*models.LibrarySection, error)
|
||||
FindByPage(offset int, limit int) (result []*models.LibrarySection, count int64, err error)
|
||||
ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
|
||||
Scan(result interface{}) (err error)
|
||||
Returning(value interface{}, columns ...string) ILibrarySectionDo
|
||||
UnderlyingDB() *gorm.DB
|
||||
schema.Tabler
|
||||
|
||||
FilterWithNameAndRole(name string, role string) (result []models.LibrarySection, err error)
|
||||
}
|
||||
|
||||
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
|
||||
func (l librarySectionDo) FilterWithNameAndRole(name string, role string) (result []models.LibrarySection, err error) {
|
||||
var params []interface{}
|
||||
|
||||
var generateSQL strings.Builder
|
||||
params = append(params, name)
|
||||
generateSQL.WriteString("SELECT * FROM library_sections WHERE name = ? ")
|
||||
if role != "" {
|
||||
params = append(params, role)
|
||||
generateSQL.WriteString("AND role = ? ")
|
||||
}
|
||||
|
||||
var executeSQL *gorm.DB
|
||||
executeSQL = l.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
|
||||
err = executeSQL.Error
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Debug() ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Debug())
|
||||
}
|
||||
|
||||
func (l librarySectionDo) WithContext(ctx context.Context) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.WithContext(ctx))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) ReadDB() ILibrarySectionDo {
|
||||
return l.Clauses(dbresolver.Read)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) WriteDB() ILibrarySectionDo {
|
||||
return l.Clauses(dbresolver.Write)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Session(config *gorm.Session) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Session(config))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Clauses(conds ...clause.Expression) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Clauses(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Returning(value interface{}, columns ...string) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Returning(value, columns...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Not(conds ...gen.Condition) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Not(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Or(conds ...gen.Condition) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Or(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Select(conds ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Select(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Where(conds ...gen.Condition) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Where(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Order(conds ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Order(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Distinct(cols ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Distinct(cols...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Omit(cols ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Omit(cols...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Join(table schema.Tabler, on ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Join(table, on...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) LeftJoin(table schema.Tabler, on ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.LeftJoin(table, on...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) RightJoin(table schema.Tabler, on ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.RightJoin(table, on...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Group(cols ...field.Expr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Group(cols...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Having(conds ...gen.Condition) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Having(conds...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Limit(limit int) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Limit(limit))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Offset(offset int) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Offset(offset))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Scopes(funcs...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Unscoped() ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Unscoped())
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Create(values ...*models.LibrarySection) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return l.DO.Create(values)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) CreateInBatches(values []*models.LibrarySection, batchSize int) error {
|
||||
return l.DO.CreateInBatches(values, batchSize)
|
||||
}
|
||||
|
||||
// Save : !!! underlying implementation is different with GORM
|
||||
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
|
||||
func (l librarySectionDo) Save(values ...*models.LibrarySection) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return l.DO.Save(values)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) First() (*models.LibrarySection, error) {
|
||||
if result, err := l.DO.First(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.LibrarySection), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Take() (*models.LibrarySection, error) {
|
||||
if result, err := l.DO.Take(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.LibrarySection), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Last() (*models.LibrarySection, error) {
|
||||
if result, err := l.DO.Last(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.LibrarySection), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Find() ([]*models.LibrarySection, error) {
|
||||
result, err := l.DO.Find()
|
||||
return result.([]*models.LibrarySection), err
|
||||
}
|
||||
|
||||
func (l librarySectionDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.LibrarySection, err error) {
|
||||
buf := make([]*models.LibrarySection, 0, batchSize)
|
||||
err = l.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
|
||||
defer func() { results = append(results, buf...) }()
|
||||
return fc(tx, batch)
|
||||
})
|
||||
return results, err
|
||||
}
|
||||
|
||||
func (l librarySectionDo) FindInBatches(result *[]*models.LibrarySection, batchSize int, fc func(tx gen.Dao, batch int) error) error {
|
||||
return l.DO.FindInBatches(result, batchSize, fc)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Attrs(attrs ...field.AssignExpr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Attrs(attrs...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Assign(attrs ...field.AssignExpr) ILibrarySectionDo {
|
||||
return l.withDO(l.DO.Assign(attrs...))
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Joins(fields ...field.RelationField) ILibrarySectionDo {
|
||||
for _, _f := range fields {
|
||||
l = *l.withDO(l.DO.Joins(_f))
|
||||
}
|
||||
return &l
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Preload(fields ...field.RelationField) ILibrarySectionDo {
|
||||
for _, _f := range fields {
|
||||
l = *l.withDO(l.DO.Preload(_f))
|
||||
}
|
||||
return &l
|
||||
}
|
||||
|
||||
func (l librarySectionDo) FirstOrInit() (*models.LibrarySection, error) {
|
||||
if result, err := l.DO.FirstOrInit(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.LibrarySection), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (l librarySectionDo) FirstOrCreate() (*models.LibrarySection, error) {
|
||||
if result, err := l.DO.FirstOrCreate(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.LibrarySection), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (l librarySectionDo) FindByPage(offset int, limit int) (result []*models.LibrarySection, count int64, err error) {
|
||||
result, err = l.Offset(offset).Limit(limit).Find()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if size := len(result); 0 < limit && 0 < size && size < limit {
|
||||
count = int64(size + offset)
|
||||
return
|
||||
}
|
||||
|
||||
count, err = l.Offset(-1).Limit(-1).Count()
|
||||
return
|
||||
}
|
||||
|
||||
func (l librarySectionDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
|
||||
count, err = l.Count()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = l.Offset(offset).Limit(limit).Scan(result)
|
||||
return
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Scan(result interface{}) (err error) {
|
||||
return l.DO.Scan(result)
|
||||
}
|
||||
|
||||
func (l librarySectionDo) Delete(models ...*models.LibrarySection) (result gen.ResultInfo, err error) {
|
||||
return l.DO.Delete(models)
|
||||
}
|
||||
|
||||
func (l *librarySectionDo) withDO(do gen.Dao) *librarySectionDo {
|
||||
l.DO = *do.(*gen.DO)
|
||||
return l
|
||||
}
|
||||
446
query/media_items.gen.go
Normal file
446
query/media_items.gen.go
Normal file
@ -0,0 +1,446 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
func newMediaItem(db *gorm.DB, opts ...gen.DOOption) mediaItem {
|
||||
_mediaItem := mediaItem{}
|
||||
|
||||
_mediaItem.mediaItemDo.UseDB(db, opts...)
|
||||
_mediaItem.mediaItemDo.UseModel(&models.MediaItem{})
|
||||
|
||||
tableName := _mediaItem.mediaItemDo.TableName()
|
||||
_mediaItem.ALL = field.NewAsterisk(tableName)
|
||||
_mediaItem.ID = field.NewInt64(tableName, "id")
|
||||
_mediaItem.MetadataItemID = field.NewInt64(tableName, "metadata_item_id")
|
||||
_mediaItem.Duration = field.NewInt64(tableName, "duration")
|
||||
_mediaItem.Bitrate = field.NewInt64(tableName, "bitrate")
|
||||
_mediaItem.Width = field.NewInt64(tableName, "width")
|
||||
_mediaItem.Height = field.NewInt64(tableName, "height")
|
||||
_mediaItem.AspectRatio = field.NewFloat64(tableName, "aspect_ratio")
|
||||
_mediaItem.AudioCodec = field.NewString(tableName, "audio_codec")
|
||||
_mediaItem.VideoCodec = field.NewString(tableName, "video_codec")
|
||||
_mediaItem.Container = field.NewString(tableName, "container")
|
||||
_mediaItem.CreatedAt = field.NewString(tableName, "created_at")
|
||||
_mediaItem.UpdatedAt = field.NewString(tableName, "updated_at")
|
||||
|
||||
_mediaItem.fillFieldMap()
|
||||
|
||||
return _mediaItem
|
||||
}
|
||||
|
||||
type mediaItem struct {
|
||||
mediaItemDo
|
||||
|
||||
ALL field.Asterisk
|
||||
ID field.Int64
|
||||
MetadataItemID field.Int64
|
||||
Duration field.Int64
|
||||
Bitrate field.Int64
|
||||
Width field.Int64
|
||||
Height field.Int64
|
||||
AspectRatio field.Float64
|
||||
AudioCodec field.String
|
||||
VideoCodec field.String
|
||||
Container field.String
|
||||
CreatedAt field.String
|
||||
UpdatedAt field.String
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
|
||||
func (m mediaItem) Table(newTableName string) *mediaItem {
|
||||
m.mediaItemDo.UseTable(newTableName)
|
||||
return m.updateTableName(newTableName)
|
||||
}
|
||||
|
||||
func (m mediaItem) As(alias string) *mediaItem {
|
||||
m.mediaItemDo.DO = *(m.mediaItemDo.As(alias).(*gen.DO))
|
||||
return m.updateTableName(alias)
|
||||
}
|
||||
|
||||
// updateTableName re-creates every column expression qualified with
// the given table (or alias) name, then refreshes the field map.
func (m *mediaItem) updateTableName(table string) *mediaItem {
	m.ALL = field.NewAsterisk(table)
	m.ID = field.NewInt64(table, "id")
	m.MetadataItemID = field.NewInt64(table, "metadata_item_id")
	m.Duration = field.NewInt64(table, "duration")
	m.Bitrate = field.NewInt64(table, "bitrate")
	m.Width = field.NewInt64(table, "width")
	m.Height = field.NewInt64(table, "height")
	m.AspectRatio = field.NewFloat64(table, "aspect_ratio")
	m.AudioCodec = field.NewString(table, "audio_codec")
	m.VideoCodec = field.NewString(table, "video_codec")
	m.Container = field.NewString(table, "container")
	m.CreatedAt = field.NewString(table, "created_at")
	m.UpdatedAt = field.NewString(table, "updated_at")

	m.fillFieldMap()

	return m
}

// GetFieldByName resolves a database column name to an orderable field
// expression; it reports false for unknown columns or expressions that
// cannot be used for ordering.
func (m *mediaItem) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := m.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap indexes the column expressions by database column name
// so GetFieldByName can look them up dynamically.
func (m *mediaItem) fillFieldMap() {
	m.fieldMap = make(map[string]field.Expr, 12)
	m.fieldMap["id"] = m.ID
	m.fieldMap["metadata_item_id"] = m.MetadataItemID
	m.fieldMap["duration"] = m.Duration
	m.fieldMap["bitrate"] = m.Bitrate
	m.fieldMap["width"] = m.Width
	m.fieldMap["height"] = m.Height
	m.fieldMap["aspect_ratio"] = m.AspectRatio
	m.fieldMap["audio_codec"] = m.AudioCodec
	m.fieldMap["video_codec"] = m.VideoCodec
	m.fieldMap["container"] = m.Container
	m.fieldMap["created_at"] = m.CreatedAt
	m.fieldMap["updated_at"] = m.UpdatedAt
}
|
||||
|
||||
// clone returns a copy of the helper whose DAO uses db's connection
// pool (statement state is kept; only the connection is swapped).
func (m mediaItem) clone(db *gorm.DB) mediaItem {
	m.mediaItemDo.ReplaceConnPool(db.Statement.ConnPool)
	return m
}

// replaceDB returns a copy of the helper bound to an entirely new *gorm.DB.
func (m mediaItem) replaceDB(db *gorm.DB) mediaItem {
	m.mediaItemDo.ReplaceDB(db)
	return m
}

// mediaItemDo wraps the generic gen.DO with MediaItem-typed query methods.
type mediaItemDo struct{ gen.DO }
|
||||
|
||||
// IMediaItemDo is the type-safe query interface for models.MediaItem.
// Builder methods (Where, Order, Limit, ...) return IMediaItemDo so
// calls can be chained; finishers (Find, First, Count, ...) execute
// the accumulated query.
type IMediaItemDo interface {
	gen.SubQuery
	Debug() IMediaItemDo
	WithContext(ctx context.Context) IMediaItemDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() IMediaItemDo
	WriteDB() IMediaItemDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) IMediaItemDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) IMediaItemDo
	Not(conds ...gen.Condition) IMediaItemDo
	Or(conds ...gen.Condition) IMediaItemDo
	Select(conds ...field.Expr) IMediaItemDo
	Where(conds ...gen.Condition) IMediaItemDo
	Order(conds ...field.Expr) IMediaItemDo
	Distinct(cols ...field.Expr) IMediaItemDo
	Omit(cols ...field.Expr) IMediaItemDo
	Join(table schema.Tabler, on ...field.Expr) IMediaItemDo
	LeftJoin(table schema.Tabler, on ...field.Expr) IMediaItemDo
	RightJoin(table schema.Tabler, on ...field.Expr) IMediaItemDo
	Group(cols ...field.Expr) IMediaItemDo
	Having(conds ...gen.Condition) IMediaItemDo
	Limit(limit int) IMediaItemDo
	Offset(offset int) IMediaItemDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) IMediaItemDo
	Unscoped() IMediaItemDo

	// CRUD finishers.
	Create(values ...*models.MediaItem) error
	CreateInBatches(values []*models.MediaItem, batchSize int) error
	Save(values ...*models.MediaItem) error
	First() (*models.MediaItem, error)
	Take() (*models.MediaItem, error)
	Last() (*models.MediaItem, error)
	Find() ([]*models.MediaItem, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MediaItem, err error)
	FindInBatches(result *[]*models.MediaItem, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.MediaItem) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) IMediaItemDo
	Assign(attrs ...field.AssignExpr) IMediaItemDo
	Joins(fields ...field.RelationField) IMediaItemDo
	Preload(fields ...field.RelationField) IMediaItemDo
	FirstOrInit() (*models.MediaItem, error)
	FirstOrCreate() (*models.MediaItem, error)
	FindByPage(offset int, limit int) (result []*models.MediaItem, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) IMediaItemDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	// Dynamic SQL method generated from the Querier interface template.
	FilterWithNameAndRole(name string, role string) (result []models.MediaItem, err error)
}
|
||||
|
||||
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
|
||||
func (m mediaItemDo) FilterWithNameAndRole(name string, role string) (result []models.MediaItem, err error) {
|
||||
var params []interface{}
|
||||
|
||||
var generateSQL strings.Builder
|
||||
params = append(params, name)
|
||||
generateSQL.WriteString("SELECT * FROM media_items WHERE name = ? ")
|
||||
if role != "" {
|
||||
params = append(params, role)
|
||||
generateSQL.WriteString("AND role = ? ")
|
||||
}
|
||||
|
||||
var executeSQL *gorm.DB
|
||||
executeSQL = m.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
|
||||
err = executeSQL.Error
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// The methods below are thin chainable wrappers: each delegates to the
// embedded gen.DO and re-wraps the result via withDO so the typed
// IMediaItemDo interface is preserved through the chain.

// Debug enables SQL logging for subsequent calls in the chain.
func (m mediaItemDo) Debug() IMediaItemDo {
	return m.withDO(m.DO.Debug())
}

// WithContext attaches ctx to the query for cancellation/timeouts.
func (m mediaItemDo) WithContext(ctx context.Context) IMediaItemDo {
	return m.withDO(m.DO.WithContext(ctx))
}

// ReadDB routes the query to a read replica (dbresolver).
func (m mediaItemDo) ReadDB() IMediaItemDo {
	return m.Clauses(dbresolver.Read)
}

// WriteDB routes the query to the primary database (dbresolver).
func (m mediaItemDo) WriteDB() IMediaItemDo {
	return m.Clauses(dbresolver.Write)
}

// Session starts a new gorm session with the given configuration.
func (m mediaItemDo) Session(config *gorm.Session) IMediaItemDo {
	return m.withDO(m.DO.Session(config))
}

// Clauses appends raw gorm clauses to the statement.
func (m mediaItemDo) Clauses(conds ...clause.Expression) IMediaItemDo {
	return m.withDO(m.DO.Clauses(conds...))
}

// Returning requests RETURNING columns into value (where supported).
func (m mediaItemDo) Returning(value interface{}, columns ...string) IMediaItemDo {
	return m.withDO(m.DO.Returning(value, columns...))
}

// Not negates the given conditions.
func (m mediaItemDo) Not(conds ...gen.Condition) IMediaItemDo {
	return m.withDO(m.DO.Not(conds...))
}

// Or combines the given conditions with OR.
func (m mediaItemDo) Or(conds ...gen.Condition) IMediaItemDo {
	return m.withDO(m.DO.Or(conds...))
}

// Select restricts the selected columns.
func (m mediaItemDo) Select(conds ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Select(conds...))
}

// Where appends AND-combined filter conditions.
func (m mediaItemDo) Where(conds ...gen.Condition) IMediaItemDo {
	return m.withDO(m.DO.Where(conds...))
}

// Order appends ORDER BY expressions.
func (m mediaItemDo) Order(conds ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Order(conds...))
}

// Distinct adds SELECT DISTINCT over the given columns.
func (m mediaItemDo) Distinct(cols ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Distinct(cols...))
}

// Omit excludes the given columns from the statement.
func (m mediaItemDo) Omit(cols ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Omit(cols...))
}

// Join adds an INNER JOIN on the given table and conditions.
func (m mediaItemDo) Join(table schema.Tabler, on ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Join(table, on...))
}

// LeftJoin adds a LEFT JOIN on the given table and conditions.
func (m mediaItemDo) LeftJoin(table schema.Tabler, on ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.LeftJoin(table, on...))
}

// RightJoin adds a RIGHT JOIN on the given table and conditions.
func (m mediaItemDo) RightJoin(table schema.Tabler, on ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.RightJoin(table, on...))
}

// Group adds GROUP BY over the given columns.
func (m mediaItemDo) Group(cols ...field.Expr) IMediaItemDo {
	return m.withDO(m.DO.Group(cols...))
}

// Having adds HAVING conditions (used with Group).
func (m mediaItemDo) Having(conds ...gen.Condition) IMediaItemDo {
	return m.withDO(m.DO.Having(conds...))
}

// Limit caps the number of returned rows.
func (m mediaItemDo) Limit(limit int) IMediaItemDo {
	return m.withDO(m.DO.Limit(limit))
}

// Offset skips the given number of rows.
func (m mediaItemDo) Offset(offset int) IMediaItemDo {
	return m.withDO(m.DO.Offset(offset))
}

// Scopes applies reusable query-modifier functions.
func (m mediaItemDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IMediaItemDo {
	return m.withDO(m.DO.Scopes(funcs...))
}

// Unscoped disables soft-delete filtering and other default scopes.
func (m mediaItemDo) Unscoped() IMediaItemDo {
	return m.withDO(m.DO.Unscoped())
}
|
||||
|
||||
// Create inserts the given records; a no-op (nil) when called with none.
func (m mediaItemDo) Create(values ...*models.MediaItem) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Create(values)
}

// CreateInBatches inserts the records in chunks of batchSize.
func (m mediaItemDo) CreateInBatches(values []*models.MediaItem, batchSize int) error {
	return m.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (m mediaItemDo) Save(values ...*models.MediaItem) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Save(values)
}

// First returns the first row ordered by primary key.
func (m mediaItemDo) First() (*models.MediaItem, error) {
	if result, err := m.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaItem), nil
	}
}

// Take returns one row without any implied ordering.
func (m mediaItemDo) Take() (*models.MediaItem, error) {
	if result, err := m.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaItem), nil
	}
}

// Last returns the last row ordered by primary key.
func (m mediaItemDo) Last() (*models.MediaItem, error) {
	if result, err := m.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaItem), nil
	}
}

// Find returns all rows matching the accumulated conditions.
func (m mediaItemDo) Find() ([]*models.MediaItem, error) {
	result, err := m.DO.Find()
	return result.([]*models.MediaItem), err
}
|
||||
|
||||
// FindInBatch fetches matching rows batchSize at a time, invoking fc
// for each batch, and accumulates every batch into results. The
// deferred append runs even when fc returns an error, so rows read so
// far are still returned.
func (m mediaItemDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MediaItem, err error) {
	buf := make([]*models.MediaItem, 0, batchSize)
	err = m.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

// FindInBatches streams matching rows into result batchSize at a time,
// calling fc once per batch.
func (m mediaItemDo) FindInBatches(result *[]*models.MediaItem, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return m.DO.FindInBatches(result, batchSize, fc)
}

// Attrs sets attributes used only when FirstOrInit/FirstOrCreate creates a record.
func (m mediaItemDo) Attrs(attrs ...field.AssignExpr) IMediaItemDo {
	return m.withDO(m.DO.Attrs(attrs...))
}

// Assign sets attributes applied whether the record is found or created.
func (m mediaItemDo) Assign(attrs ...field.AssignExpr) IMediaItemDo {
	return m.withDO(m.DO.Assign(attrs...))
}

// Joins eager-joins the given relations, one at a time.
func (m mediaItemDo) Joins(fields ...field.RelationField) IMediaItemDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Joins(_f))
	}
	return &m
}

// Preload eager-loads the given relations with separate queries.
func (m mediaItemDo) Preload(fields ...field.RelationField) IMediaItemDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Preload(_f))
	}
	return &m
}
|
||||
|
||||
// FirstOrInit returns the first matching row, or an in-memory record
// initialized from the conditions (not persisted) when none exists.
func (m mediaItemDo) FirstOrInit() (*models.MediaItem, error) {
	if result, err := m.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaItem), nil
	}
}

// FirstOrCreate returns the first matching row, creating and returning
// one when none exists.
func (m mediaItemDo) FirstOrCreate() (*models.MediaItem, error) {
	if result, err := m.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaItem), nil
	}
}

// FindByPage returns one page of rows plus the total count. When the
// returned page is short (last page), the count is derived from
// offset+size to skip the extra COUNT query.
func (m mediaItemDo) FindByPage(offset int, limit int) (result []*models.MediaItem, count int64, err error) {
	result, err = m.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = m.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts all matching rows, then scans one page into result.
func (m mediaItemDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = m.Count()
	if err != nil {
		return
	}

	err = m.Offset(offset).Limit(limit).Scan(result)
	return
}

// Scan decodes the query result into an arbitrary destination.
func (m mediaItemDo) Scan(result interface{}) (err error) {
	return m.DO.Scan(result)
}

// Delete removes the given records (soft delete if the model supports it).
func (m mediaItemDo) Delete(models ...*models.MediaItem) (result gen.ResultInfo, err error) {
	return m.DO.Delete(models)
}

// withDO swaps in the updated gen.DO and returns the receiver, keeping
// builder chains typed as *mediaItemDo.
func (m *mediaItemDo) withDO(do gen.Dao) *mediaItemDo {
	m.DO = *do.(*gen.DO)
	return m
}
|
||||
430
query/media_parts.gen.go
Normal file
430
query/media_parts.gen.go
Normal file
@ -0,0 +1,430 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
// newMediaPart builds the query helper for models.MediaPart: it binds
// the DAO to db, resolves the table name from the model, and creates
// one typed expression per database column.
func newMediaPart(db *gorm.DB, opts ...gen.DOOption) mediaPart {
	_mediaPart := mediaPart{}

	_mediaPart.mediaPartDo.UseDB(db, opts...)
	_mediaPart.mediaPartDo.UseModel(&models.MediaPart{})

	tableName := _mediaPart.mediaPartDo.TableName()
	_mediaPart.ALL = field.NewAsterisk(tableName)
	_mediaPart.ID = field.NewInt64(tableName, "id")
	_mediaPart.MediaItemID = field.NewInt64(tableName, "media_item_id")
	_mediaPart.File = field.NewString(tableName, "file")
	_mediaPart.Duration = field.NewInt64(tableName, "duration")
	_mediaPart.Size = field.NewInt64(tableName, "size")
	_mediaPart.Indexes = field.NewString(tableName, "indexes")
	_mediaPart.CreatedAt = field.NewString(tableName, "created_at")
	_mediaPart.UpdatedAt = field.NewString(tableName, "updated_at")

	_mediaPart.fillFieldMap()

	return _mediaPart
}

// mediaPart is the generated query helper for models.MediaPart,
// exposing one typed field expression per column of the media_parts
// table and embedding the DAO (mediaPartDo).
type mediaPart struct {
	mediaPartDo

	ALL         field.Asterisk // matches every column ("*")
	ID          field.Int64
	MediaItemID field.Int64
	File        field.String
	Duration    field.Int64
	Size        field.Int64
	Indexes     field.String
	CreatedAt   field.String // NOTE(review): timestamps are mapped as strings — confirm the column type
	UpdatedAt   field.String

	fieldMap map[string]field.Expr // column name -> expression, populated by fillFieldMap
}
|
||||
|
||||
// Table points the helper at a different table name and rebuilds every
// field expression against it.
func (m mediaPart) Table(newTableName string) *mediaPart {
	m.mediaPartDo.UseTable(newTableName)
	return m.updateTableName(newTableName)
}

// As aliases the table (for joins/subqueries) and rebuilds the field
// expressions so they are qualified with the alias.
func (m mediaPart) As(alias string) *mediaPart {
	m.mediaPartDo.DO = *(m.mediaPartDo.As(alias).(*gen.DO))
	return m.updateTableName(alias)
}

// updateTableName re-creates every column expression qualified with
// the given table (or alias) name, then refreshes the field map.
func (m *mediaPart) updateTableName(table string) *mediaPart {
	m.ALL = field.NewAsterisk(table)
	m.ID = field.NewInt64(table, "id")
	m.MediaItemID = field.NewInt64(table, "media_item_id")
	m.File = field.NewString(table, "file")
	m.Duration = field.NewInt64(table, "duration")
	m.Size = field.NewInt64(table, "size")
	m.Indexes = field.NewString(table, "indexes")
	m.CreatedAt = field.NewString(table, "created_at")
	m.UpdatedAt = field.NewString(table, "updated_at")

	m.fillFieldMap()

	return m
}

// GetFieldByName resolves a database column name to an orderable field
// expression; it reports false for unknown columns or expressions that
// cannot be used for ordering.
func (m *mediaPart) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := m.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}
|
||||
|
||||
// fillFieldMap indexes the column expressions by database column name
// so GetFieldByName can look them up dynamically.
func (m *mediaPart) fillFieldMap() {
	m.fieldMap = make(map[string]field.Expr, 8)
	m.fieldMap["id"] = m.ID
	m.fieldMap["media_item_id"] = m.MediaItemID
	m.fieldMap["file"] = m.File
	m.fieldMap["duration"] = m.Duration
	m.fieldMap["size"] = m.Size
	m.fieldMap["indexes"] = m.Indexes
	m.fieldMap["created_at"] = m.CreatedAt
	m.fieldMap["updated_at"] = m.UpdatedAt
}

// clone returns a copy of the helper whose DAO uses db's connection
// pool (statement state is kept; only the connection is swapped).
func (m mediaPart) clone(db *gorm.DB) mediaPart {
	m.mediaPartDo.ReplaceConnPool(db.Statement.ConnPool)
	return m
}

// replaceDB returns a copy of the helper bound to an entirely new *gorm.DB.
func (m mediaPart) replaceDB(db *gorm.DB) mediaPart {
	m.mediaPartDo.ReplaceDB(db)
	return m
}

// mediaPartDo wraps the generic gen.DO with MediaPart-typed query methods.
type mediaPartDo struct{ gen.DO }
|
||||
|
||||
// IMediaPartDo is the type-safe query interface for models.MediaPart.
// Builder methods return IMediaPartDo so calls can be chained;
// finishers (Find, First, Count, ...) execute the accumulated query.
type IMediaPartDo interface {
	gen.SubQuery
	Debug() IMediaPartDo
	WithContext(ctx context.Context) IMediaPartDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() IMediaPartDo
	WriteDB() IMediaPartDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) IMediaPartDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) IMediaPartDo
	Not(conds ...gen.Condition) IMediaPartDo
	Or(conds ...gen.Condition) IMediaPartDo
	Select(conds ...field.Expr) IMediaPartDo
	Where(conds ...gen.Condition) IMediaPartDo
	Order(conds ...field.Expr) IMediaPartDo
	Distinct(cols ...field.Expr) IMediaPartDo
	Omit(cols ...field.Expr) IMediaPartDo
	Join(table schema.Tabler, on ...field.Expr) IMediaPartDo
	LeftJoin(table schema.Tabler, on ...field.Expr) IMediaPartDo
	RightJoin(table schema.Tabler, on ...field.Expr) IMediaPartDo
	Group(cols ...field.Expr) IMediaPartDo
	Having(conds ...gen.Condition) IMediaPartDo
	Limit(limit int) IMediaPartDo
	Offset(offset int) IMediaPartDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) IMediaPartDo
	Unscoped() IMediaPartDo

	// CRUD finishers.
	Create(values ...*models.MediaPart) error
	CreateInBatches(values []*models.MediaPart, batchSize int) error
	Save(values ...*models.MediaPart) error
	First() (*models.MediaPart, error)
	Take() (*models.MediaPart, error)
	Last() (*models.MediaPart, error)
	Find() ([]*models.MediaPart, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MediaPart, err error)
	FindInBatches(result *[]*models.MediaPart, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.MediaPart) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) IMediaPartDo
	Assign(attrs ...field.AssignExpr) IMediaPartDo
	Joins(fields ...field.RelationField) IMediaPartDo
	Preload(fields ...field.RelationField) IMediaPartDo
	FirstOrInit() (*models.MediaPart, error)
	FirstOrCreate() (*models.MediaPart, error)
	FindByPage(offset int, limit int) (result []*models.MediaPart, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) IMediaPartDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	// Dynamic SQL method generated from the Querier interface template.
	FilterWithNameAndRole(name string, role string) (result []models.MediaPart, err error)
}
|
||||
|
||||
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
|
||||
func (m mediaPartDo) FilterWithNameAndRole(name string, role string) (result []models.MediaPart, err error) {
|
||||
var params []interface{}
|
||||
|
||||
var generateSQL strings.Builder
|
||||
params = append(params, name)
|
||||
generateSQL.WriteString("SELECT * FROM media_parts WHERE name = ? ")
|
||||
if role != "" {
|
||||
params = append(params, role)
|
||||
generateSQL.WriteString("AND role = ? ")
|
||||
}
|
||||
|
||||
var executeSQL *gorm.DB
|
||||
executeSQL = m.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
|
||||
err = executeSQL.Error
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// The methods below are thin chainable wrappers: each delegates to the
// embedded gen.DO and re-wraps the result via withDO so the typed
// IMediaPartDo interface is preserved through the chain.

// Debug enables SQL logging for subsequent calls in the chain.
func (m mediaPartDo) Debug() IMediaPartDo {
	return m.withDO(m.DO.Debug())
}

// WithContext attaches ctx to the query for cancellation/timeouts.
func (m mediaPartDo) WithContext(ctx context.Context) IMediaPartDo {
	return m.withDO(m.DO.WithContext(ctx))
}

// ReadDB routes the query to a read replica (dbresolver).
func (m mediaPartDo) ReadDB() IMediaPartDo {
	return m.Clauses(dbresolver.Read)
}

// WriteDB routes the query to the primary database (dbresolver).
func (m mediaPartDo) WriteDB() IMediaPartDo {
	return m.Clauses(dbresolver.Write)
}

// Session starts a new gorm session with the given configuration.
func (m mediaPartDo) Session(config *gorm.Session) IMediaPartDo {
	return m.withDO(m.DO.Session(config))
}

// Clauses appends raw gorm clauses to the statement.
func (m mediaPartDo) Clauses(conds ...clause.Expression) IMediaPartDo {
	return m.withDO(m.DO.Clauses(conds...))
}

// Returning requests RETURNING columns into value (where supported).
func (m mediaPartDo) Returning(value interface{}, columns ...string) IMediaPartDo {
	return m.withDO(m.DO.Returning(value, columns...))
}

// Not negates the given conditions.
func (m mediaPartDo) Not(conds ...gen.Condition) IMediaPartDo {
	return m.withDO(m.DO.Not(conds...))
}

// Or combines the given conditions with OR.
func (m mediaPartDo) Or(conds ...gen.Condition) IMediaPartDo {
	return m.withDO(m.DO.Or(conds...))
}

// Select restricts the selected columns.
func (m mediaPartDo) Select(conds ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Select(conds...))
}

// Where appends AND-combined filter conditions.
func (m mediaPartDo) Where(conds ...gen.Condition) IMediaPartDo {
	return m.withDO(m.DO.Where(conds...))
}

// Order appends ORDER BY expressions.
func (m mediaPartDo) Order(conds ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Order(conds...))
}

// Distinct adds SELECT DISTINCT over the given columns.
func (m mediaPartDo) Distinct(cols ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Distinct(cols...))
}

// Omit excludes the given columns from the statement.
func (m mediaPartDo) Omit(cols ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Omit(cols...))
}

// Join adds an INNER JOIN on the given table and conditions.
func (m mediaPartDo) Join(table schema.Tabler, on ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Join(table, on...))
}

// LeftJoin adds a LEFT JOIN on the given table and conditions.
func (m mediaPartDo) LeftJoin(table schema.Tabler, on ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.LeftJoin(table, on...))
}

// RightJoin adds a RIGHT JOIN on the given table and conditions.
func (m mediaPartDo) RightJoin(table schema.Tabler, on ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.RightJoin(table, on...))
}

// Group adds GROUP BY over the given columns.
func (m mediaPartDo) Group(cols ...field.Expr) IMediaPartDo {
	return m.withDO(m.DO.Group(cols...))
}

// Having adds HAVING conditions (used with Group).
func (m mediaPartDo) Having(conds ...gen.Condition) IMediaPartDo {
	return m.withDO(m.DO.Having(conds...))
}

// Limit caps the number of returned rows.
func (m mediaPartDo) Limit(limit int) IMediaPartDo {
	return m.withDO(m.DO.Limit(limit))
}

// Offset skips the given number of rows.
func (m mediaPartDo) Offset(offset int) IMediaPartDo {
	return m.withDO(m.DO.Offset(offset))
}

// Scopes applies reusable query-modifier functions.
func (m mediaPartDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IMediaPartDo {
	return m.withDO(m.DO.Scopes(funcs...))
}

// Unscoped disables soft-delete filtering and other default scopes.
func (m mediaPartDo) Unscoped() IMediaPartDo {
	return m.withDO(m.DO.Unscoped())
}
|
||||
|
||||
// Create inserts the given records; a no-op (nil) when called with none.
func (m mediaPartDo) Create(values ...*models.MediaPart) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Create(values)
}

// CreateInBatches inserts the records in chunks of batchSize.
func (m mediaPartDo) CreateInBatches(values []*models.MediaPart, batchSize int) error {
	return m.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (m mediaPartDo) Save(values ...*models.MediaPart) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Save(values)
}

// First returns the first row ordered by primary key.
func (m mediaPartDo) First() (*models.MediaPart, error) {
	if result, err := m.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaPart), nil
	}
}

// Take returns one row without any implied ordering.
func (m mediaPartDo) Take() (*models.MediaPart, error) {
	if result, err := m.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaPart), nil
	}
}

// Last returns the last row ordered by primary key.
func (m mediaPartDo) Last() (*models.MediaPart, error) {
	if result, err := m.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaPart), nil
	}
}

// Find returns all rows matching the accumulated conditions.
func (m mediaPartDo) Find() ([]*models.MediaPart, error) {
	result, err := m.DO.Find()
	return result.([]*models.MediaPart), err
}
|
||||
|
||||
// FindInBatch fetches matching rows batchSize at a time, invoking fc
// for each batch, and accumulates every batch into results. The
// deferred append runs even when fc returns an error, so rows read so
// far are still returned.
func (m mediaPartDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MediaPart, err error) {
	buf := make([]*models.MediaPart, 0, batchSize)
	err = m.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

// FindInBatches streams matching rows into result batchSize at a time,
// calling fc once per batch.
func (m mediaPartDo) FindInBatches(result *[]*models.MediaPart, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return m.DO.FindInBatches(result, batchSize, fc)
}

// Attrs sets attributes used only when FirstOrInit/FirstOrCreate creates a record.
func (m mediaPartDo) Attrs(attrs ...field.AssignExpr) IMediaPartDo {
	return m.withDO(m.DO.Attrs(attrs...))
}

// Assign sets attributes applied whether the record is found or created.
func (m mediaPartDo) Assign(attrs ...field.AssignExpr) IMediaPartDo {
	return m.withDO(m.DO.Assign(attrs...))
}

// Joins eager-joins the given relations, one at a time.
func (m mediaPartDo) Joins(fields ...field.RelationField) IMediaPartDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Joins(_f))
	}
	return &m
}

// Preload eager-loads the given relations with separate queries.
func (m mediaPartDo) Preload(fields ...field.RelationField) IMediaPartDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Preload(_f))
	}
	return &m
}
|
||||
|
||||
// FirstOrInit returns the first matching row, or an in-memory record
// initialized from the conditions (not persisted) when none exists.
func (m mediaPartDo) FirstOrInit() (*models.MediaPart, error) {
	if result, err := m.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaPart), nil
	}
}

// FirstOrCreate returns the first matching row, creating and returning
// one when none exists.
func (m mediaPartDo) FirstOrCreate() (*models.MediaPart, error) {
	if result, err := m.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.MediaPart), nil
	}
}

// FindByPage returns one page of rows plus the total count. When the
// returned page is short (last page), the count is derived from
// offset+size to skip the extra COUNT query.
func (m mediaPartDo) FindByPage(offset int, limit int) (result []*models.MediaPart, count int64, err error) {
	result, err = m.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = m.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts all matching rows, then scans one page into result.
func (m mediaPartDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = m.Count()
	if err != nil {
		return
	}

	err = m.Offset(offset).Limit(limit).Scan(result)
	return
}

// Scan decodes the query result into an arbitrary destination.
func (m mediaPartDo) Scan(result interface{}) (err error) {
	return m.DO.Scan(result)
}

// Delete removes the given records (soft delete if the model supports it).
func (m mediaPartDo) Delete(models ...*models.MediaPart) (result gen.ResultInfo, err error) {
	return m.DO.Delete(models)
}

// withDO swaps in the updated gen.DO and returns the receiver, keeping
// builder chains typed as *mediaPartDo.
func (m *mediaPartDo) withDO(do gen.Dao) *mediaPartDo {
	m.DO = *do.(*gen.DO)
	return m
}
|
||||
474
query/metadata_items.gen.go
Normal file
474
query/metadata_items.gen.go
Normal file
@ -0,0 +1,474 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
// newMetadataItem builds the query helper for models.MetadataItem: it
// binds the DAO to db, resolves the table name from the model, and
// creates one typed expression per database column.
func newMetadataItem(db *gorm.DB, opts ...gen.DOOption) metadataItem {
	_metadataItem := metadataItem{}

	_metadataItem.metadataItemDo.UseDB(db, opts...)
	_metadataItem.metadataItemDo.UseModel(&models.MetadataItem{})

	tableName := _metadataItem.metadataItemDo.TableName()
	_metadataItem.ALL = field.NewAsterisk(tableName)
	_metadataItem.ID = field.NewInt64(tableName, "id")
	_metadataItem.LibrarySectionID = field.NewInt64(tableName, "library_section_id")
	_metadataItem.ParentID = field.NewInt64(tableName, "parent_id")
	_metadataItem.MetadataType = field.NewInt64(tableName, "metadata_type")
	_metadataItem.GUID = field.NewString(tableName, "guid")
	_metadataItem.Title = field.NewString(tableName, "title")
	_metadataItem.TitleSort = field.NewString(tableName, "title_sort")
	_metadataItem.OriginalTitle = field.NewString(tableName, "original_title")
	_metadataItem.Studio = field.NewString(tableName, "studio")
	_metadataItem.Rating = field.NewFloat64(tableName, "rating")
	_metadataItem.ContentRating = field.NewString(tableName, "content_rating")
	_metadataItem.Tagline = field.NewString(tableName, "tagline")
	_metadataItem.Summary = field.NewString(tableName, "summary")
	_metadataItem.Index = field.NewInt64(tableName, "index")
	_metadataItem.Duration = field.NewInt64(tableName, "duration")
	_metadataItem.ReleaseDate = field.NewString(tableName, "release_date")
	_metadataItem.CreatedAt = field.NewString(tableName, "created_at")
	_metadataItem.UpdatedAt = field.NewString(tableName, "updated_at")
	_metadataItem.UserThumbURL = field.NewString(tableName, "user_thumb_url")

	_metadataItem.fillFieldMap()

	return _metadataItem
}
|
||||
|
||||
// metadataItem is the generated query helper for models.MetadataItem,
// exposing one typed field expression per column of the metadata_items
// table and embedding the DAO (metadataItemDo).
type metadataItem struct {
	metadataItemDo

	ALL              field.Asterisk // matches every column ("*")
	ID               field.Int64
	LibrarySectionID field.Int64
	ParentID         field.Int64
	MetadataType     field.Int64
	GUID             field.String
	Title            field.String
	TitleSort        field.String
	OriginalTitle    field.String
	Studio           field.String
	Rating           field.Float64
	ContentRating    field.String
	Tagline          field.String
	Summary          field.String
	Index            field.Int64
	Duration         field.Int64
	ReleaseDate      field.String
	CreatedAt        field.String // NOTE(review): timestamps are mapped as strings — confirm the column type
	UpdatedAt        field.String
	UserThumbURL     field.String

	fieldMap map[string]field.Expr // column name -> expression, populated by fillFieldMap
}
|
||||
|
||||
// Table rebinds the query builder to newTableName and rebuilds every column
// expression against it.
func (m metadataItem) Table(newTableName string) *metadataItem {
	m.metadataItemDo.UseTable(newTableName)
	return m.updateTableName(newTableName)
}

// As aliases the table in generated SQL and rebinds every column expression
// to that alias.
func (m metadataItem) As(alias string) *metadataItem {
	m.metadataItemDo.DO = *(m.metadataItemDo.As(alias).(*gen.DO))
	return m.updateTableName(alias)
}
|
||||
|
||||
// updateTableName re-creates every column expression against table and then
// refreshes fieldMap so name lookups stay consistent after Table/As.
func (m *metadataItem) updateTableName(table string) *metadataItem {
	m.ALL = field.NewAsterisk(table)
	m.ID = field.NewInt64(table, "id")
	m.LibrarySectionID = field.NewInt64(table, "library_section_id")
	m.ParentID = field.NewInt64(table, "parent_id")
	m.MetadataType = field.NewInt64(table, "metadata_type")
	m.GUID = field.NewString(table, "guid")
	m.Title = field.NewString(table, "title")
	m.TitleSort = field.NewString(table, "title_sort")
	m.OriginalTitle = field.NewString(table, "original_title")
	m.Studio = field.NewString(table, "studio")
	m.Rating = field.NewFloat64(table, "rating")
	m.ContentRating = field.NewString(table, "content_rating")
	m.Tagline = field.NewString(table, "tagline")
	m.Summary = field.NewString(table, "summary")
	m.Index = field.NewInt64(table, "index")
	m.Duration = field.NewInt64(table, "duration")
	m.ReleaseDate = field.NewString(table, "release_date")
	m.CreatedAt = field.NewString(table, "created_at")
	m.UpdatedAt = field.NewString(table, "updated_at")
	m.UserThumbURL = field.NewString(table, "user_thumb_url")

	m.fillFieldMap()

	return m
}
|
||||
|
||||
// GetFieldByName resolves a column name to an orderable expression; the
// second result is false when the column is unknown or not orderable.
func (m *metadataItem) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := m.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}
|
||||
|
||||
// fillFieldMap indexes all 19 column expressions by their SQL column name,
// backing GetFieldByName.
func (m *metadataItem) fillFieldMap() {
	m.fieldMap = make(map[string]field.Expr, 19)
	m.fieldMap["id"] = m.ID
	m.fieldMap["library_section_id"] = m.LibrarySectionID
	m.fieldMap["parent_id"] = m.ParentID
	m.fieldMap["metadata_type"] = m.MetadataType
	m.fieldMap["guid"] = m.GUID
	m.fieldMap["title"] = m.Title
	m.fieldMap["title_sort"] = m.TitleSort
	m.fieldMap["original_title"] = m.OriginalTitle
	m.fieldMap["studio"] = m.Studio
	m.fieldMap["rating"] = m.Rating
	m.fieldMap["content_rating"] = m.ContentRating
	m.fieldMap["tagline"] = m.Tagline
	m.fieldMap["summary"] = m.Summary
	m.fieldMap["index"] = m.Index
	m.fieldMap["duration"] = m.Duration
	m.fieldMap["release_date"] = m.ReleaseDate
	m.fieldMap["created_at"] = m.CreatedAt
	m.fieldMap["updated_at"] = m.UpdatedAt
	m.fieldMap["user_thumb_url"] = m.UserThumbURL
}
|
||||
|
||||
// clone returns a copy of the builder whose connection pool is taken from db;
// the receiver is a value, so the caller's builder is untouched.
func (m metadataItem) clone(db *gorm.DB) metadataItem {
	m.metadataItemDo.ReplaceConnPool(db.Statement.ConnPool)
	return m
}

// replaceDB returns a copy of the builder bound to db.
func (m metadataItem) replaceDB(db *gorm.DB) metadataItem {
	m.metadataItemDo.ReplaceDB(db)
	return m
}
|
||||
|
||||
// metadataItemDo wraps gen.DO with MetadataItem-typed query methods.
type metadataItemDo struct{ gen.DO }

// IMetadataItemDo is the consumer-facing, type-safe query API for
// metadata_items. It mirrors gen.DO's builder surface with concrete
// models.MetadataItem types, plus the FilterWithNameAndRole method generated
// from the Querier dynamic-SQL template.
type IMetadataItemDo interface {
	gen.SubQuery
	Debug() IMetadataItemDo
	WithContext(ctx context.Context) IMetadataItemDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() IMetadataItemDo
	WriteDB() IMetadataItemDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) IMetadataItemDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) IMetadataItemDo
	Not(conds ...gen.Condition) IMetadataItemDo
	Or(conds ...gen.Condition) IMetadataItemDo
	Select(conds ...field.Expr) IMetadataItemDo
	Where(conds ...gen.Condition) IMetadataItemDo
	Order(conds ...field.Expr) IMetadataItemDo
	Distinct(cols ...field.Expr) IMetadataItemDo
	Omit(cols ...field.Expr) IMetadataItemDo
	Join(table schema.Tabler, on ...field.Expr) IMetadataItemDo
	LeftJoin(table schema.Tabler, on ...field.Expr) IMetadataItemDo
	RightJoin(table schema.Tabler, on ...field.Expr) IMetadataItemDo
	Group(cols ...field.Expr) IMetadataItemDo
	Having(conds ...gen.Condition) IMetadataItemDo
	Limit(limit int) IMetadataItemDo
	Offset(offset int) IMetadataItemDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) IMetadataItemDo
	Unscoped() IMetadataItemDo
	Create(values ...*models.MetadataItem) error
	CreateInBatches(values []*models.MetadataItem, batchSize int) error
	Save(values ...*models.MetadataItem) error
	First() (*models.MetadataItem, error)
	Take() (*models.MetadataItem, error)
	Last() (*models.MetadataItem, error)
	Find() ([]*models.MetadataItem, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MetadataItem, err error)
	FindInBatches(result *[]*models.MetadataItem, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.MetadataItem) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) IMetadataItemDo
	Assign(attrs ...field.AssignExpr) IMetadataItemDo
	Joins(fields ...field.RelationField) IMetadataItemDo
	Preload(fields ...field.RelationField) IMetadataItemDo
	FirstOrInit() (*models.MetadataItem, error)
	FirstOrCreate() (*models.MetadataItem, error)
	FindByPage(offset int, limit int) (result []*models.MetadataItem, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) IMetadataItemDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	FilterWithNameAndRole(name string, role string) (result []models.MetadataItem, err error)
}
|
||||
|
||||
// FilterWithNameAndRole implements the Querier dynamic-SQL template:
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
//
// NOTE(review): metadata_items has no `name` or `role` column — its columns
// are id, library_section_id, title, guid, … (see fillFieldMap above) — so
// this query is expected to fail at runtime. The shared Querier interface in
// gen/main.go appears to have been applied to every model; confirm and
// regenerate with per-model query interfaces instead.
func (m metadataItemDo) FilterWithNameAndRole(name string, role string) (result []models.MetadataItem, err error) {
	var params []interface{}

	var generateSQL strings.Builder
	params = append(params, name)
	generateSQL.WriteString("SELECT * FROM metadata_items WHERE name = ? ")
	if role != "" {
		// The role filter is appended only when a role is supplied.
		params = append(params, role)
		generateSQL.WriteString("AND role = ? ")
	}

	var executeSQL *gorm.DB
	executeSQL = m.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
	err = executeSQL.Error

	return
}
|
||||
|
||||
// The methods below are thin pass-throughs to the embedded gen.DO query
// builder; each forwards through withDO so the result keeps the concrete
// IMetadataItemDo type for fluent chaining.

func (m metadataItemDo) Debug() IMetadataItemDo {
	return m.withDO(m.DO.Debug())
}

func (m metadataItemDo) WithContext(ctx context.Context) IMetadataItemDo {
	return m.withDO(m.DO.WithContext(ctx))
}

// ReadDB routes subsequent statements to a dbresolver read replica.
func (m metadataItemDo) ReadDB() IMetadataItemDo {
	return m.Clauses(dbresolver.Read)
}

// WriteDB routes subsequent statements to the dbresolver write source.
func (m metadataItemDo) WriteDB() IMetadataItemDo {
	return m.Clauses(dbresolver.Write)
}

func (m metadataItemDo) Session(config *gorm.Session) IMetadataItemDo {
	return m.withDO(m.DO.Session(config))
}

func (m metadataItemDo) Clauses(conds ...clause.Expression) IMetadataItemDo {
	return m.withDO(m.DO.Clauses(conds...))
}

func (m metadataItemDo) Returning(value interface{}, columns ...string) IMetadataItemDo {
	return m.withDO(m.DO.Returning(value, columns...))
}

func (m metadataItemDo) Not(conds ...gen.Condition) IMetadataItemDo {
	return m.withDO(m.DO.Not(conds...))
}

func (m metadataItemDo) Or(conds ...gen.Condition) IMetadataItemDo {
	return m.withDO(m.DO.Or(conds...))
}

func (m metadataItemDo) Select(conds ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Select(conds...))
}

func (m metadataItemDo) Where(conds ...gen.Condition) IMetadataItemDo {
	return m.withDO(m.DO.Where(conds...))
}

func (m metadataItemDo) Order(conds ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Order(conds...))
}

func (m metadataItemDo) Distinct(cols ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Distinct(cols...))
}

func (m metadataItemDo) Omit(cols ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Omit(cols...))
}

func (m metadataItemDo) Join(table schema.Tabler, on ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Join(table, on...))
}

func (m metadataItemDo) LeftJoin(table schema.Tabler, on ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.LeftJoin(table, on...))
}

func (m metadataItemDo) RightJoin(table schema.Tabler, on ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.RightJoin(table, on...))
}

func (m metadataItemDo) Group(cols ...field.Expr) IMetadataItemDo {
	return m.withDO(m.DO.Group(cols...))
}

func (m metadataItemDo) Having(conds ...gen.Condition) IMetadataItemDo {
	return m.withDO(m.DO.Having(conds...))
}

func (m metadataItemDo) Limit(limit int) IMetadataItemDo {
	return m.withDO(m.DO.Limit(limit))
}

func (m metadataItemDo) Offset(offset int) IMetadataItemDo {
	return m.withDO(m.DO.Offset(offset))
}

func (m metadataItemDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IMetadataItemDo {
	return m.withDO(m.DO.Scopes(funcs...))
}

func (m metadataItemDo) Unscoped() IMetadataItemDo {
	return m.withDO(m.DO.Unscoped())
}
|
||||
|
||||
// Create inserts the given rows; a no-op (nil) when called with no values.
func (m metadataItemDo) Create(values ...*models.MetadataItem) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Create(values)
}

// CreateInBatches inserts values in chunks of batchSize.
func (m metadataItemDo) CreateInBatches(values []*models.MetadataItem, batchSize int) error {
	return m.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (m metadataItemDo) Save(values ...*models.MetadataItem) error {
	if len(values) == 0 {
		return nil
	}
	return m.DO.Save(values)
}

// First returns the first matching row ordered by primary key.
func (m metadataItemDo) First() (*models.MetadataItem, error) {
	if result, err := m.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.MetadataItem), nil
	}
}

// Take returns one matching row without a guaranteed order.
func (m metadataItemDo) Take() (*models.MetadataItem, error) {
	if result, err := m.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.MetadataItem), nil
	}
}

// Last returns the last matching row ordered by primary key.
func (m metadataItemDo) Last() (*models.MetadataItem, error) {
	if result, err := m.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.MetadataItem), nil
	}
}

// Find returns all matching rows.
// NOTE(review): the type assertion runs even when err != nil; gen.DO.Find is
// presumably guaranteed to return a (possibly empty) slice then — confirm.
func (m metadataItemDo) Find() ([]*models.MetadataItem, error) {
	result, err := m.DO.Find()
	return result.([]*models.MetadataItem), err
}

// FindInBatch collects all batches into one slice; fc is invoked per batch
// and the batch is appended to results even when fc returns an error.
func (m metadataItemDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.MetadataItem, err error) {
	buf := make([]*models.MetadataItem, 0, batchSize)
	err = m.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

// FindInBatches streams matching rows into result, batchSize at a time.
func (m metadataItemDo) FindInBatches(result *[]*models.MetadataItem, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return m.DO.FindInBatches(result, batchSize, fc)
}

func (m metadataItemDo) Attrs(attrs ...field.AssignExpr) IMetadataItemDo {
	return m.withDO(m.DO.Attrs(attrs...))
}

func (m metadataItemDo) Assign(attrs ...field.AssignExpr) IMetadataItemDo {
	return m.withDO(m.DO.Assign(attrs...))
}

// Joins applies each relation join in order; the receiver is a value copy,
// so mutation inside the loop stays local and a pointer to it is returned.
func (m metadataItemDo) Joins(fields ...field.RelationField) IMetadataItemDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Joins(_f))
	}
	return &m
}

// Preload registers each relation for eager loading.
func (m metadataItemDo) Preload(fields ...field.RelationField) IMetadataItemDo {
	for _, _f := range fields {
		m = *m.withDO(m.DO.Preload(_f))
	}
	return &m
}

func (m metadataItemDo) FirstOrInit() (*models.MetadataItem, error) {
	if result, err := m.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.MetadataItem), nil
	}
}

func (m metadataItemDo) FirstOrCreate() (*models.MetadataItem, error) {
	if result, err := m.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.MetadataItem), nil
	}
}

// FindByPage returns one page of rows plus the total row count. When the
// page comes back short (size < limit), the total is inferred as
// offset+size without issuing a second COUNT query.
func (m metadataItemDo) FindByPage(offset int, limit int) (result []*models.MetadataItem, count int64, err error) {
	result, err = m.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = m.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts all matching rows, then scans one page into result.
func (m metadataItemDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = m.Count()
	if err != nil {
		return
	}

	err = m.Offset(offset).Limit(limit).Scan(result)
	return
}

func (m metadataItemDo) Scan(result interface{}) (err error) {
	return m.DO.Scan(result)
}

// Delete removes the given rows. Note the parameter name shadows the
// `models` package inside the body.
func (m metadataItemDo) Delete(models ...*models.MetadataItem) (result gen.ResultInfo, err error) {
	return m.DO.Delete(models)
}

// withDO swaps the wrapped gen.DO in place and returns the receiver,
// preserving the concrete type through chained calls.
func (m *metadataItemDo) withDO(do gen.Dao) *metadataItemDo {
	m.DO = *do.(*gen.DO)
	return m
}
|
||||
410
query/path_downloads.gen.go
Normal file
410
query/path_downloads.gen.go
Normal file
@ -0,0 +1,410 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
// newPathDownload wires a pathDownload query builder to db, binding the
// models.PathDownload model and one typed expression per column.
func newPathDownload(db *gorm.DB, opts ...gen.DOOption) pathDownload {
	_pathDownload := pathDownload{}

	_pathDownload.pathDownloadDo.UseDB(db, opts...)
	_pathDownload.pathDownloadDo.UseModel(&models.PathDownload{})

	tableName := _pathDownload.pathDownloadDo.TableName()
	_pathDownload.ALL = field.NewAsterisk(tableName)
	_pathDownload.ID = field.NewInt64(tableName, "id")
	_pathDownload.Path = field.NewString(tableName, "path")
	_pathDownload.PathName = field.NewString(tableName, "path_name")

	_pathDownload.fillFieldMap()

	return _pathDownload
}
|
||||
|
||||
// pathDownload is the query-builder descriptor for the path_downloads table
// (columns: id, path, path_name).
type pathDownload struct {
	pathDownloadDo

	ALL      field.Asterisk
	ID       field.Int64
	Path     field.String
	PathName field.String

	fieldMap map[string]field.Expr
}
|
||||
|
||||
// Table rebinds the builder to newTableName and rebuilds its column
// expressions.
func (p pathDownload) Table(newTableName string) *pathDownload {
	p.pathDownloadDo.UseTable(newTableName)
	return p.updateTableName(newTableName)
}

// As aliases the table in generated SQL and rebinds the column expressions
// to that alias.
func (p pathDownload) As(alias string) *pathDownload {
	p.pathDownloadDo.DO = *(p.pathDownloadDo.As(alias).(*gen.DO))
	return p.updateTableName(alias)
}

// updateTableName re-creates every column expression against table and
// refreshes fieldMap.
func (p *pathDownload) updateTableName(table string) *pathDownload {
	p.ALL = field.NewAsterisk(table)
	p.ID = field.NewInt64(table, "id")
	p.Path = field.NewString(table, "path")
	p.PathName = field.NewString(table, "path_name")

	p.fillFieldMap()

	return p
}

// GetFieldByName resolves a column name to an orderable expression; the
// second result is false when the column is unknown or not orderable.
func (p *pathDownload) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := p.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap indexes the 3 column expressions by SQL column name.
func (p *pathDownload) fillFieldMap() {
	p.fieldMap = make(map[string]field.Expr, 3)
	p.fieldMap["id"] = p.ID
	p.fieldMap["path"] = p.Path
	p.fieldMap["path_name"] = p.PathName
}

// clone returns a copy of the builder using db's connection pool.
func (p pathDownload) clone(db *gorm.DB) pathDownload {
	p.pathDownloadDo.ReplaceConnPool(db.Statement.ConnPool)
	return p
}

// replaceDB returns a copy of the builder bound to db.
func (p pathDownload) replaceDB(db *gorm.DB) pathDownload {
	p.pathDownloadDo.ReplaceDB(db)
	return p
}
|
||||
|
||||
// pathDownloadDo wraps gen.DO with PathDownload-typed query methods.
type pathDownloadDo struct{ gen.DO }

// IPathDownloadDo is the consumer-facing, type-safe query API for
// path_downloads, mirroring gen.DO's builder surface with concrete
// models.PathDownload types plus the Querier-template method.
type IPathDownloadDo interface {
	gen.SubQuery
	Debug() IPathDownloadDo
	WithContext(ctx context.Context) IPathDownloadDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() IPathDownloadDo
	WriteDB() IPathDownloadDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) IPathDownloadDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) IPathDownloadDo
	Not(conds ...gen.Condition) IPathDownloadDo
	Or(conds ...gen.Condition) IPathDownloadDo
	Select(conds ...field.Expr) IPathDownloadDo
	Where(conds ...gen.Condition) IPathDownloadDo
	Order(conds ...field.Expr) IPathDownloadDo
	Distinct(cols ...field.Expr) IPathDownloadDo
	Omit(cols ...field.Expr) IPathDownloadDo
	Join(table schema.Tabler, on ...field.Expr) IPathDownloadDo
	LeftJoin(table schema.Tabler, on ...field.Expr) IPathDownloadDo
	RightJoin(table schema.Tabler, on ...field.Expr) IPathDownloadDo
	Group(cols ...field.Expr) IPathDownloadDo
	Having(conds ...gen.Condition) IPathDownloadDo
	Limit(limit int) IPathDownloadDo
	Offset(offset int) IPathDownloadDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) IPathDownloadDo
	Unscoped() IPathDownloadDo
	Create(values ...*models.PathDownload) error
	CreateInBatches(values []*models.PathDownload, batchSize int) error
	Save(values ...*models.PathDownload) error
	First() (*models.PathDownload, error)
	Take() (*models.PathDownload, error)
	Last() (*models.PathDownload, error)
	Find() ([]*models.PathDownload, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.PathDownload, err error)
	FindInBatches(result *[]*models.PathDownload, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.PathDownload) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) IPathDownloadDo
	Assign(attrs ...field.AssignExpr) IPathDownloadDo
	Joins(fields ...field.RelationField) IPathDownloadDo
	Preload(fields ...field.RelationField) IPathDownloadDo
	FirstOrInit() (*models.PathDownload, error)
	FirstOrCreate() (*models.PathDownload, error)
	FindByPage(offset int, limit int) (result []*models.PathDownload, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) IPathDownloadDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	FilterWithNameAndRole(name string, role string) (result []models.PathDownload, err error)
}
|
||||
|
||||
// FilterWithNameAndRole implements the Querier dynamic-SQL template:
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
//
// NOTE(review): path_downloads has no `name` or `role` column — its columns
// are id, path and path_name (see fillFieldMap above) — so this query is
// expected to fail at runtime. The shared Querier interface in gen/main.go
// appears to have been applied to every model; confirm and regenerate.
func (p pathDownloadDo) FilterWithNameAndRole(name string, role string) (result []models.PathDownload, err error) {
	var params []interface{}

	var generateSQL strings.Builder
	params = append(params, name)
	generateSQL.WriteString("SELECT * FROM path_downloads WHERE name = ? ")
	if role != "" {
		// The role filter is appended only when a role is supplied.
		params = append(params, role)
		generateSQL.WriteString("AND role = ? ")
	}

	var executeSQL *gorm.DB
	executeSQL = p.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
	err = executeSQL.Error

	return
}
|
||||
|
||||
// The methods below are thin pass-throughs to the embedded gen.DO query
// builder; each forwards through withDO so the result keeps the concrete
// IPathDownloadDo type for fluent chaining.

func (p pathDownloadDo) Debug() IPathDownloadDo {
	return p.withDO(p.DO.Debug())
}

func (p pathDownloadDo) WithContext(ctx context.Context) IPathDownloadDo {
	return p.withDO(p.DO.WithContext(ctx))
}

// ReadDB routes subsequent statements to a dbresolver read replica.
func (p pathDownloadDo) ReadDB() IPathDownloadDo {
	return p.Clauses(dbresolver.Read)
}

// WriteDB routes subsequent statements to the dbresolver write source.
func (p pathDownloadDo) WriteDB() IPathDownloadDo {
	return p.Clauses(dbresolver.Write)
}

func (p pathDownloadDo) Session(config *gorm.Session) IPathDownloadDo {
	return p.withDO(p.DO.Session(config))
}

func (p pathDownloadDo) Clauses(conds ...clause.Expression) IPathDownloadDo {
	return p.withDO(p.DO.Clauses(conds...))
}

func (p pathDownloadDo) Returning(value interface{}, columns ...string) IPathDownloadDo {
	return p.withDO(p.DO.Returning(value, columns...))
}

func (p pathDownloadDo) Not(conds ...gen.Condition) IPathDownloadDo {
	return p.withDO(p.DO.Not(conds...))
}

func (p pathDownloadDo) Or(conds ...gen.Condition) IPathDownloadDo {
	return p.withDO(p.DO.Or(conds...))
}

func (p pathDownloadDo) Select(conds ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Select(conds...))
}

func (p pathDownloadDo) Where(conds ...gen.Condition) IPathDownloadDo {
	return p.withDO(p.DO.Where(conds...))
}

func (p pathDownloadDo) Order(conds ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Order(conds...))
}

func (p pathDownloadDo) Distinct(cols ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Distinct(cols...))
}

func (p pathDownloadDo) Omit(cols ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Omit(cols...))
}

func (p pathDownloadDo) Join(table schema.Tabler, on ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Join(table, on...))
}

func (p pathDownloadDo) LeftJoin(table schema.Tabler, on ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.LeftJoin(table, on...))
}

func (p pathDownloadDo) RightJoin(table schema.Tabler, on ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.RightJoin(table, on...))
}

func (p pathDownloadDo) Group(cols ...field.Expr) IPathDownloadDo {
	return p.withDO(p.DO.Group(cols...))
}

func (p pathDownloadDo) Having(conds ...gen.Condition) IPathDownloadDo {
	return p.withDO(p.DO.Having(conds...))
}

func (p pathDownloadDo) Limit(limit int) IPathDownloadDo {
	return p.withDO(p.DO.Limit(limit))
}

func (p pathDownloadDo) Offset(offset int) IPathDownloadDo {
	return p.withDO(p.DO.Offset(offset))
}

func (p pathDownloadDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IPathDownloadDo {
	return p.withDO(p.DO.Scopes(funcs...))
}

func (p pathDownloadDo) Unscoped() IPathDownloadDo {
	return p.withDO(p.DO.Unscoped())
}

// Create inserts the given rows; a no-op (nil) when called with no values.
func (p pathDownloadDo) Create(values ...*models.PathDownload) error {
	if len(values) == 0 {
		return nil
	}
	return p.DO.Create(values)
}

// CreateInBatches inserts values in chunks of batchSize.
func (p pathDownloadDo) CreateInBatches(values []*models.PathDownload, batchSize int) error {
	return p.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (p pathDownloadDo) Save(values ...*models.PathDownload) error {
	if len(values) == 0 {
		return nil
	}
	return p.DO.Save(values)
}

// First returns the first matching row ordered by primary key.
func (p pathDownloadDo) First() (*models.PathDownload, error) {
	if result, err := p.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.PathDownload), nil
	}
}

// Take returns one matching row without a guaranteed order.
func (p pathDownloadDo) Take() (*models.PathDownload, error) {
	if result, err := p.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.PathDownload), nil
	}
}

// Last returns the last matching row ordered by primary key.
func (p pathDownloadDo) Last() (*models.PathDownload, error) {
	if result, err := p.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.PathDownload), nil
	}
}

// Find returns all matching rows.
// NOTE(review): the type assertion runs even when err != nil; gen.DO.Find is
// presumably guaranteed to return a (possibly empty) slice then — confirm.
func (p pathDownloadDo) Find() ([]*models.PathDownload, error) {
	result, err := p.DO.Find()
	return result.([]*models.PathDownload), err
}

// FindInBatch collects all batches into one slice; fc is invoked per batch
// and the batch is appended to results even when fc returns an error.
func (p pathDownloadDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.PathDownload, err error) {
	buf := make([]*models.PathDownload, 0, batchSize)
	err = p.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

// FindInBatches streams matching rows into result, batchSize at a time.
func (p pathDownloadDo) FindInBatches(result *[]*models.PathDownload, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return p.DO.FindInBatches(result, batchSize, fc)
}

func (p pathDownloadDo) Attrs(attrs ...field.AssignExpr) IPathDownloadDo {
	return p.withDO(p.DO.Attrs(attrs...))
}

func (p pathDownloadDo) Assign(attrs ...field.AssignExpr) IPathDownloadDo {
	return p.withDO(p.DO.Assign(attrs...))
}

// Joins applies each relation join in order; the receiver is a value copy,
// so mutation inside the loop stays local and a pointer to it is returned.
func (p pathDownloadDo) Joins(fields ...field.RelationField) IPathDownloadDo {
	for _, _f := range fields {
		p = *p.withDO(p.DO.Joins(_f))
	}
	return &p
}

// Preload registers each relation for eager loading.
func (p pathDownloadDo) Preload(fields ...field.RelationField) IPathDownloadDo {
	for _, _f := range fields {
		p = *p.withDO(p.DO.Preload(_f))
	}
	return &p
}

func (p pathDownloadDo) FirstOrInit() (*models.PathDownload, error) {
	if result, err := p.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.PathDownload), nil
	}
}

func (p pathDownloadDo) FirstOrCreate() (*models.PathDownload, error) {
	if result, err := p.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.PathDownload), nil
	}
}

// FindByPage returns one page of rows plus the total row count. When the
// page comes back short (size < limit), the total is inferred as
// offset+size without issuing a second COUNT query.
func (p pathDownloadDo) FindByPage(offset int, limit int) (result []*models.PathDownload, count int64, err error) {
	result, err = p.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = p.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts all matching rows, then scans one page into result.
func (p pathDownloadDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = p.Count()
	if err != nil {
		return
	}

	err = p.Offset(offset).Limit(limit).Scan(result)
	return
}

func (p pathDownloadDo) Scan(result interface{}) (err error) {
	return p.DO.Scan(result)
}

// Delete removes the given rows. Note the parameter name shadows the
// `models` package inside the body.
func (p pathDownloadDo) Delete(models ...*models.PathDownload) (result gen.ResultInfo, err error) {
	return p.DO.Delete(models)
}

// withDO swaps the wrapped gen.DO in place and returns the receiver,
// preserving the concrete type through chained calls.
func (p *pathDownloadDo) withDO(do gen.Dao) *pathDownloadDo {
	p.DO = *do.(*gen.DO)
	return p
}
|
||||
418
query/section_locations.gen.go
Normal file
418
query/section_locations.gen.go
Normal file
@ -0,0 +1,418 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
// newSectionLocation wires a sectionLocation query builder to db, binding the
// models.SectionLocation model and one typed expression per column.
func newSectionLocation(db *gorm.DB, opts ...gen.DOOption) sectionLocation {
	_sectionLocation := sectionLocation{}

	_sectionLocation.sectionLocationDo.UseDB(db, opts...)
	_sectionLocation.sectionLocationDo.UseModel(&models.SectionLocation{})

	tableName := _sectionLocation.sectionLocationDo.TableName()
	_sectionLocation.ALL = field.NewAsterisk(tableName)
	_sectionLocation.ID = field.NewInt64(tableName, "id")
	_sectionLocation.LibrarySectionID = field.NewInt64(tableName, "library_section_id")
	_sectionLocation.RootPath = field.NewString(tableName, "root_path")
	_sectionLocation.CreatedAt = field.NewString(tableName, "created_at")
	_sectionLocation.UpdatedAt = field.NewString(tableName, "updated_at")

	_sectionLocation.fillFieldMap()

	return _sectionLocation
}
|
||||
|
||||
// sectionLocation is the query-builder descriptor for the section_locations
// table (columns: id, library_section_id, root_path, created_at, updated_at).
type sectionLocation struct {
	sectionLocationDo

	ALL              field.Asterisk
	ID               field.Int64
	LibrarySectionID field.Int64
	RootPath         field.String
	CreatedAt        field.String // modelled as string, not a time type — TODO confirm intended
	UpdatedAt        field.String // modelled as string, not a time type — TODO confirm intended

	fieldMap map[string]field.Expr
}

// Table rebinds the builder to newTableName and rebuilds its column
// expressions.
func (s sectionLocation) Table(newTableName string) *sectionLocation {
	s.sectionLocationDo.UseTable(newTableName)
	return s.updateTableName(newTableName)
}

// As aliases the table in generated SQL and rebinds the column expressions
// to that alias.
func (s sectionLocation) As(alias string) *sectionLocation {
	s.sectionLocationDo.DO = *(s.sectionLocationDo.As(alias).(*gen.DO))
	return s.updateTableName(alias)
}

// updateTableName re-creates every column expression against table and
// refreshes fieldMap.
func (s *sectionLocation) updateTableName(table string) *sectionLocation {
	s.ALL = field.NewAsterisk(table)
	s.ID = field.NewInt64(table, "id")
	s.LibrarySectionID = field.NewInt64(table, "library_section_id")
	s.RootPath = field.NewString(table, "root_path")
	s.CreatedAt = field.NewString(table, "created_at")
	s.UpdatedAt = field.NewString(table, "updated_at")

	s.fillFieldMap()

	return s
}

// GetFieldByName resolves a column name to an orderable expression; the
// second result is false when the column is unknown or not orderable.
func (s *sectionLocation) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := s.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap indexes the 5 column expressions by SQL column name.
func (s *sectionLocation) fillFieldMap() {
	s.fieldMap = make(map[string]field.Expr, 5)
	s.fieldMap["id"] = s.ID
	s.fieldMap["library_section_id"] = s.LibrarySectionID
	s.fieldMap["root_path"] = s.RootPath
	s.fieldMap["created_at"] = s.CreatedAt
	s.fieldMap["updated_at"] = s.UpdatedAt
}

// clone returns a copy of the builder using db's connection pool.
func (s sectionLocation) clone(db *gorm.DB) sectionLocation {
	s.sectionLocationDo.ReplaceConnPool(db.Statement.ConnPool)
	return s
}

// replaceDB returns a copy of the builder bound to db.
func (s sectionLocation) replaceDB(db *gorm.DB) sectionLocation {
	s.sectionLocationDo.ReplaceDB(db)
	return s
}

// sectionLocationDo wraps gen.DO with SectionLocation-typed query methods.
type sectionLocationDo struct{ gen.DO }
|
||||
|
||||
type ISectionLocationDo interface {
|
||||
gen.SubQuery
|
||||
Debug() ISectionLocationDo
|
||||
WithContext(ctx context.Context) ISectionLocationDo
|
||||
WithResult(fc func(tx gen.Dao)) gen.ResultInfo
|
||||
ReplaceDB(db *gorm.DB)
|
||||
ReadDB() ISectionLocationDo
|
||||
WriteDB() ISectionLocationDo
|
||||
As(alias string) gen.Dao
|
||||
Session(config *gorm.Session) ISectionLocationDo
|
||||
Columns(cols ...field.Expr) gen.Columns
|
||||
Clauses(conds ...clause.Expression) ISectionLocationDo
|
||||
Not(conds ...gen.Condition) ISectionLocationDo
|
||||
Or(conds ...gen.Condition) ISectionLocationDo
|
||||
Select(conds ...field.Expr) ISectionLocationDo
|
||||
Where(conds ...gen.Condition) ISectionLocationDo
|
||||
Order(conds ...field.Expr) ISectionLocationDo
|
||||
Distinct(cols ...field.Expr) ISectionLocationDo
|
||||
Omit(cols ...field.Expr) ISectionLocationDo
|
||||
Join(table schema.Tabler, on ...field.Expr) ISectionLocationDo
|
||||
LeftJoin(table schema.Tabler, on ...field.Expr) ISectionLocationDo
|
||||
RightJoin(table schema.Tabler, on ...field.Expr) ISectionLocationDo
|
||||
Group(cols ...field.Expr) ISectionLocationDo
|
||||
Having(conds ...gen.Condition) ISectionLocationDo
|
||||
Limit(limit int) ISectionLocationDo
|
||||
Offset(offset int) ISectionLocationDo
|
||||
Count() (count int64, err error)
|
||||
Scopes(funcs ...func(gen.Dao) gen.Dao) ISectionLocationDo
|
||||
Unscoped() ISectionLocationDo
|
||||
Create(values ...*models.SectionLocation) error
|
||||
CreateInBatches(values []*models.SectionLocation, batchSize int) error
|
||||
Save(values ...*models.SectionLocation) error
|
||||
First() (*models.SectionLocation, error)
|
||||
Take() (*models.SectionLocation, error)
|
||||
Last() (*models.SectionLocation, error)
|
||||
Find() ([]*models.SectionLocation, error)
|
||||
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.SectionLocation, err error)
|
||||
FindInBatches(result *[]*models.SectionLocation, batchSize int, fc func(tx gen.Dao, batch int) error) error
|
||||
Pluck(column field.Expr, dest interface{}) error
|
||||
Delete(...*models.SectionLocation) (info gen.ResultInfo, err error)
|
||||
Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
Updates(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateFrom(q gen.SubQuery) gen.Dao
|
||||
Attrs(attrs ...field.AssignExpr) ISectionLocationDo
|
||||
Assign(attrs ...field.AssignExpr) ISectionLocationDo
|
||||
Joins(fields ...field.RelationField) ISectionLocationDo
|
||||
Preload(fields ...field.RelationField) ISectionLocationDo
|
||||
FirstOrInit() (*models.SectionLocation, error)
|
||||
FirstOrCreate() (*models.SectionLocation, error)
|
||||
FindByPage(offset int, limit int) (result []*models.SectionLocation, count int64, err error)
|
||||
ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
|
||||
Scan(result interface{}) (err error)
|
||||
Returning(value interface{}, columns ...string) ISectionLocationDo
|
||||
UnderlyingDB() *gorm.DB
|
||||
schema.Tabler
|
||||
|
||||
FilterWithNameAndRole(name string, role string) (result []models.SectionLocation, err error)
|
||||
}
|
||||
|
||||
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
|
||||
func (s sectionLocationDo) FilterWithNameAndRole(name string, role string) (result []models.SectionLocation, err error) {
|
||||
var params []interface{}
|
||||
|
||||
var generateSQL strings.Builder
|
||||
params = append(params, name)
|
||||
generateSQL.WriteString("SELECT * FROM section_locations WHERE name = ? ")
|
||||
if role != "" {
|
||||
params = append(params, role)
|
||||
generateSQL.WriteString("AND role = ? ")
|
||||
}
|
||||
|
||||
var executeSQL *gorm.DB
|
||||
executeSQL = s.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
|
||||
err = executeSQL.Error
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Debug() ISectionLocationDo {
|
||||
return s.withDO(s.DO.Debug())
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) WithContext(ctx context.Context) ISectionLocationDo {
|
||||
return s.withDO(s.DO.WithContext(ctx))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) ReadDB() ISectionLocationDo {
|
||||
return s.Clauses(dbresolver.Read)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) WriteDB() ISectionLocationDo {
|
||||
return s.Clauses(dbresolver.Write)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Session(config *gorm.Session) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Session(config))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Clauses(conds ...clause.Expression) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Clauses(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Returning(value interface{}, columns ...string) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Returning(value, columns...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Not(conds ...gen.Condition) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Not(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Or(conds ...gen.Condition) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Or(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Select(conds ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Select(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Where(conds ...gen.Condition) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Where(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Order(conds ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Order(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Distinct(cols ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Distinct(cols...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Omit(cols ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Omit(cols...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Join(table schema.Tabler, on ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Join(table, on...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) LeftJoin(table schema.Tabler, on ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.LeftJoin(table, on...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) RightJoin(table schema.Tabler, on ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.RightJoin(table, on...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Group(cols ...field.Expr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Group(cols...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Having(conds ...gen.Condition) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Having(conds...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Limit(limit int) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Limit(limit))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Offset(offset int) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Offset(offset))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Scopes(funcs...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Unscoped() ISectionLocationDo {
|
||||
return s.withDO(s.DO.Unscoped())
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Create(values ...*models.SectionLocation) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return s.DO.Create(values)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) CreateInBatches(values []*models.SectionLocation, batchSize int) error {
|
||||
return s.DO.CreateInBatches(values, batchSize)
|
||||
}
|
||||
|
||||
// Save : !!! underlying implementation is different with GORM
|
||||
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
|
||||
func (s sectionLocationDo) Save(values ...*models.SectionLocation) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return s.DO.Save(values)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) First() (*models.SectionLocation, error) {
|
||||
if result, err := s.DO.First(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.SectionLocation), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Take() (*models.SectionLocation, error) {
|
||||
if result, err := s.DO.Take(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.SectionLocation), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Last() (*models.SectionLocation, error) {
|
||||
if result, err := s.DO.Last(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.SectionLocation), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Find() ([]*models.SectionLocation, error) {
|
||||
result, err := s.DO.Find()
|
||||
return result.([]*models.SectionLocation), err
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.SectionLocation, err error) {
|
||||
buf := make([]*models.SectionLocation, 0, batchSize)
|
||||
err = s.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
|
||||
defer func() { results = append(results, buf...) }()
|
||||
return fc(tx, batch)
|
||||
})
|
||||
return results, err
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) FindInBatches(result *[]*models.SectionLocation, batchSize int, fc func(tx gen.Dao, batch int) error) error {
|
||||
return s.DO.FindInBatches(result, batchSize, fc)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Attrs(attrs ...field.AssignExpr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Attrs(attrs...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Assign(attrs ...field.AssignExpr) ISectionLocationDo {
|
||||
return s.withDO(s.DO.Assign(attrs...))
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Joins(fields ...field.RelationField) ISectionLocationDo {
|
||||
for _, _f := range fields {
|
||||
s = *s.withDO(s.DO.Joins(_f))
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Preload(fields ...field.RelationField) ISectionLocationDo {
|
||||
for _, _f := range fields {
|
||||
s = *s.withDO(s.DO.Preload(_f))
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) FirstOrInit() (*models.SectionLocation, error) {
|
||||
if result, err := s.DO.FirstOrInit(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.SectionLocation), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) FirstOrCreate() (*models.SectionLocation, error) {
|
||||
if result, err := s.DO.FirstOrCreate(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*models.SectionLocation), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) FindByPage(offset int, limit int) (result []*models.SectionLocation, count int64, err error) {
|
||||
result, err = s.Offset(offset).Limit(limit).Find()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if size := len(result); 0 < limit && 0 < size && size < limit {
|
||||
count = int64(size + offset)
|
||||
return
|
||||
}
|
||||
|
||||
count, err = s.Offset(-1).Limit(-1).Count()
|
||||
return
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
|
||||
count, err = s.Count()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = s.Offset(offset).Limit(limit).Scan(result)
|
||||
return
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Scan(result interface{}) (err error) {
|
||||
return s.DO.Scan(result)
|
||||
}
|
||||
|
||||
func (s sectionLocationDo) Delete(models ...*models.SectionLocation) (result gen.ResultInfo, err error) {
|
||||
return s.DO.Delete(models)
|
||||
}
|
||||
|
||||
func (s *sectionLocationDo) withDO(do gen.Dao) *sectionLocationDo {
|
||||
s.DO = *do.(*gen.DO)
|
||||
return s
|
||||
}
|
||||
414
query/taggings.gen.go
Normal file
414
query/taggings.gen.go
Normal file
@ -0,0 +1,414 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
// NOTE(review): the original commit carried this header three times in a row
// (scrape/merge artifact); collapsed to a single occurrence.

package query

import (
	"context"
	"strings"

	"gorm.io/gorm"
	"gorm.io/gorm/clause"
	"gorm.io/gorm/schema"

	"gorm.io/gen"
	"gorm.io/gen/field"

	"gorm.io/plugin/dbresolver"

	"app/shelfly/internal/models"
)

// newTagging builds the tagging query object bound to db, registering
// models.Tagging and its typed column expressions.
func newTagging(db *gorm.DB, opts ...gen.DOOption) tagging {
	_tagging := tagging{}

	_tagging.taggingDo.UseDB(db, opts...)
	_tagging.taggingDo.UseModel(&models.Tagging{})

	tableName := _tagging.taggingDo.TableName()
	_tagging.ALL = field.NewAsterisk(tableName)
	_tagging.ID = field.NewInt64(tableName, "id")
	_tagging.MetadataItemID = field.NewInt64(tableName, "metadata_item_id")
	_tagging.TagID = field.NewInt64(tableName, "tag_id")
	// NOTE(review): "index" is a reserved word in several SQL dialects;
	// gen quotes identifiers, but verify queries against this column on the
	// target database.
	_tagging.Index = field.NewInt64(tableName, "index")

	_tagging.fillFieldMap()

	return _tagging
}

// tagging exposes typed column expressions for the taggings table.
type tagging struct {
	taggingDo

	ALL            field.Asterisk
	ID             field.Int64
	MetadataItemID field.Int64
	TagID          field.Int64
	Index          field.Int64

	fieldMap map[string]field.Expr
}

// Table redirects the query object to newTableName and rebuilds its fields.
func (t tagging) Table(newTableName string) *tagging {
	t.taggingDo.UseTable(newTableName)
	return t.updateTableName(newTableName)
}

// As aliases the table and rebuilds the field expressions against the alias.
func (t tagging) As(alias string) *tagging {
	t.taggingDo.DO = *(t.taggingDo.As(alias).(*gen.DO))
	return t.updateTableName(alias)
}

// updateTableName re-creates every field expression for table and refreshes fieldMap.
func (t *tagging) updateTableName(table string) *tagging {
	t.ALL = field.NewAsterisk(table)
	t.ID = field.NewInt64(table, "id")
	t.MetadataItemID = field.NewInt64(table, "metadata_item_id")
	t.TagID = field.NewInt64(table, "tag_id")
	t.Index = field.NewInt64(table, "index")

	t.fillFieldMap()

	return t
}

// GetFieldByName returns the orderable expression for a column name, if known.
func (t *tagging) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := t.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap indexes the column expressions by their SQL column names.
func (t *tagging) fillFieldMap() {
	t.fieldMap = make(map[string]field.Expr, 4)
	t.fieldMap["id"] = t.ID
	t.fieldMap["metadata_item_id"] = t.MetadataItemID
	t.fieldMap["tag_id"] = t.TagID
	t.fieldMap["index"] = t.Index
}

func (t tagging) clone(db *gorm.DB) tagging {
	t.taggingDo.ReplaceConnPool(db.Statement.ConnPool)
	return t
}

func (t tagging) replaceDB(db *gorm.DB) tagging {
	t.taggingDo.ReplaceDB(db)
	return t
}

// taggingDo is the concrete DAO; it embeds gen.DO and narrows its chainable
// API to ITaggingDo.
type taggingDo struct{ gen.DO }

// ITaggingDo is the type-safe query interface for models.Tagging.
type ITaggingDo interface {
	gen.SubQuery
	Debug() ITaggingDo
	WithContext(ctx context.Context) ITaggingDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() ITaggingDo
	WriteDB() ITaggingDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) ITaggingDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) ITaggingDo
	Not(conds ...gen.Condition) ITaggingDo
	Or(conds ...gen.Condition) ITaggingDo
	Select(conds ...field.Expr) ITaggingDo
	Where(conds ...gen.Condition) ITaggingDo
	Order(conds ...field.Expr) ITaggingDo
	Distinct(cols ...field.Expr) ITaggingDo
	Omit(cols ...field.Expr) ITaggingDo
	Join(table schema.Tabler, on ...field.Expr) ITaggingDo
	LeftJoin(table schema.Tabler, on ...field.Expr) ITaggingDo
	RightJoin(table schema.Tabler, on ...field.Expr) ITaggingDo
	Group(cols ...field.Expr) ITaggingDo
	Having(conds ...gen.Condition) ITaggingDo
	Limit(limit int) ITaggingDo
	Offset(offset int) ITaggingDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) ITaggingDo
	Unscoped() ITaggingDo
	Create(values ...*models.Tagging) error
	CreateInBatches(values []*models.Tagging, batchSize int) error
	Save(values ...*models.Tagging) error
	First() (*models.Tagging, error)
	Take() (*models.Tagging, error)
	Last() (*models.Tagging, error)
	Find() ([]*models.Tagging, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Tagging, err error)
	FindInBatches(result *[]*models.Tagging, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.Tagging) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) ITaggingDo
	Assign(attrs ...field.AssignExpr) ITaggingDo
	Joins(fields ...field.RelationField) ITaggingDo
	Preload(fields ...field.RelationField) ITaggingDo
	FirstOrInit() (*models.Tagging, error)
	FirstOrCreate() (*models.Tagging, error)
	FindByPage(offset int, limit int) (result []*models.Tagging, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) ITaggingDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	FilterWithNameAndRole(name string, role string) (result []models.Tagging, err error)
}

// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
//
// NOTE(review): this method was generated from a Querier template applied to
// every model; taggings has no `name` or `role` column registered above, so
// executing this query will presumably fail at runtime — confirm the schema
// or remove this model from the ApplyInterface call in gen/main.go.
func (t taggingDo) FilterWithNameAndRole(name string, role string) (result []models.Tagging, err error) {
	var params []interface{}

	var generateSQL strings.Builder
	params = append(params, name)
	generateSQL.WriteString("SELECT * FROM taggings WHERE name = ? ")
	if role != "" {
		params = append(params, role)
		generateSQL.WriteString("AND role = ? ")
	}

	var executeSQL *gorm.DB
	executeSQL = t.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert
	err = executeSQL.Error

	return
}

func (t taggingDo) Debug() ITaggingDo {
	return t.withDO(t.DO.Debug())
}

func (t taggingDo) WithContext(ctx context.Context) ITaggingDo {
	return t.withDO(t.DO.WithContext(ctx))
}

func (t taggingDo) ReadDB() ITaggingDo {
	return t.Clauses(dbresolver.Read)
}

func (t taggingDo) WriteDB() ITaggingDo {
	return t.Clauses(dbresolver.Write)
}

func (t taggingDo) Session(config *gorm.Session) ITaggingDo {
	return t.withDO(t.DO.Session(config))
}

func (t taggingDo) Clauses(conds ...clause.Expression) ITaggingDo {
	return t.withDO(t.DO.Clauses(conds...))
}

func (t taggingDo) Returning(value interface{}, columns ...string) ITaggingDo {
	return t.withDO(t.DO.Returning(value, columns...))
}

func (t taggingDo) Not(conds ...gen.Condition) ITaggingDo {
	return t.withDO(t.DO.Not(conds...))
}

func (t taggingDo) Or(conds ...gen.Condition) ITaggingDo {
	return t.withDO(t.DO.Or(conds...))
}

func (t taggingDo) Select(conds ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Select(conds...))
}

func (t taggingDo) Where(conds ...gen.Condition) ITaggingDo {
	return t.withDO(t.DO.Where(conds...))
}

func (t taggingDo) Order(conds ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Order(conds...))
}

func (t taggingDo) Distinct(cols ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Distinct(cols...))
}

func (t taggingDo) Omit(cols ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Omit(cols...))
}

func (t taggingDo) Join(table schema.Tabler, on ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Join(table, on...))
}

func (t taggingDo) LeftJoin(table schema.Tabler, on ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.LeftJoin(table, on...))
}

func (t taggingDo) RightJoin(table schema.Tabler, on ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.RightJoin(table, on...))
}

func (t taggingDo) Group(cols ...field.Expr) ITaggingDo {
	return t.withDO(t.DO.Group(cols...))
}

func (t taggingDo) Having(conds ...gen.Condition) ITaggingDo {
	return t.withDO(t.DO.Having(conds...))
}

func (t taggingDo) Limit(limit int) ITaggingDo {
	return t.withDO(t.DO.Limit(limit))
}

func (t taggingDo) Offset(offset int) ITaggingDo {
	return t.withDO(t.DO.Offset(offset))
}

func (t taggingDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ITaggingDo {
	return t.withDO(t.DO.Scopes(funcs...))
}

func (t taggingDo) Unscoped() ITaggingDo {
	return t.withDO(t.DO.Unscoped())
}

func (t taggingDo) Create(values ...*models.Tagging) error {
	if len(values) == 0 {
		return nil
	}
	return t.DO.Create(values)
}

func (t taggingDo) CreateInBatches(values []*models.Tagging, batchSize int) error {
	return t.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (t taggingDo) Save(values ...*models.Tagging) error {
	if len(values) == 0 {
		return nil
	}
	return t.DO.Save(values)
}

func (t taggingDo) First() (*models.Tagging, error) {
	if result, err := t.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tagging), nil
	}
}

func (t taggingDo) Take() (*models.Tagging, error) {
	if result, err := t.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tagging), nil
	}
}

func (t taggingDo) Last() (*models.Tagging, error) {
	if result, err := t.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tagging), nil
	}
}

func (t taggingDo) Find() ([]*models.Tagging, error) {
	result, err := t.DO.Find()
	return result.([]*models.Tagging), err
}

// FindInBatch accumulates all batches into a single result slice while
// invoking fc once per batch.
func (t taggingDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Tagging, err error) {
	buf := make([]*models.Tagging, 0, batchSize)
	err = t.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

func (t taggingDo) FindInBatches(result *[]*models.Tagging, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return t.DO.FindInBatches(result, batchSize, fc)
}

func (t taggingDo) Attrs(attrs ...field.AssignExpr) ITaggingDo {
	return t.withDO(t.DO.Attrs(attrs...))
}

func (t taggingDo) Assign(attrs ...field.AssignExpr) ITaggingDo {
	return t.withDO(t.DO.Assign(attrs...))
}

func (t taggingDo) Joins(fields ...field.RelationField) ITaggingDo {
	for _, _f := range fields {
		t = *t.withDO(t.DO.Joins(_f))
	}
	return &t
}

func (t taggingDo) Preload(fields ...field.RelationField) ITaggingDo {
	for _, _f := range fields {
		t = *t.withDO(t.DO.Preload(_f))
	}
	return &t
}

func (t taggingDo) FirstOrInit() (*models.Tagging, error) {
	if result, err := t.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tagging), nil
	}
}

func (t taggingDo) FirstOrCreate() (*models.Tagging, error) {
	if result, err := t.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tagging), nil
	}
}

// FindByPage fetches one page and, when the page is short, derives the total
// count from offset+len to avoid an extra COUNT query.
func (t taggingDo) FindByPage(offset int, limit int) (result []*models.Tagging, count int64, err error) {
	result, err = t.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = t.Offset(-1).Limit(-1).Count()
	return
}

func (t taggingDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = t.Count()
	if err != nil {
		return
	}

	err = t.Offset(offset).Limit(limit).Scan(result)
	return
}

func (t taggingDo) Scan(result interface{}) (err error) {
	return t.DO.Scan(result)
}

func (t taggingDo) Delete(models ...*models.Tagging) (result gen.ResultInfo, err error) {
	return t.DO.Delete(models)
}

func (t *taggingDo) withDO(do gen.Dao) *taggingDo {
	t.DO = *do.(*gen.DO)
	return t
}
|
||||
410
query/tags.gen.go
Normal file
410
query/tags.gen.go
Normal file
@ -0,0 +1,410 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
// NOTE(review): the original commit carried this header three times in a row
// (scrape/merge artifact); collapsed to a single occurrence.

package query

import (
	"context"
	"strings"

	"gorm.io/gorm"
	"gorm.io/gorm/clause"
	"gorm.io/gorm/schema"

	"gorm.io/gen"
	"gorm.io/gen/field"

	"gorm.io/plugin/dbresolver"

	"app/shelfly/internal/models"
)

// newTag builds the tag query object bound to db, registering models.Tag and
// its typed column expressions.
func newTag(db *gorm.DB, opts ...gen.DOOption) tag {
	_tag := tag{}

	_tag.tagDo.UseDB(db, opts...)
	_tag.tagDo.UseModel(&models.Tag{})

	tableName := _tag.tagDo.TableName()
	_tag.ALL = field.NewAsterisk(tableName)
	_tag.ID = field.NewInt64(tableName, "id")
	_tag.Tag = field.NewString(tableName, "tag")
	_tag.TagType = field.NewInt64(tableName, "tag_type")

	_tag.fillFieldMap()

	return _tag
}

// tag exposes typed column expressions for the tags table.
type tag struct {
	tagDo

	ALL     field.Asterisk
	ID      field.Int64
	Tag     field.String
	TagType field.Int64

	fieldMap map[string]field.Expr
}

// Table redirects the query object to newTableName and rebuilds its fields.
func (t tag) Table(newTableName string) *tag {
	t.tagDo.UseTable(newTableName)
	return t.updateTableName(newTableName)
}

// As aliases the table and rebuilds the field expressions against the alias.
func (t tag) As(alias string) *tag {
	t.tagDo.DO = *(t.tagDo.As(alias).(*gen.DO))
	return t.updateTableName(alias)
}

// updateTableName re-creates every field expression for table and refreshes fieldMap.
func (t *tag) updateTableName(table string) *tag {
	t.ALL = field.NewAsterisk(table)
	t.ID = field.NewInt64(table, "id")
	t.Tag = field.NewString(table, "tag")
	t.TagType = field.NewInt64(table, "tag_type")

	t.fillFieldMap()

	return t
}

// GetFieldByName returns the orderable expression for a column name, if known.
func (t *tag) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := t.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap indexes the column expressions by their SQL column names.
func (t *tag) fillFieldMap() {
	t.fieldMap = make(map[string]field.Expr, 3)
	t.fieldMap["id"] = t.ID
	t.fieldMap["tag"] = t.Tag
	t.fieldMap["tag_type"] = t.TagType
}

func (t tag) clone(db *gorm.DB) tag {
	t.tagDo.ReplaceConnPool(db.Statement.ConnPool)
	return t
}

func (t tag) replaceDB(db *gorm.DB) tag {
	t.tagDo.ReplaceDB(db)
	return t
}

// tagDo is the concrete DAO; it embeds gen.DO and narrows its chainable API
// to ITagDo.
type tagDo struct{ gen.DO }
|
||||
|
||||
type ITagDo interface {
|
||||
gen.SubQuery
|
||||
Debug() ITagDo
|
||||
WithContext(ctx context.Context) ITagDo
|
||||
WithResult(fc func(tx gen.Dao)) gen.ResultInfo
|
||||
ReplaceDB(db *gorm.DB)
|
||||
ReadDB() ITagDo
|
||||
WriteDB() ITagDo
|
||||
As(alias string) gen.Dao
|
||||
Session(config *gorm.Session) ITagDo
|
||||
Columns(cols ...field.Expr) gen.Columns
|
||||
Clauses(conds ...clause.Expression) ITagDo
|
||||
Not(conds ...gen.Condition) ITagDo
|
||||
Or(conds ...gen.Condition) ITagDo
|
||||
Select(conds ...field.Expr) ITagDo
|
||||
Where(conds ...gen.Condition) ITagDo
|
||||
Order(conds ...field.Expr) ITagDo
|
||||
Distinct(cols ...field.Expr) ITagDo
|
||||
Omit(cols ...field.Expr) ITagDo
|
||||
Join(table schema.Tabler, on ...field.Expr) ITagDo
|
||||
LeftJoin(table schema.Tabler, on ...field.Expr) ITagDo
|
||||
RightJoin(table schema.Tabler, on ...field.Expr) ITagDo
|
||||
Group(cols ...field.Expr) ITagDo
|
||||
Having(conds ...gen.Condition) ITagDo
|
||||
Limit(limit int) ITagDo
|
||||
Offset(offset int) ITagDo
|
||||
Count() (count int64, err error)
|
||||
Scopes(funcs ...func(gen.Dao) gen.Dao) ITagDo
|
||||
Unscoped() ITagDo
|
||||
Create(values ...*models.Tag) error
|
||||
CreateInBatches(values []*models.Tag, batchSize int) error
|
||||
Save(values ...*models.Tag) error
|
||||
First() (*models.Tag, error)
|
||||
Take() (*models.Tag, error)
|
||||
Last() (*models.Tag, error)
|
||||
Find() ([]*models.Tag, error)
|
||||
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Tag, err error)
|
||||
FindInBatches(result *[]*models.Tag, batchSize int, fc func(tx gen.Dao, batch int) error) error
|
||||
Pluck(column field.Expr, dest interface{}) error
|
||||
Delete(...*models.Tag) (info gen.ResultInfo, err error)
|
||||
Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
Updates(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateFrom(q gen.SubQuery) gen.Dao
|
||||
Attrs(attrs ...field.AssignExpr) ITagDo
|
||||
Assign(attrs ...field.AssignExpr) ITagDo
|
||||
Joins(fields ...field.RelationField) ITagDo
|
||||
Preload(fields ...field.RelationField) ITagDo
|
||||
FirstOrInit() (*models.Tag, error)
|
||||
FirstOrCreate() (*models.Tag, error)
|
||||
FindByPage(offset int, limit int) (result []*models.Tag, count int64, err error)
|
||||
ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
|
||||
Scan(result interface{}) (err error)
|
||||
Returning(value interface{}, columns ...string) ITagDo
|
||||
UnderlyingDB() *gorm.DB
|
||||
schema.Tabler
|
||||
|
||||
FilterWithNameAndRole(name string, role string) (result []models.Tag, err error)
|
||||
}
|
||||
|
||||
// FilterWithNameAndRole executes the raw query described by the template
// comment below: filter by name, and additionally by role when role is
// non-empty. Parameters are bound with ? placeholders, so values are not
// interpolated into the SQL string.
//
// NOTE(review): the tags model above declares only id/tag/tag_type, so the
// "name" and "role" columns referenced here presumably do not exist on the
// tags table and this call would fail at runtime — the Querier interface in
// gen/main.go is applied to every model indiscriminately. Fix belongs in
// gen/main.go, not in this generated file. TODO confirm against the schema.
//
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
func (t tagDo) FilterWithNameAndRole(name string, role string) (result []models.Tag, err error) {
	var params []interface{}

	var generateSQL strings.Builder
	params = append(params, name)
	generateSQL.WriteString("SELECT * FROM tags WHERE name = ? ")
	if role != "" {
		params = append(params, role)
		generateSQL.WriteString("AND role = ? ")
	}

	var executeSQL *gorm.DB
	executeSQL = t.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert

	err = executeSQL.Error

	return
}
|
||||
|
||||
// The methods below are thin wrappers that delegate to the embedded gen.DO
// and re-wrap the result as the typed DAO so calls keep chaining on ITagDo.

func (t tagDo) Debug() ITagDo {
	return t.withDO(t.DO.Debug())
}

func (t tagDo) WithContext(ctx context.Context) ITagDo {
	return t.withDO(t.DO.WithContext(ctx))
}

// ReadDB routes the query to a read replica via the dbresolver plugin.
func (t tagDo) ReadDB() ITagDo {
	return t.Clauses(dbresolver.Read)
}

// WriteDB routes the query to the primary via the dbresolver plugin.
func (t tagDo) WriteDB() ITagDo {
	return t.Clauses(dbresolver.Write)
}

func (t tagDo) Session(config *gorm.Session) ITagDo {
	return t.withDO(t.DO.Session(config))
}

func (t tagDo) Clauses(conds ...clause.Expression) ITagDo {
	return t.withDO(t.DO.Clauses(conds...))
}

func (t tagDo) Returning(value interface{}, columns ...string) ITagDo {
	return t.withDO(t.DO.Returning(value, columns...))
}

func (t tagDo) Not(conds ...gen.Condition) ITagDo {
	return t.withDO(t.DO.Not(conds...))
}

func (t tagDo) Or(conds ...gen.Condition) ITagDo {
	return t.withDO(t.DO.Or(conds...))
}

func (t tagDo) Select(conds ...field.Expr) ITagDo {
	return t.withDO(t.DO.Select(conds...))
}

func (t tagDo) Where(conds ...gen.Condition) ITagDo {
	return t.withDO(t.DO.Where(conds...))
}

func (t tagDo) Order(conds ...field.Expr) ITagDo {
	return t.withDO(t.DO.Order(conds...))
}

func (t tagDo) Distinct(cols ...field.Expr) ITagDo {
	return t.withDO(t.DO.Distinct(cols...))
}

func (t tagDo) Omit(cols ...field.Expr) ITagDo {
	return t.withDO(t.DO.Omit(cols...))
}

func (t tagDo) Join(table schema.Tabler, on ...field.Expr) ITagDo {
	return t.withDO(t.DO.Join(table, on...))
}

func (t tagDo) LeftJoin(table schema.Tabler, on ...field.Expr) ITagDo {
	return t.withDO(t.DO.LeftJoin(table, on...))
}

func (t tagDo) RightJoin(table schema.Tabler, on ...field.Expr) ITagDo {
	return t.withDO(t.DO.RightJoin(table, on...))
}

func (t tagDo) Group(cols ...field.Expr) ITagDo {
	return t.withDO(t.DO.Group(cols...))
}

func (t tagDo) Having(conds ...gen.Condition) ITagDo {
	return t.withDO(t.DO.Having(conds...))
}

func (t tagDo) Limit(limit int) ITagDo {
	return t.withDO(t.DO.Limit(limit))
}

func (t tagDo) Offset(offset int) ITagDo {
	return t.withDO(t.DO.Offset(offset))
}

func (t tagDo) Scopes(funcs ...func(gen.Dao) gen.Dao) ITagDo {
	return t.withDO(t.DO.Scopes(funcs...))
}

func (t tagDo) Unscoped() ITagDo {
	return t.withDO(t.DO.Unscoped())
}

// Create inserts the given rows; a no-op when called with no values.
func (t tagDo) Create(values ...*models.Tag) error {
	if len(values) == 0 {
		return nil
	}
	return t.DO.Create(values)
}

func (t tagDo) CreateInBatches(values []*models.Tag, batchSize int) error {
	return t.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (t tagDo) Save(values ...*models.Tag) error {
	if len(values) == 0 {
		return nil
	}
	return t.DO.Save(values)
}

func (t tagDo) First() (*models.Tag, error) {
	if result, err := t.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tag), nil
	}
}

func (t tagDo) Take() (*models.Tag, error) {
	if result, err := t.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tag), nil
	}
}

func (t tagDo) Last() (*models.Tag, error) {
	if result, err := t.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tag), nil
	}
}

func (t tagDo) Find() ([]*models.Tag, error) {
	result, err := t.DO.Find()
	return result.([]*models.Tag), err
}

// FindInBatch streams rows in batches through fc and also accumulates every
// batch into results (appended via defer so rows are kept even when fc errors).
func (t tagDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.Tag, err error) {
	buf := make([]*models.Tag, 0, batchSize)
	err = t.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

func (t tagDo) FindInBatches(result *[]*models.Tag, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return t.DO.FindInBatches(result, batchSize, fc)
}

func (t tagDo) Attrs(attrs ...field.AssignExpr) ITagDo {
	return t.withDO(t.DO.Attrs(attrs...))
}

func (t tagDo) Assign(attrs ...field.AssignExpr) ITagDo {
	return t.withDO(t.DO.Assign(attrs...))
}

func (t tagDo) Joins(fields ...field.RelationField) ITagDo {
	for _, _f := range fields {
		t = *t.withDO(t.DO.Joins(_f))
	}
	return &t
}

func (t tagDo) Preload(fields ...field.RelationField) ITagDo {
	for _, _f := range fields {
		t = *t.withDO(t.DO.Preload(_f))
	}
	return &t
}

func (t tagDo) FirstOrInit() (*models.Tag, error) {
	if result, err := t.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tag), nil
	}
}

func (t tagDo) FirstOrCreate() (*models.Tag, error) {
	if result, err := t.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.Tag), nil
	}
}

// FindByPage returns one page plus the total row count. When the returned
// page is short (last page), the count is derived as offset+len without a
// second query; otherwise a separate COUNT is issued with paging cleared.
func (t tagDo) FindByPage(offset int, limit int) (result []*models.Tag, count int64, err error) {
	result, err = t.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = t.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts first, then scans one page into result.
func (t tagDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = t.Count()
	if err != nil {
		return
	}

	err = t.Offset(offset).Limit(limit).Scan(result)
	return
}

func (t tagDo) Scan(result interface{}) (err error) {
	return t.DO.Scan(result)
}

func (t tagDo) Delete(models ...*models.Tag) (result gen.ResultInfo, err error) {
	return t.DO.Delete(models)
}

// withDO swaps the wrapped gen.DO in place and returns the receiver so the
// chainable methods above can re-type their results.
func (t *tagDo) withDO(do gen.Dao) *tagDo {
	t.DO = *do.(*gen.DO)
	return t
}
|
||||
418
query/users.gen.go
Normal file
418
query/users.gen.go
Normal file
@ -0,0 +1,418 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"app/shelfly/internal/models"
|
||||
)
|
||||
|
||||
// newUser builds the user DAO bound to db: it registers the model, resolves
// the table name, and constructs one typed field expression per column.
func newUser(db *gorm.DB, opts ...gen.DOOption) user {
	_user := user{}

	_user.userDo.UseDB(db, opts...)
	_user.userDo.UseModel(&models.User{})

	tableName := _user.userDo.TableName()
	_user.ALL = field.NewAsterisk(tableName)
	_user.ID = field.NewUint(tableName, "id")
	_user.Username = field.NewString(tableName, "username")
	_user.Name = field.NewString(tableName, "name")
	_user.Email = field.NewString(tableName, "email")
	_user.Password = field.NewString(tableName, "password")

	_user.fillFieldMap()

	return _user
}
|
||||
|
||||
// user is the field-level query descriptor for models.User: it embeds the
// DAO (userDo) and exposes one typed field expression per database column.
// Code generated by gorm.io/gen — do not hand-edit; regenerate instead.
type user struct {
	userDo

	ALL      field.Asterisk
	ID       field.Uint
	Username field.String
	Name     field.String
	Email    field.String
	Password field.String

	// fieldMap indexes the expressions above by column name for GetFieldByName.
	fieldMap map[string]field.Expr
}

// Table switches the query to a different table name and rebuilds every
// field expression against it.
func (u user) Table(newTableName string) *user {
	u.userDo.UseTable(newTableName)
	return u.updateTableName(newTableName)
}

// As aliases the table for the current query and rebuilds every field
// expression against the alias.
func (u user) As(alias string) *user {
	u.userDo.DO = *(u.userDo.As(alias).(*gen.DO))
	return u.updateTableName(alias)
}

// updateTableName re-creates each field expression so generated SQL
// qualifies columns with the given table name (or alias).
func (u *user) updateTableName(table string) *user {
	u.ALL = field.NewAsterisk(table)
	u.ID = field.NewUint(table, "id")
	u.Username = field.NewString(table, "username")
	u.Name = field.NewString(table, "name")
	u.Email = field.NewString(table, "email")
	u.Password = field.NewString(table, "password")

	u.fillFieldMap()

	return u
}

// GetFieldByName looks a column up by its database name and returns it as
// an orderable expression; ok is false when the column is unknown or the
// expression is not orderable.
func (u *user) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
	_f, ok := u.fieldMap[fieldName]
	if !ok || _f == nil {
		return nil, false
	}
	_oe, ok := _f.(field.OrderExpr)
	return _oe, ok
}

// fillFieldMap (re)builds the column-name → expression index.
func (u *user) fillFieldMap() {
	u.fieldMap = make(map[string]field.Expr, 5)
	u.fieldMap["id"] = u.ID
	u.fieldMap["username"] = u.Username
	u.fieldMap["name"] = u.Name
	u.fieldMap["email"] = u.Email
	u.fieldMap["password"] = u.Password
}

// clone returns a copy of the DAO bound to db's connection pool.
func (u user) clone(db *gorm.DB) user {
	u.userDo.ReplaceConnPool(db.Statement.ConnPool)
	return u
}

// replaceDB returns a copy of the DAO bound to a different *gorm.DB.
func (u user) replaceDB(db *gorm.DB) user {
	u.userDo.ReplaceDB(db)
	return u
}
|
||||
|
||||
// userDo is the concrete DAO for the users table; it embeds gen.DO, which
// carries the underlying *gorm.DB statement builder.
type userDo struct{ gen.DO }

// IUserDo is the fluent, type-safe query interface for models.User.
// Builder methods (Where, Order, Limit, ...) return IUserDo so calls chain;
// finisher methods (First, Find, Count, Create, ...) execute the query.
type IUserDo interface {
	gen.SubQuery
	Debug() IUserDo
	WithContext(ctx context.Context) IUserDo
	WithResult(fc func(tx gen.Dao)) gen.ResultInfo
	ReplaceDB(db *gorm.DB)
	ReadDB() IUserDo
	WriteDB() IUserDo
	As(alias string) gen.Dao
	Session(config *gorm.Session) IUserDo
	Columns(cols ...field.Expr) gen.Columns
	Clauses(conds ...clause.Expression) IUserDo
	Not(conds ...gen.Condition) IUserDo
	Or(conds ...gen.Condition) IUserDo
	Select(conds ...field.Expr) IUserDo
	Where(conds ...gen.Condition) IUserDo
	Order(conds ...field.Expr) IUserDo
	Distinct(cols ...field.Expr) IUserDo
	Omit(cols ...field.Expr) IUserDo
	Join(table schema.Tabler, on ...field.Expr) IUserDo
	LeftJoin(table schema.Tabler, on ...field.Expr) IUserDo
	RightJoin(table schema.Tabler, on ...field.Expr) IUserDo
	Group(cols ...field.Expr) IUserDo
	Having(conds ...gen.Condition) IUserDo
	Limit(limit int) IUserDo
	Offset(offset int) IUserDo
	Count() (count int64, err error)
	Scopes(funcs ...func(gen.Dao) gen.Dao) IUserDo
	Unscoped() IUserDo
	Create(values ...*models.User) error
	CreateInBatches(values []*models.User, batchSize int) error
	Save(values ...*models.User) error
	First() (*models.User, error)
	Take() (*models.User, error)
	Last() (*models.User, error)
	Find() ([]*models.User, error)
	FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.User, err error)
	FindInBatches(result *[]*models.User, batchSize int, fc func(tx gen.Dao, batch int) error) error
	Pluck(column field.Expr, dest interface{}) error
	Delete(...*models.User) (info gen.ResultInfo, err error)
	Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	Updates(value interface{}) (info gen.ResultInfo, err error)
	UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
	UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
	UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
	UpdateFrom(q gen.SubQuery) gen.Dao
	Attrs(attrs ...field.AssignExpr) IUserDo
	Assign(attrs ...field.AssignExpr) IUserDo
	Joins(fields ...field.RelationField) IUserDo
	Preload(fields ...field.RelationField) IUserDo
	FirstOrInit() (*models.User, error)
	FirstOrCreate() (*models.User, error)
	FindByPage(offset int, limit int) (result []*models.User, count int64, err error)
	ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
	Scan(result interface{}) (err error)
	Returning(value interface{}, columns ...string) IUserDo
	UnderlyingDB() *gorm.DB
	schema.Tabler

	// Custom method generated from the Querier interface in gen/main.go.
	FilterWithNameAndRole(name string, role string) (result []models.User, err error)
}
|
||||
|
||||
// FilterWithNameAndRole executes the raw query described by the template
// comment below: filter by name, and additionally by role when role is
// non-empty. Parameters are bound with ? placeholders, so values are not
// interpolated into the SQL string.
//
// NOTE(review): the user model above declares id/username/name/email/password
// but no "role" column, so calling this with a non-empty role presumably
// fails at runtime. The Querier interface in gen/main.go is applied to every
// model; fix it there, not in this generated file. TODO confirm schema.
//
// SELECT * FROM @@table WHERE name = @name{{if role !=""}} AND role = @role{{end}}
func (u userDo) FilterWithNameAndRole(name string, role string) (result []models.User, err error) {
	var params []interface{}

	var generateSQL strings.Builder
	params = append(params, name)
	generateSQL.WriteString("SELECT * FROM users WHERE name = ? ")
	if role != "" {
		params = append(params, role)
		generateSQL.WriteString("AND role = ? ")
	}

	var executeSQL *gorm.DB
	executeSQL = u.UnderlyingDB().Raw(generateSQL.String(), params...).Find(&result) // ignore_security_alert

	err = executeSQL.Error

	return
}
|
||||
|
||||
// The methods below are thin wrappers that delegate to the embedded gen.DO
// and re-wrap the result as the typed DAO so calls keep chaining on IUserDo.

func (u userDo) Debug() IUserDo {
	return u.withDO(u.DO.Debug())
}

func (u userDo) WithContext(ctx context.Context) IUserDo {
	return u.withDO(u.DO.WithContext(ctx))
}

// ReadDB routes the query to a read replica via the dbresolver plugin.
func (u userDo) ReadDB() IUserDo {
	return u.Clauses(dbresolver.Read)
}

// WriteDB routes the query to the primary via the dbresolver plugin.
func (u userDo) WriteDB() IUserDo {
	return u.Clauses(dbresolver.Write)
}

func (u userDo) Session(config *gorm.Session) IUserDo {
	return u.withDO(u.DO.Session(config))
}

func (u userDo) Clauses(conds ...clause.Expression) IUserDo {
	return u.withDO(u.DO.Clauses(conds...))
}

func (u userDo) Returning(value interface{}, columns ...string) IUserDo {
	return u.withDO(u.DO.Returning(value, columns...))
}

func (u userDo) Not(conds ...gen.Condition) IUserDo {
	return u.withDO(u.DO.Not(conds...))
}

func (u userDo) Or(conds ...gen.Condition) IUserDo {
	return u.withDO(u.DO.Or(conds...))
}

func (u userDo) Select(conds ...field.Expr) IUserDo {
	return u.withDO(u.DO.Select(conds...))
}

func (u userDo) Where(conds ...gen.Condition) IUserDo {
	return u.withDO(u.DO.Where(conds...))
}

func (u userDo) Order(conds ...field.Expr) IUserDo {
	return u.withDO(u.DO.Order(conds...))
}

func (u userDo) Distinct(cols ...field.Expr) IUserDo {
	return u.withDO(u.DO.Distinct(cols...))
}

func (u userDo) Omit(cols ...field.Expr) IUserDo {
	return u.withDO(u.DO.Omit(cols...))
}

func (u userDo) Join(table schema.Tabler, on ...field.Expr) IUserDo {
	return u.withDO(u.DO.Join(table, on...))
}

func (u userDo) LeftJoin(table schema.Tabler, on ...field.Expr) IUserDo {
	return u.withDO(u.DO.LeftJoin(table, on...))
}

func (u userDo) RightJoin(table schema.Tabler, on ...field.Expr) IUserDo {
	return u.withDO(u.DO.RightJoin(table, on...))
}

func (u userDo) Group(cols ...field.Expr) IUserDo {
	return u.withDO(u.DO.Group(cols...))
}

func (u userDo) Having(conds ...gen.Condition) IUserDo {
	return u.withDO(u.DO.Having(conds...))
}

func (u userDo) Limit(limit int) IUserDo {
	return u.withDO(u.DO.Limit(limit))
}

func (u userDo) Offset(offset int) IUserDo {
	return u.withDO(u.DO.Offset(offset))
}

func (u userDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IUserDo {
	return u.withDO(u.DO.Scopes(funcs...))
}

func (u userDo) Unscoped() IUserDo {
	return u.withDO(u.DO.Unscoped())
}

// Create inserts the given rows; a no-op when called with no values.
func (u userDo) Create(values ...*models.User) error {
	if len(values) == 0 {
		return nil
	}
	return u.DO.Create(values)
}

func (u userDo) CreateInBatches(values []*models.User, batchSize int) error {
	return u.DO.CreateInBatches(values, batchSize)
}

// Save : !!! underlying implementation is different with GORM
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
func (u userDo) Save(values ...*models.User) error {
	if len(values) == 0 {
		return nil
	}
	return u.DO.Save(values)
}

func (u userDo) First() (*models.User, error) {
	if result, err := u.DO.First(); err != nil {
		return nil, err
	} else {
		return result.(*models.User), nil
	}
}

func (u userDo) Take() (*models.User, error) {
	if result, err := u.DO.Take(); err != nil {
		return nil, err
	} else {
		return result.(*models.User), nil
	}
}

func (u userDo) Last() (*models.User, error) {
	if result, err := u.DO.Last(); err != nil {
		return nil, err
	} else {
		return result.(*models.User), nil
	}
}

func (u userDo) Find() ([]*models.User, error) {
	result, err := u.DO.Find()
	return result.([]*models.User), err
}

// FindInBatch streams rows in batches through fc and also accumulates every
// batch into results (appended via defer so rows are kept even when fc errors).
func (u userDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*models.User, err error) {
	buf := make([]*models.User, 0, batchSize)
	err = u.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
		defer func() { results = append(results, buf...) }()
		return fc(tx, batch)
	})
	return results, err
}

func (u userDo) FindInBatches(result *[]*models.User, batchSize int, fc func(tx gen.Dao, batch int) error) error {
	return u.DO.FindInBatches(result, batchSize, fc)
}

func (u userDo) Attrs(attrs ...field.AssignExpr) IUserDo {
	return u.withDO(u.DO.Attrs(attrs...))
}

func (u userDo) Assign(attrs ...field.AssignExpr) IUserDo {
	return u.withDO(u.DO.Assign(attrs...))
}

func (u userDo) Joins(fields ...field.RelationField) IUserDo {
	for _, _f := range fields {
		u = *u.withDO(u.DO.Joins(_f))
	}
	return &u
}

func (u userDo) Preload(fields ...field.RelationField) IUserDo {
	for _, _f := range fields {
		u = *u.withDO(u.DO.Preload(_f))
	}
	return &u
}

func (u userDo) FirstOrInit() (*models.User, error) {
	if result, err := u.DO.FirstOrInit(); err != nil {
		return nil, err
	} else {
		return result.(*models.User), nil
	}
}

func (u userDo) FirstOrCreate() (*models.User, error) {
	if result, err := u.DO.FirstOrCreate(); err != nil {
		return nil, err
	} else {
		return result.(*models.User), nil
	}
}

// FindByPage returns one page plus the total row count. When the returned
// page is short (last page), the count is derived as offset+len without a
// second query; otherwise a separate COUNT is issued with paging cleared.
func (u userDo) FindByPage(offset int, limit int) (result []*models.User, count int64, err error) {
	result, err = u.Offset(offset).Limit(limit).Find()
	if err != nil {
		return
	}

	if size := len(result); 0 < limit && 0 < size && size < limit {
		count = int64(size + offset)
		return
	}

	count, err = u.Offset(-1).Limit(-1).Count()
	return
}

// ScanByPage counts first, then scans one page into result.
func (u userDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
	count, err = u.Count()
	if err != nil {
		return
	}

	err = u.Offset(offset).Limit(limit).Scan(result)
	return
}

func (u userDo) Scan(result interface{}) (err error) {
	return u.DO.Scan(result)
}

func (u userDo) Delete(models ...*models.User) (result gen.ResultInfo, err error) {
	return u.DO.Delete(models)
}

// withDO swaps the wrapped gen.DO in place and returns the receiver so the
// chainable methods above can re-type their results.
func (u *userDo) withDO(do gen.Dao) *userDo {
	u.DO = *do.(*gen.DO)
	return u
}
|
||||
124
renders/renders.go
Normal file
124
renders/renders.go
Normal file
@ -0,0 +1,124 @@
|
||||
package renders
|
||||
|
||||
import (
	"bytes"
	"encoding/json"
	"net/http"
	"text/template"

	"gorm.io/gorm"

	"app/shelfly/internal/models"
)
|
||||
|
||||
|
||||
func Login(w http.ResponseWriter, r *http.Request){
|
||||
renderTemplate(w,"login",nil)
|
||||
}
|
||||
func Dashboard(db *gorm.DB)http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
var paths []models.PathDownload
|
||||
if err := db.Find(&paths).Error; err != nil {
|
||||
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
data := map[string]interface{}{
|
||||
"paths": paths,
|
||||
}
|
||||
|
||||
renderTemplate(w,"dashboard",data)
|
||||
}
|
||||
|
||||
}
|
||||
func MenuLibrary(db *gorm.DB) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
var currentPaths []models.PathDownload
|
||||
if err := db.Find(¤tPaths).Error; err != nil {
|
||||
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Récupérer l'ancienne version des paths (si existante)
|
||||
lastUpdate := r.Header.Get("HX-Current-Paths")
|
||||
var previousPaths []models.PathDownload
|
||||
if lastUpdate != "" {
|
||||
json.Unmarshal([]byte(lastUpdate), &previousPaths)
|
||||
}
|
||||
|
||||
// Convertir en JSON pour comparaison
|
||||
currentJSON, _ := json.Marshal(currentPaths)
|
||||
previousJSON, _ := json.Marshal(previousPaths)
|
||||
|
||||
// Vérifier si les paths ont changé
|
||||
pathsChanged := string(currentJSON) != string(previousJSON)
|
||||
|
||||
data := map[string]interface{}{
|
||||
"paths": currentPaths,
|
||||
}
|
||||
|
||||
// Si HTMX request, ajouter les headers appropriés
|
||||
if r.Header.Get("HX-Request") == "true" {
|
||||
if pathsChanged {
|
||||
w.Header().Set("HX-Trigger", "pathsUpdated")
|
||||
}
|
||||
w.Header().Set("HX-Current-Paths", string(currentJSON))
|
||||
}
|
||||
|
||||
renderPartial(w, "dashboard", data)
|
||||
}
|
||||
|
||||
}
|
||||
func Settings(w http.ResponseWriter, r *http.Request) {
|
||||
data := map[string]interface{}{
|
||||
"Title": "Settings Page",
|
||||
"Options": []string{"Option 1", "Option 2", "Option 3"},
|
||||
}
|
||||
renderPartial(w, "settings", data)
|
||||
}
|
||||
|
||||
|
||||
func Library(w http.ResponseWriter, r *http.Request) {
|
||||
renderPartial(w, "library",nil)
|
||||
}
|
||||
|
||||
func GoDownload(w http.ResponseWriter, r *http.Request) {
|
||||
renderPartial(w, "godownloader_download",nil)
|
||||
}
|
||||
func GoDownloadLinkCollectors(w http.ResponseWriter, r *http.Request) {
|
||||
renderPartial(w, "godownloader_linkcollectors",nil)
|
||||
}
|
||||
func GoDownloadSetting(w http.ResponseWriter, r *http.Request) {
|
||||
renderPartial(w, "godownloader_setting",nil)
|
||||
}
|
||||
|
||||
|
||||
// renderTemplate renders the page template templ (together with the shared
// head template) into the response. The template executes into a buffer
// first: once bytes hit the ResponseWriter the 200 status is committed, so
// the original pattern of calling http.Error after a mid-render failure
// appended an error message to partially written HTML (and logged a
// "superfluous WriteHeader" warning). Buffering makes the 500 clean.
//
// NOTE(review): templates are parsed with text/template, which performs no
// HTML escaping — acceptable only if every template value is trusted; switch
// the import to html/template if user-controlled data is ever rendered.
func renderTemplate(w http.ResponseWriter, templ string, data map[string]interface{}) {
	t, err := template.ParseFiles(
		"./templates/head.pages.tmpl",      // shared include
		"./templates/"+templ+".pages.tmpl", // main page template
	)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	var buf bytes.Buffer
	// Execute the named page template explicitly (not the first parsed file).
	if err := t.ExecuteTemplate(&buf, templ+".pages.tmpl", data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Best-effort write; an error here means the client went away.
	_, _ = buf.WriteTo(w)
}
|
||||
|
||||
// renderPartial renders a single standalone partial template into the
// response. Like renderTemplate, it executes into a buffer first so that an
// execution error produces a clean 500 instead of http.Error output glued
// onto partially written HTML with an already-committed 200 status.
//
// NOTE(review): parsed with text/template (no HTML escaping) — see the note
// on renderTemplate.
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
	t, err := template.ParseFiles("./templates/" + templ + ".pages.tmpl")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	var buf bytes.Buffer
	if err := t.Execute(&buf, data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Best-effort write; an error here means the client went away.
	_, _ = buf.WriteTo(w)
}
|
||||
|
||||
BIN
shelfly_db.db
Normal file
BIN
shelfly_db.db
Normal file
Binary file not shown.
6
templates/assets/css/all.min.css
vendored
Normal file
6
templates/assets/css/all.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
4085
templates/assets/css/boostrap/bootstrap-grid.css
vendored
Normal file
4085
templates/assets/css/boostrap/bootstrap-grid.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/css/boostrap/bootstrap-grid.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap-grid.css.map
Normal file
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-grid.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-grid.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
1
templates/assets/css/boostrap/bootstrap-grid.min.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap-grid.min.css.map
Normal file
File diff suppressed because one or more lines are too long
4084
templates/assets/css/boostrap/bootstrap-grid.rtl.css
vendored
Normal file
4084
templates/assets/css/boostrap/bootstrap-grid.rtl.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/css/boostrap/bootstrap-grid.rtl.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap-grid.rtl.css.map
Normal file
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-grid.rtl.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-grid.rtl.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
597
templates/assets/css/boostrap/bootstrap-reboot.css
vendored
Normal file
597
templates/assets/css/boostrap/bootstrap-reboot.css
vendored
Normal file
@ -0,0 +1,597 @@
|
||||
/*!
|
||||
* Bootstrap Reboot v5.3.3 (https://getbootstrap.com/)
|
||||
* Copyright 2011-2024 The Bootstrap Authors
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
|
||||
*/
|
||||
:root,
|
||||
[data-bs-theme=light] {
|
||||
--bs-blue: #0d6efd;
|
||||
--bs-indigo: #6610f2;
|
||||
--bs-purple: #6f42c1;
|
||||
--bs-pink: #d63384;
|
||||
--bs-red: #dc3545;
|
||||
--bs-orange: #fd7e14;
|
||||
--bs-yellow: #ffc107;
|
||||
--bs-green: #198754;
|
||||
--bs-teal: #20c997;
|
||||
--bs-cyan: #0dcaf0;
|
||||
--bs-black: #000;
|
||||
--bs-white: #fff;
|
||||
--bs-gray: #6c757d;
|
||||
--bs-gray-dark: #343a40;
|
||||
--bs-gray-100: #f8f9fa;
|
||||
--bs-gray-200: #e9ecef;
|
||||
--bs-gray-300: #dee2e6;
|
||||
--bs-gray-400: #ced4da;
|
||||
--bs-gray-500: #adb5bd;
|
||||
--bs-gray-600: #6c757d;
|
||||
--bs-gray-700: #495057;
|
||||
--bs-gray-800: #343a40;
|
||||
--bs-gray-900: #212529;
|
||||
--bs-primary: #0d6efd;
|
||||
--bs-secondary: #6c757d;
|
||||
--bs-success: #198754;
|
||||
--bs-info: #0dcaf0;
|
||||
--bs-warning: #ffc107;
|
||||
--bs-danger: #dc3545;
|
||||
--bs-light: #f8f9fa;
|
||||
--bs-dark: #212529;
|
||||
--bs-primary-rgb: 13, 110, 253;
|
||||
--bs-secondary-rgb: 108, 117, 125;
|
||||
--bs-success-rgb: 25, 135, 84;
|
||||
--bs-info-rgb: 13, 202, 240;
|
||||
--bs-warning-rgb: 255, 193, 7;
|
||||
--bs-danger-rgb: 220, 53, 69;
|
||||
--bs-light-rgb: 248, 249, 250;
|
||||
--bs-dark-rgb: 33, 37, 41;
|
||||
--bs-primary-text-emphasis: #052c65;
|
||||
--bs-secondary-text-emphasis: #2b2f32;
|
||||
--bs-success-text-emphasis: #0a3622;
|
||||
--bs-info-text-emphasis: #055160;
|
||||
--bs-warning-text-emphasis: #664d03;
|
||||
--bs-danger-text-emphasis: #58151c;
|
||||
--bs-light-text-emphasis: #495057;
|
||||
--bs-dark-text-emphasis: #495057;
|
||||
--bs-primary-bg-subtle: #cfe2ff;
|
||||
--bs-secondary-bg-subtle: #e2e3e5;
|
||||
--bs-success-bg-subtle: #d1e7dd;
|
||||
--bs-info-bg-subtle: #cff4fc;
|
||||
--bs-warning-bg-subtle: #fff3cd;
|
||||
--bs-danger-bg-subtle: #f8d7da;
|
||||
--bs-light-bg-subtle: #fcfcfd;
|
||||
--bs-dark-bg-subtle: #ced4da;
|
||||
--bs-primary-border-subtle: #9ec5fe;
|
||||
--bs-secondary-border-subtle: #c4c8cb;
|
||||
--bs-success-border-subtle: #a3cfbb;
|
||||
--bs-info-border-subtle: #9eeaf9;
|
||||
--bs-warning-border-subtle: #ffe69c;
|
||||
--bs-danger-border-subtle: #f1aeb5;
|
||||
--bs-light-border-subtle: #e9ecef;
|
||||
--bs-dark-border-subtle: #adb5bd;
|
||||
--bs-white-rgb: 255, 255, 255;
|
||||
--bs-black-rgb: 0, 0, 0;
|
||||
--bs-font-sans-serif: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", "Noto Sans", "Liberation Sans", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
|
||||
--bs-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
--bs-gradient: linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));
|
||||
--bs-body-font-family: var(--bs-font-sans-serif);
|
||||
--bs-body-font-size: 1rem;
|
||||
--bs-body-font-weight: 400;
|
||||
--bs-body-line-height: 1.5;
|
||||
--bs-body-color: #212529;
|
||||
--bs-body-color-rgb: 33, 37, 41;
|
||||
--bs-body-bg: #fff;
|
||||
--bs-body-bg-rgb: 255, 255, 255;
|
||||
--bs-emphasis-color: #000;
|
||||
--bs-emphasis-color-rgb: 0, 0, 0;
|
||||
--bs-secondary-color: rgba(33, 37, 41, 0.75);
|
||||
--bs-secondary-color-rgb: 33, 37, 41;
|
||||
--bs-secondary-bg: #e9ecef;
|
||||
--bs-secondary-bg-rgb: 233, 236, 239;
|
||||
--bs-tertiary-color: rgba(33, 37, 41, 0.5);
|
||||
--bs-tertiary-color-rgb: 33, 37, 41;
|
||||
--bs-tertiary-bg: #f8f9fa;
|
||||
--bs-tertiary-bg-rgb: 248, 249, 250;
|
||||
--bs-heading-color: inherit;
|
||||
--bs-link-color: #0d6efd;
|
||||
--bs-link-color-rgb: 13, 110, 253;
|
||||
--bs-link-decoration: underline;
|
||||
--bs-link-hover-color: #0a58ca;
|
||||
--bs-link-hover-color-rgb: 10, 88, 202;
|
||||
--bs-code-color: #d63384;
|
||||
--bs-highlight-color: #212529;
|
||||
--bs-highlight-bg: #fff3cd;
|
||||
--bs-border-width: 1px;
|
||||
--bs-border-style: solid;
|
||||
--bs-border-color: #dee2e6;
|
||||
--bs-border-color-translucent: rgba(0, 0, 0, 0.175);
|
||||
--bs-border-radius: 0.375rem;
|
||||
--bs-border-radius-sm: 0.25rem;
|
||||
--bs-border-radius-lg: 0.5rem;
|
||||
--bs-border-radius-xl: 1rem;
|
||||
--bs-border-radius-xxl: 2rem;
|
||||
--bs-border-radius-2xl: var(--bs-border-radius-xxl);
|
||||
--bs-border-radius-pill: 50rem;
|
||||
--bs-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);
|
||||
--bs-box-shadow-sm: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075);
|
||||
--bs-box-shadow-lg: 0 1rem 3rem rgba(0, 0, 0, 0.175);
|
||||
--bs-box-shadow-inset: inset 0 1px 2px rgba(0, 0, 0, 0.075);
|
||||
--bs-focus-ring-width: 0.25rem;
|
||||
--bs-focus-ring-opacity: 0.25;
|
||||
--bs-focus-ring-color: rgba(13, 110, 253, 0.25);
|
||||
--bs-form-valid-color: #198754;
|
||||
--bs-form-valid-border-color: #198754;
|
||||
--bs-form-invalid-color: #dc3545;
|
||||
--bs-form-invalid-border-color: #dc3545;
|
||||
}
|
||||
|
||||
[data-bs-theme=dark] {
|
||||
color-scheme: dark;
|
||||
--bs-body-color: #dee2e6;
|
||||
--bs-body-color-rgb: 222, 226, 230;
|
||||
--bs-body-bg: #212529;
|
||||
--bs-body-bg-rgb: 33, 37, 41;
|
||||
--bs-emphasis-color: #fff;
|
||||
--bs-emphasis-color-rgb: 255, 255, 255;
|
||||
--bs-secondary-color: rgba(222, 226, 230, 0.75);
|
||||
--bs-secondary-color-rgb: 222, 226, 230;
|
||||
--bs-secondary-bg: #343a40;
|
||||
--bs-secondary-bg-rgb: 52, 58, 64;
|
||||
--bs-tertiary-color: rgba(222, 226, 230, 0.5);
|
||||
--bs-tertiary-color-rgb: 222, 226, 230;
|
||||
--bs-tertiary-bg: #2b3035;
|
||||
--bs-tertiary-bg-rgb: 43, 48, 53;
|
||||
--bs-primary-text-emphasis: #6ea8fe;
|
||||
--bs-secondary-text-emphasis: #a7acb1;
|
||||
--bs-success-text-emphasis: #75b798;
|
||||
--bs-info-text-emphasis: #6edff6;
|
||||
--bs-warning-text-emphasis: #ffda6a;
|
||||
--bs-danger-text-emphasis: #ea868f;
|
||||
--bs-light-text-emphasis: #f8f9fa;
|
||||
--bs-dark-text-emphasis: #dee2e6;
|
||||
--bs-primary-bg-subtle: #031633;
|
||||
--bs-secondary-bg-subtle: #161719;
|
||||
--bs-success-bg-subtle: #051b11;
|
||||
--bs-info-bg-subtle: #032830;
|
||||
--bs-warning-bg-subtle: #332701;
|
||||
--bs-danger-bg-subtle: #2c0b0e;
|
||||
--bs-light-bg-subtle: #343a40;
|
||||
--bs-dark-bg-subtle: #1a1d20;
|
||||
--bs-primary-border-subtle: #084298;
|
||||
--bs-secondary-border-subtle: #41464b;
|
||||
--bs-success-border-subtle: #0f5132;
|
||||
--bs-info-border-subtle: #087990;
|
||||
--bs-warning-border-subtle: #997404;
|
||||
--bs-danger-border-subtle: #842029;
|
||||
--bs-light-border-subtle: #495057;
|
||||
--bs-dark-border-subtle: #343a40;
|
||||
--bs-heading-color: inherit;
|
||||
--bs-link-color: #6ea8fe;
|
||||
--bs-link-hover-color: #8bb9fe;
|
||||
--bs-link-color-rgb: 110, 168, 254;
|
||||
--bs-link-hover-color-rgb: 139, 185, 254;
|
||||
--bs-code-color: #e685b5;
|
||||
--bs-highlight-color: #dee2e6;
|
||||
--bs-highlight-bg: #664d03;
|
||||
--bs-border-color: #495057;
|
||||
--bs-border-color-translucent: rgba(255, 255, 255, 0.15);
|
||||
--bs-form-valid-color: #75b798;
|
||||
--bs-form-valid-border-color: #75b798;
|
||||
--bs-form-invalid-color: #ea868f;
|
||||
--bs-form-invalid-border-color: #ea868f;
|
||||
}
|
||||
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: no-preference) {
|
||||
:root {
|
||||
scroll-behavior: smooth;
|
||||
}
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
font-family: var(--bs-body-font-family);
|
||||
font-size: var(--bs-body-font-size);
|
||||
font-weight: var(--bs-body-font-weight);
|
||||
line-height: var(--bs-body-line-height);
|
||||
color: var(--bs-body-color);
|
||||
text-align: var(--bs-body-text-align);
|
||||
background-color: var(--bs-body-bg);
|
||||
-webkit-text-size-adjust: 100%;
|
||||
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
|
||||
}
|
||||
|
||||
hr {
|
||||
margin: 1rem 0;
|
||||
color: inherit;
|
||||
border: 0;
|
||||
border-top: var(--bs-border-width) solid;
|
||||
opacity: 0.25;
|
||||
}
|
||||
|
||||
h6, h5, h4, h3, h2, h1 {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0.5rem;
|
||||
font-weight: 500;
|
||||
line-height: 1.2;
|
||||
color: var(--bs-heading-color);
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: calc(1.375rem + 1.5vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h1 {
|
||||
font-size: 2.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: calc(1.325rem + 0.9vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h2 {
|
||||
font-size: 2rem;
|
||||
}
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: calc(1.3rem + 0.6vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h3 {
|
||||
font-size: 1.75rem;
|
||||
}
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: calc(1.275rem + 0.3vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h4 {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
abbr[title] {
|
||||
-webkit-text-decoration: underline dotted;
|
||||
text-decoration: underline dotted;
|
||||
cursor: help;
|
||||
-webkit-text-decoration-skip-ink: none;
|
||||
text-decoration-skip-ink: none;
|
||||
}
|
||||
|
||||
address {
|
||||
margin-bottom: 1rem;
|
||||
font-style: normal;
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
ol,
|
||||
ul {
|
||||
padding-left: 2rem;
|
||||
}
|
||||
|
||||
ol,
|
||||
ul,
|
||||
dl {
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
ol ol,
|
||||
ul ul,
|
||||
ol ul,
|
||||
ul ol {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
dt {
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
dd {
|
||||
margin-bottom: 0.5rem;
|
||||
margin-left: 0;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
margin: 0 0 1rem;
|
||||
}
|
||||
|
||||
b,
|
||||
strong {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
small {
|
||||
font-size: 0.875em;
|
||||
}
|
||||
|
||||
mark {
|
||||
padding: 0.1875em;
|
||||
color: var(--bs-highlight-color);
|
||||
background-color: var(--bs-highlight-bg);
|
||||
}
|
||||
|
||||
sub,
|
||||
sup {
|
||||
position: relative;
|
||||
font-size: 0.75em;
|
||||
line-height: 0;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
sub {
|
||||
bottom: -0.25em;
|
||||
}
|
||||
|
||||
sup {
|
||||
top: -0.5em;
|
||||
}
|
||||
|
||||
a {
|
||||
color: rgba(var(--bs-link-color-rgb), var(--bs-link-opacity, 1));
|
||||
text-decoration: underline;
|
||||
}
|
||||
a:hover {
|
||||
--bs-link-color-rgb: var(--bs-link-hover-color-rgb);
|
||||
}
|
||||
|
||||
a:not([href]):not([class]), a:not([href]):not([class]):hover {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
pre,
|
||||
code,
|
||||
kbd,
|
||||
samp {
|
||||
font-family: var(--bs-font-monospace);
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
pre {
|
||||
display: block;
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
overflow: auto;
|
||||
font-size: 0.875em;
|
||||
}
|
||||
pre code {
|
||||
font-size: inherit;
|
||||
color: inherit;
|
||||
word-break: normal;
|
||||
}
|
||||
|
||||
code {
|
||||
font-size: 0.875em;
|
||||
color: var(--bs-code-color);
|
||||
word-wrap: break-word;
|
||||
}
|
||||
a > code {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
kbd {
|
||||
padding: 0.1875rem 0.375rem;
|
||||
font-size: 0.875em;
|
||||
color: var(--bs-body-bg);
|
||||
background-color: var(--bs-body-color);
|
||||
border-radius: 0.25rem;
|
||||
}
|
||||
kbd kbd {
|
||||
padding: 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
figure {
|
||||
margin: 0 0 1rem;
|
||||
}
|
||||
|
||||
img,
|
||||
svg {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
table {
|
||||
caption-side: bottom;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
caption {
|
||||
padding-top: 0.5rem;
|
||||
padding-bottom: 0.5rem;
|
||||
color: var(--bs-secondary-color);
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: inherit;
|
||||
text-align: -webkit-match-parent;
|
||||
}
|
||||
|
||||
thead,
|
||||
tbody,
|
||||
tfoot,
|
||||
tr,
|
||||
td,
|
||||
th {
|
||||
border-color: inherit;
|
||||
border-style: solid;
|
||||
border-width: 0;
|
||||
}
|
||||
|
||||
label {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
button {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
button:focus:not(:focus-visible) {
|
||||
outline: 0;
|
||||
}
|
||||
|
||||
input,
|
||||
button,
|
||||
select,
|
||||
optgroup,
|
||||
textarea {
|
||||
margin: 0;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
button,
|
||||
select {
|
||||
text-transform: none;
|
||||
}
|
||||
|
||||
[role=button] {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
select {
|
||||
word-wrap: normal;
|
||||
}
|
||||
select:disabled {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
[list]:not([type=date]):not([type=datetime-local]):not([type=month]):not([type=week]):not([type=time])::-webkit-calendar-picker-indicator {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
button,
|
||||
[type=button],
|
||||
[type=reset],
|
||||
[type=submit] {
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
button:not(:disabled),
|
||||
[type=button]:not(:disabled),
|
||||
[type=reset]:not(:disabled),
|
||||
[type=submit]:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
::-moz-focus-inner {
|
||||
padding: 0;
|
||||
border-style: none;
|
||||
}
|
||||
|
||||
textarea {
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
fieldset {
|
||||
min-width: 0;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
legend {
|
||||
float: left;
|
||||
width: 100%;
|
||||
padding: 0;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: calc(1.275rem + 0.3vw);
|
||||
line-height: inherit;
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
legend {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
}
|
||||
legend + * {
|
||||
clear: left;
|
||||
}
|
||||
|
||||
::-webkit-datetime-edit-fields-wrapper,
|
||||
::-webkit-datetime-edit-text,
|
||||
::-webkit-datetime-edit-minute,
|
||||
::-webkit-datetime-edit-hour-field,
|
||||
::-webkit-datetime-edit-day-field,
|
||||
::-webkit-datetime-edit-month-field,
|
||||
::-webkit-datetime-edit-year-field {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
::-webkit-inner-spin-button {
|
||||
height: auto;
|
||||
}
|
||||
|
||||
[type=search] {
|
||||
-webkit-appearance: textfield;
|
||||
outline-offset: -2px;
|
||||
}
|
||||
|
||||
/* rtl:raw:
|
||||
[type="tel"],
|
||||
[type="url"],
|
||||
[type="email"],
|
||||
[type="number"] {
|
||||
direction: ltr;
|
||||
}
|
||||
*/
|
||||
::-webkit-search-decoration {
|
||||
-webkit-appearance: none;
|
||||
}
|
||||
|
||||
::-webkit-color-swatch-wrapper {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
::-webkit-file-upload-button {
|
||||
font: inherit;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
::file-selector-button {
|
||||
font: inherit;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
output {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
iframe {
|
||||
border: 0;
|
||||
}
|
||||
|
||||
summary {
|
||||
display: list-item;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
progress {
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
[hidden] {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
/*# sourceMappingURL=bootstrap-reboot.css.map */
|
||||
1
templates/assets/css/boostrap/bootstrap-reboot.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap-reboot.css.map
Normal file
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-reboot.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-reboot.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
594
templates/assets/css/boostrap/bootstrap-reboot.rtl.css
vendored
Normal file
594
templates/assets/css/boostrap/bootstrap-reboot.rtl.css
vendored
Normal file
@ -0,0 +1,594 @@
|
||||
/*!
|
||||
* Bootstrap Reboot v5.3.3 (https://getbootstrap.com/)
|
||||
* Copyright 2011-2024 The Bootstrap Authors
|
||||
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
|
||||
*/
|
||||
:root,
|
||||
[data-bs-theme=light] {
|
||||
--bs-blue: #0d6efd;
|
||||
--bs-indigo: #6610f2;
|
||||
--bs-purple: #6f42c1;
|
||||
--bs-pink: #d63384;
|
||||
--bs-red: #dc3545;
|
||||
--bs-orange: #fd7e14;
|
||||
--bs-yellow: #ffc107;
|
||||
--bs-green: #198754;
|
||||
--bs-teal: #20c997;
|
||||
--bs-cyan: #0dcaf0;
|
||||
--bs-black: #000;
|
||||
--bs-white: #fff;
|
||||
--bs-gray: #6c757d;
|
||||
--bs-gray-dark: #343a40;
|
||||
--bs-gray-100: #f8f9fa;
|
||||
--bs-gray-200: #e9ecef;
|
||||
--bs-gray-300: #dee2e6;
|
||||
--bs-gray-400: #ced4da;
|
||||
--bs-gray-500: #adb5bd;
|
||||
--bs-gray-600: #6c757d;
|
||||
--bs-gray-700: #495057;
|
||||
--bs-gray-800: #343a40;
|
||||
--bs-gray-900: #212529;
|
||||
--bs-primary: #0d6efd;
|
||||
--bs-secondary: #6c757d;
|
||||
--bs-success: #198754;
|
||||
--bs-info: #0dcaf0;
|
||||
--bs-warning: #ffc107;
|
||||
--bs-danger: #dc3545;
|
||||
--bs-light: #f8f9fa;
|
||||
--bs-dark: #212529;
|
||||
--bs-primary-rgb: 13, 110, 253;
|
||||
--bs-secondary-rgb: 108, 117, 125;
|
||||
--bs-success-rgb: 25, 135, 84;
|
||||
--bs-info-rgb: 13, 202, 240;
|
||||
--bs-warning-rgb: 255, 193, 7;
|
||||
--bs-danger-rgb: 220, 53, 69;
|
||||
--bs-light-rgb: 248, 249, 250;
|
||||
--bs-dark-rgb: 33, 37, 41;
|
||||
--bs-primary-text-emphasis: #052c65;
|
||||
--bs-secondary-text-emphasis: #2b2f32;
|
||||
--bs-success-text-emphasis: #0a3622;
|
||||
--bs-info-text-emphasis: #055160;
|
||||
--bs-warning-text-emphasis: #664d03;
|
||||
--bs-danger-text-emphasis: #58151c;
|
||||
--bs-light-text-emphasis: #495057;
|
||||
--bs-dark-text-emphasis: #495057;
|
||||
--bs-primary-bg-subtle: #cfe2ff;
|
||||
--bs-secondary-bg-subtle: #e2e3e5;
|
||||
--bs-success-bg-subtle: #d1e7dd;
|
||||
--bs-info-bg-subtle: #cff4fc;
|
||||
--bs-warning-bg-subtle: #fff3cd;
|
||||
--bs-danger-bg-subtle: #f8d7da;
|
||||
--bs-light-bg-subtle: #fcfcfd;
|
||||
--bs-dark-bg-subtle: #ced4da;
|
||||
--bs-primary-border-subtle: #9ec5fe;
|
||||
--bs-secondary-border-subtle: #c4c8cb;
|
||||
--bs-success-border-subtle: #a3cfbb;
|
||||
--bs-info-border-subtle: #9eeaf9;
|
||||
--bs-warning-border-subtle: #ffe69c;
|
||||
--bs-danger-border-subtle: #f1aeb5;
|
||||
--bs-light-border-subtle: #e9ecef;
|
||||
--bs-dark-border-subtle: #adb5bd;
|
||||
--bs-white-rgb: 255, 255, 255;
|
||||
--bs-black-rgb: 0, 0, 0;
|
||||
--bs-font-sans-serif: system-ui, -apple-system, "Segoe UI", Roboto, "Helvetica Neue", "Noto Sans", "Liberation Sans", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
|
||||
--bs-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
|
||||
--bs-gradient: linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));
|
||||
--bs-body-font-family: var(--bs-font-sans-serif);
|
||||
--bs-body-font-size: 1rem;
|
||||
--bs-body-font-weight: 400;
|
||||
--bs-body-line-height: 1.5;
|
||||
--bs-body-color: #212529;
|
||||
--bs-body-color-rgb: 33, 37, 41;
|
||||
--bs-body-bg: #fff;
|
||||
--bs-body-bg-rgb: 255, 255, 255;
|
||||
--bs-emphasis-color: #000;
|
||||
--bs-emphasis-color-rgb: 0, 0, 0;
|
||||
--bs-secondary-color: rgba(33, 37, 41, 0.75);
|
||||
--bs-secondary-color-rgb: 33, 37, 41;
|
||||
--bs-secondary-bg: #e9ecef;
|
||||
--bs-secondary-bg-rgb: 233, 236, 239;
|
||||
--bs-tertiary-color: rgba(33, 37, 41, 0.5);
|
||||
--bs-tertiary-color-rgb: 33, 37, 41;
|
||||
--bs-tertiary-bg: #f8f9fa;
|
||||
--bs-tertiary-bg-rgb: 248, 249, 250;
|
||||
--bs-heading-color: inherit;
|
||||
--bs-link-color: #0d6efd;
|
||||
--bs-link-color-rgb: 13, 110, 253;
|
||||
--bs-link-decoration: underline;
|
||||
--bs-link-hover-color: #0a58ca;
|
||||
--bs-link-hover-color-rgb: 10, 88, 202;
|
||||
--bs-code-color: #d63384;
|
||||
--bs-highlight-color: #212529;
|
||||
--bs-highlight-bg: #fff3cd;
|
||||
--bs-border-width: 1px;
|
||||
--bs-border-style: solid;
|
||||
--bs-border-color: #dee2e6;
|
||||
--bs-border-color-translucent: rgba(0, 0, 0, 0.175);
|
||||
--bs-border-radius: 0.375rem;
|
||||
--bs-border-radius-sm: 0.25rem;
|
||||
--bs-border-radius-lg: 0.5rem;
|
||||
--bs-border-radius-xl: 1rem;
|
||||
--bs-border-radius-xxl: 2rem;
|
||||
--bs-border-radius-2xl: var(--bs-border-radius-xxl);
|
||||
--bs-border-radius-pill: 50rem;
|
||||
--bs-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);
|
||||
--bs-box-shadow-sm: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075);
|
||||
--bs-box-shadow-lg: 0 1rem 3rem rgba(0, 0, 0, 0.175);
|
||||
--bs-box-shadow-inset: inset 0 1px 2px rgba(0, 0, 0, 0.075);
|
||||
--bs-focus-ring-width: 0.25rem;
|
||||
--bs-focus-ring-opacity: 0.25;
|
||||
--bs-focus-ring-color: rgba(13, 110, 253, 0.25);
|
||||
--bs-form-valid-color: #198754;
|
||||
--bs-form-valid-border-color: #198754;
|
||||
--bs-form-invalid-color: #dc3545;
|
||||
--bs-form-invalid-border-color: #dc3545;
|
||||
}
|
||||
|
||||
[data-bs-theme=dark] {
|
||||
color-scheme: dark;
|
||||
--bs-body-color: #dee2e6;
|
||||
--bs-body-color-rgb: 222, 226, 230;
|
||||
--bs-body-bg: #212529;
|
||||
--bs-body-bg-rgb: 33, 37, 41;
|
||||
--bs-emphasis-color: #fff;
|
||||
--bs-emphasis-color-rgb: 255, 255, 255;
|
||||
--bs-secondary-color: rgba(222, 226, 230, 0.75);
|
||||
--bs-secondary-color-rgb: 222, 226, 230;
|
||||
--bs-secondary-bg: #343a40;
|
||||
--bs-secondary-bg-rgb: 52, 58, 64;
|
||||
--bs-tertiary-color: rgba(222, 226, 230, 0.5);
|
||||
--bs-tertiary-color-rgb: 222, 226, 230;
|
||||
--bs-tertiary-bg: #2b3035;
|
||||
--bs-tertiary-bg-rgb: 43, 48, 53;
|
||||
--bs-primary-text-emphasis: #6ea8fe;
|
||||
--bs-secondary-text-emphasis: #a7acb1;
|
||||
--bs-success-text-emphasis: #75b798;
|
||||
--bs-info-text-emphasis: #6edff6;
|
||||
--bs-warning-text-emphasis: #ffda6a;
|
||||
--bs-danger-text-emphasis: #ea868f;
|
||||
--bs-light-text-emphasis: #f8f9fa;
|
||||
--bs-dark-text-emphasis: #dee2e6;
|
||||
--bs-primary-bg-subtle: #031633;
|
||||
--bs-secondary-bg-subtle: #161719;
|
||||
--bs-success-bg-subtle: #051b11;
|
||||
--bs-info-bg-subtle: #032830;
|
||||
--bs-warning-bg-subtle: #332701;
|
||||
--bs-danger-bg-subtle: #2c0b0e;
|
||||
--bs-light-bg-subtle: #343a40;
|
||||
--bs-dark-bg-subtle: #1a1d20;
|
||||
--bs-primary-border-subtle: #084298;
|
||||
--bs-secondary-border-subtle: #41464b;
|
||||
--bs-success-border-subtle: #0f5132;
|
||||
--bs-info-border-subtle: #087990;
|
||||
--bs-warning-border-subtle: #997404;
|
||||
--bs-danger-border-subtle: #842029;
|
||||
--bs-light-border-subtle: #495057;
|
||||
--bs-dark-border-subtle: #343a40;
|
||||
--bs-heading-color: inherit;
|
||||
--bs-link-color: #6ea8fe;
|
||||
--bs-link-hover-color: #8bb9fe;
|
||||
--bs-link-color-rgb: 110, 168, 254;
|
||||
--bs-link-hover-color-rgb: 139, 185, 254;
|
||||
--bs-code-color: #e685b5;
|
||||
--bs-highlight-color: #dee2e6;
|
||||
--bs-highlight-bg: #664d03;
|
||||
--bs-border-color: #495057;
|
||||
--bs-border-color-translucent: rgba(255, 255, 255, 0.15);
|
||||
--bs-form-valid-color: #75b798;
|
||||
--bs-form-valid-border-color: #75b798;
|
||||
--bs-form-invalid-color: #ea868f;
|
||||
--bs-form-invalid-border-color: #ea868f;
|
||||
}
|
||||
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: no-preference) {
|
||||
:root {
|
||||
scroll-behavior: smooth;
|
||||
}
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
font-family: var(--bs-body-font-family);
|
||||
font-size: var(--bs-body-font-size);
|
||||
font-weight: var(--bs-body-font-weight);
|
||||
line-height: var(--bs-body-line-height);
|
||||
color: var(--bs-body-color);
|
||||
text-align: var(--bs-body-text-align);
|
||||
background-color: var(--bs-body-bg);
|
||||
-webkit-text-size-adjust: 100%;
|
||||
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
|
||||
}
|
||||
|
||||
hr {
|
||||
margin: 1rem 0;
|
||||
color: inherit;
|
||||
border: 0;
|
||||
border-top: var(--bs-border-width) solid;
|
||||
opacity: 0.25;
|
||||
}
|
||||
|
||||
h6, h5, h4, h3, h2, h1 {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0.5rem;
|
||||
font-weight: 500;
|
||||
line-height: 1.2;
|
||||
color: var(--bs-heading-color);
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: calc(1.375rem + 1.5vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h1 {
|
||||
font-size: 2.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: calc(1.325rem + 0.9vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h2 {
|
||||
font-size: 2rem;
|
||||
}
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: calc(1.3rem + 0.6vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h3 {
|
||||
font-size: 1.75rem;
|
||||
}
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: calc(1.275rem + 0.3vw);
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
h4 {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
abbr[title] {
|
||||
-webkit-text-decoration: underline dotted;
|
||||
text-decoration: underline dotted;
|
||||
cursor: help;
|
||||
-webkit-text-decoration-skip-ink: none;
|
||||
text-decoration-skip-ink: none;
|
||||
}
|
||||
|
||||
address {
|
||||
margin-bottom: 1rem;
|
||||
font-style: normal;
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
ol,
|
||||
ul {
|
||||
padding-right: 2rem;
|
||||
}
|
||||
|
||||
ol,
|
||||
ul,
|
||||
dl {
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
ol ol,
|
||||
ul ul,
|
||||
ol ul,
|
||||
ul ol {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
dt {
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
dd {
|
||||
margin-bottom: 0.5rem;
|
||||
margin-right: 0;
|
||||
}
|
||||
|
||||
blockquote {
|
||||
margin: 0 0 1rem;
|
||||
}
|
||||
|
||||
b,
|
||||
strong {
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
small {
|
||||
font-size: 0.875em;
|
||||
}
|
||||
|
||||
mark {
|
||||
padding: 0.1875em;
|
||||
color: var(--bs-highlight-color);
|
||||
background-color: var(--bs-highlight-bg);
|
||||
}
|
||||
|
||||
sub,
|
||||
sup {
|
||||
position: relative;
|
||||
font-size: 0.75em;
|
||||
line-height: 0;
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
sub {
|
||||
bottom: -0.25em;
|
||||
}
|
||||
|
||||
sup {
|
||||
top: -0.5em;
|
||||
}
|
||||
|
||||
a {
|
||||
color: rgba(var(--bs-link-color-rgb), var(--bs-link-opacity, 1));
|
||||
text-decoration: underline;
|
||||
}
|
||||
a:hover {
|
||||
--bs-link-color-rgb: var(--bs-link-hover-color-rgb);
|
||||
}
|
||||
|
||||
a:not([href]):not([class]), a:not([href]):not([class]):hover {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
pre,
|
||||
code,
|
||||
kbd,
|
||||
samp {
|
||||
font-family: var(--bs-font-monospace);
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
pre {
|
||||
display: block;
|
||||
margin-top: 0;
|
||||
margin-bottom: 1rem;
|
||||
overflow: auto;
|
||||
font-size: 0.875em;
|
||||
}
|
||||
pre code {
|
||||
font-size: inherit;
|
||||
color: inherit;
|
||||
word-break: normal;
|
||||
}
|
||||
|
||||
code {
|
||||
font-size: 0.875em;
|
||||
color: var(--bs-code-color);
|
||||
word-wrap: break-word;
|
||||
}
|
||||
a > code {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
kbd {
|
||||
padding: 0.1875rem 0.375rem;
|
||||
font-size: 0.875em;
|
||||
color: var(--bs-body-bg);
|
||||
background-color: var(--bs-body-color);
|
||||
border-radius: 0.25rem;
|
||||
}
|
||||
kbd kbd {
|
||||
padding: 0;
|
||||
font-size: 1em;
|
||||
}
|
||||
|
||||
figure {
|
||||
margin: 0 0 1rem;
|
||||
}
|
||||
|
||||
img,
|
||||
svg {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
table {
|
||||
caption-side: bottom;
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
caption {
|
||||
padding-top: 0.5rem;
|
||||
padding-bottom: 0.5rem;
|
||||
color: var(--bs-secondary-color);
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
th {
|
||||
text-align: inherit;
|
||||
text-align: -webkit-match-parent;
|
||||
}
|
||||
|
||||
thead,
|
||||
tbody,
|
||||
tfoot,
|
||||
tr,
|
||||
td,
|
||||
th {
|
||||
border-color: inherit;
|
||||
border-style: solid;
|
||||
border-width: 0;
|
||||
}
|
||||
|
||||
label {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
button {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
button:focus:not(:focus-visible) {
|
||||
outline: 0;
|
||||
}
|
||||
|
||||
input,
|
||||
button,
|
||||
select,
|
||||
optgroup,
|
||||
textarea {
|
||||
margin: 0;
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
line-height: inherit;
|
||||
}
|
||||
|
||||
button,
|
||||
select {
|
||||
text-transform: none;
|
||||
}
|
||||
|
||||
[role=button] {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
select {
|
||||
word-wrap: normal;
|
||||
}
|
||||
select:disabled {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
[list]:not([type=date]):not([type=datetime-local]):not([type=month]):not([type=week]):not([type=time])::-webkit-calendar-picker-indicator {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
button,
|
||||
[type=button],
|
||||
[type=reset],
|
||||
[type=submit] {
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
button:not(:disabled),
|
||||
[type=button]:not(:disabled),
|
||||
[type=reset]:not(:disabled),
|
||||
[type=submit]:not(:disabled) {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
::-moz-focus-inner {
|
||||
padding: 0;
|
||||
border-style: none;
|
||||
}
|
||||
|
||||
textarea {
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
fieldset {
|
||||
min-width: 0;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
border: 0;
|
||||
}
|
||||
|
||||
legend {
|
||||
float: right;
|
||||
width: 100%;
|
||||
padding: 0;
|
||||
margin-bottom: 0.5rem;
|
||||
font-size: calc(1.275rem + 0.3vw);
|
||||
line-height: inherit;
|
||||
}
|
||||
@media (min-width: 1200px) {
|
||||
legend {
|
||||
font-size: 1.5rem;
|
||||
}
|
||||
}
|
||||
legend + * {
|
||||
clear: right;
|
||||
}
|
||||
|
||||
::-webkit-datetime-edit-fields-wrapper,
|
||||
::-webkit-datetime-edit-text,
|
||||
::-webkit-datetime-edit-minute,
|
||||
::-webkit-datetime-edit-hour-field,
|
||||
::-webkit-datetime-edit-day-field,
|
||||
::-webkit-datetime-edit-month-field,
|
||||
::-webkit-datetime-edit-year-field {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
::-webkit-inner-spin-button {
|
||||
height: auto;
|
||||
}
|
||||
|
||||
[type=search] {
|
||||
-webkit-appearance: textfield;
|
||||
outline-offset: -2px;
|
||||
}
|
||||
|
||||
[type="tel"],
|
||||
[type="url"],
|
||||
[type="email"],
|
||||
[type="number"] {
|
||||
direction: ltr;
|
||||
}
|
||||
::-webkit-search-decoration {
|
||||
-webkit-appearance: none;
|
||||
}
|
||||
|
||||
::-webkit-color-swatch-wrapper {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
::-webkit-file-upload-button {
|
||||
font: inherit;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
::file-selector-button {
|
||||
font: inherit;
|
||||
-webkit-appearance: button;
|
||||
}
|
||||
|
||||
output {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
iframe {
|
||||
border: 0;
|
||||
}
|
||||
|
||||
summary {
|
||||
display: list-item;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
progress {
|
||||
vertical-align: baseline;
|
||||
}
|
||||
|
||||
[hidden] {
|
||||
display: none !important;
|
||||
}
|
||||
/*# sourceMappingURL=bootstrap-reboot.rtl.css.map */
|
||||
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-reboot.rtl.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-reboot.rtl.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
5402
templates/assets/css/boostrap/bootstrap-utilities.css
vendored
Normal file
5402
templates/assets/css/boostrap/bootstrap-utilities.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-utilities.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-utilities.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
5393
templates/assets/css/boostrap/bootstrap-utilities.rtl.css
vendored
Normal file
5393
templates/assets/css/boostrap/bootstrap-utilities.rtl.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap-utilities.rtl.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap-utilities.rtl.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
12057
templates/assets/css/boostrap/bootstrap.css
vendored
Normal file
12057
templates/assets/css/boostrap/bootstrap.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/css/boostrap/bootstrap.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap.css.map
Normal file
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
1
templates/assets/css/boostrap/bootstrap.min.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap.min.css.map
Normal file
File diff suppressed because one or more lines are too long
12030
templates/assets/css/boostrap/bootstrap.rtl.css
vendored
Normal file
12030
templates/assets/css/boostrap/bootstrap.rtl.css
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/css/boostrap/bootstrap.rtl.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap.rtl.css.map
Normal file
File diff suppressed because one or more lines are too long
6
templates/assets/css/boostrap/bootstrap.rtl.min.css
vendored
Normal file
6
templates/assets/css/boostrap/bootstrap.rtl.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
1
templates/assets/css/boostrap/bootstrap.rtl.min.css.map
Normal file
1
templates/assets/css/boostrap/bootstrap.rtl.min.css.map
Normal file
File diff suppressed because one or more lines are too long
3
templates/assets/css/bulma.min.css
vendored
Normal file
3
templates/assets/css/bulma.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
8
templates/assets/css/styles.css
Normal file
8
templates/assets/css/styles.css
Normal file
@ -0,0 +1,8 @@
|
||||
button.htmx-swapping {
|
||||
opacity: 0;
|
||||
transition: opacity 1s ease-out;
|
||||
}
|
||||
|
||||
#path-list .column{
|
||||
padding-left: inherit;
|
||||
}
|
||||
6314
templates/assets/js/bootstrap/bootstrap.bundle.js
vendored
Normal file
6314
templates/assets/js/bootstrap/bootstrap.bundle.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/js/bootstrap/bootstrap.bundle.js.map
Normal file
1
templates/assets/js/bootstrap/bootstrap.bundle.js.map
Normal file
File diff suppressed because one or more lines are too long
7
templates/assets/js/bootstrap/bootstrap.bundle.min.js
vendored
Normal file
7
templates/assets/js/bootstrap/bootstrap.bundle.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
4447
templates/assets/js/bootstrap/bootstrap.esm.js
vendored
Normal file
4447
templates/assets/js/bootstrap/bootstrap.esm.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/js/bootstrap/bootstrap.esm.js.map
Normal file
1
templates/assets/js/bootstrap/bootstrap.esm.js.map
Normal file
File diff suppressed because one or more lines are too long
7
templates/assets/js/bootstrap/bootstrap.esm.min.js
vendored
Normal file
7
templates/assets/js/bootstrap/bootstrap.esm.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
templates/assets/js/bootstrap/bootstrap.esm.min.js.map
Normal file
1
templates/assets/js/bootstrap/bootstrap.esm.min.js.map
Normal file
File diff suppressed because one or more lines are too long
4494
templates/assets/js/bootstrap/bootstrap.js
vendored
Normal file
4494
templates/assets/js/bootstrap/bootstrap.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
templates/assets/js/bootstrap/bootstrap.js.map
Normal file
1
templates/assets/js/bootstrap/bootstrap.js.map
Normal file
File diff suppressed because one or more lines are too long
7
templates/assets/js/bootstrap/bootstrap.min.js
vendored
Normal file
7
templates/assets/js/bootstrap/bootstrap.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1
templates/assets/js/bootstrap/bootstrap.min.js.map
Normal file
1
templates/assets/js/bootstrap/bootstrap.min.js.map
Normal file
File diff suppressed because one or more lines are too long
9
templates/assets/js/function/functions.js
Normal file
9
templates/assets/js/function/functions.js
Normal file
@ -0,0 +1,9 @@
|
||||
function toggleMenu() {
|
||||
const menu = document.getElementById('libraryMenu');
|
||||
menu.hidden = !menu.hidden;
|
||||
}
|
||||
|
||||
function toggleMenuGoDownload() {
|
||||
const menu = document.getElementById('menuDownload');
|
||||
menu.hidden = !menu.hidden;
|
||||
}
|
||||
13
templates/assets/js/function/login.js
Normal file
13
templates/assets/js/function/login.js
Normal file
@ -0,0 +1,13 @@
|
||||
document.body.addEventListener('htmx:confirm', function(evt) {
|
||||
// 0. To modify the behavior only for elements with the hx-confirm attribute,
|
||||
// check if evt.detail.target.hasAttribute('hx-confirm')
|
||||
|
||||
// 1. Prevent the default behavior (this will prevent the request from being issued)
|
||||
evt.preventDefault();
|
||||
|
||||
// 2. Do your own logic here
|
||||
console.log(evt.detail)
|
||||
|
||||
// 3. Manually issue the request when you are ready
|
||||
evt.detail.issueRequest(); // or evt.detail.issueRequest(true) to skip the built-in window.confirm()
|
||||
});
|
||||
5261
templates/assets/js/htmx.js
Normal file
5261
templates/assets/js/htmx.js
Normal file
File diff suppressed because it is too large
Load Diff
85
templates/assets/js/index.js
Normal file
85
templates/assets/js/index.js
Normal file
@ -0,0 +1,85 @@
|
||||
console.log("la");
|
||||
console.log("la");
|
||||
async function validatePath() {
|
||||
const pathInput = document.getElementById('path-input');
|
||||
const statusIcon = document.getElementById('path-status-icon');
|
||||
const validateBtn = document.getElementById('validate-btn');
|
||||
|
||||
const inputPath=document.getElementById('path');
|
||||
const inputPathV=document.getElementById('namePath');
|
||||
const path = pathInput.value;
|
||||
|
||||
if (!path) {
|
||||
statusIcon.innerHTML = '<i class="fas fa-times has-text-danger"></i>';
|
||||
validateBtn.disabled = true;
|
||||
return;
|
||||
}
|
||||
|
||||
statusIcon.innerHTML = '<i class="fas fa-circle-notch fa-spin"></i>'; // Loading icon
|
||||
|
||||
try {
|
||||
const response = await fetch('/validate-path', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ path }),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
statusIcon.innerHTML = '<i class="fas fa-check-square"></i>';
|
||||
validateBtn.disabled = false;
|
||||
inputPath.value=path;
|
||||
inputPathV.style.display="block";
|
||||
} else {
|
||||
const result = await response.json();
|
||||
statusIcon.innerHTML = '<i class="fas fa-exclamation-triangle"></i>';
|
||||
validateBtn.disabled = true;
|
||||
console.error('Error:', result.error);
|
||||
}
|
||||
} catch (error) {
|
||||
statusIcon.innerHTML = '<i class="fas fa-exclamation-triangle"></i>';
|
||||
validateBtn.disabled = true;
|
||||
console.error('Request failed:', error);
|
||||
}
|
||||
}
|
||||
function disableAllInputPath(id){
|
||||
console.log(this)
|
||||
var inputs = document.querySelectorAll('#path-'+id+' .fff');
|
||||
var btn =document.getElementById('btn-path-annuler-'+id)
|
||||
btn.style.display = "none";
|
||||
var btn2 =document.getElementById('btn-path-edit-'+id)
|
||||
btn2.style.display = "block";
|
||||
var btn3 =document.getElementById('btn-path-valider-'+id)
|
||||
btn3.style.display = "none";
|
||||
|
||||
inputs.forEach(function(input) {
|
||||
input.disabled = true;
|
||||
});
|
||||
|
||||
}
|
||||
function enableAllInputPath(id){
|
||||
console.log(this)
|
||||
var inputs = document.querySelectorAll('#path-'+id+' .fff');
|
||||
var btn =document.getElementById('btn-path-annuler-'+id)
|
||||
btn.style.display = "block";
|
||||
var btn2 =document.getElementById('btn-path-edit-'+id)
|
||||
btn2.style.display = "none";
|
||||
var btn3 =document.getElementById('btn-path-valider-'+id)
|
||||
btn3.style.display = "block";
|
||||
inputs.forEach(function(input) {
|
||||
input.disabled = false;
|
||||
});
|
||||
}
|
||||
|
||||
function setInputHidden(target,value){
|
||||
document.getElementById(target).value = value;
|
||||
|
||||
}
|
||||
function hide(target){
|
||||
var btn =document.getElementById(target)
|
||||
btn.style.display = "none";
|
||||
}
|
||||
|
||||
document.addEventListener("htmx:afterOnLoad", function (event) {
|
||||
console.log("Réponse du serveur :", event.detail.xhr.responseText);
|
||||
});
|
||||
|
||||
36
templates/assets/js/json-enc.js
Normal file
36
templates/assets/js/json-enc.js
Normal file
@ -0,0 +1,36 @@
|
||||
(function() {
|
||||
let api
|
||||
htmx.defineExtension('json-enc', {
|
||||
init: function(apiRef) {
|
||||
api = apiRef
|
||||
},
|
||||
|
||||
onEvent: function(name, evt) {
|
||||
if (name === 'htmx:configRequest') {
|
||||
evt.detail.headers['Content-Type'] = 'application/json'
|
||||
}
|
||||
},
|
||||
|
||||
encodeParameters: function(xhr, parameters, elt) {
|
||||
xhr.overrideMimeType('text/json')
|
||||
|
||||
const vals = api.getExpressionVars(elt)
|
||||
const object = {}
|
||||
parameters.forEach(function(value, key) {
|
||||
// FormData encodes values as strings, restore hx-vals/hx-vars with their initial types
|
||||
const typedValue = Object.hasOwn(vals, key) ? vals[key] : value
|
||||
if (Object.hasOwn(object, key)) {
|
||||
if (!Array.isArray(object[key])) {
|
||||
object[key] = [object[key]]
|
||||
}
|
||||
object[key].push(typedValue)
|
||||
} else {
|
||||
object[key] = typedValue
|
||||
}
|
||||
})
|
||||
|
||||
return (JSON.stringify(object))
|
||||
}
|
||||
})
|
||||
})()
|
||||
|
||||
291
templates/assets/js/sse.js
Normal file
291
templates/assets/js/sse.js
Normal file
@ -0,0 +1,291 @@
|
||||
/*
|
||||
Server Sent Events Extension
|
||||
============================
|
||||
This extension adds support for Server Sent Events to htmx. See /www/extensions/sse.md for usage instructions.
|
||||
|
||||
*/
|
||||
|
||||
(function() {
|
||||
/** @type {import("../htmx").HtmxInternalApi} */
|
||||
var api
|
||||
|
||||
htmx.defineExtension('sse', {
|
||||
|
||||
/**
|
||||
* Init saves the provided reference to the internal HTMX API.
|
||||
*
|
||||
* @param {import("../htmx").HtmxInternalApi} api
|
||||
* @returns void
|
||||
*/
|
||||
init: function(apiRef) {
|
||||
// store a reference to the internal API.
|
||||
api = apiRef
|
||||
|
||||
// set a function in the public API for creating new EventSource objects
|
||||
if (htmx.createEventSource == undefined) {
|
||||
htmx.createEventSource = createEventSource
|
||||
}
|
||||
},
|
||||
|
||||
getSelectors: function() {
|
||||
return ['[sse-connect]', '[data-sse-connect]', '[sse-swap]', '[data-sse-swap]']
|
||||
},
|
||||
|
||||
/**
|
||||
* onEvent handles all events passed to this extension.
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Event} evt
|
||||
* @returns void
|
||||
*/
|
||||
onEvent: function(name, evt) {
|
||||
var parent = evt.target || evt.detail.elt
|
||||
switch (name) {
|
||||
case 'htmx:beforeCleanupElement':
|
||||
var internalData = api.getInternalData(parent)
|
||||
// Try to remove remove an EventSource when elements are removed
|
||||
var source = internalData.sseEventSource
|
||||
if (source) {
|
||||
api.triggerEvent(parent, 'htmx:sseClose', {
|
||||
source,
|
||||
type: 'nodeReplaced',
|
||||
})
|
||||
internalData.sseEventSource.close()
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
// Try to create EventSources when elements are processed
|
||||
case 'htmx:afterProcessNode':
|
||||
ensureEventSourceOnElement(parent)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
/// ////////////////////////////////////////////
|
||||
// HELPER FUNCTIONS
|
||||
/// ////////////////////////////////////////////
|
||||
|
||||
/**
|
||||
* createEventSource is the default method for creating new EventSource objects.
|
||||
* it is hoisted into htmx.config.createEventSource to be overridden by the user, if needed.
|
||||
*
|
||||
* @param {string} url
|
||||
* @returns EventSource
|
||||
*/
|
||||
function createEventSource(url) {
|
||||
return new EventSource(url, { withCredentials: true })
|
||||
}
|
||||
|
||||
/**
|
||||
* registerSSE looks for attributes that can contain sse events, right
|
||||
* now hx-trigger and sse-swap and adds listeners based on these attributes too
|
||||
* the closest event source
|
||||
*
|
||||
* @param {HTMLElement} elt
|
||||
*/
|
||||
function registerSSE(elt) {
|
||||
// Add message handlers for every `sse-swap` attribute
|
||||
if (api.getAttributeValue(elt, 'sse-swap')) {
|
||||
// Find closest existing event source
|
||||
var sourceElement = api.getClosestMatch(elt, hasEventSource)
|
||||
if (sourceElement == null) {
|
||||
// api.triggerErrorEvent(elt, "htmx:noSSESourceError")
|
||||
return null // no eventsource in parentage, orphaned element
|
||||
}
|
||||
|
||||
// Set internalData and source
|
||||
var internalData = api.getInternalData(sourceElement)
|
||||
var source = internalData.sseEventSource
|
||||
|
||||
var sseSwapAttr = api.getAttributeValue(elt, 'sse-swap')
|
||||
var sseEventNames = sseSwapAttr.split(',')
|
||||
|
||||
for (var i = 0; i < sseEventNames.length; i++) {
|
||||
const sseEventName = sseEventNames[i].trim()
|
||||
const listener = function(event) {
|
||||
// If the source is missing then close SSE
|
||||
if (maybeCloseSSESource(sourceElement)) {
|
||||
return
|
||||
}
|
||||
|
||||
// If the body no longer contains the element, remove the listener
|
||||
if (!api.bodyContains(elt)) {
|
||||
source.removeEventListener(sseEventName, listener)
|
||||
return
|
||||
}
|
||||
|
||||
// swap the response into the DOM and trigger a notification
|
||||
if (!api.triggerEvent(elt, 'htmx:sseBeforeMessage', event)) {
|
||||
return
|
||||
}
|
||||
swap(elt, event.data)
|
||||
api.triggerEvent(elt, 'htmx:sseMessage', event)
|
||||
}
|
||||
|
||||
// Register the new listener
|
||||
api.getInternalData(elt).sseEventListener = listener
|
||||
source.addEventListener(sseEventName, listener)
|
||||
}
|
||||
}
|
||||
|
||||
// Add message handlers for every `hx-trigger="sse:*"` attribute
|
||||
if (api.getAttributeValue(elt, 'hx-trigger')) {
|
||||
// Find closest existing event source
|
||||
var sourceElement = api.getClosestMatch(elt, hasEventSource)
|
||||
if (sourceElement == null) {
|
||||
// api.triggerErrorEvent(elt, "htmx:noSSESourceError")
|
||||
return null // no eventsource in parentage, orphaned element
|
||||
}
|
||||
|
||||
// Set internalData and source
|
||||
var internalData = api.getInternalData(sourceElement)
|
||||
var source = internalData.sseEventSource
|
||||
|
||||
var triggerSpecs = api.getTriggerSpecs(elt)
|
||||
triggerSpecs.forEach(function(ts) {
|
||||
if (ts.trigger.slice(0, 4) !== 'sse:') {
|
||||
return
|
||||
}
|
||||
|
||||
var listener = function (event) {
|
||||
if (maybeCloseSSESource(sourceElement)) {
|
||||
return
|
||||
}
|
||||
if (!api.bodyContains(elt)) {
|
||||
source.removeEventListener(ts.trigger.slice(4), listener)
|
||||
}
|
||||
// Trigger events to be handled by the rest of htmx
|
||||
htmx.trigger(elt, ts.trigger, event)
|
||||
htmx.trigger(elt, 'htmx:sseMessage', event)
|
||||
}
|
||||
|
||||
// Register the new listener
|
||||
api.getInternalData(elt).sseEventListener = listener
|
||||
source.addEventListener(ts.trigger.slice(4), listener)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* ensureEventSourceOnElement creates a new EventSource connection on the provided element.
|
||||
* If a usable EventSource already exists, then it is returned. If not, then a new EventSource
|
||||
* is created and stored in the element's internalData.
|
||||
* @param {HTMLElement} elt
|
||||
* @param {number} retryCount
|
||||
* @returns {EventSource | null}
|
||||
*/
|
||||
function ensureEventSourceOnElement(elt, retryCount) {
|
||||
if (elt == null) {
|
||||
return null
|
||||
}
|
||||
|
||||
// handle extension source creation attribute
|
||||
if (api.getAttributeValue(elt, 'sse-connect')) {
|
||||
var sseURL = api.getAttributeValue(elt, 'sse-connect')
|
||||
if (sseURL == null) {
|
||||
return
|
||||
}
|
||||
|
||||
ensureEventSource(elt, sseURL, retryCount)
|
||||
}
|
||||
|
||||
registerSSE(elt)
|
||||
}
|
||||
|
||||
function ensureEventSource(elt, url, retryCount) {
|
||||
var source = htmx.createEventSource(url)
|
||||
|
||||
source.onerror = function(err) {
|
||||
// Log an error event
|
||||
api.triggerErrorEvent(elt, 'htmx:sseError', { error: err, source })
|
||||
|
||||
// If parent no longer exists in the document, then clean up this EventSource
|
||||
if (maybeCloseSSESource(elt)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, try to reconnect the EventSource
|
||||
if (source.readyState === EventSource.CLOSED) {
|
||||
retryCount = retryCount || 0
|
||||
retryCount = Math.max(Math.min(retryCount * 2, 128), 1)
|
||||
var timeout = retryCount * 500
|
||||
window.setTimeout(function() {
|
||||
ensureEventSourceOnElement(elt, retryCount)
|
||||
}, timeout)
|
||||
}
|
||||
}
|
||||
|
||||
source.onopen = function(evt) {
|
||||
api.triggerEvent(elt, 'htmx:sseOpen', { source })
|
||||
|
||||
if (retryCount && retryCount > 0) {
|
||||
const childrenToFix = elt.querySelectorAll("[sse-swap], [data-sse-swap], [hx-trigger], [data-hx-trigger]")
|
||||
for (let i = 0; i < childrenToFix.length; i++) {
|
||||
registerSSE(childrenToFix[i])
|
||||
}
|
||||
// We want to increase the reconnection delay for consecutive failed attempts only
|
||||
retryCount = 0
|
||||
}
|
||||
}
|
||||
|
||||
api.getInternalData(elt).sseEventSource = source
|
||||
|
||||
|
||||
var closeAttribute = api.getAttributeValue(elt, "sse-close");
|
||||
if (closeAttribute) {
|
||||
// close eventsource when this message is received
|
||||
source.addEventListener(closeAttribute, function() {
|
||||
api.triggerEvent(elt, 'htmx:sseClose', {
|
||||
source,
|
||||
type: 'message',
|
||||
})
|
||||
source.close()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* maybeCloseSSESource confirms that the parent element still exists.
|
||||
* If not, then any associated SSE source is closed and the function returns true.
|
||||
*
|
||||
* @param {HTMLElement} elt
|
||||
* @returns boolean
|
||||
*/
|
||||
function maybeCloseSSESource(elt) {
|
||||
if (!api.bodyContains(elt)) {
|
||||
var source = api.getInternalData(elt).sseEventSource
|
||||
if (source != undefined) {
|
||||
api.triggerEvent(elt, 'htmx:sseClose', {
|
||||
source,
|
||||
type: 'nodeMissing',
|
||||
})
|
||||
source.close()
|
||||
// source = null
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @param {HTMLElement} elt
|
||||
* @param {string} content
|
||||
*/
|
||||
function swap(elt, content) {
|
||||
api.withExtensions(elt, function(extension) {
|
||||
content = extension.transformResponse(content, null, elt)
|
||||
})
|
||||
|
||||
var swapSpec = api.getSwapSpecification(elt)
|
||||
var target = api.getTarget(elt)
|
||||
api.swap(target, content, swapSpec)
|
||||
}
|
||||
|
||||
|
||||
function hasEventSource(node) {
|
||||
return api.getInternalData(node).sseEventSource != null
|
||||
}
|
||||
})()
|
||||
|
||||
476
templates/assets/js/ws.js
Normal file
476
templates/assets/js/ws.js
Normal file
@ -0,0 +1,476 @@
|
||||
/*
|
||||
WebSockets Extension
|
||||
============================
|
||||
This extension adds support for WebSockets to htmx. See /www/extensions/ws.md for usage instructions.
|
||||
*/
|
||||
|
||||
(function () {
|
||||
|
||||
/** @type {import("../htmx").HtmxInternalApi} */
|
||||
var api;
|
||||
|
||||
htmx.defineExtension("ws", {
|
||||
|
||||
/**
|
||||
* init is called once, when this extension is first registered.
|
||||
* @param {import("../htmx").HtmxInternalApi} apiRef
|
||||
*/
|
||||
init: function (apiRef) {
|
||||
|
||||
// Store reference to internal API
|
||||
api = apiRef;
|
||||
|
||||
// Default function for creating new EventSource objects
|
||||
if (!htmx.createWebSocket) {
|
||||
htmx.createWebSocket = createWebSocket;
|
||||
}
|
||||
|
||||
// Default setting for reconnect delay
|
||||
if (!htmx.config.wsReconnectDelay) {
|
||||
htmx.config.wsReconnectDelay = "full-jitter";
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* onEvent handles all events passed to this extension.
|
||||
*
|
||||
* @param {string} name
|
||||
* @param {Event} evt
|
||||
*/
|
||||
onEvent: function (name, evt) {
|
||||
var parent = evt.target || evt.detail.elt;
|
||||
|
||||
switch (name) {
|
||||
|
||||
// Try to close the socket when elements are removed
|
||||
case "htmx:beforeCleanupElement":
|
||||
|
||||
var internalData = api.getInternalData(parent)
|
||||
|
||||
if (internalData.webSocket) {
|
||||
internalData.webSocket.close();
|
||||
}
|
||||
return;
|
||||
|
||||
// Try to create websockets when elements are processed
|
||||
case "htmx:beforeProcessNode":
|
||||
forEach(queryAttributeOnThisOrChildren(parent, "ws-connect"), function (child) {
|
||||
ensureWebSocket(child)
|
||||
});
|
||||
forEach(queryAttributeOnThisOrChildren(parent, "ws-send"), function (child) {
|
||||
ensureWebSocketSend(child)
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
function splitOnWhitespace(trigger) {
|
||||
return trigger.trim().split(/\s+/);
|
||||
}
|
||||
|
||||
function getLegacyWebsocketURL(elt) {
|
||||
var legacySSEValue = api.getAttributeValue(elt, "hx-ws");
|
||||
if (legacySSEValue) {
|
||||
var values = splitOnWhitespace(legacySSEValue);
|
||||
for (var i = 0; i < values.length; i++) {
|
||||
var value = values[i].split(/:(.+)/);
|
||||
if (value[0] === "connect") {
|
||||
return value[1];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* ensureWebSocket creates a new WebSocket on the designated element, using
|
||||
* the element's "ws-connect" attribute.
|
||||
* @param {HTMLElement} socketElt
|
||||
* @returns
|
||||
*/
|
||||
function ensureWebSocket(socketElt) {
|
||||
|
||||
// If the element containing the WebSocket connection no longer exists, then
|
||||
// do not connect/reconnect the WebSocket.
|
||||
if (!api.bodyContains(socketElt)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the source straight from the element's value
|
||||
var wssSource = api.getAttributeValue(socketElt, "ws-connect")
|
||||
|
||||
if (wssSource == null || wssSource === "") {
|
||||
var legacySource = getLegacyWebsocketURL(socketElt);
|
||||
if (legacySource == null) {
|
||||
return;
|
||||
} else {
|
||||
wssSource = legacySource;
|
||||
}
|
||||
}
|
||||
|
||||
// Guarantee that the wssSource value is a fully qualified URL
|
||||
if (wssSource.indexOf("/") === 0) {
|
||||
var base_part = location.hostname + (location.port ? ':' + location.port : '');
|
||||
if (location.protocol === 'https:') {
|
||||
wssSource = "wss://" + base_part + wssSource;
|
||||
} else if (location.protocol === 'http:') {
|
||||
wssSource = "ws://" + base_part + wssSource;
|
||||
}
|
||||
}
|
||||
|
||||
var socketWrapper = createWebsocketWrapper(socketElt, function () {
|
||||
return htmx.createWebSocket(wssSource)
|
||||
});
|
||||
|
||||
socketWrapper.addEventListener('message', function (event) {
|
||||
if (maybeCloseWebSocketSource(socketElt)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var response = event.data;
|
||||
if (!api.triggerEvent(socketElt, "htmx:wsBeforeMessage", {
|
||||
message: response,
|
||||
socketWrapper: socketWrapper.publicInterface
|
||||
})) {
|
||||
return;
|
||||
}
|
||||
|
||||
api.withExtensions(socketElt, function (extension) {
|
||||
response = extension.transformResponse(response, null, socketElt);
|
||||
});
|
||||
|
||||
var settleInfo = api.makeSettleInfo(socketElt);
|
||||
var fragment = api.makeFragment(response);
|
||||
|
||||
if (fragment.children.length) {
|
||||
var children = Array.from(fragment.children);
|
||||
for (var i = 0; i < children.length; i++) {
|
||||
api.oobSwap(api.getAttributeValue(children[i], "hx-swap-oob") || "true", children[i], settleInfo);
|
||||
}
|
||||
}
|
||||
|
||||
api.settleImmediately(settleInfo.tasks);
|
||||
api.triggerEvent(socketElt, "htmx:wsAfterMessage", { message: response, socketWrapper: socketWrapper.publicInterface })
|
||||
});
|
||||
|
||||
// Put the WebSocket into the HTML Element's custom data.
|
||||
api.getInternalData(socketElt).webSocket = socketWrapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {Object} WebSocketWrapper
|
||||
* @property {WebSocket} socket
|
||||
* @property {Array<{message: string, sendElt: Element}>} messageQueue
|
||||
* @property {number} retryCount
|
||||
* @property {(message: string, sendElt: Element) => void} sendImmediately sendImmediately sends message regardless of websocket connection state
|
||||
* @property {(message: string, sendElt: Element) => void} send
|
||||
* @property {(event: string, handler: Function) => void} addEventListener
|
||||
* @property {() => void} handleQueuedMessages
|
||||
* @property {() => void} init
|
||||
* @property {() => void} close
|
||||
*/
|
||||
/**
|
||||
*
|
||||
* @param socketElt
|
||||
* @param socketFunc
|
||||
* @returns {WebSocketWrapper}
|
||||
*/
|
||||
function createWebsocketWrapper(socketElt, socketFunc) {
|
||||
var wrapper = {
|
||||
socket: null,
|
||||
messageQueue: [],
|
||||
retryCount: 0,
|
||||
|
||||
/** @type {Object<string, Function[]>} */
|
||||
events: {},
|
||||
|
||||
addEventListener: function (event, handler) {
|
||||
if (this.socket) {
|
||||
this.socket.addEventListener(event, handler);
|
||||
}
|
||||
|
||||
if (!this.events[event]) {
|
||||
this.events[event] = [];
|
||||
}
|
||||
|
||||
this.events[event].push(handler);
|
||||
},
|
||||
|
||||
sendImmediately: function (message, sendElt) {
|
||||
if (!this.socket) {
|
||||
api.triggerErrorEvent()
|
||||
}
|
||||
if (!sendElt || api.triggerEvent(sendElt, 'htmx:wsBeforeSend', {
|
||||
message: message,
|
||||
socketWrapper: this.publicInterface
|
||||
})) {
|
||||
this.socket.send(message);
|
||||
sendElt && api.triggerEvent(sendElt, 'htmx:wsAfterSend', {
|
||||
message: message,
|
||||
socketWrapper: this.publicInterface
|
||||
})
|
||||
}
|
||||
},
|
||||
|
||||
send: function (message, sendElt) {
|
||||
if (this.socket.readyState !== this.socket.OPEN) {
|
||||
this.messageQueue.push({ message: message, sendElt: sendElt });
|
||||
} else {
|
||||
this.sendImmediately(message, sendElt);
|
||||
}
|
||||
},
|
||||
|
||||
handleQueuedMessages: function () {
|
||||
while (this.messageQueue.length > 0) {
|
||||
var queuedItem = this.messageQueue[0]
|
||||
if (this.socket.readyState === this.socket.OPEN) {
|
||||
this.sendImmediately(queuedItem.message, queuedItem.sendElt);
|
||||
this.messageQueue.shift();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
init: function () {
|
||||
if (this.socket && this.socket.readyState === this.socket.OPEN) {
|
||||
// Close discarded socket
|
||||
this.socket.close()
|
||||
}
|
||||
|
||||
// Create a new WebSocket and event handlers
|
||||
/** @type {WebSocket} */
|
||||
var socket = socketFunc();
|
||||
|
||||
// The event.type detail is added for interface conformance with the
|
||||
// other two lifecycle events (open and close) so a single handler method
|
||||
// can handle them polymorphically, if required.
|
||||
api.triggerEvent(socketElt, "htmx:wsConnecting", { event: { type: 'connecting' } });
|
||||
|
||||
this.socket = socket;
|
||||
|
||||
socket.onopen = function (e) {
|
||||
wrapper.retryCount = 0;
|
||||
api.triggerEvent(socketElt, "htmx:wsOpen", { event: e, socketWrapper: wrapper.publicInterface });
|
||||
wrapper.handleQueuedMessages();
|
||||
}
|
||||
|
||||
socket.onclose = function (e) {
|
||||
// If socket should not be connected, stop further attempts to establish connection
|
||||
// If Abnormal Closure/Service Restart/Try Again Later, then set a timer to reconnect after a pause.
|
||||
if (!maybeCloseWebSocketSource(socketElt) && [1006, 1012, 1013].indexOf(e.code) >= 0) {
|
||||
var delay = getWebSocketReconnectDelay(wrapper.retryCount);
|
||||
setTimeout(function () {
|
||||
wrapper.retryCount += 1;
|
||||
wrapper.init();
|
||||
}, delay);
|
||||
}
|
||||
|
||||
// Notify client code that connection has been closed. Client code can inspect `event` field
|
||||
// to determine whether closure has been valid or abnormal
|
||||
api.triggerEvent(socketElt, "htmx:wsClose", { event: e, socketWrapper: wrapper.publicInterface })
|
||||
};
|
||||
|
||||
socket.onerror = function (e) {
|
||||
api.triggerErrorEvent(socketElt, "htmx:wsError", { error: e, socketWrapper: wrapper });
|
||||
maybeCloseWebSocketSource(socketElt);
|
||||
};
|
||||
|
||||
var events = this.events;
|
||||
Object.keys(events).forEach(function (k) {
|
||||
events[k].forEach(function (e) {
|
||||
socket.addEventListener(k, e);
|
||||
})
|
||||
});
|
||||
},
|
||||
|
||||
close: function () {
|
||||
this.socket.close()
|
||||
}
|
||||
}
|
||||
|
||||
wrapper.init();
|
||||
|
||||
wrapper.publicInterface = {
|
||||
send: wrapper.send.bind(wrapper),
|
||||
sendImmediately: wrapper.sendImmediately.bind(wrapper),
|
||||
queue: wrapper.messageQueue
|
||||
};
|
||||
|
||||
return wrapper;
|
||||
}
|
||||
|
||||
/**
|
||||
* ensureWebSocketSend attaches trigger handles to elements with
|
||||
* "ws-send" attribute
|
||||
* @param {HTMLElement} elt
|
||||
*/
|
||||
function ensureWebSocketSend(elt) {
|
||||
var legacyAttribute = api.getAttributeValue(elt, "hx-ws");
|
||||
if (legacyAttribute && legacyAttribute !== 'send') {
|
||||
return;
|
||||
}
|
||||
|
||||
var webSocketParent = api.getClosestMatch(elt, hasWebSocket)
|
||||
processWebSocketSend(webSocketParent, elt);
|
||||
}
|
||||
|
||||
/**
|
||||
* hasWebSocket function checks if a node has webSocket instance attached
|
||||
* @param {HTMLElement} node
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function hasWebSocket(node) {
|
||||
return api.getInternalData(node).webSocket != null;
|
||||
}
|
||||
|
||||
/**
 * processWebSocketSend adds event listeners to the <form> element so that
 * messages can be sent to the WebSocket server when the form is submitted.
 * One handler is installed per trigger spec parsed from the element.
 * @param {HTMLElement} socketElt element that owns the WebSocket wrapper
 * @param {HTMLElement} sendElt element whose triggers cause a send
 */
function processWebSocketSend(socketElt, sendElt) {
    var nodeData = api.getInternalData(sendElt);
    var triggerSpecs = api.getTriggerSpecs(sendElt);
    triggerSpecs.forEach(function (ts) {
        api.addTriggerHandler(sendElt, ts, nodeData, function (elt, evt) {
            // If the socket's owning element has left the DOM, the socket is
            // closed by maybeCloseWebSocketSource — nothing left to send to.
            if (maybeCloseWebSocketSource(socketElt)) {
                return;
            }

            /** @type {WebSocketWrapper} */
            var socketWrapper = api.getInternalData(socketElt).webSocket;
            var headers = api.getHeaders(sendElt, api.getTarget(sendElt));
            var results = api.getInputValues(sendElt, 'post');
            var errors = results.errors;
            var rawParameters = results.values;
            var expressionVars = api.getExpressionVars(sendElt);
            var allParameters = api.mergeObjects(rawParameters, expressionVars);
            var filteredParameters = api.filterValues(allParameters, sendElt);

            // Event detail handed to htmx:wsConfigSend listeners; they may
            // mutate parameters/headers, or set messageBody to take full
            // control of the payload.
            var sendConfig = {
                parameters: filteredParameters,
                unfilteredParameters: allParameters,
                headers: headers,
                errors: errors,

                triggeringEvent: evt,
                messageBody: undefined,
                socketWrapper: socketWrapper.publicInterface
            };

            // A listener returning false cancels this send entirely.
            if (!api.triggerEvent(elt, 'htmx:wsConfigSend', sendConfig)) {
                return;
            }

            // Input validation failed: announce and abort the send.
            if (errors && errors.length > 0) {
                api.triggerEvent(elt, 'htmx:validation:halted', errors);
                return;
            }

            // Default payload: JSON of the (possibly listener-mutated)
            // parameters, with the request headers folded in under HEADERS.
            var body = sendConfig.messageBody;
            if (body === undefined) {
                var toSend = Object.assign({}, sendConfig.parameters);
                if (sendConfig.headers)
                    toSend['HEADERS'] = headers;
                body = JSON.stringify(toSend);
            }

            socketWrapper.send(body, elt);

            // Suppress default behavior (e.g. form submission navigation)
            // when htmx's cancellation rules say so.
            if (evt && api.shouldCancel(evt, elt)) {
                evt.preventDefault();
            }
        });
    });
}
|
||||
|
||||
/**
 * getWebSocketReconnectDelay computes the wait, in milliseconds, before the
 * next reconnect attempt, based on htmx.config.wsReconnectDelay.
 *
 * Accepted configurations:
 *   - a function (retryCount) => milliseconds, called directly;
 *   - the string "full-jitter": exponential backoff with full jitter.
 *
 * Fix over the original: an invalid configuration used to log an error and
 * return undefined, which makes setTimeout fire immediately and produces a
 * tight reconnect loop. We now log the error and fall back to the
 * full-jitter strategy, so the return value is always a number.
 *
 * @param {number} retryCount // The number of retries that have already taken place
 * @returns {number}
 */
function getWebSocketReconnectDelay(retryCount) {

    /** @type {"full-jitter" | ((retryCount:number) => number)} */
    var delay = htmx.config.wsReconnectDelay;
    if (typeof delay === 'function') {
        return delay(retryCount);
    }
    if (delay !== 'full-jitter') {
        logError('htmx.config.wsReconnectDelay must either be a function or the string "full-jitter"');
        // Fall through to the full-jitter default instead of returning
        // undefined (which would schedule an immediate reconnect).
    }

    // Full-jitter exponential backoff: cap the exponent at 6 (64s window)
    // and pick a uniformly random delay within [0, 2^exp * 1000).
    var exp = Math.min(retryCount, 6);
    var maxDelay = 1000 * Math.pow(2, exp);
    return maxDelay * Math.random();
}
|
||||
|
||||
/**
 * maybeCloseWebSocketSource closes the WebSocket owned by elt when elt is no
 * longer attached to the document.
 *
 * @param {*} elt element that created the WebSocket
 * @returns {boolean} true when the element was gone and the socket was
 *                    closed; false when the element still exists (no action)
 */
function maybeCloseWebSocketSource(elt) {
    if (api.bodyContains(elt)) {
        // Owning element is still in the DOM — leave the connection open.
        return false;
    }
    api.getInternalData(elt).webSocket.close();
    return true;
}
|
||||
|
||||
/**
 * createWebSocket is the default factory for WebSocket objects. It is
 * hoisted into htmx.createWebSocket so users can override connection
 * creation if needed.
 *
 * @param {string} url
 * @returns WebSocket
 */
function createWebSocket(url) {
    // The explicit empty protocol list is equivalent to omitting it.
    var conn = new WebSocket(url, []);
    conn.binaryType = htmx.config.wsBinaryType;
    return conn;
}
|
||||
|
||||
/**
 * queryAttributeOnThisOrChildren returns all nodes carrying the requested
 * attribute (plain, data-prefixed, or the legacy hx-ws forms), INCLUDING
 * THE PROVIDED ROOT ELEMENT.
 *
 * @param {HTMLElement} elt
 * @param {string} attributeName
 */
function queryAttributeOnThisOrChildren(elt, attributeName) {
    var matches = [];

    // The root element itself may carry the attribute (or the legacy hx-ws).
    if (api.hasAttribute(elt, attributeName) || api.hasAttribute(elt, "hx-ws")) {
        matches.push(elt);
    }

    // Collect every matching descendant as well.
    var selector = "[" + attributeName + "], [data-" + attributeName + "], [data-hx-ws], [hx-ws]";
    elt.querySelectorAll(selector).forEach(function (node) {
        matches.push(node);
    });

    return matches;
}
|
||||
|
||||
/**
 * forEach invokes func once for each element of arr, in order.
 * A null or undefined arr is a no-op.
 * @template T
 * @param {T[]} arr
 * @param {(T) => void} func
 */
function forEach(arr, func) {
    if (!arr) {
        return;
    }
    for (var idx = 0; idx < arr.length; idx++) {
        func(arr[idx]);
    }
}
|
||||
|
||||
})();
|
||||
|
||||
66
templates/dashboard.pages.tmpl
Normal file
66
templates/dashboard.pages.tmpl
Normal file
@ -0,0 +1,66 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
{{ template "head" . }}
|
||||
|
||||
<body>
|
||||
<header></header>
|
||||
|
||||
<div class="column">
|
||||
<h1>Dashboard</h1>
|
||||
<div class="columns is-mobile">
|
||||
<div class="column is-2">
|
||||
<aside class="menu">
|
||||
|
||||
<p class="menu-label">GoDownloader</p>
|
||||
|
||||
<ul class="menu-list">
|
||||
<li><a class="nav-link" onclick="toggleMenuGoDownload(); return false;">GoDownloader</a>
|
||||
<ul id="menuDownload" hidden>
|
||||
<li><a hx-get="/godownloader/downloads" hx-target="#content" hx-swap-oob="beforeend">Downloads</a></li>
|
||||
<li><a hx-get="/godownloader/linkcollectors" hx-target="#content" hx-swap-oob="beforeend">Linkcollectors</a></li>
|
||||
</ul>
|
||||
|
||||
</li>
|
||||
<p class="menu-label">Library</p>
|
||||
|
||||
<li><a hx-get="/library" class="nav-link"
|
||||
onclick="toggleMenu(); return false;"
|
||||
hx-target="#content" hx-swap-oob="beforeend">Library</a>
|
||||
<ul class="menu-list" id="libraryMenu" hidden>
|
||||
<li>
|
||||
<a class="is-active">Choose Library</a>
|
||||
<ul>
|
||||
{{range .paths}}
|
||||
<li>
|
||||
<span class="icon-text">
|
||||
<span><a>{{ .PathName }}</a></span>
|
||||
<span class="icon">
|
||||
<a style="padding-top: 0;height: 0;"><i class="fas fa-ellipsis-v"></i></a>
|
||||
</span>
|
||||
</span>
|
||||
{{end}}
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><a hx-get="/settings" class="nav-link" hx-target="#content" hx-swap-oob="beforeend">Settings</a></li>
</ul>
|
||||
</aside>
|
||||
</div>
|
||||
<div class="column is-10">
|
||||
<div id="content">
|
||||
<!-- Page-specific content is loaded here -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<footer></footer>
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
|
||||
</html>
|
||||
1
templates/godownloader_download.pages.tmpl
Normal file
1
templates/godownloader_download.pages.tmpl
Normal file
@ -0,0 +1 @@
|
||||
<h1>Download</h1>
|
||||
1
templates/godownloader_linkcollectors.pages.tmpl
Normal file
1
templates/godownloader_linkcollectors.pages.tmpl
Normal file
@ -0,0 +1 @@
|
||||
<h1>Linkcollectors</h1>
|
||||
15
templates/godownloader_setting.pages.tmpl
Normal file
15
templates/godownloader_setting.pages.tmpl
Normal file
@ -0,0 +1,15 @@
|
||||
<h1>Host setting</h1>
|
||||
|
||||
<form>
|
||||
<label>List host</label>
|
||||
<div class="select is-primary">
|
||||
<select>
|
||||
<option>Select dropdown</option>
|
||||
<option value="1">Debrid-link.com</option>
|
||||
</select>
|
||||
</div>
|
||||
<label>Username</label>
|
||||
<input class="input is-primary cell" type="text" value="">
|
||||
<label>Password</label>
|
||||
<input class="input is-primary cell" type="password" value="">
|
||||
</form>
|
||||
26
templates/head.pages.tmpl
Normal file
26
templates/head.pages.tmpl
Normal file
@ -0,0 +1,26 @@
|
||||
|
||||
{{ define "head" }}
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link
|
||||
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="/templates/assets/css/bulma.min.css">
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="/templates/assets/css/styles.css">
|
||||
<script src="/templates/assets/js/htmx.js" ></script>
|
||||
<script src="/templates/assets/js/sse.js"></script>
|
||||
<script src="/templates/assets/js/index.js" ></script>
|
||||
<script src="/templates/assets/js/json-enc.js"></script>
|
||||
<script src="/templates/assets/js/ws.js"></script>
|
||||
<script src="/templates/assets/js/function/functions.js"></script>
|
||||
<title>Login</title>
|
||||
</head>
|
||||
|
||||
{{ end }}
|
||||
2
templates/library.pages.tmpl
Normal file
2
templates/library.pages.tmpl
Normal file
@ -0,0 +1,2 @@
|
||||
<h2>Library</h2>
|
||||
<p>Bienvenue dans la bibliothèque.</p>
|
||||
45
templates/login.pages.tmpl
Normal file
45
templates/login.pages.tmpl
Normal file
@ -0,0 +1,45 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
{{ template "head" . }}
|
||||
|
||||
<body>
|
||||
|
||||
|
||||
<section class="hero is-fullheight is-flex is-justify-content-center is-align-items-center">
|
||||
<div class="card" style="width: 400px;">
|
||||
<div class="card-content">
|
||||
<div class="content">
|
||||
<div class="container has-text-centered">
|
||||
<h1>Login Panel</h1>
|
||||
</div>
|
||||
<form >
|
||||
<div class="mb-3">
|
||||
<label for="formGroupExampleInput" class="form-label">Email</label>
|
||||
<input type="text" class="input" id="email" placeholder="Votre email" name="email">
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="formGroupExampleInput2" class="form-label">Password</label>
|
||||
<input type="password" class="input" id="password" placeholder="Password" name="password">
|
||||
</div>
|
||||
<br />
|
||||
<div class="container has-text-centered">
|
||||
<button class="button is-primary is-outlined" type="submit" hx-post="/api/login"
|
||||
hx-ext="json-enc">
|
||||
Login
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
|
||||
</div>
|
||||
</section>
|
||||
|
||||
|
||||
|
||||
</body>
|
||||
<script src="/templates/assets/js/function/login.js"></script>
|
||||
|
||||
</html>
|
||||
77
templates/settings.pages.tmpl
Normal file
77
templates/settings.pages.tmpl
Normal file
@ -0,0 +1,77 @@
|
||||
<section class="hero has-background-primary">
|
||||
<div class="hero-body">
|
||||
<p class="title">Settings</p>
|
||||
<p class="subtitle">Hero subtitle</p>
|
||||
</div>
|
||||
</section>
|
||||
<div class="card">
|
||||
<section class="section is-link">
|
||||
<div class="column">
|
||||
<h1 class="title">Section</h1>
|
||||
<h2 class="subtitle">
|
||||
A simple container to divide your page into <strong>sections</strong>, like
|
||||
the one you're currently reading.
|
||||
</h2>
|
||||
</div>
|
||||
<div class="column">
|
||||
<div class="file is-info has-name">
|
||||
|
||||
<div class="field has-addons">
|
||||
<div class="field">
|
||||
<label class="label">Enter Path:</label>
|
||||
<div class="control has-icons-right">
|
||||
<input type="text" id="path-input" class="input" placeholder="Enter path..." oninput="validatePath()" />
|
||||
<span class="icon is-small is-right has-text-success">
|
||||
<span id="path-status-icon" class="icon">
|
||||
<!-- Placeholder for status icon -->
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="field" id="namePath" style="display: none;">
|
||||
<label class="label">Name</label>
|
||||
<div class="control">
|
||||
<input id="imp" class="input" type="text" hx-target="pathName" oninput="setInputHidden('pathName',this.value)" placeholder="Text input">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<form hx-post="/api/pathDownload/create" hx-trigger="click" hx-target="#path-list" hx-ext="json-enc" style="width: min-content;">
|
||||
<input type="hidden" id="path" name="path">
|
||||
<input type="hidden" id="pathName" name="pathName">
|
||||
<button id="validate-btn" class="button is-primary" disabled type="submit">
|
||||
Validate
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
<div class="column">
|
||||
<div hx-get="/api/pathDownload/all/" hx-trigger="load" hx-target="#path-list"> </div>
|
||||
<div id="path-list">
|
||||
<!-- The list of paths will appear here -->
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
<div class="card">
|
||||
<section class="section is-primary">
|
||||
<h1 class="title">Section</h1>
|
||||
<div class="column">
|
||||
<div hx-get="/godownloader/settings" hx-trigger="load" hx-target="#download-list"> </div>
|
||||
<div id="download-list">
|
||||
<!-- The list of download settings will appear here -->
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
<div class="card">
|
||||
<section class="section is-info">
|
||||
<h1 class="title">Section</h1>
|
||||
<h2 class="subtitle">
|
||||
A simple container to divide your page into <strong>sections</strong>, like
|
||||
the one you're currently reading.
|
||||
</h2>
|
||||
</section>
|
||||
</div>
|
||||
Loading…
Reference in New Issue
Block a user