package renders

import (
	"app/shelfly/internal/debridlink"
	"app/shelfly/internal/download"
	"app/shelfly/internal/models"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"sync"
	"text/template"
	"time"

	"github.com/gorilla/mux"
	"gorm.io/gorm"
)

var (
	// templates holds every page parsed from *.pages.tmpl.
	templates *template.Template
)

func init() {
	funcMap := template.FuncMap{
		"hasSuffix": strings.HasSuffix,
		"ext": func(name string) string {
			return strings.TrimPrefix(filepath.Ext(name), ".")
		},
		"split":      strings.Split,
		"trimPrefix": strings.TrimPrefix,
		// helper to convert bytes -> kilobytes
		"toKB": func(size int64) float64 {
			return float64(size) / 1024
		},
	}

	templates = template.Must(
		template.New("").
			Funcs(funcMap).
			ParseGlob("./templates/*.pages.tmpl"),
	)
}
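
// Illustrative use of the FuncMap helpers inside a page template (a sketch,
// not taken from the actual templates; it assumes the template receives an
// Entry value as defined below):
//
//	{{ if hasSuffix .Name ".mp4" }}video{{ end }}
//	{{ ext .Name }}                      {{/* "mp4" */}}
//	{{ printf "%.1f KB" (toKB .Size) }}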

// Entry describes one file or directory under the upload root.
type Entry struct {
	Name, Path string
	IsDir      bool
	ModTime    time.Time
	Size       int64
}

// listEntries lists the contents of the directory base/rel.
func listEntries(base, rel string) ([]Entry, error) {
	dir := filepath.Join(base, rel)
	fis, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	out := make([]Entry, 0, len(fis))
	for _, fi := range fis {
		info, err := fi.Info()
		if err != nil {
			// The entry disappeared between ReadDir and Info; skip it.
			continue
		}
		out = append(out, Entry{
			Name:    fi.Name(),
			Path:    filepath.ToSlash(filepath.Join(rel, fi.Name())),
			IsDir:   fi.IsDir(),
			ModTime: info.ModTime(),
			Size:    info.Size(),
		})
	}
	return out, nil
}
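
// Example call (a sketch, assuming an "Icons" folder exists under /app/upload):
//
//	entries, err := listEntries("/app/upload", "Icons")
//	// entries[i].Path is "Icons/<name>", the same form used by the ?path=
//	// query parameter of StreamHandler and DetailHandler below.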

func Login(w http.ResponseWriter, r *http.Request) {
	renderTemplate(w, "login", nil)
}

func Dashboard(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var paths []models.PathDownload
		if err := db.Find(&paths).Error; err != nil {
			http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
			return
		}

		data := map[string]interface{}{
			"paths": paths,
		}

		renderTemplate(w, "dashboard", data)
	}
}

func MenuLibrary(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var currentPaths []models.PathDownload
		if err := db.Find(&currentPaths).Error; err != nil {
			http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
			return
		}

		// Retrieve the previous version of the paths, if the client sent one.
		lastUpdate := r.Header.Get("HX-Current-Paths")
		var previousPaths []models.PathDownload
		if lastUpdate != "" {
			if err := json.Unmarshal([]byte(lastUpdate), &previousPaths); err != nil {
				log.Println("[MenuLibrary] invalid HX-Current-Paths header:", err)
			}
		}

		// Serialize both versions to JSON for comparison.
		currentJSON, _ := json.Marshal(currentPaths)
		previousJSON, _ := json.Marshal(previousPaths)

		// Check whether the paths have changed.
		pathsChanged := string(currentJSON) != string(previousJSON)

		data := map[string]interface{}{
			"paths": currentPaths,
		}

		// For HTMX requests, add the appropriate headers.
		if r.Header.Get("HX-Request") == "true" {
			if pathsChanged {
				w.Header().Set("HX-Trigger", "pathsUpdated")
			}
			w.Header().Set("HX-Current-Paths", string(currentJSON))
		}

		renderPartial(w, "dashboard", data)
	}
}
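
// The change detection above depends on the client echoing the last snapshot
// back in the HX-Current-Paths request header and reacting to the
// "pathsUpdated" HX-Trigger; the HTMX attributes that do this live in the
// templates, not here.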

func Settings(w http.ResponseWriter, r *http.Request) {
	data := map[string]interface{}{
		"Title":   "Settings Page",
		"Options": []string{"Option 1", "Option 2", "Option 3"},
	}
	renderPartial(w, "settings", data)
}

func Library(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "library", nil)
}

func GoDownload(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "godownloader_download", nil)
}

func GoDownloadLinkCollectors(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "godownloader_linkcollectors", nil)
}

// GetDebridClient returns a Debrid-Link API client backed by the given database.
func GetDebridClient(db *gorm.DB) *debridlink.Client {
	return debridlink.NewClient(db)
}

func GoDownloadSettingDelete(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := context.Background()
		DebridClient := GetDebridClient(db)

		idStr := r.URL.Query().Get("id")
		if idStr == "" {
			http.Error(w, "ID manquant", http.StatusBadRequest)
			return
		}

		idUint, err := strconv.ParseUint(idStr, 10, 64)
		if err != nil {
			http.Error(w, "ID invalide", http.StatusBadRequest)
			return
		}

		if err := DebridClient.DeleteDebridAccount(ctx, uint(idUint)); err != nil {
			http.Error(w, "Erreur lors de la suppression", http.StatusInternalServerError)
			return
		}

		http.Redirect(w, r, "/godownloader/settings", http.StatusSeeOther)
	}
}

func GoDownloadSettingToggleActive(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := context.Background()
		DebridClient := debridlink.NewClient(db)

		idStr := r.URL.Query().Get("id")
		idUint, err := strconv.ParseUint(idStr, 10, 32)
		if err != nil {
			http.Error(w, "ID invalide", http.StatusBadRequest)
			return
		}

		err = DebridClient.ToggleActiveStatus(ctx, uint(idUint))
		if err != nil {
			log.Println("Erreur lors du toggle:", err)
			http.Error(w, "Échec de mise à jour", http.StatusInternalServerError)
			return
		}

		// Fetch the updated list.
		accounts, err := DebridClient.ListDebridAccounts(ctx)
		if err != nil {
			http.Error(w, "Erreur lors du chargement des comptes", http.StatusInternalServerError)
			return
		}

		// HTMX request or full page.
		if r.Header.Get("HX-Request") == "true" {
			renderPartial(w, "partials/accounts_table", map[string]interface{}{
				"accounts": accounts,
			})
		} else {
			renderPartial(w, "godownloader_setting", map[string]interface{}{
				"accounts": accounts,
			})
		}
	}
}

func GoDownloadSetting(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		client := debridlink.NewClient(db)

		switch r.Method {
		case http.MethodPost:
			if err := r.ParseForm(); err != nil {
				http.Error(w, "Form invalide", http.StatusBadRequest)
				return
			}

			username := r.FormValue("username")
			password := r.FormValue("password")

			deviceResp, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
			if err != nil {
				log.Println("[OAuth2] Erreur device_code:", err)
				http.Error(w, "Erreur OAuth: "+err.Error(), http.StatusInternalServerError)
				return
			}

			// Show the code and verification URL in #auth-status.
			renderPartial(w, "oauth_device_code", map[string]any{
				"code": deviceResp.UserCode,
				"url":  deviceResp.VerificationURL,
			})

			// Asynchronous polling for the tokens.
			go func() {
				tokens, err := client.PollDeviceToken(context.Background(), deviceResp.DeviceCode, deviceResp.Interval)
				if err != nil {
					log.Println("[OAuth2] Polling échoué:", err)
					return
				}

				account := &debridlink.DebridAccount{
					Host:         "debrid-link.com",
					Username:     username,
					Password:     password,
					IsActive:     true,
					AccessToken:  tokens.AccessToken,
					RefreshToken: tokens.RefreshToken,
					ExpiresAt:    time.Now().Add(time.Duration(tokens.ExpiresIn) * time.Second),
				}

				if err := db.Create(account).Error; err != nil {
					log.Println("[DB] Sauvegarde échouée:", err)
					return
				}

				log.Println("[OAuth2] Compte sauvegardé")
			}()

		case http.MethodGet:
			accounts, _ := client.ListDebridAccounts(ctx)
			renderPartial(w, "godownloader_setting", map[string]any{
				"accounts": accounts,
			})
		}
	}
}
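
// The POST branch above follows a device-code style OAuth2 flow: request a
// device/user code pair, immediately render the code and verification URL for
// the user, then poll for tokens in the background and persist the account
// once authorization succeeds. The endpoint details live in the debridlink
// package.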

func GoDownloadPartialTable(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		client := debridlink.NewClient(db)
		accounts, _ := client.ListDebridAccounts(ctx)
		renderPartial(w, "accounts_table", map[string]any{
			"accounts": accounts,
		})
	}
}

func PollStatusHandler(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var count int64
		db.Model(&debridlink.DebridAccount{}).Where("is_active = ?", true).Count(&count)

		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]bool{
			"success": count > 0,
		})
	}
}
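
// PollStatusHandler answers with a minimal JSON payload, for example:
//
//	{"success": true}
//
// where "success" is true as soon as at least one account is marked active.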

func GoDownload2(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("HX-Trigger", "forceUpdate")

		log.Printf("GoDownload2")
		jobs := download.ListJobs(db)
		fmt.Printf("%+v\n", jobs)

		var paths []models.PathDownload
		db.Find(&paths)

		data := map[string]interface{}{
			"jobs":  jobs,
			"paths": paths,
			"Now":   time.Now(), // expose a "Now" key to the template
		}

		download.Broadcast()
		renderTemplate(w, "godownloader_download", data)
	}
}

func HandleAddJob(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := r.ParseForm(); err != nil {
			http.Error(w, "Requête invalide", http.StatusBadRequest)
			return
		}

		link := r.FormValue("link")
		pathIDStr := r.FormValue("path_id")

		parsedID, err := strconv.Atoi(pathIDStr)
		if err != nil {
			http.Error(w, "Chemin invalide", http.StatusBadRequest)
			return
		}

		log.Println("[HTTP] Lien reçu :", link)
		log.Println("[HTTP] ID de chemin :", parsedID)

		// Debrid-Link authentication.
		client := debridlink.NewClient(db)
		account := download.GetFirstActiveAccount(client)
		if account == nil {
			http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
			return
		}
		client.SetAccount(account)

		// Unrestrict the link.
		ctx := r.Context()
		links, err := client.AddLink(ctx, link)
		if err != nil {
			log.Printf("[ERROR] Echec lors de l'ajout du lien : %v\n", err)
			http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
			return
		}

		// Register each returned link as a "waiting" job.
		for _, l := range links {
			log.Printf("[l'id] : %v\n", l.ID)

			streamInfo, err := client.CreateTranscode(ctx, l.ID)
			if err != nil {
				log.Println("Erreur GetTranscode:", err)
				return
			}

			job := &download.DownloadJob{
				ID:        l.ID,
				Link:      l.DownloadURL,
				Name:      l.Name,
				Status:    "waiting",
				PathID:    parsedID,
				Size:      l.Size,
				Host:      l.Host,
				Progress:  0,                    // explicit zero, the template expects a value
				StreamURL: streamInfo.StreamURL, // empty by default
			}
			if err := download.RegisterJobWithDB(job, db); err != nil {
				log.Printf("[ERROR] Job non enregistré : %v\n", err)
			}
		}

		// Refresh the partial view (jobs table).
		data := map[string]interface{}{
			"jobs": download.ListJobs(db),
		}
		fmt.Printf("%+v\n", data)
		download.Broadcast() // notify SSE clients right away

		renderPartial(w, "downloads_table", data)
	}
}
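
// HandleAddJob expects a form-encoded POST with two fields: "link" (the URL to
// unrestrict) and "path_id" (the numeric ID of a models.PathDownload row that
// selects the destination directory). It answers with the refreshed
// "downloads_table" partial, so an HTMX form posting those two fields is all
// the client side needs.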

// HandleJobsStream opens an SSE stream that emits a "jobs" event whenever the
// job list changes; the payload is only a refresh signal, clients re-fetch the
// partial themselves.
func HandleJobsStream(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		log.Println("[DEBUG] Nouvelle connexion au flux SSE")

		flusher, ok := w.(http.Flusher)
		if !ok {
			log.Println("[ERROR] Flusher non supporté")
			http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
			return
		}

		// SSE headers.
		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")
		w.Header().Set("Connection", "keep-alive")
		w.Header().Set("Access-Control-Allow-Origin", "*") // useful in dev or when CORS is needed

		ch := download.Subscribe()
		log.Println("[DEBUG] Abonné au canal de téléchargement")
		defer func() {
			download.Unsubscribe(ch)
			log.Println("[DEBUG] Désabonnement du canal de téléchargement")
		}()

		// Send a well-formed initial message (valid JSON payload).
		log.Println("[DEBUG] Envoi du signal initial")
		fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
		flusher.Flush()

		// Stream loop.
		for {
			select {
			case <-r.Context().Done():
				log.Println("[DEBUG] Fermeture de la connexion SSE (client disconnect)")
				return
			case <-ch:
				log.Println("[DEBUG] Événement reçu sur le canal — envoi SSE")
				fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
				flusher.Flush()
			}
		}
	}
}
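
// Each notification is a single SSE frame on the wire:
//
//	event: jobs
//	data: {"refresh": true}
//
// followed by a blank line; clients only need to treat it as a
// "something changed" ping and re-fetch the jobs partial.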

// sendSSEUpdate renders the partial's HTML and sends it as a "jobs" event.
func sendSSEUpdate(w http.ResponseWriter, flusher http.Flusher, db *gorm.DB) error {
	// 1) Create the pipe.
	pr, pw := io.Pipe()

	// 2) In a goroutine, parse and execute the partial into pw.
	go func() {
		defer pw.Close()

		// Load the partial's template file.
		tmpl, err := template.ParseFiles("./templates/downloads_table.pages.tmpl")
		if err != nil {
			log.Printf("ParseFiles error: %v", err)
			return
		}

		// Prepare the actual data.
		data := map[string]interface{}{
			"jobs": download.ListJobs(db),
		}

		// Execute *only* the "downloads_table" define.
		if err := tmpl.ExecuteTemplate(pw, "downloads_table", data); err != nil {
			log.Printf("ExecuteTemplate error: %v", err)
			return
		}
	}()

	// 3) Read the full rendered HTML.
	htmlBytes, err := io.ReadAll(pr)
	if err != nil {
		return fmt.Errorf("lecture rendu échouée: %w", err)
	}

	// 4) Build the SSE message:
	//    - event: jobs
	//    - each line of HTML prefixed with "data:"
	fmt.Fprintf(w, "event: jobs\n")
	for _, line := range strings.Split(string(htmlBytes), "\n") {
		fmt.Fprintf(w, "data: %s\n", line)
	}
	// Blank line terminates the event.
	fmt.Fprintf(w, "\n")

	// 5) Flush so the client receives it immediately.
	flusher.Flush()
	return nil
}

func HandleListJobsPartial(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		jobs := download.ListJobs(db)
		data := map[string]interface{}{
			"jobs": jobs,
		}
		// download.Broadcast()
		renderPartial(w, "downloads_table", data)
	}
}

var (
	jobs   = make(map[string]*download.DownloadJob)
	jobsMu sync.Mutex
)
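
// HandleStartJob and HandleResumeJob below look a job up in this in-memory map
// first and fall back to the database, copying the row into the map so that
// subsequent requests share the same *download.DownloadJob pointer.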

func HandleStartJob(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		id := mux.Vars(r)["id"]
		log.Printf("[id] job id= %s", id)

		// 1. Try the in-memory map first.
		jobsMu.Lock()
		job, exists := jobs[id]
		jobsMu.Unlock()

		// 2. Otherwise fall back to the database.
		if !exists {
			var j download.DownloadJob
			if err := db.First(&j, "id = ?", id).Error; err != nil {
				http.Error(w, "Job introuvable", http.StatusNotFound)
				return
			}

			// Important: copy into a new variable so the map stores a stable pointer.
			jobCopy := j
			jobsMu.Lock()
			jobs[id] = &jobCopy
			job = &jobCopy
			jobsMu.Unlock()
		}

		// 3. Set up the Debrid-Link client.
		client := debridlink.NewClient(db)
		account := download.GetFirstActiveAccount(client)
		if account == nil {
			http.Error(w, "Aucun compte actif", http.StatusBadRequest)
			return
		}
		client.SetAccount(account)

		// 4. Start the actual download.
		go download.StartDownload(job, job.Link, client, db)
		download.Broadcast() // notify SSE clients right away

		w.WriteHeader(http.StatusNoContent)
	}
}

func HandlePauseJob(w http.ResponseWriter, r *http.Request) {
	id := mux.Vars(r)["id"]
	download.UpdateJobStatus(id, "paused", nil)
	download.Broadcast() // notify SSE clients right away

	w.WriteHeader(http.StatusNoContent)
}

func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		id := mux.Vars(r)["id"]

		// 1. Look in the in-memory map.
		jobsMu.Lock()
		job, exists := jobs[id]
		jobsMu.Unlock()

		// 2. If absent, fall back to the database.
		if !exists {
			var j download.DownloadJob
			if err := db.First(&j, "id = ?", id).Error; err != nil {
				http.Error(w, "Job introuvable", http.StatusNotFound)
				return
			}

			jobCopy := j
			jobsMu.Lock()
			jobs[id] = &jobCopy
			job = &jobCopy
			jobsMu.Unlock()
		}

		// 3. Initialize the Debrid-Link client.
		client := debridlink.NewClient(db)
		account := download.GetFirstActiveAccount(client)
		if account == nil {
			http.Error(w, "Aucun compte actif", http.StatusBadRequest)
			return
		}
		client.SetAccount(account)

		// 4. Restart the download.
		go download.StartDownload(job, job.Link, client, db)
		w.WriteHeader(http.StatusNoContent)
	}
}

func HandleDeleteJob(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		id := mux.Vars(r)["id"]
		download.DeleteJob(id, db)
		go download.Broadcast() // notify SSE clients right away

		w.WriteHeader(http.StatusNoContent)
	}
}

func HandleDeleteMultipleJobs(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := r.ParseForm(); err != nil {
			http.Error(w, "Impossible de lire les IDs", http.StatusBadRequest)
			return
		}

		ids := r.Form["ids[]"]
		if len(ids) == 0 {
			http.Error(w, "Aucun ID reçu", http.StatusBadRequest)
			return
		}

		for _, id := range ids {
			download.DeleteJob(id, db)
		}
		download.Broadcast() // notify SSE clients right away

		w.WriteHeader(http.StatusNoContent)
	}
}

func StreamHandler(w http.ResponseWriter, r *http.Request) {
	base := "/app/upload"
	cur := r.URL.Query().Get("path") // e.g. "", "Icons", "Code/Sub"

	// 1) Sidebar: only the root level is handled here.
	root, _ := listEntries(base, "")
	var dirs []Entry
	for _, e := range root {
		if e.IsDir {
			dirs = append(dirs, e)
		}
	}

	// 2) Contents of the current directory.
	entries, _ := listEntries(base, cur)

	data := map[string]interface{}{
		"Dirs":        dirs,
		"Entries":     entries,
		"CurrentPath": cur,
	}

	// For an HTMX call (partial list), return only the grid.
	if r.Header.Get("HX-Request") == "true" {
		renderPartial(w, "_file_list", data)
		return
	}
	// Otherwise render the full page.
	renderTemplate(w, "folders", data)
}
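
// Note: unlike DetailHandler below, StreamHandler joins the raw "path" query
// value onto the base directory without the Clean/TrimPrefix sanitization; if
// this handler is reachable with untrusted input, the same guard likely
// belongs here as well.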

func DetailHandler(w http.ResponseWriter, r *http.Request) {
	base := "/app/upload"
	rel := r.URL.Query().Get("path")

	log.Printf("Reçu path: %q", rel)

	// Sanitize: strip any relative components such as "..".
	rel = filepath.Clean("/" + rel)
	rel = strings.TrimPrefix(rel, "/")

	log.Printf("Path nettoyé (rel): %q", rel)

	absPath := filepath.Join(base, rel)
	log.Printf("Chemin absolu construit (absPath): %q", absPath)

	info, err := os.Stat(absPath)
	if err != nil {
		log.Printf("Erreur os.Stat: %v", err)
		http.NotFound(w, r)
		return
	}

	// Protection: make sure we stay inside the base directory.
	if !strings.HasPrefix(absPath, base) {
		log.Printf("Sécurité: chemin hors du base: %q", absPath)
		http.NotFound(w, r)
		return
	}

	entry := Entry{
		Name:    info.Name(),
		Path:    rel,
		IsDir:   info.IsDir(),
		ModTime: info.ModTime(),
		Size:    info.Size(),
	}

	log.Printf("Entrée trouvée: %+v", entry)

	renderPartial(w, "_file_detail", map[string]interface{}{
		"Entry": entry,
	})
}

// renderPartial executes the template named `<templ>.pages.tmpl` directly.
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
	if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

// renderTemplate does the same for full pages: it executes `<templ>.pages.tmpl`.
func renderTemplate(w http.ResponseWriter, templ string, data map[string]interface{}) {
	if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
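
// A minimal wiring sketch for these handlers (illustrative only: the actual
// router setup lives elsewhere in the application, and the paths below are
// assumptions, not the real routes):
//
//	r := mux.NewRouter()
//	r.HandleFunc("/login", renders.Login)
//	r.HandleFunc("/dashboard", renders.Dashboard(db))
//	r.HandleFunc("/godownloader/settings", renders.GoDownloadSetting(db))
//	r.HandleFunc("/jobs/add", renders.HandleAddJob(db)).Methods(http.MethodPost)
//	r.HandleFunc("/jobs/{id}/start", renders.HandleStartJob(db)).Methods(http.MethodPost)
//	r.HandleFunc("/jobs/stream", renders.HandleJobsStream(db))
//	r.HandleFunc("/jobs/poll", renders.PollStatusHandler(db))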