This commit is contained in:
cangui 2025-06-20 20:25:31 +02:00
parent 037d44ed0e
commit b31965c0cf


@@ -12,6 +12,7 @@ import (
 	"net/http"
 	"os"
 	"path/filepath"
+	"regexp"
 	"strconv"
 	"strings"
 	"sync"
@@ -709,28 +710,31 @@ var seriesRegex = regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
 // HandleAddJobsMultiple gère le débridage de plusieurs liens, auto-création de sous-dossier, et enregistrement
 func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
+		// 1) Parse form data
 		if err := r.ParseForm(); err != nil {
 			http.Error(w, "Requête invalide", http.StatusBadRequest)
 			return
 		}
-		// Récupération des liens
-		raw := r.FormValue("links")
-		lines := strings.Split(raw, "\n")
-		// Chemin de base
-		pathIDStr := r.FormValue("path_id")
-		id, err := strconv.Atoi(pathIDStr)
+		link := r.FormValue("link")
+		idStr := r.FormValue("path_id")
+		parsedID, err := strconv.Atoi(idStr)
 		if err != nil {
-			http.Error(w, "ID de chemin invalide", http.StatusBadRequest)
-			return
-		}
-		var basePath models.PathDownload
-		if err := db.First(&basePath, id).Error; err != nil {
-			http.Error(w, "Chemin introuvable", http.StatusBadRequest)
+			http.Error(w, "Chemin invalide", http.StatusBadRequest)
 			return
 		}
+		log.Printf("[HTTP] Lien reçu : %s", link)
+		log.Printf("[HTTP] ID de chemin : %d", parsedID)
+		// 2) Fetch the base PathDownload
+		var basePath models.PathDownload
+		if err := db.First(&basePath, parsedID).Error; err != nil {
+			http.Error(w, "Dossier principal introuvable", http.StatusBadRequest)
+			return
+		}
+		ctx := r.Context()
 		client := debridlink.NewClient(db)
 		account := download.GetFirstActiveAccount(client)
 		if account == nil {
@@ -739,87 +743,61 @@ func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
 		}
 		client.SetAccount(account)
-		// Itérer sur chaque lien à débrider
-		for _, link := range lines {
-			link = strings.TrimSpace(link)
-			if link == "" {
-				continue
-			}
-			// Débride le lien
-			links, err := client.AddLink(context.Background(), link)
+		// 3) Debrid and retrieve real links
+		links, err := client.AddLink(ctx, link)
 		if err != nil {
-			log.Printf("Échec débridage pour %s: %v", link, err)
-			continue
+			log.Printf("[ERROR] Echec lors de l'ajout du lien : %v", err)
+			http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
+			return
 		}
+		// 4) For each returned link, create subfolder, register job, start download
 		for _, l := range links {
-			// Détermination automatique du sous-dossier via regex
-			name := sanitizeFileName(l.Name)
-			series := ""
-			if m := seriesRegex.FindStringSubmatch(name); len(m) == 2 {
-				series = m[1]
-			}
-			// FinalDir selon détection ou création manuelle
-			finalDir := basePath.Path
-			var assignPathID uint = basePath.ID
-			if series != "" {
-				// créer sous-dossier pour cette série
-				dirName := sanitizeFileName(series)
-				dirPath := filepath.Join(basePath.Path, dirName)
+			// Derive subfolder name from filename (e.g.: SeriesName...)
+			fileName := filepath.Base(l.Name)
+			re := regexp.MustCompile(`^(?P<series>[A-Za-z0-9\.]+)`)
+			matches := re.FindStringSubmatch(fileName)
+			series := fileName
+			if len(matches) > 1 {
+				series = matches[1]
+			}
+			// Create filesystem subfolder
+			dirPath := filepath.Join(basePath.Path, series)
 			if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
-				log.Printf("Erreur création dossier série %s: %v", dirName, err)
-			} else {
-				// vérifier/ajouter en base PathDownload
-				var pathRec models.PathDownload
-				if err := db.Where("path = ?", dirPath).First(&pathRec).Error; err != nil {
-					if err == gorm.ErrRecordNotFound {
-						pathRec = models.PathDownload{Name: series, Path: dirPath}
-						if err := db.Create(&pathRec).Error; err != nil {
-							log.Printf("Erreur création PathDownload en base: %v", err)
-						}
-					} else {
-						log.Printf("Erreur lecture PathDownload: %v", err)
-					}
-				}
-				assignPathID = pathRec.ID
-				finalDir = dirPath
-			}
+				log.Printf("[ERROR] Création du dossier %s échouée : %v", dirPath, err)
 			}
-			// Création transcode si besoin
-			streamInfo, err := client.CreateTranscode(context.Background(), l.ID)
-			if err != nil {
-				log.Println("Erreur transcode:", err)
-			}
-			// Enregistrement du job
+			// Default to main folder's ID
+			assignPathID := int(basePath.ID)
+			// Optionally create a new PathDownload record for subfolder
+			// (Replace 'Title' with your model's actual field for folder name)
+			sub := models.PathDownload{
+				PathName: series,
+				Path: dirPath,
+			}
+			if err := db.Create(&sub).Error; err == nil {
+				assignPathID = int(sub.ID)
+			}
+			// Register and start job
 			job := &download.DownloadJob{
-				ID: l.ID,
+				ID: fmt.Sprintf("job-%d", time.Now().UnixNano()),
 				Link: l.DownloadURL,
-				Name: l.Name,
-				Status: "waiting",
+				Name: sanitizeFileName(fileName),
+				Status: "pending",
 				PathID: assignPathID,
-				Size: l.Size,
-				Host: l.Host,
-				Progress: 0,
-				StreamURL: streamInfo.StreamURL,
 			}
 			if err := download.RegisterJobWithDB(job, db); err != nil {
-				log.Printf("Erreur enregistrement job: %v", err)
+				log.Printf("[ERROR] Enregistrement du job échoué : %v", err)
+				continue
 			}
+			go download.StartDownload(job, job.Link, client, db)
+			download.Broadcast()
 		}
-		}
-		// Mise à jour vue
-		data := map[string]interface{}{
-			"jobs": download.ListJobs(db),
-			"paths": getAllPaths(db),
-		}
-		download.Broadcast()
-		renderPartial(w, "downloads_table", data)
+		w.WriteHeader(http.StatusNoContent)
 	}
 }
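
The subfolder detection also changes behaviour: the package-level seriesRegex (^(.+?)\.S\d{2}E\d{2}) used by the removed loop anchors on the season/episode marker, while the inline pattern introduced here (^(?P<series>[A-Za-z0-9\.]+)) takes the leading run of letters, digits and dots. A small standalone sketch contrasting the two on a hypothetical file name (the sample name is an assumption, not taken from the commit):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Hypothetical release name, for illustration only.
	fileName := "Some.Show.S01E02.1080p.mkv"

	// Pattern used by the removed code: capture everything before the SxxEyy marker.
	oldRe := regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
	if m := oldRe.FindStringSubmatch(fileName); len(m) == 2 {
		fmt.Println("old subfolder:", m[1]) // old subfolder: Some.Show
	}

	// Pattern introduced by this commit: leading letters, digits and dots.
	newRe := regexp.MustCompile(`^(?P<series>[A-Za-z0-9\.]+)`)
	if m := newRe.FindStringSubmatch(fileName); len(m) > 1 {
		fmt.Println("new subfolder:", m[1]) // new subfolder: Some.Show.S01E02.1080p.mkv
	}
}

For names made entirely of dot-separated tokens, the new pattern matches up to the first excluded character, so the derived subfolder can end up being the whole file name rather than only the series prefix.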
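For reference, a minimal sketch of how the updated handler could be mounted and exercised end to end. The route path, the sqlite DSN and the handlers import path are assumptions for illustration; only the function name, the form fields "link" and "path_id", and the 204 No Content reply come from the handler above.

package main

import (
	"log"
	"net/http"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"

	// Assumed import path for the package that defines HandleAddJobsMultiple.
	"example.com/app/handlers"
)

func main() {
	// Assumed database setup; the real project may use a different driver or DSN.
	db, err := gorm.Open(sqlite.Open("app.db"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// The handler reads the "link" and "path_id" form fields, registers one
	// download job per debrided link, and answers 204 No Content.
	http.HandleFunc("/jobs/add", handlers.HandleAddJobsMultiple(db))
	log.Fatal(http.ListenAndServe(":8080", nil))
}

A matching request would then be a form POST such as: curl -X POST -d "link=https://example.com/file" -d "path_id=1" http://localhost:8080/jobs/add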