cangui 2025-06-20 20:25:31 +02:00
parent 037d44ed0e
commit b31965c0cf

@@ -12,6 +12,7 @@ import (
"net/http"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
@@ -708,119 +709,96 @@ var seriesRegex = regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
// HandleAddJobsMultiple handles debriding multiple links, automatic sub-folder creation, and job registration
func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
// Retrieve the links
raw := r.FormValue("links")
lines := strings.Split(raw, "\n")
// Base path
pathIDStr := r.FormValue("path_id")
id, err := strconv.Atoi(pathIDStr)
if err != nil {
http.Error(w, "ID de chemin invalide", http.StatusBadRequest)
return
}
var basePath models.PathDownload
if err := db.First(&basePath, id).Error; err != nil {
http.Error(w, "Chemin introuvable", http.StatusBadRequest)
return
}
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// Iterate over each link to debrid
for _, link := range lines {
link = strings.TrimSpace(link)
if link == "" {
continue
}
// Debrid the link
links, err := client.AddLink(context.Background(), link)
if err != nil {
log.Printf("Échec débridage pour %s: %v", link, err)
continue
}
for _, l := range links {
// Automatically determine the sub-folder via regex
name := sanitizeFileName(l.Name)
series := ""
if m := seriesRegex.FindStringSubmatch(name); len(m) == 2 {
series = m[1]
return func(w http.ResponseWriter, r *http.Request) {
// 1) Parse form data
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
// FinalDir based on detection or manual creation
finalDir := basePath.Path
var assignPathID uint = basePath.ID
if series != "" {
// create a sub-folder for this series
dirName := sanitizeFileName(series)
dirPath := filepath.Join(basePath.Path, dirName)
if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
log.Printf("Erreur création dossier série %s: %v", dirName, err)
} else {
// check/insert the PathDownload record in the database
var pathRec models.PathDownload
if err := db.Where("path = ?", dirPath).First(&pathRec).Error; err != nil {
if err == gorm.ErrRecordNotFound {
pathRec = models.PathDownload{Name: series, Path: dirPath}
if err := db.Create(&pathRec).Error; err != nil {
log.Printf("Erreur création PathDownload en base: %v", err)
}
} else {
log.Printf("Erreur lecture PathDownload: %v", err)
}
}
assignPathID = pathRec.ID
finalDir = dirPath
}
}
// Create a transcode if needed
streamInfo, err := client.CreateTranscode(context.Background(), l.ID)
link := r.FormValue("link")
idStr := r.FormValue("path_id")
parsedID, err := strconv.Atoi(idStr)
if err != nil {
log.Println("Erreur transcode:", err)
http.Error(w, "Chemin invalide", http.StatusBadRequest)
return
}
// Register the job
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: assignPathID,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: streamInfo.StreamURL,
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("Erreur enregistrement job: %v", err)
}
}
}
log.Printf("[HTTP] Lien reçu : %s", link)
log.Printf("[HTTP] ID de chemin : %d", parsedID)
// Update the view
data := map[string]interface{}{
"jobs": download.ListJobs(db),
"paths": getAllPaths(db),
// 2) Fetch the base PathDownload
var basePath models.PathDownload
if err := db.First(&basePath, parsedID).Error; err != nil {
http.Error(w, "Dossier principal introuvable", http.StatusBadRequest)
return
}
ctx := r.Context()
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 3) Debrid and retrieve real links
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("[ERROR] Echec lors de l'ajout du lien : %v", err)
http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
return
}
// 4) For each returned link, create subfolder, register job, start download
for _, l := range links {
// Derive subfolder name from filename (e.g.: SeriesName...)
fileName := filepath.Base(l.Name)
re := regexp.MustCompile(`^(?P<series>[A-Za-z0-9\.]+)`)
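// The pattern captures the leading run of letters, digits and dots in the
// filename and stops at the first character outside that set (e.g. '-' or '_').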
matches := re.FindStringSubmatch(fileName)
series := fileName
if len(matches) > 1 {
series = matches[1]
}
// Create filesystem subfolder
dirPath := filepath.Join(basePath.Path, series)
if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
log.Printf("[ERROR] Création du dossier %s échouée : %v", dirPath, err)
}
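// A failed MkdirAll is only logged; the PathDownload record and the job are still created below.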
// Default to main folder's ID
assignPathID := int(basePath.ID)
// Create a PathDownload record for the subfolder
sub := models.PathDownload{
Name: series,
Path: dirPath,
}
if err := db.Create(&sub).Error; err == nil {
assignPathID = int(sub.ID)
}
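// If the insert fails, assignPathID keeps the base folder's ID as a fallback.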
// Register and start job
job := &download.DownloadJob{
ID: fmt.Sprintf("job-%d", time.Now().UnixNano()),
Link: l.DownloadURL,
Name: sanitizeFileName(fileName),
Status: "pending",
PathID: assignPathID,
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("[ERROR] Enregistrement du job échoué : %v", err)
continue
}
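// Launch the actual download in a background goroutine so the loop can move on to the remaining links.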
go download.StartDownload(job, job.Link, client, db)
download.Broadcast()
}
w.WriteHeader(http.StatusNoContent)
}
download.Broadcast()
renderPartial(w, "downloads_table", data)
}
}
// sanitizeFileName uses the same logic as download.SanitizeFileName