shelfy/renders/renders.go

1516 lines
47 KiB
Go
Raw Permalink Normal View History

2025-06-06 07:42:55 +00:00
package renders
import (
2025-06-09 14:13:32 +00:00
"app/shelfly/internal/debridlink"
2025-06-12 08:57:10 +00:00
"app/shelfly/internal/download"
2025-06-06 07:42:55 +00:00
"app/shelfly/internal/models"
2025-06-22 17:16:09 +00:00
"bytes"
2025-06-09 14:13:32 +00:00
"context"
2025-06-06 07:42:55 +00:00
"encoding/json"
2025-06-15 15:21:11 +00:00
"fmt"
"io"
2025-06-09 14:13:32 +00:00
"log"
2025-06-06 07:42:55 +00:00
"net/http"
2025-06-21 16:44:12 +00:00
"net/url"
2025-06-20 14:26:56 +00:00
"os"
2025-06-21 16:17:16 +00:00
"os/exec"
2025-06-20 14:26:56 +00:00
"path/filepath"
2025-06-20 18:25:31 +00:00
"regexp"
2025-06-09 14:13:32 +00:00
"strconv"
2025-06-15 15:21:11 +00:00
"strings"
2025-06-12 15:31:12 +00:00
"sync"
2025-06-06 07:42:55 +00:00
"text/template"
2025-06-09 14:13:32 +00:00
"time"
2025-06-06 07:42:55 +00:00
2025-06-12 15:31:12 +00:00
"github.com/gorilla/mux"
2025-06-06 07:42:55 +00:00
"gorm.io/gorm"
)
2025-06-20 13:41:37 +00:00
var (
	// templates holds every page template matching *.pages.tmpl.
	templates *template.Template
)

// init loads all page templates once at startup and registers the
// helper functions the templates rely on. template.Must panics if the
// glob fails to parse, aborting startup early on a broken template.
func init() {
	helpers := template.FuncMap{
		"hasSuffix":  strings.HasSuffix,
		"urlquery":   func(s string) string { return url.QueryEscape(s) },
		"split":      strings.Split,
		"trimPrefix": strings.TrimPrefix,
		// ext returns the file extension without its leading dot.
		"ext": func(name string) string {
			return strings.TrimPrefix(filepath.Ext(name), ".")
		},
		// toKB converts a size in bytes to kilobytes.
		"toKB": func(size int64) float64 {
			return float64(size) / 1024
		},
	}
	templates = template.Must(
		template.New("").
			Funcs(helpers).
			ParseGlob("./templates/*.pages.tmpl"),
	)
}
2025-06-06 07:42:55 +00:00
2025-06-20 13:41:37 +00:00
type Entry struct {
Name, Path string
IsDir bool
ModTime time.Time
Size int64
}
// helper pour lister un dossier
func listEntries(base, rel string) ([]Entry, error) {
dir := filepath.Join(base, rel)
fis, err := os.ReadDir(dir)
if err != nil {
return nil, err
}
out := make([]Entry, 0, len(fis))
for _, fi := range fis {
info, _ := fi.Info()
out = append(out, Entry{
Name: fi.Name(),
Path: filepath.ToSlash(filepath.Join(rel, fi.Name())),
IsDir: fi.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
})
}
return out, nil
}
2025-06-06 07:42:55 +00:00
// Login renders the standalone login page.
func Login(w http.ResponseWriter, r *http.Request) {
	renderTemplate(w, "login", nil)
}
func Dashboard(db *gorm.DB)http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var paths []models.PathDownload
2025-06-21 15:54:44 +00:00
root := "/app/upload"
// on sélectionne tout ce qui est sous /app/upload/, mais pas plus loin quun seul slash en plus
if err := db.
Where("path LIKE ? AND path NOT LIKE ?", root+"/%", root+"/%/%").
Find(&paths).Error; err != nil {
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
return
}
2025-06-06 07:42:55 +00:00
data := map[string]interface{}{
"paths": paths,
}
renderTemplate(w,"dashboard",data)
}
}
// MenuLibrary renders the dashboard partial used by the sidebar menu.
// It compares the current list of download paths against the snapshot
// the client last saw (sent back in the HX-Current-Paths header) and,
// on HTMX requests, fires a "pathsUpdated" trigger when they differ.
func MenuLibrary(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var current []models.PathDownload
		if err := db.Find(&current).Error; err != nil {
			http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
			return
		}

		// Decode the previous snapshot, if the client sent one.
		// A malformed header simply yields an empty previous list.
		var previous []models.PathDownload
		if raw := r.Header.Get("HX-Current-Paths"); raw != "" {
			json.Unmarshal([]byte(raw), &previous)
		}

		// Detect changes by comparing both lists through their JSON
		// encodings.
		curJSON, _ := json.Marshal(current)
		prevJSON, _ := json.Marshal(previous)
		changed := string(curJSON) != string(prevJSON)

		// For HTMX requests, attach the trigger and the fresh snapshot.
		if r.Header.Get("HX-Request") == "true" {
			if changed {
				w.Header().Set("HX-Trigger", "pathsUpdated")
			}
			w.Header().Set("HX-Current-Paths", string(curJSON))
		}

		renderPartial(w, "dashboard", map[string]interface{}{
			"paths": current,
		})
	}
}
// Settings renders the settings partial with a static set of options.
func Settings(w http.ResponseWriter, r *http.Request) {
	options := []string{"Option 1", "Option 2", "Option 3"}
	renderPartial(w, "settings", map[string]interface{}{
		"Title":   "Settings Page",
		"Options": options,
	})
}
// Library renders the library partial.
func Library(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "library", nil)
}

// GoDownload renders the downloader's download partial.
func GoDownload(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "godownloader_download", nil)
}

// GoDownloadLinkCollectors renders the link-collectors partial.
func GoDownloadLinkCollectors(w http.ResponseWriter, r *http.Request) {
	renderPartial(w, "godownloader_linkcollectors", nil)
}
2025-06-09 14:13:32 +00:00
func GetDebridClient(db *gorm.DB) *debridlink.Client {
return debridlink.NewClient(db)
2025-06-06 07:42:55 +00:00
}
2025-06-09 14:13:32 +00:00
func GoDownloadSettingDelete(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
DebridClient := GetDebridClient(db)
2025-06-06 07:42:55 +00:00
2025-06-09 14:13:32 +00:00
idStr := r.URL.Query().Get("id")
if idStr == "" {
http.Error(w, "ID manquant", http.StatusBadRequest)
return
}
idUint, err := strconv.ParseUint(idStr, 10, 64)
if err != nil {
http.Error(w, "ID invalide", http.StatusBadRequest)
return
}
if err := DebridClient.DeleteDebridAccount(ctx, uint(idUint)); err != nil {
http.Error(w, "Erreur lors de la suppression", http.StatusInternalServerError)
return
}
http.Redirect(w, r, "/godownloader/settings", http.StatusSeeOther)
}
}
// GoDownloadSettingToggleActive flips the is_active flag of the debrid
// account identified by the "id" query parameter, then re-renders the
// accounts table (HTMX) or the full settings partial.
func GoDownloadSettingToggleActive(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Request-scoped context instead of context.Background(), so DB
		// work stops when the client goes away.
		ctx := r.Context()
		// Consistent with the other settings handlers, go through the
		// shared constructor.
		client := GetDebridClient(db)

		idStr := r.URL.Query().Get("id")
		idUint, err := strconv.ParseUint(idStr, 10, 32)
		if err != nil {
			http.Error(w, "ID invalide", http.StatusBadRequest)
			return
		}

		if err := client.ToggleActiveStatus(ctx, uint(idUint)); err != nil {
			log.Println("Erreur lors du toggle:", err)
			http.Error(w, "Échec de mise à jour", http.StatusInternalServerError)
			return
		}

		// Reload the up-to-date account list.
		accounts, err := client.ListDebridAccounts(ctx)
		if err != nil {
			http.Error(w, "Erreur lors du chargement des comptes", http.StatusInternalServerError)
			return
		}

		// HTMX gets just the table; a normal request gets the full
		// settings partial.
		// NOTE(review): this template name differs from the
		// "accounts_table" used by GoDownloadPartialTable — confirm both
		// names exist.
		if r.Header.Get("HX-Request") == "true" {
			renderPartial(w, "partials/accounts_table", map[string]interface{}{
				"accounts": accounts,
			})
		} else {
			renderPartial(w, "godownloader_setting", map[string]interface{}{
				"accounts": accounts,
			})
		}
	}
}
// GoDownloadSetting serves the Debrid-Link settings page.
//
// GET renders the settings partial with the configured accounts.
// POST starts an OAuth2 device-code flow with the submitted
// credentials: the user code and verification URL are rendered
// immediately, while a background goroutine polls for the token and
// persists the account once authorized.
func GoDownloadSetting(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		client := debridlink.NewClient(db)
		switch r.Method {
		case http.MethodPost:
			if err := r.ParseForm(); err != nil {
				http.Error(w, "Form invalide", http.StatusBadRequest)
				return
			}
			username := r.FormValue("username")
			password := r.FormValue("password")
			deviceResp, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
			if err != nil {
				log.Println("[OAuth2] Erreur device_code:", err)
				http.Error(w, "Erreur OAuth: "+err.Error(), http.StatusInternalServerError)
				return
			}
			// Show the user code + verification URL in #auth-status.
			renderPartial(w, "oauth_device_code", map[string]any{
				"code": deviceResp.UserCode,
				"url":  deviceResp.VerificationURL,
			})
			// Asynchronous polling: context.Background() is deliberate so
			// polling outlives this request.
			go func() {
				tokens, err := client.PollDeviceToken(context.Background(), deviceResp.DeviceCode, deviceResp.Interval)
				if err != nil {
					log.Println("[OAuth2] Polling échoué:", err)
					return
				}
				// NOTE(review): the plaintext password is persisted along
				// with the tokens — confirm this is intentional.
				account := &debridlink.DebridAccount{
					Host:         "debrid-link.com",
					Username:     username,
					Password:     password,
					IsActive:     true,
					AccessToken:  tokens.AccessToken,
					RefreshToken: tokens.RefreshToken,
					ExpiresAt:    time.Now().Add(time.Duration(tokens.ExpiresIn) * time.Second),
				}
				if err := db.Create(account).Error; err != nil {
					log.Println("[DB] Sauvegarde échouée:", err)
					return
				}
				log.Println("[OAuth2] Compte sauvegardé")
			}()
		case http.MethodGet:
			// A listing error is silently ignored; the page then renders
			// with an empty account list.
			accounts, _ := client.ListDebridAccounts(ctx)
			renderPartial(w, "godownloader_setting", map[string]any{
				"accounts": accounts,
			})
		}
	}
}
// GoDownloadPartialTable renders the accounts table partial used by
// HTMX refreshes of the settings page.
func GoDownloadPartialTable(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		client := debridlink.NewClient(db)
		// The original discarded this error and rendered an empty table;
		// surface it instead.
		accounts, err := client.ListDebridAccounts(ctx)
		if err != nil {
			http.Error(w, "Erreur lors du chargement des comptes", http.StatusInternalServerError)
			return
		}
		renderPartial(w, "accounts_table", map[string]any{
			"accounts": accounts,
		})
	}
}
// PollStatusHandler reports, as JSON {"success": bool}, whether at
// least one debrid account is currently active.
func PollStatusHandler(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var count int64
		// The original ignored the query error, which made a DB outage
		// indistinguishable from "no active account".
		if err := db.Model(&debridlink.DebridAccount{}).
			Where("is_active = ?", true).
			Count(&count).Error; err != nil {
			http.Error(w, `{"error": "database unavailable"}`, http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		if err := json.NewEncoder(w).Encode(map[string]bool{
			"success": count > 0,
		}); err != nil {
			log.Printf("[PollStatus] encode: %v", err)
		}
	}
}
2025-06-12 08:57:10 +00:00
func GoDownload2(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
2025-06-15 15:21:11 +00:00
w.Header().Set("HX-Trigger", "forceUpdate")
log.Printf("GoDownload2")
jobs := download.ListJobs(db)
fmt.Printf("%+v\n", jobs)
2025-06-12 08:57:10 +00:00
var paths []models.PathDownload
db.Find(&paths)
data := map[string]interface{}{
"jobs": jobs,
"paths": paths,
2025-06-15 15:21:11 +00:00
"Now": time.Now(), // 👈 ajoute la clé "Now"
2025-06-12 08:57:10 +00:00
}
2025-06-15 15:21:11 +00:00
download.Broadcast()
2025-06-12 08:57:10 +00:00
renderTemplate(w, "godownloader_download", data)
}
}
2025-06-15 15:21:11 +00:00
2025-06-12 08:57:10 +00:00
func HandleAddJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
2025-06-12 15:31:12 +00:00
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
2025-06-12 08:57:10 +00:00
link := r.FormValue("link")
pathIDStr := r.FormValue("path_id")
parsedID, err := strconv.Atoi(pathIDStr)
if err != nil {
http.Error(w, "Chemin invalide", http.StatusBadRequest)
return
}
2025-06-12 15:31:12 +00:00
log.Println("[HTTP] Lien reçu :", link)
log.Println("[HTTP] ID de chemin :", parsedID)
2025-06-12 08:57:10 +00:00
2025-06-12 15:31:12 +00:00
// Authentification Debrid-Link
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// Débride le lien
ctx := r.Context()
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("[ERROR] Echec lors de l'ajout du lien : %v\n", err)
http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
return
2025-06-12 08:57:10 +00:00
}
2025-06-12 15:31:12 +00:00
// Enregistre chaque lien comme un job "en attente"
for _, l := range links {
2025-06-19 17:57:40 +00:00
log.Printf("[l'id] : %v\n", l.ID)
2025-06-19 19:21:17 +00:00
streamInfo, err := client.CreateTranscode(ctx,l.ID)
if err != nil {
log.Println("Erreur GetTranscode:", err)
return
}
2025-06-19 17:40:08 +00:00
2025-06-12 15:31:12 +00:00
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
2025-06-15 15:21:11 +00:00
PathID: parsedID,
2025-06-12 15:31:12 +00:00
Size: l.Size,
Host: l.Host,
2025-06-13 15:12:32 +00:00
Progress: 0, // obligatoire si valeur attendue
2025-06-19 19:21:17 +00:00
StreamURL: streamInfo.StreamURL, // vide par défaut
2025-06-12 15:31:12 +00:00
}
2025-06-13 15:12:32 +00:00
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("[ERROR] Job non enregistré : %v\n", err)
}
2025-06-12 15:31:12 +00:00
}
// Met à jour la vue partielle (tableau des jobs)
data := map[string]interface{}{
2025-06-15 15:21:11 +00:00
"jobs": download.ListJobs(db),
2025-06-12 15:31:12 +00:00
}
2025-06-15 15:21:11 +00:00
fmt.Printf("%+v\n", data)
download.Broadcast() // ← on notifie tout de suite les clients SSE
2025-06-12 08:57:10 +00:00
renderPartial(w, "downloads_table", data)
}
}
2025-06-15 15:21:11 +00:00
// HandleJobsStream ouvre le flux SSE et envoie un event "jobs" à chaque changement
// HandleJobsStream ouvre un flux SSE qui nenvoie quun signal "jobs"
// HandleJobsStream opens an SSE stream that emits a "jobs" event with
// a minimal {"refresh": true} payload on every download-state change;
// clients re-fetch the jobs partial on each event. The db parameter is
// unused here but keeps the signature uniform with the other handlers.
func HandleJobsStream(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		log.Println("[DEBUG] Nouvelle connexion au flux SSE")
		// Streaming requires a flushable ResponseWriter.
		flusher, ok := w.(http.Flusher)
		if !ok {
			log.Println("[ERROR] Flusher non supporté")
			http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
			return
		}
		// SSE headers.
		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")
		w.Header().Set("Connection", "keep-alive")
		w.Header().Set("Access-Control-Allow-Origin", "*") // useful in dev or when CORS is needed
		// Subscribe to download-state broadcasts; always unsubscribe on
		// exit so the channel is not leaked.
		ch := download.Subscribe()
		log.Println("[DEBUG] Abonné au canal de téléchargement")
		defer func() {
			download.Unsubscribe(ch)
			log.Println("[DEBUG] Désabonnement du canal de téléchargement")
		}()
		// Send a well-formed initial event (valid JSON payload).
		log.Println("[DEBUG] Envoi du signal initial")
		fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
		flusher.Flush()
		// Stream loop: one event per broadcast until the client
		// disconnects (request context cancelled).
		for {
			select {
			case <-r.Context().Done():
				log.Println("[DEBUG] Fermeture de la connexion SSE (client disconnect)")
				return
			case <-ch:
				log.Println("[DEBUG] Événement reçu sur le canal — envoi SSE")
				fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
				flusher.Flush()
			}
		}
	}
}
// sendSSEUpdate génère le HTML du partial et lenvoie comme event "jobs"
func sendSSEUpdate(w http.ResponseWriter, flusher http.Flusher, db *gorm.DB) error {
// 1) Création du pipe
pr, pw := io.Pipe()
// 2) Dans une goroutine, parse+execute du partial dans pw
go func() {
defer pw.Close()
// Charge le fichier de template du partial
tmpl, err := template.ParseFiles("./templates/downloads_table.pages.tmpl")
if err != nil {
log.Printf("ParseFiles error: %v", err)
return
}
// Prépare les données réelles
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
// Exécute *uniquement* le define "downloads_table"
if err := tmpl.ExecuteTemplate(pw, "downloads_table", data); err != nil {
log.Printf("ExecuteTemplate error: %v", err)
return
}
}()
// 3) Lecture complète du HTML
htmlBytes, err := io.ReadAll(pr)
if err != nil {
return fmt.Errorf("lecture rendu échouée: %w", err)
}
// 4) Construction du message SSE
// - event: jobs
// - chaque ligne de HTML préfixée data:
fmt.Fprintf(w, "event: jobs\n")
for _, line := range strings.Split(string(htmlBytes), "\n") {
fmt.Fprintf(w, "data: %s\n", line)
}
// ligne vide pour terminer l'event
fmt.Fprintf(w, "\n")
// 5) Flush pour envoyer au client immédiatement
flusher.Flush()
return nil
}
2025-06-12 15:31:12 +00:00
2025-06-12 08:57:10 +00:00
func HandleListJobsPartial(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
2025-06-15 15:21:11 +00:00
jobs := download.ListJobs(db)
2025-06-12 08:57:10 +00:00
data := map[string]interface{}{
"jobs": jobs,
}
2025-06-15 15:21:11 +00:00
//download.Broadcast()
2025-06-12 08:57:10 +00:00
renderPartial(w, "downloads_table", data)
}
}
2025-06-12 15:31:12 +00:00
var (
	// jobs is an in-memory cache of download jobs keyed by job ID,
	// used as a fast path before falling back to the database.
	jobs = make(map[string]*download.DownloadJob)
	// jobsMu guards all access to the jobs map.
	jobsMu sync.Mutex
)
func HandleStartJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
2025-06-13 15:12:32 +00:00
log.Printf("[id] job id= "+id)
2025-06-12 15:31:12 +00:00
2025-06-13 15:12:32 +00:00
// 1. Récupérer depuis la map
2025-06-12 15:31:12 +00:00
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
2025-06-13 15:12:32 +00:00
// 2. Sinon fallback base de données
2025-06-12 15:31:12 +00:00
if !exists {
2025-06-13 15:12:32 +00:00
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
// important : on copie vers un pointeur pour que la map pointe bien dessus
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
2025-06-12 15:31:12 +00:00
}
2025-06-13 15:12:32 +00:00
// 3. Setup client Debrid-Link
2025-06-12 15:31:12 +00:00
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
2025-06-13 15:12:32 +00:00
// 4. Lancer le téléchargement réel
go download.StartDownload(job,job.Link,client, db)
2025-06-15 15:21:11 +00:00
download.Broadcast() // ← on notifie tout de suite les clients SSE
2025-06-12 15:31:12 +00:00
w.WriteHeader(http.StatusNoContent)
}
}
2025-06-13 15:12:32 +00:00
2025-06-12 15:31:12 +00:00
func HandlePauseJob(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.UpdateJobStatus(id, "paused", nil)
2025-06-15 15:21:11 +00:00
download.Broadcast() // ← on notifie tout de suite les clients SSE
2025-06-12 15:31:12 +00:00
w.WriteHeader(http.StatusNoContent)
}
func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
2025-06-13 15:12:32 +00:00
// 1. Chercher en mémoire
2025-06-12 15:31:12 +00:00
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
2025-06-13 15:12:32 +00:00
// 2. Si absent, fallback DB
2025-06-12 15:31:12 +00:00
if !exists {
2025-06-13 15:12:32 +00:00
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
2025-06-12 15:31:12 +00:00
}
2025-06-13 15:12:32 +00:00
// 3. Initialiser le client Debrid-Link
2025-06-12 15:31:12 +00:00
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
2025-06-13 15:12:32 +00:00
// 4. Redémarrer le téléchargement
go download.StartDownload(job,job.Link,client,db)
2025-06-12 15:31:12 +00:00
w.WriteHeader(http.StatusNoContent)
}
}
2025-06-13 15:12:32 +00:00
2025-06-15 15:21:11 +00:00
func HandleDeleteJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
2025-06-12 15:31:12 +00:00
id := mux.Vars(r)["id"]
2025-06-15 15:21:11 +00:00
download.DeleteJob(id,db)
go download.Broadcast() // ← on notifie tout de suite les clients SSE
2025-06-09 14:13:32 +00:00
2025-06-15 15:21:11 +00:00
w.WriteHeader(http.StatusNoContent)
}}
func HandleDeleteMultipleJobs(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, "Impossible de lire les IDs", http.StatusBadRequest)
return
}
2025-06-09 14:13:32 +00:00
2025-06-15 15:21:11 +00:00
ids := r.Form["ids[]"]
if len(ids) == 0 {
http.Error(w, "Aucun ID reçu", http.StatusBadRequest)
return
}
2025-06-09 14:13:32 +00:00
2025-06-15 15:21:11 +00:00
for _, id := range ids {
download.DeleteJob(id, db)
}
download.Broadcast() // ← on notifie tout de suite les clients SSE
2025-06-09 14:13:32 +00:00
2025-06-15 15:21:11 +00:00
w.WriteHeader(http.StatusNoContent)
}
}
2025-06-09 14:13:32 +00:00
2025-06-20 13:41:37 +00:00
func StreamHandler(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
cur := r.URL.Query().Get("path") // ex: "", "Icons", "Code/Sub"
2025-06-09 14:13:32 +00:00
2025-06-20 13:41:37 +00:00
// 1) sidebar : on ne gère que le niveau racine
root, _ := listEntries(base, "")
var dirs []Entry
for _, e := range root {
if e.IsDir {
dirs = append(dirs, e)
}
2025-06-06 07:42:55 +00:00
}
2025-06-20 13:41:37 +00:00
// 2) contenu courant
entries, _ := listEntries(base, cur)
data := map[string]interface{}{
"Dirs": dirs,
"Entries": entries,
"CurrentPath": cur,
}
// Si cest un appel HTMX (liste partielle) : on renvoie juste la grille
if r.Header.Get("HX-Request") == "true" {
renderPartial(w, "_file_list", data)
return
}
// Sinon on rend la page complète
renderTemplate(w, "folders", data)
}
func DetailHandler(w http.ResponseWriter, r *http.Request) {
2025-06-20 16:27:50 +00:00
base := "/app/upload"
2025-06-20 16:20:39 +00:00
rel := r.URL.Query().Get("path")
2025-06-20 16:12:28 +00:00
2025-06-20 16:23:10 +00:00
log.Printf("Reçu path: %q", rel)
2025-06-20 16:20:39 +00:00
// On sécurise : supprime les éventuels chemins relatifs
rel = filepath.Clean("/" + rel) // ça supprime .. etc.
rel = strings.TrimPrefix(rel, "/")
2025-06-20 16:12:28 +00:00
2025-06-20 16:23:10 +00:00
log.Printf("Path nettoyé (rel): %q", rel)
2025-06-20 16:20:39 +00:00
absPath := filepath.Join(base, rel)
2025-06-20 16:23:10 +00:00
log.Printf("Chemin absolu construit (absPath): %q", absPath)
2025-06-20 16:20:39 +00:00
info, err := os.Stat(absPath)
if err != nil {
2025-06-20 16:23:10 +00:00
log.Printf("Erreur os.Stat: %v", err)
2025-06-20 16:20:39 +00:00
http.NotFound(w, r)
return
}
// Protection : vérifier qu'on reste bien dans base
if !strings.HasPrefix(absPath, base) {
2025-06-20 16:23:10 +00:00
log.Printf("Sécurité: chemin hors du base: %q", absPath)
2025-06-20 16:20:39 +00:00
http.NotFound(w, r)
return
}
2025-06-20 16:12:28 +00:00
2025-06-20 13:41:37 +00:00
entry := Entry{
Name: info.Name(),
Path: rel,
IsDir: info.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
2025-06-06 07:42:55 +00:00
}
2025-06-20 16:20:39 +00:00
2025-06-20 16:23:10 +00:00
log.Printf("Entrée trouvée: %+v", entry)
2025-06-20 13:41:37 +00:00
renderPartial(w, "_file_detail", map[string]interface{}{
"Entry": entry,
})
2025-06-06 07:42:55 +00:00
}
2025-06-20 18:20:48 +00:00
// seriesRegex captures the series-name prefix of a release-style file
// name, e.g. "Show.Name" out of "Show.Name.S01E02.720p...".
var seriesRegex = regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
// HandleAddJobsMultiple gère le débridage de plusieurs liens, auto-création de sous-dossier, et enregistrement
2025-06-20 18:17:24 +00:00
func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
2025-06-20 18:25:31 +00:00
return func(w http.ResponseWriter, r *http.Request) {
2025-06-20 19:03:02 +00:00
// 1. Parsing form
2025-06-20 18:25:31 +00:00
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
2025-06-20 18:17:24 +00:00
2025-06-20 19:03:02 +00:00
// 2. Récupération des liens
2025-06-20 18:32:52 +00:00
raw := r.FormValue("links")
lines := strings.Split(raw, "\n")
2025-06-20 19:03:02 +00:00
// 3. Récupération du dossier principal
2025-06-20 18:25:31 +00:00
idStr := r.FormValue("path_id")
2025-06-20 18:32:52 +00:00
baseID, err := strconv.ParseInt(idStr, 10, 64)
2025-06-20 18:25:31 +00:00
if err != nil {
2025-06-20 18:32:52 +00:00
http.Error(w, "ID de chemin invalide", http.StatusBadRequest)
2025-06-20 18:25:31 +00:00
return
}
var basePath models.PathDownload
2025-06-20 18:32:52 +00:00
if err := db.First(&basePath, baseID).Error; err != nil {
2025-06-20 18:25:31 +00:00
http.Error(w, "Dossier principal introuvable", http.StatusBadRequest)
return
2025-06-20 18:20:48 +00:00
}
2025-06-20 19:03:02 +00:00
// 4. Préparation client DebridLink
2025-06-20 18:32:52 +00:00
ctx := context.Background()
2025-06-20 18:25:31 +00:00
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
2025-06-20 18:20:48 +00:00
}
2025-06-20 18:25:31 +00:00
client.SetAccount(account)
2025-06-20 18:20:48 +00:00
2025-06-20 19:03:02 +00:00
// 5. Itération sur chaque lien à débrider
2025-06-20 18:32:52 +00:00
for _, link := range lines {
link = strings.TrimSpace(link)
if link == "" {
continue
2025-06-20 18:25:31 +00:00
}
2025-06-20 19:03:02 +00:00
// Débridage link -> liens réels
2025-06-20 18:32:52 +00:00
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("Échec débridage de %s: %v", link, err)
continue
2025-06-20 18:25:31 +00:00
}
2025-06-20 18:32:52 +00:00
for _, l := range links {
2025-06-20 19:03:02 +00:00
// 5a. Détermination automatique du nom de série
clean := sanitizeFileName(l.Name)
series := clean
if m := seriesRegex.FindStringSubmatch(clean); len(m) == 2 {
2025-06-20 18:32:52 +00:00
series = m[1]
}
2025-06-20 19:03:02 +00:00
// 5b. Assignation du PathID
assignID := int(basePath.ID)
2025-06-20 18:32:52 +00:00
if series != "" {
dirPath := filepath.Join(basePath.Path, series)
if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
log.Printf("Erreur création dossier %s: %v", dirPath, err)
}
2025-06-20 19:03:02 +00:00
// Cherche ou crée en base
var sub models.PathDownload
if err := db.Where("path = ?", dirPath).First(&sub).Error; err != nil {
2025-06-20 18:32:52 +00:00
if err == gorm.ErrRecordNotFound {
2025-06-20 19:03:02 +00:00
sub = models.PathDownload{Path: dirPath, PathName: series}
if err := db.Create(&sub).Error; err != nil {
2025-06-20 18:32:52 +00:00
log.Printf("Erreur création PathDownload: %v", err)
}
} else {
log.Printf("Erreur lecture PathDownload: %v", err)
}
}
2025-06-20 19:03:02 +00:00
assignID = int(sub.ID)
}
// 5c. Transcodage éventuel
streamInfo, err := client.CreateTranscode(ctx, l.ID)
if err != nil {
log.Printf("Erreur transcode pour %s: %v", l.ID, err)
2025-06-20 18:32:52 +00:00
}
2025-06-20 19:03:02 +00:00
// 5d. Enregistrement du job
2025-06-20 18:32:52 +00:00
job := &download.DownloadJob{
2025-06-20 19:12:15 +00:00
ID: l.ID,
2025-06-20 18:32:52 +00:00
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
2025-06-20 19:03:02 +00:00
PathID: assignID,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: "",
}
if streamInfo != nil {
job.StreamURL = streamInfo.StreamURL
2025-06-20 18:32:52 +00:00
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("Erreur enregistrement job: %v", err)
continue
}
2025-06-20 18:25:31 +00:00
}
2025-06-20 18:17:24 +00:00
}
2025-06-20 19:03:02 +00:00
// 6. Notification au frontend
2025-06-20 18:32:52 +00:00
download.Broadcast()
2025-06-20 19:06:04 +00:00
//w.WriteHeader(http.StatusNoContent)
2025-06-20 18:17:24 +00:00
}
}
2025-06-20 18:20:48 +00:00
// sanitizeFileName normalizes a file name using the same logic as
// download.SanitizeFileName (thin wrapper kept for local readability).
func sanitizeFileName(name string) string {
	return download.SanitizeFileName(name)
}
// getAllPaths returns every PathDownload row, ordered by "name".
// Query errors are discarded; callers get an empty slice on failure.
// NOTE(review): PathDownload rows elsewhere in this file are created
// with Path/PathName fields — confirm the table actually has a "name"
// column, otherwise this Order clause fails silently here.
func getAllPaths(db *gorm.DB) []*models.PathDownload {
	var paths []*models.PathDownload
	db.Order("name").Find(&paths)
	return paths
}
2025-06-09 14:13:32 +00:00
2025-06-21 16:29:39 +00:00
// ffprobeOut mirrors the subset of ffprobe's JSON report consumed
// here: the container duration and, per stream, the codec type plus
// video dimensions.
type ffprobeOut struct {
	Format struct {
		Duration string `json:"duration"`
	} `json:"format"`
	Streams []struct {
		CodecType string `json:"codec_type"`
		Width     int    `json:"width,omitempty"`
		Height    int    `json:"height,omitempty"`
	} `json:"streams"`
}

// probe runs ffprobe on file and decodes its JSON report; ctx bounds
// the subprocess lifetime.
func probe(ctx context.Context, file string) (*ffprobeOut, error) {
	args := []string{
		"-v", "error",
		"-print_format", "json",
		"-show_format", "-show_streams",
		file,
	}
	out, err := exec.CommandContext(ctx, "ffprobe", args...).Output()
	if err != nil {
		return nil, err
	}
	info := new(ffprobeOut)
	if err := json.Unmarshal(out, info); err != nil {
		return nil, err
	}
	return info, nil
}
2025-06-21 16:17:16 +00:00
// mediaItemView is the per-file view model for the media list partial.
type mediaItemView struct {
	Title         string // file base name
	Duration      int64  // duration in seconds (videos only)
	DurationFmt   string // formatted duration, e.g. "3:45"
	Width, Height int    // video dimensions in pixels (videos only)
	ThumbURL      string // thumbnail URL, or a generic per-extension icon
	FilePath      string // full path on disk
	MediaPartID   int64  // database media-part ID, when known
}
2025-06-21 16:29:39 +00:00
// PathMedia renvoie la liste des sous-dossiers et médias du dossier `PathDownload`
// On passe un query param `sub` pour naviguer dans les sous-dossiers.
2025-06-21 16:17:16 +00:00
// PathMedia lists the sub-folders and media files of the PathDownload
// folder identified by {id}; the "sub" query parameter navigates into
// sub-folders. Video files get ffprobe metadata and a cached ffmpeg
// thumbnail; other allowed types get a generic extension icon.
func PathMedia(db *gorm.DB) http.HandlerFunc {
	// File extensions accepted by the media browser.
	allowed := map[string]bool{
		".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
		".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
		".pdf": true, ".epub": true, ".cbz": true,
	}
	return func(w http.ResponseWriter, r *http.Request) {
		// 1) Fetch the PathDownload row.
		vars := mux.Vars(r)
		pid, _ := strconv.ParseInt(vars["id"], 10, 64)
		var pd models.PathDownload
		if err := db.First(&pd, pid).Error; err != nil {
			http.Error(w, "Dossier introuvable", http.StatusNotFound)
			return
		}

		// 2) Resolve the current directory.
		// NOTE(review): "sub" is joined without sanitization — a ".."
		// value could escape pd.Path; confirm and harden.
		sub := r.URL.Query().Get("sub") // e.g. "Film/Test"
		current := filepath.Join(pd.Path, filepath.FromSlash(sub))
		// 3) Read the directory entries.
		entries, err := os.ReadDir(current)
		if err != nil {
			http.Error(w, "Impossible de lire le dossier", http.StatusInternalServerError)
			return
		}
		// 4) Build the view lists.
		type dirView struct {
			Name    string
			SubPath string
		}
		var dirs []dirView
		var medias []mediaItemView
		thumbDir := filepath.Join("static", "thumbs")
		os.MkdirAll(thumbDir, 0755)
		for _, e := range entries {
			name := e.Name()
			full := filepath.Join(current, name)
			if e.IsDir() {
				// Sub-folder: navigable entry.
				dirs = append(dirs, dirView{
					Name:    name,
					SubPath: filepath.ToSlash(filepath.Join(sub, name)),
				})
			} else {
				ext := strings.ToLower(filepath.Ext(name))
				if !allowed[ext] {
					continue
				}
				view := mediaItemView{Title: name, FilePath: full}
				// Videos: probe metadata (3s budget) and capture a thumb.
				if ext == ".mkv" || ext == ".avi" || ext == ".mp4" || ext == ".mov" {
					ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
					info, err := probe(ctx, full)
					cancel()
					if err == nil {
						if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
							view.Duration = int64(d)
							view.DurationFmt = fmt.Sprintf("%d:%02d", view.Duration/60, view.Duration%60)
						}
						// First video stream provides the dimensions.
						for _, s := range info.Streams {
							if s.CodecType == "video" {
								view.Width, view.Height = s.Width, s.Height
								break
							}
						}
					}
					// Screenshot at the 5-second mark, generated once and
					// cached under static/thumbs (error ignored: a missing
					// thumb only degrades the UI).
					base := strings.TrimSuffix(name, ext)
					thumbName := base + ".jpg"
					thumbPath := filepath.Join(thumbDir, thumbName)
					if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
						exec.Command("ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath).Run()
					}
					view.ThumbURL = "/static/thumbs/" + thumbName
				} else {
					// Generic icons for images/PDF/EPUB/CBZ.
					view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
				}
				medias = append(medias, view)
			}
		}
		// 5) Render the media list partial.
		renderPartial(w, "media_list", map[string]interface{}{
			"PathID":     pid,
			"CurrentSub": sub,
			"Dirs":       dirs,
			"MediaItems": medias,
		})
	}
}
2025-06-21 16:58:02 +00:00
// mediaDetailView is the view model for the media detail partial.
type mediaDetailView struct {
	Title       string // display title
	Summary     string // synopsis; empty for filesystem-only items
	DurationFmt string // formatted duration ("m:ss"); empty for FS-only items
	ThumbURL    string // thumbnail URL
	HLSURL      string // HLS playlist URL consumed by the player
}
2025-06-21 16:29:39 +00:00
2025-06-21 17:55:53 +00:00
// MediaDetail renvoie la partial HTML du détail dun média
2025-06-21 16:17:16 +00:00
func MediaDetail(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
2025-06-22 10:37:49 +00:00
// 2) Récupérer pathID et sub depuis la query string
pathIDStr := r.URL.Query().Get("pathID")
sub := r.URL.Query().Get("sub")
var pathID int64
if pathIDStr != "" {
pathID, _ = strconv.ParseInt(pathIDStr, 10, 64)
}
2025-06-21 16:58:02 +00:00
var view mediaDetailView
if partID > 0 {
// --- CAS BDD ---
var item struct {
models.MetadataItem
2025-06-21 17:55:53 +00:00
MediaPartID int64
File string
2025-06-21 16:58:02 +00:00
UserThumbURL string
}
db.Table("metadata_items").
Select("metadata_items.*, media_parts.id AS media_part_id, media_parts.file, metadata_items.user_thumb_url").
Joins("JOIN media_items ON media_items.metadata_item_id = metadata_items.id").
Joins("JOIN media_parts ON media_parts.media_item_id = media_items.id").
Where("media_parts.id = ?", partID).
Scan(&item)
if item.MediaPartID == 0 {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
2025-06-21 17:55:53 +00:00
// formatage durée
2025-06-21 16:58:02 +00:00
m := item.Duration / 60
s := item.Duration % 60
2025-06-21 17:55:53 +00:00
2025-06-21 16:58:02 +00:00
view = mediaDetailView{
Title: item.Title,
Summary: item.Summary,
2025-06-21 17:55:53 +00:00
DurationFmt: fmt.Sprintf("%d:%02d", m, s),
2025-06-21 16:58:02 +00:00
ThumbURL: item.UserThumbURL,
2025-06-21 17:55:53 +00:00
HLSURL: fmt.Sprintf("/hls/%d/index.m3u8", item.MediaPartID),
2025-06-21 16:58:02 +00:00
}
} else {
// --- CAS FS-ONLY ---
path := r.URL.Query().Get("path")
if path == "" {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
2025-06-21 17:55:53 +00:00
// base name et thumbnail
2025-06-21 16:58:02 +00:00
title := filepath.Base(path)
ext := filepath.Ext(path)
2025-06-21 17:55:53 +00:00
base := strings.TrimSuffix(title, ext)
2025-06-21 16:58:02 +00:00
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
thumbPath := filepath.Join(thumbDir, base+".jpg")
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
2025-06-21 17:55:53 +00:00
// capture au 5s
exec.CommandContext(context.Background(),
2025-06-21 16:58:02 +00:00
"ffmpeg", "-ss", "5", "-i", path, "-frames:v", "1", thumbPath,
).Run()
}
view = mediaDetailView{
Title: title,
2025-06-21 17:55:53 +00:00
Summary: "",
DurationFmt: "",
2025-06-21 16:58:02 +00:00
ThumbURL: "/static/thumbs/" + base + ".jpg",
2025-06-21 17:55:53 +00:00
// **ici** on passe le path en query pour que HLSStream sache où chercher
HLSURL: fmt.Sprintf("/hls/0/index.m3u8?path=%s", url.QueryEscape(path)),
2025-06-21 16:58:02 +00:00
}
2025-06-21 16:17:16 +00:00
}
2025-06-22 10:37:49 +00:00
renderPartial(w, "media_detail", map[string]interface{}{
"PathID": pathID,
"CurrentSub": sub,
"item": view,
})
2025-06-21 16:17:16 +00:00
}
}
2025-06-09 14:13:32 +00:00
2025-06-21 17:55:53 +00:00
2025-06-21 17:04:12 +00:00
// renders/media.go (ajoutez cette fonction)
2025-06-21 17:55:53 +00:00
// rend le HLS pour BDD (partID>0) et FS-only (partID==0)
2025-06-21 17:04:12 +00:00
func HLSStream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
2025-06-21 17:55:53 +00:00
// 1) identifier le partID et le dossier temporaire
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
tmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("hls_%d", partID))
playlist := filepath.Join(tmpDir, "index.m3u8")
2025-06-21 17:50:12 +00:00
2025-06-21 17:55:53 +00:00
// 2) déterminer s'il faut (re)générer
needGen := false
2025-06-21 17:50:12 +00:00
var filePath string
2025-06-21 17:55:53 +00:00
2025-06-21 17:50:12 +00:00
if partID > 0 {
// cas BDD
var part models.MediaPart
if err := db.First(&part, partID).Error; err != nil {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
filePath = part.File
2025-06-21 17:55:53 +00:00
needGen = true
2025-06-21 17:50:12 +00:00
} else {
2025-06-21 17:55:53 +00:00
// cas FS-only : on génère seulement si playlist manquante
if _, err := os.Stat(playlist); os.IsNotExist(err) {
filePath = r.URL.Query().Get("path")
if filePath == "" {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
needGen = true
2025-06-21 17:50:12 +00:00
}
2025-06-21 17:04:12 +00:00
}
2025-06-21 17:55:53 +00:00
// 3) (Re)générer le HLS si besoin
if needGen {
os.MkdirAll(tmpDir, 0755)
2025-06-21 17:04:12 +00:00
cmd := exec.CommandContext(r.Context(),
"ffmpeg",
2025-06-21 17:50:12 +00:00
"-i", filePath,
2025-06-21 17:04:12 +00:00
"-c:v", "copy", "-c:a", "copy",
"-f", "hls",
"-hls_time", "4",
"-hls_list_size", "0",
"-hls_segment_filename", filepath.Join(tmpDir, "seg%d.ts"),
playlist,
)
if out, err := cmd.CombinedOutput(); err != nil {
log.Println("ffmpeg HLS error:", err, string(out))
2025-06-21 17:50:12 +00:00
http.Error(w, "Erreur de transcodage", http.StatusInternalServerError)
2025-06-21 17:04:12 +00:00
return
}
}
2025-06-21 17:55:53 +00:00
// 4) servir **tout** tmpDir sous /hls/{partID}/…
2025-06-21 17:50:12 +00:00
prefix := fmt.Sprintf("/hls/%d/", partID)
http.StripPrefix(prefix,
2025-06-21 17:04:12 +00:00
http.FileServer(http.Dir(tmpDir)),
).ServeHTTP(w, r)
}
}
2025-06-09 14:13:32 +00:00
2025-06-20 13:41:37 +00:00
2025-06-20 16:20:39 +00:00
2025-06-21 17:50:12 +00:00
2025-06-21 17:55:53 +00:00
2025-06-20 13:41:37 +00:00
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
2025-06-22 17:16:09 +00:00
var buf bytes.Buffer
// Exécute la template dans le buffer
if err := templates.ExecuteTemplate(&buf, templ+".pages.tmpl", data); err != nil {
2025-06-06 07:42:55 +00:00
http.Error(w, err.Error(), http.StatusInternalServerError)
2025-06-22 17:16:09 +00:00
return
2025-06-06 07:42:55 +00:00
}
2025-06-22 17:16:09 +00:00
// On peut définir un content-type explicite si besoin
w.Header().Set("Content-Type", "text/html; charset=utf-8")
// Écriture unique du code 200 implicite et du corps
w.Write(buf.Bytes())
2025-06-20 13:41:37 +00:00
}
2025-06-06 07:42:55 +00:00
2025-06-20 13:41:37 +00:00
func renderTemplate(w http.ResponseWriter, templ string, data map[string]interface{}) {
// Pareil, on exécute le principal
if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
2025-06-06 07:42:55 +00:00
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
2025-07-02 14:50:26 +00:00
// DashboardJSON returns, as JSON, the PathDownload rows sitting directly
// under /app/upload (exactly one level deep — nested folders are excluded
// by the second LIKE pattern).
func DashboardJSON(db *gorm.DB) http.HandlerFunc {
	const root = "/app/upload"
	return func(w http.ResponseWriter, r *http.Request) {
		var paths []models.PathDownload
		err := db.
			Where("path LIKE ? AND path NOT LIKE ?", root+"/%", root+"/%/%").
			Find(&paths).Error
		if err != nil {
			http.Error(w, `{"error":"failed retrieving paths"}`, http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{"paths": paths})
	}
}
// MenuLibraryJSON returns every PathDownload row as JSON for the library menu.
func MenuLibraryJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var paths []models.PathDownload
		err := db.Find(&paths).Error
		if err != nil {
			http.Error(w, `{"error":"failed retrieving paths"}`, http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{"paths": paths})
	}
}
// SettingsJSON returns the static options of the Settings page as JSON.
func SettingsJSON() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		payload := map[string]interface{}{
			"title":   "Settings Page",
			"options": []string{"Option 1", "Option 2", "Option 3"},
		}
		json.NewEncoder(w).Encode(payload)
	}
}
// LibraryJSON returns an empty JSON object for /library (placeholder to be
// filled in later). The literal matches json.Encoder output: "{}" plus a
// trailing newline.
func LibraryJSON() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		io.WriteString(w, "{}\n")
	}
}
// GoDownloadJSON serves /godownloader/download.json. Currently an empty
// object; job/path data can be returned here later.
func GoDownloadJSON() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(struct{}{})
	}
}
// GoDownloadLinkCollectorsJSON serves /godownloader/linkcollectors.json as an
// empty JSON object ("{}" plus the newline json.Encoder would emit).
func GoDownloadLinkCollectorsJSON() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		io.WriteString(w, "{}\n")
	}
}
// GoDownloadSettingDeleteJSON deletes the debrid account identified by the
// "id" query parameter and answers {"success":true} when the id parsed
// correctly (mirroring the original contract: success reflects id parsing,
// not the deletion outcome).
func GoDownloadSettingDeleteJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Use the request context so a client disconnect cancels the work —
		// consistent with GoDownloadSettingJSON (the old code used
		// context.Background()).
		ctx := r.Context()
		client := debridlink.NewClient(db)
		idStr := r.URL.Query().Get("id")
		id, err := strconv.ParseUint(idStr, 10, 64)
		if err == nil {
			// Deletion failures don't change the response shape, but they
			// should at least be visible in the logs.
			if delErr := client.DeleteDebridAccount(ctx, uint(id)); delErr != nil {
				log.Println("GoDownloadSettingDeleteJSON: delete:", delErr)
			}
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]bool{"success": err == nil})
	}
}
// GoDownloadSettingToggleActiveJSON flips the active flag of the debrid
// account identified by the "id" query parameter, then returns the updated
// account list as JSON.
func GoDownloadSettingToggleActiveJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Request-scoped context for consistency with GoDownloadSettingJSON
		// (the old code used context.Background()).
		ctx := r.Context()
		client := debridlink.NewClient(db)
		id, _ := strconv.ParseUint(r.URL.Query().Get("id"), 10, 64)
		// Errors keep the original "best effort" behavior but are now logged
		// instead of silently discarded.
		if err := client.ToggleActiveStatus(ctx, uint(id)); err != nil {
			log.Println("GoDownloadSettingToggleActiveJSON: toggle:", err)
		}
		accounts, err := client.ListDebridAccounts(ctx)
		if err != nil {
			log.Println("GoDownloadSettingToggleActiveJSON: list:", err)
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{"accounts": accounts})
	}
}
// GoDownloadSettingJSON returns the debrid account list (GET) or starts the
// device-code pairing flow (POST). Any other method is rejected with 405
// (the old code silently answered an empty 200).
func GoDownloadSettingJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		client := debridlink.NewClient(db)
		w.Header().Set("Content-Type", "application/json")
		switch r.Method {
		case http.MethodGet:
			accounts, err := client.ListDebridAccounts(ctx)
			if err != nil {
				log.Println("GoDownloadSettingJSON: list:", err)
			}
			json.NewEncoder(w).Encode(map[string]interface{}{"accounts": accounts})
		case http.MethodPost:
			if err := r.ParseForm(); err != nil {
				w.WriteHeader(http.StatusBadRequest)
				json.NewEncoder(w).Encode(map[string]string{"error": "invalid form"})
				return
			}
			username := r.FormValue("username")
			password := r.FormValue("password")
			device, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
			if err != nil {
				// Encode via the JSON encoder so quotes/backslashes in the
				// error message cannot break the payload (the old string
				// concatenation produced invalid JSON in that case).
				w.WriteHeader(http.StatusInternalServerError)
				json.NewEncoder(w).Encode(map[string]string{"error": err.Error()})
				return
			}
			json.NewEncoder(w).Encode(map[string]string{
				"code": device.UserCode,
				"url":  device.VerificationURL,
			})
		default:
			w.WriteHeader(http.StatusMethodNotAllowed)
			json.NewEncoder(w).Encode(map[string]string{"error": "method not allowed"})
		}
	}
}
// GoDownloadPartialTableJSON returns the debrid account list consumed by the
// settings table partial.
func GoDownloadPartialTableJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		client := debridlink.NewClient(db)
		accounts, _ := client.ListDebridAccounts(r.Context())
		w.Header().Set("Content-Type", "application/json")
		payload := map[string]interface{}{"accounts": accounts}
		json.NewEncoder(w).Encode(payload)
	}
}
// GoDownload2JSON returns the active download jobs, the configured download
// paths, and the current server time as one JSON document.
func GoDownload2JSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		jobs := download.ListJobs(db)
		var paths []models.PathDownload
		db.Find(&paths)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"jobs":  jobs,
			"paths": paths,
			"now":   time.Now(),
		})
	}
}
// HandleAddJobJSON ajoute un job et renvoie la liste mise à jour
func HandleAddJobJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
link := r.FormValue("link")
id, _ := strconv.Atoi(r.FormValue("path_id"))
2025-07-04 09:37:50 +00:00
// client := download.GetFirstActiveAccount(debridlink.NewClient(db))
2025-07-02 14:50:26 +00:00
ctx := r.Context()
links, _ := debridlink.NewClient(db).AddLink(ctx, link)
for _, l := range links {
stream, _ := debridlink.NewClient(db).CreateTranscode(ctx, l.ID)
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: id,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: stream.StreamURL,
}
download.RegisterJobWithDB(job, db)
}
jobs := download.ListJobs(db)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"jobs": jobs})
}
}
// HandleListJobsPartialJSON returns the current download jobs as JSON.
func HandleListJobsPartialJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		payload := map[string]interface{}{"jobs": download.ListJobs(db)}
		json.NewEncoder(w).Encode(payload)
	}
}
// HandleAddJobsMultipleJSON accepts a newline-separated batch of links
// (same processing as the HTML variant), notifies listeners via Broadcast,
// and acknowledges with a minimal JSON success payload.
func HandleAddJobsMultipleJSON(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		r.ParseForm()
		lines := strings.Split(r.FormValue("links"), "\n")
		_ = lines // processing identical to the HTML handler...
		download.Broadcast()
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]bool{"success": true})
	}
}
// StreamHandlerJSON lists the upload tree for the streaming browser: the
// top-level directories, the entries of the requested sub-path, and the
// normalized current path, as JSON.
func StreamHandlerJSON() http.HandlerFunc {
	const base = "/app/upload"
	return func(w http.ResponseWriter, r *http.Request) {
		// "path" is client input that gets joined under base by listEntries.
		// Rooting it before Clean strips any ".." components, so the listing
		// cannot escape the base directory.
		cur := r.URL.Query().Get("path")
		cur = strings.TrimPrefix(filepath.ToSlash(filepath.Clean("/"+cur)), "/")
		// Top-level directories (listing errors are logged, the response
		// simply contains empty lists — same contract as before).
		root, err := listEntries(base, "")
		if err != nil {
			log.Println("StreamHandlerJSON: listing base:", err)
		}
		var dirs []Entry
		for _, e := range root {
			if e.IsDir {
				dirs = append(dirs, e)
			}
		}
		entries, err := listEntries(base, cur)
		if err != nil {
			log.Println("StreamHandlerJSON: listing", cur, ":", err)
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"dirs":        dirs,
			"entries":     entries,
			"currentPath": cur,
		})
	}
}
// PathMediaJSON lists the sub-directories and displayable media of a
// PathDownload folder as JSON.
//
// Inputs: route variable {id} (PathDownload primary key) and the optional
// "sub" query parameter (slash-separated sub-folder relative to the root).
// Videos get duration/resolution via ffprobe plus an ffmpeg-generated
// thumbnail cached under static/thumbs; other allowed types get a generic
// per-extension icon.
func PathMediaJSON(db *gorm.DB) http.HandlerFunc {
	// JSON-friendly view types.
	type dirView struct {
		Name    string `json:"name"`
		SubPath string `json:"subPath"`
	}
	type mediaItemView struct {
		Title       string `json:"title"`
		Duration    int64  `json:"duration"`    // seconds
		DurationFmt string `json:"durationFmt"` // e.g. "3:45"
		Width       int    `json:"width"`
		Height      int    `json:"height"`
		ThumbURL    string `json:"thumbUrl"`
		FilePath    string `json:"filePath"`
		MediaPartID int64  `json:"mediaPartId"`
	}
	// Extensions the browser view knows how to display.
	allowed := map[string]bool{
		".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
		".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
		".pdf": true, ".epub": true, ".cbz": true,
	}
	videoExts := map[string]bool{".mkv": true, ".avi": true, ".mp4": true, ".mov": true}
	return func(w http.ResponseWriter, r *http.Request) {
		// 1) Resolve the PathDownload row.
		vars := mux.Vars(r)
		pid, err := strconv.ParseInt(vars["id"], 10, 64)
		if err != nil {
			http.Error(w, `{"error":"invalid path ID"}`, http.StatusBadRequest)
			return
		}
		var pd models.PathDownload
		if err := db.First(&pd, pid).Error; err != nil {
			http.Error(w, `{"error":"path not found"}`, http.StatusNotFound)
			return
		}
		// 2) Resolve the requested sub-folder. "sub" is client input:
		// rooting it before Clean strips ".." so the listing cannot escape
		// pd.Path (the old code joined it unsanitized).
		sub := r.URL.Query().Get("sub")
		sub = strings.TrimPrefix(filepath.ToSlash(filepath.Clean("/"+sub)), "/")
		current := filepath.Join(pd.Path, filepath.FromSlash(sub))
		// 3) Read the directory entries.
		entries, err := os.ReadDir(current)
		if err != nil {
			http.Error(w, `{"error":"cannot read directory"}`, http.StatusInternalServerError)
			return
		}
		// 4) Build the JSON views.
		var dirs []dirView
		var medias []mediaItemView
		thumbDir := filepath.Join("static", "thumbs")
		os.MkdirAll(thumbDir, 0755)
		for _, e := range entries {
			name := e.Name()
			full := filepath.Join(current, name)
			if e.IsDir() {
				dirs = append(dirs, dirView{
					Name:    name,
					SubPath: filepath.ToSlash(filepath.Join(sub, name)),
				})
				continue
			}
			ext := strings.ToLower(filepath.Ext(name))
			if !allowed[ext] {
				continue
			}
			view := mediaItemView{
				Title:    name,
				FilePath: full,
			}
			if videoExts[ext] {
				// Metadata via ffprobe, time-boxed so one slow file cannot
				// stall the whole listing; failures leave the fields zeroed.
				ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
				info, _ := probe(ctx, full)
				cancel()
				if info != nil {
					if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
						secs := int64(d)
						view.Duration = secs
						view.DurationFmt = fmt.Sprintf("%d:%02d", secs/60, secs%60)
					}
					for _, s := range info.Streams {
						if s.CodecType == "video" {
							view.Width = s.Width
							view.Height = s.Height
							break
						}
					}
				}
				// Thumbnail: generated once, cached afterwards.
				// NOTE(review): files sharing a base name in different
				// folders share a thumbnail — confirm this is acceptable.
				base := strings.TrimSuffix(name, ext)
				thumbName := base + ".jpg"
				thumbPath := filepath.Join(thumbDir, thumbName)
				if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
					if err := exec.CommandContext(r.Context(),
						"ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath,
					).Run(); err != nil {
						log.Println("PathMediaJSON: ffmpeg thumbnail error:", err)
					}
				}
				view.ThumbURL = "/static/thumbs/" + thumbName
			} else {
				// Generic icon; ext always starts with "." here, so ext[1:]
				// is the bare extension name.
				view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
			}
			medias = append(medias, view)
		}
		// 5) Respond.
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"dirs":       dirs,
			"mediaItems": medias,
		})
	}
}
2025-06-09 14:13:32 +00:00