// shelfy/renders/renders.go
package renders
import (
"app/shelfly/internal/debridlink"
"app/shelfly/internal/download"
"app/shelfly/internal/models"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"net/url"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"text/template"
"time"
"github.com/gorilla/mux"
"gorm.io/gorm"
)
var (
// templates will hold ALL of the .pages.tmpl pages
templates *template.Template
)
func init() {
funcMap := template.FuncMap{
"hasSuffix": strings.HasSuffix,
"ext": func(name string) string {
return strings.TrimPrefix(filepath.Ext(name), ".")
},
"urlquery": func(s string) string { return url.QueryEscape(s) },
"split": strings.Split,
"trimPrefix": strings.TrimPrefix,
// helper to convert bytes -> kilobytes
"toKB": func(size int64) float64 {
return float64(size) / 1024
},
}
templates = template.Must(
template.New("").
Funcs(funcMap).
ParseGlob("./templates/*.pages.tmpl"),
)
}
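// exampleHelperUsage is a minimal, self-contained sketch (not used by any
// handler) showing how helpers like "ext" and "toKB" registered above can be
// called from a template. The inline template text is illustrative only; the
// real pages live in ./templates/*.pages.tmpl.
func exampleHelperUsage(name string, size int64) (string, error) {
	t := template.Must(template.New("example").Funcs(template.FuncMap{
		"ext":  func(n string) string { return strings.TrimPrefix(filepath.Ext(n), ".") },
		"toKB": func(s int64) float64 { return float64(s) / 1024 },
	}).Parse(`{{ ext .Name }} file, {{ printf "%.1f" (toKB .Size) }} KB`))
	var buf bytes.Buffer
	err := t.Execute(&buf, map[string]interface{}{"Name": name, "Size": size})
	return buf.String(), err
}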
type Entry struct {
Name, Path string
IsDir bool
ModTime time.Time
Size int64
}
// listEntries lists the entries of a directory given relative to base
func listEntries(base, rel string) ([]Entry, error) {
dir := filepath.Join(base, rel)
fis, err := os.ReadDir(dir)
if err != nil {
return nil, err
}
out := make([]Entry, 0, len(fis))
for _, fi := range fis {
info, _ := fi.Info()
out = append(out, Entry{
Name: fi.Name(),
Path: filepath.ToSlash(filepath.Join(rel, fi.Name())),
IsDir: fi.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
})
}
return out, nil
}
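// exampleTopLevelDirs is a hypothetical usage sketch for listEntries: collect
// the top-level directories under a base folder, the same pattern StreamHandler
// uses below to build its sidebar. The base path is whatever the caller passes.
func exampleTopLevelDirs(base string) ([]Entry, error) {
	all, err := listEntries(base, "")
	if err != nil {
		return nil, err
	}
	dirs := make([]Entry, 0, len(all))
	for _, e := range all {
		if e.IsDir {
			dirs = append(dirs, e)
		}
	}
	return dirs, nil
}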
func Login(w http.ResponseWriter, r *http.Request){
renderTemplate(w,"login",nil)
}
func Dashboard(db *gorm.DB)http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var paths []models.PathDownload
root := "/app/upload"
// select everything directly under /app/upload/ (exactly one extra path segment, no deeper)
if err := db.
Where("path LIKE ? AND path NOT LIKE ?", root+"/%", root+"/%/%").
Find(&paths).Error; err != nil {
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
return
}
data := map[string]interface{}{
"paths": paths,
}
renderTemplate(w,"dashboard",data)
}
}
func MenuLibrary(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var currentPaths []models.PathDownload
if err := db.Find(&currentPaths).Error; err != nil {
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
return
}
// Retrieve the previous version of the paths (if any)
lastUpdate := r.Header.Get("HX-Current-Paths")
var previousPaths []models.PathDownload
if lastUpdate != "" {
json.Unmarshal([]byte(lastUpdate), &previousPaths)
}
// Convert to JSON for comparison
currentJSON, _ := json.Marshal(currentPaths)
previousJSON, _ := json.Marshal(previousPaths)
// Check whether the paths have changed
pathsChanged := string(currentJSON) != string(previousJSON)
data := map[string]interface{}{
"paths": currentPaths,
}
// For HTMX requests, set the appropriate headers
if r.Header.Get("HX-Request") == "true" {
if pathsChanged {
w.Header().Set("HX-Trigger", "pathsUpdated")
}
w.Header().Set("HX-Current-Paths", string(currentJSON))
}
renderPartial(w, "dashboard", data)
}
}
func Settings(w http.ResponseWriter, r *http.Request) {
data := map[string]interface{}{
"Title": "Settings Page",
"Options": []string{"Option 1", "Option 2", "Option 3"},
}
renderPartial(w, "settings", data)
}
func Library(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "library",nil)
}
func GoDownload(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "godownloader_download",nil)
}
func GoDownloadLinkCollectors(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "godownloader_linkcollectors",nil)
}
func GetDebridClient(db *gorm.DB) *debridlink.Client {
return debridlink.NewClient(db)
}
func GoDownloadSettingDelete(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
DebridClient := GetDebridClient(db)
idStr := r.URL.Query().Get("id")
if idStr == "" {
http.Error(w, "ID manquant", http.StatusBadRequest)
return
}
idUint, err := strconv.ParseUint(idStr, 10, 64)
if err != nil {
http.Error(w, "ID invalide", http.StatusBadRequest)
return
}
if err := DebridClient.DeleteDebridAccount(ctx, uint(idUint)); err != nil {
http.Error(w, "Erreur lors de la suppression", http.StatusInternalServerError)
return
}
http.Redirect(w, r, "/godownloader/settings", http.StatusSeeOther)
}
}
func GoDownloadSettingToggleActive(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
DebridClient := debridlink.NewClient(db)
idStr := r.URL.Query().Get("id")
idUint, err := strconv.ParseUint(idStr, 10, 32)
if err != nil {
http.Error(w, "ID invalide", http.StatusBadRequest)
return
}
err = DebridClient.ToggleActiveStatus(ctx, uint(idUint))
if err != nil {
log.Println("Erreur lors du toggle:", err)
http.Error(w, "Échec de mise à jour", http.StatusInternalServerError)
return
}
// Fetch the updated list
accounts, err := DebridClient.ListDebridAccounts(ctx)
if err != nil {
http.Error(w, "Erreur lors du chargement des comptes", http.StatusInternalServerError)
return
}
// HTMX request or full page
if r.Header.Get("HX-Request") == "true" {
renderPartial(w, "partials/accounts_table", map[string]interface{}{
"accounts": accounts,
})
} else {
renderPartial(w, "godownloader_setting", map[string]interface{}{
"accounts": accounts,
})
}
}
}
func GoDownloadSetting(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
client := debridlink.NewClient(db)
switch r.Method {
case http.MethodPost:
if err := r.ParseForm(); err != nil {
http.Error(w, "Form invalide", http.StatusBadRequest)
return
}
username := r.FormValue("username")
password := r.FormValue("password")
deviceResp, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
if err != nil {
log.Println("[OAuth2] Erreur device_code:", err)
http.Error(w, "Erreur OAuth: "+err.Error(), http.StatusInternalServerError)
return
}
// Display the code + URL in #auth-status
renderPartial(w, "oauth_device_code", map[string]any{
"code": deviceResp.UserCode,
"url": deviceResp.VerificationURL,
})
// Asynchronous polling
go func() {
tokens, err := client.PollDeviceToken(context.Background(), deviceResp.DeviceCode, deviceResp.Interval)
if err != nil {
log.Println("[OAuth2] Polling échoué:", err)
return
}
account := &debridlink.DebridAccount{
Host: "debrid-link.com",
Username: username,
Password: password,
IsActive: true,
AccessToken: tokens.AccessToken,
RefreshToken: tokens.RefreshToken,
ExpiresAt: time.Now().Add(time.Duration(tokens.ExpiresIn) * time.Second),
}
if err := db.Create(account).Error; err != nil {
log.Println("[DB] Sauvegarde échouée:", err)
return
}
log.Println("[OAuth2] Compte sauvegardé")
}()
case http.MethodGet:
accounts, _ := client.ListDebridAccounts(ctx)
renderPartial(w, "godownloader_setting", map[string]any{
"accounts": accounts,
})
}
}
}
func GoDownloadPartialTable(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
client := debridlink.NewClient(db)
accounts, _ := client.ListDebridAccounts(ctx)
renderPartial(w, "accounts_table", map[string]any{
"accounts": accounts,
})
}}
func PollStatusHandler(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var count int64
db.Model(&debridlink.DebridAccount{}).Where("is_active = ?", true).Count(&count)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]bool{
"success": count > 0,
})
}
}
func GoDownload2(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("HX-Trigger", "forceUpdate")
log.Printf("GoDownload2")
jobs := download.ListJobs(db)
fmt.Printf("%+v\n", jobs)
var paths []models.PathDownload
db.Find(&paths)
data := map[string]interface{}{
"jobs": jobs,
"paths": paths,
"Now": time.Now(), // 👈 ajoute la clé "Now"
}
download.Broadcast()
renderTemplate(w, "godownloader_download", data)
}
}
func HandleAddJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
link := r.FormValue("link")
pathIDStr := r.FormValue("path_id")
parsedID, err := strconv.Atoi(pathIDStr)
if err != nil {
http.Error(w, "Chemin invalide", http.StatusBadRequest)
return
}
log.Println("[HTTP] Lien reçu :", link)
log.Println("[HTTP] ID de chemin :", parsedID)
// Debrid-Link authentication
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// Unrestrict the link
ctx := r.Context()
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("[ERROR] Echec lors de l'ajout du lien : %v\n", err)
http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
return
}
// Register each link as a "waiting" job
for _, l := range links {
log.Printf("[l'id] : %v\n", l.ID)
streamInfo, err := client.CreateTranscode(ctx, l.ID)
if err != nil {
// Log and keep the job: it is still usable without a stream URL.
log.Println("CreateTranscode error:", err)
}
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: parsedID,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: "",
}
if streamInfo != nil {
job.StreamURL = streamInfo.StreamURL
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("[ERROR] Job non enregistré : %v\n", err)
}
}
// Update the partial view (jobs table)
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
fmt.Printf("%+v\n", data)
download.Broadcast() // notify SSE clients right away
renderPartial(w, "downloads_table", data)
}
}
// HandleJobsStream opens an SSE stream and emits a lightweight "jobs" refresh signal on every change
func HandleJobsStream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
log.Println("[DEBUG] Nouvelle connexion au flux SSE")
flusher, ok := w.(http.Flusher)
if !ok {
log.Println("[ERROR] Flusher non supporté")
http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
return
}
// SSE headers
w.Header().Set("Content-Type", "text/event-stream")
w.Header().Set("Cache-Control", "no-cache")
w.Header().Set("Connection", "keep-alive")
w.Header().Set("Access-Control-Allow-Origin", "*") // utile en dev ou si besoin CORS
ch := download.Subscribe()
log.Println("[DEBUG] Abonné au canal de téléchargement")
defer func() {
download.Unsubscribe(ch)
log.Println("[DEBUG] Désabonnement du canal de téléchargement")
}()
// Send a well-formed initial message (valid JSON)
log.Println("[DEBUG] Envoi du signal initial")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
// Stream loop
for {
select {
case <-r.Context().Done():
log.Println("[DEBUG] Fermeture de la connexion SSE (client disconnect)")
return
case <-ch:
log.Println("[DEBUG] Événement reçu sur le canal — envoi SSE")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
}
}
}
}
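// registerJobRoutesExample is a hypothetical wiring of the job handlers onto a
// gorilla/mux router. The URL paths and HTTP methods here are assumptions for
// illustration; the real routes are registered elsewhere in the application.
func registerJobRoutesExample(r *mux.Router, db *gorm.DB) {
	r.HandleFunc("/godownloader/jobs/stream", HandleJobsStream(db)) // SSE refresh signal
	r.HandleFunc("/godownloader/jobs", HandleAddJob(db)).Methods(http.MethodPost)
	r.HandleFunc("/godownloader/jobs/{id}/start", HandleStartJob(db)).Methods(http.MethodPost)
	r.HandleFunc("/godownloader/jobs/{id}/pause", HandlePauseJob).Methods(http.MethodPost)
	r.HandleFunc("/godownloader/jobs/{id}/resume", HandleResumeJob(db)).Methods(http.MethodPost)
	r.HandleFunc("/godownloader/jobs/{id}", HandleDeleteJob(db)).Methods(http.MethodDelete)
}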
// sendSSEUpdate renders the partial's HTML and sends it as a "jobs" event
func sendSSEUpdate(w http.ResponseWriter, flusher http.Flusher, db *gorm.DB) error {
// 1) Create the pipe
pr, pw := io.Pipe()
// 2) In a goroutine, parse and execute the partial into pw
go func() {
defer pw.Close()
// Load the partial's template file
tmpl, err := template.ParseFiles("./templates/downloads_table.pages.tmpl")
if err != nil {
log.Printf("ParseFiles error: %v", err)
return
}
// Prepare the actual data
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
// Execute *only* the "downloads_table" define
if err := tmpl.ExecuteTemplate(pw, "downloads_table", data); err != nil {
log.Printf("ExecuteTemplate error: %v", err)
return
}
}()
// 3) Read the full rendered HTML
htmlBytes, err := io.ReadAll(pr)
if err != nil {
return fmt.Errorf("lecture rendu échouée: %w", err)
}
// 4) Build the SSE message:
// - event: jobs
// - each HTML line prefixed with "data:"
fmt.Fprintf(w, "event: jobs\n")
for _, line := range strings.Split(string(htmlBytes), "\n") {
fmt.Fprintf(w, "data: %s\n", line)
}
// blank line to terminate the event
fmt.Fprintf(w, "\n")
// 5) Flush so the client receives it immediately
flusher.Flush()
return nil
}
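// exampleStreamRenderedJobs is a sketch of an alternative to HandleJobsStream
// that pushes the rendered "downloads_table" partial itself on every broadcast,
// via sendSSEUpdate, instead of a lightweight {"refresh": true} signal. It is
// not wired to any route; it only illustrates how sendSSEUpdate would be used.
func exampleStreamRenderedJobs(db *gorm.DB) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		flusher, ok := w.(http.Flusher)
		if !ok {
			http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")
		ch := download.Subscribe()
		defer download.Unsubscribe(ch)
		for {
			select {
			case <-r.Context().Done():
				return
			case <-ch:
				if err := sendSSEUpdate(w, flusher, db); err != nil {
					log.Printf("SSE update failed: %v", err)
					return
				}
			}
		}
	}
}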
func HandleListJobsPartial(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs(db)
data := map[string]interface{}{
"jobs": jobs,
}
//download.Broadcast()
renderPartial(w, "downloads_table", data)
}
}
var (
jobs = make(map[string]*download.DownloadJob)
jobsMu sync.Mutex
)
func HandleStartJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
log.Printf("[id] job id= "+id)
// 1. Look up in the in-memory map
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
// 2. Otherwise fall back to the database
if !exists {
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
// important: copy into a new variable so the map stores a stable pointer
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
}
// 3. Set up the Debrid-Link client
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 4. Start the actual download
go download.StartDownload(job,job.Link,client, db)
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
}
func HandlePauseJob(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.UpdateJobStatus(id, "paused", nil)
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
// 1. Look up in memory
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
// 2. If absent, fall back to the DB
if !exists {
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
}
// 3. Initialize the Debrid-Link client
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 4. Restart the download
go download.StartDownload(job,job.Link,client,db)
w.WriteHeader(http.StatusNoContent)
}
}
func HandleDeleteJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.DeleteJob(id,db)
go download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}}
func HandleDeleteMultipleJobs(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, "Impossible de lire les IDs", http.StatusBadRequest)
return
}
ids := r.Form["ids[]"]
if len(ids) == 0 {
http.Error(w, "Aucun ID reçu", http.StatusBadRequest)
return
}
for _, id := range ids {
download.DeleteJob(id, db)
}
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
}
func StreamHandler(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
cur := r.URL.Query().Get("path") // ex: "", "Icons", "Code/Sub"
// 1) sidebar: only the root level is handled
root, _ := listEntries(base, "")
var dirs []Entry
for _, e := range root {
if e.IsDir {
dirs = append(dirs, e)
}
}
// 2) current directory contents
entries, _ := listEntries(base, cur)
data := map[string]interface{}{
"Dirs": dirs,
"Entries": entries,
"CurrentPath": cur,
}
// For an HTMX call (partial list), return just the grid
if r.Header.Get("HX-Request") == "true" {
renderPartial(w, "_file_list", data)
return
}
// Otherwise render the full page
renderTemplate(w, "folders", data)
}
func DetailHandler(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
rel := r.URL.Query().Get("path")
log.Printf("Reçu path: %q", rel)
// Sanitize: drop any relative path components
rel = filepath.Clean("/" + rel) // removes "..", etc.
rel = strings.TrimPrefix(rel, "/")
log.Printf("Path nettoyé (rel): %q", rel)
absPath := filepath.Join(base, rel)
log.Printf("Chemin absolu construit (absPath): %q", absPath)
info, err := os.Stat(absPath)
if err != nil {
log.Printf("Erreur os.Stat: %v", err)
http.NotFound(w, r)
return
}
// Safety check: make sure we stay inside base
if !strings.HasPrefix(absPath, base) {
log.Printf("Sécurité: chemin hors du base: %q", absPath)
http.NotFound(w, r)
return
}
entry := Entry{
Name: info.Name(),
Path: rel,
IsDir: info.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
}
log.Printf("Entrée trouvée: %+v", entry)
renderPartial(w, "_file_detail", map[string]interface{}{
"Entry": entry,
})
}
var seriesRegex = regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
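// exampleSeriesName illustrates what seriesRegex captures (the sample name is
// hypothetical): for "Some.Show.S01E02.1080p.mkv" the first capture group is
// "Some.Show", which HandleAddJobsMultiple below uses as the per-series
// sub-folder name.
func exampleSeriesName(fileName string) string {
	if m := seriesRegex.FindStringSubmatch(fileName); len(m) == 2 {
		return m[1]
	}
	return fileName // not an SxxExx release: keep the full name
}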
// HandleAddJobsMultiple unrestricts multiple links, auto-creates per-series sub-folders, and registers the jobs
func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// 1. Parse the form
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
// 2. Collect the links
raw := r.FormValue("links")
lines := strings.Split(raw, "\n")
// 3. Resolve the main destination folder
idStr := r.FormValue("path_id")
baseID, err := strconv.ParseInt(idStr, 10, 64)
if err != nil {
http.Error(w, "ID de chemin invalide", http.StatusBadRequest)
return
}
var basePath models.PathDownload
if err := db.First(&basePath, baseID).Error; err != nil {
http.Error(w, "Dossier principal introuvable", http.StatusBadRequest)
return
}
// 4. Prepare the DebridLink client
ctx := context.Background()
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 5. Iterate over each link to unrestrict
for _, link := range lines {
link = strings.TrimSpace(link)
if link == "" {
continue
}
// Unrestrict: link -> actual download links
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("Échec débridage de %s: %v", link, err)
continue
}
for _, l := range links {
// 5a. Automatically determine the series name
clean := sanitizeFileName(l.Name)
series := clean
if m := seriesRegex.FindStringSubmatch(clean); len(m) == 2 {
series = m[1]
}
// 5b. Assign the PathID
assignID := int(basePath.ID)
if series != "" {
dirPath := filepath.Join(basePath.Path, series)
if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
log.Printf("Erreur création dossier %s: %v", dirPath, err)
}
// Look up or create the record in the database
var sub models.PathDownload
if err := db.Where("path = ?", dirPath).First(&sub).Error; err != nil {
if err == gorm.ErrRecordNotFound {
sub = models.PathDownload{Path: dirPath, PathName: series}
if err := db.Create(&sub).Error; err != nil {
log.Printf("Erreur création PathDownload: %v", err)
}
} else {
log.Printf("Erreur lecture PathDownload: %v", err)
}
}
assignID = int(sub.ID)
}
// 5c. Optional transcode
streamInfo, err := client.CreateTranscode(ctx, l.ID)
if err != nil {
log.Printf("Erreur transcode pour %s: %v", l.ID, err)
}
// 5d. Register the job
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: assignID,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: "",
}
if streamInfo != nil {
job.StreamURL = streamInfo.StreamURL
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("Erreur enregistrement job: %v", err)
continue
}
}
}
// 6. Notify the frontend
download.Broadcast()
//w.WriteHeader(http.StatusNoContent)
}
}
// sanitizeFileName reuses the same logic as download.SanitizeFileName
func sanitizeFileName(name string) string {
return download.SanitizeFileName(name)
}
// getAllPaths returns all PathDownload records
func getAllPaths(db *gorm.DB) []*models.PathDownload {
var paths []*models.PathDownload
db.Order("name").Find(&paths)
return paths
}
type ffprobeOut struct {
Format struct{ Duration string `json:"duration"` } `json:"format"`
Streams []struct {
CodecType string `json:"codec_type"`
Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"`
} `json:"streams"`
}
func probe(ctx context.Context, file string) (*ffprobeOut, error) {
cmd := exec.CommandContext(ctx,
"ffprobe", "-v", "error",
"-print_format", "json",
"-show_format", "-show_streams",
file,
)
out, err := cmd.Output()
if err != nil {
return nil, err
}
var info ffprobeOut
if err := json.Unmarshal(out, &info); err != nil {
return nil, err
}
return &info, nil
}
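// exampleProbeSummary is a hypothetical usage of probe: read the duration (in
// seconds) and the video resolution of a file, with the same 3-second timeout
// the media handlers below use.
func exampleProbeSummary(parent context.Context, file string) (seconds int64, width, height int, err error) {
	ctx, cancel := context.WithTimeout(parent, 3*time.Second)
	defer cancel()
	info, err := probe(ctx, file)
	if err != nil {
		return 0, 0, 0, err
	}
	if d, perr := strconv.ParseFloat(info.Format.Duration, 64); perr == nil {
		seconds = int64(d)
	}
	for _, s := range info.Streams {
		if s.CodecType == "video" {
			width, height = s.Width, s.Height
			break
		}
	}
	return seconds, width, height, nil
}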
type mediaItemView struct {
Title string
Duration int64 // in seconds
DurationFmt string // e.g. "3:45"
Width, Height int
ThumbURL string
FilePath string
MediaPartID int64
}
// PathMedia returns the sub-folders and media files of a `PathDownload` folder.
// The `sub` query param is used to navigate into sub-folders.
func PathMedia(db *gorm.DB) http.HandlerFunc {
allowed := map[string]bool{
".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
".pdf": true, ".epub": true, ".cbz": true,
}
return func(w http.ResponseWriter, r *http.Request) {
// 1) Fetch the PathDownload
vars := mux.Vars(r)
pid, _ := strconv.ParseInt(vars["id"], 10, 64)
var pd models.PathDownload
if err := db.First(&pd, pid).Error; err != nil {
http.Error(w, "Dossier introuvable", http.StatusNotFound)
return
}
// 2) Determine the current path
sub := r.URL.Query().Get("sub") // e.g. "Film/Test"
current := filepath.Join(pd.Path, filepath.FromSlash(sub))
// 3) Read the directory entries
entries, err := os.ReadDir(current)
if err != nil {
http.Error(w, "Impossible de lire le dossier", http.StatusInternalServerError)
return
}
// 4) Build the lists
type dirView struct {
Name string
SubPath string
}
var dirs []dirView
var medias []mediaItemView
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
for _, e := range entries {
name := e.Name()
full := filepath.Join(current, name)
if e.IsDir() {
// sub-folder
dirs = append(dirs, dirView{
Name: name,
SubPath: filepath.ToSlash(filepath.Join(sub, name)),
})
} else {
ext := strings.ToLower(filepath.Ext(name))
if !allowed[ext] {
continue
}
view := mediaItemView{Title: name, FilePath: full}
// videos: metadata + screenshot
if ext == ".mkv" || ext == ".avi" || ext == ".mp4" || ext == ".mov" {
ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
info, err := probe(ctx, full)
cancel()
if err == nil {
if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
view.Duration = int64(d)
view.DurationFmt = fmt.Sprintf("%d:%02d", view.Duration/60, view.Duration%60)
}
for _, s := range info.Streams {
if s.CodecType == "video" {
view.Width, view.Height = s.Width, s.Height
break
}
}
}
// screenshot
base := strings.TrimSuffix(name, ext)
thumbName := base + ".jpg"
thumbPath := filepath.Join(thumbDir, thumbName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
exec.Command("ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath).Run()
}
view.ThumbURL = "/static/thumbs/" + thumbName
} else {
// generic icons for images/PDF/EPUB/CBZ
view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
}
medias = append(medias, view)
}
}
// 5) Render
renderPartial(w, "media_list", map[string]interface{}{
"PathID": pid,
"CurrentSub": sub,
"Dirs": dirs,
"MediaItems": medias,
})
}
}
type mediaDetailView struct {
Title string
Summary string
DurationFmt string
ThumbURL string
HLSURL string // HLS playlist URL
}
// MediaDetail returns the HTML partial for a media item's detail view
func MediaDetail(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
// Retrieve pathID and sub from the query string
pathIDStr := r.URL.Query().Get("pathID")
sub := r.URL.Query().Get("sub")
var pathID int64
if pathIDStr != "" {
pathID, _ = strconv.ParseInt(pathIDStr, 10, 64)
}
var view mediaDetailView
if partID > 0 {
// --- DB-backed case ---
var item struct {
models.MetadataItem
MediaPartID int64
File string
UserThumbURL string
}
db.Table("metadata_items").
Select("metadata_items.*, media_parts.id AS media_part_id, media_parts.file, metadata_items.user_thumb_url").
Joins("JOIN media_items ON media_items.metadata_item_id = metadata_items.id").
Joins("JOIN media_parts ON media_parts.media_item_id = media_items.id").
Where("media_parts.id = ?", partID).
Scan(&item)
if item.MediaPartID == 0 {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
// format the duration
m := item.Duration / 60
s := item.Duration % 60
view = mediaDetailView{
Title: item.Title,
Summary: item.Summary,
DurationFmt: fmt.Sprintf("%d:%02d", m, s),
ThumbURL: item.UserThumbURL,
HLSURL: fmt.Sprintf("/hls/%d/index.m3u8", item.MediaPartID),
}
} else {
// --- FS-only case ---
path := r.URL.Query().Get("path")
if path == "" {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
// base name and thumbnail
title := filepath.Base(path)
ext := filepath.Ext(path)
base := strings.TrimSuffix(title, ext)
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
thumbPath := filepath.Join(thumbDir, base+".jpg")
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
// grab a frame at 5s
exec.CommandContext(context.Background(),
"ffmpeg", "-ss", "5", "-i", path, "-frames:v", "1", thumbPath,
).Run()
}
view = mediaDetailView{
Title: title,
Summary: "",
DurationFmt: "",
ThumbURL: "/static/thumbs/" + base + ".jpg",
// pass the path as a query param so HLSStream knows where to look
HLSURL: fmt.Sprintf("/hls/0/index.m3u8?path=%s", url.QueryEscape(path)),
}
}
renderPartial(w, "media_detail", map[string]interface{}{
"PathID": pathID,
"CurrentSub": sub,
"item": view,
})
}
}
// HLSStream serves HLS for both DB-backed (partID > 0) and FS-only (partID == 0) media
func HLSStream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// 1) Identify the partID and the temporary directory
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
tmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("hls_%d", partID))
playlist := filepath.Join(tmpDir, "index.m3u8")
// 2) Decide whether (re)generation is needed
needGen := false
var filePath string
if partID > 0 {
// DB-backed case
var part models.MediaPart
if err := db.First(&part, partID).Error; err != nil {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
filePath = part.File
needGen = true
} else {
// FS-only case: generate only if the playlist is missing
if _, err := os.Stat(playlist); os.IsNotExist(err) {
filePath = r.URL.Query().Get("path")
if filePath == "" {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
needGen = true
}
}
// 3) (Re)generate the HLS output if needed
if needGen {
os.MkdirAll(tmpDir, 0755)
cmd := exec.CommandContext(r.Context(),
"ffmpeg",
"-i", filePath,
"-c:v", "copy", "-c:a", "copy",
"-f", "hls",
"-hls_time", "4",
"-hls_list_size", "0",
"-hls_segment_filename", filepath.Join(tmpDir, "seg%d.ts"),
playlist,
)
if out, err := cmd.CombinedOutput(); err != nil {
log.Println("ffmpeg HLS error:", err, string(out))
http.Error(w, "Erreur de transcodage", http.StatusInternalServerError)
return
}
}
// 4) Serve the whole tmpDir under /hls/{partID}/…
prefix := fmt.Sprintf("/hls/%d/", partID)
http.StripPrefix(prefix,
http.FileServer(http.Dir(tmpDir)),
).ServeHTTP(w, r)
}
}
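// registerHLSRouteExample is a hypothetical route registration for HLSStream:
// the playlist and the .ts segment requests must hit the same handler so that
// http.StripPrefix can serve every file in the temporary HLS directory. The
// path pattern here is an assumption for illustration.
func registerHLSRouteExample(r *mux.Router, db *gorm.DB) {
	r.PathPrefix("/hls/{partID:[0-9]+}/").Handler(HLSStream(db))
}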
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
var buf bytes.Buffer
// Execute the template into the buffer
if err := templates.ExecuteTemplate(&buf, templ+".pages.tmpl", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// Set an explicit content type
w.Header().Set("Content-Type", "text/html; charset=utf-8")
// Single write: implicit 200 status plus the body
w.Write(buf.Bytes())
}
func renderTemplate(w http.ResponseWriter, templ string, data map[string]interface{}) {
// Same idea, but execute the main template straight to the ResponseWriter
if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
// DashboardJSON returns the list of paths under /app/upload as JSON
func DashboardJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var paths []models.PathDownload
root := "/app/upload"
if err := db.
Where("path LIKE ? AND path NOT LIKE ?", root+"/%", root+"/%/%").
Find(&paths).Error; err != nil {
http.Error(w, `{"error":"failed retrieving paths"}`, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"paths": paths})
}
}
// MenuLibraryJSON returns all PathDownload records as JSON
func MenuLibraryJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var paths []models.PathDownload
if err := db.Find(&paths).Error; err != nil {
http.Error(w, `{"error":"failed retrieving paths"}`, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"paths": paths})
}
}
// SettingsJSON returns the Settings page options as JSON
func SettingsJSON() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
data := map[string]interface{}{
"title": "Settings Page",
"options": []string{"Option 1", "Option 2", "Option 3"},
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(data)
}
}
// LibraryJSON returns an empty object (to be filled in) for /library
func LibraryJSON() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{})
}
}
// GoDownloadJSON serves /godownloader/download.json
func GoDownloadJSON() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// Job/path data could be returned here if needed
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{})
}
}
// GoDownloadLinkCollectorsJSON serves /godownloader/linkcollectors.json
func GoDownloadLinkCollectorsJSON() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{})
}
}
// GoDownloadSettingDeleteJSON returns {"success":true} after deletion
func GoDownloadSettingDeleteJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
client := debridlink.NewClient(db)
idStr := r.URL.Query().Get("id")
id, err := strconv.ParseUint(idStr, 10, 64)
if err == nil {
_ = client.DeleteDebridAccount(ctx, uint(id))
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]bool{"success": err == nil})
}
}
// GoDownloadSettingToggleActiveJSON returns the updated list of accounts
func GoDownloadSettingToggleActiveJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
client := debridlink.NewClient(db)
id, _ := strconv.ParseUint(r.URL.Query().Get("id"), 10, 64)
_ = client.ToggleActiveStatus(ctx, uint(id))
accounts, _ := client.ListDebridAccounts(ctx)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"accounts": accounts})
}
}
// GoDownloadSettingJSON returns the account list (GET) or the device code (POST)
func GoDownloadSettingJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
client := debridlink.NewClient(db)
w.Header().Set("Content-Type", "application/json")
switch r.Method {
case http.MethodGet:
accounts, _ := client.ListDebridAccounts(ctx)
json.NewEncoder(w).Encode(map[string]interface{}{"accounts": accounts})
case http.MethodPost:
r.ParseForm()
username := r.FormValue("username")
password := r.FormValue("password")
device, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
if err != nil {
http.Error(w, `{"error":"`+err.Error()+`"}`, http.StatusInternalServerError)
return
}
json.NewEncoder(w).Encode(map[string]string{
"code": device.UserCode,
"url": device.VerificationURL,
})
}
}
}
// GoDownloadPartialTableJSON returns the account list for the partial
func GoDownloadPartialTableJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
accounts, _ := debridlink.NewClient(db).ListDebridAccounts(r.Context())
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"accounts": accounts})
}
}
// GoDownload2JSON returns jobs, paths and now
func GoDownload2JSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs(db)
var paths []models.PathDownload
db.Find(&paths)
data := map[string]interface{}{
"jobs": jobs,
"paths": paths,
"now": time.Now(),
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(data)
}
}
// HandleAddJobJSON adds a job and returns the updated list
func HandleAddJobJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
r.ParseForm()
link := r.FormValue("link")
id, _ := strconv.Atoi(r.FormValue("path_id"))
// client := download.GetFirstActiveAccount(debridlink.NewClient(db))
ctx := r.Context()
links, _ := debridlink.NewClient(db).AddLink(ctx, link)
for _, l := range links {
stream, err := debridlink.NewClient(db).CreateTranscode(ctx, l.ID)
if err != nil {
// Keep the job even if the transcode request failed.
log.Printf("CreateTranscode error for %s: %v", l.ID, err)
}
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: id,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: "",
}
if stream != nil {
job.StreamURL = stream.StreamURL
}
download.RegisterJobWithDB(job, db)
}
jobs := download.ListJobs(db)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"jobs": jobs})
}
}
// HandleListJobsPartialJSON returns the list of jobs
func HandleListJobsPartialJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs(db)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{"jobs": jobs})
}
}
// HandleAddJobsMultipleJSON unrestricts multiple links and reports success
func HandleAddJobsMultipleJSON(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// same logic as the HTML variant, but with a minimal JSON response
r.ParseForm()
raw := r.FormValue("links")
_ = strings.Split(raw, "\n") // traitement identique...
download.Broadcast()
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]bool{"success": true})
}
}
// StreamHandlerJSON returns Dirs, Entries and CurrentPath as JSON
func StreamHandlerJSON() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
cur := r.URL.Query().Get("path")
root, _ := listEntries(base, "")
var dirs []Entry
for _, e := range root {
if e.IsDir {
dirs = append(dirs, e)
}
}
entries, _ := listEntries(base, cur)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{
"dirs": dirs,
"entries": entries,
"currentPath": cur,
})
}
}
// PathMediaJSON returns the sub-folders and media files of a PathDownload as JSON
func PathMediaJSON(db *gorm.DB) http.HandlerFunc {
// allowed extensions and JSON-friendly view types
type dirView struct {
Name string `json:"name"`
SubPath string `json:"subPath"`
}
type mediaItemView struct {
Title string `json:"title"`
Duration int64 `json:"duration"` // in seconds
DurationFmt string `json:"durationFmt"` // e.g. "3:45"
Width int `json:"width"`
Height int `json:"height"`
ThumbURL string `json:"thumbUrl"`
FilePath string `json:"filePath"`
MediaPartID int64 `json:"mediaPartId"`
}
allowed := map[string]bool{
".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
".pdf": true, ".epub": true, ".cbz": true,
}
return func(w http.ResponseWriter, r *http.Request) {
// 1) Fetch the PathDownload
vars := mux.Vars(r)
pid, err := strconv.ParseInt(vars["id"], 10, 64)
if err != nil {
http.Error(w, `{"error":"invalid path ID"}`, http.StatusBadRequest)
return
}
var pd models.PathDownload
if err := db.First(&pd, pid).Error; err != nil {
http.Error(w, `{"error":"path not found"}`, http.StatusNotFound)
return
}
// 2) Determine the current sub-folder
sub := r.URL.Query().Get("sub") // e.g. "Films/Test"
current := filepath.Join(pd.Path, filepath.FromSlash(sub))
// 3) Read the directory entries
entries, err := os.ReadDir(current)
if err != nil {
http.Error(w, `{"error":"cannot read directory"}`, http.StatusInternalServerError)
return
}
// 4) Build the JSON slices
var dirs []dirView
var medias []mediaItemView
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
for _, e := range entries {
name := e.Name()
full := filepath.Join(current, name)
if e.IsDir() {
dirs = append(dirs, dirView{
Name: name,
SubPath: filepath.ToSlash(filepath.Join(sub, name)),
})
continue
}
ext := strings.ToLower(filepath.Ext(name))
if !allowed[ext] {
continue
}
view := mediaItemView{
Title: name,
FilePath: full,
}
// For videos, extract metadata + screenshot
if ext == ".mkv" || ext == ".avi" || ext == ".mp4" || ext == ".mov" {
// Metadata via ffprobe
ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
info, _ := probe(ctx, full)
cancel()
if info != nil {
// duration
if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
secs := int64(d)
view.Duration = secs
view.DurationFmt = fmt.Sprintf("%d:%02d", secs/60, secs%60)
}
// resolution
for _, s := range info.Streams {
if s.CodecType == "video" {
view.Width = s.Width
view.Height = s.Height
break
}
}
}
// Generate the thumbnail
base := strings.TrimSuffix(name, ext)
thumbName := base + ".jpg"
thumbPath := filepath.Join(thumbDir, thumbName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
exec.Command("ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath).Run()
}
view.ThumbURL = "/static/thumbs/" + thumbName
} else {
// Generic icons for images/PDF/EPUB/CBZ
view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
}
medias = append(medias, view)
}
// 5) JSON response
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]interface{}{
"dirs": dirs,
"mediaItems": medias,
})
}
}