// shelfy/renders/renders.go
package renders
import (
"app/shelfly/internal/debridlink"
"app/shelfly/internal/download"
"app/shelfly/internal/models"
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"net/url"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"text/template"
"time"
"github.com/gorilla/mux"
"gorm.io/gorm"
)
var (
// templates holds ALL of your .pages.tmpl pages
templates *template.Template
)
func init() {
funcMap := template.FuncMap{
"hasSuffix": strings.HasSuffix,
"ext": func(name string) string {
return strings.TrimPrefix(filepath.Ext(name), ".")
},
"urlquery": func(s string) string { return url.QueryEscape(s) },
"split": strings.Split,
"trimPrefix": strings.TrimPrefix,
// new helper to convert bytes -> kilobytes
"toKB": func(size int64) float64 {
return float64(size) / 1024
},
}
templates = template.Must(
template.New("").
Funcs(funcMap).
ParseGlob("./templates/*.pages.tmpl"),
)
}
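// exampleTemplateHelpers is a small sketch (never called) of how the helpers
// registered above behave inside a template; the inline template and sample
// data are made up for illustration only.
func exampleTemplateHelpers() {
	t := template.Must(template.New("demo").Funcs(template.FuncMap{
		"ext":  func(name string) string { return strings.TrimPrefix(filepath.Ext(name), ".") },
		"toKB": func(size int64) float64 { return float64(size) / 1024 },
	}).Parse(`{{ .Name }} ({{ ext .Name }}, {{ printf "%.1f" (toKB .Size) }} KB)`))
	// Writes: movie.mkv (mkv, 2048.0 KB)
	_ = t.Execute(os.Stdout, map[string]interface{}{"Name": "movie.mkv", "Size": int64(2097152)})
}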
type Entry struct {
Name, Path string
IsDir bool
ModTime time.Time
Size int64
}
// helper to list a directory
func listEntries(base, rel string) ([]Entry, error) {
dir := filepath.Join(base, rel)
fis, err := os.ReadDir(dir)
if err != nil {
return nil, err
}
out := make([]Entry, 0, len(fis))
for _, fi := range fis {
info, err := fi.Info()
if err != nil {
continue
}
out = append(out, Entry{
Name: fi.Name(),
Path: filepath.ToSlash(filepath.Join(rel, fi.Name())),
IsDir: fi.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
})
}
return out, nil
}
func Login(w http.ResponseWriter, r *http.Request){
renderTemplate(w,"login",nil)
}
func Dashboard(db *gorm.DB)http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var paths []models.PathDownload
root := "/app/upload"
// select everything directly under /app/upload/, but no deeper than one extra path segment
if err := db.
Where("path LIKE ? AND path NOT LIKE ?", root+"/%", root+"/%/%").
Find(&paths).Error; err != nil {
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
return
}
data := map[string]interface{}{
"paths": paths,
}
renderTemplate(w,"dashboard",data)
}
}
func MenuLibrary(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var currentPaths []models.PathDownload
if err := db.Find(&currentPaths).Error; err != nil {
http.Error(w, `{"error": "Failed to retrieve paths"}`, http.StatusInternalServerError)
return
}
// Retrieve the previous version of the paths (if any)
lastUpdate := r.Header.Get("HX-Current-Paths")
var previousPaths []models.PathDownload
if lastUpdate != "" {
json.Unmarshal([]byte(lastUpdate), &previousPaths)
}
// Convert to JSON for comparison
currentJSON, _ := json.Marshal(currentPaths)
previousJSON, _ := json.Marshal(previousPaths)
// Check whether the paths have changed
pathsChanged := string(currentJSON) != string(previousJSON)
data := map[string]interface{}{
"paths": currentPaths,
}
// If this is an HTMX request, add the appropriate headers
if r.Header.Get("HX-Request") == "true" {
if pathsChanged {
w.Header().Set("HX-Trigger", "pathsUpdated")
}
w.Header().Set("HX-Current-Paths", string(currentJSON))
}
renderPartial(w, "dashboard", data)
}
}
func Settings(w http.ResponseWriter, r *http.Request) {
data := map[string]interface{}{
"Title": "Settings Page",
"Options": []string{"Option 1", "Option 2", "Option 3"},
}
renderPartial(w, "settings", data)
}
func Library(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "library",nil)
}
func GoDownload(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "godownloader_download",nil)
}
func GoDownloadLinkCollectors(w http.ResponseWriter, r *http.Request) {
renderPartial(w, "godownloader_linkcollectors",nil)
}
func GetDebridClient(db *gorm.DB) *debridlink.Client {
return debridlink.NewClient(db)
}
func GoDownloadSettingDelete(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
DebridClient := GetDebridClient(db)
idStr := r.URL.Query().Get("id")
if idStr == "" {
http.Error(w, "ID manquant", http.StatusBadRequest)
return
}
idUint, err := strconv.ParseUint(idStr, 10, 64)
if err != nil {
http.Error(w, "ID invalide", http.StatusBadRequest)
return
}
if err := DebridClient.DeleteDebridAccount(ctx, uint(idUint)); err != nil {
http.Error(w, "Erreur lors de la suppression", http.StatusInternalServerError)
return
}
http.Redirect(w, r, "/godownloader/settings", http.StatusSeeOther)
}
}
func GoDownloadSettingToggleActive(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := context.Background()
DebridClient := debridlink.NewClient(db)
idStr := r.URL.Query().Get("id")
idUint, err := strconv.ParseUint(idStr, 10, 32)
if err != nil {
http.Error(w, "ID invalide", http.StatusBadRequest)
return
}
err = DebridClient.ToggleActiveStatus(ctx, uint(idUint))
if err != nil {
log.Println("Erreur lors du toggle:", err)
http.Error(w, "Échec de mise à jour", http.StatusInternalServerError)
return
}
// Fetch the updated list
accounts, err := DebridClient.ListDebridAccounts(ctx)
if err != nil {
http.Error(w, "Erreur lors du chargement des comptes", http.StatusInternalServerError)
return
}
// HTMX request or full page
if r.Header.Get("HX-Request") == "true" {
renderPartial(w, "partials/accounts_table", map[string]interface{}{
"accounts": accounts,
})
} else {
renderPartial(w, "godownloader_setting", map[string]interface{}{
"accounts": accounts,
})
}
}
}
func GoDownloadSetting(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
client := debridlink.NewClient(db)
switch r.Method {
case http.MethodPost:
if err := r.ParseForm(); err != nil {
http.Error(w, "Form invalide", http.StatusBadRequest)
return
}
username := r.FormValue("username")
password := r.FormValue("password")
deviceResp, err := client.RequestDeviceCodeWithCredentials(ctx, username, password)
if err != nil {
log.Println("[OAuth2] Erreur device_code:", err)
http.Error(w, "Erreur OAuth: "+err.Error(), http.StatusInternalServerError)
return
}
// Show the code + URL in #auth-status
renderPartial(w, "oauth_device_code", map[string]any{
"code": deviceResp.UserCode,
"url": deviceResp.VerificationURL,
})
// Asynchronous polling
go func() {
tokens, err := client.PollDeviceToken(context.Background(), deviceResp.DeviceCode, deviceResp.Interval)
if err != nil {
log.Println("[OAuth2] Polling échoué:", err)
return
}
account := &debridlink.DebridAccount{
Host: "debrid-link.com",
Username: username,
Password: password,
IsActive: true,
AccessToken: tokens.AccessToken,
RefreshToken: tokens.RefreshToken,
ExpiresAt: time.Now().Add(time.Duration(tokens.ExpiresIn) * time.Second),
}
if err := db.Create(account).Error; err != nil {
log.Println("[DB] Sauvegarde échouée:", err)
return
}
log.Println("[OAuth2] Compte sauvegardé")
}()
case http.MethodGet:
accounts, _ := client.ListDebridAccounts(ctx)
renderPartial(w, "godownloader_setting", map[string]any{
"accounts": accounts,
})
}
}
}
func GoDownloadPartialTable(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
client := debridlink.NewClient(db)
accounts, _ := client.ListDebridAccounts(ctx)
renderPartial(w, "accounts_table", map[string]any{
"accounts": accounts,
})
}}
func PollStatusHandler(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
var count int64
db.Model(&debridlink.DebridAccount{}).Where("is_active = ?", true).Count(&count)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]bool{
"success": count > 0,
})
}
}
func GoDownload2(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("HX-Trigger", "forceUpdate")
log.Printf("GoDownload2")
jobs := download.ListJobs(db)
fmt.Printf("%+v\n", jobs)
var paths []models.PathDownload
db.Find(&paths)
data := map[string]interface{}{
"jobs": jobs,
"paths": paths,
"Now": time.Now(), // 👈 ajoute la clé "Now"
}
download.Broadcast()
renderTemplate(w, "godownloader_download", data)
}
}
func HandleAddJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
link := r.FormValue("link")
pathIDStr := r.FormValue("path_id")
parsedID, err := strconv.Atoi(pathIDStr)
if err != nil {
http.Error(w, "Chemin invalide", http.StatusBadRequest)
return
}
log.Println("[HTTP] Lien reçu :", link)
log.Println("[HTTP] ID de chemin :", parsedID)
// Debrid-Link authentication
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// Unrestrict the link
ctx := r.Context()
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("[ERROR] Echec lors de l'ajout du lien : %v\n", err)
http.Error(w, "Erreur côté Debrid-Link", http.StatusInternalServerError)
return
}
// Record each link as a "waiting" job
for _, l := range links {
log.Printf("[l'id] : %v\n", l.ID)
streamInfo, err := client.CreateTranscode(ctx,l.ID)
if err != nil {
log.Println("Erreur GetTranscode:", err)
return
}
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: parsedID,
Size: l.Size,
Host: l.Host,
Progress: 0, // required when a value is expected
StreamURL: streamInfo.StreamURL, // stream URL returned by the transcode call
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("[ERROR] Job non enregistré : %v\n", err)
}
}
// Update the partial view (jobs table)
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
fmt.Printf("%+v\n", data)
download.Broadcast() // notify SSE clients right away
renderPartial(w, "downloads_table", data)
}
}
// HandleJobsStream opens an SSE stream and emits a "jobs" refresh signal whenever the job list changes
func HandleJobsStream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
log.Println("[DEBUG] Nouvelle connexion au flux SSE")
flusher, ok := w.(http.Flusher)
if !ok {
log.Println("[ERROR] Flusher non supporté")
http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
return
}
// SSE headers
w.Header().Set("Content-Type", "text/event-stream")
w.Header().Set("Cache-Control", "no-cache")
w.Header().Set("Connection", "keep-alive")
w.Header().Set("Access-Control-Allow-Origin", "*") // utile en dev ou si besoin CORS
ch := download.Subscribe()
log.Println("[DEBUG] Abonné au canal de téléchargement")
defer func() {
download.Unsubscribe(ch)
log.Println("[DEBUG] Désabonnement du canal de téléchargement")
}()
// Send a well-formed initial message (valid JSON)
log.Println("[DEBUG] Envoi du signal initial")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
// Stream loop
for {
select {
case <-r.Context().Done():
log.Println("[DEBUG] Fermeture de la connexion SSE (client disconnect)")
return
case <-ch:
log.Println("[DEBUG] Événement reçu sur le canal — envoi SSE")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
}
}
}
}
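// A minimal client-side sketch for debugging this stream. The route
// ("/jobs/stream") and listen address are assumptions, and bufio would need
// to be imported if this were compiled here; it is shown only as a doc sketch:
//
//	resp, err := http.Get("http://localhost:8080/jobs/stream")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer resp.Body.Close()
//	sc := bufio.NewScanner(resp.Body)
//	for sc.Scan() {
//		// Each event arrives as "event: jobs" followed by `data: {"refresh": true}`.
//		if strings.HasPrefix(sc.Text(), "data: ") {
//			log.Println("refresh signal:", strings.TrimPrefix(sc.Text(), "data: "))
//		}
//	}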
// sendSSEUpdate renders the partial's HTML and sends it as a "jobs" event
func sendSSEUpdate(w http.ResponseWriter, flusher http.Flusher, db *gorm.DB) error {
// 1) Create the pipe
pr, pw := io.Pipe()
// 2) In a goroutine, parse and execute the partial into pw
go func() {
defer pw.Close()
// Load the partial's template file
tmpl, err := template.ParseFiles("./templates/downloads_table.pages.tmpl")
if err != nil {
log.Printf("ParseFiles error: %v", err)
return
}
// Prepare the actual data
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
// Execute *only* the "downloads_table" define
if err := tmpl.ExecuteTemplate(pw, "downloads_table", data); err != nil {
log.Printf("ExecuteTemplate error: %v", err)
return
}
}()
// 3) Read the full rendered HTML
htmlBytes, err := io.ReadAll(pr)
if err != nil {
return fmt.Errorf("lecture rendu échouée: %w", err)
}
// 4) Build the SSE message
// - event: jobs
// - each HTML line prefixed with data:
fmt.Fprintf(w, "event: jobs\n")
for _, line := range strings.Split(string(htmlBytes), "\n") {
fmt.Fprintf(w, "data: %s\n", line)
}
// blank line to terminate the event
fmt.Fprintf(w, "\n")
// 5) Flush to send to the client immediately
flusher.Flush()
return nil
}
func HandleListJobsPartial(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs(db)
data := map[string]interface{}{
"jobs": jobs,
}
//download.Broadcast()
renderPartial(w, "downloads_table", data)
}
}
var (
jobs = make(map[string]*download.DownloadJob)
jobsMu sync.Mutex
)
func HandleStartJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
log.Printf("[id] job id= "+id)
// 1. Look up in the in-memory map
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
// 2. Otherwise fall back to the database
if !exists {
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
// important: copy to a local value so the map holds a stable pointer to it
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
}
// 3. Set up the Debrid-Link client
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 4. Start the actual download
go download.StartDownload(job,job.Link,client, db)
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
}
func HandlePauseJob(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.UpdateJobStatus(id, "paused", nil)
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
// 1. Look up in memory
jobsMu.Lock()
job, exists := jobs[id]
jobsMu.Unlock()
// 2. If missing, fall back to the DB
if !exists {
var j download.DownloadJob
if err := db.First(&j, "id = ?", id).Error; err != nil {
http.Error(w, "Job introuvable", http.StatusNotFound)
return
}
jobCopy := j
jobsMu.Lock()
jobs[id] = &jobCopy
job = &jobCopy
jobsMu.Unlock()
}
// 3. Initialize the Debrid-Link client
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 4. Restart the download
go download.StartDownload(job,job.Link,client,db)
w.WriteHeader(http.StatusNoContent)
}
}
func HandleDeleteJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.DeleteJob(id,db)
go download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}}
func HandleDeleteMultipleJobs(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, "Impossible de lire les IDs", http.StatusBadRequest)
return
}
ids := r.Form["ids[]"]
if len(ids) == 0 {
http.Error(w, "Aucun ID reçu", http.StatusBadRequest)
return
}
for _, id := range ids {
download.DeleteJob(id, db)
}
download.Broadcast() // notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
}
func StreamHandler(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
cur := r.URL.Query().Get("path") // ex: "", "Icons", "Code/Sub"
// 1) sidebar: only the root level is handled
root, _ := listEntries(base, "")
var dirs []Entry
for _, e := range root {
if e.IsDir {
dirs = append(dirs, e)
}
}
// 2) current content
entries, _ := listEntries(base, cur)
data := map[string]interface{}{
"Dirs": dirs,
"Entries": entries,
"CurrentPath": cur,
}
// If this is an HTMX call (partial list), return only the grid
if r.Header.Get("HX-Request") == "true" {
renderPartial(w, "_file_list", data)
return
}
// Otherwise render the full page
renderTemplate(w, "folders", data)
}
func DetailHandler(w http.ResponseWriter, r *http.Request) {
base := "/app/upload"
rel := r.URL.Query().Get("path")
log.Printf("Reçu path: %q", rel)
// Sanitize: strip any relative path components
rel = filepath.Clean("/" + rel) // removes .. etc.
rel = strings.TrimPrefix(rel, "/")
log.Printf("Path nettoyé (rel): %q", rel)
absPath := filepath.Join(base, rel)
log.Printf("Chemin absolu construit (absPath): %q", absPath)
info, err := os.Stat(absPath)
if err != nil {
log.Printf("Erreur os.Stat: %v", err)
http.NotFound(w, r)
return
}
// Protection: make sure we stay inside base
if !strings.HasPrefix(absPath, base) {
log.Printf("Sécurité: chemin hors du base: %q", absPath)
http.NotFound(w, r)
return
}
entry := Entry{
Name: info.Name(),
Path: rel,
IsDir: info.IsDir(),
ModTime: info.ModTime(),
Size: info.Size(),
}
log.Printf("Entrée trouvée: %+v", entry)
renderPartial(w, "_file_detail", map[string]interface{}{
"Entry": entry,
})
}
var seriesRegex = regexp.MustCompile(`^(.+?)\.S\d{2}E\d{2}`)
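// exampleSeriesRegex is an illustrative sketch (never called) of what
// seriesRegex captures; the filename below is made up.
func exampleSeriesRegex() {
	name := "Show.Name.S01E02.1080p.WEB.mkv"
	if m := seriesRegex.FindStringSubmatch(name); len(m) == 2 {
		fmt.Println(m[1]) // prints "Show.Name", used below as the sub-folder name
	}
}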
// HandleAddJobsMultiple unrestricts multiple links, auto-creates sub-folders, and registers the jobs
func HandleAddJobsMultiple(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// 1. Parse the form
if err := r.ParseForm(); err != nil {
http.Error(w, "Requête invalide", http.StatusBadRequest)
return
}
// 2. Collect the links
raw := r.FormValue("links")
lines := strings.Split(raw, "\n")
// 3. Fetch the main folder
idStr := r.FormValue("path_id")
baseID, err := strconv.ParseInt(idStr, 10, 64)
if err != nil {
http.Error(w, "ID de chemin invalide", http.StatusBadRequest)
return
}
var basePath models.PathDownload
if err := db.First(&basePath, baseID).Error; err != nil {
http.Error(w, "Dossier principal introuvable", http.StatusBadRequest)
return
}
// 4. Prepare the DebridLink client
ctx := context.Background()
client := debridlink.NewClient(db)
account := download.GetFirstActiveAccount(client)
if account == nil {
http.Error(w, "Aucun compte Debrid-Link actif", http.StatusBadRequest)
return
}
client.SetAccount(account)
// 5. Iterate over each link to unrestrict
for _, link := range lines {
link = strings.TrimSpace(link)
if link == "" {
continue
}
// Unrestrict link -> real download links
links, err := client.AddLink(ctx, link)
if err != nil {
log.Printf("Échec débridage de %s: %v", link, err)
continue
}
for _, l := range links {
// 5a. Automatically determine the series name
clean := sanitizeFileName(l.Name)
series := clean
if m := seriesRegex.FindStringSubmatch(clean); len(m) == 2 {
series = m[1]
}
// 5b. Assign the PathID
assignID := int(basePath.ID)
if series != "" {
dirPath := filepath.Join(basePath.Path, series)
if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
log.Printf("Erreur création dossier %s: %v", dirPath, err)
}
// Find or create the record in the database
var sub models.PathDownload
if err := db.Where("path = ?", dirPath).First(&sub).Error; err != nil {
if err == gorm.ErrRecordNotFound {
sub = models.PathDownload{Path: dirPath, PathName: series}
if err := db.Create(&sub).Error; err != nil {
log.Printf("Erreur création PathDownload: %v", err)
}
} else {
log.Printf("Erreur lecture PathDownload: %v", err)
}
}
assignID = int(sub.ID)
}
// 5c. Optional transcoding
streamInfo, err := client.CreateTranscode(ctx, l.ID)
if err != nil {
log.Printf("Erreur transcode pour %s: %v", l.ID, err)
}
// 5d. Register the job
job := &download.DownloadJob{
ID: l.ID,
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: assignID,
Size: l.Size,
Host: l.Host,
Progress: 0,
StreamURL: "",
}
if streamInfo != nil {
job.StreamURL = streamInfo.StreamURL
}
if err := download.RegisterJobWithDB(job, db); err != nil {
log.Printf("Erreur enregistrement job: %v", err)
continue
}
}
}
// 6. Notify the frontend
download.Broadcast()
//w.WriteHeader(http.StatusNoContent)
}
}
// sanitizeFileName uses the same logic as download.SanitizeFileName
func sanitizeFileName(name string) string {
return download.SanitizeFileName(name)
}
// getAllPaths returns all PathDownload records
func getAllPaths(db *gorm.DB) []*models.PathDownload {
var paths []*models.PathDownload
db.Order("name").Find(&paths)
return paths
}
type ffprobeOut struct {
Format struct{ Duration string `json:"duration"` } `json:"format"`
Streams []struct {
CodecType string `json:"codec_type"`
Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"`
} `json:"streams"`
}
func probe(ctx context.Context, file string) (*ffprobeOut, error) {
cmd := exec.CommandContext(ctx,
"ffprobe", "-v", "error",
"-print_format", "json",
"-show_format", "-show_streams",
file,
)
out, err := cmd.Output()
if err != nil {
return nil, err
}
var info ffprobeOut
if err := json.Unmarshal(out, &info); err != nil {
return nil, err
}
return &info, nil
}
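// exampleProbeOutput is an illustrative sketch (never called) of the ffprobe
// JSON shape that ffprobeOut decodes; the payload below is hand-written, not
// taken from a real ffprobe run.
func exampleProbeOutput() {
	sample := []byte(`{
		"format": {"duration": "225.4"},
		"streams": [
			{"codec_type": "video", "width": 1920, "height": 1080},
			{"codec_type": "audio"}
		]
	}`)
	var info ffprobeOut
	if err := json.Unmarshal(sample, &info); err != nil {
		log.Println("unmarshal:", err)
		return
	}
	// Prints: 225.4 1920x1080
	fmt.Printf("%s %dx%d\n", info.Format.Duration, info.Streams[0].Width, info.Streams[0].Height)
}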
type mediaItemView struct {
Title string
Duration int64 // in seconds
DurationFmt string // e.g. "3:45"
Width, Height int
ThumbURL string
FilePath string
MediaPartID int64
}
// PathMedia returns the list of sub-folders and media files of the `PathDownload` folder.
// A `sub` query param is used to navigate into sub-folders.
func PathMedia(db *gorm.DB) http.HandlerFunc {
allowed := map[string]bool{
".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
".pdf": true, ".epub": true, ".cbz": true,
}
return func(w http.ResponseWriter, r *http.Request) {
// 1) Fetch the PathDownload
vars := mux.Vars(r)
pid, _ := strconv.ParseInt(vars["id"], 10, 64)
var pd models.PathDownload
if err := db.First(&pd, pid).Error; err != nil {
http.Error(w, "Dossier introuvable", http.StatusNotFound)
return
}
// 2) Determine the current path
sub := r.URL.Query().Get("sub") // e.g. "Film/Test"
current := filepath.Join(pd.Path, filepath.FromSlash(sub))
// 3) Read the directory entries
entries, err := os.ReadDir(current)
if err != nil {
http.Error(w, "Impossible de lire le dossier", http.StatusInternalServerError)
return
}
// 4) Prepare the lists
type dirView struct {
Name string
SubPath string
}
var dirs []dirView
var medias []mediaItemView
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
for _, e := range entries {
name := e.Name()
full := filepath.Join(current, name)
if e.IsDir() {
// sub-folder
dirs = append(dirs, dirView{
Name: name,
SubPath: filepath.ToSlash(filepath.Join(sub, name)),
})
} else {
ext := strings.ToLower(filepath.Ext(name))
if !allowed[ext] {
continue
}
view := mediaItemView{Title: name, FilePath: full}
// videos: metadata + screenshot
if ext == ".mkv" || ext == ".avi" || ext == ".mp4" || ext == ".mov" {
ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
info, err := probe(ctx, full)
cancel()
if err == nil {
if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
view.Duration = int64(d)
view.DurationFmt = fmt.Sprintf("%d:%02d", view.Duration/60, view.Duration%60)
}
for _, s := range info.Streams {
if s.CodecType == "video" {
view.Width, view.Height = s.Width, s.Height
break
}
}
}
// screenshot
base := strings.TrimSuffix(name, ext)
thumbName := base + ".jpg"
thumbPath := filepath.Join(thumbDir, thumbName)
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
exec.Command("ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath).Run()
}
view.ThumbURL = "/static/thumbs/" + thumbName
} else {
// generic icons for images/PDF/EPUB/CBZ
view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
}
medias = append(medias, view)
}
}
// 5) Render
renderPartial(w, "media_list", map[string]interface{}{
"PathID": pid,
"CurrentSub": sub,
"Dirs": dirs,
"MediaItems": medias,
})
}
}
type mediaDetailView struct {
Title string
Summary string
DurationFmt string
ThumbURL string
StreamURL string
}
// MediaDetail renders the detail page + player
func MediaDetail(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
// 1) Extract the partID
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
var view mediaDetailView
if partID > 0 {
// --- DATABASE CASE ---
var item struct {
models.MetadataItem
MediaPartID int64
File string
UserThumbURL string
}
db.Table("metadata_items").
Select("metadata_items.*, media_parts.id AS media_part_id, media_parts.file, metadata_items.user_thumb_url").
Joins("JOIN media_items ON media_items.metadata_item_id = metadata_items.id").
Joins("JOIN media_parts ON media_parts.media_item_id = media_items.id").
Where("media_parts.id = ?", partID).
Scan(&item)
if item.MediaPartID == 0 {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
// format the duration
m := item.Duration / 60
s := item.Duration % 60
view = mediaDetailView{
Title: item.Title,
Summary: item.Summary,
DurationFmt: strconv.FormatInt(m, 10) + ":" + fmt.Sprintf("%02d", s),
ThumbURL: item.UserThumbURL,
StreamURL: "/stream/" + strconv.FormatInt(item.MediaPartID, 10),
}
} else {
// --- FS-ONLY CASE ---
path := r.URL.Query().Get("path")
if path == "" {
http.Error(w, "Média introuvable", http.StatusNotFound)
return
}
// title
title := filepath.Base(path)
// generate a thumbnail if needed
ext := filepath.Ext(path)
base := strings.TrimSuffix(filepath.Base(path), ext)
thumbDir := filepath.Join("static", "thumbs")
os.MkdirAll(thumbDir, 0755)
thumbPath := filepath.Join(thumbDir, base+".jpg")
if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
// screenshot at the 5-second mark
exec.CommandContext(r.Context(),
"ffmpeg", "-ss", "5", "-i", path, "-frames:v", "1", thumbPath,
).Run()
}
view = mediaDetailView{
Title: title,
Summary: "", // pas de résumé en FS-only
DurationFmt: "", // on ne probe pas ici
ThumbURL: "/static/thumbs/" + base + ".jpg",
// pass the path as a query param for streaming
StreamURL: "/stream/0?path=" + url.QueryEscape(path),
}
}
// 3) Render the partial into #content
renderPartial(w, "media_detail", map[string]interface{}{
"item": view,
})
}
}
// Stream transcodes on the fly to progressive MP4 and pipes the output directly into the response
func Stream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
partID, _ := strconv.ParseInt(mux.Vars(r)["partID"], 10, 64)
var part models.MediaPart
if err := db.First(&part, partID).Error; err != nil {
http.Error(w, "Not found", http.StatusNotFound)
return
}
w.Header().Set("Content-Type", "video/mp4")
// ffmpeg piped into the response
cmd := exec.CommandContext(r.Context(),
"ffmpeg",
"-i", part.File,
"-c:v", "libx264",
"-c:a", "aac",
"-movflags", "frag_keyframe+empty_moov+faststart",
"-f", "mp4",
"pipe:1",
)
cmd.Stdout = w
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
log.Println("ffmpeg:", err)
}
}
}
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
// Execute the `<templ>.pages.tmpl` define directly
if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
func renderTemplate(w http.ResponseWriter, templ string, data map[string]interface{}) {
// Same, execute the main page template
if err := templates.ExecuteTemplate(w, templ+".pages.tmpl", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
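// newExampleRouter is a hypothetical wiring sketch (not used by the app); the
// real routes live elsewhere in the project, and the paths below are
// assumptions chosen only to show how these handlers could be mounted.
func newExampleRouter(db *gorm.DB) *mux.Router {
	r := mux.NewRouter()
	r.HandleFunc("/login", Login)
	r.HandleFunc("/dashboard", Dashboard(db))
	r.HandleFunc("/jobs/stream", HandleJobsStream(db))
	r.HandleFunc("/jobs/{id}/start", HandleStartJob(db)).Methods(http.MethodPost)
	r.HandleFunc("/stream/{partID}", Stream(db))
	return r
}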