cangui 2025-06-21 18:29:39 +02:00
parent a6d64980fa
commit 04e3430912


@@ -840,48 +840,143 @@ func getAllPaths(db *gorm.DB) []*models.PathDownload {
    return paths
}
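// ffprobeOut mirrors the "format" and "streams" sections of ffprobe's JSON output.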
type ffprobeOut struct {
    Format struct {
        Duration string `json:"duration"`
    } `json:"format"`
    Streams []struct {
        CodecType string `json:"codec_type"`
        Width     int    `json:"width,omitempty"`
        Height    int    `json:"height,omitempty"`
    } `json:"streams"`
}
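// probe runs ffprobe on file (with a caller-supplied context for timeouts) and decodes the JSON it prints.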
func probe(ctx context.Context, file string) (*ffprobeOut, error) {
    cmd := exec.CommandContext(ctx,
        "ffprobe", "-v", "error",
        "-print_format", "json",
        "-show_format", "-show_streams",
        file,
    )
    out, err := cmd.Output()
    if err != nil {
        return nil, err
    }
    var info ffprobeOut
    if err := json.Unmarshal(out, &info); err != nil {
        return nil, err
    }
    return &info, nil
}
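// For reference, the exec call above amounts to running, e.g.:
//
//	ffprobe -v error -print_format json -show_format -show_streams <file>
//
// which, for a typical video file (exact fields vary with the container and
// the ffprobe build), prints JSON along the lines of:
//
//	{
//	  "format":  { "duration": "5421.370000" },
//	  "streams": [ { "codec_type": "video", "width": 1920, "height": 1080 } ]
//	}
//
// Only the fields declared on ffprobeOut are decoded; everything else is ignored.

// mediaItemView bundles a models.MetadataItem with the extra per-file fields the
// media grid template needs: probed duration and dimensions, thumbnail URL,
// on-disk path and (when known) the media_parts row id.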
type mediaItemView struct {
    models.MetadataItem
    Title         string
    Duration      int64
    Width, Height int
    ThumbURL      string
    FilePath      string
    MediaPartID   int64
}
// PathMedia renders the media grid as an HTML partial: it returns the list of
// sub-folders and media files under the `PathDownload` directory.
// A `sub` query parameter is used to navigate into sub-folders.
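// Example requests (assuming the router maps this handler to something like
// GET /paths/{id}/media — the actual route is registered elsewhere):
//
//	GET /paths/3/media            → folders and media at the root of the PathDownload
//	GET /paths/3/media?sub=Films  → contents of the "Films" sub-folder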
func PathMedia(db *gorm.DB) http.HandlerFunc {
    allowed := map[string]bool{
        ".mkv": true, ".avi": true, ".mp4": true, ".mov": true,
        ".jpg": true, ".jpeg": true, ".png": true, ".gif": true,
        ".pdf": true, ".epub": true, ".cbz": true,
    }
    return func(w http.ResponseWriter, r *http.Request) {
        // 1) Fetch the PathDownload record for the {id} route variable
        vars := mux.Vars(r)
        pid, _ := strconv.ParseInt(vars["id"], 10, 64)
        var pd models.PathDownload
        if err := db.First(&pd, pid).Error; err != nil {
            http.Error(w, "Dossier introuvable", http.StatusNotFound)
            return
        }
        // 2) Resolve the current directory from the `sub` query parameter
        sub := r.URL.Query().Get("sub") // e.g. "Film/Test"
        current := filepath.Join(pd.Path, filepath.FromSlash(sub))
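        // NOTE: `sub` is taken verbatim from the query string; a hardened version
        // would reject ".." segments (or check that `current` still has pd.Path as
        // a prefix after cleaning) so the browser cannot escape the download folder.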
        // 3) Read the directory entries
        entries, err := os.ReadDir(current)
        if err != nil {
            http.Error(w, "Impossible de lire le dossier", http.StatusInternalServerError)
            return
        }
        // 4) Prepare the folder and media lists
        type dirView struct {
            Name    string
            SubPath string
        }
        var dirs []dirView
        var medias []mediaItemView
        thumbDir := filepath.Join("static", "thumbs")
        os.MkdirAll(thumbDir, 0755)
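        // Thumbnails are written under ./static/thumbs relative to the working
        // directory and referenced below as /static/thumbs/..., which assumes the
        // static/ folder is served by the application at that URL prefix.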
        for _, e := range entries {
            name := e.Name()
            full := filepath.Join(current, name)
            if e.IsDir() {
                // sub-folder: expose it with its path relative to pd.Path
                dirs = append(dirs, dirView{
                    Name:    name,
                    SubPath: filepath.ToSlash(filepath.Join(sub, name)),
                })
            } else {
                ext := strings.ToLower(filepath.Ext(name))
                if !allowed[ext] {
                    continue
                }
                view := mediaItemView{Title: name, FilePath: full}
                // videos: probe metadata (duration, dimensions) and grab a thumbnail
                if ext == ".mkv" || ext == ".avi" || ext == ".mp4" || ext == ".mov" {
                    ctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
                    info, err := probe(ctx, full)
                    cancel()
                    if err == nil {
                        if d, err := strconv.ParseFloat(info.Format.Duration, 64); err == nil {
                            view.Duration = int64(d)
                        }
                        for _, s := range info.Streams {
                            if s.CodecType == "video" {
                                view.Width, view.Height = s.Width, s.Height
                                break
                            }
                        }
                    }
                    // screenshot: if no thumbnail exists yet, capture a single frame
                    // at the 5-second mark (equivalent to running, e.g.:
                    // ffmpeg -ss 5 -i <file> -frames:v 1 static/thumbs/<name>.jpg)
                    base := strings.TrimSuffix(name, ext)
                    thumbName := base + ".jpg"
                    thumbPath := filepath.Join(thumbDir, thumbName)
                    if _, err := os.Stat(thumbPath); os.IsNotExist(err) {
                        exec.Command("ffmpeg", "-ss", "5", "-i", full, "-frames:v", "1", thumbPath).Run()
                    }
                    view.ThumbURL = "/static/thumbs/" + thumbName
                } else {
                    // generic icons for images/PDF/EPUB/CBZ
                    view.ThumbURL = "/static/icons/" + ext[1:] + ".svg"
                }
                medias = append(medias, view)
            }
        }
        // 5) Render the media_list partial
        renderPartial(w, "media_list", map[string]interface{}{
            "PathID":     pid,
            "CurrentSub": sub,
            "Dirs":       dirs,
            "MediaItems": medias,
        })
    }
}
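// A minimal wiring sketch (the route shape is an assumption; the real router
// setup lives elsewhere in the project):
//
//	r := mux.NewRouter()
//	r.HandleFunc("/paths/{id}/media", PathMedia(db)).Methods(http.MethodGet)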
// MediaDetail renders the media detail page + player
func MediaDetail(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
@@ -940,36 +1035,7 @@ func Stream(db *gorm.DB) http.HandlerFunc {
}
}
}
// ffprobeOut mirrors the "format" and "streams" sections of ffprobe's JSON output
type ffprobeOut struct {
    Format struct {
        Duration string `json:"duration"`
    } `json:"format"`
    Streams []struct {
        CodecType string `json:"codec_type"`
        Width     int    `json:"width,omitempty"`
        Height    int    `json:"height,omitempty"`
    } `json:"streams"`
}
// probe calls ffprobe and parses its JSON output
func probe(ctx context.Context, file string) (*ffprobeOut, error) {
    cmd := exec.CommandContext(ctx,
        "ffprobe", "-v", "error",
        "-print_format", "json",
        "-show_format", "-show_streams",
        file,
    )
    out, err := cmd.Output()
    if err != nil {
        return nil, err
    }
    var info ffprobeOut
    if err := json.Unmarshal(out, &info); err != nil {
        return nil, err
    }
    return &info, nil
}