Fix download display bug

This commit is contained in:
cangui 2025-06-15 17:21:11 +02:00
parent 10c265141a
commit 3c3c768d38
14 changed files with 633 additions and 502 deletions

1
.gitignore vendored Normal file
View File

@ -0,0 +1 @@
/upload/

View File

@ -8,16 +8,21 @@ import (
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
)
func InitDB() *gorm.DB {
dbName := "shelfly_db.db"
// Open a connection to the database
db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})
//db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{})
db, err := gorm.Open(sqlite.Open(dbName), &gorm.Config{
Logger: logger.Default.LogMode(logger.Info), // to see which SQL types are used
})
if err != nil {
panic("failed to connect database")
}
//db.Migrator().DropTable(&download.DownloadJob{}) // ⚠️ temporary
// Migrate the schema
db.AutoMigrate(

View File

@ -2,12 +2,16 @@ package download
import (
"app/shelfly/internal/debridlink"
"app/shelfly/internal/models"
"errors"
"fmt"
"io"
"log"
"net/http"
"os"
"path/filepath"
"regexp"
"strings"
"sync"
"time"
@ -15,20 +19,20 @@ import (
)
type DownloadJob struct {
ID string `gorm:"primaryKey;column:id"`
Link string `gorm:"column:link"`
Name string `gorm:"column:name"`
Status string `gorm:"column:status"` // waiting, running, done, failed, paused
PathID uint `gorm:"column:path_id"`
Size int64 `gorm:"column:size"`
Host string `gorm:"column:host"`
Progress int `gorm:"column:progress"` // 0-100
StreamURL string `gorm:"column:stream_url"` // <- new field
CreatedAt time.Time `gorm:"autoCreateTime"`
UpdatedAt time.Time `gorm:"autoUpdateTime"`
ID string `gorm:"primaryKey;column:id"`
Link string `gorm:"column:link"`
Name string `gorm:"column:name"`
Status string `gorm:"column:status"`
PathID int `gorm:"column:path_id"` // 👈 int, not uint
Size int64 `gorm:"column:size"`
Host string `gorm:"column:host"`
Progress int `gorm:"column:progress"` // 👈 int
StreamURL string `gorm:"column:stream_url"`
Speed int `gorm:"column:speed;default:0"` // speed in KB/s
CreatedAt time.Time `gorm:"autoCreateTime"`
UpdatedAt time.Time `gorm:"autoUpdateTime"`
}
var (
jobs = make(map[string]*DownloadJob)
jobsMu sync.Mutex
@ -36,12 +40,50 @@ var (
// Registers a job in memory and in the database
func RegisterJobWithDB(job *DownloadJob, db *gorm.DB) error {
var existing DownloadJob
// Look up the existing job WITHOUT soft deletes, in case they were enabled by mistake
err := db.Unscoped().First(&existing, "id = ?", job.ID).Error
if err == nil {
// The job already exists, update it
log.Printf("[INFO] Mise à jour du job existant : %s\n", job.ID)
err = db.Model(&existing).Updates(map[string]interface{}{
"link": job.Link,
"name": job.Name,
"status": job.Status,
"path_id": job.PathID,
"size": job.Size,
"host": job.Host,
"progress": job.Progress,
"stream_url": job.StreamURL,
"updated_at": time.Now(),
}).Error
if err != nil {
log.Printf("[ERROR] Échec de la mise à jour : %v\n", err)
return err
}
} else if errors.Is(err, gorm.ErrRecordNotFound) {
// The job does not exist, create it
if err := db.Create(job).Error; err != nil {
log.Printf("[ERROR] Insertion échouée : %v\n", err)
return err
}
log.Printf("[INFO] Nouveau job enregistré : %s\n", job.ID)
} else {
// An unexpected error
log.Printf("[ERROR] Erreur inattendue lors de la recherche du job : %v\n", err)
return err
}
// Update in memory
jobsMu.Lock()
jobs[job.ID] = job
jobsMu.Unlock()
log.Printf("[JOB] Enregistré : %s (%s)\n", job.Name, job.ID)
return db.Create(job).Error
return nil
}
// Loads all jobs from the database into memory (at startup)
@ -74,6 +116,7 @@ func UpdateJobStatus(id string, status string, db *gorm.DB) {
_ = db.Save(job)
}
}
Broadcast()
}
// Updates a job's progress and persists it
@ -91,27 +134,174 @@ func UpdateJobProgress(id string, progress int, db *gorm.DB) {
}
// Deletes a job (from memory and from the database)
func DeleteJob(id string) {
func DeleteJob(id string, db *gorm.DB) error {
// Delete from memory
jobsMu.Lock()
defer jobsMu.Unlock()
delete(jobs, id)
jobsMu.Unlock()
// Delete from the database
if err := db.Delete(&DownloadJob{}, "id = ?", id).Error; err != nil {
log.Printf("[ERROR] Échec de suppression du job en base : %v\n", err)
return err
}
log.Printf("[JOB] Supprimé : %s\n", id)
return nil
}
// Lists all jobs
func ListJobs() []*DownloadJob {
jobsMu.Lock()
defer jobsMu.Unlock()
list := make([]*DownloadJob, 0, len(jobs))
for _, job := range jobs {
list = append(list, job)
func ListJobs(db *gorm.DB) []*DownloadJob {
var jobsFromDB []*DownloadJob
if err := db.Order("created_at desc").Find(&jobsFromDB).Error; err != nil {
log.Printf("[ERROR] Impossible de charger les jobs depuis la base : %v\n", err)
return []*DownloadJob{}
}
return list
return jobsFromDB
}
const downloadDir = "./downloads"
func StartDownload(job *DownloadJob, downloadURL string, client *debridlink.Client, db *gorm.DB) {
UpdateJobStatus(job.ID, "downloading", db)
var path models.PathDownload
if err := db.First(&path, job.PathID).Error; err != nil {
UpdateJobStatus(job.ID, "failed", db)
return
}
resp, err := http.Head(downloadURL)
if err != nil || resp.StatusCode != http.StatusOK {
UpdateJobStatus(job.ID, "failed", db)
return
}
size := resp.ContentLength
if size <= 0 {
UpdateJobStatus(job.ID, "failed", db)
return
}
acceptRanges := resp.Header.Get("Accept-Ranges")
if acceptRanges != "bytes" {
log.Println("[INFO] Serveur ne supporte pas Range, fallback single thread")
StartDownloadSingleThread(job, downloadURL, db, path.Path)
return
}
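// The file is split into fixed-size segments downloaded in parallel with HTTP
// Range requests; each segment goes to its own temp file and the parts are
// merged in order once all goroutines have finished.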
const numSegments = 4
segmentSize := size / numSegments
tmpFiles := make([]string, numSegments)
wg := sync.WaitGroup{}
progressChan := make(chan int64, 100)
done := make(chan bool)
// Progress + speed
var downloaded int64
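// This goroutine aggregates the byte counts sent on progressChan by the
// segment workers; once per second it derives the speed in KB/s, persists
// progress and speed on the job row, and broadcasts an SSE refresh.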
go func() {
var lastTotal int64 = 0
lastUpdate := time.Now()
ticker := time.NewTicker(1 * time.Second)
defer ticker.Stop()
for {
select {
case n := <-progressChan:
downloaded += n
case <-ticker.C:
elapsed := time.Since(lastUpdate).Seconds()
if elapsed > 0 {
speed := int(float64(downloaded-lastTotal) / elapsed / 1024) // in KB/s
lastTotal = downloaded
lastUpdate = time.Now()
progress := int((downloaded * 100) / size)
// Update the database row
db.Model(&DownloadJob{}).Where("id = ?", job.ID).Updates(map[string]interface{}{
"progress": progress,
"speed": speed,
})
Broadcast()
}
case <-done:
return
}
}
}()
// Parallel download
for i := 0; i < numSegments; i++ {
start := int64(i) * segmentSize
end := start + segmentSize - 1
if i == numSegments-1 {
end = size - 1
}
tmpPath := filepath.Join(os.TempDir(), fmt.Sprintf("%s.part%d", job.ID, i))
tmpFiles[i] = tmpPath
wg.Add(1)
go func(start, end int64, tmpPath string) {
defer wg.Done()
err := downloadSegment(downloadURL, start, end, tmpPath, progressChan)
if err != nil {
log.Printf("[ERROR] Segment %d-%d échoué : %v\n", start, end, err)
}
}(start, end, tmpPath)
}
wg.Wait()
close(done)
// Merge the segments
safeName := sanitizeFileName(job.Name)
finalPath := generateUniqueFilePath(path.Path, safeName)
out, err := os.Create(finalPath)
if err != nil {
UpdateJobStatus(job.ID, "failed", db)
return
}
defer out.Close()
for _, tmpPath := range tmpFiles {
part, err := os.Open(tmpPath)
if err != nil {
UpdateJobStatus(job.ID, "failed", db)
return
}
_, copyErr := io.Copy(out, part)
part.Close()
os.Remove(tmpPath)
if copyErr != nil {
UpdateJobStatus(job.ID, "failed", db)
return
}
}
UpdateJobProgress(job.ID, 100, db)
UpdateJobStatus(job.ID, "done", db)
log.Printf("[OK] Fichier téléchargé : %s\n", finalPath)
}
// generateUniqueFilePath appends a suffix if the file already exists
func generateUniqueFilePath(basePath, fileName string) string {
finalPath := filepath.Join(basePath, fileName)
if _, err := os.Stat(finalPath); os.IsNotExist(err) {
return finalPath
}
base := strings.TrimSuffix(fileName, filepath.Ext(fileName))
ext := filepath.Ext(fileName)
counter := 1
for {
newName := fmt.Sprintf("%s (%d)%s", base, counter, ext)
newPath := filepath.Join(basePath, newName)
if _, err := os.Stat(newPath); os.IsNotExist(err) {
return newPath
}
counter++
}
}
func StartDownloadSingleThread(job *DownloadJob, downloadURL string, db *gorm.DB, basePath string) {
UpdateJobStatus(job.ID, "running", db)
resp, err := http.Get(downloadURL)
@ -128,13 +318,14 @@ func StartDownload(job *DownloadJob, downloadURL string, client *debridlink.Clie
return
}
// Create the destination file
if err := os.MkdirAll(downloadDir, os.ModePerm); err != nil {
log.Printf("[ERROR] Création du dossier %s échouée : %v\n", downloadDir, err)
// Create the directory if needed
if err := os.MkdirAll(basePath, os.ModePerm); err != nil {
log.Printf("[ERROR] Création du dossier %s échouée : %v\n", basePath, err)
UpdateJobStatus(job.ID, "failed", db)
return
}
destPath := filepath.Join(downloadDir, sanitizeFileName(job.Name))
destPath := filepath.Join(basePath, sanitizeFileName(job.Name))
outFile, err := os.Create(destPath)
if err != nil {
log.Printf("[ERROR] Impossible de créer le fichier : %v\n", err)
@ -143,13 +334,12 @@ func StartDownload(job *DownloadJob, downloadURL string, client *debridlink.Clie
}
defer outFile.Close()
// Total size
// Compute the total size
totalSize := resp.ContentLength
if totalSize <= 0 && job.Size > 0 {
totalSize = job.Size
}
// Download with progress tracking
buf := make([]byte, 32*1024) // 32KB
var downloaded int64
lastUpdate := time.Now()
@ -173,7 +363,7 @@ func StartDownload(job *DownloadJob, downloadURL string, client *debridlink.Clie
return
}
// Update the progress every 500ms
// Update the progress
if time.Since(lastUpdate) > 500*time.Millisecond && totalSize > 0 {
progress := int((downloaded * 100) / totalSize)
UpdateJobProgress(job.ID, progress, db)
@ -181,14 +371,85 @@ func StartDownload(job *DownloadJob, downloadURL string, client *debridlink.Clie
}
}
// 100% if we get here
UpdateJobProgress(job.ID, 100, db)
UpdateJobStatus(job.ID, "done", db)
log.Printf("[OK] Fichier téléchargé : %s\n", destPath)
log.Printf("[OK] Fichier téléchargé (single) : %s\n", destPath)
}
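// downloadSegment fetches the byte range [start, end] with an HTTP Range
// request, writes it to the temp file dest, and pushes every chunk size onto
// progressChan so the aggregator goroutine can track overall progress.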
func downloadSegment(url string, start, end int64, dest string, progressChan chan<- int64) error {
req, _ := http.NewRequest("GET", url, nil)
req.Header.Set("Range", fmt.Sprintf("bytes=%d-%d", start, end))
resp, err := http.DefaultClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
out, err := os.Create(dest)
if err != nil {
return err
}
defer out.Close()
buf := make([]byte, 32*1024)
for {
n, err := resp.Body.Read(buf)
if n > 0 {
if _, err := out.Write(buf[:n]); err != nil {
return err
}
progressChan <- int64(n) // ← report progress
}
if err != nil {
if err == io.EOF {
break
}
return err
}
}
return nil
}
func sanitizeFileName(name string) string {
re := regexp.MustCompile(`[^\w\-.]`)
return re.ReplaceAllString(name, "_")
}
//***//
var (
subscribers = make(map[chan struct{}]struct{})
subscribersMu sync.Mutex
)
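// Each subscriber owns a buffered channel (capacity 1) kept in the map above;
// Broadcast uses a non-blocking send, so a slow SSE client never blocks the
// download goroutines that call it.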
// Subscribe returns a channel to be released by the SSE client (via Unsubscribe)
func Subscribe() chan struct{} {
ch := make(chan struct{}, 1)
subscribersMu.Lock()
subscribers[ch] = struct{}{}
subscribersMu.Unlock()
return ch
}
// Unsubscribe removes the channel and closes it
func Unsubscribe(ch chan struct{}) {
subscribersMu.Lock()
delete(subscribers, ch)
subscribersMu.Unlock()
close(ch)
}
// Broadcast notifies all subscribers
func Broadcast() {
subscribersMu.Lock()
defer subscribersMu.Unlock()
for ch := range subscribers {
select {
case ch <- struct{}{}:
log.Println("Broadcast envoyé à un client")
default:
log.Println("Client bloqué, message ignoré")
}
}
}
//***//

View File

@ -34,6 +34,7 @@ func RoutesPublic(r *mux.Router, bd *gorm.DB) {
// API endpoint for logging in
r.HandleFunc("/api/login", login.LoginHandler(bd)).Methods("POST")
r.HandleFunc("/api/scan/{id}", library.ScanFolder(bd)).Methods("GET")
r.HandleFunc("/api/download/stream", renders.HandleJobsStream(bd))
}
@ -61,8 +62,9 @@ func RoutesProtected(r *mux.Router, bd *gorm.DB) {
r.HandleFunc("/api/download/start/{id}", renders.HandleStartJob(bd)).Methods("POST")
r.HandleFunc("/api/download/pause/{id}", renders.HandlePauseJob).Methods("POST")
r.HandleFunc("/api/download/resume/{id}", renders.HandleResumeJob(bd)).Methods("POST")
r.HandleFunc("/api/download/delete/{id}", renders.HandleDeleteJob).Methods("DELETE")
r.HandleFunc("/api/download/delete/{id}", renders.HandleDeleteJob(bd)).Methods("DELETE")
r.HandleFunc("/api/download/delete-multiple", renders.HandleDeleteMultipleJobs(bd)).Methods("POST")
// API user
r.HandleFunc("/api/user/create", users.CreateUser(bd)).Methods("POST")

View File

@ -6,9 +6,12 @@ import (
"app/shelfly/internal/models"
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"strconv"
"strings"
"sync"
"text/template"
"time"
@ -245,19 +248,28 @@ func PollStatusHandler(db *gorm.DB) http.HandlerFunc {
}
func GoDownload2(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs()
w.Header().Set("HX-Trigger", "forceUpdate")
log.Printf("GoDownload2")
jobs := download.ListJobs(db)
fmt.Printf("%+v\n", jobs)
var paths []models.PathDownload
db.Find(&paths)
data := map[string]interface{}{
"jobs": jobs,
"paths": paths,
"Now": time.Now(), // 👈 ajoute la clé "Now"
}
download.Broadcast()
renderTemplate(w, "godownloader_download", data)
}
}
func HandleAddJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
if err := r.ParseForm(); err != nil {
@ -302,7 +314,7 @@ func HandleAddJob(db *gorm.DB) http.HandlerFunc {
Link: l.DownloadURL,
Name: l.Name,
Status: "waiting",
PathID: uint(parsedID),
PathID: parsedID,
Size: l.Size,
Host: l.Host,
Progress: 0, // required when a value is expected
@ -315,19 +327,120 @@ func HandleAddJob(db *gorm.DB) http.HandlerFunc {
// Update the partial view (jobs table)
data := map[string]interface{}{
"jobs": download.ListJobs(),
"jobs": download.ListJobs(db),
}
fmt.Printf("%+v\n", data)
download.Broadcast() // ← notify SSE clients right away
renderPartial(w, "downloads_table", data)
}
}
// HandleJobsStream opens the SSE stream and sends a "jobs" event on every change
// HandleJobsStream opens an SSE stream that only sends a "jobs" refresh signal
func HandleJobsStream(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
log.Println("[DEBUG] Nouvelle connexion au flux SSE")
flusher, ok := w.(http.Flusher)
if !ok {
log.Println("[ERROR] Flusher non supporté")
http.Error(w, "Streaming unsupported", http.StatusInternalServerError)
return
}
// SSE headers
w.Header().Set("Content-Type", "text/event-stream")
w.Header().Set("Cache-Control", "no-cache")
w.Header().Set("Connection", "keep-alive")
w.Header().Set("Access-Control-Allow-Origin", "*") // utile en dev ou si besoin CORS
ch := download.Subscribe()
log.Println("[DEBUG] Abonné au canal de téléchargement")
defer func() {
download.Unsubscribe(ch)
log.Println("[DEBUG] Désabonnement du canal de téléchargement")
}()
// Send a well-formed initial message (valid JSON)
log.Println("[DEBUG] Envoi du signal initial")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
// Streaming loop
for {
select {
case <-r.Context().Done():
log.Println("[DEBUG] Fermeture de la connexion SSE (client disconnect)")
return
case <-ch:
log.Println("[DEBUG] Événement reçu sur le canal — envoi SSE")
fmt.Fprintf(w, "event: jobs\ndata: {\"refresh\": true}\n\n")
flusher.Flush()
}
}
}
}
// sendSSEUpdate renders the partial's HTML and sends it as a "jobs" event
func sendSSEUpdate(w http.ResponseWriter, flusher http.Flusher, db *gorm.DB) error {
// 1) Create the pipe
pr, pw := io.Pipe()
// 2) In a goroutine, parse and execute the partial into pw
go func() {
defer pw.Close()
// Load the partial's template file
tmpl, err := template.ParseFiles("./templates/downloads_table.pages.tmpl")
if err != nil {
log.Printf("ParseFiles error: %v", err)
return
}
// Prepare the actual data
data := map[string]interface{}{
"jobs": download.ListJobs(db),
}
// Execute *only* the "downloads_table" define
if err := tmpl.ExecuteTemplate(pw, "downloads_table", data); err != nil {
log.Printf("ExecuteTemplate error: %v", err)
return
}
}()
// 3) Read the rendered HTML in full
htmlBytes, err := io.ReadAll(pr)
if err != nil {
return fmt.Errorf("lecture rendu échouée: %w", err)
}
// 4) Build the SSE message
// - event: jobs
// - each line of HTML prefixed with data:
fmt.Fprintf(w, "event: jobs\n")
for _, line := range strings.Split(string(htmlBytes), "\n") {
fmt.Fprintf(w, "data: %s\n", line)
}
// blank line to terminate the event
fmt.Fprintf(w, "\n")
// 5) Flush to push it to the client immediately
flusher.Flush()
return nil
}
func HandleListJobsPartial(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
jobs := download.ListJobs()
jobs := download.ListJobs(db)
data := map[string]interface{}{
"jobs": jobs,
}
//download.Broadcast()
renderPartial(w, "downloads_table", data)
}
}
@ -372,6 +485,7 @@ func HandleStartJob(db *gorm.DB) http.HandlerFunc {
// 4. Start the actual download
go download.StartDownload(job, job.Link, client, db)
download.Broadcast() // ← notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
@ -380,6 +494,8 @@ func HandleStartJob(db *gorm.DB) http.HandlerFunc {
func HandlePauseJob(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.UpdateJobStatus(id, "paused", nil)
download.Broadcast() // ← notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
@ -421,86 +537,38 @@ func HandleResumeJob(db *gorm.DB) http.HandlerFunc {
}
}
func HandleDeleteJob(w http.ResponseWriter, r *http.Request) {
func HandleDeleteJob(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
download.DeleteJob(id)
if err := download.DeleteJob(id, db); err != nil {
http.Error(w, "Suppression échouée", http.StatusInternalServerError)
return
}
go download.Broadcast() // ← notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}}
func HandleDeleteMultipleJobs(db *gorm.DB) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
http.Error(w, "Impossible de lire les IDs", http.StatusBadRequest)
return
}
ids := r.Form["ids[]"]
if len(ids) == 0 {
http.Error(w, "Aucun ID reçu", http.StatusBadRequest)
return
}
for _, id := range ids {
download.DeleteJob(id, db)
}
download.Broadcast() // ← notify SSE clients right away
w.WriteHeader(http.StatusNoContent)
}
}
// func GoDownloadSetting(db *gorm.DB) http.HandlerFunc {
// return func(w http.ResponseWriter, r *http.Request) {
// ctx := r.Context()
// // Initialise le client avec .env (client_id, secret)
// DebridClient := debridlink.NewClient(db)
// switch r.Method {
// case http.MethodPost:
// if err := r.ParseForm(); err != nil {
// http.Error(w, "Formulaire invalide", http.StatusBadRequest)
// return
// }
// host := r.FormValue("host")
// username := r.FormValue("username")
// password := r.FormValue("password")
// isActive := r.FormValue("is_active") == "on"
// // Authentification via Password Grant
// tokens, err := DebridClient.PasswordGrant(ctx, username, password)
// if err != nil {
// log.Println("[OAuth2] Erreur:", err)
// http.Error(w, "Authentification échouée", http.StatusUnauthorized)
// return
// }
// // Création du compte à enregistrer
// account := &debridlink.DebridAccount{
// Host: host,
// Username: username,
// Password: password,
// IsActive: isActive,
// AccessToken: tokens.AccessToken,
// RefreshToken: tokens.RefreshToken,
// ExpiresAt: time.Now().Add(time.Duration(tokens.ExpiresIn) * time.Second),
// }
// if err := db.Save(account).Error; err != nil {
// log.Println("[DB] Sauvegarde échouée:", err)
// http.Error(w, "Erreur DB", http.StatusInternalServerError)
// return
// }
// var accounts []debridlink.DebridAccount
// db.Order("id desc").Find(&accounts)
// if r.Header.Get("HX-Request") == "true" {
// renderPartial(w, "partials/accounts_table", map[string]interface{}{
// "accounts": accounts,
// })
// return
// }
// renderPartial(w, "godownloader_setting", map[string]interface{}{
// "accounts": accounts,
// })
// case http.MethodGet:
// var accounts []debridlink.DebridAccount
// db.Order("id desc").Find(&accounts)
// renderPartial(w, "godownloader_setting", map[string]interface{}{
// "accounts": accounts,
// })
// }
// }
// }
func renderPartial(w http.ResponseWriter, templ string, data map[string]interface{}) {
t, err := template.ParseFiles("./templates/" + templ + ".pages.tmpl")
if err != nil {

BIN
shelfly_db copy.sqlite Normal file

Binary file not shown.

Binary file not shown.

View File

@ -80,6 +80,6 @@ function hide(target){
}
document.addEventListener("htmx:afterOnLoad", function (event) {
console.log("Réponse du serveur :", event.detail.xhr.responseText);
// console.log("Réponse du serveur :", event.detail.xhr.responseText);
});

View File

@ -1,291 +1 @@
/*
Server Sent Events Extension
============================
This extension adds support for Server Sent Events to htmx. See /www/extensions/sse.md for usage instructions.
*/
(function() {
/** @type {import("../htmx").HtmxInternalApi} */
var api
htmx.defineExtension('sse', {
/**
* Init saves the provided reference to the internal HTMX API.
*
* @param {import("../htmx").HtmxInternalApi} api
* @returns void
*/
init: function(apiRef) {
// store a reference to the internal API.
api = apiRef
// set a function in the public API for creating new EventSource objects
if (htmx.createEventSource == undefined) {
htmx.createEventSource = createEventSource
}
},
getSelectors: function() {
return ['[sse-connect]', '[data-sse-connect]', '[sse-swap]', '[data-sse-swap]']
},
/**
* onEvent handles all events passed to this extension.
*
* @param {string} name
* @param {Event} evt
* @returns void
*/
onEvent: function(name, evt) {
var parent = evt.target || evt.detail.elt
switch (name) {
case 'htmx:beforeCleanupElement':
var internalData = api.getInternalData(parent)
// Try to remove an EventSource when elements are removed
var source = internalData.sseEventSource
if (source) {
api.triggerEvent(parent, 'htmx:sseClose', {
source,
type: 'nodeReplaced',
})
internalData.sseEventSource.close()
}
return
// Try to create EventSources when elements are processed
case 'htmx:afterProcessNode':
ensureEventSourceOnElement(parent)
}
}
})
/// ////////////////////////////////////////////
// HELPER FUNCTIONS
/// ////////////////////////////////////////////
/**
* createEventSource is the default method for creating new EventSource objects.
* it is hoisted into htmx.config.createEventSource to be overridden by the user, if needed.
*
* @param {string} url
* @returns EventSource
*/
function createEventSource(url) {
return new EventSource(url, { withCredentials: true })
}
/**
* registerSSE looks for attributes that can contain sse events, right
* now hx-trigger and sse-swap and adds listeners based on these attributes too
* the closest event source
*
* @param {HTMLElement} elt
*/
function registerSSE(elt) {
// Add message handlers for every `sse-swap` attribute
if (api.getAttributeValue(elt, 'sse-swap')) {
// Find closest existing event source
var sourceElement = api.getClosestMatch(elt, hasEventSource)
if (sourceElement == null) {
// api.triggerErrorEvent(elt, "htmx:noSSESourceError")
return null // no eventsource in parentage, orphaned element
}
// Set internalData and source
var internalData = api.getInternalData(sourceElement)
var source = internalData.sseEventSource
var sseSwapAttr = api.getAttributeValue(elt, 'sse-swap')
var sseEventNames = sseSwapAttr.split(',')
for (var i = 0; i < sseEventNames.length; i++) {
const sseEventName = sseEventNames[i].trim()
const listener = function(event) {
// If the source is missing then close SSE
if (maybeCloseSSESource(sourceElement)) {
return
}
// If the body no longer contains the element, remove the listener
if (!api.bodyContains(elt)) {
source.removeEventListener(sseEventName, listener)
return
}
// swap the response into the DOM and trigger a notification
if (!api.triggerEvent(elt, 'htmx:sseBeforeMessage', event)) {
return
}
swap(elt, event.data)
api.triggerEvent(elt, 'htmx:sseMessage', event)
}
// Register the new listener
api.getInternalData(elt).sseEventListener = listener
source.addEventListener(sseEventName, listener)
}
}
// Add message handlers for every `hx-trigger="sse:*"` attribute
if (api.getAttributeValue(elt, 'hx-trigger')) {
// Find closest existing event source
var sourceElement = api.getClosestMatch(elt, hasEventSource)
if (sourceElement == null) {
// api.triggerErrorEvent(elt, "htmx:noSSESourceError")
return null // no eventsource in parentage, orphaned element
}
// Set internalData and source
var internalData = api.getInternalData(sourceElement)
var source = internalData.sseEventSource
var triggerSpecs = api.getTriggerSpecs(elt)
triggerSpecs.forEach(function(ts) {
if (ts.trigger.slice(0, 4) !== 'sse:') {
return
}
var listener = function (event) {
if (maybeCloseSSESource(sourceElement)) {
return
}
if (!api.bodyContains(elt)) {
source.removeEventListener(ts.trigger.slice(4), listener)
}
// Trigger events to be handled by the rest of htmx
htmx.trigger(elt, ts.trigger, event)
htmx.trigger(elt, 'htmx:sseMessage', event)
}
// Register the new listener
api.getInternalData(elt).sseEventListener = listener
source.addEventListener(ts.trigger.slice(4), listener)
})
}
}
/**
* ensureEventSourceOnElement creates a new EventSource connection on the provided element.
* If a usable EventSource already exists, then it is returned. If not, then a new EventSource
* is created and stored in the element's internalData.
* @param {HTMLElement} elt
* @param {number} retryCount
* @returns {EventSource | null}
*/
function ensureEventSourceOnElement(elt, retryCount) {
if (elt == null) {
return null
}
// handle extension source creation attribute
if (api.getAttributeValue(elt, 'sse-connect')) {
var sseURL = api.getAttributeValue(elt, 'sse-connect')
if (sseURL == null) {
return
}
ensureEventSource(elt, sseURL, retryCount)
}
registerSSE(elt)
}
function ensureEventSource(elt, url, retryCount) {
var source = htmx.createEventSource(url)
source.onerror = function(err) {
// Log an error event
api.triggerErrorEvent(elt, 'htmx:sseError', { error: err, source })
// If parent no longer exists in the document, then clean up this EventSource
if (maybeCloseSSESource(elt)) {
return
}
// Otherwise, try to reconnect the EventSource
if (source.readyState === EventSource.CLOSED) {
retryCount = retryCount || 0
retryCount = Math.max(Math.min(retryCount * 2, 128), 1)
var timeout = retryCount * 500
window.setTimeout(function() {
ensureEventSourceOnElement(elt, retryCount)
}, timeout)
}
}
source.onopen = function(evt) {
api.triggerEvent(elt, 'htmx:sseOpen', { source })
if (retryCount && retryCount > 0) {
const childrenToFix = elt.querySelectorAll("[sse-swap], [data-sse-swap], [hx-trigger], [data-hx-trigger]")
for (let i = 0; i < childrenToFix.length; i++) {
registerSSE(childrenToFix[i])
}
// We want to increase the reconnection delay for consecutive failed attempts only
retryCount = 0
}
}
api.getInternalData(elt).sseEventSource = source
var closeAttribute = api.getAttributeValue(elt, "sse-close");
if (closeAttribute) {
// close eventsource when this message is received
source.addEventListener(closeAttribute, function() {
api.triggerEvent(elt, 'htmx:sseClose', {
source,
type: 'message',
})
source.close()
});
}
}
/**
* maybeCloseSSESource confirms that the parent element still exists.
* If not, then any associated SSE source is closed and the function returns true.
*
* @param {HTMLElement} elt
* @returns boolean
*/
function maybeCloseSSESource(elt) {
if (!api.bodyContains(elt)) {
var source = api.getInternalData(elt).sseEventSource
if (source != undefined) {
api.triggerEvent(elt, 'htmx:sseClose', {
source,
type: 'nodeMissing',
})
source.close()
// source = null
return true
}
}
return false
}
/**
* @param {HTMLElement} elt
* @param {string} content
*/
function swap(elt, content) {
api.withExtensions(elt, function(extension) {
content = extension.transformResponse(content, null, elt)
})
var swapSpec = api.getSwapSpecification(elt)
var target = api.getTarget(elt)
api.swap(target, content, swapSpec)
}
function hasEventSource(node) {
return api.getInternalData(node).sseEventSource != null
}
})()
(function(){var g;htmx.defineExtension("sse",{init:function(e){g=e;if(htmx.createEventSource==undefined){htmx.createEventSource=t}},getSelectors:function(){return["[sse-connect]","[data-sse-connect]","[sse-swap]","[data-sse-swap]"]},onEvent:function(e,t){var r=t.target||t.detail.elt;switch(e){case"htmx:beforeCleanupElement":var n=g.getInternalData(r);var s=n.sseEventSource;if(s){g.triggerEvent(r,"htmx:sseClose",{source:s,type:"nodeReplaced"});n.sseEventSource.close()}return;case"htmx:afterProcessNode":i(r)}}});function t(e){return new EventSource(e,{withCredentials:true})}function a(n){if(g.getAttributeValue(n,"sse-swap")){var s=g.getClosestMatch(n,v);if(s==null){return null}var e=g.getInternalData(s);var a=e.sseEventSource;var t=g.getAttributeValue(n,"sse-swap");var r=t.split(",");for(var i=0;i<r.length;i++){const u=r[i].trim();const c=function(e){if(l(s)){return}if(!g.bodyContains(n)){a.removeEventListener(u,c);return}if(!g.triggerEvent(n,"htmx:sseBeforeMessage",e)){return}f(n,e.data);g.triggerEvent(n,"htmx:sseMessage",e)};g.getInternalData(n).sseEventListener=c;a.addEventListener(u,c)}}if(g.getAttributeValue(n,"hx-trigger")){var s=g.getClosestMatch(n,v);if(s==null){return null}var e=g.getInternalData(s);var a=e.sseEventSource;var o=g.getTriggerSpecs(n);o.forEach(function(t){if(t.trigger.slice(0,4)!=="sse:"){return}var r=function(e){if(l(s)){return}if(!g.bodyContains(n)){a.removeEventListener(t.trigger.slice(4),r)}htmx.trigger(n,t.trigger,e);htmx.trigger(n,"htmx:sseMessage",e)};g.getInternalData(n).sseEventListener=r;a.addEventListener(t.trigger.slice(4),r)})}}function i(e,t){if(e==null){return null}if(g.getAttributeValue(e,"sse-connect")){var r=g.getAttributeValue(e,"sse-connect");if(r==null){return}n(e,r,t)}a(e)}function n(r,e,n){var s=htmx.createEventSource(e);s.onerror=function(e){g.triggerErrorEvent(r,"htmx:sseError",{error:e,source:s});if(l(r)){return}if(s.readyState===EventSource.CLOSED){n=n||0;n=Math.max(Math.min(n*2,128),1);var t=n*500;window.setTimeout(function(){i(r,n)},t)}};s.onopen=function(e){g.triggerEvent(r,"htmx:sseOpen",{source:s});if(n&&n>0){const t=r.querySelectorAll("[sse-swap], [data-sse-swap], [hx-trigger], [data-hx-trigger]");for(let e=0;e<t.length;e++){a(t[e])}n=0}};g.getInternalData(r).sseEventSource=s;var t=g.getAttributeValue(r,"sse-close");if(t){s.addEventListener(t,function(){g.triggerEvent(r,"htmx:sseClose",{source:s,type:"message"});s.close()})}}function l(e){if(!g.bodyContains(e)){var t=g.getInternalData(e).sseEventSource;if(t!=undefined){g.triggerEvent(e,"htmx:sseClose",{source:t,type:"nodeMissing"});t.close();return true}}return false}function f(t,r){g.withExtensions(t,function(e){r=e.transformResponse(r,null,t)});var e=g.getSwapSpecification(t);var n=g.getTarget(t);g.swap(n,r,e)}function v(e){return g.getInternalData(e).sseEventSource!=null}})();

View File

@ -1,42 +1,36 @@
<div id="downloads-table" hx-get="/api/download/all" hx-trigger="every 2s" hx-swap="outerHTML">
<table class="table is-fullwidth is-striped">
<thead>
<tr>
<th>Fichier</th>
<th>Statut</th>
<th>Vitesse</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{{ range .jobs }}
<tr class="border-b {{ if eq .Status "error" }}bg-red-100 text-red-800{{ end }}">
<td class="px-2 py-1 text-sm">{{ .Name }}</td>
<td class="px-2 py-1 text-sm">{{ .Status }}</td>
<td class="px-2 py-1 text-sm">{{ .Progress }}</td>
<td class="px-2 py-1 text-sm">
{{ if eq .Status "waiting" }}
<button hx-post="/api/download/start/{{ .ID }}" class="text-indigo-600">⬇ Télécharger</button>
{{ end }}
{{ if eq .Status "paused" }}
<button hx-post="/api/download/resume/{{ .ID }}" class="text-blue-600">▶</button>
{{ else if eq .Status "downloading" }}
<button hx-post="/api/download/pause/{{ .ID }}" class="text-yellow-600">⏸</button>
{{ end }}
{{ if and (eq .Status "downloaded") .StreamURL }}
<a href="/stream/{{ .ID }}" target="_blank" class="text-green-600 ml-2">🎬 Stream</a>
<button onclick="navigator.clipboard.writeText('{{ .StreamURL }}')" class="text-gray-600 ml-1">📋</button>
<a href="{{ .DownloadLink }}" class="text-blue-600 ml-1" download>⬇</a>
{{ end }}
<button hx-delete="/api/download/delete/{{ .ID }}" class="text-red-600 ml-2">✖</button>
</td>
</tr>
{{ if eq .Status "error" }}
<tr class="bg-red-50 text-sm text-red-600">
<td colspan="4" class="px-2 py-1">Erreur : {{ .ErrorMsg }}</td>
</tr>
{{ range .jobs }}
<tr id="job-{{ .ID }}" class="border-b {{ if eq .Status "error" }}bg-red-100 text-red-800{{ end }}">
<td class="px-2 py-1 text-sm">{{ .Name }}</td>
<td class="px-2 py-1 text-sm">{{ .Status }}</td>
<td class="px-2 py-1 text-sm">{{ .Speed }}</td>
<td class="px-2 py-1 text-sm">
<progress class="progress is-info" value="{{ .Progress }}" max="100">{{ .Progress }}</progress>
</td>
<td class="px-2 py-1 text-sm">
{{ if eq .Status "waiting" }}
<button data-action="start-job" data-id="{{ .ID }}" class="text-indigo-600">⬇ Télécharger</button>
{{ end }}
{{ if eq .Status "paused" }}
<button data-action="resume-job" data-id="{{ .ID }}" class="text-blue-600">▶</button>
{{ else if eq .Status "running" }}
<button data-action="pause-job" data-id="{{ .ID }}" class="text-yellow-600">⏸</button>
{{ end }}
{{ if eq .Status "done" }}
{{ if .StreamURL }}
<a href="/stream/{{ .ID }}" target="_blank" class="text-green-600 ml-2">🎬 Stream</a>
<button onclick="navigator.clipboard.writeText('{{ .StreamURL }}')" class="text-gray-600 ml-1">📋</button>
{{ end }}
{{ end }}
</tbody>
</table>
</div>
{{ end }}
<button data-action="delete-job" data-id="{{ .ID }}" class="text-red-600 ml-2">✖</button>
</td>
</tr>
{{ if eq .Status "error" }}
<tr class="bg-red-50 text-sm text-red-600">
<td colspan="5" class="px-2 py-1">Erreur : {{ .ErrorMsg }}</td>
</tr>
{{ end }}
{{ end }}

View File

@ -1,10 +1,12 @@
<h1>Download</h1>
<div class="box">
<form hx-post="/api/download/add"
hx-trigger="submit"
hx-swap="none"
hx-on="htmx:afterRequest: this.reset()"
class="mb-4">
<form
hx-post="/api/download/add"
hx-trigger="submit"
hx-swap="none"
hx-on="htmx:afterRequest: this.reset()"
class="mb-4"
>
<div class="field">
<label class="label">Lien à débrider</label>
<div class="control">
@ -29,47 +31,134 @@
</div>
</div>
</form>
<div
hx-ext="sse"
sse-connect="/api/download/stream"
hx-on="
htmx:sseOpen: console.log('✅ SSE ouvert');
htmx:sseError: console.error('❌ SSE erreur', event.detail.error);
htmx:sseMessage:console.log('📨 SSE reçu', event.detail);
"
>
<table
id="downloads-table"
class="table is-fullwidth is-striped"
hx-trigger="load,sse:jobs"
> <thead>
<tr>
<th>Fichier</th>
<th>Statut</th>
<th>Vitesse</th>
<th>Progress</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="test"></tbody>
</table>
<div id="downloads-table" hx-get="/api/download/all" hx-trigger="every 2s" hx-swap="outerHTML">
<table class="table is-fullwidth is-striped">
<thead>
<tr>
<th>Fichier</th>
<th>Statut</th>
<th>Vitesse</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{{ range .jobs }}
<tr class="border-b {{ if eq .Status "error" }}bg-red-100 text-red-800{{ end }}">
<td class="px-2 py-1 text-sm">{{ .FileName }}</td>
<td class="px-2 py-1 text-sm">{{ .Status }}</td>
<td class="px-2 py-1 text-sm">{{ .Speed }}</td>
<td class="px-2 py-1 text-sm">
{{ if eq .Status "added" }}
<button hx-post="/api/download/start/{{ .ID }}" class="text-indigo-600">⬇ Télécharger</button>
{{ end }}
{{ if eq .Status "paused" }}
<button hx-post="/api/download/resume/{{ .ID }}" class="text-blue-600">▶</button>
{{ else if eq .Status "downloading" }}
<button hx-post="/api/download/pause/{{ .ID }}" class="text-yellow-600">⏸</button>
{{ end }}
{{ if and (eq .Status "downloaded") .StreamURL }}
<a href="/stream/{{ .ID }}" target="_blank" class="text-green-600 ml-2">🎬 Stream</a>
<button onclick="navigator.clipboard.writeText('{{ .StreamURL }}')" class="text-gray-600 ml-1">📋</button>
<a href="{{ .DownloadLink }}" class="text-blue-600 ml-1" download>⬇</a>
{{ end }}
<button hx-delete="/api/download/delete/{{ .ID }}" class="text-red-600 ml-2">✖</button>
</td>
</tr>
{{ if eq .Status "error" }}
<tr class="bg-red-50 text-sm text-red-600">
<td colspan="4" class="px-2 py-1">Erreur : {{ .ErrorMsg }}</td>
</tr>
{{ end }}
{{ end }}
</tbody>
</table>
</div>
</div>
</div>
<script>
const es = new EventSource("/api/download/stream");
es.addEventListener("jobs", async (e) => {
console.log("🧪 Event brut reçu es const:", e.data);
const tbody = document.getElementById("test");
if (!tbody) {
console.warn("❌ <tbody id='test'> non trouvé !");
return;
}
const url = `/api/download/all?t=${Date.now()}`;
console.log("🔁 Fetch direct :", url);
try {
const response = await fetch(url, {
headers: {
'HX-Request': 'true'
}
});
if (!response.ok) {
console.error("❌ Erreur serveur :", response.status);
return;
}
const html = await response.text();
tbody.innerHTML = html;
console.log("✅ Contenu injecté dans <tbody id='test'>");
} catch (err) {
console.error("❌ Erreur fetch :", err);
}
});
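// The SSE "jobs" event only carries {"refresh": true}; the actual rows are
// refreshed by re-fetching /api/download/all with an HX-Request header.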
es.onerror = e => console.error("❌ Erreur EventSource", e);
async function postJobAction(url) {
try {
const res = await fetch(url, {
method: "POST",
headers: { "HX-Request": "true" }
});
if (!res.ok) throw new Error("Échec POST : " + res.status);
console.log("✅ Action POST réussie :", url);
} catch (err) {
console.error("❌ Erreur action POST :", err);
}
}
// DELETE with a fade-out effect
async function deleteJobAnimated(jobId) {
const row = document.getElementById(`job-${jobId}`);
if (!row) return;
try {
const res = await fetch(`/api/download/delete/${jobId}`, {
method: "DELETE",
headers: { "HX-Request": "true" }
});
if (!res.ok) throw new Error("Échec DELETE");
// Animation fade out
row.style.transition = "opacity 0.4s ease, height 0.4s ease";
row.style.opacity = "0";
row.style.height = "0px";
setTimeout(() => row.remove(), 400);
} catch (err) {
console.error("❌ Erreur suppression :", err);
}
}
// Global dispatcher for the data-action buttons
document.addEventListener("click", function (e) {
const btn = e.target.closest("button[data-action]");
if (!btn) return;
const action = btn.dataset.action;
const jobId = btn.dataset.id;
switch (action) {
case "start-job":
postJobAction(`/api/download/start/${jobId}`);
break;
case "pause-job":
postJobAction(`/api/download/pause/${jobId}`);
break;
case "resume-job":
postJobAction(`/api/download/resume/${jobId}`);
break;
case "delete-job":
deleteJobAnimated(jobId);
break;
}
});
</script>

View File

@ -8,14 +8,15 @@
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0-beta3/css/all.min.css"
rel="stylesheet"
/>
<link
<script src="https://unpkg.com/htmx.org@2.0.4" integrity="sha384-HGfztofotfshcF7+8n44JQL2oJmowVChPTg48S+jvZoztPfvwD79OC/LTtG6dMp+" crossorigin="anonymous"></script>
<script src="https://unpkg.com/htmx-ext-sse@2.2.3" integrity="sha384-Y4gc0CK6Kg+hmulDc6rZPJu0tqvk7EWlih0Oh+2OkAi1ZDlCbBDCQEE2uVk472Ky" crossorigin="anonymous"></script> <link
rel="stylesheet"
href="/templates/assets/css/bulma.min.css">
<link
rel="stylesheet"
href="/templates/assets/css/styles.css">
<script src="/templates/assets/js/htmx.js" ></script>
<script src="/templates/assets/js/sse.js"></script>
<!-- <script src="/templates/assets/js/htmx.js" ></script> -->
<!-- <script src="/templates/assets/js/sse.js"></script> -->
<script src="/templates/assets/js/index.js" ></script>
<script src="/templates/assets/js/json-enc.js"></script>
<script src="/templates/assets/js/ws.js"></script>

BIN
tmp/main

Binary file not shown.

View File

@ -1 +1 @@
exit status 1
exit status 1