From c23206746ead77bfab5209c34647dc6e7a4414c2 Mon Sep 17 00:00:00 2001
From: cangui
Date: Sat, 16 Aug 2025 19:35:22 +0200
Subject: [PATCH] UP

---
 docker-compose.yml | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index 31e2e45..ed23195 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,20 +1,23 @@
-# docker-compose.yml
 version: "3.9"
+
 services:
   localai:
-    image: localai/localai:latest-aio-cpu
+    build:
+      context: .
+      dockerfile: Dockerfile
     container_name: localai
-    ports: ["8085:8080"]
+    ports:
+      - "8085:8080" # accès direct: http://:8085
     environment:
       - MODELS_PATH=/models
-      - THREADS=8
-      - CONTEXT_SIZE=2048
+      - THREADS=8 # adapte au nombre de cœurs
+      - CONTEXT_SIZE=2048 # augmente si tu as plus de RAM
       - DEBUG=false
-      - DISABLE_DOWNLOAD=true # backends déjà inclus
-    command: ["--models-path","/models","--address","0.0.0.0:8080"]
+      - DISABLE_DOWNLOAD=false # laisse LocalAI récupérer le backend llama-cpp au 1er run
+    command: ["--models-path", "/models", "--address", "0.0.0.0:8080"]
     healthcheck:
-      test: ["CMD","curl","-fsS","http://localhost:8080/v1/models"]
+      test: ["CMD", "curl", "-fsS", "http://localhost:8080/v1/models"]
       interval: 10s
       timeout: 5s
       retries: 10
-    restart: unless-stopped
\ No newline at end of file
+    restart: unless-stopped