From 94aec101728ae7d97df73ab7e3edcb7865d60346 Mon Sep 17 00:00:00 2001
From: cangui
Date: Sat, 16 Aug 2025 18:39:08 +0200
Subject: [PATCH] up

---
 docker-compose.yml | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/docker-compose.yml b/docker-compose.yml
index ed23195..c1fa3d2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,22 +1,19 @@
+# docker-compose.yml
 version: "3.9"
-
 services:
   localai:
-    build:
-      context: .
-      dockerfile: Dockerfile
+    image: localai/localai:latest-aio-cpu
     container_name: localai
-    ports:
-      - "8085:8080" # accès direct: http://:8085
+    ports: ["8085:8080"]
     environment:
       - MODELS_PATH=/models
-      - THREADS=8 # adapte au nombre de cœurs
-      - CONTEXT_SIZE=2048 # augmente si tu as plus de RAM
+      - THREADS=8
+      - CONTEXT_SIZE=2048
       - DEBUG=false
-      - DISABLE_DOWNLOAD=false # laisse LocalAI récupérer le backend llama-cpp au 1er run
-    command: ["--models-path", "/models", "--address", "0.0.0.0:8080"]
+      - DISABLE_DOWNLOAD=true # backends déjà inclus
+    command: ["--models-path","/models","--address","0.0.0.0:8080"]
     healthcheck:
-      test: ["CMD", "curl", "-fsS", "http://localhost:8080/v1/models"]
+      test: ["CMD","curl","-fsS","http://localhost:8080/v1/models"]
       interval: 10s
       timeout: 5s
       retries: 10