---
# docker-compose.yml — build and run a LocalAI server container.
# The image is built from the local Dockerfile and exposed on host port 8085.
# NOTE: the legacy `version: "3.9"` attribute was removed — it is obsolete in
# the Compose Specification and modern `docker compose` warns on it.
services:
  localai:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: localai
    ports:
      - "8085:8080"  # direct access: http://<host>:8085
    environment:
      - MODELS_PATH=/models
      - THREADS=8  # tune to the number of CPU cores
      - CONTEXT_SIZE=2048  # raise if more RAM is available
      - DEBUG=false
      - DISABLE_DOWNLOAD=false  # let LocalAI fetch the llama-cpp backend on first run
    # Flags passed to the LocalAI entrypoint; listens on 8080 inside the container.
    command: ["--models-path", "/models", "--address", "0.0.0.0:8080"]
    healthcheck:
      # Healthy once the OpenAI-compatible /v1/models endpoint answers.
      test: ["CMD", "curl", "-fsS", "http://localhost:8080/v1/models"]
      interval: 10s
      timeout: 5s
      retries: 10
    restart: unless-stopped