# Files
# home-llm/docker-compose.yml
# 2025-08-11 21:19:36 -04:00
#
# 51 lines
# 1.2 KiB
# YAML
version: '3.8'  # NOTE: 'version' is obsolete under Compose v2 and ignored; kept for older tooling
services:
  # Ollama model server, exposed on the default API port.
  ollama:
    image: ollama/ollama
    container_name: ollama
    ports:
      - "11434:11434"
    # FIX: the original used 'deploy.restart_policy.condition: always'.
    # 'deploy' is only honored by Swarm ('docker stack deploy') — plain
    # 'docker compose up' ignores it — and 'always' is not a valid
    # restart_policy condition (valid: none, on-failure, any).
    # Use the service-level 'restart' key, matching the open-webui service.
    restart: always
    environment:
      - NVIDIA_VISIBLE_DEVICES=all
    runtime: nvidia
    volumes:
      # NOTE(review): the ollama image keeps models under /root/.ollama by
      # default — confirm .env sets HOME/OLLAMA_MODELS to /app/.ollama,
      # otherwise pulled models will not persist into this bind mount.
      - /home/llm/ollama:/app/.ollama
    env_file:
      - .env
open-webui:
build:
context: .
dockerfile: Dockerfile
container_name: open-webui
ports:
- "3000:3000"
depends_on:
- ollama
environment:
- NVIDIA_VISIBLE_DEVICES=all
- NVIDIA_DRIVER_CAPABILITIES=compute,utility
- WEBUI_SECRET_KEY=zZzXE9XxOx2561sICfe2Oscf/3LVr4ZrnGvv+fcTqsZlsdakWYrZCt8z8Uesh9Vf
- HOME=/app
- OLLAMA_MODELS=/app/.ollama/models
- OLLAMA_HOME=/app/.ollama
- OLLAMA_API_BASE_URL=http://ollama:11434
- HF_HOME=/app/.cache
- NODE_OPTIONS=--max_old_space_size=8192
- DATABASE_URL=sqlite:////app/backend/data/database.db
volumes:
- /home/llm/open-webui:/app/backend/data
- /home/llm/config:/app/config
- /home/llm/nltk_data:/app/nltk_data
- /home/llm/cache:/app/.cache
user: "1001:1001"
restart: always
runtime: nvidia
env_file:
- .env