services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: unless-stopped

    # Uses the host network stack (routing/DNS identical to the host, incl. br0)
    network_mode: "host"

    # Named volume holds downloaded models and server state
    volumes:
      - ollama:/root/.ollama

    environment:
      # How long models stay loaded in memory after last request
      OLLAMA_KEEP_ALIVE: "10m"
      # Optional: pin to GPU 0 if you ever have multiple
      # CUDA_VISIBLE_DEVICES: "0"

    # GPU access (Docker Compose) — requires the NVIDIA Container Toolkit on the host
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]

# Named volume so model data survives container recreation
volumes:
  ollama: