```yaml
services:
  llama_server:
    image: ollama/ollama:latest
    container_name: llama_server
    ports:
      - "11434:11434"
    volumes:
      - ./container_data:/root/.ollama:z
    privileged: true # Had to be done so nvidia can find gpus...
    devices:
      - nvidia.com/gpu=all
      # - /dev/kfd
      # - /dev/dri
```
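Once the container is up (`docker compose up -d`), the Ollama REST API should be reachable on the host through the `"11434:11434"` port mapping. The snippet below is a minimal sketch for checking that the server responds; it assumes Ollama's standard `/api/generate` endpoint and uses `llama3` as a placeholder for whatever model has already been pulled into the container.

```python
import json
import urllib.request

# Ollama listens on 11434 inside the container; the compose file maps it 1:1 to the host.
URL = "http://localhost:11434/api/generate"

payload = {
    "model": "llama3",  # placeholder: any model already pulled with `ollama pull`
    "prompt": "Say hello in one short sentence.",
    "stream": False,    # ask for a single JSON object instead of a token stream
}

req = urllib.request.Request(
    URL,
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)

with urllib.request.urlopen(req) as resp:
    body = json.loads(resp.read())
    print(body["response"])
```

Setting `stream` to `False` keeps the check simple: the server returns one JSON object with the full completion in its `response` field, so there is no need to parse streamed chunks just to confirm the container is serving requests.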