Update compose file to include NVIDIA and ROCm containers

This commit is contained in:
Daniel Weber 2025-02-28 21:08:38 -05:00
parent 331efc8f3f
commit f8f4623e44
2 changed files with 18 additions and 16 deletions

View File

@ -1,7 +1,7 @@
services:
llama_server:
llama_server_rocm:
image: ollama/ollama:rocm
container_name: llama_server
restart: always
ports:
- "11434:11434"
volumes:
@ -10,3 +10,19 @@ services:
- /dev/kfd
- /dev/dri
llama_server_nvidia:
image: ollama/ollama:latest
ports:
- "11434:11434"
volumes:
- ./container_data:/root/.ollama:z
privileged: true # Had to be done so nvidia can find gpus...
restart: always
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1 # alternatively, use `count: all` for all GPUs
capabilities: [gpu]

View File

@ -1,14 +0,0 @@
services:
llama_server:
image: ollama/ollama:latest
container_name: llama_server
ports:
- "11434:11434"
volumes:
- ./container_data:/root/.ollama:z
privileged: true # Had to be done so nvidia can find gpus...
devices:
- nvidia.com/gpu=all
# - /dev/kfd
# - /dev/dri