ollama-intel-gpu/docker-compose.ramalama.yml
version: "3.9"

services:
  ramalama:
    build:
      context: ./ramalama
      dockerfile: Dockerfile
    image: ramalama-ipex:local
    container_name: ramalama-ipex
    init: true
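    # Pass the host's DRM render nodes through so the container can reach the Intel GPU.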
    devices:
      - /dev/dri:/dev/dri
    security_opt:
      - seccomp=unconfined
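    # Persist downloaded models in a named volume; share /tmp with the host.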
    volumes:
      - ramalama-models:/var/lib/ramalama/store
      - /tmp:/tmp:rw
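    # oneAPI/SYCL: pin inference to the first Level Zero GPU and keep the kernel
    # cache across runs; the RAMALAMA_* flags signal that RamaLama is already
    # inside a container so it should not try to spawn its own.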
    environment:
      SYCL_DEVICE_FILTER: "level_zero:gpu:0"
      SYCL_CACHE_PERSISTENT: 1
      ZES_ENABLE_SYSMAN: 1
      RAMALAMA_NO_CONTAINER: "true"
      RAMALAMA_IN_CONTAINER: "true"
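    # Expose the in-container API (port 8080) on the host's Ollama-style port 11434.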
    ports:
      - "11434:8080"
    restart: unless-stopped
    tty: true

volumes:
  ramalama-models: {}
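
A minimal smoke test, assuming the served backend exposes an OpenAI-compatible API on the mapped port (as llama.cpp's llama-server, which RamaLama uses by default, does):

    docker compose -f docker-compose.ramalama.yml up -d --build
    curl http://localhost:11434/v1/models

If the model list comes back, the container built correctly and the port mapping works; GPU access can then be verified by checking that /dev/dri is visible inside the container.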