From e04b83d39d90bf99ffb81d6b256e76e77874204f Mon Sep 17 00:00:00 2001
From: eleiton
Date: Mon, 1 Dec 2025 23:20:55 +0100
Subject: [PATCH] Adding initial support for ramalama

---
 docker-compose.ramalama.yml | 30 ++++++++++++++++++++++++++++++
 ramalama/Dockerfile         | 13 +++++++++++++
 2 files changed, 43 insertions(+)
 create mode 100644 docker-compose.ramalama.yml
 create mode 100644 ramalama/Dockerfile

diff --git a/docker-compose.ramalama.yml b/docker-compose.ramalama.yml
new file mode 100644
index 0000000..6b4012a
--- /dev/null
+++ b/docker-compose.ramalama.yml
@@ -0,0 +1,30 @@
+services:
+  ramalama:
+    build:
+      context: ./ramalama
+      dockerfile: Dockerfile
+    image: ramalama-ipex:local
+    container_name: ramalama-ipex
+    init: true
+    devices:
+      - /dev/dri:/dev/dri
+    # NOTE(review): seccomp=unconfined disables syscall filtering for the whole
+    # container; confirm the Intel GPU runtime actually requires it.
+    security_opt:
+      - seccomp=unconfined
+    volumes:
+      - ramalama-models:/var/lib/ramalama/store
+      - /tmp:/tmp:rw
+    environment:
+      SYCL_DEVICE_FILTER: "level_zero:gpu:0"
+      SYCL_CACHE_PERSISTENT: 1
+      ZES_ENABLE_SYSMAN: 1
+      RAMALAMA_NO_CONTAINER: "true"
+      RAMALAMA_IN_CONTAINER: "true"
+    ports:
+      # host 11434 (Ollama's conventional port) -> container 8080
+      - "11434:8080"
+    restart: unless-stopped
+    tty: true
+
+volumes:
+  ramalama-models: {}
diff --git a/ramalama/Dockerfile b/ramalama/Dockerfile
new file mode 100644
index 0000000..77121f5
--- /dev/null
+++ b/ramalama/Dockerfile
@@ -0,0 +1,13 @@
+# syntax=docker/dockerfile:1
+
+# TODO(review): pin :latest to a specific tag or digest for reproducible builds.
+FROM quay.io/ramalama/intel-gpu:latest
+
+# The cache mount lives on the build host and never enters the image, so keep
+# the dnf cache for faster rebuilds; `dnf clean all` would only wipe that
+# shared cache without shrinking the image at all.
+RUN --mount=type=cache,target=/var/cache/dnf \
+    dnf install -y --setopt=keepcache=True ramalama
+
+# Placeholder PID 1 keeps the container alive; ramalama is invoked via `exec`.
+CMD ["sleep", "infinity"]