diff --git a/Dockerfile b/Dockerfile
index 69a065d..3253c79 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,12 +1,10 @@
 FROM intelanalytics/ipex-llm-inference-cpp-xpu:2.2.0-SNAPSHOT
-
-ENV ZES_ENABLE_SYSMAN=1
-ENV OLLAMA_HOST=0.0.0.0:11434
 
 RUN mkdir -p /llm/ollama; \
     cd /llm/ollama; \
     init-ollama;
-
 WORKDIR /llm/ollama
 
-ENTRYPOINT ["./ollama", "serve"]
+# --chmod avoids a separate chmod layer; the script is the container entrypoint.
+COPY --chmod=755 commands.sh /llm/ollama/commands.sh
+ENTRYPOINT ["/llm/ollama/commands.sh"]
diff --git a/commands.sh b/commands.sh
new file mode 100644
index 0000000..8d7df6d
--- /dev/null
+++ b/commands.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+# Container entrypoint: initialize the Intel GPU runtime, then run ollama.
+source ipex-llm-init --gpu --device "${DEVICE:-Arc}"
+# Preserve the setting the old Dockerfile exported (ENV ZES_ENABLE_SYSMAN=1)
+# so GPU device enumeration via Level Zero sysman keeps working.
+export ZES_ENABLE_SYSMAN=1
+export OLLAMA_HOST=0.0.0.0:11434
+cd /llm/ollama
+# exec so ollama replaces the shell as PID 1 and receives SIGTERM on `docker stop`.
+exec ./ollama serve
diff --git a/docker-compose.yml b/docker-compose.yml
index f06a16d..be050d2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,19 +1,16 @@
-version: "3.9"
 services:
   ollama-intel-gpu:
     build:
       context: .
       dockerfile: Dockerfile
     container_name: ollama-intel-gpu
-    image: ollama-intel-gpu:latest
     restart: always
     devices:
       - /dev/dri:/dev/dri
     volumes:
-      - /tmp/.X11-unix:/tmp/.X11-unix
       - ollama-intel-gpu:/root/.ollama
     environment:
-      - DISPLAY=${DISPLAY}
+      - DEVICE=Arc
   ollama-webui:
     image: ghcr.io/open-webui/open-webui
     container_name: ollama-webui