Update to latest ipex-llm dockerfile 20250211

Author: Matt Curfman
Date: 2025-02-11 20:40:41 -08:00
parent 1581a505fb
commit 765a8c09d7
3 changed files with 10 additions and 10 deletions
Dockerfile +4 -6
@@ -1,12 +1,10 @@
-FROM intelanalytics/ipex-llm-inference-cpp-xpu:2.2.0-SNAPSHOT
-ENV ZES_ENABLE_SYSMAN=1
-ENV OLLAMA_HOST=0.0.0.0:11434
+FROM intelanalytics/ipex-llm-inference-cpp-xpu:latest
 RUN mkdir -p /llm/ollama; \
     cd /llm/ollama; \
     init-ollama;
 WORKDIR /llm/ollama
-ENTRYPOINT ["./ollama", "serve"]
+COPY commands.sh /llm/ollama/commands.sh
+RUN ["chmod", "+x", "/llm/ollama/commands.sh"]
+ENTRYPOINT ["/llm/ollama/commands.sh"]
commands.sh +5
@@ -0,0 +1,5 @@
+#!/bin/bash
+source ipex-llm-init --gpu --device $DEVICE
+export OLLAMA_HOST=0.0.0.0:11434
+cd /llm/ollama
+./ollama serve
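commands.sh now owns what the old Dockerfile did with ENV: it sources ipex-llm-init for the device named in $DEVICE, exports OLLAMA_HOST, and starts the server. The script assumes $DEVICE is always set by the container environment; a slightly more defensive sketch (not part of this commit) could fail fast and exec the server so it receives container stop signals as PID 1:

#!/bin/bash
# Sketch of a hardened commands.sh (assumption, not the committed file).
# Fail fast if DEVICE is missing instead of passing an empty value
# to ipex-llm-init.
: "${DEVICE:?DEVICE must be set, e.g. Arc}"
source ipex-llm-init --gpu --device "$DEVICE"
export OLLAMA_HOST=0.0.0.0:11434
cd /llm/ollama || exit 1
# exec replaces the shell so ollama runs as PID 1 and gets signals directly.
exec ./ollama serve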
docker-compose.yml +1 -4
@@ -1,19 +1,16 @@
-version: "3.9"
 services:
   ollama-intel-gpu:
     build:
       context: .
       dockerfile: Dockerfile
     container_name: ollama-intel-gpu
     image: ollama-intel-gpu:latest
     restart: always
     devices:
       - /dev/dri:/dev/dri
     volumes:
-      - /tmp/.X11-unix:/tmp/.X11-unix
       - ollama-intel-gpu:/root/.ollama
     environment:
-      - DISPLAY=${DISPLAY}
+      - DEVICE=Arc
   ollama-webui:
     image: ghcr.io/open-webui/open-webui
     container_name: ollama-webui
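With DEVICE=Arc supplied through the compose environment instead of the X11/DISPLAY plumbing, bringing the stack up is unchanged. A quick end-to-end check (a sketch, assuming the Ollama port is published to the host; any ports: mapping sits outside this hunk, and /api/tags is Ollama's standard model-list endpoint):

# Rebuild against the new base image tag and start both services.
docker compose up -d --build

# Follow the server log to confirm ipex-llm-init ran for the Arc device.
docker compose logs -f ollama-intel-gpu

# The API should answer once the server is up.
curl http://localhost:11434/api/tags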