Update to latest ipex-llm dockerfile 20250211

This commit is contained in:
Matt Curfman
2025-02-11 20:40:41 -08:00
parent 1581a505fb
commit 765a8c09d7
3 changed files with 10 additions and 10 deletions
+4 -6
View File
@@ -1,12 +1,10 @@
FROM intelanalytics/ipex-llm-inference-cpp-xpu:2.2.0-SNAPSHOT FROM intelanalytics/ipex-llm-inference-cpp-xpu:latest
ENV ZES_ENABLE_SYSMAN=1
ENV OLLAMA_HOST=0.0.0.0:11434
RUN mkdir -p /llm/ollama; \
    cd /llm/ollama; \
    init-ollama;
WORKDIR /llm/ollama
ENTRYPOINT ["./ollama", "serve"] COPY commands.sh /llm/ollama/commands.sh
RUN ["chmod", "+x", "/llm/ollama/commands.sh"]
ENTRYPOINT ["/llm/ollama/commands.sh"]
+5
View File
@@ -0,0 +1,5 @@
#!/bin/bash
# Container entrypoint wrapper: set up the ipex-llm Intel GPU environment,
# then hand control to the Ollama server.

# Initialize GPU acceleration for the device type selected at run time.
# DEVICE is expected from the container environment (e.g. DEVICE=Arc, set
# in docker-compose); quoted to avoid word-splitting (SC2086).
source ipex-llm-init --gpu --device "$DEVICE"

# Bind the Ollama API to all interfaces on the default port.
export OLLAMA_HOST=0.0.0.0:11434

# Fail fast instead of launching from the wrong directory if the path
# is missing (SC2164).
cd /llm/ollama || exit 1

# exec replaces this shell so ollama becomes PID 1 and receives SIGTERM
# directly on `docker stop` instead of being orphaned behind bash.
exec ./ollama serve
+1 -4
View File
@@ -1,19 +1,16 @@
version: "3.9"
services:
  ollama-intel-gpu:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: ollama-intel-gpu
image: ollama-intel-gpu:latest
    restart: always
    devices:
      - /dev/dri:/dev/dri
    volumes:
- /tmp/.X11-unix:/tmp/.X11-unix
      - ollama-intel-gpu:/root/.ollama
    environment:
- DISPLAY=${DISPLAY} - DEVICE=Arc
  ollama-webui:
    image: ghcr.io/open-webui/open-webui
    container_name: ollama-webui