Update to latest ipex-llm dockerfile 20250211
Dockerfile
+4 -6
@@ -1,12 +1,10 @@
-FROM intelanalytics/ipex-llm-inference-cpp-xpu:2.2.0-SNAPSHOT
+FROM intelanalytics/ipex-llm-inference-cpp-xpu:latest
 
-ENV ZES_ENABLE_SYSMAN=1
-ENV OLLAMA_HOST=0.0.0.0:11434
-
 RUN mkdir -p /llm/ollama; \
     cd /llm/ollama; \
     init-ollama;
 
 WORKDIR /llm/ollama
-
-ENTRYPOINT ["./ollama", "serve"]
+COPY commands.sh /llm/ollama/commands.sh
+RUN ["chmod", "+x", "/llm/ollama/commands.sh"]
+ENTRYPOINT ["/llm/ollama/commands.sh"]
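The new ENTRYPOINT moves GPU setup out of the Dockerfile and into commands.sh, which runs at container start. For a quick standalone check of the image, a minimal sketch (the ollama-intel-gpu tag and published port are illustrative, not part of this commit):

    $ docker build -t ollama-intel-gpu .
    $ docker run --rm --device /dev/dri -e DEVICE=Arc -p 11434:11434 ollama-intel-gpu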
commands.sh
+5 -0
@@ -0,0 +1,5 @@
+#!/bin/bash
+source ipex-llm-init --gpu --device $DEVICE
+export OLLAMA_HOST=0.0.0.0:11434
+cd /llm/ollama
+./ollama serve
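The script sources ipex-llm-init with the device name taken from the DEVICE environment variable (wired in by the compose file below), then starts the Ollama server bound to all interfaces. Once the container is up, a quick smoke test from the host, assuming port 11434 is reachable (/api/tags is Ollama's stock model-listing endpoint):

    $ curl http://localhost:11434/api/tags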
docker-compose.yml
+1 -4
@@ -1,19 +1,16 @@
-version: "3.9"
 services:
   ollama-intel-gpu:
     build:
       context: .
       dockerfile: Dockerfile
     container_name: ollama-intel-gpu
-    image: ollama-intel-gpu:latest
     restart: always
     devices:
       - /dev/dri:/dev/dri
     volumes:
-      - /tmp/.X11-unix:/tmp/.X11-unix
       - ollama-intel-gpu:/root/.ollama
     environment:
-      - DISPLAY=${DISPLAY}
+      - DEVICE=Arc
   ollama-webui:
     image: ghcr.io/open-webui/open-webui
     container_name: ollama-webui
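With the obsolete top-level version key dropped (Compose v2 ignores it) and DEVICE=Arc passed through to commands.sh, the stack is rebuilt and started the usual Compose way:

    $ docker compose up -d --build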