diff --git a/Dockerfile b/Dockerfile index 61d28d8..fb4c73f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,7 @@ FROM ubuntu:24.04 ENV DEBIAN_FRONTEND=noninteractive ENV TZ=america/los_angeles + # Base packages RUN apt update && \ apt install --no-install-recommends -q -y \ @@ -22,10 +23,13 @@ RUN mkdir -p /tmp/gpu && \ dpkg -i *.deb && \ rm *.deb -# Install Ollama Portable Zip +# Install Ollama Portable Zip (with cached default) +ARG IPEXLLM_RELEASE_REPO=mattcurf/ollama-intel-gpu +ARG IPEXLLM_RELEASE_VERSION=v0.0.1 +ARG IPEXLLM_PORTABLE_ZIP_FILENAME=ollama-0.5.4-ipex-llm-2.2.0b20250220-ubuntu.tgz RUN cd / && \ - wget https://github.com/mattcurf/ollama-intel-gpu/releases/download/v0.0.1/ollama-0.5.4-ipex-llm-2.2.0b20250220-ubuntu.tgz && \ - tar xvf ollama-0.5.4-ipex-llm-2.2.0b20250220-ubuntu.tgz --strip-components=1 -C / + wget https://github.com/${IPEXLLM_RELEASE_REPO}/releases/download/${IPEXLLM_RELEASE_VERSION}/${IPEXLLM_PORTABLE_ZIP_FILENAME} && \ + tar xvf ${IPEXLLM_PORTABLE_ZIP_FILENAME} --strip-components=1 -C / ENV OLLAMA_HOST=0.0.0.0:11434 diff --git a/README.md b/README.md index 322ee59..5b0e464 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,21 @@ $ docker compose up Then launch your web browser to http://localhost:3000 to launch the web ui. Create a local OpenWeb UI credential, then click the settings icon in the top right of the screen, then select 'Models', then click 'Show', then download a model like 'llama3.1:8b-instruct-q8_0' for Intel ARC A770 16GB VRAM +## Update to the latest IPEX-LLM Portable Zip Version + +To update to the latest portable zip version of IPEX-LLM's Ollama, update the compose file with the build arguments shown below, using the latest `ollama-*.tgz` release from https://github.com/intel/ipex-llm/releases/tag/v2.2.0-nightly , then rebuild the image. + +```yaml +ollama-intel-gpu: + build: + context: .
+ dockerfile: Dockerfile + args: + IPEXLLM_RELEASE_REPO: intel/ipex-llm + IPEXLLM_RELEASE_VERSION: v2.2.0-nightly + IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-ipex-llm-2.2.0b20250313-ubuntu.tgz +``` + # References * https://dgpu-docs.intel.com/driver/client/overview.html * https://github.com/intel/ipex-llm/blob/main/docs/mddocs/Quickstart/ollama_portablze_zip_quickstart.md diff --git a/docker-compose.yml b/docker-compose.yml index 117bac2..0301a2c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,6 +3,10 @@ services: build: context: . dockerfile: Dockerfile + args: + IPEXLLM_RELEASE_REPO: mattcurf/ollama-intel-gpu + IPEXLLM_RELEASE_VERSION: v0.0.1 + IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-0.5.4-ipex-llm-2.2.0b20250220-ubuntu.tgz container_name: ollama-intel-gpu restart: always devices: