Merge pull request #54 from blebo/update-ipex-v2.2.0

Update default to ipex-llm v2.2.0 (guide for v2.3.0-nightly in docs)
Matt Curfman authored on 2025-04-19 17:00:06 -07:00 (committed by GitHub)

3 changed files with 10 additions and 10 deletions
+4 -4
@@ -23,10 +23,10 @@ RUN mkdir -p /tmp/gpu && \
     dpkg -i *.deb && \
     rm *.deb
 
-# Install Ollama Portable Zip (with cached default)
-ARG IPEXLLM_RELEASE_REPO=mattcurf/ollama-intel-gpu
-ARG IPEXLLM_RELEASE_VERSON=v0.0.1
-ARG IPEXLLM_PORTABLE_ZIP_FILENAME=ollama-0.5.4-ipex-llm-2.2.0b20250220-ubuntu.tgz
+# Install Ollama Portable Zip
+ARG IPEXLLM_RELEASE_REPO=intel/ipex-llm
+ARG IPEXLLM_RELEASE_VERSON=v2.2.0
+ARG IPEXLLM_PORTABLE_ZIP_FILENAME=ollama-ipex-llm-2.2.0-ubuntu.tgz
 RUN cd / && \
     wget https://github.com/${IPEXLLM_RELEASE_REPO}/releases/download/${IPEXLLM_RELEASE_VERSON}/${IPEXLLM_PORTABLE_ZIP_FILENAME} && \
     tar xvf ${IPEXLLM_PORTABLE_ZIP_FILENAME} --strip-components=1 -C /
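For reference (not part of the diff), with the new defaults the download-and-extract step resolves to roughly the following; this is just the updated ARG values substituted into the existing RUN command:

```sh
# Illustration only: the RUN step above with the new default ARG values
# (intel/ipex-llm, v2.2.0) substituted in.
wget https://github.com/intel/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
tar xvf ollama-ipex-llm-2.2.0-ubuntu.tgz --strip-components=1 -C /
```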
+3 -3
@@ -31,7 +31,7 @@ Then launch your web browser to http://localhost:3000 to launch the web ui. Cre
 
 ## Update to the latest IPEX-LLM Portable Zip Version
 
-To update to the latest portable zip version of IPEX-LLM's Ollama, update the compose file with the build arguments shown below, using the latest `ollama-*.tgz` release from https://github.com/intel/ipex-llm/releases/tag/v2.2.0-nightly , then rebuild the image.
+To update to the latest portable zip version of IPEX-LLM's Ollama, update the compose file with the build arguments shown below, using the latest `ollama-*.tgz` release from https://github.com/intel/ipex-llm/releases/tag/v2.3.0-nightly , then rebuild the image.
 
 ```yaml
 ollama-intel-gpu:
@@ -40,8 +40,8 @@ ollama-intel-gpu:
     dockerfile: Dockerfile
     args:
       IPEXLLM_RELEASE_REPO: intel/ipex-llm
-      IPEXLLM_RELEASE_VERSON: v2.2.0-nightly
-      IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-ipex-llm-2.2.0b20250313-ubuntu.tgz
+      IPEXLLM_RELEASE_VERSON: v2.3.0-nightly
+      IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-ipex-llm-2.3.0b20250415-ubuntu.tgz
 ```
 
 # References
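The "then rebuild the image" step mentioned in the README is not shown in this diff; a minimal sketch, assuming the compose service is named `ollama-intel-gpu` as in the snippet above:

```sh
# Sketch only: rebuild the image with the updated build args and restart the service.
docker compose build --no-cache ollama-intel-gpu
docker compose up -d ollama-intel-gpu
```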
+3 -3
@@ -4,9 +4,9 @@ services:
       context: .
       dockerfile: Dockerfile
       args:
-        IPEXLLM_RELEASE_REPO: mattcurf/ollama-intel-gpu
-        IPEXLLM_RELEASE_VERSON: v0.0.2
-        IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-ipex-llm-2.2.0b20250313-ubuntu.tar.gz
+        IPEXLLM_RELEASE_REPO: intel/ipex-llm
+        IPEXLLM_RELEASE_VERSON: v2.2.0
+        IPEXLLM_PORTABLE_ZIP_FILENAME: ollama-ipex-llm-2.2.0-ubuntu.tar.gz
     container_name: ollama-intel-gpu
     restart: always
     devices:
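As a quick check (not part of this commit), the resolved build arguments that the compose file will pass to the Dockerfile can be inspected before rebuilding:

```sh
# Sketch: render the fully resolved compose configuration, including the
# IPEXLLM_* build args, to confirm the values before building.
docker compose config
```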