Extracting SD.Next to its own docker compose file.

README.md (10 changed lines)

@@ -20,18 +20,24 @@ streamline Stable Diffusion capabilities, all while tapping into the power of In
 * `ENABLE_OPENAI_API` and `ENABLE_OLLAMA_API` flags are set to off and on, respectively, allowing interactions via Ollama only.
 * `ENABLE_IMAGE_GENERATION` is set to true, allowing you to generate images from the UI.
 * `IMAGE_GENERATION_ENGINE` is set to automatic1111 (SD.Next is compatible).
-3. SD.Next
+
+4. SD.Next
 * Uses as the base container the official [Intel® Extension for PyTorch](https://pytorch-extension.intel.com/installation?platform=gpu&version=v2.6.10%2Bxpu&os=linux%2Fwsl2&package=docker)
 * Uses a customized version of the SD.Next [docker file](https://github.com/vladmandic/sdnext/blob/dev/configs/Dockerfile.ipex), making it compatible with the Intel Extension for Pytorch image.
 
 ## Setup
-Run the following commands to start your AI instance
+Run the following commands to start your Ollama instance with Open WebUI
 ```bash
 $ git clone https://github.com/eleiton/ollama-intel-arc.git
 $ cd ollama-intel-arc
 $ podman compose up
 ```
 
+Additionally, if you want to run the SD.Next service for image generation, run this command in a different terminal:
+```bash
+$ podman compose -f docker-compose.sdnext.yml up
+```
+
 ## Validate
 Run the following command to verify your Ollama instance is up and running
 ```bash
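
A quick way to confirm the new SD.Next service is reachable once it is up (a sketch, not part of this commit; the `/sdapi/v1/sd-models` path assumes SD.Next's automatic1111-compatible API is enabled, which the `IMAGE_GENERATION_ENGINE=automatic1111` setting above relies on):

```bash
# Port 7860 is published by docker-compose.sdnext.yml.
curl -s http://localhost:7860/ >/dev/null && echo "SD.Next UI is up"

# Assumption: the automatic1111-compatible API is enabled; this lists available checkpoints.
curl -s http://localhost:7860/sdapi/v1/sd-models
```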

docker-compose.sdnext.yml (new file, 25 added lines)

@@ -0,0 +1,25 @@
+version: '3'
+
+services:
+  sdnext-ipex:
+    build:
+      context: sdnext
+      dockerfile: Dockerfile
+    image: sdnext-ipex:latest
+    container_name: sdnext-ipex
+    restart: unless-stopped
+    devices:
+      - /dev/dri:/dev/dri
+    ports:
+      - 7860:7860
+    volumes:
+      - sdnext-app-volume:/app
+      - sdnext-mnt-volume:/mnt
+      - sdnext-huggingface-volume:/root/.cache/huggingface
+      - sdnext-python-volume:/usr/local/lib/python3.10
+
+volumes:
+  sdnext-app-volume: {}
+  sdnext-mnt-volume: {}
+  sdnext-python-volume: {}
+  sdnext-huggingface-volume: {}
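
A minimal usage sketch for this file (assuming podman with a compose provider, as in the README): build the image, start the service detached, and confirm the Arc GPU device nodes mapped via `/dev/dri` are visible inside the container.

```bash
# Build the sdnext-ipex image from sdnext/Dockerfile and start the service in the background.
podman compose -f docker-compose.sdnext.yml build
podman compose -f docker-compose.sdnext.yml up -d

# The compose file passes /dev/dri through; the GPU render node should appear here.
podman exec sdnext-ipex ls -l /dev/dri

# Follow SD.Next's log (assumption: the ENTRYPOINT writes sdnext.log under the /app working directory).
podman exec sdnext-ipex tail -f /app/sdnext.log
```

The four named volumes persist the /app checkout, the /mnt data and models, the Python packages, and the Hugging Face cache, so SD.Next should not have to re-clone or re-download on every container restart.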

docker-compose.yml

@@ -45,25 +45,6 @@ services:
       - host.docker.internal:host-gateway
     restart: unless-stopped
 
-  sdnext-ipex:
-    build:
-      context: sdnext
-      dockerfile: Dockerfile
-    image: sdnext-ipex:latest
-    container_name: sdnext-ipex
-    restart: unless-stopped
-    devices:
-      - /dev/dri:/dev/dri
-    ports:
-      - 7860:7860
-    volumes:
-      - sdnext-app-volume:/app
-      - sdnext-mnt-volume:/mnt
-      - sdnext-huggingface-volume:/root/.cache/huggingface
-
 volumes:
   ollama-volume: {}
   open-webui-volume: {}
-  sdnext-app-volume: {}
-  sdnext-mnt-volume: {}
-  sdnext-huggingface-volume: {}
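
With the service removed here, the main stack and SD.Next are started independently, as the updated README describes. If you still want both with a single command, compose implementations generally accept multiple `-f` files; a sketch, not something this commit documents:

```bash
# Later -f files are merged over earlier ones, so both stacks run in one compose project.
podman compose -f docker-compose.yml -f docker-compose.sdnext.yml up
```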

sdnext/Dockerfile

@@ -1,46 +1,16 @@
 FROM intel/intel-extension-for-pytorch:2.6.10-xpu
-
-# essentials
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends --fix-missing \
-    software-properties-common \
-    build-essential \
-    ca-certificates \
-    wget \
-    gpg \
-    git
-
-# python3.10
-RUN apt-get install -y --no-install-recommends --fix-missing python3.10-venv
-
-# jemalloc is not required but it is highly recommended (also used with optional ipexrun)
-RUN apt-get install -y --no-install-recommends --fix-missing libjemalloc-dev
-ENV LD_PRELOAD=libjemalloc.so.2
-
-# cleanup
-RUN /usr/sbin/ldconfig
-RUN apt-get clean && rm -rf /var/lib/apt/lists/*
-
-# stop pip and uv from caching
-ENV PIP_NO_CACHE_DIR=true
-ENV UV_NO_CACHE=true
 
 # set paths to use with sdnext
-ENV SD_DOCKER=true
 ENV SD_DATADIR="/mnt/data"
 ENV SD_MODELSDIR="/mnt/models"
-ENV venv_dir="/mnt/python/venv"
 
 # git clone and start sdnext
-RUN echo '#!/bin/bash\ngit status || git clone https://github.com/vladmandic/sdnext.git .\n/app/webui.sh "$@"' | tee /bin/startup.sh
+RUN echo '#!/bin/bash\ngit status || git clone https://github.com/vladmandic/sdnext.git .\npython /app/launch.py "$@"' | tee /bin/startup.sh
 RUN chmod 755 /bin/startup.sh
 
 # run sdnext
 WORKDIR /app
 ENTRYPOINT [ "startup.sh", "-f", "--use-ipex", "--uv", "--listen", "--debug", "--api-log", "--log", "sdnext.log" ]
-
-# expose port
-EXPOSE 7860
 
 # stop signal
 STOPSIGNAL SIGINT
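
For readability, the single-quoted script written by the `RUN echo ... | tee /bin/startup.sh` line above is intended to expand to roughly the following launcher (a reconstruction, not a file that exists in the repo as such):

```bash
#!/bin/bash
# If /app already holds a SD.Next checkout (e.g. persisted in the sdnext-app-volume),
# `git status` succeeds and the clone is skipped; otherwise the repository is cloned into /app.
git status || git clone https://github.com/vladmandic/sdnext.git .
# This commit switches the launch step from /app/webui.sh to launch.py, forwarding the
# ENTRYPOINT flags (-f, --use-ipex, --uv, --listen, --debug, --api-log, --log sdnext.log).
python /app/launch.py "$@"
```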