Compare commits
6 Commits
91a2c18fc6
...
7922d42e73
| Author | SHA1 | Date | |
|---|---|---|---|
| 7922d42e73 | |||
| e9ac68082b | |||
| b0c77e3613 | |||
| 712a98f77c | |||
| 298b1c2489 | |||
| 22cd6bde60 |
51
Dockerfile
Normal file
51
Dockerfile
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
# ---------- Stage 1: Frontend ----------
# Builds the Open WebUI frontend bundle; only /app/build is consumed by Stage 2.
FROM node:20-bullseye-slim AS frontend

WORKDIR /app

# Pull source with all submodules.
# NOTE(review): cloning `main` at build time is non-reproducible — consider
# pinning a release tag (e.g. --branch v0.x.y) for deterministic builds.
RUN apt-get update \
 && apt-get install -y --no-install-recommends git \
 && rm -rf /var/lib/apt/lists/* \
 && git clone --branch main --recurse-submodules https://github.com/open-webui/open-webui.git . \
 && git submodule update --init --recursive

# Build frontend; the enlarged old-space size avoids Node OOM during the bundle build.
ENV NODE_OPTIONS="--max_old_space_size=8192"
RUN npm ci --legacy-peer-deps \
 && npm run build \
 && npm cache clean --force

# Sanity check: fail the build early if the bundle was not produced.
RUN ls -la /app/build
# ---------- Stage 2: Backend ----------
# CUDA runtime base so torch can use the GPU inside the container.
FROM nvidia/cuda:12.9.1-cudnn-runtime-ubuntu22.04

# Install system deps (ffmpeg / libgl1 / libglib2.0-0 back Open WebUI's
# audio and image features; curl is kept for in-container debugging/health checks).
RUN apt-get update \
 && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
      curl \
      ffmpeg \
      git \
      libgl1 \
      libglib2.0-0 \
      python3.11 \
      python3-pip \
 && apt-get clean \
 && rm -rf /var/lib/apt/lists/*

# Make bare `python` / `pip` resolve to the 3.11 toolchain.
RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 \
 && update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1

RUN python -m pip install --no-cache-dir --upgrade pip

# Clone backend code (same ref as the frontend stage so the two halves match).
WORKDIR /app
RUN git clone --branch main --recurse-submodules https://github.com/open-webui/open-webui.git . \
 && git submodule update --init --recursive

# Patch for logging API change: logging.getLevelNamesMapping() only exists on
# Python >= 3.12; rewrite to the equivalent (private but long-stable)
# logging._nameToLevel mapping so the app runs on Python 3.11.
RUN sed -i 's/logging.getLevelNamesMapping()/logging._nameToLevel/' backend/open_webui/env.py

# Copy frontend build from Stage 1
COPY --from=frontend /app/build /app/build

# Install Python deps. --no-cache-dir keeps the (multi-GB) pip download cache
# out of the image layer.
WORKDIR /app/backend
RUN pip install --no-cache-dir -r requirements.txt uvicorn \
 && pip install --no-cache-dir torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu121

# Force Python to see /app as a top-level package location so
# `backend.open_webui.main` is importable by uvicorn.
WORKDIR /app
ENV PYTHONPATH=/app:/app/backend

# Documentation only — publish the port via `docker run -p` or compose.
EXPOSE 3000

# Exec form: uvicorn runs as PID 1 and receives SIGTERM from `docker stop`.
# NOTE(review): container runs as root — consider adding a non-root USER once
# /app write permissions for Open WebUI's data dirs are sorted out.
CMD ["uvicorn", "backend.open_webui.main:app", "--host", "0.0.0.0", "--port", "3000"]
45
Readme.md
Normal file
45
Readme.md
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
**home-llm Docker Compose**
|
||||||
|
|
||||||
|
So far, just the docker-compose.yml file used to pull and run the containers for ollama and open-webui.
|
||||||
|
|
||||||
|
**nvidia-container Installation**
|
||||||
|
|
||||||
|
Step 1: Download the NVIDIA Docker Packages
|
||||||
|
|
||||||
|
Download the NVIDIA Container Toolkit and its dependencies:
|
||||||
|
|
||||||
|
You can use the following commands to download the necessary packages. Make sure to adjust the version numbers if needed:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/nvidia-docker2_2.16.0-1_amd64.deb
|
||||||
|
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/nvidia-container-runtime_3.11.0-1_amd64.deb
|
||||||
|
wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/nvidia-container-toolkit_1.12.0-1_amd64.deb
|
||||||
|
```
|
||||||
|
Note: The version numbers may change, so you might want to check the NVIDIA website for the latest versions.
|
||||||
|
|
||||||
|
Step 2: Install the Downloaded Packages
|
||||||
|
|
||||||
|
Install the downloaded packages:
|
||||||
|
|
||||||
|
Run the following command to install the packages:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo dpkg -i nvidia-container-runtime_3.11.0-1_amd64.deb nvidia-container-toolkit_1.12.0-1_amd64.deb nvidia-docker2_2.16.0-1_amd64.deb
|
||||||
|
```
|
||||||
|
|
||||||
|
If you encounter any dependency issues, you can resolve them by running:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt-get install -f
|
||||||
|
```
|
||||||
|
|
||||||
|
**Build Container**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker build --no-cache -t open-webui .
|
||||||
|
docker compose build && docker compose up -d
|
||||||
|
```
|
||||||
12
example.env
Normal file
12
example.env
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
# Open WebUI environment variables
# WARNING: the key below is an example placeholder — generate your own
# (e.g. `openssl rand -base64 48`) and never commit a real secret to version control.
WEBUI_SECRET_KEY=zZzXE9XxOx2561sICfe2Oscf/3LVr4ZrnGvv+fcTqsZlsdakWYrZCt8z8Uesh9Vf
|
||||||
|
HOME=/app
|
||||||
|
OLLAMA_MODELS=/app/.ollama/models
|
||||||
|
OLLAMA_HOME=/app/.ollama
|
||||||
|
OLLAMA_API_BASE_URL=http://ollama:11434
|
||||||
|
HF_HOME=/app/.cache
|
||||||
|
NODE_OPTIONS=--max_old_space_size=8192
|
||||||
|
|
||||||
|
# NVIDIA GPU settings
|
||||||
|
NVIDIA_VISIBLE_DEVICES=all
|
||||||
|
NVIDIA_DRIVER_CAPABILITIES=compute,utility
|
||||||
Reference in New Issue
Block a user