forked from open-webui/open-webui
All-in-one Dockerfile for including Ollama
This commit is contained in:
parent afa591afb2
commit fde0139bf7

5 changed files with 53 additions and 20 deletions

Dockerfile: 29 changes
Dockerfile

@@ -2,6 +2,7 @@
 # Initialize device type args
 ARG USE_CUDA=false
 ARG USE_MPS=false
+ARG INCLUDE_OLLAMA=false
 
 ######## WebUI frontend ########
 FROM node:21-alpine3.19 as build
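The new build arg defaults to false, so existing builds are unchanged; Ollama is only baked in when the arg is passed explicitly. A minimal sketch, using placeholder image tags that this commit does not itself define:

    # all-in-one image with Ollama included
    docker build --build-arg INCLUDE_OLLAMA=true -t open-webui:ollama .

    # regular image, same result as before this change
    docker build -t open-webui:main .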
@@ -29,10 +30,12 @@ FROM python:3.11-slim-bookworm as base
 # Use args
 ARG USE_CUDA
 ARG USE_MPS
+ARG INCLUDE_OLLAMA
 
 ## Basis ##
 ENV ENV=prod \
-    PORT=8080
+    PORT=8080 \
+    INCLUDE_OLLAMA_ENV=${INCLUDE_OLLAMA}
 
 ## Basis URL Config ##
 ENV OLLAMA_BASE_URL="/ollama" \
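A build ARG is not visible inside the running container, so the ENV block copies it into INCLUDE_OLLAMA_ENV for start.sh to read later. A quick check of that handoff, assuming the open-webui:ollama tag from the sketch above and the upstream image layout (a CMD, no ENTRYPOINT):

    docker run --rm open-webui:ollama env | grep INCLUDE_OLLAMA_ENV
    # expected: INCLUDE_OLLAMA_ENV=true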
@@ -88,14 +91,28 @@ RUN if [ "$USE_CUDA" = "true" ]; then \
     python -c "import os; from chromadb.utils import embedding_functions; sentence_transformer_ef = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=os.environ['RAG_EMBEDDING_MODEL'], device=os.environ['DEVICE_TYPE'])"; \
     fi
 
-# install required packages
-RUN apt-get update \
+RUN if [ "$INCLUDE_OLLAMA" = "true" ]; then \
+    apt-get update && \
     # Install pandoc and netcat
-    && apt-get install -y --no-install-recommends pandoc netcat-openbsd \
+    apt-get install -y --no-install-recommends pandoc netcat-openbsd && \
     # for RAG OCR
-    && apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 \
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
+    # install helper tools
+    apt-get install -y --no-install-recommends curl && \
+    # install ollama
+    curl -fsSL https://ollama.com/install.sh | sh && \
     # cleanup
-    && rm -rf /var/lib/apt/lists/*
+    rm -rf /var/lib/apt/lists/*; \
+    else \
+    apt-get update && \
+    # Install pandoc and netcat
+    apt-get install -y --no-install-recommends pandoc netcat-openbsd && \
+    # for RAG OCR
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \
+    # cleanup
+    rm -rf /var/lib/apt/lists/*; \
+    fi
 
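When INCLUDE_OLLAMA=true, the branch additionally installs curl and pipes the official install script into sh, leaving an ollama binary in the image; the else branch keeps the previous package set. A rough smoke test for the opt-in build, reusing the placeholder tag from above:

    docker run --rm open-webui:ollama ollama --version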
@@ -81,9 +81,14 @@ WORKDIR /app/backend
 # apk del /var/cache/apk/*.tbz2
 
 # Install only the dependencies in the container, python will come from the base image used
-RUN apk update && \
-    apk add --no-install-recommends ffmpeg libsm6 libxext6 pandoc netcat-openbsd && \
-    apk del /var/cache/apk/*.tbz2
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 pandoc netcat-openbsd && \
+    rm -rf /var/lib/apt/lists/*
+
+# Install python and pip
+RUN apt-get update && \
+    apt-get install -y python3 python3-pip && \
+    rm -rf /var/lib/apt/lists/*
 
 COPY ./backend/requirements.txt ./requirements.txt
 RUN pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 --no-cache-dir && \
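This file moves its dependency layer from apk to apt-get and adds a python3/pip layer on top. A quick way to confirm the expected tools still land in whatever image this Dockerfile produces (the tag is a placeholder):

    docker run --rm <image> sh -c 'ffmpeg -version | head -n1; pandoc --version | head -n1; python3 --version'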
@@ -64,7 +64,7 @@ RUN pip3 install -r requirements.txt --no-cache-dir
 # Install pandoc and netcat
 # RUN python -c "import pypandoc; pypandoc.download_pandoc()"
 RUN apt-get update \
-    && apt-get install -y pandoc netcat-openbsd \
+    && apt-get install -y pandoc netcat-openbsd curl \
     && rm -rf /var/lib/apt/lists/*
 
 # preload embedding model
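curl joins pandoc and netcat here; one plausible use is an in-container probe of the WebUI, for example from a HEALTHCHECK or a compose file (the /health path, port and container name are assumptions, not something this diff defines):

    docker exec <container> curl -fsS http://localhost:8080/health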
@@ -208,6 +208,7 @@ OLLAMA_API_BASE_URL = os.environ.get(
 )
 
 OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "")
+INCLUDE_OLLAMA = os.environ.get("INCLUDE_OLLAMA_ENV", "false").lower() == "true"
 
 
 if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":

@@ -219,6 +220,9 @@ if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":
 
 if ENV == "prod":
     if OLLAMA_BASE_URL == "/ollama":
-        OLLAMA_BASE_URL = "http://host.docker.internal:11434"
+        if INCLUDE_OLLAMA:
+            OLLAMA_BASE_URL = "http://localhost:11434"
+        else:
+            OLLAMA_BASE_URL = "http://host.docker.internal:11434"
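With this change the resolved Ollama URL depends on whether Ollama was baked into the image: the all-in-one build talks to the bundled server on localhost, while the plain build keeps falling back to the Docker host. A hedged sketch of the two run modes; tags, volume names and ports are assumptions, only the two URLs come from the diff:

    # all-in-one: Ollama runs inside the container, reached at http://localhost:11434
    docker run -d -p 3000:8080 -v ollama:/root/.ollama -v open-webui:/app/backend/data open-webui:ollama

    # plain image: the WebUI reaches the host's Ollama at http://host.docker.internal:11434
    docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data open-webui:main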
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
 
+INCLUDE_OLLAMA=${INCLUDE_OLLAMA_ENV:-false}
+
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
 cd "$SCRIPT_DIR" || exit
 

@@ -7,16 +9,21 @@ KEY_FILE=.webui_secret_key
 
 PORT="${PORT:-8080}"
 if test "$WEBUI_SECRET_KEY $WEBUI_JWT_SECRET_KEY" = " "; then
-  echo No WEBUI_SECRET_KEY provided
+  echo "No WEBUI_SECRET_KEY provided"
 
   if ! [ -e "$KEY_FILE" ]; then
-    echo Generating WEBUI_SECRET_KEY
+    echo "Generating WEBUI_SECRET_KEY"
     # Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one.
-    echo $(head -c 12 /dev/random | base64) > $KEY_FILE
+    echo $(head -c 12 /dev/random | base64) > "$KEY_FILE"
   fi
 
-  echo Loading WEBUI_SECRET_KEY from $KEY_FILE
-  WEBUI_SECRET_KEY=`cat $KEY_FILE`
+  echo "Loading WEBUI_SECRET_KEY from $KEY_FILE"
+  WEBUI_SECRET_KEY=$(cat "$KEY_FILE")
+fi
+
+if [ "$INCLUDE_OLLAMA" = "true" ]; then
+  echo "INCLUDE_OLLAMA is set to true, starting ollama serve."
+  ollama serve &
 fi
 
 WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'
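Once INCLUDE_OLLAMA resolves to true, the start script launches ollama serve in the background before exec-ing uvicorn. One way to verify both processes in a running all-in-one container; container name and tag are placeholders, and /api/version is Ollama's standard version endpoint:

    docker run -d --name owui-aio -p 3000:8080 open-webui:ollama
    docker exec owui-aio curl -s http://localhost:11434/api/version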