From 7bdef561921734c40b8487f51c10e3e667f55eff Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Wed, 27 Dec 2023 00:11:23 -0800
Subject: [PATCH] fix: docker container volume mount location

---
 README.md                       | 8 ++++----
 backend/apps/web/internal/db.py | 2 +-
 backend/data/readme.txt         | 1 +
 docker-compose.yml              | 2 +-
 run.sh                          | 2 +-
 5 files changed, 8 insertions(+), 7 deletions(-)
 create mode 100644 backend/data/readme.txt

diff --git a/README.md b/README.md
index 0aa3361b..311f9006 100644
--- a/README.md
+++ b/README.md
@@ -112,14 +112,14 @@ After installing Ollama, verify that Ollama is running by accessing the followin
 If Ollama is hosted on your local machine and accessible at [http://127.0.0.1:11434/](http://127.0.0.1:11434/), run the following command:
 
 ```bash
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
 ```
 
 Alternatively, if you prefer to build the container yourself, use the following command:
 
 ```bash
 docker build -t ollama-webui .
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
 ```
 
 Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000) and accessible over LAN (or Network). Enjoy! 😄
@@ -129,14 +129,14 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh
 Change `OLLAMA_API_BASE_URL` environment variable to match the external Ollama Server url:
 
 ```bash
-docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
+docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
 ```
 
 Alternatively, if you prefer to build the container yourself, use the following command:
 
 ```bash
 docker build -t ollama-webui .
-docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
 ```
 
 ## How to Install Without Docker
diff --git a/backend/apps/web/internal/db.py b/backend/apps/web/internal/db.py
index d2f7db95..3d639f3c 100644
--- a/backend/apps/web/internal/db.py
+++ b/backend/apps/web/internal/db.py
@@ -1,4 +1,4 @@
 from peewee import *
 
-DB = SqliteDatabase("./ollama.db")
+DB = SqliteDatabase("./data/ollama.db")
 DB.connect()
diff --git a/backend/data/readme.txt b/backend/data/readme.txt
new file mode 100644
index 00000000..30c12ace
--- /dev/null
+++ b/backend/data/readme.txt
@@ -0,0 +1 @@
+dir for backend files (db, documents, etc.)
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 309a827e..a7357740 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -19,7 +19,7 @@ services:
     image: ollama-webui:latest
     container_name: ollama-webui
     volumes:
-      - ollama-webui:/app/backend
+      - ollama-webui:/app/backend/data
     depends_on:
       - ollama
     ports:
diff --git a/run.sh b/run.sh
index d145c08d..0ada65d1 100644
--- a/run.sh
+++ b/run.sh
@@ -1,5 +1,5 @@
 docker stop ollama-webui || true
 docker rm ollama-webui || true
 docker build -t ollama-webui .
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
 docker image prune -f
\ No newline at end of file
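
As a quick sanity check after applying this change (not part of the patch itself): with the named volume now mounted at `/app/backend/data` and the database opened at `./data/ollama.db`, the SQLite file should land at the root of the `ollama-webui` volume. A minimal sketch of how to confirm that from the host, assuming only the volume name used in the commands above and any small image such as `alpine`:

```bash
# Inspect the contents of the ollama-webui named volume from a throwaway container.
# Once the web UI has been used at least once, ollama.db (plus any other backend
# files such as documents, per backend/data/readme.txt) should show up here,
# because /app/backend/data is now the mount point.
docker run --rm -v ollama-webui:/mnt alpine ls -la /mnt
```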