From e6e05d46415e4ec2619ae89eea479f7837a5dc89 Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:21:36 +0600
Subject: [PATCH 1/7] chore: add shebang to shell scripts

---
 run-ollama-docker.sh | 2 ++
 run.sh               | 2 ++
 2 files changed, 4 insertions(+)

diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh
index 5942fd3c..914975f4 100644
--- a/run-ollama-docker.sh
+++ b/run-ollama-docker.sh
@@ -1,3 +1,5 @@
+#!/bin/bash
+
 docker rm -f ollama || true
 docker pull ollama/ollama
 # CPU Only
diff --git a/run.sh b/run.sh
index 6e2dc611..3abc5f03 100644
--- a/run.sh
+++ b/run.sh
@@ -1,3 +1,5 @@
+#!/bin/bash
+
 docker build -t ollama-webui .
 docker stop ollama-webui || true
 docker rm ollama-webui || true

From 0c0c5955c61ae64422374a53bd72a6e36f714dad Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:27:39 +0600
Subject: [PATCH 2/7] feat(docker): add option to run ollama w/o GPU support

---
 run-ollama-docker.sh | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh
index 914975f4..20770a2d 100644
--- a/run-ollama-docker.sh
+++ b/run-ollama-docker.sh
@@ -1,9 +1,14 @@
 #!/bin/bash
 
+read -r -p "Do you want ollama in docker with GPU support? (y/n): " use_gpu
+
 docker rm -f ollama || true
 docker pull ollama/ollama
-# CPU Only
-docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
-# GPU Support
-# docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
-docker image prune -f
\ No newline at end of file
+
+if [ "$use_gpu" == "y" ]; then
+  docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+else
+  docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+fi
+
+docker image prune -f

From 93c73d355bd7bd83fb9d6b774b3b073e911f9e80 Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:28:35 +0600
Subject: [PATCH 3/7] chore(docker): update ollama docker image tag to latest

---
 run-ollama-docker.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh
index 20770a2d..fd2a8eb2 100644
--- a/run-ollama-docker.sh
+++ b/run-ollama-docker.sh
@@ -3,7 +3,7 @@
 read -r -p "Do you want ollama in docker with GPU support? (y/n): " use_gpu
 
 docker rm -f ollama || true
-docker pull ollama/ollama
+docker pull ollama/ollama:latest
 
 if [ "$use_gpu" == "y" ]; then
   docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama

From f87a1c95435832ad21f5e32ac57bf1bd5ae4c80c Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:30:58 +0600
Subject: [PATCH 4/7] refactor: update run-ollama-docker.sh

---
 run-ollama-docker.sh | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh
index fd2a8eb2..89bf10be 100644
--- a/run-ollama-docker.sh
+++ b/run-ollama-docker.sh
@@ -1,14 +1,16 @@
 #!/bin/bash
 
-read -r -p "Do you want ollama in docker with GPU support? (y/n): " use_gpu
+read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu
 
 docker rm -f ollama || true
 docker pull ollama/ollama:latest
 
+docker_args="-d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
+
 if [ "$use_gpu" == "y" ]; then
-  docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
-else
-  docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
+  docker_args+=" --gpus=all"
 fi
 
+docker run "$docker_args"
+
 docker image prune -f

From 8bb68cfcaece11ce9fca34dc39d3a4f82e7bd54d Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:33:20 +0600
Subject: [PATCH 5/7] refactor: update run.sh

---
 run.sh | 20 +++++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/run.sh b/run.sh
index 3abc5f03..3fb41d93 100644
--- a/run.sh
+++ b/run.sh
@@ -1,7 +1,17 @@
 #!/bin/bash
 
-docker build -t ollama-webui .
-docker stop ollama-webui || true
-docker rm ollama-webui || true
-docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
-docker image prune -f
\ No newline at end of file
+image_name="ollama-webui"
+container_name="ollama-webui"
+
+docker build -t "$image_name" .
+docker stop "$container_name" &>/dev/null || true
+docker rm "$container_name" &>/dev/null || true
+
+docker run -d -p 3000:8080 \
+  --add-host=host.docker.internal:host-gateway \
+  -v "${image_name}:/app/backend/data" \
+  --name "$container_name" \
+  --restart always \
+  "$image_name"
+
+docker image prune -f

From fd33f68f284512e0f55ba07dabe0083c3132e46a Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:43:55 +0600
Subject: [PATCH 6/7] refactor(run.sh): update with port variables

---
 run.sh | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/run.sh b/run.sh
index 3fb41d93..c8ac77cc 100644
--- a/run.sh
+++ b/run.sh
@@ -2,12 +2,14 @@
 
 image_name="ollama-webui"
 container_name="ollama-webui"
+host_port=3000
+container_port=8080
 
 docker build -t "$image_name" .
 docker stop "$container_name" &>/dev/null || true
 docker rm "$container_name" &>/dev/null || true
 
-docker run -d -p 3000:8080 \
+docker run -d -p "$host_port":"$container_port" \
   --add-host=host.docker.internal:host-gateway \
   -v "${image_name}:/app/backend/data" \
   --name "$container_name" \

From db51c22093f8bdee48a5444dbbe4fb30cd2b1b98 Mon Sep 17 00:00:00 2001
From: Tanvir
Date: Tue, 9 Jan 2024 13:46:07 +0600
Subject: [PATCH 7/7] refactor(run-ollama-docker.sh): update with port variables

---
 run-ollama-docker.sh | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/run-ollama-docker.sh b/run-ollama-docker.sh
index 89bf10be..c8ce166a 100644
--- a/run-ollama-docker.sh
+++ b/run-ollama-docker.sh
@@ -1,11 +1,14 @@
 #!/bin/bash
 
+host_port=11434
+container_port=11434
+
 read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu
 
 docker rm -f ollama || true
 docker pull ollama/ollama:latest
 
-docker_args="-d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
+docker_args="-d -v ollama:/root/.ollama -p $host_port:$container_port --name ollama ollama/ollama"
 
 if [ "$use_gpu" == "y" ]; then
   docker_args+=" --gpus=all"