open-webui/run-ollama-docker.sh

#!/bin/bash
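# Host and container ports for the Ollama API (11434 is Ollama's default)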
host_port=11434
container_port=11434
# Ask whether the container should be started with GPU support
read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu
# Remove any existing ollama container, then pull the latest image
docker rm -f ollama || true
docker pull ollama/ollama:latest

# Base arguments: detached, persistent model volume, port mapping, container name
docker_args="-d -v ollama:/root/.ollama -p $host_port:$container_port --name ollama ollama/ollama"
if [ "$use_gpu" = "y" ]; then
docker_args="--gpus=all $docker_args"
fi
# Start the container ($docker_args is intentionally unquoted so it word-splits into separate arguments)
docker run $docker_args
# Clean up dangling images left behind by the pull
docker image prune -f