From 9bbae0e25ac08aa8a8e1db1e6c5456e4cafb0c4e Mon Sep 17 00:00:00 2001 From: icervero Date: Tue, 19 Dec 2023 17:34:50 +0100 Subject: [PATCH] Refactor docker-compose configuration for modularity Split the original docker-compose.yml into three separate files for enhanced modularity and ease of use. Created docker-compose.api.yml for API exposure configuration and docker-compose.gpu.yml for GPU support. This change simplifies the management of different deployment environments and configurations, making it easier to enable or disable specific features such as GPU support and API access without modifying the main docker-compose file. --- README.md | 9 ++++++++- docker-compose.api.yml | 7 +++++++ docker-compose.gpu.yml | 13 +++++++++++++ docker-compose.yml | 12 ------------ 4 files changed, 28 insertions(+), 13 deletions(-) create mode 100644 docker-compose.api.yml create mode 100644 docker-compose.gpu.yml diff --git a/README.md b/README.md index c5919688..549bce65 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,14 @@ If you don't have Ollama installed yet, you can use the provided Docker Compose docker compose up -d --build ``` -This command will install both Ollama and Ollama Web UI on your system. Ensure to modify the `compose.yaml` file for GPU support and Exposing Ollama API outside the container stack if needed. +This command will install both Ollama and Ollama Web UI on your system. 
+Enable GPU support or expose the Ollama API outside the container stack with the following command: +```bash +docker compose -f docker-compose.yml \ + -f docker-compose.gpu.yml \ + -f docker-compose.api.yml \ + up -d --build +``` ### Installing Ollama Web UI Only diff --git a/docker-compose.api.yml b/docker-compose.api.yml new file mode 100644 index 00000000..c36cf11e --- /dev/null +++ b/docker-compose.api.yml @@ -0,0 +1,7 @@ +version: '3.6' + +services: + ollama: + # Expose Ollama API outside the container stack + ports: + - 11434:11434 \ No newline at end of file diff --git a/docker-compose.gpu.yml b/docker-compose.gpu.yml new file mode 100644 index 00000000..db47ae13 --- /dev/null +++ b/docker-compose.gpu.yml @@ -0,0 +1,13 @@ +version: '3.6' + +services: + ollama: + # GPU support + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: + - gpu diff --git a/docker-compose.yml b/docker-compose.yml index b5036354..427f8580 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,20 +2,8 @@ version: '3.6' services: ollama: - # Uncomment below for GPU support - # deploy: - # resources: - # reservations: - # devices: - # - driver: nvidia - # count: 1 - # capabilities: - # - gpu volumes: - ollama:/root/.ollama - # Uncomment below to expose Ollama API outside the container stack - # ports: - # - 11434:11434 container_name: ollama pull_policy: always tty: true