diff --git a/README.md b/README.md index c5919688..549bce65 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,14 @@ If you don't have Ollama installed yet, you can use the provided Docker Compose docker compose up -d --build ``` -This command will install both Ollama and Ollama Web UI on your system. Ensure to modify the `compose.yaml` file for GPU support and Exposing Ollama API outside the container stack if needed. +This command will install both Ollama and Ollama Web UI on your system. +Enable GPU support or expose the Ollama API outside the container stack with the following command: +```bash +docker compose -f docker-compose.yml \ + -f docker-compose.gpu.yml \ + -f docker-compose.api.yml \ + up -d --build +``` ### Installing Ollama Web UI Only diff --git a/docker-compose.api.yml b/docker-compose.api.yml new file mode 100644 index 00000000..c36cf11e --- /dev/null +++ b/docker-compose.api.yml @@ -0,0 +1,7 @@ +version: '3.6' + +services: + ollama: + # Expose Ollama API outside the container stack + ports: + - "11434:11434" diff --git a/docker-compose.gpu.yml b/docker-compose.gpu.yml new file mode 100644 index 00000000..db47ae13 --- /dev/null +++ b/docker-compose.gpu.yml @@ -0,0 +1,13 @@ +version: '3.6' + +services: + ollama: + # GPU support + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: + - gpu diff --git a/docker-compose.yml b/docker-compose.yml index b5036354..427f8580 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,20 +2,8 @@ version: '3.6' services: ollama: - # Uncomment below for GPU support - # deploy: - # resources: - # reservations: - # devices: - # - driver: nvidia - # count: 1 - # capabilities: - # - gpu volumes: - ollama:/root/.ollama - # Uncomment below to expose Ollama API outside the container stack - # ports: - # - 11434:11434 container_name: ollama pull_policy: always tty: true