diff --git a/README.md b/README.md
index fced28e2..622ee29e 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ ChatGPT-Style Web Interface for Ollama 🦙
 
 - 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
 
-- ⚙️ **Many Models Conversations**: : Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
+- ⚙️ **Many Models Conversations**: Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
 
 - 🤝 **OpenAI Model Integration**: Seamlessly utilize OpenAI models alongside Ollama models for a versatile conversational experience.
 
@@ -62,10 +62,10 @@ ChatGPT-Style Web Interface for Ollama 🦙
 
 If you don't have Ollama installed yet, you can use the provided Docker Compose file for a hassle-free installation. Simply run the following command:
 
 ```bash
-docker compose up --build
+docker compose up -d --build
 ```
 
-This command will install both Ollama and Ollama Web UI on your system. Ensure to modify the `compose.yaml` file for GPU support if needed.
+This command will install both Ollama and Ollama Web UI on your system. Be sure to modify the `compose.yaml` file if you need GPU support or want to expose the Ollama API outside the container stack.
 
 ### Installing Ollama Web UI Only
diff --git a/compose.yaml b/compose.yaml
index 0a77d64f..b5036354 100644
--- a/compose.yaml
+++ b/compose.yaml
@@ -13,8 +13,9 @@ services:
     #             - gpu
     volumes:
       - ollama:/root/.ollama
-    ports:
-      - 11434:11434
+    # Uncomment below to expose the Ollama API outside the container stack
+    # ports:
+    #   - 11434:11434
     container_name: ollama
     pull_policy: always
     tty: true
@@ -29,10 +30,14 @@ services:
       dockerfile: Dockerfile
     image: ollama-webui:latest
     container_name: ollama-webui
+    depends_on:
+      - ollama
     ports:
       - 3000:8080
+    environment:
+      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
     extra_hosts:
-        - host.docker.internal:host-gateway
+      - host.docker.internal:host-gateway
     restart: unless-stopped
     volumes:
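
As a quick way to sanity-check the new wiring, the sketch below (not part of the diff) brings the stack up and probes both services. It assumes the service names and ports from the `compose.yaml` above, and that `curl` is available on the host and inside the `ollama-webui` container:

```bash
# Build the images and start the stack in the background (-d = detached).
docker compose up -d --build

# The web UI is published on the host at port 3000.
curl -I http://localhost:3000

# With the ports mapping commented out, the Ollama API is reachable only on
# the internal compose network, e.g. from inside the ollama-webui container
# (assumes curl exists in that image):
docker compose exec ollama-webui curl http://ollama:11434/api/tags

# After uncommenting the ports section for the ollama service, the API is
# also published on the host:
# curl http://localhost:11434/api/tags
```

Keeping 11434 unpublished by default means the web UI reaches Ollama over the compose network via `OLLAMA_API_BASE_URL=http://ollama:11434/api`, so the API is not exposed on the host unless you opt in.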