diff --git a/.github/workflows/node.js.yaml b/.github/workflows/node.js.yaml index 29a9cd8d..20a04dc0 100644 --- a/.github/workflows/node.js.yaml +++ b/.github/workflows/node.js.yaml @@ -7,7 +7,7 @@ jobs: build: name: 'Fmt, Lint, & Build' env: - PUBLIC_API_ENDPOINT: '' + PUBLIC_API_BASE_URL: '' runs-on: ubuntu-latest strategy: matrix: diff --git a/Dockerfile b/Dockerfile index 4ac679a9..b3749ade 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,13 +3,13 @@ FROM node:latest WORKDIR /app -ARG OLLAMA_API_ENDPOINT='' -RUN echo $OLLAMA_API_ENDPOINT +ARG OLLAMA_API_BASE_URL='' +RUN echo $OLLAMA_API_BASE_URL ENV ENV prod -ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT -RUN echo $PUBLIC_API_ENDPOINT +ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL +RUN echo $PUBLIC_API_BASE_URL COPY package.json package-lock.json ./ RUN npm ci diff --git a/README.md b/README.md index b67aaf10..d20e47fa 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ ChatGPT-Style Web Interface for Ollama 🦙 - 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature. -- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API endpoint in the Docker image: `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`. +- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API base URL in the Docker image: `docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .`. - 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features. 
@@ -49,7 +49,8 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve ### Using Docker 🐳 ```bash -docker build -t ollama-webui . +docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui . + docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui ``` @@ -57,10 +58,10 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh #### Connecting to Ollama on a Different Server -If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable: +If Ollama is hosted on a server other than your local machine, change `OLLAMA_API_BASE_URL` to match: ```bash -docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui . +docker build --build-arg OLLAMA_API_BASE_URL='https://example.com/api' -t ollama-webui . docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui ``` diff --git a/.env.example b/example.env similarity index 81% rename from .env.example rename to example.env index 9ca55625..e7394bd8 100644 --- a/.env.example +++ b/example.env @@ -1,4 +1,4 @@ -PUBLIC_API_ENDPOINT="http://localhost:11434/api" +PUBLIC_API_BASE_URL="http://localhost:11434/api" OLLAMA_API_ID='my-api-token' OLLAMA_API_TOKEN='xxxxxxxxxxxxxxxx' diff --git a/src/lib/constants.ts b/src/lib/constants.ts index 4d7baeb1..5450a17a 100644 --- a/src/lib/constants.ts +++ b/src/lib/constants.ts @@ -1,12 +1,12 @@ import { browser } from '$app/environment'; -import { PUBLIC_API_ENDPOINT } from '$env/static/public'; +import { PUBLIC_API_BASE_URL } from '$env/static/public'; -export const API_ENDPOINT = - PUBLIC_API_ENDPOINT === '' +export const API_BASE_URL = + PUBLIC_API_BASE_URL === '' ? browser ? 
`http://${location.hostname}:11434/api` : `http://localhost:11434/api` - : PUBLIC_API_ENDPOINT; + : PUBLIC_API_BASE_URL; // Source: https://kit.svelte.dev/docs/modules#$env-static-public // This feature, akin to $env/static/private, exclusively incorporates environment variables @@ -14,6 +14,6 @@ export const API_ENDPOINT = // Consequently, these variables can be securely exposed to client-side code. // Example of the .env configuration: -// OLLAMA_API_ENDPOINT="http://localhost:11434/api" +// OLLAMA_API_BASE_URL="http://localhost:11434/api" // # Public -// PUBLIC_API_ENDPOINT=$OLLAMA_API_ENDPOINT +// PUBLIC_API_BASE_URL=$OLLAMA_API_BASE_URL diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte index cd9013c5..1d57ea4d 100644 --- a/src/routes/+page.svelte +++ b/src/routes/+page.svelte @@ -7,7 +7,7 @@ const { saveAs } = fileSaver; import hljs from 'highlight.js'; import 'highlight.js/styles/dark.min.css'; - import { API_ENDPOINT } from '$lib/constants'; + import { API_BASE_URL } from '$lib/constants'; import { onMount, tick } from 'svelte'; import Navbar from '$lib/components/layout/Navbar.svelte'; @@ -31,8 +31,8 @@ let messages = []; onMount(async () => { - console.log(API_ENDPOINT); - const res = await fetch(`${API_ENDPOINT}/tags`, { + console.log(API_BASE_URL); + const res = await fetch(`${API_BASE_URL}/tags`, { method: 'GET', headers: { Accept: 'application/json', @@ -277,7 +277,7 @@ messages = [...messages, responseMessage]; window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`${API_ENDPOINT}/generate`, { + const res = await fetch(`${API_BASE_URL}/generate`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream' @@ -373,7 +373,7 @@ messages = [...messages, responseMessage]; window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`${API_ENDPOINT}/generate`, { + const res = await fetch(`${API_BASE_URL}/generate`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream' @@ -453,7 +453,7 
@@ const generateTitle = async (user_prompt) => { console.log('generateTitle'); - const res = await fetch(`${API_ENDPOINT}/generate`, { + const res = await fetch(`${API_BASE_URL}/generate`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream'