forked from open-webui/open-webui

feat: enable buildtime API_ENDPOINT env var

parent: f4f1283cd5
commit: 86395a8c1f

10 changed files with 692 additions and 64 deletions
.env.example  (new file, 2 additions)

@@ -0,0 +1,2 @@
+OLLAMA_API_ENDPOINT=""
+PUBLIC_API_ENDPOINT="$OLLAMA_API_ENDPOINT"
Dockerfile  (11 changes)

@@ -1,15 +1,20 @@
 # syntax=docker/dockerfile:1

 FROM node:latest

 WORKDIR /app

+ARG OLLAMA_API_ENDPOINT=''
+RUN echo $OLLAMA_API_ENDPOINT
+
 ENV ENV prod

+ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT
+RUN echo $PUBLIC_API_ENDPOINT
+
 COPY package.json package-lock.json ./
 RUN npm ci

 COPY . .
 RUN npm run build

-CMD [ "node", "./build/index.js"]
+CMD [ "npm", "run", "start"]
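Note on the build-time flow: `ARG OLLAMA_API_ENDPOINT` is only available while the image is being built, so the new `ENV PUBLIC_API_ENDPOINT $OLLAMA_API_ENDPOINT` line copies it into an environment variable that `npm run build` can read; SvelteKit inlines `PUBLIC_`-prefixed variables from `$env/static/public` into the bundle at build time (see the `$lib/constants` change below). The value is supplied with `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`, as documented in the README diff. The changed `CMD` no longer runs the adapter-node entry point (`node ./build/index.js`) but the new `npm run start` script, which serves the prebuilt output with `http-server`.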
README.md  (18 changes)

@@ -7,15 +7,25 @@ ChatGPT-Style Web Interface for Ollama 🦙
 ## Features ⭐

 - 🖥️ **Intuitive Interface**: Our chat interface takes inspiration from ChatGPT, ensuring a user-friendly experience.
+
 - 📱 **Responsive Design**: Enjoy a seamless experience on both desktop and mobile devices.
+
 - ⚡ **Swift Responsiveness**: Enjoy fast and responsive performance.
+
 - 🚀 **Effortless Setup**: Install seamlessly using Docker for a hassle-free experience.
+
 - 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
+
 - 📜 **Chat History**: Effortlessly access and manage your conversation history.
+
 - 📤📥 **Import/Export Chat History**: Seamlessly move your chat data in and out of the platform.
+
 - ⚙️ **Fine-Tuned Control with Advanced Parameters**: Gain a deeper level of control by adjusting parameters such as temperature and defining your system prompts to tailor the conversation to your specific preferences and needs.
+
 - 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
-- 🔗 **External Ollama Server Connection**: Link to the model when Ollama is hosted on a different server via the environment variable -e OLLAMA_ENDPOINT="http://[insert your Ollama address]".
+
+- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API endpoint in the Docker image: `docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .`.
+
 - 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.

 ## How to Install 🚀

@@ -40,7 +50,7 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve

 ```bash
 docker build -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```

 Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000). Enjoy! 😄

@@ -50,8 +60,8 @@ Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localh
 If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable:

 ```bash
-docker build -t ollama-webui .
+docker build --build-arg OLLAMA_API_ENDPOINT="http://[Your Ollama URL]/api" -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway -e OLLAMA_ENDPOINT="http://[insert your ollama url]" --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 ```

 ## What's Next? 🚀
package-lock.json  (generated, 651 changes)

(File diff suppressed because it is too large.)
package.json

@@ -3,6 +3,7 @@
 	"version": "0.0.1",
 	"private": true,
 	"scripts": {
+		"start": "http-server ./build",
 		"dev": "vite dev --host",
 		"build": "vite build",
 		"preview": "vite preview",
@@ -40,6 +41,7 @@
 		"@sveltejs/adapter-node": "^1.3.1",
 		"file-saver": "^2.0.5",
 		"highlight.js": "^11.9.0",
+		"http-server": "^14.1.1",
 		"idb": "^7.1.1",
 		"marked": "^9.1.0",
 		"svelte-french-toast": "^1.2.0",
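The new `start` script serves the static `./build` output with `http-server`, which listens on port 8080 by default; that is why the `docker run` commands in the README and `run.sh` now publish `-p 3000:8080` instead of `-p 3000:3000`.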
run.sh  (2 changes)

@@ -1,5 +1,5 @@
 docker stop ollama-webui || true
 docker rm ollama-webui || true
 docker build -t ollama-webui .
-docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
+docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
 docker image prune -f
src/lib/constants.ts

@@ -1,7 +1,19 @@
-import { browser, dev } from '$app/environment';
+import { browser } from '$app/environment';
+import { PUBLIC_API_ENDPOINT } from '$env/static/public';

-export const API_ENDPOINT = browser
-	? `https://localhost/api`
-	: dev
-	? `http://localhost:11434/api`
-	: 'http://host.docker.internal:11434/api';
+export const API_ENDPOINT =
+	PUBLIC_API_ENDPOINT === ''
+		? browser
+			? `http://${location.hostname}:11434/api`
+			: `http://localhost:11434/api`
+		: PUBLIC_API_ENDPOINT;
+
+// Source: https://kit.svelte.dev/docs/modules#$env-static-public
+// This feature, akin to $env/static/private, exclusively incorporates environment variables
+// that are prefixed with config.kit.env.publicPrefix (usually set to PUBLIC_).
+// Consequently, these variables can be securely exposed to client-side code.
+
+// Example of the .env configuration:
+// OLLAMA_API_ENDPOINT="http://localhost:11434/api"
+// # Public
+// PUBLIC_API_ENDPOINT=$OLLAMA_API_ENDPOINT
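For readability, the nested ternary above can be restated as follows. This is only a sketch of the new resolution order, not code from the commit; the function and parameter names are hypothetical, while `PUBLIC_API_ENDPOINT`, `browser`, and `location.hostname` are the values used in `$lib/constants`:

```ts
// Sketch: how the new API_ENDPOINT expression picks an endpoint.
function resolveApiEndpoint(publicApiEndpoint: string, isBrowser: boolean, hostname: string): string {
	if (publicApiEndpoint !== '') {
		// A value was baked in at image build time via --build-arg OLLAMA_API_ENDPOINT=...
		return publicApiEndpoint;
	}
	// No build-time endpoint: fall back to Ollama's default port (11434),
	// on the page's own host in the browser, or on localhost otherwise.
	return isBrowser ? `http://${hostname}:11434/api` : `http://localhost:11434/api`;
}

// Roughly equivalent to the exported constant:
// const API_ENDPOINT = resolveApiEndpoint(PUBLIC_API_ENDPOINT, browser, location.hostname);
```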
@@ -7,7 +7,7 @@ export const prerender = true;
 // you have to set ssr to false.
 // This is not the case (so set as true or comment the line)
 // Documentation: https://kit.svelte.dev/docs/page-options#ssr
-export const ssr = true;
+// export const ssr = false;

 // How to manage the trailing slashes in the URLs
 // the URL for about page witll be /about with 'ignore' (default)
@@ -1,8 +0,0 @@
-import type { PageServerLoad } from './$types';
-
-export const load: PageServerLoad = () => {
-	const API_ENDPOINT = process.env.API_ENDPOINT;
-	return {
-		API_ENDPOINT
-	};
-};
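This deleted server `load` function appears to have been the previous mechanism for passing `API_ENDPOINT` to the page at request time via `process.env`; with the endpoint now resolved at build time through `$env/static/public` in `$lib/constants`, the server-provided `PageData` is no longer needed, which is why the page component below also drops its `data`/`PageData` plumbing.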
@@ -7,23 +7,13 @@
 	const { saveAs } = fileSaver;
 	import hljs from 'highlight.js';
 	import 'highlight.js/styles/dark.min.css';
+	import { API_ENDPOINT } from '$lib/constants';
-	import type { PageData } from './$types';
-	import { API_ENDPOINT as DEV_API_ENDPOINT } from '$lib/constants';
 	import { onMount, tick } from 'svelte';
-	import { page } from '$app/stores';
-	const suggestions = ''; // $page.url.searchParams.get('suggestions');

 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import SettingsModal from '$lib/components/chat/SettingsModal.svelte';

-	/* export let data: PageData; */
+	let suggestions = ''; // $page.url.searchParams.get('suggestions');
-	/* $: ({ API_ENDPOINT } = data); */
-	/* if (!API_ENDPOINT) { */
-	/* 	API_ENDPOINT = DEV_API_ENDPOINT; */
-	/* } */
-	/* console.log('API_ENDPOINT',API_ENDPOINT) */
-	/* console.log('DEV_API_ENDPOINT', DEV_API_ENDPOINT) */

 	let models = [];
 	let textareaElement;
@@ -41,20 +31,24 @@
 	let messages = [];

 	onMount(async () => {
-		/* console.log('API_ENDPOINT 2', API_ENDPOINT) */
-		const resp = await fetch(`${DEV_API_ENDPOINT}/tags`, {
+		console.log(API_ENDPOINT);
+		const res = await fetch(`${API_ENDPOINT}/tags`, {
 			method: 'GET',
 			headers: {
 				Accept: 'application/json',
 				'Content-Type': 'application/json'
 			}
+		})
+			.then(async (res) => {
+				if (!res.ok) throw await res.json();
+				return res.json();
+			})
+			.catch((error) => {
+				console.log(error);
+				return { models: [] };
 		});
-		if (!resp.ok) {
-			let msg = await resp.text();
-			let err = new Error(msg);
-			throw err;
-		}
-		const data = await resp.json();
+		const data = res;
 		models = data.models;

 		let settings = localStorage.getItem('settings');
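In the new `onMount` flow, the promise chain already resolves to parsed JSON (the `.then` returns `res.json()`) or to the `{ models: [] }` fallback from `.catch`, so `const data = res;` is just an alias and `models = data.models;` stays safe when the request fails. A rough equivalent written with try/catch, for clarity only (this sketch is not part of the commit; the function name is hypothetical and `API_ENDPOINT` would come from `$lib/constants`):

```ts
// Hypothetical helper mirroring the fallback behaviour of the fetch chain above.
async function fetchModels(apiEndpoint: string): Promise<{ models: unknown[] }> {
	try {
		const res = await fetch(`${apiEndpoint}/tags`, {
			method: 'GET',
			headers: { Accept: 'application/json', 'Content-Type': 'application/json' }
		});
		// Mirror `if (!res.ok) throw await res.json();` from the commit.
		if (!res.ok) throw await res.json();
		return await res.json();
	} catch (error) {
		console.log(error);
		// Same fallback shape as the .catch above, so callers can always read `.models`.
		return { models: [] };
	}
}
```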