external ollama server support added

This commit is contained in:
Timothy J. Baek 2023-10-18 02:59:00 -07:00
parent 7cc79f01cf
commit 52c563c6a0
3 changed files with 4 additions and 3 deletions

View file

@@ -13,6 +13,7 @@ ChatGPT-Style Web Interface for Ollama 🦙
- 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
- 📜 **Chat History**: Effortlessly access and manage your conversation history.
- 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
- 🔗 **External Server Connection**: Connect to a model when Ollama is hosted on a different server by setting the environment variable, e.g. `-e OLLAMA_ENDPOINT="http://[insert your Ollama address]"`.
- 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.
## How to Install 🚀

View file

@@ -19,6 +19,7 @@ export const load: PageServerLoad = async ({ url }) => {
});
return {
models: models?.models ?? []
models: models?.models ?? [],
ENDPOINT: ENDPOINT
};
};

View file

@@ -8,13 +8,12 @@
import 'highlight.js/styles/dark.min.css';
import type { PageData } from './$types';
import { ENDPOINT } from '$lib/contants';
import { onMount, tick } from 'svelte';
import { openDB, deleteDB } from 'idb';
export let data: PageData;
$: ({ models } = data);
$: ({ models, ENDPOINT } = data);
let textareaElement;
let db;