fix: ollama custom url support

Author: Timothy J. Baek
Date:   2023-11-06 01:12:27 -08:00
Parent: c8cab48abf
Commit: 3b3b7fb46a
2 changed files with 38 additions and 33 deletions

File 1 of 2:

@@ -65,7 +65,7 @@
 if (API_BASE_URL === '') {
 API_BASE_URL = BUILD_TIME_API_BASE_URL;
 }
-const res = await getModelTags(API_BASE_URL);
+const res = await getModelTags(API_BASE_URL, 'ollama');
 if (res) {
 toast.success('Server connection verified');
@@ -774,7 +774,7 @@
 <div>
 <a href="https://github.com/ollama-webui/ollama-webui">
 <img
-alt="followers"
+alt="Github Repo"
 src="https://img.shields.io/github/stars/ollama-webui/ollama-webui?style=social&label=Star us on Github"
 />
 </a>
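
In the first hunk, the "verify connection" check now calls getModelTags(API_BASE_URL, 'ollama') so that only the Ollama endpoint is queried; the default type would also fold in OpenAI models, which is beside the point when the goal is just to confirm that the custom URL is reachable. A minimal sketch of that handler follows; the name checkOllamaConnection and the error toast are illustrative assumptions, since the hunk only shows the middle of the function.

const checkOllamaConnection = async () => {
	// Fall back to the build-time default when no custom URL has been saved.
	if (API_BASE_URL === '') {
		API_BASE_URL = BUILD_TIME_API_BASE_URL;
	}

	// 'ollama' tells getModelTags to return the raw tag list and skip OpenAI.
	const res = await getModelTags(API_BASE_URL, 'ollama');

	if (res) {
		toast.success('Server connection verified');
	} else {
		toast.error(`Could not reach the Ollama server at ${API_BASE_URL}`);
	}
};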

File 2 of 2:

@@ -236,6 +236,7 @@
 console.log(updated);
 settings = { ...settings, ...updated };
 localStorage.setItem('settings', JSON.stringify(settings));
+API_BASE_URL = updated?.API_BASE_URL ?? API_BASE_URL;
 await getModelTags();
 };
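
This one-line addition is what makes a newly saved custom URL take effect immediately: after the settings are written to localStorage, the in-memory API_BASE_URL is updated before getModelTags() re-fetches the model list. A hedged usage sketch, assuming the enclosing handler is called saveSettings (its name and signature are not shown in the hunk) and using the common local Ollama default purely as an example URL:

// From the settings modal's save action (names illustrative).
await saveSettings({ API_BASE_URL: 'http://localhost:11434/api' });
// settings.API_BASE_URL and the component-level API_BASE_URL now agree,
// and the getModelTags() call inside the handler already hit the new endpoint.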
@@ -374,7 +375,7 @@
 // Ollama functions
 //////////////////////////
-const getModelTags = async (url = null) => {
+const getModelTags = async (url = null, type = 'all') => {
 const res = await fetch(`${url === null ? API_BASE_URL : url}/tags`, {
 method: 'GET',
 headers: {
@@ -394,6 +395,7 @@
 console.log(res);
+if (type === 'all') {
 if (settings.OPENAI_API_KEY) {
 // Validate OPENAI_API_KEY
 const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
@@ -431,6 +433,9 @@
 }
 return models;
+} else {
+return res?.models ?? null;
+}
 };
 const sendPrompt = async (userPrompt) => {
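
Taken together, these hunks give getModelTags two modes: the default type 'all' returns the Ollama tags merged with OpenAI models when an API key is configured, while any other type (the settings screen passes 'ollama') returns just the raw Ollama tag list, or null when the server is unreachable. A consolidated sketch of the function after this commit is below; API_BASE_URL and settings come from the enclosing Svelte component, and the fetch/error handling and OpenAI merge are reconstructed from the surrounding context rather than quoted from the diff, so treat the elided parts as assumptions.

const getModelTags = async (url = null, type = 'all') => {
	// Query the tags endpoint of either the supplied url or the saved API_BASE_URL.
	const res = await fetch(`${url === null ? API_BASE_URL : url}/tags`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json'
		}
	})
		.then((res) => res.json())
		.catch((err) => {
			console.log(err);
			return null;
		});

	console.log(res);

	if (type === 'all') {
		// Combined list: Ollama tags plus OpenAI models when a key is set.
		let models = res?.models ?? [];

		if (settings.OPENAI_API_KEY) {
			// Validate OPENAI_API_KEY and append the OpenAI model list.
			const openaiModelRes = await fetch('https://api.openai.com/v1/models', {
				headers: { Authorization: `Bearer ${settings.OPENAI_API_KEY}` }
			})
				.then((res) => res.json())
				.catch(() => null);

			if (openaiModelRes?.data) {
				models = [...models, ...openaiModelRes.data];
			}
		}

		return models;
	} else {
		// type !== 'all' (e.g. 'ollama'): only the raw tag list, or null if the
		// request failed, which is exactly what the connection check needs.
		return res?.models ?? null;
	}
};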