feat: enable backend ollama url update

Timothy J. Baek 2024-01-04 13:06:31 -08:00
parent bdb3f50771
commit 30aff2db53
13 changed files with 554 additions and 409 deletions
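
The change set below moves the Ollama proxy from Flask to FastAPI and adds two admin-only endpoints, GET /url and POST /url/update, so the backend's Ollama API base URL can be read and changed at runtime instead of being fixed by configuration at startup. A minimal sketch of exercising those endpoints from a script (the host, port, and token are placeholder assumptions, not part of this commit):

# Illustrative only; assumes the WebUI backend listens on localhost:8080 and
# that "<admin-jwt>" stands in for a valid token of an admin account.
import requests

BASE = "http://localhost:8080/ollama/api"
HEADERS = {"Authorization": "Bearer <admin-jwt>"}

# Read the currently configured Ollama API base URL (admin only).
current = requests.get(f"{BASE}/url", headers=HEADERS).json()
print(current["OLLAMA_API_BASE_URL"])

# Point the backend at a different Ollama instance without a restart.
updated = requests.post(
    f"{BASE}/url/update",
    json={"url": "http://ollama.internal:11434/api"},
    headers=HEADERS,
)
print(updated.json()["OLLAMA_API_BASE_URL"])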

View file

@@ -2,12 +2,6 @@
FROM node:alpine as build
ARG OLLAMA_API_BASE_URL='/ollama/api'
RUN echo $OLLAMA_API_BASE_URL
ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
RUN echo $PUBLIC_API_BASE_URL
WORKDIR /app
COPY package.json package-lock.json ./

View file

@@ -1,69 +1,68 @@
from flask import Flask, request, Response, jsonify
from flask_cors import CORS
from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
import requests
import json
from pydantic import BaseModel
from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token
from utils.utils import decode_token, get_current_user
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
app = Flask(__name__)
CORS(
app
) # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains
app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Define the target server URL
TARGET_SERVER_URL = OLLAMA_API_BASE_URL
app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"])
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
def proxy(path):
# Combine the base URL of the target server with the requested path
target_url = f"{TARGET_SERVER_URL}/{path}"
print(target_url)
@app.get("/url")
async def get_ollama_api_url(user=Depends(get_current_user)):
if user and user.role == "admin":
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
# Get data from the original request
data = request.get_data()
class UrlUpdateForm(BaseModel):
url: str
@app.post("/url/update")
async def update_ollama_api_url(
form_data: UrlUpdateForm, user=Depends(get_current_user)
):
if user and user.role == "admin":
app.state.OLLAMA_API_BASE_URL = form_data.url
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
body = await request.body()
headers = dict(request.headers)
# Basic RBAC support
if WEBUI_AUTH:
if "Authorization" in headers:
_, credentials = headers["Authorization"].split()
token_data = decode_token(credentials)
if token_data is None or "email" not in token_data:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
user = Users.get_user_by_email(token_data["email"])
if user:
# Only user and admin roles can access
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
# Only admin role can perform actions above
if user.role == "admin":
pass
else:
return (
jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
401,
)
else:
pass
else:
return jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
if user.role != "admin":
raise HTTPException(
status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
)
else:
pass
r = None
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
headers.pop("Host", None)
headers.pop("Authorization", None)
@@ -71,49 +70,30 @@ def proxy(path):
headers.pop("Referer", None)
try:
# Make a request to the target server
r = requests.request(
method=request.method,
url=target_url,
data=data,
data=body,
headers=headers,
stream=True, # Enable streaming for server-sent events
stream=True,
)
r.raise_for_status()
# Proxy the target server's response to the client
def generate():
for chunk in r.iter_content(chunk_size=8192):
yield chunk
response = Response(generate(), status=r.status_code)
# Copy headers from the target server's response to the client's response
for key, value in r.headers.items():
response.headers[key] = value
return response
return StreamingResponse(
r.iter_content(chunk_size=8192),
status_code=r.status_code,
headers=dict(r.headers),
)
except Exception as e:
print(e)
error_detail = "Ollama WebUI: Server Connection Error"
if r != None:
print(r.text)
res = r.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
print(res)
if r is not None:
try:
res = r.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
except:
error_detail = f"Ollama: {e}"
return (
jsonify(
{
"detail": error_detail,
"message": str(e),
}
),
400,
)
if __name__ == "__main__":
app.run(debug=True)
raise HTTPException(status_code=r.status_code, detail=error_detail)
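
The important shift above is that the proxy target now lives on app.state rather than in a module-level constant captured at import time: the admin endpoints overwrite app.state.OLLAMA_API_BASE_URL, and the proxy route reads it on every request, so an update takes effect immediately without restarting the server. A stripped-down sketch of that pattern (illustrative only, not the project's module; the /echo route exists purely to show the effect):

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()
app.state.OLLAMA_API_BASE_URL = "http://localhost:11434/api"  # startup default

class UrlForm(BaseModel):
    url: str

@app.post("/url/update")
async def update_url(form: UrlForm):
    app.state.OLLAMA_API_BASE_URL = form.url  # mutate shared app state
    return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}

@app.get("/echo")
async def echo():
    # Later requests read the updated value; no restart needed.
    return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}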

View file

@@ -0,0 +1,176 @@
from flask import Flask, request, Response, jsonify
from flask_cors import CORS
import requests
import json
from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
app = Flask(__name__)
CORS(
app
) # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains
# Define the target server URL
TARGET_SERVER_URL = OLLAMA_API_BASE_URL
@app.route("/url", methods=["GET"])
def get_ollama_api_url():
headers = dict(request.headers)
if "Authorization" in headers:
_, credentials = headers["Authorization"].split()
token_data = decode_token(credentials)
if token_data is None or "email" not in token_data:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
user = Users.get_user_by_email(token_data["email"])
if user and user.role == "admin":
return (
jsonify({"OLLAMA_API_BASE_URL": TARGET_SERVER_URL}),
200,
)
else:
return (
jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
401,
)
else:
return (
jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}),
401,
)
@app.route("/url/update", methods=["POST"])
def update_ollama_api_url():
headers = dict(request.headers)
data = request.get_json(force=True)
if "Authorization" in headers:
_, credentials = headers["Authorization"].split()
token_data = decode_token(credentials)
if token_data is None or "email" not in token_data:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
user = Users.get_user_by_email(token_data["email"])
if user and user.role == "admin":
TARGET_SERVER_URL = data["url"]
return (
jsonify({"OLLAMA_API_BASE_URL": TARGET_SERVER_URL}),
200,
)
else:
return (
jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
401,
)
else:
return (
jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}),
401,
)
@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"])
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
def proxy(path):
# Combine the base URL of the target server with the requested path
target_url = f"{TARGET_SERVER_URL}/{path}"
print(target_url)
# Get data from the original request
data = request.get_data()
headers = dict(request.headers)
# Basic RBAC support
if WEBUI_AUTH:
if "Authorization" in headers:
_, credentials = headers["Authorization"].split()
token_data = decode_token(credentials)
if token_data is None or "email" not in token_data:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
user = Users.get_user_by_email(token_data["email"])
if user:
# Only user and admin roles can access
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
# Only admin role can perform actions above
if user.role == "admin":
pass
else:
return (
jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
401,
)
else:
pass
else:
return jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
else:
pass
r = None
headers.pop("Host", None)
headers.pop("Authorization", None)
headers.pop("Origin", None)
headers.pop("Referer", None)
try:
# Make a request to the target server
r = requests.request(
method=request.method,
url=target_url,
data=data,
headers=headers,
stream=True, # Enable streaming for server-sent events
)
r.raise_for_status()
# Proxy the target server's response to the client
def generate():
for chunk in r.iter_content(chunk_size=8192):
yield chunk
response = Response(generate(), status=r.status_code)
# Copy headers from the target server's response to the client's response
for key, value in r.headers.items():
response.headers[key] = value
return response
except Exception as e:
print(e)
error_detail = "Ollama WebUI: Server Connection Error"
if r != None:
print(r.text)
res = r.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
print(res)
return (
jsonify(
{
"detail": error_detail,
"message": str(e),
}
),
400,
)
if __name__ == "__main__":
app.run(debug=True)

View file

@@ -46,5 +46,7 @@ async def check_url(request: Request, call_next):
app.mount("/api/v1", webui_app)
app.mount("/ollama/api", WSGIMiddleware(ollama_app))
# app.mount("/ollama/api", WSGIMiddleware(ollama_app))
app.mount("/ollama/api", ollama_app)
app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files")

View file

@@ -1,12 +1,76 @@
import { OLLAMA_API_BASE_URL } from '$lib/constants';
export const getOllamaVersion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaAPIUrl = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/version`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
url: url
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const getOllamaVersion = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
method: 'GET',
headers: {
Accept: 'application/json',
@@ -35,13 +99,10 @@ export const getOllamaVersion = async (
return res?.version ?? '';
};
export const getOllamaModels = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/tags`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
method: 'GET',
headers: {
Accept: 'application/json',
@@ -72,15 +133,10 @@ export const getOllamaModels = async (
});
};
export const generateTitle = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
model: string,
prompt: string
) => {
export const generateTitle = async (token: string = '', model: string, prompt: string) => {
let error = null;
const res = await fetch(`${base_url}/generate`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@@ -111,14 +167,10 @@ export const generateTitle = async (
return res?.response ?? 'New Chat';
};
export const generateChatCompletion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
body: object
) => {
export const generateChatCompletion = async (token: string = '', body: object) => {
let error = null;
const res = await fetch(`${base_url}/chat`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@@ -137,15 +189,10 @@ export const generateChatCompletion = async (
return res;
};
export const createModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string,
content: string
) => {
export const createModel = async (token: string, tagName: string, content: string) => {
let error = null;
const res = await fetch(`${base_url}/create`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@@ -167,14 +214,10 @@ export const createModel = async (
return res;
};
export const deleteModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string
) => {
export const deleteModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${base_url}/delete`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
@@ -204,3 +247,27 @@ export const deleteModel = async (
return res;
};
export const pullModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName
})
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
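
The rewritten helpers above now always target OLLAMA_API_BASE_URL and pass only the caller's token; the per-call base_url parameter is gone because the backend now decides where Ollama lives. The new pullModel() returns the raw response so the UI can parse Ollama's newline-delimited JSON progress events itself. A sketch of what that stream looks like from a consumer's point of view (host, token, and model tag are placeholder assumptions):

# Illustrative only; mirrors the stream that pullModel() and the Svelte pull handler consume.
import json
import requests

BASE = "http://localhost:8080/ollama/api"
HEADERS = {"Authorization": "Bearer <admin-jwt>"}

with requests.post(f"{BASE}/pull", json={"name": "llama2:7b"},
                   headers=HEADERS, stream=True) as r:
    r.raise_for_status()
    for line in r.iter_lines():
        if not line:
            continue
        data = json.loads(line)
        if "error" in data:
            raise RuntimeError(data["error"])
        # Progress events carry a status plus optional digest/completed/total fields.
        if data.get("total") and data.get("completed"):
            pct = round(data["completed"] / data["total"] * 100, 1)
            print(f"{data.get('status', '')} {pct}%")
        else:
            print(data.get("status", ""))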

View file

@@ -7,19 +7,23 @@
import { config, models, settings, user, chats } from '$lib/stores';
import { splitStream, getGravatarURL } from '$lib/utils';
import { getOllamaVersion, getOllamaModels } from '$lib/apis/ollama';
import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
import {
WEB_UI_VERSION,
OLLAMA_API_BASE_URL,
WEBUI_API_BASE_URL,
WEBUI_BASE_URL
} from '$lib/constants';
getOllamaVersion,
getOllamaModels,
getOllamaAPIUrl,
updateOllamaAPIUrl,
pullModel,
createModel,
deleteModel
} from '$lib/apis/ollama';
import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
import { WEB_UI_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import Advanced from './Settings/Advanced.svelte';
import Modal from '../common/Modal.svelte';
import { updateUserPassword } from '$lib/apis/auths';
import { goto } from '$app/navigation';
import Page from '../../../routes/(app)/+page.svelte';
export let show = false;
@@ -33,7 +37,7 @@
let selectedTab = 'general';
// General
let API_BASE_URL = OLLAMA_API_BASE_URL;
let API_BASE_URL = '';
let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light'];
let theme = 'dark';
let notificationEnabled = false;
@@ -139,19 +143,13 @@
// About
let ollamaVersion = '';
const checkOllamaConnection = async () => {
if (API_BASE_URL === '') {
API_BASE_URL = OLLAMA_API_BASE_URL;
}
const _models = await getModels(API_BASE_URL, 'ollama');
const updateOllamaAPIUrlHandler = async () => {
API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);
const _models = await getModels('ollama');
if (_models.length > 0) {
toast.success('Server connection verified');
await models.set(_models);
saveSettings({
API_BASE_URL: API_BASE_URL
});
}
};
@@ -229,67 +227,60 @@
const pullModelHandler = async () => {
modelTransferring = true;
const res = await fetch(`${API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: modelTag
})
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
const res = await pullModel(localStorage.token, modelTag);
while (true) {
const { value, done } = await reader.read();
if (done) break;
if (res) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
try {
let lines = value.split('\n');
while (true) {
const { value, done } = await reader.read();
if (done) break;
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
try {
let lines = value.split('\n');
if (data.error) {
throw data.error;
}
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
if (data.detail) {
throw data.detail;
}
if (data.status) {
if (!data.digest) {
toast.success(data.status);
if (data.error) {
throw data.error;
}
if (data.status === 'success') {
const notification = new Notification(`Ollama`, {
body: `Model '${modelTag}' has been successfully downloaded.`,
icon: '/favicon.png'
});
}
} else {
digest = data.digest;
if (data.completed) {
pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
if (data.detail) {
throw data.detail;
}
if (data.status) {
if (!data.digest) {
toast.success(data.status);
if (data.status === 'success') {
const notification = new Notification(`Ollama`, {
body: `Model '${modelTag}' has been successfully downloaded.`,
icon: '/favicon.png'
});
}
} else {
pullProgress = 100;
digest = data.digest;
if (data.completed) {
pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
} else {
pullProgress = 100;
}
}
}
}
}
} catch (error) {
console.log(error);
toast.error(error);
}
} catch (error) {
console.log(error);
toast.error(error);
}
}
@@ -410,21 +401,11 @@
}
if (uploaded) {
const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: `${name}:latest`,
modelfile: `FROM @${modelFileDigest}\n${modelFileContent}`
})
}).catch((err) => {
console.log(err);
return null;
});
const res = await createModel(
localStorage.token,
`${name}:latest`,
`FROM @${modelFileDigest}\n${modelFileContent}`
);
if (res && res.ok) {
const reader = res.body
@@ -490,52 +471,44 @@
};
const deleteModelHandler = async () => {
const res = await fetch(`${API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: deleteModelTag
})
});
const res = await deleteModel(localStorage.token, deleteModelTag);
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
if (res) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done) break;
while (true) {
const { value, done } = await reader.read();
if (done) break;
try {
let lines = value.split('\n');
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '' && line !== 'null') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
for (const line of lines) {
if (line !== '' && line !== 'null') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
if (data.error) {
throw data.error;
if (data.error) {
throw data.error;
}
if (data.detail) {
throw data.detail;
}
if (data.status) {
}
} else {
toast.success(`Deleted ${deleteModelTag}`);
}
if (data.detail) {
throw data.detail;
}
if (data.status) {
}
} else {
toast.success(`Deleted ${deleteModelTag}`);
}
} catch (error) {
console.log(error);
toast.error(error);
}
} catch (error) {
console.log(error);
toast.error(error);
}
}
@@ -543,13 +516,10 @@
models.set(await getModels());
};
const getModels = async (url = '', type = 'all') => {
const getModels = async (type = 'all') => {
let models = [];
models.push(
...(await getOllamaModels(
url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
...(await getOllamaModels(localStorage.token).catch((error) => {
toast.error(error);
return [];
}))
@@ -557,10 +527,10 @@
// If OpenAI API Key exists
if (type === 'all' && $settings.OPENAI_API_KEY) {
const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
const OPENAI_API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
// Validate OPENAI_API_KEY
const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
const openaiModelRes = await fetch(`${OPENAI_API_BASE_URL}/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -588,7 +558,7 @@
...openAIModels
.map((model) => ({ name: model.id, external: true }))
.filter((model) =>
API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
OPENAI_API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
)
]
: [])
@@ -624,15 +594,18 @@
};
onMount(async () => {
console.log('settings', $user.role === 'admin');
if ($user.role === 'admin') {
API_BASE_URL = await getOllamaAPIUrl(localStorage.token);
}
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
console.log(settings);
theme = localStorage.theme ?? 'dark';
notificationEnabled = settings.notificationEnabled ?? false;
API_BASE_URL = settings.API_BASE_URL ?? OLLAMA_API_BASE_URL;
system = settings.system ?? '';
requestFormat = settings.requestFormat ?? '';
options.seed = settings.seed ?? 0;
@@ -659,10 +632,7 @@
authContent = settings.authHeader.split(' ')[1];
}
ollamaVersion = await getOllamaVersion(
API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
return '';
});
});
@@ -1026,51 +996,53 @@
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter URL (e.g. http://localhost:8080/ollama/api)"
bind:value={API_BASE_URL}
/>
</div>
<button
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
on:click={() => {
checkOllamaConnection();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
{#if $user.role === 'admin'}
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter URL (e.g. http://localhost:8080/ollama/api)"
bind:value={API_BASE_URL}
/>
</svg>
</button>
</div>
</div>
<button
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
on:click={() => {
updateOllamaAPIUrlHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
The field above should be set to <span
class=" text-gray-500 dark:text-gray-300 font-medium">'/ollama/api'</span
>;
<a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
target="_blank"
>
Click here for help.
</a>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
The field above should be set to <span
class=" text-gray-500 dark:text-gray-300 font-medium">'/ollama/api'</span
>;
<a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
</div>
{/if}
<hr class=" dark:border-gray-700" />
@@ -1088,7 +1060,6 @@
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
on:click={() => {
saveSettings({
API_BASE_URL: API_BASE_URL === '' ? OLLAMA_API_BASE_URL : API_BASE_URL,
system: system !== '' ? system : undefined
});
show = false;

View file

@@ -1,13 +1,8 @@
import { dev, browser } from '$app/environment';
import { PUBLIC_API_BASE_URL } from '$env/static/public';
import { dev } from '$app/environment';
export const OLLAMA_API_BASE_URL = dev
? `http://${location.hostname}:8080/ollama/api`
: PUBLIC_API_BASE_URL === ''
? browser
? `http://${location.hostname}:11434/api`
: `http://localhost:11434/api`
: PUBLIC_API_BASE_URL;
: '/ollama/api';
export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;

View file

@@ -14,7 +14,7 @@
import { getOpenAIModels } from '$lib/apis/openai';
import { user, showSettings, settings, models, modelfiles, prompts } from '$lib/stores';
import { OLLAMA_API_BASE_URL, REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import SettingsModal from '$lib/components/chat/SettingsModal.svelte';
import Sidebar from '$lib/components/layout/Sidebar.svelte';
@@ -32,10 +32,7 @@
const getModels = async () => {
let models = [];
models.push(
...(await getOllamaModels(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
...(await getOllamaModels(localStorage.token).catch((error) => {
toast.error(error);
return [];
}))
@@ -58,10 +55,7 @@
const setOllamaVersion = async (version: string = '') => {
if (version === '') {
version = await getOllamaVersion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
version = await getOllamaVersion(localStorage.token).catch((error) => {
return '';
});
}

View file

@@ -7,7 +7,6 @@
import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { copyToClipboard, splitStream } from '$lib/utils';
@@ -163,36 +162,32 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
{
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
}
);
const res = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
});
if (res && res.ok) {
const reader = res.body
@@ -595,7 +590,6 @@
const generateChatTitle = async (_chatId, userPrompt) => {
if ($settings.titleAutoGenerate ?? true) {
const title = await generateTitle(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
$settings?.titleAutoGenerateModel ?? selectedModels[0],
userPrompt

View file

@@ -7,7 +7,6 @@
import { page } from '$app/stores';
import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { copyToClipboard, splitStream } from '$lib/utils';
@@ -180,36 +179,32 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
{
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
}
);
const res = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
});
if (res && res.ok) {
const reader = res.body
@@ -611,12 +606,7 @@
const generateChatTitle = async (_chatId, userPrompt) => {
if ($settings.titleAutoGenerate ?? true) {
const title = await generateTitle(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
selectedModels[0],
userPrompt
);
const title = await generateTitle(localStorage.token, selectedModels[0], userPrompt);
if (title) {
await setChatTitle(_chatId, title);

View file

@@ -6,7 +6,6 @@
import { onMount } from 'svelte';
import { modelfiles, settings, user } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { createModel, deleteModel } from '$lib/apis/ollama';
import {
createNewModelfile,
@@ -20,11 +19,7 @@
const deleteModelHandler = async (tagName) => {
let success = null;
success = await deleteModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName
);
success = await deleteModel(localStorage.token, tagName);
if (success) {
toast.success(`Deleted ${tagName}`);

View file

@@ -2,7 +2,6 @@
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-french-toast';
import { goto } from '$app/navigation';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { settings, user, config, modelfiles, models } from '$lib/stores';
import Advanced from '$lib/components/chat/Settings/Advanced.svelte';
@@ -132,12 +131,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
Object.keys(categories).filter((category) => categories[category]).length > 0 &&
!$models.includes(tagName)
) {
const res = await createModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName,
content
);
const res = await createModel(localStorage.token, tagName, content);
if (res) {
const reader = res.body

View file

@@ -7,8 +7,6 @@
import { page } from '$app/stores';
import { settings, user, config, modelfiles } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { splitStream } from '$lib/utils';
import { createModel } from '$lib/apis/ollama';
@@ -104,12 +102,7 @@
content !== '' &&
Object.keys(categories).filter((category) => categories[category]).length > 0
) {
const res = await createModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName,
content
);
const res = await createModel(localStorage.token, tagName, content);
if (res) {
const reader = res.body