diff --git a/Dockerfile b/Dockerfile index 2dd89813..7080d73b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,12 +2,6 @@ FROM node:alpine as build -ARG OLLAMA_API_BASE_URL='/ollama/api' -RUN echo $OLLAMA_API_BASE_URL - -ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL -RUN echo $PUBLIC_API_BASE_URL - WORKDIR /app COPY package.json package-lock.json ./ diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py index 7e138c39..f77f7ea1 100644 --- a/backend/apps/ollama/main.py +++ b/backend/apps/ollama/main.py @@ -1,69 +1,68 @@ -from flask import Flask, request, Response, jsonify -from flask_cors import CORS - +from fastapi import FastAPI, Request, Response, HTTPException, Depends +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import StreamingResponse import requests import json - +from pydantic import BaseModel from apps.web.models.users import Users from constants import ERROR_MESSAGES -from utils.utils import decode_token +from utils.utils import decode_token, get_current_user from config import OLLAMA_API_BASE_URL, WEBUI_AUTH -app = Flask(__name__) -CORS( - app -) # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains +app = FastAPI() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) -# Define the target server URL -TARGET_SERVER_URL = OLLAMA_API_BASE_URL +app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL + +# TARGET_SERVER_URL = OLLAMA_API_BASE_URL -@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"]) -@app.route("/", methods=["GET", "POST", "PUT", "DELETE"]) -def proxy(path): - # Combine the base URL of the target server with the requested path - target_url = f"{TARGET_SERVER_URL}/{path}" - print(target_url) +@app.get("/url") +async def get_ollama_api_url(user=Depends(get_current_user)): + if user and user.role == "admin": + return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL} + else: + raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED) - # Get data from the original request - data = request.get_data() + +class UrlUpdateForm(BaseModel): + url: str + + +@app.post("/url/update") +async def update_ollama_api_url( + form_data: UrlUpdateForm, user=Depends(get_current_user) +): + if user and user.role == "admin": + app.state.OLLAMA_API_BASE_URL = form_data.url + return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL} + else: + raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED) + + +@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"]) +async def proxy(path: str, request: Request, user=Depends(get_current_user)): + target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}" + + body = await request.body() headers = dict(request.headers) - # Basic RBAC support - if WEBUI_AUTH: - if "Authorization" in headers: - _, credentials = headers["Authorization"].split() - token_data = decode_token(credentials) - if token_data is None or "email" not in token_data: - return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 - - user = Users.get_user_by_email(token_data["email"]) - if user: - # Only user and admin roles can access - if user.role in ["user", "admin"]: - if path in ["pull", "delete", "push", "copy", "create"]: - # Only admin role can perform actions above - if user.role == "admin": - pass - else: - return ( - jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), - 401, - ) - else: - pass - else: - return jsonify({"detail": 
ERROR_MESSAGES.ACCESS_PROHIBITED}), 401 - else: - return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 - else: - return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + if user.role in ["user", "admin"]: + if path in ["pull", "delete", "push", "copy", "create"]: + if user.role != "admin": + raise HTTPException( + status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED + ) else: - pass - - r = None + raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED) headers.pop("Host", None) headers.pop("Authorization", None) @@ -71,49 +70,30 @@ def proxy(path): headers.pop("Referer", None) try: - # Make a request to the target server r = requests.request( method=request.method, url=target_url, - data=data, + data=body, headers=headers, - stream=True, # Enable streaming for server-sent events + stream=True, ) r.raise_for_status() - # Proxy the target server's response to the client - def generate(): - for chunk in r.iter_content(chunk_size=8192): - yield chunk - - response = Response(generate(), status=r.status_code) - - # Copy headers from the target server's response to the client's response - for key, value in r.headers.items(): - response.headers[key] = value - - return response + return StreamingResponse( + r.iter_content(chunk_size=8192), + status_code=r.status_code, + headers=dict(r.headers), + ) except Exception as e: print(e) error_detail = "Ollama WebUI: Server Connection Error" - if r != None: - print(r.text) - res = r.json() - if "error" in res: - error_detail = f"Ollama: {res['error']}" - print(res) + if r is not None: + try: + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + except: + error_detail = f"Ollama: {e}" - return ( - jsonify( - { - "detail": error_detail, - "message": str(e), - } - ), - 400, - ) - - -if __name__ == "__main__": - app.run(debug=True) + raise HTTPException(status_code=r.status_code, detail=error_detail) diff --git a/backend/apps/ollama/old_main.py b/backend/apps/ollama/old_main.py new file mode 100644 index 00000000..2e680093 --- /dev/null +++ b/backend/apps/ollama/old_main.py @@ -0,0 +1,176 @@ +from flask import Flask, request, Response, jsonify +from flask_cors import CORS + + +import requests +import json + + +from apps.web.models.users import Users +from constants import ERROR_MESSAGES +from utils.utils import decode_token +from config import OLLAMA_API_BASE_URL, WEBUI_AUTH + +app = Flask(__name__) +CORS( + app +) # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains + +# Define the target server URL +TARGET_SERVER_URL = OLLAMA_API_BASE_URL + + +@app.route("/url", methods=["GET"]) +def get_ollama_api_url(): + headers = dict(request.headers) + if "Authorization" in headers: + _, credentials = headers["Authorization"].split() + token_data = decode_token(credentials) + if token_data is None or "email" not in token_data: + return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + + user = Users.get_user_by_email(token_data["email"]) + if user and user.role == "admin": + return ( + jsonify({"OLLAMA_API_BASE_URL": TARGET_SERVER_URL}), + 200, + ) + else: + return ( + jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), + 401, + ) + else: + return ( + jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), + 401, + ) + + +@app.route("/url/update", methods=["POST"]) +def update_ollama_api_url(): + headers = dict(request.headers) + data = request.get_json(force=True) + + if "Authorization" in headers: + _, credentials = headers["Authorization"].split() + token_data = 
decode_token(credentials) + if token_data is None or "email" not in token_data: + return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + + user = Users.get_user_by_email(token_data["email"]) + if user and user.role == "admin": + TARGET_SERVER_URL = data["url"] + return ( + jsonify({"OLLAMA_API_BASE_URL": TARGET_SERVER_URL}), + 200, + ) + else: + return ( + jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), + 401, + ) + else: + return ( + jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), + 401, + ) + + +@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"]) +@app.route("/", methods=["GET", "POST", "PUT", "DELETE"]) +def proxy(path): + # Combine the base URL of the target server with the requested path + target_url = f"{TARGET_SERVER_URL}/{path}" + print(target_url) + + # Get data from the original request + data = request.get_data() + headers = dict(request.headers) + + # Basic RBAC support + if WEBUI_AUTH: + if "Authorization" in headers: + _, credentials = headers["Authorization"].split() + token_data = decode_token(credentials) + if token_data is None or "email" not in token_data: + return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + + user = Users.get_user_by_email(token_data["email"]) + if user: + # Only user and admin roles can access + if user.role in ["user", "admin"]: + if path in ["pull", "delete", "push", "copy", "create"]: + # Only admin role can perform actions above + if user.role == "admin": + pass + else: + return ( + jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), + 401, + ) + else: + pass + else: + return jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), 401 + else: + return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + else: + return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401 + else: + pass + + r = None + + headers.pop("Host", None) + headers.pop("Authorization", None) + headers.pop("Origin", None) + headers.pop("Referer", None) + + try: + # Make a request to the target server + r = requests.request( + method=request.method, + url=target_url, + data=data, + headers=headers, + stream=True, # Enable streaming for server-sent events + ) + + r.raise_for_status() + + # Proxy the target server's response to the client + def generate(): + for chunk in r.iter_content(chunk_size=8192): + yield chunk + + response = Response(generate(), status=r.status_code) + + # Copy headers from the target server's response to the client's response + for key, value in r.headers.items(): + response.headers[key] = value + + return response + except Exception as e: + print(e) + error_detail = "Ollama WebUI: Server Connection Error" + if r != None: + print(r.text) + res = r.json() + if "error" in res: + error_detail = f"Ollama: {res['error']}" + print(res) + + return ( + jsonify( + { + "detail": error_detail, + "message": str(e), + } + ), + 400, + ) + + +if __name__ == "__main__": + app.run(debug=True) diff --git a/backend/main.py b/backend/main.py index 24bad0c9..5e3b7e83 100644 --- a/backend/main.py +++ b/backend/main.py @@ -46,5 +46,7 @@ async def check_url(request: Request, call_next): app.mount("/api/v1", webui_app) -app.mount("/ollama/api", WSGIMiddleware(ollama_app)) +# app.mount("/ollama/api", WSGIMiddleware(ollama_app)) +app.mount("/ollama/api", ollama_app) + app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files") diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts index 75a02a8b..e0048fc6 100644 --- a/src/lib/apis/ollama/index.ts +++ 
b/src/lib/apis/ollama/index.ts @@ -1,12 +1,76 @@ import { OLLAMA_API_BASE_URL } from '$lib/constants'; -export const getOllamaVersion = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string = '' -) => { +export const getOllamaAPIUrl = async (token: string = '') => { let error = null; - const res = await fetch(`${base_url}/version`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + } + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OLLAMA_API_BASE_URL; +}; + +export const updateOllamaAPIUrl = async (token: string = '', url: string) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + ...(token && { authorization: `Bearer ${token}` }) + }, + body: JSON.stringify({ + url: url + }) + }) + .then(async (res) => { + if (!res.ok) throw await res.json(); + return res.json(); + }) + .catch((err) => { + console.log(err); + if ('detail' in err) { + error = err.detail; + } else { + error = 'Server connection failed'; + } + return null; + }); + + if (error) { + throw error; + } + + return res.OLLAMA_API_BASE_URL; +}; + +export const getOllamaVersion = async (token: string = '') => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, { method: 'GET', headers: { Accept: 'application/json', @@ -35,13 +99,10 @@ export const getOllamaVersion = async ( return res?.version ?? ''; }; -export const getOllamaModels = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string = '' -) => { +export const getOllamaModels = async (token: string = '') => { let error = null; - const res = await fetch(`${base_url}/tags`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, { method: 'GET', headers: { Accept: 'application/json', @@ -72,15 +133,10 @@ export const getOllamaModels = async ( }); }; -export const generateTitle = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string = '', - model: string, - prompt: string -) => { +export const generateTitle = async (token: string = '', model: string, prompt: string) => { let error = null; - const res = await fetch(`${base_url}/generate`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream', @@ -111,14 +167,10 @@ export const generateTitle = async ( return res?.response ?? 
'New Chat'; }; -export const generateChatCompletion = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string = '', - body: object -) => { +export const generateChatCompletion = async (token: string = '', body: object) => { let error = null; - const res = await fetch(`${base_url}/chat`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream', @@ -137,15 +189,10 @@ export const generateChatCompletion = async ( return res; }; -export const createModel = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string, - tagName: string, - content: string -) => { +export const createModel = async (token: string, tagName: string, content: string) => { let error = null; - const res = await fetch(`${base_url}/create`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, { method: 'POST', headers: { 'Content-Type': 'text/event-stream', @@ -167,14 +214,10 @@ export const createModel = async ( return res; }; -export const deleteModel = async ( - base_url: string = OLLAMA_API_BASE_URL, - token: string, - tagName: string -) => { +export const deleteModel = async (token: string, tagName: string) => { let error = null; - const res = await fetch(`${base_url}/delete`, { + const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, { method: 'DELETE', headers: { 'Content-Type': 'text/event-stream', @@ -204,3 +247,27 @@ export const deleteModel = async ( return res; }; + +export const pullModel = async (token: string, tagName: string) => { + let error = null; + + const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, { + method: 'POST', + headers: { + 'Content-Type': 'text/event-stream', + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + name: tagName + }) + }).catch((err) => { + error = err; + return null; + }); + + if (error) { + throw error; + } + + return res; +}; diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index 7698407b..36367560 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -7,19 +7,23 @@ import { config, models, settings, user, chats } from '$lib/stores'; import { splitStream, getGravatarURL } from '$lib/utils'; - import { getOllamaVersion, getOllamaModels } from '$lib/apis/ollama'; - import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats'; import { - WEB_UI_VERSION, - OLLAMA_API_BASE_URL, - WEBUI_API_BASE_URL, - WEBUI_BASE_URL - } from '$lib/constants'; + getOllamaVersion, + getOllamaModels, + getOllamaAPIUrl, + updateOllamaAPIUrl, + pullModel, + createModel, + deleteModel + } from '$lib/apis/ollama'; + import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats'; + import { WEB_UI_VERSION, WEBUI_API_BASE_URL } from '$lib/constants'; import Advanced from './Settings/Advanced.svelte'; import Modal from '../common/Modal.svelte'; import { updateUserPassword } from '$lib/apis/auths'; import { goto } from '$app/navigation'; + import Page from '../../../routes/(app)/+page.svelte'; export let show = false; @@ -33,7 +37,7 @@ let selectedTab = 'general'; // General - let API_BASE_URL = OLLAMA_API_BASE_URL; + let API_BASE_URL = ''; let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light']; let theme = 'dark'; let notificationEnabled = false; @@ -139,19 +143,13 @@ // About let ollamaVersion = ''; - const checkOllamaConnection = async () => { - if (API_BASE_URL === '') { - API_BASE_URL = OLLAMA_API_BASE_URL; - 
} - const _models = await getModels(API_BASE_URL, 'ollama'); + const updateOllamaAPIUrlHandler = async () => { + API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL); + const _models = await getModels('ollama'); if (_models.length > 0) { toast.success('Server connection verified'); await models.set(_models); - - saveSettings({ - API_BASE_URL: API_BASE_URL - }); } }; @@ -229,67 +227,60 @@ const pullModelHandler = async () => { modelTransferring = true; - const res = await fetch(`${API_BASE_URL}/pull`, { - method: 'POST', - headers: { - 'Content-Type': 'text/event-stream', - ...($settings.authHeader && { Authorization: $settings.authHeader }), - ...($user && { Authorization: `Bearer ${localStorage.token}` }) - }, - body: JSON.stringify({ - name: modelTag - }) - }); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + const res = await pullModel(localStorage.token, modelTag); - while (true) { - const { value, done } = await reader.read(); - if (done) break; + if (res) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - try { - let lines = value.split('\n'); + while (true) { + const { value, done } = await reader.read(); + if (done) break; - for (const line of lines) { - if (line !== '') { - console.log(line); - let data = JSON.parse(line); - console.log(data); + try { + let lines = value.split('\n'); - if (data.error) { - throw data.error; - } + for (const line of lines) { + if (line !== '') { + console.log(line); + let data = JSON.parse(line); + console.log(data); - if (data.detail) { - throw data.detail; - } - if (data.status) { - if (!data.digest) { - toast.success(data.status); + if (data.error) { + throw data.error; + } - if (data.status === 'success') { - const notification = new Notification(`Ollama`, { - body: `Model '${modelTag}' has been successfully downloaded.`, - icon: '/favicon.png' - }); - } - } else { - digest = data.digest; - if (data.completed) { - pullProgress = Math.round((data.completed / data.total) * 1000) / 10; + if (data.detail) { + throw data.detail; + } + if (data.status) { + if (!data.digest) { + toast.success(data.status); + + if (data.status === 'success') { + const notification = new Notification(`Ollama`, { + body: `Model '${modelTag}' has been successfully downloaded.`, + icon: '/favicon.png' + }); + } } else { - pullProgress = 100; + digest = data.digest; + if (data.completed) { + pullProgress = Math.round((data.completed / data.total) * 1000) / 10; + } else { + pullProgress = 100; + } } } } } + } catch (error) { + console.log(error); + toast.error(error); } - } catch (error) { - console.log(error); - toast.error(error); } } @@ -410,21 +401,11 @@ } if (uploaded) { - const res = await fetch(`${$settings?.API_BASE_URL ?? 
OLLAMA_API_BASE_URL}/create`, { - method: 'POST', - headers: { - 'Content-Type': 'text/event-stream', - ...($settings.authHeader && { Authorization: $settings.authHeader }), - ...($user && { Authorization: `Bearer ${localStorage.token}` }) - }, - body: JSON.stringify({ - name: `${name}:latest`, - modelfile: `FROM @${modelFileDigest}\n${modelFileContent}` - }) - }).catch((err) => { - console.log(err); - return null; - }); + const res = await createModel( + localStorage.token, + `${name}:latest`, + `FROM @${modelFileDigest}\n${modelFileContent}` + ); if (res && res.ok) { const reader = res.body @@ -490,52 +471,44 @@ }; const deleteModelHandler = async () => { - const res = await fetch(`${API_BASE_URL}/delete`, { - method: 'DELETE', - headers: { - 'Content-Type': 'text/event-stream', - ...($settings.authHeader && { Authorization: $settings.authHeader }), - ...($user && { Authorization: `Bearer ${localStorage.token}` }) - }, - body: JSON.stringify({ - name: deleteModelTag - }) - }); + const res = await deleteModel(localStorage.token, deleteModelTag); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + if (res) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done) break; + while (true) { + const { value, done } = await reader.read(); + if (done) break; - try { - let lines = value.split('\n'); + try { + let lines = value.split('\n'); - for (const line of lines) { - if (line !== '' && line !== 'null') { - console.log(line); - let data = JSON.parse(line); - console.log(data); + for (const line of lines) { + if (line !== '' && line !== 'null') { + console.log(line); + let data = JSON.parse(line); + console.log(data); - if (data.error) { - throw data.error; + if (data.error) { + throw data.error; + } + if (data.detail) { + throw data.detail; + } + + if (data.status) { + } + } else { + toast.success(`Deleted ${deleteModelTag}`); } - if (data.detail) { - throw data.detail; - } - - if (data.status) { - } - } else { - toast.success(`Deleted ${deleteModelTag}`); } + } catch (error) { + console.log(error); + toast.error(error); } - } catch (error) { - console.log(error); - toast.error(error); } } @@ -543,13 +516,10 @@ models.set(await getModels()); }; - const getModels = async (url = '', type = 'all') => { + const getModels = async (type = 'all') => { let models = []; models.push( - ...(await getOllamaModels( - url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token - ).catch((error) => { + ...(await getOllamaModels(localStorage.token).catch((error) => { toast.error(error); return []; })) @@ -557,10 +527,10 @@ // If OpenAI API Key exists if (type === 'all' && $settings.OPENAI_API_KEY) { - const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'; + const OPENAI_API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'; // Validate OPENAI_API_KEY - const openaiModelRes = await fetch(`${API_BASE_URL}/models`, { + const openaiModelRes = await fetch(`${OPENAI_API_BASE_URL}/models`, { method: 'GET', headers: { 'Content-Type': 'application/json', @@ -588,7 +558,7 @@ ...openAIModels .map((model) => ({ name: model.id, external: true })) .filter((model) => - API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true + OPENAI_API_BASE_URL.includes('openai') ? 
model.name.includes('gpt') : true ) ] : []) @@ -624,15 +594,18 @@ }; onMount(async () => { + console.log('settings', $user.role === 'admin'); + if ($user.role === 'admin') { + API_BASE_URL = await getOllamaAPIUrl(localStorage.token); + } + let settings = JSON.parse(localStorage.getItem('settings') ?? '{}'); console.log(settings); theme = localStorage.theme ?? 'dark'; notificationEnabled = settings.notificationEnabled ?? false; - API_BASE_URL = settings.API_BASE_URL ?? OLLAMA_API_BASE_URL; system = settings.system ?? ''; - requestFormat = settings.requestFormat ?? ''; options.seed = settings.seed ?? 0; @@ -659,10 +632,7 @@ authContent = settings.authHeader.split(' ')[1]; } - ollamaVersion = await getOllamaVersion( - API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token - ).catch((error) => { + ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => { return ''; }); }); @@ -1026,51 +996,53 @@ -
[The Svelte markup in this hunk did not survive extraction. What remains legible: the "Ollama API URL" field label, the hint "The field above should be set to '/ollama/api'; Click here for help." (removed in the old block and re-added in the new one), and a trailing added {/if}, indicating the rewritten URL input and its help link now sit inside a conditional block in SettingsModal.svelte.]
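Together with the new helpers in src/lib/apis/ollama/index.ts above, the settings flow becomes: read the current proxy target with getOllamaAPIUrl (as onMount now does for admin users), write a new one with updateOllamaAPIUrl, then re-query the model list through the proxy to verify the connection. A minimal sketch of that sequence, assuming only the helpers added in this diff; the wrapper name applyOllamaUrl and its toast handling are illustrative, not part of the change:

```typescript
import { updateOllamaAPIUrl, getOllamaModels } from '$lib/apis/ollama';
import { toast } from 'svelte-french-toast';

// Illustrative wrapper; SettingsModal.svelte does the same work inline in
// updateOllamaAPIUrlHandler().
const applyOllamaUrl = async (token: string, url: string) => {
	try {
		// POST /ollama/api/url/update is admin-only on the FastAPI side; a non-admin
		// token surfaces ERROR_MESSAGES.ACCESS_PROHIBITED as the thrown error here.
		const newUrl = await updateOllamaAPIUrl(token, url);

		// Re-fetch /ollama/api/tags through the proxy to confirm the new URL works.
		const models = await getOllamaModels(token);
		if (models.length > 0) {
			toast.success('Server connection verified');
		}
		return newUrl;
	} catch (error) {
		toast.error(`${error}`);
		return null;
	}
};
```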
@@ -1088,7 +1060,6 @@ class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded" on:click={() => { saveSettings({ - API_BASE_URL: API_BASE_URL === '' ? OLLAMA_API_BASE_URL : API_BASE_URL, system: system !== '' ? system : undefined }); show = false; diff --git a/src/lib/constants.ts b/src/lib/constants.ts index 7d67c7a4..c22ae207 100644 --- a/src/lib/constants.ts +++ b/src/lib/constants.ts @@ -1,13 +1,8 @@ -import { dev, browser } from '$app/environment'; -import { PUBLIC_API_BASE_URL } from '$env/static/public'; +import { dev } from '$app/environment'; export const OLLAMA_API_BASE_URL = dev ? `http://${location.hostname}:8080/ollama/api` - : PUBLIC_API_BASE_URL === '' - ? browser - ? `http://${location.hostname}:11434/api` - : `http://localhost:11434/api` - : PUBLIC_API_BASE_URL; + : '/ollama/api'; export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``; export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`; diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte index dca6912f..013638cb 100644 --- a/src/routes/(app)/+layout.svelte +++ b/src/routes/(app)/+layout.svelte @@ -14,7 +14,7 @@ import { getOpenAIModels } from '$lib/apis/openai'; import { user, showSettings, settings, models, modelfiles, prompts } from '$lib/stores'; - import { OLLAMA_API_BASE_URL, REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants'; + import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants'; import SettingsModal from '$lib/components/chat/SettingsModal.svelte'; import Sidebar from '$lib/components/layout/Sidebar.svelte'; @@ -32,10 +32,7 @@ const getModels = async () => { let models = []; models.push( - ...(await getOllamaModels( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token - ).catch((error) => { + ...(await getOllamaModels(localStorage.token).catch((error) => { toast.error(error); return []; })) @@ -58,10 +55,7 @@ const setOllamaVersion = async (version: string = '') => { if (version === '') { - version = await getOllamaVersion( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token - ).catch((error) => { + version = await getOllamaVersion(localStorage.token).catch((error) => { return ''; }); } diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte index 9638861b..8fab09a2 100644 --- a/src/routes/(app)/+page.svelte +++ b/src/routes/(app)/+page.svelte @@ -7,7 +7,6 @@ import { page } from '$app/stores'; import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores'; - import { OLLAMA_API_BASE_URL } from '$lib/constants'; import { generateChatCompletion, generateTitle } from '$lib/apis/ollama'; import { copyToClipboard, splitStream } from '$lib/utils'; @@ -163,36 +162,32 @@ // Scroll down window.scrollTo({ top: document.body.scrollHeight }); - const res = await generateChatCompletion( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token, - { - model: model, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - content: message.content, - ...(message.files && { - images: message.files - .filter((file) => file.type === 'image') - .map((file) => file.url.slice(file.url.indexOf(',') + 1)) - }) - })), - options: { - ...($settings.options ?? {}) - }, - format: $settings.requestFormat ?? 
undefined - } - ); + const res = await generateChatCompletion(localStorage.token, { + model: model, + messages: [ + $settings.system + ? { + role: 'system', + content: $settings.system + } + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + content: message.content, + ...(message.files && { + images: message.files + .filter((file) => file.type === 'image') + .map((file) => file.url.slice(file.url.indexOf(',') + 1)) + }) + })), + options: { + ...($settings.options ?? {}) + }, + format: $settings.requestFormat ?? undefined + }); if (res && res.ok) { const reader = res.body @@ -595,7 +590,6 @@ const generateChatTitle = async (_chatId, userPrompt) => { if ($settings.titleAutoGenerate ?? true) { const title = await generateTitle( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, localStorage.token, $settings?.titleAutoGenerateModel ?? selectedModels[0], userPrompt diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte index e3a70ca2..61b1c768 100644 --- a/src/routes/(app)/c/[id]/+page.svelte +++ b/src/routes/(app)/c/[id]/+page.svelte @@ -7,7 +7,6 @@ import { page } from '$app/stores'; import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores'; - import { OLLAMA_API_BASE_URL } from '$lib/constants'; import { generateChatCompletion, generateTitle } from '$lib/apis/ollama'; import { copyToClipboard, splitStream } from '$lib/utils'; @@ -180,36 +179,32 @@ // Scroll down window.scrollTo({ top: document.body.scrollHeight }); - const res = await generateChatCompletion( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token, - { - model: model, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - content: message.content, - ...(message.files && { - images: message.files - .filter((file) => file.type === 'image') - .map((file) => file.url.slice(file.url.indexOf(',') + 1)) - }) - })), - options: { - ...($settings.options ?? {}) - }, - format: $settings.requestFormat ?? undefined - } - ); + const res = await generateChatCompletion(localStorage.token, { + model: model, + messages: [ + $settings.system + ? { + role: 'system', + content: $settings.system + } + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + content: message.content, + ...(message.files && { + images: message.files + .filter((file) => file.type === 'image') + .map((file) => file.url.slice(file.url.indexOf(',') + 1)) + }) + })), + options: { + ...($settings.options ?? {}) + }, + format: $settings.requestFormat ?? undefined + }); if (res && res.ok) { const reader = res.body @@ -611,12 +606,7 @@ const generateChatTitle = async (_chatId, userPrompt) => { if ($settings.titleAutoGenerate ?? true) { - const title = await generateTitle( - $settings?.API_BASE_URL ?? 
OLLAMA_API_BASE_URL, - localStorage.token, - selectedModels[0], - userPrompt - ); + const title = await generateTitle(localStorage.token, selectedModels[0], userPrompt); if (title) { await setChatTitle(_chatId, title); diff --git a/src/routes/(app)/modelfiles/+page.svelte b/src/routes/(app)/modelfiles/+page.svelte index 917908cd..823782ee 100644 --- a/src/routes/(app)/modelfiles/+page.svelte +++ b/src/routes/(app)/modelfiles/+page.svelte @@ -6,7 +6,6 @@ import { onMount } from 'svelte'; import { modelfiles, settings, user } from '$lib/stores'; - import { OLLAMA_API_BASE_URL } from '$lib/constants'; import { createModel, deleteModel } from '$lib/apis/ollama'; import { createNewModelfile, @@ -20,11 +19,7 @@ const deleteModelHandler = async (tagName) => { let success = null; - success = await deleteModel( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token, - tagName - ); + success = await deleteModel(localStorage.token, tagName); if (success) { toast.success(`Deleted ${tagName}`); diff --git a/src/routes/(app)/modelfiles/create/+page.svelte b/src/routes/(app)/modelfiles/create/+page.svelte index 4b23149c..a59fb445 100644 --- a/src/routes/(app)/modelfiles/create/+page.svelte +++ b/src/routes/(app)/modelfiles/create/+page.svelte @@ -2,7 +2,6 @@ import { v4 as uuidv4 } from 'uuid'; import { toast } from 'svelte-french-toast'; import { goto } from '$app/navigation'; - import { OLLAMA_API_BASE_URL } from '$lib/constants'; import { settings, user, config, modelfiles, models } from '$lib/stores'; import Advanced from '$lib/components/chat/Settings/Advanced.svelte'; @@ -132,12 +131,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, ''); Object.keys(categories).filter((category) => categories[category]).length > 0 && !$models.includes(tagName) ) { - const res = await createModel( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token, - tagName, - content - ); + const res = await createModel(localStorage.token, tagName, content); if (res) { const reader = res.body diff --git a/src/routes/(app)/modelfiles/edit/+page.svelte b/src/routes/(app)/modelfiles/edit/+page.svelte index 11dec182..3139c8b2 100644 --- a/src/routes/(app)/modelfiles/edit/+page.svelte +++ b/src/routes/(app)/modelfiles/edit/+page.svelte @@ -7,8 +7,6 @@ import { page } from '$app/stores'; import { settings, user, config, modelfiles } from '$lib/stores'; - - import { OLLAMA_API_BASE_URL } from '$lib/constants'; import { splitStream } from '$lib/utils'; import { createModel } from '$lib/apis/ollama'; @@ -104,12 +102,7 @@ content !== '' && Object.keys(categories).filter((category) => categories[category]).length > 0 ) { - const res = await createModel( - $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL, - localStorage.token, - tagName, - content - ); + const res = await createModel(localStorage.token, tagName, content); if (res) { const reader = res.body
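Across all of these call sites the refactor is the same: the per-call base_url argument disappears, every helper takes the JWT first, and the FastAPI proxy resolves app.state.OLLAMA_API_BASE_URL server-side. A compact sketch of the post-refactor signatures as the frontend now consumes them; the driver function and the model/tag names are placeholders for illustration, and the streaming/reader handling is unchanged from the pages above:

```typescript
import {
	generateChatCompletion,
	generateTitle,
	createModel,
	deleteModel,
	pullModel
} from '$lib/apis/ollama';

// Illustrative driver showing the new token-first signatures; not part of the diff.
const exerciseOllamaApi = async (token: string) => {
	// Chat completion: base_url parameter removed, requests go through /ollama/api.
	const chatRes = await generateChatCompletion(token, {
		model: 'llama2',
		messages: [{ role: 'user', content: 'Hello' }]
	});

	// Title generation keeps its (token, model, prompt) argument order.
	const title = await generateTitle(token, 'llama2', 'Hello');

	// Model management uses the same proxy; pull/create/delete are restricted to the
	// admin role on the backend, so non-admin tokens receive 401 ACCESS_PROHIBITED.
	await pullModel(token, 'llama2:latest');
	await createModel(token, 'custom:latest', 'FROM llama2\nSYSTEM """You are helpful."""');
	await deleteModel(token, 'custom:latest');

	return { chatRes, title };
};
```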