Merge pull request #844 from open-webui/litellm

feat: direct litellm integration
Timothy Jaeryang Baek 2024-02-24 21:03:03 -05:00 committed by GitHub
commit 1a9a56d690
22 changed files with 1119 additions and 545 deletions

CHANGELOG.md

@@ -5,6 +5,22 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.1.103] - UNRELEASED
+
+### Added
+
+- **Built-in LiteLLM Proxy**: Open WebUI now ships with LiteLLM Proxy built in.
+- **Image Generation Enhancements**: Advanced Settings + Image Preview Feature.
+
+### Fixed
+
+- Issue where the RAG scan stopped loading documents as soon as it hit a file with an unsupported MIME type (or any other exception). (#866)
+
+### Changed
+
+- Ollama is no longer required to run Open WebUI.
+- Our documentation is available at https://docs.openwebui.com/.
+
## [0.1.102] - 2024-02-22

### Added

backend/.gitignore vendored

@@ -6,6 +6,11 @@ uploads
*.db
_test
Pipfile
-data/*
+!/data
+/data/*
+!/data/litellm
+/data/litellm/*
+!data/litellm/config.yaml
!data/config.json
.webui_secret_key

backend/apps/images/main.py

@@ -49,7 +49,7 @@ async def toggle_enabled(request: Request, user=Depends(get_admin_user)):
        app.state.ENABLED = not app.state.ENABLED
        return app.state.ENABLED
    except Exception as e:
-        raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


class UrlUpdateForm(BaseModel):

@@ -109,7 +109,8 @@ def get_models(user=Depends(get_current_user)):
        models = r.json()
        return models
    except Exception as e:
-        raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
+        app.state.ENABLED = False
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


@app.get("/models/default")

@@ -120,7 +121,8 @@ async def get_default_model(user=Depends(get_admin_user)):
        return {"model": options["sd_model_checkpoint"]}
    except Exception as e:
-        raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
+        app.state.ENABLED = False
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))


class UpdateModelForm(BaseModel):

@@ -190,4 +192,4 @@ def generate_image(
        return r.json()
    except Exception as e:
        print(e)
-        raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))

backend/config.py

@@ -83,8 +83,6 @@ for version in soup.find_all("h2"):
    # Find the next sibling that is a h3 tag (section title)
    current = version.find_next_sibling()

-    print(current)
-
    while current and current.name != "h2":
        if current.name == "h3":
            section_title = current.get_text().lower()  # e.g., "added", "fixed"

backend/data/litellm/config.yaml

@@ -0,0 +1,4 @@
general_settings: {}
litellm_settings: {}
model_list: []
router_settings: {}
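
The file above ships empty; once models are registered it gains model_list entries. A minimal sketch of that shape as a TypeScript type, assuming the standard LiteLLM proxy config schema (the type name is ours, not from this PR):

// Illustrative sketch (not part of this PR): the config shape once model_list is populated,
// assuming the standard LiteLLM proxy schema.
type LiteLLMProxyConfig = {
	general_settings: Record<string, unknown>;
	litellm_settings: Record<string, unknown>;
	model_list: {
		model_name: string; // display name exposed to clients
		litellm_params: {
			model: string; // e.g. 'ollama/mistral'
			api_base?: string;
			api_key?: string;
			rpm?: number; // requests-per-minute limit
		};
	}[];
	router_settings: Record<string, unknown>;
};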

backend/main.py

@@ -2,25 +2,31 @@ from bs4 import BeautifulSoup
import json
import markdown
import time
+import os
+import sys

-from fastapi import FastAPI, Request
+from fastapi import FastAPI, Request, Depends
from fastapi.staticfiles import StaticFiles
from fastapi import HTTPException
+from fastapi.responses import JSONResponse
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException

+from litellm.proxy.proxy_server import ProxyConfig, initialize
+from litellm.proxy.proxy_server import app as litellm_app
+
from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app

from config import WEBUI_NAME, ENV, VERSION, CHANGELOG, FRONTEND_BUILD_DIR
+from utils.utils import get_http_authorization_cred, get_current_user
class SPAStaticFiles(StaticFiles):

@@ -34,6 +40,21 @@ class SPAStaticFiles(StaticFiles):
            raise ex


+proxy_config = ProxyConfig()
+
+
+async def config():
+    router, model_list, general_settings = await proxy_config.load_config(
+        router=None, config_file_path="./data/litellm/config.yaml"
+    )
+
+    await initialize(config="./data/litellm/config.yaml", telemetry=False)
+
+
+async def startup():
+    await config()
+
+
app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

origins = ["*"]
@@ -47,6 +68,11 @@ app.add_middleware(
)


+@app.on_event("startup")
+async def on_startup():
+    await startup()
+
+
@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
@@ -57,7 +83,23 @@ async def check_url(request: Request, call_next):
    return response


+@litellm_app.middleware("http")
+async def auth_middleware(request: Request, call_next):
+    auth_header = request.headers.get("Authorization", "")
+
+    if ENV != "dev":
+        try:
+            user = get_current_user(get_http_authorization_cred(auth_header))
+            print(user)
+        except Exception as e:
+            return JSONResponse(status_code=400, content={"detail": str(e)})
+
+    response = await call_next(request)
+    return response
+
+
app.mount("/api/v1", webui_app)
+app.mount("/litellm/api", litellm_app)
app.mount("/ollama/api", ollama_app)
app.mount("/openai/api", openai_app)

backend/requirements.txt

@@ -16,6 +16,9 @@ aiohttp
peewee
bcrypt

+litellm
+apscheduler
+
langchain
langchain-community
chromadb

backend/utils/utils.py

@@ -58,6 +58,17 @@ def extract_token_from_auth_header(auth_header: str):
    return auth_header[len("Bearer ") :]


+def get_http_authorization_cred(auth_header: str):
+    try:
+        scheme, credentials = auth_header.split(" ")
+        return {
+            "scheme": scheme,
+            "credentials": credentials,
+        }
+    except:
+        raise ValueError(ERROR_MESSAGES.INVALID_TOKEN)
+
+
def get_current_user(
    auth_token: HTTPAuthorizationCredentials = Depends(bearer_security),
):
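
For illustration, a TypeScript mirror of the parsing above (ours, not in the PR): "Bearer eyJ..." becomes { scheme: 'Bearer', credentials: 'eyJ...' }, and any header without exactly one space fails.

// Illustrative TypeScript mirror of get_http_authorization_cred (not in the PR):
// 'Bearer eyJhbGci...' -> { scheme: 'Bearer', credentials: 'eyJhbGci...' }
const getHttpAuthorizationCred = (authHeader: string) => {
	const parts = authHeader.split(' ');
	if (parts.length !== 2) throw new Error('invalid token'); // mirrors the ValueError
	const [scheme, credentials] = parts;
	return { scheme, credentials };
};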

package.json

@@ -1,6 +1,6 @@
{
	"name": "open-webui",
-	"version": "0.1.102",
+	"version": "0.1.103",
	"private": true,
	"scripts": {
		"dev": "vite dev --host",

src/lib/apis/litellm/index.ts

@@ -0,0 +1,148 @@
import { LITELLM_API_BASE_URL } from '$lib/constants';

export const getLiteLLMModels = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${LITELLM_API_BASE_URL}/v1/models`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
		? models
				.map((model) => ({
					id: model.id,
					name: model.name ?? model.id,
					external: true,
					source: 'litellm'
				}))
				.sort((a, b) => {
					return a.name.localeCompare(b.name);
				})
		: models;
};
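
Example usage (a sketch; the model id shown is illustrative):

// Sketch: fetch the proxy's model list with the WebUI session token.
const litellmModels = await getLiteLLMModels(localStorage.token);
// -> e.g. [{ id: 'ollama/mistral', name: 'ollama/mistral', external: true, source: 'litellm' }]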
export const getLiteLLMModelInfo = async (token: string = '') => {
	let error = null;

	const res = await fetch(`${LITELLM_API_BASE_URL}/model/info`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models;
};

type AddLiteLLMModelForm = {
	name: string;
	model: string;
	api_base: string;
	api_key: string;
	rpm: string;
};

export const addLiteLLMModel = async (token: string = '', payload: AddLiteLLMModelForm) => {
	let error = null;

	const res = await fetch(`${LITELLM_API_BASE_URL}/model/new`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			model_name: payload.name,
			litellm_params: {
				model: payload.model,
				...(payload.api_base === '' ? {} : { api_base: payload.api_base }),
				...(payload.api_key === '' ? {} : { api_key: payload.api_key }),
				...(isNaN(parseInt(payload.rpm)) ? {} : { rpm: parseInt(payload.rpm) })
			}
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	return res;
};
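
Example usage (a sketch; values illustrative). Empty api_base/api_key and a non-numeric rpm are omitted from litellm_params by the conditional spreads above:

// Sketch: register an Ollama-backed model with the embedded LiteLLM proxy.
await addLiteLLMModel(localStorage.token, {
	name: 'mistral', // model_name
	model: 'ollama/mistral', // litellm_params.model
	api_base: '', // empty, so omitted from the request body
	api_key: '', // empty, so omitted
	rpm: '' // non-numeric, so omitted
});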
export const deleteLiteLLMModel = async (token: string = '', id: string) => {
	let error = null;

	const res = await fetch(`${LITELLM_API_BASE_URL}/model/delete`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			id: id
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = `LiteLLM: ${err?.error?.message ?? 'Network Problem'}`;
			return [];
		});

	if (error) {
		throw error;
	}

	return res;
};

src/lib/apis/ollama/index.ts

@@ -128,9 +128,11 @@ export const getOllamaModels = async (token: string = '') => {
		throw error;
	}

-	return (res?.models ?? []).sort((a, b) => {
-		return a.name.localeCompare(b.name);
-	});
+	return (res?.models ?? [])
+		.map((model) => ({ id: model.model, name: model.name ?? model.model, ...model }))
+		.sort((a, b) => {
+			return a.name.localeCompare(b.name);
+		});
};
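
Each Ollama entry now carries an explicit id (its model tag), giving all three sources a common { id, name } interface. Illustrative shape of one mapped entry (values made up):

// Illustrative (values made up): one entry returned by getOllamaModels after the map().
const example = { id: 'mistral:7b', name: 'mistral:7b', size: 4109865159 };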
// TODO: migrate to backend

src/lib/apis/openai/index.ts

@@ -163,7 +163,7 @@ export const getOpenAIModels = async (token: string = '') => {
	return models
		? models
-				.map((model) => ({ name: model.id, external: true }))
+				.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
				.sort((a, b) => {
					return a.name.localeCompare(b.name);
				})

@@ -200,17 +200,21 @@ export const getOpenAIModelsDirect = async (
	const models = Array.isArray(res) ? res : res?.data ?? null;

	return models
-		.map((model) => ({ name: model.id, external: true }))
+		.map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
		.sort((a, b) => {
			return a.name.localeCompare(b.name);
		});
};

-export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+export const generateOpenAIChatCompletion = async (
+	token: string = '',
+	body: object,
+	url: string = OPENAI_API_BASE_URL
+) => {
	let error = null;

-	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
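
The new url parameter lets the same helper target either backend. A sketch of routing a LiteLLM-sourced model through the embedded proxy (constants come from src/lib/constants.ts):

// Sketch: LiteLLM-sourced models go through the embedded proxy, others use the default URL.
const res = await generateOpenAIChatCompletion(
	localStorage.token,
	{ model: 'ollama/mistral', stream: true, messages: [{ role: 'user', content: 'Hi' }] },
	`${LITELLM_API_BASE_URL}/v1` // omit this argument to fall back to OPENAI_API_BASE_URL
);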

src/lib/components/chat/ModelSelector.svelte

@@ -25,7 +25,7 @@
	$: if (selectedModels.length > 0 && $models.length > 0) {
		selectedModels = selectedModels.map((model) =>
-			$models.map((m) => m.name).includes(model) ? model : ''
+			$models.map((m) => m.id).includes(model) ? model : ''
		);
	}
</script>

@@ -45,7 +45,7 @@
			{#if model.name === 'hr'}
				<hr />
			{:else}
-				<option value={model.name} class="text-gray-700 text-lg"
+				<option value={model.id} class="text-gray-700 text-lg"
					>{model.name +
						`${model.size ? ` (${(model.size / 1024 ** 3).toFixed(1)}GB)` : ''}`}</option
				>

src/lib/components/chat/Settings/About.svelte

@@ -38,16 +38,18 @@
		</div>
	</div>

-	<hr class=" dark:border-gray-700" />
-
-	<div>
-		<div class=" mb-2.5 text-sm font-medium">Ollama Version</div>
-		<div class="flex w-full">
-			<div class="flex-1 text-xs text-gray-700 dark:text-gray-200">
-				{ollamaVersion ?? 'N/A'}
+	{#if ollamaVersion}
+		<hr class=" dark:border-gray-700" />
+
+		<div>
+			<div class=" mb-2.5 text-sm font-medium">Ollama Version</div>
+			<div class="flex w-full">
+				<div class="flex-1 text-xs text-gray-700 dark:text-gray-200">
+					{ollamaVersion ?? 'N/A'}
+				</div>
			</div>
		</div>
-	</div>
+	{/if}

	<hr class=" dark:border-gray-700" />

src/lib/components/chat/Settings/Connections.svelte

@@ -3,7 +3,7 @@
	import { createEventDispatcher, onMount } from 'svelte';
	const dispatch = createEventDispatcher();

-	import { getOllamaAPIUrl, updateOllamaAPIUrl } from '$lib/apis/ollama';
+	import { getOllamaAPIUrl, getOllamaVersion, updateOllamaAPIUrl } from '$lib/apis/ollama';
	import { getOpenAIKey, getOpenAIUrl, updateOpenAIKey, updateOpenAIUrl } from '$lib/apis/openai';
	import toast from 'svelte-french-toast';

@@ -15,6 +15,9 @@
	let OPENAI_API_KEY = '';
	let OPENAI_API_BASE_URL = '';

+	let showOpenAI = false;
+	let showLiteLLM = false;
+
	const updateOpenAIHandler = async () => {
		OPENAI_API_BASE_URL = await updateOpenAIUrl(localStorage.token, OPENAI_API_BASE_URL);
		OPENAI_API_KEY = await updateOpenAIKey(localStorage.token, OPENAI_API_KEY);

@@ -24,11 +27,14 @@
	const updateOllamaAPIUrlHandler = async () => {
		API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);

-		const _models = await getModels('ollama');
-		if (_models.length > 0) {
+		const ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
+			return null;
+		});
+
+		if (ollamaVersion) {
			toast.success('Server connection verified');
-			await models.set(_models);
+			await models.set(await getModels());
		}
	};

@@ -42,7 +48,7 @@
</script>

<form
-	class="flex flex-col h-full space-y-3 text-sm"
+	class="flex flex-col h-full justify-between text-sm"
	on:submit|preventDefault={() => {
		updateOpenAIHandler();
		dispatch('save');

		// });
	}}
>
	<div class=" pr-1.5 overflow-y-scroll max-h-[21rem] space-y-3">
		<div class=" space-y-3">
			<div class="mt-2 space-y-2 pr-1.5">
				<div class="flex justify-between items-center text-sm">
					<div class="  font-medium">OpenAI API</div>
					<button
						class=" text-xs font-medium text-gray-500"
						type="button"
						on:click={() => {
							showOpenAI = !showOpenAI;
						}}>{showOpenAI ? 'Hide' : 'Show'}</button
					>
				</div>

				{#if showOpenAI}
					<div>
						<div class=" mb-2.5 text-sm font-medium">API Key</div>
						<div class="flex w-full">
							<div class="flex-1">
								<input
									class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
									placeholder="Enter OpenAI API Key"
									bind:value={OPENAI_API_KEY}
									autocomplete="off"
								/>
							</div>
						</div>
					</div>

					<div>
						<div class=" mb-2.5 text-sm font-medium">API Base URL</div>
						<div class="flex w-full">
							<div class="flex-1">
								<input
									class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
									placeholder="Enter OpenAI API Base URL"
									bind:value={OPENAI_API_BASE_URL}
									autocomplete="off"
								/>
							</div>
						</div>
						<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
							WebUI will make requests to <span class=" text-gray-200"
								>'{OPENAI_API_BASE_URL}/chat'</span
							>
						</div>
					</div>
				{/if}
			</div>
		</div>

		<hr class=" dark:border-gray-700" />

		<div>
			<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
			<div class="flex w-full">
				<div class="flex-1 mr-2">
					<input
						class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
						placeholder="Enter URL (e.g. http://localhost:11434/api)"
						bind:value={API_BASE_URL}
					/>
				</div>
				<button
					class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
					on:click={() => {
						updateOllamaAPIUrlHandler();
					}}
					type="button"
				>
					<svg
						xmlns="http://www.w3.org/2000/svg"
						viewBox="0 0 20 20"
						fill="currentColor"
						class="w-4 h-4"
					>
						<path
							fill-rule="evenodd"
							d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
							clip-rule="evenodd"
						/>
					</svg>
				</button>
			</div>

			<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
				Trouble accessing Ollama?
				<a
					class=" text-gray-300 font-medium"
					href="https://github.com/open-webui/open-webui#troubleshooting"
					target="_blank"
				>
					Click here for help.
				</a>
			</div>
		</div>
	</div>

src/lib/components/chat/Settings/Images.svelte

@@ -32,9 +32,11 @@
	const getModels = async () => {
		models = await getDiffusionModels(localStorage.token).catch((error) => {
			toast.error(error);
-			return null;
+			return [];
+		});
+		selectedModel = await getDefaultDiffusionModel(localStorage.token).catch((error) => {
+			return '';
		});
-		selectedModel = await getDefaultDiffusionModel(localStorage.token);
	};

	const updateAUTOMATIC1111UrlHandler = async () => {

src/lib/components/chat/Settings/Models.svelte

@@ -2,14 +2,33 @@
	import queue from 'async/queue';
	import toast from 'svelte-french-toast';

-	import { createModel, deleteModel, pullModel } from '$lib/apis/ollama';
+	import { createModel, deleteModel, getOllamaVersion, pullModel } from '$lib/apis/ollama';
	import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
	import { WEBUI_NAME, models, user } from '$lib/stores';
	import { splitStream } from '$lib/utils';
+	import { onMount } from 'svelte';
+
+	import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';

	export let getModels: Function;

+	let showLiteLLM = false;
+	let showLiteLLMParams = false;
+
+	let liteLLMModelInfo = [];
+
+	let liteLLMModel = '';
+	let liteLLMModelName = '';
+	let liteLLMAPIBase = '';
+	let liteLLMAPIKey = '';
+	let liteLLMRPM = '';
+
+	let deleteLiteLLMModelId = '';
+
+	$: liteLLMModelName = liteLLMModel;
+
	// Models
+	let showExperimentalOllama = false;
+	let ollamaVersion = '';

	const MAX_PARALLEL_DOWNLOADS = 3;
	const modelDownloadQueue = queue(
		(task: { modelName: string }, cb) =>
@@ -286,256 +305,184 @@
			opts.callback({ success: true, modelName: opts.modelName });
		}
	};

+	const addLiteLLMModelHandler = async () => {
+		if (!liteLLMModelInfo.find((info) => info.model_name === liteLLMModelName)) {
+			const res = await addLiteLLMModel(localStorage.token, {
+				name: liteLLMModelName,
+				model: liteLLMModel,
+				api_base: liteLLMAPIBase,
+				api_key: liteLLMAPIKey,
+				rpm: liteLLMRPM
+			}).catch((error) => {
+				toast.error(error);
+				return null;
+			});
+
+			if (res) {
+				if (res.message) {
+					toast.success(res.message);
+				}
+			}
+		} else {
+			toast.error(`Model ${liteLLMModelName} already exists.`);
+		}
+
+		liteLLMModelName = '';
+		liteLLMModel = '';
+		liteLLMAPIBase = '';
+		liteLLMAPIKey = '';
+		liteLLMRPM = '';
+
+		liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
+		models.set(await getModels());
+	};
+
+	const deleteLiteLLMModelHandler = async () => {
+		const res = await deleteLiteLLMModel(localStorage.token, deleteLiteLLMModelId).catch(
+			(error) => {
+				toast.error(error);
+				return null;
+			}
+		);
+
+		if (res) {
+			if (res.message) {
+				toast.success(res.message);
+			}
+		}
+
+		deleteLiteLLMModelId = '';
+		liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
+		models.set(await getModels());
+	};
+
+	onMount(async () => {
+		ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
+		liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
+	});
</script>
<div class="flex flex-col h-full justify-between text-sm"> <div class="flex flex-col h-full justify-between text-sm">
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-80"> <div class=" space-y-3 pr-1.5 overflow-y-scroll h-[23rem]">
<div> {#if ollamaVersion}
<div class=" mb-2.5 text-sm font-medium">Pull a model from Ollama.com</div> <div class="space-y-2 pr-1.5">
<div class="flex w-full"> <div>
<div class="flex-1 mr-2"> <div class=" mb-2 text-sm font-medium">Manage Ollama Models</div>
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none" <div class=" mb-2 text-sm font-medium">Pull a model from Ollama.com</div>
placeholder="Enter model tag (e.g. mistral:7b)" <div class="flex w-full">
bind:value={modelTag} <div class="flex-1 mr-2">
/> <input
</div> class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
<button placeholder="Enter model tag (e.g. mistral:7b)"
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 disabled:bg-gray-700 disabled:cursor-not-allowed rounded transition" bind:value={modelTag}
on:click={() => { />
pullModelHandler();
}}
disabled={modelTransferring}
>
{#if modelTransferring}
<div class="self-center">
<svg
class=" w-4 h-4"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
</div> </div>
{:else} <button
<svg class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
xmlns="http://www.w3.org/2000/svg" on:click={() => {
viewBox="0 0 16 16" pullModelHandler();
fill="currentColor" }}
class="w-4 h-4" disabled={modelTransferring}
> >
<path {#if modelTransferring}
d="M8.75 2.75a.75.75 0 0 0-1.5 0v5.69L5.03 6.22a.75.75 0 0 0-1.06 1.06l3.5 3.5a.75.75 0 0 0 1.06 0l3.5-3.5a.75.75 0 0 0-1.06-1.06L8.75 8.44V2.75Z" <div class="self-center">
/> <svg
<path class=" w-4 h-4"
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z" viewBox="0 0 24 24"
/> fill="currentColor"
</svg> xmlns="http://www.w3.org/2000/svg"
{/if} ><style>
</button> .spinner_ajPY {
</div> transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
<div class="mt-2 mb-1 text-xs text-gray-400 dark:text-gray-500">
To access the available model names for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://ollama.com/library"
target="_blank">click here.</a
>
</div>
{#if Object.keys(modelDownloadStatus).length > 0}
{#each Object.keys(modelDownloadStatus) as model}
<div class="flex flex-col">
<div class="font-medium mb-1">{model}</div>
<div class="">
<div
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, modelDownloadStatus[model].pullProgress ?? 0)}%"
>
{modelDownloadStatus[model].pullProgress ?? 0}%
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{modelDownloadStatus[model].digest}
</div>
</div>
</div>
{/each}
{/if}
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
bind:value={deleteModelTag}
placeholder="Select a model"
>
{#if !deleteModelTag}
<option value="" disabled selected>Select a model</option>
{/if}
{#each $models.filter((m) => m.size != null) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-red-700 hover:bg-red-800 text-gray-100 rounded transition"
on:click={() => {
deleteModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
<hr class=" dark:border-gray-700" />
<form
on:submit|preventDefault={() => {
uploadModelHandler();
}}
>
<div class=" mb-2 flex w-full justify-between">
<div class=" text-sm font-medium">
Upload a GGUF model <a
class=" text-xs font-medium text-gray-500 underline"
href="https://github.com/jmorganca/ollama/blob/main/README.md#import-from-gguf"
target="_blank">(Experimental)</a
>
</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
if (modelUploadMode === 'file') {
modelUploadMode = 'url';
} else {
modelUploadMode = 'file';
}
}}
type="button"
>
{#if modelUploadMode === 'file'}
<span class="ml-2 self-center">File Mode</span>
{:else}
<span class="ml-2 self-center">URL Mode</span>
{/if}
</button>
</div>
<div class="flex w-full mb-1.5">
<div class="flex flex-col w-full">
{#if modelUploadMode === 'file'}
<div class="flex-1 {modelInputFile && modelInputFile.length > 0 ? 'mr-2' : ''}">
<input
id="model-upload-input"
type="file"
bind:files={modelInputFile}
on:change={() => {
console.log(modelInputFile);
}}
accept=".gguf"
required
hidden
/>
<button
type="button"
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-800"
on:click={() => {
document.getElementById('model-upload-input').click();
}}
>
{#if modelInputFile && modelInputFile.length > 0}
{modelInputFile[0].name}
{:else}
Click here to select
{/if}
</button>
</div>
{:else}
<div class="flex-1 {modelFileUrl !== '' ? 'mr-2' : ''}">
<input
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-800 outline-none {modelFileUrl !==
''
? 'mr-2'
: ''}"
type="url"
required
bind:value={modelFileUrl}
placeholder="Type HuggingFace Resolve (Download) URL"
/>
</div>
{/if}
</div>
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')}
<button
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 disabled:bg-gray-700 disabled:cursor-not-allowed rounded transition"
type="submit"
disabled={modelTransferring}
>
{#if modelTransferring}
<div class="self-center">
<svg
class=" w-4 h-4"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
} }
} @keyframes spinner_AtaB {
</style><path 100% {
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z" transform: rotate(360deg);
opacity=".25" }
/><path }
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z" </style><path
class="spinner_ajPY" d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
/></svg opacity=".25"
/><path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
</div>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
> >
<path
d="M8.75 2.75a.75.75 0 0 0-1.5 0v5.69L5.03 6.22a.75.75 0 0 0-1.06 1.06l3.5 3.5a.75.75 0 0 0 1.06 0l3.5-3.5a.75.75 0 0 0-1.06-1.06L8.75 8.44V2.75Z"
/>
<path
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
{/if}
</button>
</div>
<div class="mt-2 mb-1 text-xs text-gray-400 dark:text-gray-500">
To access the available model names for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://ollama.com/library"
target="_blank">click here.</a
>
</div>
{#if Object.keys(modelDownloadStatus).length > 0}
{#each Object.keys(modelDownloadStatus) as model}
<div class="flex flex-col">
<div class="font-medium mb-1">{model}</div>
<div class="">
<div
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, modelDownloadStatus[model].pullProgress ?? 0)}%"
>
{modelDownloadStatus[model].pullProgress ?? 0}%
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{modelDownloadStatus[model].digest}
</div>
</div>
</div> </div>
{:else} {/each}
{/if}
</div>
<div>
<div class=" mb-2 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteModelTag}
placeholder="Select a model"
>
{#if !deleteModelTag}
<option value="" disabled selected>Select a model</option>
{/if}
{#each $models.filter((m) => m.size != null) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
deleteModelHandler();
}}
>
<svg <svg
xmlns="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16" viewBox="0 0 16 16"
@ -543,54 +490,438 @@
class="w-4 h-4" class="w-4 h-4"
> >
<path <path
d="M7.25 10.25a.75.75 0 0 0 1.5 0V4.56l2.22 2.22a.75.75 0 1 0 1.06-1.06l-3.5-3.5a.75.75 0 0 0-1.06 0l-3.5 3.5a.75.75 0 0 0 1.06 1.06l2.22-2.22v5.69Z" fill-rule="evenodd"
/> d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
<path clip-rule="evenodd"
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/> />
</svg> </svg>
</button>
</div>
</div>
<div>
<div class="flex justify-between items-center text-xs">
<div class=" text-sm font-medium">Experimental</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showExperimentalOllama = !showExperimentalOllama;
}}>{showExperimentalOllama ? 'Show' : 'Hide'}</button
>
</div>
</div>
{#if showExperimentalOllama}
<form
on:submit|preventDefault={() => {
uploadModelHandler();
}}
>
<div class=" mb-2 flex w-full justify-between">
<div class=" text-sm font-medium">Upload a GGUF model</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
if (modelUploadMode === 'file') {
modelUploadMode = 'url';
} else {
modelUploadMode = 'file';
}
}}
type="button"
>
{#if modelUploadMode === 'file'}
<span class="ml-2 self-center">File Mode</span>
{:else}
<span class="ml-2 self-center">URL Mode</span>
{/if}
</button>
</div>
<div class="flex w-full mb-1.5">
<div class="flex flex-col w-full">
{#if modelUploadMode === 'file'}
<div class="flex-1 {modelInputFile && modelInputFile.length > 0 ? 'mr-2' : ''}">
<input
id="model-upload-input"
type="file"
bind:files={modelInputFile}
on:change={() => {
console.log(modelInputFile);
}}
accept=".gguf"
required
hidden
/>
<button
type="button"
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850"
on:click={() => {
document.getElementById('model-upload-input').click();
}}
>
{#if modelInputFile && modelInputFile.length > 0}
{modelInputFile[0].name}
{:else}
Click here to select
{/if}
</button>
</div>
{:else}
<div class="flex-1 {modelFileUrl !== '' ? 'mr-2' : ''}">
<input
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850 outline-none {modelFileUrl !==
''
? 'mr-2'
: ''}"
type="url"
required
bind:value={modelFileUrl}
placeholder="Type HuggingFace Resolve (Download) URL"
/>
</div>
{/if}
</div>
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')}
<button
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 disabled:bg-gray-700 disabled:cursor-not-allowed rounded transition"
type="submit"
disabled={modelTransferring}
>
{#if modelTransferring}
<div class="self-center">
<svg
class=" w-4 h-4"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
</div>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M7.25 10.25a.75.75 0 0 0 1.5 0V4.56l2.22 2.22a.75.75 0 1 0 1.06-1.06l-3.5-3.5a.75.75 0 0 0-1.06 0l-3.5 3.5a.75.75 0 0 0 1.06 1.06l2.22-2.22v5.69Z"
/>
<path
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
{/if}
</button>
{/if}
</div>
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')}
<div>
<div>
<div class=" my-2.5 text-sm font-medium">Modelfile Content</div>
<textarea
bind:value={modelFileContent}
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none resize-none"
rows="6"
/>
</div>
</div>
{/if} {/if}
</button> <div class=" mt-1 text-xs text-gray-400 dark:text-gray-500">
To access the GGUF models available for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://huggingface.co/models?search=gguf"
target="_blank">click here.</a
>
</div>
{#if uploadProgress !== null}
<div class="mt-2">
<div class=" mb-2 text-xs">Upload Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, uploadProgress ?? 0)}%"
>
{uploadProgress ?? 0}%
</div>
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{modelFileDigest}
</div>
</div>
{/if}
</form>
{/if} {/if}
</div> </div>
<hr class=" dark:border-gray-700 my-2" />
{/if}
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')} <div class=" space-y-3">
<div class="mt-2 space-y-3 pr-1.5">
<div> <div>
<div class=" mb-2 text-sm font-medium">Manage LiteLLM Models</div>
<div> <div>
<div class=" my-2.5 text-sm font-medium">Modelfile Content</div> <div class="flex justify-between items-center text-xs">
<textarea <div class=" text-sm font-medium">Add a model</div>
bind:value={modelFileContent} <button
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none resize-none" class=" text-xs font-medium text-gray-500"
rows="6" type="button"
/> on:click={() => {
</div> showLiteLLMParams = !showLiteLLMParams;
</div> }}>{showLiteLLMParams ? 'Advanced' : 'Default'}</button
{/if} >
<div class=" mt-1 text-xs text-gray-400 dark:text-gray-500">
To access the GGUF models available for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://huggingface.co/models?search=gguf"
target="_blank">click here.</a
>
</div>
{#if uploadProgress !== null}
<div class="mt-2">
<div class=" mb-2 text-xs">Upload Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, uploadProgress ?? 0)}%"
>
{uploadProgress ?? 0}%
</div> </div>
</div> </div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{modelFileDigest} <div class="my-2 space-y-2">
<div class="flex w-full mb-1.5">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM Model (litellm_params.model)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
<button
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
addLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
/>
</svg>
</button>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-1.5 text-sm font-medium">Model Name</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter Model Name (model_name)"
bind:value={liteLLMModelName}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Base URL (litellm_params.api_base)"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-1.5 text-sm font-medium">API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API Key (litellm_params.api_key)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class="mb-1.5 text-sm font-medium">API RPM</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
placeholder="Enter LiteLLM API RPM (litellm_params.rpm)"
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
{/if}
</div>
<div class="mb-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
bind:value={deleteLiteLLMModelId}
placeholder="Select a model"
>
{#if !deleteLiteLLMModelId}
<option value="" disabled selected>Select a model</option>
{/if}
{#each liteLLMModelInfo as model}
<option value={model.model_info.id} class="bg-gray-100 dark:bg-gray-700"
>{model.model_name}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
deleteLiteLLMModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div> </div>
</div> </div>
{/if} </div>
</form>
<!-- <div class="mt-2 space-y-3 pr-1.5">
<div>
<div class=" mb-2.5 text-sm font-medium">Add LiteLLM Model</div>
<div class="flex w-full mb-2">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM Model (e.g. ollama/mistral)"
bind:value={liteLLMModel}
autocomplete="off"
/>
</div>
</div>
<div class="flex justify-between items-center text-sm">
<div class=" font-medium">Advanced Model Params</div>
<button
class=" text-xs font-medium text-gray-500"
type="button"
on:click={() => {
showLiteLLMParams = !showLiteLLMParams;
}}>{showLiteLLMParams ? 'Hide' : 'Show'}</button
>
</div>
{#if showLiteLLMParams}
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Key (e.g. os.environ/AZURE_API_KEY_CA)"
bind:value={liteLLMAPIKey}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API Base URL"
bind:value={liteLLMAPIBase}
autocomplete="off"
/>
</div>
</div>
</div>
<div>
<div class=" mb-2.5 text-sm font-medium">LiteLLM API RPM</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter LiteLLM API RPM"
bind:value={liteLLMRPM}
autocomplete="off"
/>
</div>
</div>
</div>
{/if}
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Not sure what to add?
<a
class=" text-gray-300 font-medium"
href="https://litellm.vercel.app/docs/proxy/configs#quick-start"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
</div> -->
		</div>
	</div>
</div>

src/lib/components/chat/SettingsModal.svelte

@@ -4,6 +4,7 @@
	import { getOllamaModels } from '$lib/apis/ollama';
	import { getOpenAIModels } from '$lib/apis/openai';
+	import { getLiteLLMModels } from '$lib/apis/litellm';

	import Modal from '../common/Modal.svelte';
	import Account from './Settings/Account.svelte';
@@ -27,23 +28,29 @@
	let selectedTab = 'general';

-	const getModels = async (type = 'all') => {
-		const models = [];
-		models.push(
-			...(await getOllamaModels(localStorage.token).catch((error) => {
-				toast.error(error);
-				return [];
-			}))
-		);
-
-		if (type === 'all') {
-			const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
-				console.log(error);
-				return null;
-			});
-			models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
-		}
+	const getModels = async () => {
+		let models = await Promise.all([
+			await getOllamaModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			}),
+			await getOpenAIModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			}),
+			await getLiteLLMModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			})
+		]);
+
+		models = models
+			.filter((models) => models)
+			.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
+
+		// models.push(...(ollamaModels ? [{ name: 'hr' }, ...ollamaModels] : []));
+		// models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+		// models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));

		return models;
	};
</script>
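
The filter/reduce pipeline drops sources that failed (returned null) and joins the surviving lists with { name: 'hr' } separators, which ModelSelector renders as <hr /> dividers. Illustrative input and output:

// Illustrative: two sources succeeded, one failed (null).
const lists: any[] = [[{ id: 'a' }, { id: 'b' }], null, [{ id: 'c' }]];
const merged = lists
	.filter((l) => l)
	.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
// -> [{ id: 'a' }, { id: 'b' }, { name: 'hr' }, { id: 'c' }]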

src/lib/constants.ts

@@ -5,6 +5,8 @@ export const APP_NAME = 'Open WebUI';
export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;

export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
+
+export const LITELLM_API_BASE_URL = `${WEBUI_BASE_URL}/litellm/api`;
export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
export const AUDIO_API_BASE_URL = `${WEBUI_BASE_URL}/audio/api/v1`;

src/routes/(app)/+layout.svelte

@@ -11,6 +11,7 @@
	import { getModelfiles } from '$lib/apis/modelfiles';
	import { getPrompts } from '$lib/apis/prompts';
	import { getOpenAIModels } from '$lib/apis/openai';
+	import { getLiteLLMModels } from '$lib/apis/litellm';

	import { getDocs } from '$lib/apis/documents';
	import { getAllChatTags } from '$lib/apis/chats';
@@ -43,24 +44,28 @@
	let showShortcuts = false;

	const getModels = async () => {
-		let models = [];
-		models.push(
-			...(await getOllamaModels(localStorage.token).catch((error) => {
-				toast.error(error);
-				return [];
-			}))
-		);
-
-		// $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
-		// $settings.OPENAI_API_KEY
-
-		const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
-			console.log(error);
-			return null;
-		});
-
-		models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+		let models = await Promise.all([
+			await getOllamaModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			}),
+			await getOpenAIModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			}),
+			await getLiteLLMModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			})
+		]);
+
+		models = models
+			.filter((models) => models)
+			.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
+
+		// models.push(...(ollamaModels ? [{ name: 'hr' }, ...ollamaModels] : []));
+		// models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+		// models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));

		return models;
	};
@@ -117,8 +122,6 @@
		await models.set(await getModels());
	});

-	await setOllamaVersion();
-
	document.addEventListener('keydown', function (event) {
		const isCtrlPressed = event.ctrlKey || event.metaKey; // metaKey is for Cmd key on Mac
		// Check if the Shift key is pressed
@@ -250,60 +253,6 @@
			</div>
		</div>
	</div>
-{:else if checkVersion(REQUIRED_OLLAMA_VERSION, ollamaVersion ?? '0')}
-	<div class="fixed w-full h-full flex z-50">
-		<div
-			class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
-		>
-			<div class="m-auto pb-44 flex flex-col justify-center">
-				<div class="max-w-md">
-					<div class="text-center dark:text-white text-2xl font-medium z-50">
-						Connection Issue or Update Needed
-					</div>
-
-					<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
-						Oops! It seems like your Ollama needs a little attention. <br
-							class=" hidden sm:flex"
-						/>We've detected either a connection hiccup or observed that you're using an older
-						version. Ensure you're on the latest Ollama version
-						<br class=" hidden sm:flex" />(version
-						<span class=" dark:text-white font-medium">{REQUIRED_OLLAMA_VERSION} or higher</span
-						>) or check your connection.
-
-						<div class="mt-1 text-sm">
-							Trouble accessing Ollama?
-							<a
-								class=" text-black dark:text-white font-semibold underline"
-								href="https://github.com/open-webui/open-webui#troubleshooting"
-								target="_blank"
-							>
-								Click here for help.
-							</a>
-						</div>
-					</div>
-
-					<div class=" mt-6 mx-auto relative group w-fit">
-						<button
-							class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 transition font-medium text-sm"
-							on:click={async () => {
-								location.href = '/';
-								// await setOllamaVersion();
-							}}
-						>
-							Check Again
-						</button>
-
-						<button
-							class="text-xs text-center w-full mt-2 text-gray-400 underline"
-							on:click={async () => {
-								await setOllamaVersion(REQUIRED_OLLAMA_VERSION);
-							}}>Close</button
-						>
-					</div>
-				</div>
-			</div>
-		</div>
-	</div>
{:else if localDBChats.length > 0}
	<div class="fixed w-full h-full flex z-50">
		<div
src/routes/(app)/+page.svelte

@@ -36,6 +36,7 @@
	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
	import Navbar from '$lib/components/layout/Navbar.svelte';
	import { RAGTemplate } from '$lib/utils/rag';
+	import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
	import { WEBUI_BASE_URL } from '$lib/constants';

	let stopResponseFlag = false;

@@ -132,6 +133,10 @@
			selectedModels = [''];
		}

+		selectedModels = selectedModels.map((modelId) =>
+			$models.map((m) => m.id).includes(modelId) ? modelId : ''
+		);
+
		let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
		settings.set({
			..._settings

@@ -150,6 +155,10 @@
	const submitPrompt = async (userPrompt, _user = null) => {
		console.log('submitPrompt', $chatId);

+		selectedModels = selectedModels.map((modelId) =>
+			$models.map((m) => m.id).includes(modelId) ? modelId : ''
+		);
+
		if (selectedModels.includes('')) {
			toast.error('Model not selected');
		} else if (messages.length != 0 && messages.at(-1).done != true) {
@@ -278,40 +287,41 @@
		}

		await Promise.all(
			selectedModels.map(async (modelId) => {
				const model = $models.filter((m) => m.id === modelId).at(0);

				if (model) {
					// Create response message
					let responseMessageId = uuidv4();
					let responseMessage = {
						parentId: parentId,
						id: responseMessageId,
						childrenIds: [],
						role: 'assistant',
						content: '',
						model: model.id,
						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
					};

					// Add message to history and Set currentId to messageId
					history.messages[responseMessageId] = responseMessage;
					history.currentId = responseMessageId;

					// Append messageId to childrenIds of parent message
					if (parentId !== null) {
						history.messages[parentId].childrenIds = [
							...history.messages[parentId].childrenIds,
							responseMessageId
						];
					}

					if (model?.external) {
						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
					} else if (model) {
						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
					}
				} else {
					toast.error(`Model ${modelId} not found`);
				}
			})
		);
@@ -320,6 +330,7 @@
	};

	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		model = model.id;
		const responseMessage = history.messages[responseMessageId];

		// Wait until history/message have been updated
@@ -531,54 +542,58 @@
		const responseMessage = history.messages[responseMessageId];
		scrollToBottom();

		const res = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				messages: [
					$settings.system
						? {
								role: 'system',
								content: $settings.system
							}
						: undefined,
					...messages.filter((message) => !message.deleted)
				]
					.filter((message) => message)
					.map((message, idx, arr) => ({
						role: message.role,
						...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
								}
							: {
									content:
										arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
								})
					})),
				seed: $settings?.options?.seed ?? undefined,
				stop: $settings?.options?.stop ?? undefined,
				temperature: $settings?.options?.temperature ?? undefined,
				top_p: $settings?.options?.top_p ?? undefined,
				num_ctx: $settings?.options?.num_ctx ?? undefined,
				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
				max_tokens: $settings?.options?.num_predict ?? undefined
			},
			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
		);

		if (res && res.ok) {
			const reader = res.body
test.json Normal file

@@ -0,0 +1,6 @@
{
	"model_name": "string",
	"litellm_params": {
		"model": "ollama/mistral"
	}
}
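
test.json reads like a scratch payload for the proxy's /model/new route, the same shape addLiteLLMModel sends. A sketch of posting it directly (assumes a valid WebUI JWT):

// Sketch: POST the payload above to the embedded proxy's /model/new endpoint.
await fetch('/litellm/api/model/new', {
	method: 'POST',
	headers: {
		'Content-Type': 'application/json',
		Authorization: `Bearer ${localStorage.token}`
	},
	body: JSON.stringify({ model_name: 'string', litellm_params: { model: 'ollama/mistral' } })
});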