Merge branch 'main' into dev

commit ac34a7978f
Timothy Jaeryang Baek, 2023-12-24 04:19:55 -05:00 (committed via GitHub)
14 changed files with 1081 additions and 341 deletions

README.md

@@ -13,7 +13,7 @@
ChatGPT-Style Web Interface for Ollama 🦙
**Disclaimer:** *ollama-webui is a community-driven project and is not affiliated with the Ollama team in any way. This initiative is independent, and any inquiries or feedback should be directed to [our community on Discord](https://discord.gg/5rJgQTnV4s). We kindly request users to refrain from contacting or harassing the Ollama team regarding this project.*
**Disclaimer:** _ollama-webui is a community-driven project and is not affiliated with the Ollama team in any way. This initiative is independent, and any inquiries or feedback should be directed to [our community on Discord](https://discord.gg/5rJgQTnV4s). We kindly request users to refrain from contacting or harassing the Ollama team regarding this project._
![Ollama Web UI Demo](./demo.gif)
@@ -35,6 +35,8 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
- 📥🗑️ **Download/Delete Models**: Easily download or remove models directly from the web UI.
- ⬆️ **GGUF File Model Creation**: Effortlessly create Ollama models by uploading GGUF files directly from the web UI. Streamlined process with options to upload from your machine or download GGUF files from Hugging Face.
- 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
- 🔄 **Multi-Modal Support**: Seamlessly engage with models that support multimodal interactions, including images (e.g., LLaVA).

backend/.gitignore

@@ -1,2 +1,4 @@
__pycache__
.env
_old
uploads

backend/apps/web/main.py

@@ -1,7 +1,7 @@
from fastapi import FastAPI, Request, Depends, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from apps.web.routers import auths, users
from apps.web.routers import auths, users, utils
from config import WEBUI_VERSION, WEBUI_AUTH
app = FastAPI()
@@ -19,6 +19,7 @@ app.add_middleware(
app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])
@app.get("/")
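With the `utils` router registered, the new endpoints sit under the `utils` prefix. Assuming the backend app is mounted at `/api/v1`, as the frontend constants in this commit suggest, the routes added are (a sketch, not output of the commit itself):

```ts
// GET  /api/v1/utils/download?url=<Hugging Face resolve URL>  -> SSE progress stream
// POST /api/v1/utils/upload   (multipart form field "file")   -> SSE progress stream
```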

backend/apps/web/routers/utils.py (new file)

@@ -0,0 +1,164 @@
from fastapi import APIRouter, UploadFile, File, BackgroundTasks
from fastapi import Depends, HTTPException, status
from starlette.responses import StreamingResponse
from pydantic import BaseModel

import requests
import os
import aiohttp
import json

from utils.misc import calculate_sha256
from config import OLLAMA_API_BASE_URL

router = APIRouter()


class UploadBlobForm(BaseModel):
    filename: str


from urllib.parse import urlparse


def parse_huggingface_url(hf_url):
    try:
        # Parse the URL
        parsed_url = urlparse(hf_url)

        # Get the path and split it into components
        path_components = parsed_url.path.split("/")

        # Extract the desired output
        user_repo = "/".join(path_components[1:3])
        model_file = path_components[-1]

        return model_file
    except ValueError:
        return None


async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
    done = False

    if os.path.exists(file_path):
        current_size = os.path.getsize(file_path)
    else:
        current_size = 0

    # Resume a partial download by requesting only the bytes not yet on disk.
    headers = {"Range": f"bytes={current_size}-"} if current_size > 0 else {}

    timeout = aiohttp.ClientTimeout(total=600)  # Set the timeout

    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url, headers=headers) as response:
            total_size = int(response.headers.get("content-length", 0)) + current_size

            with open(file_path, "ab+") as file:
                async for data in response.content.iter_chunked(chunk_size):
                    current_size += len(data)
                    file.write(data)

                    done = current_size == total_size
                    progress = round((current_size / total_size) * 100, 2)
                    yield f'data: {{"progress": {progress}, "completed": {current_size}, "total": {total_size}}}\n\n'

                if done:
                    file.seek(0)
                    hashed = calculate_sha256(file)
                    file.seek(0)

                    url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=file)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file_name,
                        }
                        os.remove(file_path)

                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        raise Exception("Ollama: could not create blob, please try again.")


@router.get("/download")
async def download(
    url: str,
):
    # url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
    file_name = parse_huggingface_url(url)

    if file_name:
        os.makedirs("./uploads", exist_ok=True)
        file_path = os.path.join("./uploads", f"{file_name}")

        return StreamingResponse(
            download_file_stream(url, file_path, file_name),
            media_type="text/event-stream",
        )
    else:
        return None


@router.post("/upload")
async def upload(file: UploadFile = File(...)):
    os.makedirs("./uploads", exist_ok=True)
    file_path = os.path.join("./uploads", file.filename)

    async def file_write_stream():
        total = 0
        total_size = file.size
        chunk_size = 1024 * 1024

        done = False
        try:
            with open(file_path, "wb+") as f:
                while True:
                    chunk = file.file.read(chunk_size)
                    if not chunk:
                        break
                    f.write(chunk)
                    total += len(chunk)
                    done = total_size == total
                    progress = round((total / total_size) * 100, 2)

                    res = {
                        "progress": progress,
                        "total": total_size,
                        "completed": total,
                    }

                    yield f"data: {json.dumps(res)}\n\n"

                if done:
                    f.seek(0)
                    hashed = calculate_sha256(f)
                    f.seek(0)

                    url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=f)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file.filename,
                        }
                        os.remove(file_path)
                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        raise Exception("Ollama: could not create blob, please try again.")
        except Exception as e:
            res = {"error": str(e)}
            yield f"data: {json.dumps(res)}\n\n"

    return StreamingResponse(file_write_stream(), media_type="text/event-stream")
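Both endpoints report progress as a stream of `data: {...}` lines in `text/event-stream` format rather than a single JSON response. A minimal sketch of how such a stream can be consumed with the Fetch API (illustrative only; the function name and the `/api/v1` prefix are assumptions, not part of this commit):

```ts
// Consume the SSE-style progress stream emitted by /utils/download.
async function trackDownload(hfUrl: string): Promise<void> {
  const res = await fetch(
    `/api/v1/utils/download?url=${encodeURIComponent(hfUrl)}`
  );
  if (!res.ok || !res.body) throw new Error('download request failed');

  const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
  let buffer = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += value;

    // Events are separated by blank lines ("\n\n"); keep any partial event.
    const events = buffer.split('\n\n');
    buffer = events.pop() ?? '';

    for (const event of events) {
      if (!event.startsWith('data: ')) continue;
      const data = JSON.parse(event.slice('data: '.length));
      if (data.error) throw new Error(data.error);
      if (data.done) console.log('blob created:', data.blob);
      else console.log(`progress: ${data.progress}%`);
    }
  }
}
```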

backend/config.py

@@ -30,7 +30,7 @@ if ENV == "prod":
# WEBUI_VERSION
####################################
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.34")
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.40")
####################################
# WEBUI_AUTH

backend/requirements.txt

@@ -12,6 +12,7 @@ passlib[bcrypt]
uuid
requests
aiohttp
pymongo
bcrypt

backend/utils/misc.py

@@ -13,3 +13,11 @@ def get_gravatar_url(email):
    # Grab the actual image URL
    return f"https://www.gravatar.com/avatar/{hash_hex}?d=mp"


def calculate_sha256(file):
    sha256 = hashlib.sha256()
    # Read the file in chunks to efficiently handle large files
    for chunk in iter(lambda: file.read(8192), b""):
        sha256.update(chunk)
    return sha256.hexdigest()

src/lib/components/chat/MessageInput.svelte

@@ -155,7 +155,7 @@
<div class="fixed bottom-0 w-full">
<div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
{#if messages.length == 0 && suggestionPrompts.length !== 0}
<div class="max-w-3xl">
<div class="max-w-3xl w-full">
<Suggestions {suggestionPrompts} {submitPrompt} />
</div>
{/if}

src/lib/components/chat/Suggestions.svelte

@@ -3,7 +3,7 @@
export let suggestionPrompts = [];
</script>
<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left">
<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left w-full">
{#each suggestionPrompts as prompt, promptIdx}
<div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
<button

src/lib/components/chat/SettingsModal.svelte

@@ -1,12 +1,18 @@
<script lang="ts">
import Modal from '../common/Modal.svelte';
import { WEB_UI_VERSION, OLLAMA_API_BASE_URL } from '$lib/constants';
import {
WEB_UI_VERSION,
OLLAMA_API_BASE_URL,
WEBUI_API_BASE_URL,
WEBUI_BASE_URL
} from '$lib/constants';
import toast from 'svelte-french-toast';
import { onMount } from 'svelte';
import { config, info, models, settings, user } from '$lib/stores';
import { splitStream, getGravatarURL } from '$lib/utils';
import Advanced from './Settings/Advanced.svelte';
import { stringify } from 'postcss';
export let show = false;
@@ -44,11 +50,21 @@
};
// Models
let modelTransferring = false;
let modelTag = '';
let deleteModelTag = '';
let digest = '';
let pullProgress = null;
let modelUploadMode = 'file';
let modelInputFile = '';
let modelFileUrl = '';
let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop "</s>"\nPARAMETER stop "USER:"\nPARAMETER stop "ASSISTANT:"`;
let modelFileDigest = '';
let uploadProgress = null;
let deleteModelTag = '';
// Addons
let titleAutoGenerate = true;
let speechAutoSend = false;
@@ -56,6 +72,7 @@
let gravatarEmail = '';
let OPENAI_API_KEY = '';
let OPENAI_API_BASE_URL = '';
// Auth
let authEnabled = false;
@@ -151,6 +168,7 @@
};
const pullModelHandler = async () => {
modelTransferring = true;
const res = await fetch(`${API_BASE_URL}/pull`, {
method: 'POST',
headers: {
@@ -216,6 +234,198 @@
}
modelTag = '';
modelTransferring = false;
models.set(await getModels());
};
const calculateSHA256 = async (file) => {
console.log(file);
// Create a FileReader to read the file asynchronously
const reader = new FileReader();
// Define a promise to handle the file reading
const readFile = new Promise((resolve, reject) => {
reader.onload = () => resolve(reader.result);
reader.onerror = reject;
});
// Read the file as an ArrayBuffer
reader.readAsArrayBuffer(file);
try {
// Wait for the FileReader to finish reading the file
const buffer = await readFile;
// Convert the ArrayBuffer to a Uint8Array
const uint8Array = new Uint8Array(buffer);
// Calculate the SHA-256 hash using Web Crypto API
const hashBuffer = await crypto.subtle.digest('SHA-256', uint8Array);
// Convert the hash to a hexadecimal string
const hashArray = Array.from(new Uint8Array(hashBuffer));
const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, '0')).join('');
return `sha256:${hashHex}`;
} catch (error) {
console.error('Error calculating SHA-256 hash:', error);
throw error;
}
};
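// Hypothetical usage, not part of this commit: hashing the selected file
// client-side yields the same "sha256:..." digest that the backend's
// calculate_sha256 computes, so the two values can be cross-checked:
//   const digest = await calculateSHA256(modelInputFile[0]);
//   console.log(digest); // e.g. "sha256:9f86d081884c7d65..."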
const uploadModelHandler = async () => {
modelTransferring = true;
uploadProgress = 0;
let uploaded = false;
let fileResponse = null;
let name = '';
if (modelUploadMode === 'file') {
const file = modelInputFile[0];
const formData = new FormData();
formData.append('file', file);
fileResponse = await fetch(`${WEBUI_API_BASE_URL}/utils/upload`, {
method: 'POST',
headers: {
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: formData
}).catch((error) => {
console.log(error);
return null;
});
} else {
fileResponse = await fetch(`${WEBUI_API_BASE_URL}/utils/download?url=${modelFileUrl}`, {
method: 'GET',
headers: {
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
}
}).catch((error) => {
console.log(error);
return null;
});
}
if (fileResponse && fileResponse.ok) {
const reader = fileResponse.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done) break;
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
let data = JSON.parse(line.replace(/^data: /, ''));
if (data.progress) {
uploadProgress = data.progress;
}
if (data.error) {
throw data.error;
}
if (data.done) {
modelFileDigest = data.blob;
name = data.name;
uploaded = true;
}
}
}
} catch (error) {
console.log(error);
}
}
}
if (uploaded) {
const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: `${name}:latest`,
modelfile: `FROM @${modelFileDigest}\n${modelFileContent}`
})
}).catch((err) => {
console.log(err);
return null;
});
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done) break;
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
if (data.error) {
throw data.error;
}
if (data.detail) {
throw data.detail;
}
if (data.status) {
if (
!data.digest &&
!data.status.includes('writing') &&
!data.status.includes('sha256')
) {
toast.success(data.status);
} else {
if (data.digest) {
digest = data.digest;
if (data.completed) {
pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
} else {
pullProgress = 100;
}
}
}
}
}
}
} catch (error) {
console.log(error);
toast.error(error);
}
}
}
}
modelFileUrl = '';
modelInputFile = '';
modelTransferring = false;
uploadProgress = null;
models.set(await getModels());
};
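`uploadModelHandler` pipes each response body through `splitStream('\n')` from `$lib/utils`, whose implementation is not part of this diff. A sketch of the kind of TransformStream such a helper would be (an assumption shown for context, not the repository's actual code):

```ts
// Buffer incoming text chunks and re-emit them split on a delimiter, so each
// downstream read() yields complete lines instead of arbitrary chunks.
const splitStream = (delimiter: string): TransformStream<string, string> => {
  let buffer = '';
  return new TransformStream<string, string>({
    transform(chunk, controller) {
      buffer += chunk;
      const parts = buffer.split(delimiter);
      buffer = parts.pop() ?? ''; // keep the trailing partial line buffered
      parts.forEach((part) => controller.enqueue(part));
    },
    flush(controller) {
      if (buffer) controller.enqueue(buffer);
    }
  });
};
```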
@@ -302,8 +512,10 @@
// If OpenAI API Key exists
if (type === 'all' && $settings.OPENAI_API_KEY) {
const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
// Validate OPENAI_API_KEY
const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -320,15 +532,19 @@
return null;
});
const openAIModels = openaiModelRes?.data ?? null;
const openAIModels = Array.isArray(openaiModelRes)
? openaiModelRes
: openaiModelRes?.data ?? null;
models.push(
...(openAIModels
? [
{ name: 'hr' },
...openAIModels
.map((model) => ({ name: model.id, label: 'OpenAI' }))
.filter((model) => model.name.includes('gpt'))
.map((model) => ({ name: model.id, external: true }))
.filter((model) =>
API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
)
]
: [])
);
@@ -363,6 +579,7 @@
gravatarEmail = settings.gravatarEmail ?? '';
OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
authEnabled = settings.authHeader !== undefined ? true : false;
if (authEnabled) {
@@ -476,6 +693,30 @@
<div class=" self-center">Models</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'external'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'external';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
/>
</svg>
</div>
<div class=" self-center">External</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'addons'
@@ -758,115 +999,373 @@
</div>
</div>
{:else if selectedTab === 'models'}
<div class="flex flex-col space-y-3 text-sm mb-10">
<div>
<div class=" mb-2.5 text-sm font-medium">Pull a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter model tag (e.g. mistral:7b)"
bind:value={modelTag}
/>
</div>
<button
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 rounded transition"
on:click={() => {
pullModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
<div class="flex flex-col h-full justify-between text-sm">
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-80">
<div>
<div class=" mb-2.5 text-sm font-medium">Pull a model from Ollama.ai</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter model tag (e.g. mistral:7b)"
bind:value={modelTag}
/>
</div>
<button
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 disabled:bg-gray-700 disabled:cursor-not-allowed rounded transition"
on:click={() => {
pullModelHandler();
}}
disabled={modelTransferring}
>
<path
d="M10.75 2.75a.75.75 0 00-1.5 0v8.614L6.295 8.235a.75.75 0 10-1.09 1.03l4.25 4.5a.75.75 0 001.09 0l4.25-4.5a.75.75 0 00-1.09-1.03l-2.955 3.129V2.75z"
/>
<path
d="M3.5 12.75a.75.75 0 00-1.5 0v2.5A2.75 2.75 0 004.75 18h10.5A2.75 2.75 0 0018 15.25v-2.5a.75.75 0 00-1.5 0v2.5c0 .69-.56 1.25-1.25 1.25H4.75c-.69 0-1.25-.56-1.25-1.25v-2.5z"
/>
</svg>
</button>
</div>
{#if modelTransferring}
<div class="self-center">
<svg
class=" w-4 h-4"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
</div>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 2.75a.75.75 0 0 0-1.5 0v5.69L5.03 6.22a.75.75 0 0 0-1.06 1.06l3.5 3.5a.75.75 0 0 0 1.06 0l3.5-3.5a.75.75 0 0 0-1.06-1.06L8.75 8.44V2.75Z"
/>
<path
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
{/if}
</button>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
To access the available model names for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://ollama.ai/library"
target="_blank">click here.</a
>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
To access the available model names for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://ollama.ai/library"
target="_blank">click here.</a
>
</div>
{#if pullProgress !== null}
<div class="mt-2">
<div class=" mb-2 text-xs">Pull Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, pullProgress ?? 0)}%"
>
{pullProgress ?? 0}%
{#if pullProgress !== null}
<div class="mt-2">
<div class=" mb-2 text-xs">Pull Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, pullProgress ?? 0)}%"
>
{pullProgress ?? 0}%
</div>
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{digest}
</div>
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{digest}
</div>
</div>
{/if}
</div>
<hr class=" dark:border-gray-700" />
{/if}
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
bind:value={deleteModelTag}
placeholder="Select a model"
<form
on:submit|preventDefault={() => {
uploadModelHandler();
}}
>
<div class=" mb-2 flex w-full justify-between">
<div class=" text-sm font-medium">Upload a GGUF model</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
if (modelUploadMode === 'file') {
modelUploadMode = 'url';
} else {
modelUploadMode = 'file';
}
}}
type="button"
>
{#if !deleteModelTag}
<option value="" disabled selected>Select a model</option>
{#if modelUploadMode === 'file'}
<span class="ml-2 self-center">File Mode</span>
{:else}
<span class="ml-2 self-center">URL Mode</span>
{/if}
{#each $models.filter((m) => m.size != null) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
>
{/each}
</select>
</button>
</div>
<button
class="px-3 bg-red-700 hover:bg-red-800 text-gray-100 rounded transition"
on:click={() => {
deleteModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
<div class="flex w-full mb-1.5">
<div class="flex flex-col w-full">
{#if modelUploadMode === 'file'}
<div
class="flex-1 {modelInputFile && modelInputFile.length > 0 ? 'mr-2' : ''}"
>
<input
id="model-upload-input"
type="file"
bind:files={modelInputFile}
on:change={() => {
console.log(modelInputFile);
}}
accept=".gguf"
required
hidden
/>
<button
type="button"
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-800"
on:click={() => {
document.getElementById('model-upload-input').click();
}}
>
{#if modelInputFile && modelInputFile.length > 0}
{modelInputFile[0].name}
{:else}
Click here to select
{/if}
</button>
</div>
{:else}
<div class="flex-1 {modelFileUrl !== '' ? 'mr-2' : ''}">
<input
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-800 outline-none {modelFileUrl !==
''
? 'mr-2'
: ''}"
type="url"
required
bind:value={modelFileUrl}
placeholder="Type HuggingFace Resolve (Download) URL"
/>
</div>
{/if}
</div>
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')}
<button
class="px-3 text-gray-100 bg-emerald-600 hover:bg-emerald-700 disabled:bg-gray-700 disabled:cursor-not-allowed rounded transition"
type="submit"
disabled={modelTransferring}
>
{#if modelTransferring}
<div class="self-center">
<svg
class=" w-4 h-4"
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
><style>
.spinner_ajPY {
transform-origin: center;
animation: spinner_AtaB 0.75s infinite linear;
}
@keyframes spinner_AtaB {
100% {
transform: rotate(360deg);
}
}
</style><path
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
opacity=".25"
/><path
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
class="spinner_ajPY"
/></svg
>
</div>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M7.25 10.25a.75.75 0 0 0 1.5 0V4.56l2.22 2.22a.75.75 0 1 0 1.06-1.06l-3.5-3.5a.75.75 0 0 0-1.06 0l-3.5 3.5a.75.75 0 0 0 1.06 1.06l2.22-2.22v5.69Z"
/>
<path
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
{/if}
</button>
{/if}
</div>
{#if (modelUploadMode === 'file' && modelInputFile && modelInputFile.length > 0) || (modelUploadMode === 'url' && modelFileUrl !== '')}
<div>
<div>
<div class=" my-2.5 text-sm font-medium">Modelfile Content</div>
<textarea
bind:value={modelFileContent}
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none resize-none"
rows="6"
/>
</div>
</div>
{/if}
<div class=" mt-1 text-xs text-gray-400 dark:text-gray-500">
To access the GGUF models available for downloading, <a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://huggingface.co/models?search=gguf"
target="_blank">click here.</a
>
<path
fill-rule="evenodd"
d="M8.75 1A2.75 2.75 0 006 3.75v.443c-.795.077-1.584.176-2.365.298a.75.75 0 10.23 1.482l.149-.022.841 10.518A2.75 2.75 0 007.596 19h4.807a2.75 2.75 0 002.742-2.53l.841-10.52.149.023a.75.75 0 00.23-1.482A41.03 41.03 0 0014 4.193V3.75A2.75 2.75 0 0011.25 1h-2.5zM10 4c.84 0 1.673.025 2.5.075V3.75c0-.69-.56-1.25-1.25-1.25h-2.5c-.69 0-1.25.56-1.25 1.25v.325C8.327 4.025 9.16 4 10 4zM8.58 7.72a.75.75 0 00-1.5.06l.3 7.5a.75.75 0 101.5-.06l-.3-7.5zm4.34.06a.75.75 0 10-1.5-.06l-.3 7.5a.75.75 0 101.5.06l.3-7.5z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
{#if uploadProgress !== null}
<div class="mt-2">
<div class=" mb-2 text-xs">Upload Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, uploadProgress ?? 0)}%"
>
{uploadProgress ?? 0}%
</div>
</div>
<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
{modelFileDigest}
</div>
</div>
{/if}
</form>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Delete a model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
bind:value={deleteModelTag}
placeholder="Select a model"
>
{#if !deleteModelTag}
<option value="" disabled selected>Select a model</option>
{/if}
{#each $models.filter((m) => m.size != null) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name +
' (' +
(model.size / 1024 ** 3).toFixed(1) +
' GB)'}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-red-700 hover:bg-red-800 text-gray-100 rounded transition"
on:click={() => {
deleteModelHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M5 3.25V4H2.75a.75.75 0 0 0 0 1.5h.3l.815 8.15A1.5 1.5 0 0 0 5.357 15h5.285a1.5 1.5 0 0 0 1.493-1.35l.815-8.15h.3a.75.75 0 0 0 0-1.5H11v-.75A2.25 2.25 0 0 0 8.75 1h-1.5A2.25 2.25 0 0 0 5 3.25Zm2.25-.75a.75.75 0 0 0-.75.75V4h3v-.75a.75.75 0 0 0-.75-.75h-1.5ZM6.05 6a.75.75 0 0 1 .787.713l.275 5.5a.75.75 0 0 1-1.498.075l-.275-5.5A.75.75 0 0 1 6.05 6Zm3.9 0a.75.75 0 0 1 .712.787l-.275 5.5a.75.75 0 0 1-1.498-.075l.275-5.5a.75.75 0 0 1 .786-.711Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
</div>
</div>
{:else if selectedTab === 'external'}
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={() => {
saveSettings({
OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
});
show = false;
}}
>
<div class=" space-y-3">
<div>
<div class=" mb-2.5 text-sm font-medium">OpenAI API Key</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter OpenAI API Key"
bind:value={OPENAI_API_KEY}
autocomplete="off"
/>
</div>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Adds optional support for online models.
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">OpenAI API Base URL</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter OpenAI API Key"
bind:value={OPENAI_API_BASE_URL}
autocomplete="off"
/>
</div>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
WebUI will make requests to <span class=" text-gray-200"
>'{OPENAI_API_BASE_URL}/chat/completions'</span
>
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
type="submit"
>
Save
</button>
</div>
</form>
{:else if selectedTab === 'addons'}
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={() => {
saveSettings({
gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined,
OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined
gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
});
show = false;
}}
@@ -962,26 +1461,6 @@
>
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">
OpenAI API Key <span class=" text-gray-400 text-sm">(optional)</span>
</div>
<div class="flex w-full">
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter OpenAI API Key"
bind:value={OPENAI_API_KEY}
autocomplete="off"
/>
</div>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Adds optional support for available 'gpt-*' models.
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">

src/lib/constants.ts

@@ -8,7 +8,8 @@ export const OLLAMA_API_BASE_URL =
: `http://localhost:11434/api`
: PUBLIC_API_BASE_URL;
export const WEBUI_API_BASE_URL = dev ? `http://${location.hostname}:8080/api/v1` : `/api/v1`;
export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
export const WEB_UI_VERSION = 'v1.0.0-alpha-static';
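Deriving `WEBUI_API_BASE_URL` from `WEBUI_BASE_URL` means a single constant now switches between dev and prod. Roughly how the values resolve (illustrative, assuming a local dev backend on port 8080):

```ts
// dev:  WEBUI_BASE_URL     -> 'http://localhost:8080'
//       WEBUI_API_BASE_URL -> 'http://localhost:8080/api/v1'
// prod: WEBUI_BASE_URL     -> ''        (same origin as the served page)
//       WEBUI_API_BASE_URL -> '/api/v1'
```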

src/routes/(app)/+layout.svelte

@@ -55,7 +55,9 @@
// If OpenAI API Key exists
if ($settings.OPENAI_API_KEY) {
// Validate OPENAI_API_KEY
const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -72,15 +74,19 @@
return null;
});
const openAIModels = openaiModelRes?.data ?? null;
const openAIModels = Array.isArray(openaiModelRes)
? openaiModelRes
: openaiModelRes?.data ?? null;
models.push(
...(openAIModels
? [
{ name: 'hr' },
...openAIModels
.map((model) => ({ name: model.id, label: 'OpenAI' }))
.filter((model) => model.name.includes('gpt'))
.map((model) => ({ name: model.id, external: true }))
.filter((model) =>
API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
)
]
: [])
);
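The `Array.isArray` guard accounts for OpenAI-compatible servers that return a bare JSON array from `/models`, while OpenAI itself wraps the list in a `data` envelope. The same normalization as a standalone sketch (the response shapes in the comments are assumptions about third-party servers):

```ts
// OpenAI:               { "data": [{ "id": "gpt-4", ... }, ...] }
// some compatible APIs: [{ "id": "my-model", ... }, ...]
const normalizeModels = (res: unknown): { id: string }[] | null =>
  Array.isArray(res) ? res : ((res as { data?: { id: string }[] })?.data ?? null);
```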

src/routes/(app)/+page.svelte

@@ -7,7 +7,7 @@
import { splitStream } from '$lib/utils';
import { goto } from '$app/navigation';
import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -130,7 +130,8 @@
const sendPrompt = async (userPrompt, parentId, _chatId) => {
await Promise.all(
selectedModels.map(async (model) => {
if (model.includes('gpt-')) {
console.log(model);
if ($models.filter((m) => m.name === model)[0].external) {
await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
} else {
await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -244,6 +245,13 @@
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
@@ -364,133 +372,163 @@
];
}
await tick();
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
const res = await fetch(
`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
{
method: 'POST',
headers: {
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
role: 'system',
content: $settings.system
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
}
).catch((err) => {
console.log(err);
return null;
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
try {
let lines = value.split('\n');
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
} catch (error) {
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
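The streaming loop above follows the OpenAI wire format: each chunk is a `data: `-prefixed JSON line carrying a `choices[0].delta.content` fragment, and the stream ends with a `[DONE]` sentinel, which is why the code strips the prefix before `JSON.parse` and checks for that literal. Illustrative lines as the reader would receive them (payloads abbreviated):

```ts
// data: {"choices":[{"delta":{"content":"Hel"}}]}
// data: {"choices":[{"delta":{"content":"lo"}}]}
// data: [DONE]
```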

src/routes/(app)/c/[id]/+page.svelte

@@ -6,7 +6,7 @@
import { onMount, tick } from 'svelte';
import { convertMessagesToHistory, splitStream } from '$lib/utils';
import { goto } from '$app/navigation';
import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -144,7 +144,8 @@
const sendPrompt = async (userPrompt, parentId, _chatId) => {
await Promise.all(
selectedModels.map(async (model) => {
if (model.includes('gpt-')) {
console.log(model);
if ($models.filter((m) => m.name === model)[0].external) {
await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
} else {
await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -258,6 +259,13 @@
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama. Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
@@ -378,133 +386,163 @@
];
}
await tick();
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
const res = await fetch(
`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
{
method: 'POST',
headers: {
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
role: 'system',
content: $settings.system
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
}
).catch((err) => {
console.log(err);
return null;
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
try {
let lines = value.split('\n');
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
} catch (error) {
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}