forked from open-webui/open-webui
Merge pull request #265 from ollama-webui/custom-openai-endpoint
feat: custom openai endpoint
commit 6ea9f6e198
7 changed files with 386 additions and 247 deletions
@@ -30,7 +30,7 @@ if ENV == "prod":
 # WEBUI_VERSION
 ####################################
 
-WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.34")
+WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.35")
 
 ####################################
 # WEBUI_AUTH
@@ -155,7 +155,7 @@
 <div class="fixed bottom-0 w-full">
 <div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
 {#if messages.length == 0 && suggestionPrompts.length !== 0}
-<div class="max-w-3xl">
+<div class="max-w-3xl w-full">
 <Suggestions {suggestionPrompts} {submitPrompt} />
 </div>
 {/if}
@@ -3,7 +3,7 @@
 export let suggestionPrompts = [];
 </script>
 
-<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left">
+<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left w-full">
 {#each suggestionPrompts as prompt, promptIdx}
 <div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
 <button
@@ -56,6 +56,7 @@
 
 let gravatarEmail = '';
 let OPENAI_API_KEY = '';
+let OPENAI_API_BASE_URL = '';
 
 // Auth
 let authEnabled = false;
@@ -302,8 +303,10 @@
 
 // If OpenAI API Key exists
 if (type === 'all' && $settings.OPENAI_API_KEY) {
+const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+
 // Validate OPENAI_API_KEY
-const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
 method: 'GET',
 headers: {
 'Content-Type': 'application/json',
@@ -320,15 +323,19 @@
 return null;
 });
 
-const openAIModels = openaiModelRes?.data ?? null;
+const openAIModels = Array.isArray(openaiModelRes)
+? openaiModelRes
+: openaiModelRes?.data ?? null;
 
 models.push(
 ...(openAIModels
 ? [
 { name: 'hr' },
 ...openAIModels
-.map((model) => ({ name: model.id, label: 'OpenAI' }))
-.filter((model) => model.name.includes('gpt'))
+.map((model) => ({ name: model.id, external: true }))
+.filter((model) =>
+API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
+)
 ]
 : [])
 );
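The two hunks above change how the settings component validates the key and builds its model list: the request now targets a configurable base URL, responses that come back as a bare array (common for OpenAI-compatible servers) are accepted alongside the usual { data: [...] } envelope, each entry is tagged external: true instead of a hard-coded 'OpenAI' label, and the 'gpt' name filter is only applied when the base URL points at the official API. A minimal standalone sketch of that pattern (function and variable names here are illustrative, not part of the commit):

// Sketch only: fetch the model list from a configurable OpenAI-compatible endpoint.
const getExternalModels = async (apiKey, baseURL = 'https://api.openai.com/v1') => {
	const body = await fetch(`${baseURL}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${apiKey}`
		}
	})
		.then((res) => res.json())
		.catch(() => null);

	// Some servers return a bare array instead of { data: [...] }.
	const raw = Array.isArray(body) ? body : body?.data ?? null;
	if (!raw) {
		return [];
	}

	return raw
		.map((model) => ({ name: model.id, external: true }))
		// Only the official endpoint is narrowed down to GPT models; custom endpoints keep everything.
		.filter((model) => (baseURL.includes('openai') ? model.name.includes('gpt') : true));
};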
@@ -363,6 +370,7 @@
 
 gravatarEmail = settings.gravatarEmail ?? '';
 OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
+OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
 
 authEnabled = settings.authHeader !== undefined ? true : false;
 if (authEnabled) {
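When saved settings are loaded, a missing OPENAI_API_BASE_URL falls back to the official endpoint, so existing configurations keep working unchanged. A small illustration of the fallback (the local URL below is a hypothetical example, not from the commit):

// Sketch only: how the base-URL fallback resolves for two settings objects.
const resolveBaseURL = (settings) => settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';

console.log(resolveBaseURL({ OPENAI_API_KEY: 'sk-placeholder' }));
// -> "https://api.openai.com/v1" (default)
console.log(resolveBaseURL({ OPENAI_API_KEY: 'sk-placeholder', OPENAI_API_BASE_URL: 'http://localhost:8080/v1' }));
// -> "http://localhost:8080/v1" (custom OpenAI-compatible server)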
@@ -476,6 +484,30 @@
 <div class=" self-center">Models</div>
 </button>
 
+<button
+class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
+'external'
+? 'bg-gray-200 dark:bg-gray-700'
+: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
+on:click={() => {
+selectedTab = 'external';
+}}
+>
+<div class=" self-center mr-2">
+<svg
+xmlns="http://www.w3.org/2000/svg"
+viewBox="0 0 16 16"
+fill="currentColor"
+class="w-4 h-4"
+>
+<path
+d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
+/>
+</svg>
+</div>
+<div class=" self-center">External</div>
+</button>
+
 <button
 class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
 'addons'
@@ -859,14 +891,73 @@
 </div>
 </div>
 </div>
+{:else if selectedTab === 'external'}
+<form
+class="flex flex-col h-full justify-between space-y-3 text-sm"
+on:submit|preventDefault={() => {
+saveSettings({
+OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
+OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
+});
+show = false;
+}}
+>
+<div class=" space-y-3">
+<div>
+<div class=" mb-2.5 text-sm font-medium">OpenAI API Key</div>
+<div class="flex w-full">
+<div class="flex-1">
+<input
+class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+placeholder="Enter OpenAI API Key"
+bind:value={OPENAI_API_KEY}
+autocomplete="off"
+/>
+</div>
+</div>
+<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
+Adds optional support for online models.
+</div>
+</div>
+
+<hr class=" dark:border-gray-700" />
+
+<div>
+<div class=" mb-2.5 text-sm font-medium">OpenAI API Base URL</div>
+<div class="flex w-full">
+<div class="flex-1">
+<input
+class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
+placeholder="Enter OpenAI API Key"
+bind:value={OPENAI_API_BASE_URL}
+autocomplete="off"
+/>
+</div>
+</div>
+<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
+WebUI will make requests to <span class=" text-gray-200"
+>'{OPENAI_API_BASE_URL}/chat'</span
+>
+</div>
+</div>
+</div>
+
+<div class="flex justify-end pt-3 text-sm font-medium">
+<button
+class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
+type="submit"
+>
+Save
+</button>
+</div>
+</form>
 {:else if selectedTab === 'addons'}
 <form
 class="flex flex-col h-full justify-between space-y-3 text-sm"
 on:submit|preventDefault={() => {
 saveSettings({
 gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
-gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined,
-OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined
+gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
 });
 show = false;
 }}
@@ -962,26 +1053,6 @@
 >
 </div>
 </div>
-
-<hr class=" dark:border-gray-700" />
-<div>
-<div class=" mb-2.5 text-sm font-medium">
-OpenAI API Key <span class=" text-gray-400 text-sm">(optional)</span>
-</div>
-<div class="flex w-full">
-<div class="flex-1">
-<input
-class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
-placeholder="Enter OpenAI API Key"
-bind:value={OPENAI_API_KEY}
-autocomplete="off"
-/>
-</div>
-</div>
-<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
-Adds optional support for 'gpt-*' models available.
-</div>
-</div>
 </div>
 
 <div class="flex justify-end pt-3 text-sm font-medium">
@@ -55,7 +55,9 @@
 // If OpenAI API Key exists
 if ($settings.OPENAI_API_KEY) {
 // Validate OPENAI_API_KEY
-const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+
+const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
 method: 'GET',
 headers: {
 'Content-Type': 'application/json',
@@ -72,15 +74,19 @@
 return null;
 });
 
-const openAIModels = openaiModelRes?.data ?? null;
+const openAIModels = Array.isArray(openaiModelRes)
+? openaiModelRes
+: openaiModelRes?.data ?? null;
 
 models.push(
 ...(openAIModels
 ? [
 { name: 'hr' },
 ...openAIModels
-.map((model) => ({ name: model.id, label: 'OpenAI' }))
-.filter((model) => model.name.includes('gpt'))
+.map((model) => ({ name: model.id, external: true }))
+.filter((model) =>
+API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
+)
 ]
 : [])
 );
@@ -7,7 +7,7 @@
 import { splitStream } from '$lib/utils';
 import { goto } from '$app/navigation';
 
-import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
+import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
 
 import MessageInput from '$lib/components/chat/MessageInput.svelte';
 import Messages from '$lib/components/chat/Messages.svelte';
@@ -130,7 +130,8 @@
 const sendPrompt = async (userPrompt, parentId, _chatId) => {
 await Promise.all(
 selectedModels.map(async (model) => {
-if (model.includes('gpt-')) {
+console.log(model);
+if ($models.filter((m) => m.name === model)[0].external) {
 await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
 } else {
 await sendPromptOllama(model, userPrompt, parentId, _chatId);
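sendPrompt no longer keys off a 'gpt-' name prefix; it looks the selected model up in the $models store and routes to the OpenAI-style handler whenever the entry carries the external flag set during model loading. Equivalent standalone logic (the model list below is hypothetical):

// Sketch only: route a prompt based on the external flag rather than the model name.
const models = [
	{ name: 'llama2:latest' },
	{ name: 'gpt-3.5-turbo', external: true },
	{ name: 'mistral-7b-instruct', external: true } // served by a custom endpoint
];

const isExternal = (name) => models.filter((m) => m.name === name)[0]?.external === true;

console.log(isExternal('llama2:latest')); // false -> sendPromptOllama
console.log(isExternal('mistral-7b-instruct')); // true -> sendPromptOpenAI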
@@ -364,15 +365,15 @@
 ];
 }
 
-await tick();
-
 window.scrollTo({ top: document.body.scrollHeight });
 
-const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
+const res = await fetch(
+`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
+{
 method: 'POST',
 headers: {
-'Content-Type': 'application/json',
-Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
+Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
+'Content-Type': 'application/json'
 },
 body: JSON.stringify({
 model: model,
@@ -413,8 +414,13 @@
 num_ctx: $settings.num_ctx ?? undefined,
 frequency_penalty: $settings.repeat_penalty ?? undefined
 })
+}
+).catch((err) => {
+console.log(err);
+return null;
 });
 
+if (res && res.ok) {
 const reader = res.body
 .pipeThrough(new TextDecoderStream())
 .pipeThrough(splitStream('\n'))
|
||||||
console.log(error);
|
console.log(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if ($settings.notificationEnabled && !document.hasFocus()) {
|
||||||
|
const notification = new Notification(`OpenAI ${model}`, {
|
||||||
|
body: responseMessage.content,
|
||||||
|
icon: '/favicon.png'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($settings.responseAutoCopy) {
|
||||||
|
copyToClipboard(responseMessage.content);
|
||||||
|
}
|
||||||
|
|
||||||
if (autoScroll) {
|
if (autoScroll) {
|
||||||
window.scrollTo({ top: document.body.scrollHeight });
|
window.scrollTo({ top: document.body.scrollHeight });
|
||||||
}
|
}
|
||||||
|
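Desktop notification and auto-copy handling move inside the successful-response branch, so they fire once streaming finishes and no longer run when the request failed. A compact sketch of the same guard (placeholder values, and navigator.clipboard stands in for the component's copyToClipboard helper):

// Sketch only: post-response side effects, gated by user settings.
const settings = { notificationEnabled: true, responseAutoCopy: false }; // placeholder values
const responseMessage = { content: 'Hello from the model.' };

if (settings.notificationEnabled && !document.hasFocus()) {
	new Notification('OpenAI gpt-3.5-turbo', {
		body: responseMessage.content,
		icon: '/favicon.png'
	});
}

if (settings.responseAutoCopy) {
	navigator.clipboard.writeText(responseMessage.content); // stand-in for copyToClipboard()
}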
@@ -475,22 +492,36 @@
 history: history
 });
 }
+} else {
+if (res !== null) {
+const error = await res.json();
+console.log(error);
+if ('detail' in error) {
+toast.error(error.detail);
+responseMessage.content = error.detail;
+} else {
+if ('message' in error.error) {
+toast.error(error.error.message);
+responseMessage.content = error.error.message;
+} else {
+toast.error(error.error);
+responseMessage.content = error.error;
+}
+}
+} else {
+toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+}
+
+responseMessage.error = true;
+responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+responseMessage.done = true;
+messages = messages;
+}
 
 stopResponseFlag = false;
 
 await tick();
 
-if ($settings.notificationEnabled && !document.hasFocus()) {
-const notification = new Notification(`OpenAI ${model}`, {
-body: responseMessage.content,
-icon: '/favicon.png'
-});
-}
-
-if ($settings.responseAutoCopy) {
-copyToClipboard(responseMessage.content);
-}
-
 if (autoScroll) {
 window.scrollTo({ top: document.body.scrollHeight });
 }
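The new failure branch inspects the response body before falling back to a generic message: a detail field (backend-style errors) is preferred, then error.message (OpenAI-style), then the raw error value. A standalone sketch of that lookup with a hypothetical OpenAI-style payload:

// Sketch only: extract a human-readable message from an error payload.
const describeError = (error) => {
	if ('detail' in error) {
		return error.detail; // backend-style errors
	}
	if ('message' in error.error) {
		return error.error.message; // OpenAI-style errors
	}
	return error.error;
};

console.log(describeError({ error: { message: 'Incorrect API key provided', type: 'invalid_request_error' } }));
// -> "Incorrect API key provided"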
@@ -6,7 +6,7 @@
 import { onMount, tick } from 'svelte';
 import { convertMessagesToHistory, splitStream } from '$lib/utils';
 import { goto } from '$app/navigation';
-import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
+import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
 
 import MessageInput from '$lib/components/chat/MessageInput.svelte';
 import Messages from '$lib/components/chat/Messages.svelte';
@@ -144,7 +144,8 @@
 const sendPrompt = async (userPrompt, parentId, _chatId) => {
 await Promise.all(
 selectedModels.map(async (model) => {
-if (model.includes('gpt-')) {
+console.log(model);
+if ($models.filter((m) => m.name === model)[0].external) {
 await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
 } else {
 await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -378,15 +379,15 @@
 ];
 }
 
-await tick();
-
 window.scrollTo({ top: document.body.scrollHeight });
 
-const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
+const res = await fetch(
+`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
+{
 method: 'POST',
 headers: {
-'Content-Type': 'application/json',
-Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
+Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
+'Content-Type': 'application/json'
 },
 body: JSON.stringify({
 model: model,
@@ -427,8 +428,13 @@
 num_ctx: $settings.num_ctx ?? undefined,
 frequency_penalty: $settings.repeat_penalty ?? undefined
 })
+}
+).catch((err) => {
+console.log(err);
+return null;
 });
 
+if (res && res.ok) {
 const reader = res.body
 .pipeThrough(new TextDecoderStream())
 .pipeThrough(splitStream('\n'))
|
||||||
console.log(error);
|
console.log(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if ($settings.notificationEnabled && !document.hasFocus()) {
|
||||||
|
const notification = new Notification(`OpenAI ${model}`, {
|
||||||
|
body: responseMessage.content,
|
||||||
|
icon: '/favicon.png'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($settings.responseAutoCopy) {
|
||||||
|
copyToClipboard(responseMessage.content);
|
||||||
|
}
|
||||||
|
|
||||||
if (autoScroll) {
|
if (autoScroll) {
|
||||||
window.scrollTo({ top: document.body.scrollHeight });
|
window.scrollTo({ top: document.body.scrollHeight });
|
||||||
}
|
}
|
||||||
|
@@ -489,22 +506,36 @@
 history: history
 });
 }
+} else {
+if (res !== null) {
+const error = await res.json();
+console.log(error);
+if ('detail' in error) {
+toast.error(error.detail);
+responseMessage.content = error.detail;
+} else {
+if ('message' in error.error) {
+toast.error(error.error.message);
+responseMessage.content = error.error.message;
+} else {
+toast.error(error.error);
+responseMessage.content = error.error;
+}
+}
+} else {
+toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+}
+
+responseMessage.error = true;
+responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+responseMessage.done = true;
+messages = messages;
+}
 
 stopResponseFlag = false;
 
 await tick();
 
-if ($settings.notificationEnabled && !document.hasFocus()) {
-const notification = new Notification(`OpenAI ${model}`, {
-body: responseMessage.content,
-icon: '/favicon.png'
-});
-}
-
-if ($settings.responseAutoCopy) {
-copyToClipboard(responseMessage.content);
-}
-
 if (autoScroll) {
 window.scrollTo({ top: document.body.scrollHeight });
 }