From f62228165c986f3b650690cdfdde1907045aa5e3 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Fri, 22 Dec 2023 20:09:50 -0800
Subject: [PATCH 1/6] feat: custom openai api endpoint support

---
 src/lib/components/chat/SettingsModal.svelte | 119 +++++++--
 src/routes/(app)/+layout.svelte              |  10 +-
 src/routes/(app)/+page.svelte                | 245 +++++++++++--------
 src/routes/(app)/c/[id]/+page.svelte         | 245 +++++++++++--------
 4 files changed, 381 insertions(+), 238 deletions(-)

diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte
index 67617218..dbca0e79 100644
--- a/src/lib/components/chat/SettingsModal.svelte
+++ b/src/lib/components/chat/SettingsModal.svelte
@@ -56,6 +56,7 @@
 	let gravatarEmail = '';
 	let OPENAI_API_KEY = '';
+	let OPENAI_API_BASE_URL = '';
 
 	// Auth
 	let authEnabled = false;
@@ -302,8 +303,10 @@
 		// If OpenAI API Key exists
 		if (type === 'all' && $settings.OPENAI_API_KEY) {
+			const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+
 			// Validate OPENAI_API_KEY
-			const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+			const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
 				method: 'GET',
 				headers: {
 					'Content-Type': 'application/json',
@@ -327,8 +330,10 @@
 					? [
 							{ name: 'hr' },
 							...openAIModels
-								.map((model) => ({ name: model.id, label: 'OpenAI' }))
-								.filter((model) => model.name.includes('gpt'))
+								.map((model) => ({ name: model.id, external: true }))
+								.filter((model) =>
+									API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
+								)
 					  ]
 					: [])
 			);
@@ -363,6 +368,7 @@
 			gravatarEmail = settings.gravatarEmail ?? '';
 			OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
+			OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
 
 			authEnabled = settings.authHeader !== undefined ? true : false;
 			if (authEnabled) {
@@ -476,6 +482,30 @@
Models
+
+
+
+
 				{:else if selectedTab === 'addons'}
 							{
 								saveSettings({
 									gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
-									gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined,
-									OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined
+									gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
 								});
 								show = false;
 							}}
@@ -962,26 +1051,6 @@
 			>
-
-
-
-
- OpenAI API Key (optional) -
-
-
- -
-
-
- Adds optional support for 'gpt-*' models available. -
-
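
Note for reviewers: taken together, the SettingsModal.svelte changes above make the OpenAI base URL configurable and only keep the hard 'gpt' name filter when the official endpoint is in use; any other OpenAI-compatible endpoint contributes its full model list, with each entry tagged `external: true`. A minimal sketch of that behaviour is below (illustrative only: `listExternalModels` and the default-URL constant are not part of this patch, and a global `fetch` such as the browser's or Node 18+ is assumed).

    // Sketch of the model-list behaviour introduced above (not the component code).
    const DEFAULT_OPENAI_API_BASE_URL = 'https://api.openai.com/v1';

    async function listExternalModels(apiKey, baseUrl) {
        // Fall back to the official endpoint when no custom base URL is saved.
        const API_BASE_URL = baseUrl ?? DEFAULT_OPENAI_API_BASE_URL;

        const res = await fetch(`${API_BASE_URL}/models`, {
            method: 'GET',
            headers: {
                'Content-Type': 'application/json',
                Authorization: `Bearer ${apiKey}`
            }
        }).catch(() => null);
        if (!res || !res.ok) return [];

        const body = await res.json();
        const data = body?.data ?? null;
        if (!data) return [];

        return data
            .map((model) => ({ name: model.id, external: true }))
            // Only the official OpenAI endpoint is narrowed to 'gpt' models;
            // other OpenAI-compatible endpoints keep their full model list.
            .filter((model) =>
                API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
            );
    }

For example, pointing it at a locally hosted OpenAI-compatible server would list every model that server reports, while the default base URL still yields only 'gpt' models.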
diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte index fe523d93..94d242e1 100644 --- a/src/routes/(app)/+layout.svelte +++ b/src/routes/(app)/+layout.svelte @@ -55,7 +55,9 @@ // If OpenAI API Key exists if ($settings.OPENAI_API_KEY) { // Validate OPENAI_API_KEY - const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, { + + const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'; + const openaiModelRes = await fetch(`${API_BASE_URL}/models`, { method: 'GET', headers: { 'Content-Type': 'application/json', @@ -79,8 +81,10 @@ ? [ { name: 'hr' }, ...openAIModels - .map((model) => ({ name: model.id, label: 'OpenAI' })) - .filter((model) => model.name.includes('gpt')) + .map((model) => ({ name: model.id, external: true })) + .filter((model) => + API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true + ) ] : []) ); diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte index d0b83b80..29e6b518 100644 --- a/src/routes/(app)/+page.svelte +++ b/src/routes/(app)/+page.svelte @@ -7,7 +7,7 @@ import { splitStream } from '$lib/utils'; import { goto } from '$app/navigation'; - import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; + import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; import MessageInput from '$lib/components/chat/MessageInput.svelte'; import Messages from '$lib/components/chat/Messages.svelte'; @@ -130,7 +130,8 @@ const sendPrompt = async (userPrompt, parentId, _chatId) => { await Promise.all( selectedModels.map(async (model) => { - if (model.includes('gpt-')) { + console.log(model); + if ($models.filter((m) => m.name === model)[0].external) { await sendPromptOpenAI(model, userPrompt, parentId, _chatId); } else { await sendPromptOllama(model, userPrompt, parentId, _chatId); @@ -368,129 +369,163 @@ window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`https://api.openai.com/v1/chat/completions`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${$settings.OPENAI_API_KEY}` - }, - body: JSON.stringify({ - model: model, - stream: true, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - ...(message.files + const res = await fetch( + `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, + 'Content-Type': 'application/json', + 'HTTP-Referer': `https://ollamahub.com/`, + 'X-Title': `Ollama WebUI` + }, + body: JSON.stringify({ + model: model, + stream: true, + messages: [ + $settings.system ? { - content: [ - { - type: 'text', - text: message.content - }, - ...message.files - .filter((file) => file.type === 'image') - .map((file) => ({ - type: 'image_url', - image_url: { - url: file.url - } - })) - ] + role: 'system', + content: $settings.system } - : { content: message.content }) - })), - temperature: $settings.temperature ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - frequency_penalty: $settings.repeat_penalty ?? undefined - }) + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + ...(message.files + ? 
{ + content: [ + { + type: 'text', + text: message.content + }, + ...message.files + .filter((file) => file.type === 'image') + .map((file) => ({ + type: 'image_url', + image_url: { + url: file.url + } + })) + ] + } + : { content: message.content }) + })), + temperature: $settings.temperature ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + frequency_penalty: $settings.repeat_penalty ?? undefined + }) + } + ).catch((err) => { + console.log(err); + return null; }); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + if (res && res.ok) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done || stopResponseFlag || _chatId !== $chatId) { - responseMessage.done = true; - messages = messages; - break; - } + while (true) { + const { value, done } = await reader.read(); + if (done || stopResponseFlag || _chatId !== $chatId) { + responseMessage.done = true; + messages = messages; + break; + } - try { - let lines = value.split('\n'); + try { + let lines = value.split('\n'); - for (const line of lines) { - if (line !== '') { - console.log(line); - if (line === 'data: [DONE]') { - responseMessage.done = true; - messages = messages; - } else { - let data = JSON.parse(line.replace(/^data: /, '')); - console.log(data); - - if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { - continue; - } else { - responseMessage.content += data.choices[0].delta.content ?? ''; + for (const line of lines) { + if (line !== '') { + console.log(line); + if (line === 'data: [DONE]') { + responseMessage.done = true; messages = messages; + } else { + let data = JSON.parse(line.replace(/^data: /, '')); + console.log(data); + + if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { + continue; + } else { + responseMessage.content += data.choices[0].delta.content ?? ''; + messages = messages; + } } } } + } catch (error) { + console.log(error); } - } catch (error) { + + if ($settings.notificationEnabled && !document.hasFocus()) { + const notification = new Notification(`OpenAI ${model}`, { + body: responseMessage.content, + icon: '/favicon.png' + }); + } + + if ($settings.responseAutoCopy) { + copyToClipboard(responseMessage.content); + } + + if (autoScroll) { + window.scrollTo({ top: document.body.scrollHeight }); + } + + await $db.updateChatById(_chatId, { + title: title === '' ? 'New Chat' : title, + models: selectedModels, + system: $settings.system ?? undefined, + options: { + seed: $settings.seed ?? undefined, + temperature: $settings.temperature ?? undefined, + repeat_penalty: $settings.repeat_penalty ?? undefined, + top_k: $settings.top_k ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + ...($settings.options ?? {}) + }, + messages: messages, + history: history + }); + } + } else { + if (res !== null) { + const error = await res.json(); console.log(error); + if ('detail' in error) { + toast.error(error.detail); + responseMessage.content = error.detail; + } else { + if ('message' in error.error) { + toast.error(error.error.message); + responseMessage.content = error.error.message; + } else { + toast.error(error.error); + responseMessage.content = error.error; + } + } + } else { + toast.error(`Uh-oh! There was an issue connecting to ${model}.`); + responseMessage.content = `Uh-oh! 
There was an issue connecting to ${model}.`; } - if (autoScroll) { - window.scrollTo({ top: document.body.scrollHeight }); - } - - await $db.updateChatById(_chatId, { - title: title === '' ? 'New Chat' : title, - models: selectedModels, - system: $settings.system ?? undefined, - options: { - seed: $settings.seed ?? undefined, - temperature: $settings.temperature ?? undefined, - repeat_penalty: $settings.repeat_penalty ?? undefined, - top_k: $settings.top_k ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - ...($settings.options ?? {}) - }, - messages: messages, - history: history - }); + responseMessage.error = true; + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; + responseMessage.done = true; + messages = messages; } stopResponseFlag = false; - await tick(); - if ($settings.notificationEnabled && !document.hasFocus()) { - const notification = new Notification(`OpenAI ${model}`, { - body: responseMessage.content, - icon: '/favicon.png' - }); - } - - if ($settings.responseAutoCopy) { - copyToClipboard(responseMessage.content); - } - if (autoScroll) { window.scrollTo({ top: document.body.scrollHeight }); } diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte index bf7207fb..a33e8feb 100644 --- a/src/routes/(app)/c/[id]/+page.svelte +++ b/src/routes/(app)/c/[id]/+page.svelte @@ -6,7 +6,7 @@ import { onMount, tick } from 'svelte'; import { convertMessagesToHistory, splitStream } from '$lib/utils'; import { goto } from '$app/navigation'; - import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; + import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores'; import MessageInput from '$lib/components/chat/MessageInput.svelte'; import Messages from '$lib/components/chat/Messages.svelte'; @@ -144,7 +144,8 @@ const sendPrompt = async (userPrompt, parentId, _chatId) => { await Promise.all( selectedModels.map(async (model) => { - if (model.includes('gpt-')) { + console.log(model); + if ($models.filter((m) => m.name === model)[0].external) { await sendPromptOpenAI(model, userPrompt, parentId, _chatId); } else { await sendPromptOllama(model, userPrompt, parentId, _chatId); @@ -382,129 +383,163 @@ window.scrollTo({ top: document.body.scrollHeight }); - const res = await fetch(`https://api.openai.com/v1/chat/completions`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${$settings.OPENAI_API_KEY}` - }, - body: JSON.stringify({ - model: model, - stream: true, - messages: [ - $settings.system - ? { - role: 'system', - content: $settings.system - } - : undefined, - ...messages - ] - .filter((message) => message) - .map((message) => ({ - role: message.role, - ...(message.files + const res = await fetch( + `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, + 'Content-Type': 'application/json', + 'HTTP-Referer': `https://ollamahub.com/`, + 'X-Title': `Ollama WebUI` + }, + body: JSON.stringify({ + model: model, + stream: true, + messages: [ + $settings.system ? 
{ - content: [ - { - type: 'text', - text: message.content - }, - ...message.files - .filter((file) => file.type === 'image') - .map((file) => ({ - type: 'image_url', - image_url: { - url: file.url - } - })) - ] + role: 'system', + content: $settings.system } - : { content: message.content }) - })), - temperature: $settings.temperature ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - frequency_penalty: $settings.repeat_penalty ?? undefined - }) + : undefined, + ...messages + ] + .filter((message) => message) + .map((message) => ({ + role: message.role, + ...(message.files + ? { + content: [ + { + type: 'text', + text: message.content + }, + ...message.files + .filter((file) => file.type === 'image') + .map((file) => ({ + type: 'image_url', + image_url: { + url: file.url + } + })) + ] + } + : { content: message.content }) + })), + temperature: $settings.temperature ?? undefined, + top_p: $settings.top_p ?? undefined, + num_ctx: $settings.num_ctx ?? undefined, + frequency_penalty: $settings.repeat_penalty ?? undefined + }) + } + ).catch((err) => { + console.log(err); + return null; }); - const reader = res.body - .pipeThrough(new TextDecoderStream()) - .pipeThrough(splitStream('\n')) - .getReader(); + if (res && res.ok) { + const reader = res.body + .pipeThrough(new TextDecoderStream()) + .pipeThrough(splitStream('\n')) + .getReader(); - while (true) { - const { value, done } = await reader.read(); - if (done || stopResponseFlag || _chatId !== $chatId) { - responseMessage.done = true; - messages = messages; - break; - } + while (true) { + const { value, done } = await reader.read(); + if (done || stopResponseFlag || _chatId !== $chatId) { + responseMessage.done = true; + messages = messages; + break; + } - try { - let lines = value.split('\n'); + try { + let lines = value.split('\n'); - for (const line of lines) { - if (line !== '') { - console.log(line); - if (line === 'data: [DONE]') { - responseMessage.done = true; - messages = messages; - } else { - let data = JSON.parse(line.replace(/^data: /, '')); - console.log(data); - - if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { - continue; - } else { - responseMessage.content += data.choices[0].delta.content ?? ''; + for (const line of lines) { + if (line !== '') { + console.log(line); + if (line === 'data: [DONE]') { + responseMessage.done = true; messages = messages; + } else { + let data = JSON.parse(line.replace(/^data: /, '')); + console.log(data); + + if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { + continue; + } else { + responseMessage.content += data.choices[0].delta.content ?? ''; + messages = messages; + } } } } + } catch (error) { + console.log(error); } - } catch (error) { + + if ($settings.notificationEnabled && !document.hasFocus()) { + const notification = new Notification(`OpenAI ${model}`, { + body: responseMessage.content, + icon: '/favicon.png' + }); + } + + if ($settings.responseAutoCopy) { + copyToClipboard(responseMessage.content); + } + + if (autoScroll) { + window.scrollTo({ top: document.body.scrollHeight }); + } + + await $db.updateChatById(_chatId, { + title: title === '' ? 'New Chat' : title, + models: selectedModels, + system: $settings.system ?? undefined, + options: { + seed: $settings.seed ?? undefined, + temperature: $settings.temperature ?? undefined, + repeat_penalty: $settings.repeat_penalty ?? undefined, + top_k: $settings.top_k ?? undefined, + top_p: $settings.top_p ?? 
undefined, + num_ctx: $settings.num_ctx ?? undefined, + ...($settings.options ?? {}) + }, + messages: messages, + history: history + }); + } + } else { + if (res !== null) { + const error = await res.json(); console.log(error); + if ('detail' in error) { + toast.error(error.detail); + responseMessage.content = error.detail; + } else { + if ('message' in error.error) { + toast.error(error.error.message); + responseMessage.content = error.error.message; + } else { + toast.error(error.error); + responseMessage.content = error.error; + } + } + } else { + toast.error(`Uh-oh! There was an issue connecting to ${model}.`); + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; } - if (autoScroll) { - window.scrollTo({ top: document.body.scrollHeight }); - } - - await $db.updateChatById(_chatId, { - title: title === '' ? 'New Chat' : title, - models: selectedModels, - system: $settings.system ?? undefined, - options: { - seed: $settings.seed ?? undefined, - temperature: $settings.temperature ?? undefined, - repeat_penalty: $settings.repeat_penalty ?? undefined, - top_k: $settings.top_k ?? undefined, - top_p: $settings.top_p ?? undefined, - num_ctx: $settings.num_ctx ?? undefined, - ...($settings.options ?? {}) - }, - messages: messages, - history: history - }); + responseMessage.error = true; + responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; + responseMessage.done = true; + messages = messages; } stopResponseFlag = false; - await tick(); - if ($settings.notificationEnabled && !document.hasFocus()) { - const notification = new Notification(`OpenAI ${model}`, { - body: responseMessage.content, - icon: '/favicon.png' - }); - } - - if ($settings.responseAutoCopy) { - copyToClipboard(responseMessage.content); - } - if (autoScroll) { window.scrollTo({ top: document.body.scrollHeight }); } From 0fcdee60cd2d121b45c59e2dfdad33ea0b903f5c Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Fri, 22 Dec 2023 20:10:17 -0800 Subject: [PATCH 2/6] chore: version update --- backend/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/config.py b/backend/config.py index 6abea5ed..c5a79f57 100644 --- a/backend/config.py +++ b/backend/config.py @@ -30,7 +30,7 @@ if ENV == "prod": # WEBUI_VERSION #################################### -WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.34") +WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.35") #################################### # WEBUI_AUTH From ecc2466f1ed21b45147e967ebe7b44ff531904e8 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Fri, 22 Dec 2023 20:31:42 -0800 Subject: [PATCH 3/6] feat: alternative models response support --- src/lib/components/chat/SettingsModal.svelte | 4 +++- src/routes/(app)/+layout.svelte | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index dbca0e79..5d4334c4 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -323,7 +323,9 @@ return null; }); - const openAIModels = openaiModelRes?.data ?? null; + const openAIModels = Array.isArray(openaiModelRes) + ? openaiModelRes + : openaiModelRes?.data ?? 
null;
 
 			models.push(
 				...(openAIModels
diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte
index 94d242e1..af8c7522 100644
--- a/src/routes/(app)/+layout.svelte
+++ b/src/routes/(app)/+layout.svelte
@@ -74,7 +74,9 @@
 				return null;
 			});
 
-			const openAIModels = openaiModelRes?.data ?? null;
+			const openAIModels = Array.isArray(openaiModelRes)
+				? openaiModelRes
+				: openaiModelRes?.data ?? null;
 
 			models.push(
 				...(openAIModels

From b79c06023b2cc2a8c8b0797893c75f361ad2ff59 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Fri, 22 Dec 2023 20:40:17 -0800
Subject: [PATCH 4/6] fix: custom suggestion prompts styling

---
 src/lib/components/chat/MessageInput.svelte             | 2 +-
 src/lib/components/chat/MessageInput/Suggestions.svelte | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/lib/components/chat/MessageInput.svelte b/src/lib/components/chat/MessageInput.svelte
index 57f48bef..bb941c90 100644
--- a/src/lib/components/chat/MessageInput.svelte
+++ b/src/lib/components/chat/MessageInput.svelte
@@ -155,7 +155,7 @@
{#if messages.length == 0 && suggestionPrompts.length !== 0} -
+
{/if} diff --git a/src/lib/components/chat/MessageInput/Suggestions.svelte b/src/lib/components/chat/MessageInput/Suggestions.svelte index 6bd1876b..58c75fd1 100644 --- a/src/lib/components/chat/MessageInput/Suggestions.svelte +++ b/src/lib/components/chat/MessageInput/Suggestions.svelte @@ -3,7 +3,7 @@ export let suggestionPrompts = []; -
+
{#each suggestionPrompts as prompt, promptIdx}
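
A note on the streaming handler from PATCH 1/6: the rewritten sendPromptOpenAI in +page.svelte and c/[id]/+page.svelte reads the /chat/completions response as a server-sent-events stream and appends each delta to the open message. A stripped-down sketch of the per-chunk handling follows (it assumes, as the patch does, that every `data:` line carries one complete JSON event; `applyStreamChunk` is an illustrative name rather than code from the patch, and the real loop also checks the stop flag, the active chat id, notifications, and chat persistence).

    function applyStreamChunk(chunk, responseMessage) {
        for (const line of chunk.split('\n')) {
            if (line === '') continue;
            if (line === 'data: [DONE]') {
                // The server marks the end of the stream with a sentinel line.
                responseMessage.done = true;
                continue;
            }
            const data = JSON.parse(line.replace(/^data: /, ''));
            const delta = data.choices[0].delta.content ?? '';
            // Drop a leading bare newline so the reply does not start with a blank line.
            if (responseMessage.content === '' && delta === '\n') continue;
            responseMessage.content += delta;
        }
        return responseMessage;
    }

For instance, applyStreamChunk('data: {"choices":[{"delta":{"content":"Hi"}}]}', { content: '', done: false }) returns { content: 'Hi', done: false }.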
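
PATCH 3/6 ("alternative models response support") covers endpoints whose /models route returns a bare JSON array instead of OpenAI's { "data": [...] } envelope. The added Array.isArray branch boils down to the following sketch (the helper name is illustrative, not part of the patch).

    // Accept both response shapes; returns null when neither is present.
    function normalizeModelList(openaiModelRes) {
        return Array.isArray(openaiModelRes) ? openaiModelRes : openaiModelRes?.data ?? null;
    }

With it, normalizeModelList([{ id: 'llama2' }]) and normalizeModelList({ data: [{ id: 'llama2' }] }) both produce the same array, and a failed or empty response still falls through to null so no external models are added.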