+
{/if}
diff --git a/src/lib/components/chat/MessageInput/Suggestions.svelte b/src/lib/components/chat/MessageInput/Suggestions.svelte
index 6bd1876b..58c75fd1 100644
--- a/src/lib/components/chat/MessageInput/Suggestions.svelte
+++ b/src/lib/components/chat/MessageInput/Suggestions.svelte
@@ -3,7 +3,7 @@
export let suggestionPrompts = [];
-
+
{#each suggestionPrompts as prompt, promptIdx}
- OpenAI API Key (optional)
- Adds optional support for 'gpt-*' models available.
diff --git a/src/routes/(app)/+layout.svelte b/src/routes/(app)/+layout.svelte
index fe523d93..af8c7522 100644
--- a/src/routes/(app)/+layout.svelte
+++ b/src/routes/(app)/+layout.svelte
@@ -55,7 +55,9 @@
// If OpenAI API Key exists
if ($settings.OPENAI_API_KEY) {
// Validate OPENAI_API_KEY
- const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
+
+ const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
+ const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -72,15 +74,19 @@
return null;
});
- const openAIModels = openaiModelRes?.data ?? null;
+ const openAIModels = Array.isArray(openaiModelRes)
+ ? openaiModelRes
+ : openaiModelRes?.data ?? null;
models.push(
...(openAIModels
? [
{ name: 'hr' },
...openAIModels
- .map((model) => ({ name: model.id, label: 'OpenAI' }))
- .filter((model) => model.name.includes('gpt'))
+ .map((model) => ({ name: model.id, external: true }))
+ .filter((model) =>
+ API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
+ )
]
: [])
);
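
The layout change above normalizes the /models response before merging external models into the picker: the request goes to $settings.OPENAI_API_BASE_URL (falling back to https://api.openai.com/v1), the response is accepted either as a bare array or as an OpenAI-style { data: [...] } envelope, each entry is tagged external: true, and the 'gpt' name filter is only applied when the base URL actually points at OpenAI. A minimal sketch of that normalization, outside the diff (the helper name is illustrative, not part of the change):

// Hypothetical helper mirroring the normalization done in +layout.svelte above.
const normalizeExternalModels = (res, baseURL) => {
	// Some OpenAI-compatible servers return a bare array; OpenAI itself wraps it in { data: [...] }.
	const list = Array.isArray(res) ? res : res?.data ?? null;
	if (!list) return [];
	return list
		.map((model) => ({ name: model.id, external: true }))
		// Restrict to 'gpt-*' ids only when talking to api.openai.com; other endpoints may expose arbitrary ids.
		.filter((model) => (baseURL.includes('openai') ? model.name.includes('gpt') : true));
};
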
diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte
index d0b83b80..c0885d19 100644
--- a/src/routes/(app)/+page.svelte
+++ b/src/routes/(app)/+page.svelte
@@ -7,7 +7,7 @@
import { splitStream } from '$lib/utils';
import { goto } from '$app/navigation';
- import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
+ import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -130,7 +130,8 @@
const sendPrompt = async (userPrompt, parentId, _chatId) => {
await Promise.all(
selectedModels.map(async (model) => {
- if (model.includes('gpt-')) {
+ console.log(model);
+ if ($models.filter((m) => m.name === model)[0].external) {
await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
} else {
await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -364,133 +365,163 @@
];
}
- await tick();
-
window.scrollTo({ top: document.body.scrollHeight });
- const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
- },
- body: JSON.stringify({
- model: model,
- stream: true,
- messages: [
- $settings.system
- ? {
- role: 'system',
- content: $settings.system
- }
- : undefined,
- ...messages
- ]
- .filter((message) => message)
- .map((message) => ({
- role: message.role,
- ...(message.files
+ const res = await fetch(
+ `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
+ {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ model: model,
+ stream: true,
+ messages: [
+ $settings.system
? {
- content: [
- {
- type: 'text',
- text: message.content
- },
- ...message.files
- .filter((file) => file.type === 'image')
- .map((file) => ({
- type: 'image_url',
- image_url: {
- url: file.url
- }
- }))
- ]
+ role: 'system',
+ content: $settings.system
}
- : { content: message.content })
- })),
- temperature: $settings.temperature ?? undefined,
- top_p: $settings.top_p ?? undefined,
- num_ctx: $settings.num_ctx ?? undefined,
- frequency_penalty: $settings.repeat_penalty ?? undefined
- })
+ : undefined,
+ ...messages
+ ]
+ .filter((message) => message)
+ .map((message) => ({
+ role: message.role,
+ ...(message.files
+ ? {
+ content: [
+ {
+ type: 'text',
+ text: message.content
+ },
+ ...message.files
+ .filter((file) => file.type === 'image')
+ .map((file) => ({
+ type: 'image_url',
+ image_url: {
+ url: file.url
+ }
+ }))
+ ]
+ }
+ : { content: message.content })
+ })),
+ temperature: $settings.temperature ?? undefined,
+ top_p: $settings.top_p ?? undefined,
+ num_ctx: $settings.num_ctx ?? undefined,
+ frequency_penalty: $settings.repeat_penalty ?? undefined
+ })
+ }
+ ).catch((err) => {
+ console.log(err);
+ return null;
});
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
+ if (res && res.ok) {
+ const reader = res.body
+ .pipeThrough(new TextDecoderStream())
+ .pipeThrough(splitStream('\n'))
+ .getReader();
- while (true) {
- const { value, done } = await reader.read();
- if (done || stopResponseFlag || _chatId !== $chatId) {
- responseMessage.done = true;
- messages = messages;
- break;
- }
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done || stopResponseFlag || _chatId !== $chatId) {
+ responseMessage.done = true;
+ messages = messages;
+ break;
+ }
- try {
- let lines = value.split('\n');
+ try {
+ let lines = value.split('\n');
- for (const line of lines) {
- if (line !== '') {
- console.log(line);
- if (line === 'data: [DONE]') {
- responseMessage.done = true;
- messages = messages;
- } else {
- let data = JSON.parse(line.replace(/^data: /, ''));
- console.log(data);
-
- if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
- continue;
- } else {
- responseMessage.content += data.choices[0].delta.content ?? '';
+ for (const line of lines) {
+ if (line !== '') {
+ console.log(line);
+ if (line === 'data: [DONE]') {
+ responseMessage.done = true;
messages = messages;
+ } else {
+ let data = JSON.parse(line.replace(/^data: /, ''));
+ console.log(data);
+
+ if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
+ continue;
+ } else {
+ responseMessage.content += data.choices[0].delta.content ?? '';
+ messages = messages;
+ }
}
}
}
+ } catch (error) {
+ console.log(error);
}
- } catch (error) {
+
+ if ($settings.notificationEnabled && !document.hasFocus()) {
+ const notification = new Notification(`OpenAI ${model}`, {
+ body: responseMessage.content,
+ icon: '/favicon.png'
+ });
+ }
+
+ if ($settings.responseAutoCopy) {
+ copyToClipboard(responseMessage.content);
+ }
+
+ if (autoScroll) {
+ window.scrollTo({ top: document.body.scrollHeight });
+ }
+
+ await $db.updateChatById(_chatId, {
+ title: title === '' ? 'New Chat' : title,
+ models: selectedModels,
+ system: $settings.system ?? undefined,
+ options: {
+ seed: $settings.seed ?? undefined,
+ temperature: $settings.temperature ?? undefined,
+ repeat_penalty: $settings.repeat_penalty ?? undefined,
+ top_k: $settings.top_k ?? undefined,
+ top_p: $settings.top_p ?? undefined,
+ num_ctx: $settings.num_ctx ?? undefined,
+ ...($settings.options ?? {})
+ },
+ messages: messages,
+ history: history
+ });
+ }
+ } else {
+ if (res !== null) {
+ const error = await res.json();
console.log(error);
+ if ('detail' in error) {
+ toast.error(error.detail);
+ responseMessage.content = error.detail;
+ } else {
+ if ('message' in error.error) {
+ toast.error(error.error.message);
+ responseMessage.content = error.error.message;
+ } else {
+ toast.error(error.error);
+ responseMessage.content = error.error;
+ }
+ }
+ } else {
+ toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+ responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
- if (autoScroll) {
- window.scrollTo({ top: document.body.scrollHeight });
- }
-
- await $db.updateChatById(_chatId, {
- title: title === '' ? 'New Chat' : title,
- models: selectedModels,
- system: $settings.system ?? undefined,
- options: {
- seed: $settings.seed ?? undefined,
- temperature: $settings.temperature ?? undefined,
- repeat_penalty: $settings.repeat_penalty ?? undefined,
- top_k: $settings.top_k ?? undefined,
- top_p: $settings.top_p ?? undefined,
- num_ctx: $settings.num_ctx ?? undefined,
- ...($settings.options ?? {})
- },
- messages: messages,
- history: history
- });
+ responseMessage.error = true;
+ responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+ responseMessage.done = true;
+ messages = messages;
}
stopResponseFlag = false;
-
await tick();
- if ($settings.notificationEnabled && !document.hasFocus()) {
- const notification = new Notification(`OpenAI ${model}`, {
- body: responseMessage.content,
- icon: '/favicon.png'
- });
- }
-
- if ($settings.responseAutoCopy) {
- copyToClipboard(responseMessage.content);
- }
-
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
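
Both chat pages now pick between the Ollama and OpenAI code paths by looking up the selected model in the $models store and checking its external flag, rather than matching the 'gpt-' name prefix, and the OpenAI request body expands attached images into vision-style content arrays. A small sketch of those two pieces under the same assumptions as the diff (helper names are illustrative; the diff itself uses .filter(...)[0]):

// Dispatch on the store's `external` flag instead of the model name.
const isExternal = (models, name) => models.find((m) => m.name === name)?.external === true;

// Map a chat message to the OpenAI format, turning image attachments into image_url parts.
const toOpenAIMessage = (message) => ({
	role: message.role,
	...(message.files
		? {
				content: [
					{ type: 'text', text: message.content },
					...message.files
						.filter((file) => file.type === 'image')
						.map((file) => ({ type: 'image_url', image_url: { url: file.url } }))
				]
		  }
		: { content: message.content })
});
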
diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte
index bf7207fb..6ff95cd5 100644
--- a/src/routes/(app)/c/[id]/+page.svelte
+++ b/src/routes/(app)/c/[id]/+page.svelte
@@ -6,7 +6,7 @@
import { onMount, tick } from 'svelte';
import { convertMessagesToHistory, splitStream } from '$lib/utils';
import { goto } from '$app/navigation';
- import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
+ import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -144,7 +144,8 @@
const sendPrompt = async (userPrompt, parentId, _chatId) => {
await Promise.all(
selectedModels.map(async (model) => {
- if (model.includes('gpt-')) {
+ console.log(model);
+ if ($models.filter((m) => m.name === model)[0].external) {
await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
} else {
await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -378,133 +379,163 @@
];
}
- await tick();
-
window.scrollTo({ top: document.body.scrollHeight });
- const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
- },
- body: JSON.stringify({
- model: model,
- stream: true,
- messages: [
- $settings.system
- ? {
- role: 'system',
- content: $settings.system
- }
- : undefined,
- ...messages
- ]
- .filter((message) => message)
- .map((message) => ({
- role: message.role,
- ...(message.files
+ const res = await fetch(
+ `${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
+ {
+ method: 'POST',
+ headers: {
+ Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ model: model,
+ stream: true,
+ messages: [
+ $settings.system
? {
- content: [
- {
- type: 'text',
- text: message.content
- },
- ...message.files
- .filter((file) => file.type === 'image')
- .map((file) => ({
- type: 'image_url',
- image_url: {
- url: file.url
- }
- }))
- ]
+ role: 'system',
+ content: $settings.system
}
- : { content: message.content })
- })),
- temperature: $settings.temperature ?? undefined,
- top_p: $settings.top_p ?? undefined,
- num_ctx: $settings.num_ctx ?? undefined,
- frequency_penalty: $settings.repeat_penalty ?? undefined
- })
+ : undefined,
+ ...messages
+ ]
+ .filter((message) => message)
+ .map((message) => ({
+ role: message.role,
+ ...(message.files
+ ? {
+ content: [
+ {
+ type: 'text',
+ text: message.content
+ },
+ ...message.files
+ .filter((file) => file.type === 'image')
+ .map((file) => ({
+ type: 'image_url',
+ image_url: {
+ url: file.url
+ }
+ }))
+ ]
+ }
+ : { content: message.content })
+ })),
+ temperature: $settings.temperature ?? undefined,
+ top_p: $settings.top_p ?? undefined,
+ num_ctx: $settings.num_ctx ?? undefined,
+ frequency_penalty: $settings.repeat_penalty ?? undefined
+ })
+ }
+ ).catch((err) => {
+ console.log(err);
+ return null;
});
- const reader = res.body
- .pipeThrough(new TextDecoderStream())
- .pipeThrough(splitStream('\n'))
- .getReader();
+ if (res && res.ok) {
+ const reader = res.body
+ .pipeThrough(new TextDecoderStream())
+ .pipeThrough(splitStream('\n'))
+ .getReader();
- while (true) {
- const { value, done } = await reader.read();
- if (done || stopResponseFlag || _chatId !== $chatId) {
- responseMessage.done = true;
- messages = messages;
- break;
- }
+ while (true) {
+ const { value, done } = await reader.read();
+ if (done || stopResponseFlag || _chatId !== $chatId) {
+ responseMessage.done = true;
+ messages = messages;
+ break;
+ }
- try {
- let lines = value.split('\n');
+ try {
+ let lines = value.split('\n');
- for (const line of lines) {
- if (line !== '') {
- console.log(line);
- if (line === 'data: [DONE]') {
- responseMessage.done = true;
- messages = messages;
- } else {
- let data = JSON.parse(line.replace(/^data: /, ''));
- console.log(data);
-
- if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
- continue;
- } else {
- responseMessage.content += data.choices[0].delta.content ?? '';
+ for (const line of lines) {
+ if (line !== '') {
+ console.log(line);
+ if (line === 'data: [DONE]') {
+ responseMessage.done = true;
messages = messages;
+ } else {
+ let data = JSON.parse(line.replace(/^data: /, ''));
+ console.log(data);
+
+ if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
+ continue;
+ } else {
+ responseMessage.content += data.choices[0].delta.content ?? '';
+ messages = messages;
+ }
}
}
}
+ } catch (error) {
+ console.log(error);
}
- } catch (error) {
+
+ if ($settings.notificationEnabled && !document.hasFocus()) {
+ const notification = new Notification(`OpenAI ${model}`, {
+ body: responseMessage.content,
+ icon: '/favicon.png'
+ });
+ }
+
+ if ($settings.responseAutoCopy) {
+ copyToClipboard(responseMessage.content);
+ }
+
+ if (autoScroll) {
+ window.scrollTo({ top: document.body.scrollHeight });
+ }
+
+ await $db.updateChatById(_chatId, {
+ title: title === '' ? 'New Chat' : title,
+ models: selectedModels,
+ system: $settings.system ?? undefined,
+ options: {
+ seed: $settings.seed ?? undefined,
+ temperature: $settings.temperature ?? undefined,
+ repeat_penalty: $settings.repeat_penalty ?? undefined,
+ top_k: $settings.top_k ?? undefined,
+ top_p: $settings.top_p ?? undefined,
+ num_ctx: $settings.num_ctx ?? undefined,
+ ...($settings.options ?? {})
+ },
+ messages: messages,
+ history: history
+ });
+ }
+ } else {
+ if (res !== null) {
+ const error = await res.json();
console.log(error);
+ if ('detail' in error) {
+ toast.error(error.detail);
+ responseMessage.content = error.detail;
+ } else {
+ if ('message' in error.error) {
+ toast.error(error.error.message);
+ responseMessage.content = error.error.message;
+ } else {
+ toast.error(error.error);
+ responseMessage.content = error.error;
+ }
+ }
+ } else {
+ toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
+ responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
- if (autoScroll) {
- window.scrollTo({ top: document.body.scrollHeight });
- }
-
- await $db.updateChatById(_chatId, {
- title: title === '' ? 'New Chat' : title,
- models: selectedModels,
- system: $settings.system ?? undefined,
- options: {
- seed: $settings.seed ?? undefined,
- temperature: $settings.temperature ?? undefined,
- repeat_penalty: $settings.repeat_penalty ?? undefined,
- top_k: $settings.top_k ?? undefined,
- top_p: $settings.top_p ?? undefined,
- num_ctx: $settings.num_ctx ?? undefined,
- ...($settings.options ?? {})
- },
- messages: messages,
- history: history
- });
+ responseMessage.error = true;
+ responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
+ responseMessage.done = true;
+ messages = messages;
}
stopResponseFlag = false;
-
await tick();
- if ($settings.notificationEnabled && !document.hasFocus()) {
- const notification = new Notification(`OpenAI ${model}`, {
- body: responseMessage.content,
- icon: '/favicon.png'
- });
- }
-
- if ($settings.responseAutoCopy) {
- copyToClipboard(responseMessage.content);
- }
-
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
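
For reference, the streaming loops in both pages consume the response as newline-delimited SSE events: each non-empty line is either the literal 'data: [DONE]' terminator or a 'data: {...}' JSON chunk whose choices[0].delta.content is appended to the message. A minimal sketch of that per-line handling (helper name is illustrative, not part of the diff):

// Parse one line from the /chat/completions stream.
const parseStreamLine = (line) => {
	if (line === '') return null;
	if (line === 'data: [DONE]') return { done: true };
	const data = JSON.parse(line.replace(/^data: /, ''));
	// The loop above skips a leading bare-newline delta; otherwise the delta text is appended.
	return { done: false, delta: data.choices[0].delta.content ?? '' };
};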