forked from open-webui/open-webui

feat: openai frontend refac

parent 17c66fde0f
commit c0b099da4f

4 changed files with 109 additions and 108 deletions
@@ -82,6 +82,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
     headers = {}
     headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
+    headers["Content-Type"] = "application/json"
 
     try:
         r = requests.request(

@@ -206,3 +206,26 @@ export const getOpenAIModelsDirect = async (
 		return a.name.localeCompare(b.name);
 	});
 };
+
+export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+		method: 'POST',
+		headers: {
+			Authorization: `Bearer ${token}`,
+			'Content-Type': 'application/json'
+		},
+		body: JSON.stringify(body)
+	}).catch((err) => {
+		console.log(err);
+		error = err;
+		return null;
+	});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
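For orientation: the hunk above adds a reusable generateOpenAIChatCompletion helper to the OpenAI API module at $lib/apis/openai, with OPENAI_API_BASE_URL pointing at the app's configured endpoint. A minimal usage sketch follows (TypeScript, not part of the commit; the model id and message are placeholders, and the streaming read simply reflects the stream: true option used at the call sites):

// Usage sketch for the new helper (not part of the commit). The model id and
// message content are placeholders; token handling mirrors the call sites,
// which pass localStorage.token. The helper returns a plain fetch Response,
// so a streamed completion can be read with the standard ReadableStream reader.
const res = await generateOpenAIChatCompletion(localStorage.token, {
	model: 'gpt-3.5-turbo', // hypothetical model id
	stream: true,
	messages: [{ role: 'user', content: 'Hello!' }]
});

if (res && res.ok && res.body) {
	const reader = res.body.getReader();
	const decoder = new TextDecoder();
	while (true) {
		const { value, done } = await reader.read();
		if (done) break;
		// Each chunk carries one or more `data: {...}` SSE lines from the
		// OpenAI-compatible endpoint; real code would split and JSON.parse them.
		console.log(decoder.decode(value));
	}
}
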
@@ -16,6 +16,7 @@
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
 
 	let stopResponseFlag = false;
 	let autoScroll = true;

@@ -345,15 +346,7 @@
 
 			window.scrollTo({ top: document.body.scrollHeight });
 
-			const res = await fetch(
-				`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-				{
-					method: 'POST',
-					headers: {
-						Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-						'Content-Type': 'application/json'
-					},
-					body: JSON.stringify({
+			const res = await generateOpenAIChatCompletion(localStorage.token, {
 				model: model,
 				stream: true,
 				messages: [

@@ -394,11 +387,6 @@
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
 				max_tokens: $settings?.options?.num_predict ?? undefined
-					})
-				}
-			).catch((err) => {
-				console.log(err);
-				return null;
 			});
 
 			if (res && res.ok) {
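The call site above keeps mapping the Ollama-style options stored in $settings onto OpenAI request parameters (repeat_penalty onto frequency_penalty, num_predict onto max_tokens, num_ctx passed through). A standalone sketch of that mapping, with an assumed options shape, is:

// Sketch of the options mapping visible in the hunk above; the OllamaOptions
// interface is an assumption, and only these three fields appear in the diff.
interface OllamaOptions {
	num_ctx?: number;        // context window; passed through as-is
	repeat_penalty?: number; // becomes frequency_penalty on the OpenAI side
	num_predict?: number;    // becomes max_tokens on the OpenAI side
}

const toOpenAIParams = (options?: OllamaOptions) => ({
	num_ctx: options?.num_ctx ?? undefined,
	frequency_penalty: options?.repeat_penalty ?? undefined,
	max_tokens: options?.num_predict ?? undefined
});

// Example: { num_ctx: undefined, frequency_penalty: 1.1, max_tokens: 256 }
console.log(toOpenAIParams({ repeat_penalty: 1.1, num_predict: 256 }));
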
@@ -9,6 +9,8 @@
 	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
 
 	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';

@@ -362,15 +364,7 @@
 
 			window.scrollTo({ top: document.body.scrollHeight });
 
-			const res = await fetch(
-				`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-				{
-					method: 'POST',
-					headers: {
-						Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-						'Content-Type': 'application/json'
-					},
-					body: JSON.stringify({
+			const res = await generateOpenAIChatCompletion(localStorage.token, {
 				model: model,
 				stream: true,
 				messages: [

@@ -411,11 +405,6 @@
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
 				max_tokens: $settings?.options?.num_predict ?? undefined
-					})
-				}
-			).catch((err) => {
-				console.log(err);
-				return null;
 			});
 
 			if (res && res.ok) {
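One behavioural note on this refactor: the removed call-site .catch used to swallow fetch failures and fall through to the !res branch, while the new helper rethrows them. A hedged sketch of a caller that wants the old behaviour back (the body is a placeholder, not code from the commit):

// Error-handling sketch (not part of the commit). generateOpenAIChatCompletion
// throws when the underlying fetch fails, so a caller that wants the previous
// "log and treat as no response" behaviour still needs its own catch.
const body = { model: 'gpt-3.5-turbo', stream: true, messages: [] }; // placeholder
const res = await generateOpenAIChatCompletion(localStorage.token, body).catch((err) => {
	console.log(err);
	return null;
});

if (res && res.ok) {
	// stream/parse the completion as before
} else {
	// the fetch threw (caught above) or the endpoint returned a non-2xx status
}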