feat: openai frontend refac

Timothy J. Baek 2024-01-04 18:54:00 -08:00
parent 17c66fde0f
commit c0b099da4f
4 changed files with 109 additions and 108 deletions

View file

@@ -82,6 +82,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
     headers = {}
     headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
+    headers["Content-Type"] = "application/json"
 
     try:
         r = requests.request(

View file

@@ -206,3 +206,26 @@ export const getOpenAIModelsDirect = async (
 			return a.name.localeCompare(b.name);
 		});
 };
+
+export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
+		method: 'POST',
+		headers: {
+			Authorization: `Bearer ${token}`,
+			'Content-Type': 'application/json'
+		},
+		body: JSON.stringify(body)
+	}).catch((err) => {
+		console.log(err);
+		error = err;
+		return null;
+	});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
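
For reference, a minimal caller for the new helper might look like the sketch below. The model id and message are illustrative only and not part of this commit; the helper returns the raw fetch Response, or throws the network error it caught, so the caller checks res.ok before reading the body.

import { generateOpenAIChatCompletion } from '$lib/apis/openai';

// Hypothetical usage sketch: stream a completion through the new helper.
const sendHello = async (token: string) => {
	const res = await generateOpenAIChatCompletion(token, {
		model: 'gpt-3.5-turbo', // illustrative model id, not from this commit
		stream: true,
		messages: [{ role: 'user', content: 'Hello!' }]
	});

	if (res && res.ok) {
		// consume res.body here; with `stream: true` it arrives as SSE chunks
	}
};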

View file

@@ -16,6 +16,7 @@
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
 
 	let stopResponseFlag = false;
 	let autoScroll = true;
@@ -345,60 +346,47 @@
 		window.scrollTo({ top: document.body.scrollHeight });
 
-		const res = await fetch(
-			`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-			{
-				method: 'POST',
-				headers: {
-					Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-					'Content-Type': 'application/json'
-				},
-				body: JSON.stringify({
-					model: model,
-					stream: true,
-					messages: [
-						$settings.system
-							? {
-									role: 'system',
-									content: $settings.system
-							  }
-							: undefined,
-						...messages
-					]
-						.filter((message) => message)
-						.map((message) => ({
-							role: message.role,
-							...(message.files
-								? {
-										content: [
-											{
-												type: 'text',
-												text: message.content
-											},
-											...message.files
-												.filter((file) => file.type === 'image')
-												.map((file) => ({
-													type: 'image_url',
-													image_url: {
-														url: file.url
-													}
-												}))
-										]
-								  }
-								: { content: message.content })
-						})),
-					seed: $settings?.options?.seed ?? undefined,
-					stop: $settings?.options?.stop ?? undefined,
-					temperature: $settings?.options?.temperature ?? undefined,
-					top_p: $settings?.options?.top_p ?? undefined,
-					num_ctx: $settings?.options?.num_ctx ?? undefined,
-					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-					max_tokens: $settings?.options?.num_predict ?? undefined
-				})
-			}
-		).catch((err) => {
-			console.log(err);
-			return null;
-		});
+		const res = await generateOpenAIChatCompletion(localStorage.token, {
+			model: model,
+			stream: true,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					...(message.files
+						? {
+								content: [
+									{
+										type: 'text',
+										text: message.content
+									},
+									...message.files
+										.filter((file) => file.type === 'image')
+										.map((file) => ({
+											type: 'image_url',
+											image_url: {
+												url: file.url
+											}
+										}))
+								]
+						  }
+						: { content: message.content })
+				})),
+			seed: $settings?.options?.seed ?? undefined,
+			stop: $settings?.options?.stop ?? undefined,
+			temperature: $settings?.options?.temperature ?? undefined,
+			top_p: $settings?.options?.top_p ?? undefined,
+			num_ctx: $settings?.options?.num_ctx ?? undefined,
+			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+			max_tokens: $settings?.options?.num_predict ?? undefined
+		});
 
 		if (res && res.ok) {
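
The hunk stops at the res.ok guard; the streamed body is consumed further down in the component. As a rough, non-authoritative sketch of how such a response could be read, assuming splitStream from '$lib/utils' (imported in the next file) is a newline-splitting TransformStream factory:

import { splitStream } from '$lib/utils';

// Sketch only: reads an SSE-style chat-completion stream line by line.
// Assumes splitStream('\n') re-chunks the decoded text on newlines.
const readCompletionStream = async (res: Response) => {
	if (!res.body) return;

	const reader = res.body
		.pipeThrough(new TextDecoderStream())
		.pipeThrough(splitStream('\n'))
		.getReader();

	while (true) {
		const { value, done } = await reader.read();
		if (done) break;

		for (const line of (value ?? '').split('\n')) {
			if (line.startsWith('data: ') && !line.includes('[DONE]')) {
				const data = JSON.parse(line.slice('data: '.length));
				console.log(data.choices?.[0]?.delta?.content ?? '');
			}
		}
	}
};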

View file

@@ -9,6 +9,8 @@
 	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
 	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
+	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';
@@ -362,60 +364,47 @@
 		window.scrollTo({ top: document.body.scrollHeight });
 
-		const res = await fetch(
-			`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
-			{
-				method: 'POST',
-				headers: {
-					Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
-					'Content-Type': 'application/json'
-				},
-				body: JSON.stringify({
-					model: model,
-					stream: true,
-					messages: [
-						$settings.system
-							? {
-									role: 'system',
-									content: $settings.system
-							  }
-							: undefined,
-						...messages
-					]
-						.filter((message) => message)
-						.map((message) => ({
-							role: message.role,
-							...(message.files
-								? {
-										content: [
-											{
-												type: 'text',
-												text: message.content
-											},
-											...message.files
-												.filter((file) => file.type === 'image')
-												.map((file) => ({
-													type: 'image_url',
-													image_url: {
-														url: file.url
-													}
-												}))
-										]
-								  }
-								: { content: message.content })
-						})),
-					seed: $settings?.options?.seed ?? undefined,
-					stop: $settings?.options?.stop ?? undefined,
-					temperature: $settings?.options?.temperature ?? undefined,
-					top_p: $settings?.options?.top_p ?? undefined,
-					num_ctx: $settings?.options?.num_ctx ?? undefined,
-					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-					max_tokens: $settings?.options?.num_predict ?? undefined
-				})
-			}
-		).catch((err) => {
-			console.log(err);
-			return null;
-		});
+		const res = await generateOpenAIChatCompletion(localStorage.token, {
+			model: model,
+			stream: true,
+			messages: [
+				$settings.system
+					? {
+							role: 'system',
+							content: $settings.system
+					  }
+					: undefined,
+				...messages
+			]
+				.filter((message) => message)
+				.map((message) => ({
+					role: message.role,
+					...(message.files
+						? {
+								content: [
+									{
+										type: 'text',
+										text: message.content
+									},
+									...message.files
+										.filter((file) => file.type === 'image')
+										.map((file) => ({
+											type: 'image_url',
+											image_url: {
+												url: file.url
+											}
+										}))
+								]
+						  }
+						: { content: message.content })
+				})),
+			seed: $settings?.options?.seed ?? undefined,
+			stop: $settings?.options?.stop ?? undefined,
+			temperature: $settings?.options?.temperature ?? undefined,
+			top_p: $settings?.options?.top_p ?? undefined,
+			num_ctx: $settings?.options?.num_ctx ?? undefined,
+			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+			max_tokens: $settings?.options?.num_predict ?? undefined
+		});
 
 		if (res && res.ok) {