forked from open-webui/open-webui
		
	feat: openai frontend refactor
This commit is contained in:
		
							parent
							
								
									17c66fde0f
								
							
						
					
					
						commit
						c0b099da4f
					
				
					 4 changed files with 109 additions and 108 deletions
				
			
		|  | @ -82,6 +82,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)): | |||
| 
 | ||||
|     headers = {} | ||||
|     headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}" | ||||
|     headers["Content-Type"] = "application/json" | ||||
| 
 | ||||
|     try: | ||||
|         r = requests.request( | ||||
|  |  | |||
|  | @ -206,3 +206,26 @@ export const getOpenAIModelsDirect = async ( | |||
| 			return a.name.localeCompare(b.name); | ||||
| 		}); | ||||
| }; | ||||
| 
 | ||||
| export const generateOpenAIChatCompletion = async (token: string = '', body: object) => { | ||||
| 	let error = null; | ||||
| 
 | ||||
| 	const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, { | ||||
| 		method: 'POST', | ||||
| 		headers: { | ||||
| 			Authorization: `Bearer ${token}`, | ||||
| 			'Content-Type': 'application/json' | ||||
| 		}, | ||||
| 		body: JSON.stringify(body) | ||||
| 	}).catch((err) => { | ||||
| 		console.log(err); | ||||
| 		error = err; | ||||
| 		return null; | ||||
| 	}); | ||||
| 
 | ||||
| 	if (error) { | ||||
| 		throw error; | ||||
| 	} | ||||
| 
 | ||||
| 	return res; | ||||
| }; | ||||
|  |  | |||
|  | @ -16,6 +16,7 @@ | |||
| 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte'; | ||||
| 	import Navbar from '$lib/components/layout/Navbar.svelte'; | ||||
| 	import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats'; | ||||
| 	import { generateOpenAIChatCompletion } from '$lib/apis/openai'; | ||||
| 
 | ||||
| 	let stopResponseFlag = false; | ||||
| 	let autoScroll = true; | ||||
|  | @ -345,60 +346,47 @@ | |||
| 
 | ||||
| 				window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 
 | ||||
| 				const res = await fetch( | ||||
| 					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, | ||||
| 					{ | ||||
| 						method: 'POST', | ||||
| 						headers: { | ||||
| 							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, | ||||
| 							'Content-Type': 'application/json' | ||||
| 						}, | ||||
| 						body: JSON.stringify({ | ||||
| 							model: model, | ||||
| 							stream: true, | ||||
| 							messages: [ | ||||
| 								$settings.system | ||||
| 									? { | ||||
| 											role: 'system', | ||||
| 											content: $settings.system | ||||
| 									  } | ||||
| 									: undefined, | ||||
| 								...messages | ||||
| 							] | ||||
| 								.filter((message) => message) | ||||
| 								.map((message) => ({ | ||||
| 									role: message.role, | ||||
| 									...(message.files | ||||
| 										? { | ||||
| 												content: [ | ||||
| 													{ | ||||
| 														type: 'text', | ||||
| 														text: message.content | ||||
| 													}, | ||||
| 													...message.files | ||||
| 														.filter((file) => file.type === 'image') | ||||
| 														.map((file) => ({ | ||||
| 															type: 'image_url', | ||||
| 															image_url: { | ||||
| 																url: file.url | ||||
| 															} | ||||
| 														})) | ||||
| 												] | ||||
| 										  } | ||||
| 										: { content: message.content }) | ||||
| 								})), | ||||
| 							seed: $settings?.options?.seed ?? undefined, | ||||
| 							stop: $settings?.options?.stop ?? undefined, | ||||
| 							temperature: $settings?.options?.temperature ?? undefined, | ||||
| 							top_p: $settings?.options?.top_p ?? undefined, | ||||
| 							num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 							frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 							max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 						}) | ||||
| 					} | ||||
| 				).catch((err) => { | ||||
| 					console.log(err); | ||||
| 					return null; | ||||
| 				const res = await generateOpenAIChatCompletion(localStorage.token, { | ||||
| 					model: model, | ||||
| 					stream: true, | ||||
| 					messages: [ | ||||
| 						$settings.system | ||||
| 							? { | ||||
| 									role: 'system', | ||||
| 									content: $settings.system | ||||
| 							  } | ||||
| 							: undefined, | ||||
| 						...messages | ||||
| 					] | ||||
| 						.filter((message) => message) | ||||
| 						.map((message) => ({ | ||||
| 							role: message.role, | ||||
| 							...(message.files | ||||
| 								? { | ||||
| 										content: [ | ||||
| 											{ | ||||
| 												type: 'text', | ||||
| 												text: message.content | ||||
| 											}, | ||||
| 											...message.files | ||||
| 												.filter((file) => file.type === 'image') | ||||
| 												.map((file) => ({ | ||||
| 													type: 'image_url', | ||||
| 													image_url: { | ||||
| 														url: file.url | ||||
| 													} | ||||
| 												})) | ||||
| 										] | ||||
| 								  } | ||||
| 								: { content: message.content }) | ||||
| 						})), | ||||
| 					seed: $settings?.options?.seed ?? undefined, | ||||
| 					stop: $settings?.options?.stop ?? undefined, | ||||
| 					temperature: $settings?.options?.temperature ?? undefined, | ||||
| 					top_p: $settings?.options?.top_p ?? undefined, | ||||
| 					num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 					max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 				}); | ||||
| 
 | ||||
| 				if (res && res.ok) { | ||||
|  |  | |||
|  | @ -9,6 +9,8 @@ | |||
| 	import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores'; | ||||
| 
 | ||||
| 	import { generateChatCompletion, generateTitle } from '$lib/apis/ollama'; | ||||
| 	import { generateOpenAIChatCompletion } from '$lib/apis/openai'; | ||||
| 
 | ||||
| 	import { copyToClipboard, splitStream } from '$lib/utils'; | ||||
| 
 | ||||
| 	import MessageInput from '$lib/components/chat/MessageInput.svelte'; | ||||
|  | @ -362,60 +364,47 @@ | |||
| 
 | ||||
| 				window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 
 | ||||
| 				const res = await fetch( | ||||
| 					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`, | ||||
| 					{ | ||||
| 						method: 'POST', | ||||
| 						headers: { | ||||
| 							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`, | ||||
| 							'Content-Type': 'application/json' | ||||
| 						}, | ||||
| 						body: JSON.stringify({ | ||||
| 							model: model, | ||||
| 							stream: true, | ||||
| 							messages: [ | ||||
| 								$settings.system | ||||
| 									? { | ||||
| 											role: 'system', | ||||
| 											content: $settings.system | ||||
| 									  } | ||||
| 									: undefined, | ||||
| 								...messages | ||||
| 							] | ||||
| 								.filter((message) => message) | ||||
| 								.map((message) => ({ | ||||
| 									role: message.role, | ||||
| 									...(message.files | ||||
| 										? { | ||||
| 												content: [ | ||||
| 													{ | ||||
| 														type: 'text', | ||||
| 														text: message.content | ||||
| 													}, | ||||
| 													...message.files | ||||
| 														.filter((file) => file.type === 'image') | ||||
| 														.map((file) => ({ | ||||
| 															type: 'image_url', | ||||
| 															image_url: { | ||||
| 																url: file.url | ||||
| 															} | ||||
| 														})) | ||||
| 												] | ||||
| 										  } | ||||
| 										: { content: message.content }) | ||||
| 								})), | ||||
| 							seed: $settings?.options?.seed ?? undefined, | ||||
| 							stop: $settings?.options?.stop ?? undefined, | ||||
| 							temperature: $settings?.options?.temperature ?? undefined, | ||||
| 							top_p: $settings?.options?.top_p ?? undefined, | ||||
| 							num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 							frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 							max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 						}) | ||||
| 					} | ||||
| 				).catch((err) => { | ||||
| 					console.log(err); | ||||
| 					return null; | ||||
| 				const res = await generateOpenAIChatCompletion(localStorage.token, { | ||||
| 					model: model, | ||||
| 					stream: true, | ||||
| 					messages: [ | ||||
| 						$settings.system | ||||
| 							? { | ||||
| 									role: 'system', | ||||
| 									content: $settings.system | ||||
| 							  } | ||||
| 							: undefined, | ||||
| 						...messages | ||||
| 					] | ||||
| 						.filter((message) => message) | ||||
| 						.map((message) => ({ | ||||
| 							role: message.role, | ||||
| 							...(message.files | ||||
| 								? { | ||||
| 										content: [ | ||||
| 											{ | ||||
| 												type: 'text', | ||||
| 												text: message.content | ||||
| 											}, | ||||
| 											...message.files | ||||
| 												.filter((file) => file.type === 'image') | ||||
| 												.map((file) => ({ | ||||
| 													type: 'image_url', | ||||
| 													image_url: { | ||||
| 														url: file.url | ||||
| 													} | ||||
| 												})) | ||||
| 										] | ||||
| 								  } | ||||
| 								: { content: message.content }) | ||||
| 						})), | ||||
| 					seed: $settings?.options?.seed ?? undefined, | ||||
| 					stop: $settings?.options?.stop ?? undefined, | ||||
| 					temperature: $settings?.options?.temperature ?? undefined, | ||||
| 					top_p: $settings?.options?.top_p ?? undefined, | ||||
| 					num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 					max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 				}); | ||||
| 
 | ||||
| 				if (res && res.ok) { | ||||
|  |  | |||
		Loading…
	
	Add table
		Add a link
		
	
		Reference in a new issue
	
	 Timothy J. Baek
						Timothy J. Baek