forked from open-webui/open-webui

Merge pull request #265 from ollama-webui/custom-openai-endpoint

feat: custom openai endpoint

commit 6ea9f6e198

7 changed files with 386 additions and 247 deletions
@@ -30,7 +30,7 @@ if ENV == "prod":
# WEBUI_VERSION
####################################

WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.34")
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.35")

####################################
# WEBUI_AUTH

@@ -155,7 +155,7 @@
<div class="fixed bottom-0 w-full">
	<div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
		{#if messages.length == 0 && suggestionPrompts.length !== 0}
			<div class="max-w-3xl">
			<div class="max-w-3xl w-full">
				<Suggestions {suggestionPrompts} {submitPrompt} />
			</div>
		{/if}

@@ -3,7 +3,7 @@
	export let suggestionPrompts = [];
</script>

<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left">
<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left w-full">
	{#each suggestionPrompts as prompt, promptIdx}
		<div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
			<button

@@ -56,6 +56,7 @@

	let gravatarEmail = '';
	let OPENAI_API_KEY = '';
	let OPENAI_API_BASE_URL = '';

	// Auth
	let authEnabled = false;
@@ -302,8 +303,10 @@

		// If OpenAI API Key exists
		if (type === 'all' && $settings.OPENAI_API_KEY) {
			const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';

			// Validate OPENAI_API_KEY
			const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {
			const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
				method: 'GET',
				headers: {
					'Content-Type': 'application/json',
@@ -320,15 +323,19 @@
					return null;
				});

			const openAIModels = openaiModelRes?.data ?? null;
			const openAIModels = Array.isArray(openaiModelRes)
				? openaiModelRes
				: openaiModelRes?.data ?? null;

			models.push(
				...(openAIModels
					? [
							{ name: 'hr' },
							...openAIModels
								.map((model) => ({ name: model.id, label: 'OpenAI' }))
								.filter((model) => model.name.includes('gpt'))
								.map((model) => ({ name: model.id, external: true }))
								.filter((model) =>
									API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
								)
					  ]
					: [])
			);
@@ -363,6 +370,7 @@

		gravatarEmail = settings.gravatarEmail ?? '';
		OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
		OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';

		authEnabled = settings.authHeader !== undefined ? true : false;
		if (authEnabled) {
@@ -476,6 +484,30 @@
					<div class=" self-center">Models</div>
				</button>

				<button
					class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
					'external'
						? 'bg-gray-200 dark:bg-gray-700'
						: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
					on:click={() => {
						selectedTab = 'external';
					}}
				>
					<div class=" self-center mr-2">
						<svg
							xmlns="http://www.w3.org/2000/svg"
							viewBox="0 0 16 16"
							fill="currentColor"
							class="w-4 h-4"
						>
							<path
								d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
							/>
						</svg>
					</div>
					<div class=" self-center">External</div>
				</button>

				<button
					class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
					'addons'
@@ -859,14 +891,73 @@
							</div>
						</div>
					</div>
				{:else if selectedTab === 'external'}
					<form
						class="flex flex-col h-full justify-between space-y-3 text-sm"
						on:submit|preventDefault={() => {
							saveSettings({
								OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
								OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
							});
							show = false;
						}}
					>
						<div class=" space-y-3">
							<div>
								<div class=" mb-2.5 text-sm font-medium">OpenAI API Key</div>
								<div class="flex w-full">
									<div class="flex-1">
										<input
											class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
											placeholder="Enter OpenAI API Key"
											bind:value={OPENAI_API_KEY}
											autocomplete="off"
										/>
									</div>
								</div>
								<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
									Adds optional support for online models.
								</div>
							</div>

							<hr class=" dark:border-gray-700" />

							<div>
								<div class=" mb-2.5 text-sm font-medium">OpenAI API Base URL</div>
								<div class="flex w-full">
									<div class="flex-1">
										<input
											class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
											placeholder="Enter OpenAI API Key"
											bind:value={OPENAI_API_BASE_URL}
											autocomplete="off"
										/>
									</div>
								</div>
								<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
									WebUI will make requests to <span class=" text-gray-200"
										>'{OPENAI_API_BASE_URL}/chat'</span
									>
								</div>
							</div>
						</div>

						<div class="flex justify-end pt-3 text-sm font-medium">
							<button
								class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
								type="submit"
							>
								Save
							</button>
						</div>
					</form>
				{:else if selectedTab === 'addons'}
					<form
						class="flex flex-col h-full justify-between space-y-3 text-sm"
						on:submit|preventDefault={() => {
							saveSettings({
								gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
								gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined,
								OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined
								gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
							});
							show = false;
						}}
@@ -962,26 +1053,6 @@
									>
								</div>
							</div>

							<hr class=" dark:border-gray-700" />
							<div>
								<div class=" mb-2.5 text-sm font-medium">
									OpenAI API Key <span class=" text-gray-400 text-sm">(optional)</span>
								</div>
								<div class="flex w-full">
									<div class="flex-1">
										<input
											class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
											placeholder="Enter OpenAI API Key"
											bind:value={OPENAI_API_KEY}
											autocomplete="off"
										/>
									</div>
								</div>
								<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
									Adds optional support for 'gpt-*' models available.
								</div>
							</div>
						</div>

						<div class="flex justify-end pt-3 text-sm font-medium">

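The settings changes above add an OPENAI_API_BASE_URL field alongside the API key: the External form saves either value only when it is non-empty, and readers of the setting fall back to https://api.openai.com/v1. A minimal sketch of that pattern in plain TypeScript (the interface and helper names are illustrative, not part of the PR):

// Sketch only: mirrors how the form saves blank fields as `undefined` so the
// `??` fallback downstream keeps pointing at the official endpoint.
interface ExternalSettings {
	OPENAI_API_KEY?: string;
	OPENAI_API_BASE_URL?: string;
}

const DEFAULT_OPENAI_BASE_URL = 'https://api.openai.com/v1';

const resolveOpenAIBaseUrl = (settings: ExternalSettings): string =>
	settings.OPENAI_API_BASE_URL ?? DEFAULT_OPENAI_BASE_URL;

const buildExternalSettings = (key: string, baseUrl: string): ExternalSettings => ({
	OPENAI_API_KEY: key !== '' ? key : undefined,
	OPENAI_API_BASE_URL: baseUrl !== '' ? baseUrl : undefined
});

// resolveOpenAIBaseUrl({}) === 'https://api.openai.com/v1'

The same `?? 'https://api.openai.com/v1'` fallback appears everywhere the setting is read, so leaving the field unset behaves exactly like the pre-PR hard-coded endpoint.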
@@ -55,7 +55,9 @@
		// If OpenAI API Key exists
		if ($settings.OPENAI_API_KEY) {
			// Validate OPENAI_API_KEY
			const openaiModelRes = await fetch(`https://api.openai.com/v1/models`, {

			const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
			const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
				method: 'GET',
				headers: {
					'Content-Type': 'application/json',
@@ -72,15 +74,19 @@
					return null;
				});

			const openAIModels = openaiModelRes?.data ?? null;
			const openAIModels = Array.isArray(openaiModelRes)
				? openaiModelRes
				: openaiModelRes?.data ?? null;

			models.push(
				...(openAIModels
					? [
							{ name: 'hr' },
							...openAIModels
								.map((model) => ({ name: model.id, label: 'OpenAI' }))
								.filter((model) => model.name.includes('gpt'))
								.map((model) => ({ name: model.id, external: true }))
								.filter((model) =>
									API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
								)
					  ]
					: [])
			);

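This model-listing block is the same as the one in the settings modal above: the response from GET {base URL}/models is normalized (OpenAI wraps the list in { data: [...] }, while some compatible servers return a bare array), each entry is flagged external, and the 'gpt' name filter only applies when the base URL contains 'openai'. A sketch of that logic, assuming an OpenAI-compatible server; the fetchExternalModels helper and its types are illustrative:

// Sketch only: normalize the /models response shape and apply the conditional filter.
type ExternalModel = { id: string };

const fetchExternalModels = async (baseUrl: string, apiKey: string) => {
	const res = await fetch(`${baseUrl}/models`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${apiKey}`
		}
	})
		.then((r) => (r.ok ? r.json() : null))
		.catch(() => null);

	// OpenAI returns { data: [...] }; some compatible servers return a bare array.
	const list: ExternalModel[] | null = Array.isArray(res) ? res : res?.data ?? null;
	if (!list) return [];

	return list
		.map((m) => ({ name: m.id, external: true }))
		// Only the official endpoint gets narrowed to 'gpt' models; custom endpoints list everything.
		.filter((m) => (baseUrl.includes('openai') ? m.name.includes('gpt') : true));
};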
@@ -7,7 +7,7 @@
	import { splitStream } from '$lib/utils';
	import { goto } from '$app/navigation';

	import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
	import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';

	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
@@ -130,7 +130,8 @@
	const sendPrompt = async (userPrompt, parentId, _chatId) => {
		await Promise.all(
			selectedModels.map(async (model) => {
				if (model.includes('gpt-')) {
				console.log(model);
				if ($models.filter((m) => m.name === model)[0].external) {
					await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
				} else {
					await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -364,133 +365,163 @@
					];
				}

				await tick();

				window.scrollTo({ top: document.body.scrollHeight });

				const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
					method: 'POST',
					headers: {
						'Content-Type': 'application/json',
						Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
					},
					body: JSON.stringify({
						model: model,
						stream: true,
						messages: [
							$settings.system
								? {
										role: 'system',
										content: $settings.system
								  }
								: undefined,
							...messages
						]
							.filter((message) => message)
							.map((message) => ({
								role: message.role,
								...(message.files
				const res = await fetch(
					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
					{
						method: 'POST',
						headers: {
							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
							'Content-Type': 'application/json'
						},
						body: JSON.stringify({
							model: model,
							stream: true,
							messages: [
								$settings.system
									? {
											content: [
												{
													type: 'text',
													text: message.content
												},
												...message.files
													.filter((file) => file.type === 'image')
													.map((file) => ({
														type: 'image_url',
														image_url: {
															url: file.url
														}
													}))
											]
											role: 'system',
											content: $settings.system
									  }
									: { content: message.content })
							})),
						temperature: $settings.temperature ?? undefined,
						top_p: $settings.top_p ?? undefined,
						num_ctx: $settings.num_ctx ?? undefined,
						frequency_penalty: $settings.repeat_penalty ?? undefined
					})
									: undefined,
								...messages
							]
								.filter((message) => message)
								.map((message) => ({
									role: message.role,
									...(message.files
										? {
												content: [
													{
														type: 'text',
														text: message.content
													},
													...message.files
														.filter((file) => file.type === 'image')
														.map((file) => ({
															type: 'image_url',
															image_url: {
																url: file.url
															}
														}))
												]
										  }
										: { content: message.content })
								})),
							temperature: $settings.temperature ?? undefined,
							top_p: $settings.top_p ?? undefined,
							num_ctx: $settings.num_ctx ?? undefined,
							frequency_penalty: $settings.repeat_penalty ?? undefined
						})
					}
				).catch((err) => {
					console.log(err);
					return null;
				});

				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();
				if (res && res.ok) {
					const reader = res.body
						.pipeThrough(new TextDecoderStream())
						.pipeThrough(splitStream('\n'))
						.getReader();

				while (true) {
					const { value, done } = await reader.read();
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;
						break;
					}
					while (true) {
						const { value, done } = await reader.read();
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							messages = messages;
							break;
						}

					try {
						let lines = value.split('\n');
						try {
							let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								if (line === 'data: [DONE]') {
									responseMessage.done = true;
									messages = messages;
								} else {
									let data = JSON.parse(line.replace(/^data: /, ''));
									console.log(data);

									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.choices[0].delta.content ?? '';
							for (const line of lines) {
								if (line !== '') {
									console.log(line);
									if (line === 'data: [DONE]') {
										responseMessage.done = true;
										messages = messages;
									} else {
										let data = JSON.parse(line.replace(/^data: /, ''));
										console.log(data);

										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
											continue;
										} else {
											responseMessage.content += data.choices[0].delta.content ?? '';
											messages = messages;
										}
									}
								}
							}
						} catch (error) {
							console.log(error);
						}
					} catch (error) {

						if ($settings.notificationEnabled && !document.hasFocus()) {
							const notification = new Notification(`OpenAI ${model}`, {
								body: responseMessage.content,
								icon: '/favicon.png'
							});
						}

						if ($settings.responseAutoCopy) {
							copyToClipboard(responseMessage.content);
						}

						if (autoScroll) {
							window.scrollTo({ top: document.body.scrollHeight });
						}

						await $db.updateChatById(_chatId, {
							title: title === '' ? 'New Chat' : title,
							models: selectedModels,
							system: $settings.system ?? undefined,
							options: {
								seed: $settings.seed ?? undefined,
								temperature: $settings.temperature ?? undefined,
								repeat_penalty: $settings.repeat_penalty ?? undefined,
								top_k: $settings.top_k ?? undefined,
								top_p: $settings.top_p ?? undefined,
								num_ctx: $settings.num_ctx ?? undefined,
								...($settings.options ?? {})
							},
							messages: messages,
							history: history
						});
					}
				} else {
					if (res !== null) {
						const error = await res.json();
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
							responseMessage.content = error.detail;
						} else {
							if ('message' in error.error) {
								toast.error(error.error.message);
								responseMessage.content = error.error.message;
							} else {
								toast.error(error.error);
								responseMessage.content = error.error;
							}
						}
					} else {
						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					}

					if (autoScroll) {
						window.scrollTo({ top: document.body.scrollHeight });
					}

					await $db.updateChatById(_chatId, {
						title: title === '' ? 'New Chat' : title,
						models: selectedModels,
						system: $settings.system ?? undefined,
						options: {
							seed: $settings.seed ?? undefined,
							temperature: $settings.temperature ?? undefined,
							repeat_penalty: $settings.repeat_penalty ?? undefined,
							top_k: $settings.top_k ?? undefined,
							top_p: $settings.top_p ?? undefined,
							num_ctx: $settings.num_ctx ?? undefined,
							...($settings.options ?? {})
						},
						messages: messages,
						history: history
					});
					responseMessage.error = true;
					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					responseMessage.done = true;
					messages = messages;
				}

				stopResponseFlag = false;

				await tick();

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: '/favicon.png'
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

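Both chat pages now decide where to send a prompt by looking the selected model up in the $models store and checking its external flag, rather than matching 'gpt-' in the model name, so anything listed by the configured endpoint is routed to the OpenAI-style sender. A small sketch of that dispatch (the function and type names are stand-ins for the component's own code, not PR code):

// Sketch only: route by the external flag instead of a name heuristic.
type Model = { name: string; external?: boolean };

const routePrompt = async (
	models: Model[],
	selected: string,
	sendOpenAI: () => Promise<void>,
	sendOllama: () => Promise<void>
) => {
	// Any model flagged `external` (i.e. returned by the configured base URL)
	// goes to the OpenAI-compatible sender; everything else stays on Ollama.
	const model = models.find((m) => m.name === selected);
	if (model?.external) {
		await sendOpenAI();
	} else {
		await sendOllama();
	}
};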
@@ -6,7 +6,7 @@
	import { onMount, tick } from 'svelte';
	import { convertMessagesToHistory, splitStream } from '$lib/utils';
	import { goto } from '$app/navigation';
	import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
	import { config, models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';

	import MessageInput from '$lib/components/chat/MessageInput.svelte';
	import Messages from '$lib/components/chat/Messages.svelte';
@@ -144,7 +144,8 @@
	const sendPrompt = async (userPrompt, parentId, _chatId) => {
		await Promise.all(
			selectedModels.map(async (model) => {
				if (model.includes('gpt-')) {
				console.log(model);
				if ($models.filter((m) => m.name === model)[0].external) {
					await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
				} else {
					await sendPromptOllama(model, userPrompt, parentId, _chatId);
@@ -378,133 +379,163 @@
					];
				}

				await tick();

				window.scrollTo({ top: document.body.scrollHeight });

				const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
					method: 'POST',
					headers: {
						'Content-Type': 'application/json',
						Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
					},
					body: JSON.stringify({
						model: model,
						stream: true,
						messages: [
							$settings.system
								? {
										role: 'system',
										content: $settings.system
								  }
								: undefined,
							...messages
						]
							.filter((message) => message)
							.map((message) => ({
								role: message.role,
								...(message.files
				const res = await fetch(
					`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
					{
						method: 'POST',
						headers: {
							Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
							'Content-Type': 'application/json'
						},
						body: JSON.stringify({
							model: model,
							stream: true,
							messages: [
								$settings.system
									? {
											content: [
												{
													type: 'text',
													text: message.content
												},
												...message.files
													.filter((file) => file.type === 'image')
													.map((file) => ({
														type: 'image_url',
														image_url: {
															url: file.url
														}
													}))
											]
											role: 'system',
											content: $settings.system
									  }
									: { content: message.content })
							})),
						temperature: $settings.temperature ?? undefined,
						top_p: $settings.top_p ?? undefined,
						num_ctx: $settings.num_ctx ?? undefined,
						frequency_penalty: $settings.repeat_penalty ?? undefined
					})
									: undefined,
								...messages
							]
								.filter((message) => message)
								.map((message) => ({
									role: message.role,
									...(message.files
										? {
												content: [
													{
														type: 'text',
														text: message.content
													},
													...message.files
														.filter((file) => file.type === 'image')
														.map((file) => ({
															type: 'image_url',
															image_url: {
																url: file.url
															}
														}))
												]
										  }
										: { content: message.content })
								})),
							temperature: $settings.temperature ?? undefined,
							top_p: $settings.top_p ?? undefined,
							num_ctx: $settings.num_ctx ?? undefined,
							frequency_penalty: $settings.repeat_penalty ?? undefined
						})
					}
				).catch((err) => {
					console.log(err);
					return null;
				});

				const reader = res.body
					.pipeThrough(new TextDecoderStream())
					.pipeThrough(splitStream('\n'))
					.getReader();
				if (res && res.ok) {
					const reader = res.body
						.pipeThrough(new TextDecoderStream())
						.pipeThrough(splitStream('\n'))
						.getReader();

				while (true) {
					const { value, done } = await reader.read();
					if (done || stopResponseFlag || _chatId !== $chatId) {
						responseMessage.done = true;
						messages = messages;
						break;
					}
					while (true) {
						const { value, done } = await reader.read();
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							messages = messages;
							break;
						}

					try {
						let lines = value.split('\n');
						try {
							let lines = value.split('\n');

						for (const line of lines) {
							if (line !== '') {
								console.log(line);
								if (line === 'data: [DONE]') {
									responseMessage.done = true;
									messages = messages;
								} else {
									let data = JSON.parse(line.replace(/^data: /, ''));
									console.log(data);

									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
										continue;
									} else {
										responseMessage.content += data.choices[0].delta.content ?? '';
							for (const line of lines) {
								if (line !== '') {
									console.log(line);
									if (line === 'data: [DONE]') {
										responseMessage.done = true;
										messages = messages;
									} else {
										let data = JSON.parse(line.replace(/^data: /, ''));
										console.log(data);

										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
											continue;
										} else {
											responseMessage.content += data.choices[0].delta.content ?? '';
											messages = messages;
										}
									}
								}
							}
						} catch (error) {
							console.log(error);
						}
					} catch (error) {

						if ($settings.notificationEnabled && !document.hasFocus()) {
							const notification = new Notification(`OpenAI ${model}`, {
								body: responseMessage.content,
								icon: '/favicon.png'
							});
						}

						if ($settings.responseAutoCopy) {
							copyToClipboard(responseMessage.content);
						}

						if (autoScroll) {
							window.scrollTo({ top: document.body.scrollHeight });
						}

						await $db.updateChatById(_chatId, {
							title: title === '' ? 'New Chat' : title,
							models: selectedModels,
							system: $settings.system ?? undefined,
							options: {
								seed: $settings.seed ?? undefined,
								temperature: $settings.temperature ?? undefined,
								repeat_penalty: $settings.repeat_penalty ?? undefined,
								top_k: $settings.top_k ?? undefined,
								top_p: $settings.top_p ?? undefined,
								num_ctx: $settings.num_ctx ?? undefined,
								...($settings.options ?? {})
							},
							messages: messages,
							history: history
						});
					}
				} else {
					if (res !== null) {
						const error = await res.json();
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
							responseMessage.content = error.detail;
						} else {
							if ('message' in error.error) {
								toast.error(error.error.message);
								responseMessage.content = error.error.message;
							} else {
								toast.error(error.error);
								responseMessage.content = error.error;
							}
						}
					} else {
						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					}

					if (autoScroll) {
						window.scrollTo({ top: document.body.scrollHeight });
					}

					await $db.updateChatById(_chatId, {
						title: title === '' ? 'New Chat' : title,
						models: selectedModels,
						system: $settings.system ?? undefined,
						options: {
							seed: $settings.seed ?? undefined,
							temperature: $settings.temperature ?? undefined,
							repeat_penalty: $settings.repeat_penalty ?? undefined,
							top_k: $settings.top_k ?? undefined,
							top_p: $settings.top_p ?? undefined,
							num_ctx: $settings.num_ctx ?? undefined,
							...($settings.options ?? {})
						},
						messages: messages,
						history: history
					});
					responseMessage.error = true;
					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					responseMessage.done = true;
					messages = messages;
				}

				stopResponseFlag = false;

				await tick();

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: '/favicon.png'
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

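The streaming loop added in both pages reads the /chat/completions body through TextDecoderStream and splitStream('\n'), then handles each 'data: ...' line until 'data: [DONE]'. A compact sketch of that per-chunk parse, assuming an OpenAI-style SSE stream (the parseCompletionChunk helper is illustrative, not part of the PR; the accumulate callback stands in for appending to responseMessage.content):

// Sketch only: parse one decoded chunk of an OpenAI-style streaming response.
const parseCompletionChunk = (
	chunk: string,
	accumulate: (text: string) => void
): boolean => {
	// Returns true once the stream signals completion with "data: [DONE]".
	for (const line of chunk.split('\n')) {
		if (line === '') continue;
		if (line === 'data: [DONE]') return true;
		const data = JSON.parse(line.replace(/^data: /, ''));
		// Deltas may omit content (e.g. the final role-only chunk), hence the fallback.
		accumulate(data.choices[0].delta.content ?? '');
	}
	return false;
};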
Timothy Jaeryang Baek