forked from open-webui/open-webui

fix: openai multi-user issue

Author: Timothy J. Baek
commit 6fb036711b
parent a96795d752

2 changed files with 300 additions and 308 deletions
@@ -322,175 +322,171 @@
	};

	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
		if ($settings.OPENAI_API_KEY) {
			if (models) {
				let responseMessageId = uuidv4();
		let responseMessageId = uuidv4();

				let responseMessage = {
					parentId: parentId,
					id: responseMessageId,
					childrenIds: [],
					role: 'assistant',
					content: '',
					model: model
				};
		let responseMessage = {
			parentId: parentId,
			id: responseMessageId,
			childrenIds: [],
			role: 'assistant',
			content: '',
			model: model
		};

				history.messages[responseMessageId] = responseMessage;
				history.currentId = responseMessageId;
				if (parentId !== null) {
					history.messages[parentId].childrenIds = [
						...history.messages[parentId].childrenIds,
						responseMessageId
					];
				}
		history.messages[responseMessageId] = responseMessage;
		history.currentId = responseMessageId;
		if (parentId !== null) {
			history.messages[parentId].childrenIds = [
				...history.messages[parentId].childrenIds,
				responseMessageId
			];
		}

				window.scrollTo({ top: document.body.scrollHeight });
		window.scrollTo({ top: document.body.scrollHeight });

				const res = await generateOpenAIChatCompletion(localStorage.token, {
					model: model,
					stream: true,
					messages: [
						$settings.system
							? {
									role: 'system',
									content: $settings.system
							  }
							: undefined,
						...messages
					]
						.filter((message) => message)
						.map((message) => ({
							role: message.role,
							...(message.files
								? {
										content: [
											{
												type: 'text',
												text: message.content
											},
											...message.files
												.filter((file) => file.type === 'image')
												.map((file) => ({
													type: 'image_url',
													image_url: {
														url: file.url
													}
												}))
										]
								  }
								: { content: message.content })
						})),
					seed: $settings?.options?.seed ?? undefined,
					stop: $settings?.options?.stop ?? undefined,
					temperature: $settings?.options?.temperature ?? undefined,
					top_p: $settings?.options?.top_p ?? undefined,
					num_ctx: $settings?.options?.num_ctx ?? undefined,
					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
					max_tokens: $settings?.options?.num_predict ?? undefined
				});
		const res = await generateOpenAIChatCompletion(localStorage.token, {
			model: model,
			stream: true,
			messages: [
				$settings.system
					? {
							role: 'system',
							content: $settings.system
					  }
					: undefined,
				...messages
			]
				.filter((message) => message)
				.map((message) => ({
					role: message.role,
					...(message.files
						? {
								content: [
									{
										type: 'text',
										text: message.content
									},
									...message.files
										.filter((file) => file.type === 'image')
										.map((file) => ({
											type: 'image_url',
											image_url: {
												url: file.url
											}
										}))
								]
						  }
						: { content: message.content })
				})),
			seed: $settings?.options?.seed ?? undefined,
			stop: $settings?.options?.stop ?? undefined,
			temperature: $settings?.options?.temperature ?? undefined,
			top_p: $settings?.options?.top_p ?? undefined,
			num_ctx: $settings?.options?.num_ctx ?? undefined,
			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
			max_tokens: $settings?.options?.num_predict ?? undefined
		});

				if (res && res.ok) {
					const reader = res.body
						.pipeThrough(new TextDecoderStream())
						.pipeThrough(splitStream('\n'))
						.getReader();
		if (res && res.ok) {
			const reader = res.body
				.pipeThrough(new TextDecoderStream())
				.pipeThrough(splitStream('\n'))
				.getReader();

					while (true) {
						const { value, done } = await reader.read();
						if (done || stopResponseFlag || _chatId !== $chatId) {
							responseMessage.done = true;
							messages = messages;
							break;
						}

						try {
							let lines = value.split('\n');

							for (const line of lines) {
								if (line !== '') {
									console.log(line);
									if (line === 'data: [DONE]') {
										responseMessage.done = true;
										messages = messages;
									} else {
										let data = JSON.parse(line.replace(/^data: /, ''));
										console.log(data);

										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
											continue;
										} else {
											responseMessage.content += data.choices[0].delta.content ?? '';
											messages = messages;
										}
									}
								}
							}
						} catch (error) {
							console.log(error);
						}

						if ($settings.notificationEnabled && !document.hasFocus()) {
							const notification = new Notification(`OpenAI ${model}`, {
								body: responseMessage.content,
								icon: '/favicon.png'
							});
						}

						if ($settings.responseAutoCopy) {
							copyToClipboard(responseMessage.content);
						}

						if (autoScroll) {
							window.scrollTo({ top: document.body.scrollHeight });
						}
					}

					if ($chatId == _chatId) {
						chat = await updateChatById(localStorage.token, _chatId, {
							messages: messages,
							history: history
						});
						await chats.set(await getChatList(localStorage.token));
					}
				} else {
					if (res !== null) {
						const error = await res.json();
						console.log(error);
						if ('detail' in error) {
							toast.error(error.detail);
							responseMessage.content = error.detail;
						} else {
							if ('message' in error.error) {
								toast.error(error.error.message);
								responseMessage.content = error.error.message;
							} else {
								toast.error(error.error);
								responseMessage.content = error.error;
							}
						}
					} else {
						toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
					}

					responseMessage.error = true;
					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
			while (true) {
				const { value, done } = await reader.read();
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;
					break;
				}

				stopResponseFlag = false;
				await tick();
				try {
					let lines = value.split('\n');

					for (const line of lines) {
						if (line !== '') {
							console.log(line);
							if (line === 'data: [DONE]') {
								responseMessage.done = true;
								messages = messages;
							} else {
								let data = JSON.parse(line.replace(/^data: /, ''));
								console.log(data);

								if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
									continue;
								} else {
									responseMessage.content += data.choices[0].delta.content ?? '';
									messages = messages;
								}
							}
						}
					}
				} catch (error) {
					console.log(error);
				}

				if ($settings.notificationEnabled && !document.hasFocus()) {
					const notification = new Notification(`OpenAI ${model}`, {
						body: responseMessage.content,
						icon: '/favicon.png'
					});
				}

				if ($settings.responseAutoCopy) {
					copyToClipboard(responseMessage.content);
				}

				if (autoScroll) {
					window.scrollTo({ top: document.body.scrollHeight });
				}

				if (messages.length == 2) {
					window.history.replaceState(history.state, '', `/c/${_chatId}`);
					await setChatTitle(_chatId, userPrompt);
				}
			}

			if ($chatId == _chatId) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history
				});
				await chats.set(await getChatList(localStorage.token));
			}
		} else {
			if (res !== null) {
				const error = await res.json();
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
					responseMessage.content = error.detail;
				} else {
					if ('message' in error.error) {
						toast.error(error.error.message);
						responseMessage.content = error.error.message;
					} else {
						toast.error(error.error);
						responseMessage.content = error.error;
					}
				}
			} else {
				toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
				responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
			}

			responseMessage.error = true;
			responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
			responseMessage.done = true;
			messages = messages;
		}

		stopResponseFlag = false;
		await tick();

		if (autoScroll) {
			window.scrollTo({ top: document.body.scrollHeight });
		}

		if (messages.length == 2) {
			window.history.replaceState(history.state, '', `/c/${_chatId}`);
			await setChatTitle(_chatId, userPrompt);
		}
	};

|  | @ -340,175 +340,171 @@ | |||
| 	}; | ||||
| 
 | ||||
| 	const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => { | ||||
| 		if ($settings.OPENAI_API_KEY) { | ||||
| 			if (models) { | ||||
| 				let responseMessageId = uuidv4(); | ||||
| 		let responseMessageId = uuidv4(); | ||||
| 
 | ||||
| 				let responseMessage = { | ||||
| 					parentId: parentId, | ||||
| 					id: responseMessageId, | ||||
| 					childrenIds: [], | ||||
| 					role: 'assistant', | ||||
| 					content: '', | ||||
| 					model: model | ||||
| 				}; | ||||
| 		let responseMessage = { | ||||
| 			parentId: parentId, | ||||
| 			id: responseMessageId, | ||||
| 			childrenIds: [], | ||||
| 			role: 'assistant', | ||||
| 			content: '', | ||||
| 			model: model | ||||
| 		}; | ||||
| 
 | ||||
| 				history.messages[responseMessageId] = responseMessage; | ||||
| 				history.currentId = responseMessageId; | ||||
| 				if (parentId !== null) { | ||||
| 					history.messages[parentId].childrenIds = [ | ||||
| 						...history.messages[parentId].childrenIds, | ||||
| 						responseMessageId | ||||
| 					]; | ||||
| 				} | ||||
| 		history.messages[responseMessageId] = responseMessage; | ||||
| 		history.currentId = responseMessageId; | ||||
| 		if (parentId !== null) { | ||||
| 			history.messages[parentId].childrenIds = [ | ||||
| 				...history.messages[parentId].childrenIds, | ||||
| 				responseMessageId | ||||
| 			]; | ||||
| 		} | ||||
| 
 | ||||
| 				window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 		window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 
 | ||||
| 				const res = await generateOpenAIChatCompletion(localStorage.token, { | ||||
| 					model: model, | ||||
| 					stream: true, | ||||
| 					messages: [ | ||||
| 						$settings.system | ||||
| 							? { | ||||
| 									role: 'system', | ||||
| 									content: $settings.system | ||||
| 							  } | ||||
| 							: undefined, | ||||
| 						...messages | ||||
| 					] | ||||
| 						.filter((message) => message) | ||||
| 						.map((message) => ({ | ||||
| 							role: message.role, | ||||
| 							...(message.files | ||||
| 								? { | ||||
| 										content: [ | ||||
| 											{ | ||||
| 												type: 'text', | ||||
| 												text: message.content | ||||
| 											}, | ||||
| 											...message.files | ||||
| 												.filter((file) => file.type === 'image') | ||||
| 												.map((file) => ({ | ||||
| 													type: 'image_url', | ||||
| 													image_url: { | ||||
| 														url: file.url | ||||
| 													} | ||||
| 												})) | ||||
| 										] | ||||
| 								  } | ||||
| 								: { content: message.content }) | ||||
| 						})), | ||||
| 					seed: $settings?.options?.seed ?? undefined, | ||||
| 					stop: $settings?.options?.stop ?? undefined, | ||||
| 					temperature: $settings?.options?.temperature ?? undefined, | ||||
| 					top_p: $settings?.options?.top_p ?? undefined, | ||||
| 					num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 					frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 					max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 				}); | ||||
| 		const res = await generateOpenAIChatCompletion(localStorage.token, { | ||||
| 			model: model, | ||||
| 			stream: true, | ||||
| 			messages: [ | ||||
| 				$settings.system | ||||
| 					? { | ||||
| 							role: 'system', | ||||
| 							content: $settings.system | ||||
| 					  } | ||||
| 					: undefined, | ||||
| 				...messages | ||||
| 			] | ||||
| 				.filter((message) => message) | ||||
| 				.map((message) => ({ | ||||
| 					role: message.role, | ||||
| 					...(message.files | ||||
| 						? { | ||||
| 								content: [ | ||||
| 									{ | ||||
| 										type: 'text', | ||||
| 										text: message.content | ||||
| 									}, | ||||
| 									...message.files | ||||
| 										.filter((file) => file.type === 'image') | ||||
| 										.map((file) => ({ | ||||
| 											type: 'image_url', | ||||
| 											image_url: { | ||||
| 												url: file.url | ||||
| 											} | ||||
| 										})) | ||||
| 								] | ||||
| 						  } | ||||
| 						: { content: message.content }) | ||||
| 				})), | ||||
| 			seed: $settings?.options?.seed ?? undefined, | ||||
| 			stop: $settings?.options?.stop ?? undefined, | ||||
| 			temperature: $settings?.options?.temperature ?? undefined, | ||||
| 			top_p: $settings?.options?.top_p ?? undefined, | ||||
| 			num_ctx: $settings?.options?.num_ctx ?? undefined, | ||||
| 			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined, | ||||
| 			max_tokens: $settings?.options?.num_predict ?? undefined | ||||
| 		}); | ||||
| 
 | ||||
| 				if (res && res.ok) { | ||||
| 					const reader = res.body | ||||
| 						.pipeThrough(new TextDecoderStream()) | ||||
| 						.pipeThrough(splitStream('\n')) | ||||
| 						.getReader(); | ||||
| 		if (res && res.ok) { | ||||
| 			const reader = res.body | ||||
| 				.pipeThrough(new TextDecoderStream()) | ||||
| 				.pipeThrough(splitStream('\n')) | ||||
| 				.getReader(); | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						const { value, done } = await reader.read(); | ||||
| 						if (done || stopResponseFlag || _chatId !== $chatId) { | ||||
| 							responseMessage.done = true; | ||||
| 							messages = messages; | ||||
| 							break; | ||||
| 						} | ||||
| 
 | ||||
| 						try { | ||||
| 							let lines = value.split('\n'); | ||||
| 
 | ||||
| 							for (const line of lines) { | ||||
| 								if (line !== '') { | ||||
| 									console.log(line); | ||||
| 									if (line === 'data: [DONE]') { | ||||
| 										responseMessage.done = true; | ||||
| 										messages = messages; | ||||
| 									} else { | ||||
| 										let data = JSON.parse(line.replace(/^data: /, '')); | ||||
| 										console.log(data); | ||||
| 
 | ||||
| 										if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { | ||||
| 											continue; | ||||
| 										} else { | ||||
| 											responseMessage.content += data.choices[0].delta.content ?? ''; | ||||
| 											messages = messages; | ||||
| 										} | ||||
| 									} | ||||
| 								} | ||||
| 							} | ||||
| 						} catch (error) { | ||||
| 							console.log(error); | ||||
| 						} | ||||
| 
 | ||||
| 						if ($settings.notificationEnabled && !document.hasFocus()) { | ||||
| 							const notification = new Notification(`OpenAI ${model}`, { | ||||
| 								body: responseMessage.content, | ||||
| 								icon: '/favicon.png' | ||||
| 							}); | ||||
| 						} | ||||
| 
 | ||||
| 						if ($settings.responseAutoCopy) { | ||||
| 							copyToClipboard(responseMessage.content); | ||||
| 						} | ||||
| 
 | ||||
| 						if (autoScroll) { | ||||
| 							window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 						} | ||||
| 					} | ||||
| 
 | ||||
| 					if ($chatId == _chatId) { | ||||
| 						chat = await updateChatById(localStorage.token, _chatId, { | ||||
| 							messages: messages, | ||||
| 							history: history | ||||
| 						}); | ||||
| 						await chats.set(await getChatList(localStorage.token)); | ||||
| 					} | ||||
| 				} else { | ||||
| 					if (res !== null) { | ||||
| 						const error = await res.json(); | ||||
| 						console.log(error); | ||||
| 						if ('detail' in error) { | ||||
| 							toast.error(error.detail); | ||||
| 							responseMessage.content = error.detail; | ||||
| 						} else { | ||||
| 							if ('message' in error.error) { | ||||
| 								toast.error(error.error.message); | ||||
| 								responseMessage.content = error.error.message; | ||||
| 							} else { | ||||
| 								toast.error(error.error); | ||||
| 								responseMessage.content = error.error; | ||||
| 							} | ||||
| 						} | ||||
| 					} else { | ||||
| 						toast.error(`Uh-oh! There was an issue connecting to ${model}.`); | ||||
| 						responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; | ||||
| 					} | ||||
| 
 | ||||
| 					responseMessage.error = true; | ||||
| 					responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; | ||||
| 			while (true) { | ||||
| 				const { value, done } = await reader.read(); | ||||
| 				if (done || stopResponseFlag || _chatId !== $chatId) { | ||||
| 					responseMessage.done = true; | ||||
| 					messages = messages; | ||||
| 					break; | ||||
| 				} | ||||
| 
 | ||||
| 				stopResponseFlag = false; | ||||
| 				await tick(); | ||||
| 				try { | ||||
| 					let lines = value.split('\n'); | ||||
| 
 | ||||
| 					for (const line of lines) { | ||||
| 						if (line !== '') { | ||||
| 							console.log(line); | ||||
| 							if (line === 'data: [DONE]') { | ||||
| 								responseMessage.done = true; | ||||
| 								messages = messages; | ||||
| 							} else { | ||||
| 								let data = JSON.parse(line.replace(/^data: /, '')); | ||||
| 								console.log(data); | ||||
| 
 | ||||
| 								if (responseMessage.content == '' && data.choices[0].delta.content == '\n') { | ||||
| 									continue; | ||||
| 								} else { | ||||
| 									responseMessage.content += data.choices[0].delta.content ?? ''; | ||||
| 									messages = messages; | ||||
| 								} | ||||
| 							} | ||||
| 						} | ||||
| 					} | ||||
| 				} catch (error) { | ||||
| 					console.log(error); | ||||
| 				} | ||||
| 
 | ||||
| 				if ($settings.notificationEnabled && !document.hasFocus()) { | ||||
| 					const notification = new Notification(`OpenAI ${model}`, { | ||||
| 						body: responseMessage.content, | ||||
| 						icon: '/favicon.png' | ||||
| 					}); | ||||
| 				} | ||||
| 
 | ||||
| 				if ($settings.responseAutoCopy) { | ||||
| 					copyToClipboard(responseMessage.content); | ||||
| 				} | ||||
| 
 | ||||
| 				if (autoScroll) { | ||||
| 					window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 				} | ||||
| 
 | ||||
| 				if (messages.length == 2) { | ||||
| 					window.history.replaceState(history.state, '', `/c/${_chatId}`); | ||||
| 					await setChatTitle(_chatId, userPrompt); | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			if ($chatId == _chatId) { | ||||
| 				chat = await updateChatById(localStorage.token, _chatId, { | ||||
| 					messages: messages, | ||||
| 					history: history | ||||
| 				}); | ||||
| 				await chats.set(await getChatList(localStorage.token)); | ||||
| 			} | ||||
| 		} else { | ||||
| 			if (res !== null) { | ||||
| 				const error = await res.json(); | ||||
| 				console.log(error); | ||||
| 				if ('detail' in error) { | ||||
| 					toast.error(error.detail); | ||||
| 					responseMessage.content = error.detail; | ||||
| 				} else { | ||||
| 					if ('message' in error.error) { | ||||
| 						toast.error(error.error.message); | ||||
| 						responseMessage.content = error.error.message; | ||||
| 					} else { | ||||
| 						toast.error(error.error); | ||||
| 						responseMessage.content = error.error; | ||||
| 					} | ||||
| 				} | ||||
| 			} else { | ||||
| 				toast.error(`Uh-oh! There was an issue connecting to ${model}.`); | ||||
| 				responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; | ||||
| 			} | ||||
| 
 | ||||
| 			responseMessage.error = true; | ||||
| 			responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`; | ||||
| 			responseMessage.done = true; | ||||
| 			messages = messages; | ||||
| 		} | ||||
| 
 | ||||
| 		stopResponseFlag = false; | ||||
| 		await tick(); | ||||
| 
 | ||||
| 		if (autoScroll) { | ||||
| 			window.scrollTo({ top: document.body.scrollHeight }); | ||||
| 		} | ||||
| 
 | ||||
| 		if (messages.length == 2) { | ||||
| 			window.history.replaceState(history.state, '', `/c/${_chatId}`); | ||||
| 			await setChatTitle(_chatId, userPrompt); | ||||
| 		} | ||||
| 	}; | ||||
| 
 | ||||
|  |  | |||
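Note: the change drops the client-side $settings.OPENAI_API_KEY / models guard, so the request is no longer tied to an API key stored per browser; generateOpenAIChatCompletion(localStorage.token, body) now sends the signed-in user's session token and leaves the shared OpenAI credentials to the server, which appears to be what the multi-user fix refers to. The wrapper itself is outside this diff; a minimal sketch, assuming a hypothetical '/openai/api/chat/completions' proxy route on the app backend, could look like this:

	// Hypothetical wrapper, not the project's actual API helper.
	// It only preserves the contract the diff depends on: the caller receives
	// either a raw Response (checked with res && res.ok, streamed via res.body)
	// or null when the request could not be sent at all.
	const generateOpenAIChatCompletion = async (token, body) => {
		const res = await fetch('/openai/api/chat/completions', {
			method: 'POST',
			headers: {
				Authorization: `Bearer ${token}`, // per-user session token, not an OpenAI key
				'Content-Type': 'application/json'
			},
			body: JSON.stringify(body)
		}).catch(() => null); // network failure -> null, matching the res !== null check

		return res;
	};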