forked from open-webui/open-webui

fix: continue generation

Timothy J. Baek

parent 6245146661
commit 1ff0c9a95d

2 changed files with 117 additions and 112 deletions
@@ -732,25 +732,26 @@
 			responseMessage.done = false;
 			await tick();

-			const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

-			if (modelTag?.external) {
-				await sendPromptOpenAI(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else if (modelTag) {
-				await sendPromptOllama(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else {
-				toast.error(`Model ${model} not found`);
+			if (model) {
+				if (model?.external) {
+					await sendPromptOpenAI(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
+				} else
+					await sendPromptOllama(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
 			}
+		} else {
+			toast.error(`Model ${modelId} not found`);
 		}
 	};

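Review note: continueGeneration now resolves the model record by its stable `id` instead of the display `name`, and passes the whole model object (rather than the name string) into sendPromptOpenAI/sendPromptOllama. Two observations: the new `} else { toast.error(...) }` branch attaches to the outer block rather than the model lookup, and it interpolates `modelId`, which does not appear anywhere else in this hunk's scope, so if that branch is reachable it would likely throw a ReferenceError instead of showing the toast. Also, since the line numbers reset at the next hunk header, the hunks from @@ -238,7 onward belong to the second changed file. A standalone sketch of the new lookup-and-dispatch shape (stand-in data, not the app's real stores; model entries are assumed to look like { id, name, external? }):

	// Stand-in for the $models store.
	const models = [
		{ id: 'llama2:latest', name: 'Llama 2' },
		{ id: 'gpt-4', name: 'GPT-4', external: true }
	];

	// After this commit the lookup keys on the stable `id`, not the display `name`.
	const resolve = (modelId) => models.filter((m) => m.id === modelId).at(0);

	const model = resolve('gpt-4');
	if (model) {
		// External models take the OpenAI-compatible path; local ones go to Ollama.
		console.log(model.external ? 'sendPromptOpenAI(model, ...)' : 'sendPromptOllama(model, ...)');
	} else {
		console.error('Model gpt-4 not found');
	}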
@@ -238,7 +238,6 @@
 			await sendPrompt(userPrompt, userMessageId);
 		}
 	};
-
 	const sendPrompt = async (prompt, parentId) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));

@@ -292,40 +291,41 @@
 		}

 		await Promise.all(
-			selectedModels.map(async (model) => {
-				console.log(model);
-				const modelTag = $models.filter((m) => m.name === model).at(0);
+			selectedModels.map(async (modelId) => {
+				const model = $models.filter((m) => m.id === modelId).at(0);

-				// Create response message
-				let responseMessageId = uuidv4();
-				let responseMessage = {
-					parentId: parentId,
-					id: responseMessageId,
-					childrenIds: [],
-					role: 'assistant',
-					content: '',
-					model: model,
-					timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-				};
+				if (model) {
+					// Create response message
+					let responseMessageId = uuidv4();
+					let responseMessage = {
+						parentId: parentId,
+						id: responseMessageId,
+						childrenIds: [],
+						role: 'assistant',
+						content: '',
+						model: model.id,
+						timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+					};

-				// Add message to history and Set currentId to messageId
-				history.messages[responseMessageId] = responseMessage;
-				history.currentId = responseMessageId;
+					// Add message to history and Set currentId to messageId
+					history.messages[responseMessageId] = responseMessage;
+					history.currentId = responseMessageId;

-				// Append messageId to childrenIds of parent message
-				if (parentId !== null) {
-					history.messages[parentId].childrenIds = [
-						...history.messages[parentId].childrenIds,
-						responseMessageId
-					];
-				}
+					// Append messageId to childrenIds of parent message
+					if (parentId !== null) {
+						history.messages[parentId].childrenIds = [
+							...history.messages[parentId].childrenIds,
+							responseMessageId
+						];
+					}

-				if (modelTag?.external) {
-					await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-				} else if (modelTag) {
-					await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+					if (model?.external) {
+						await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+					} else if (model) {
+						await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+					}
 				} else {
-					toast.error(`Model ${model} not found`);
+					toast.error(`Model ${modelId} not found`);
 				}
 			})
 		);
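Review note: sendPrompt's map callback now receives a `modelId`, resolves the model object up front, and guards the entire message-creation block, so an unknown id surfaces a toast instead of leaving behind an orphaned empty assistant message. A standalone sketch of that guard pattern (the `models` array and `toast` object are stand-ins for the app's store and toast library):

	const models = [{ id: 'llama2:latest' }, { id: 'gpt-4', external: true }];
	const toast = { error: (msg) => console.error(msg) }; // stand-in

	async function dispatch(selectedModels, prompt) {
		await Promise.all(
			selectedModels.map(async (modelId) => {
				const model = models.filter((m) => m.id === modelId).at(0);
				if (!model) {
					// Unknown ids now fail loudly before any response message is created.
					toast.error(`Model ${modelId} not found`);
					return;
				}
				// The real code builds the response message here, then routes by model.external.
				console.log(`${model.external ? 'openai' : 'ollama'} <- "${prompt}" via ${model.id}`);
			})
		);
	}

	dispatch(['gpt-4', 'missing:model'], 'hello');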
@@ -334,6 +334,7 @@
 	};

 	const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+		model = model.id;
 		const responseMessage = history.messages[responseMessageId];

 		// Wait until history/message have been updated
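Review note: the one-line shim keeps the rest of sendPromptOllama unchanged: callers now hand over the whole model object, and the parameter is immediately rebound to its id. A trivial sketch of the side effect (not the app's code):

	function sendPromptOllamaShim(model /* now an object, e.g. { id, name, external } */) {
		model = model.id;
		// From here on `model` is the plain id string; fields such as
		// model.external are no longer reachable inside this function.
		return model;
	}

	console.log(sendPromptOllamaShim({ id: 'llama2:latest', name: 'Llama 2' })); // "llama2:latest"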
@@ -543,57 +544,60 @@

 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
-
 		scrollToBottom();

-		const res = await generateOpenAIChatCompletion(localStorage.token, {
-			model: model,
-			stream: true,
-			messages: [
-				$settings.system
-					? {
-							role: 'system',
-							content: $settings.system
-					  }
-					: undefined,
-				...messages.filter((message) => !message.deleted)
-			]
-				.filter((message) => message)
-				.map((message, idx, arr) => ({
-					role: message.role,
-					...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
-						? {
-								content: [
-									{
-										type: 'text',
-										text:
-											arr.length - 1 !== idx
-												? message.content
-												: message?.raContent ?? message.content
-									},
-									...message.files
-										.filter((file) => file.type === 'image')
-										.map((file) => ({
-											type: 'image_url',
-											image_url: {
-												url: file.url
-											}
-										}))
-								]
-						  }
-						: {
-								content:
-									arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
-						  })
-				})),
-			seed: $settings?.options?.seed ?? undefined,
-			stop: $settings?.options?.stop ?? undefined,
-			temperature: $settings?.options?.temperature ?? undefined,
-			top_p: $settings?.options?.top_p ?? undefined,
-			num_ctx: $settings?.options?.num_ctx ?? undefined,
-			frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-			max_tokens: $settings?.options?.num_predict ?? undefined
-		});
+		const res = await generateOpenAIChatCompletion(
+			localStorage.token,
+			{
+				model: model.id,
+				stream: true,
+				messages: [
+					$settings.system
+						? {
+								role: 'system',
+								content: $settings.system
+						  }
+						: undefined,
+					...messages.filter((message) => !message.deleted)
+				]
+					.filter((message) => message)
+					.map((message, idx, arr) => ({
+						role: message.role,
+						...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+							? {
+									content: [
+										{
+											type: 'text',
+											text:
+												arr.length - 1 !== idx
+													? message.content
+													: message?.raContent ?? message.content
+										},
+										...message.files
+											.filter((file) => file.type === 'image')
+											.map((file) => ({
+												type: 'image_url',
+												image_url: {
+													url: file.url
+												}
+											}))
+									]
+							  }
+							: {
+									content:
+										arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
+							  })
+					})),
+				seed: $settings?.options?.seed ?? undefined,
+				stop: $settings?.options?.stop ?? undefined,
+				temperature: $settings?.options?.temperature ?? undefined,
+				top_p: $settings?.options?.top_p ?? undefined,
+				num_ctx: $settings?.options?.num_ctx ?? undefined,
+				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+				max_tokens: $settings?.options?.num_predict ?? undefined
+			},
+			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+		);

 		if (res && res.ok) {
 			const reader = res.body
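Review note: besides reading `model.id` from the object and re-indenting the request body into a positional options argument, this hunk adds a third argument to generateOpenAIChatCompletion that picks the API base per model. A standalone sketch of that selection, with stand-in constants (the real LITELLM_API_BASE_URL and OPENAI_API_BASE_URL come from the app's constants module):

	// Stand-in values for illustration only.
	const LITELLM_API_BASE_URL = 'http://localhost:8080/litellm/api';
	const OPENAI_API_BASE_URL = 'http://localhost:8080/openai/api';

	const apiBaseFor = (model) =>
		// LiteLLM-proxied models talk to its OpenAI-compatible /v1 endpoint;
		// other external models use the configured OpenAI base URL.
		model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`;

	console.log(apiBaseFor({ id: 'gpt-4', source: 'litellm' })); // .../litellm/api/v1
	console.log(apiBaseFor({ id: 'gpt-4', source: 'openai' }));  // .../openai/api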
@@ -704,7 +708,6 @@
 			await setChatTitle(_chatId, userPrompt);
 		}
 	};
-
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');
@@ -719,25 +722,26 @@
 			responseMessage.done = false;
 			await tick();

-			const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+			const model = $models.filter((m) => m.id === responseMessage.model).at(0);

-			if (modelTag?.external) {
-				await sendPromptOpenAI(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else if (modelTag) {
-				await sendPromptOllama(
-					responseMessage.model,
-					history.messages[responseMessage.parentId].content,
-					responseMessage.id,
-					_chatId
-				);
-			} else {
-				toast.error(`Model ${model} not found`);
+			if (model) {
+				if (model?.external) {
+					await sendPromptOpenAI(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
+				} else
+					await sendPromptOllama(
+						model,
+						history.messages[responseMessage.parentId].content,
+						responseMessage.id,
+						_chatId
+					);
 			}
+		} else {
+			toast.error(`Model ${modelId} not found`);
 		}
 	};

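Review note: this final hunk mirrors the continueGeneration change from the first file, and carries the same caveat noted there: the relocated error branch interpolates `modelId`, which is not visible in this hunk's scope.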