forked from open-webui/open-webui

feat: improved chat history openai support

Author: Timothy J. Baek
parent b6f41a1706
commit 5dff474438
1 changed file with 17 additions and 3 deletions
@@ -479,6 +479,7 @@
 		history.messages[userMessageId] = userMessage;
 		history.currentId = userMessageId;
 
+		await tick();
 		await sendPrompt(userPrompt, userMessageId);
 	};
 
@@ -809,15 +810,28 @@
 		}
 	};
 
-	const sendPromptOpenAI = async (userPrompt) => {
+	const sendPromptOpenAI = async (userPrompt, parentId) => {
 		if (settings.OPENAI_API_KEY) {
 			if (models) {
+				let responseMessageId = uuidv4();
+
 				let responseMessage = {
+					parentId: parentId,
+					id: responseMessageId,
+					childrenIds: [],
 					role: 'assistant',
 					content: ''
 				};
 
-				messages = [...messages, responseMessage];
+				history.messages[responseMessageId] = responseMessage;
+				history.currentId = responseMessageId;
+				if (parentId !== null) {
+					history.messages[parentId].childrenIds = [
+						...history.messages[parentId].childrenIds,
+						responseMessageId
+					];
+				}
+
 				window.scrollTo({ top: document.body.scrollHeight });
 
 				const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
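The hunk above is the core of the change: instead of pushing the assistant reply onto a flat messages array, sendPromptOpenAI now registers it in the history map under a fresh uuid, links it to its parent through parentId/childrenIds, and advances history.currentId. A rough, self-contained sketch of that branching structure follows; it is not part of the commit, and the addMessage helper and sample prompts are made up for illustration:

import { v4 as uuidv4 } from 'uuid';

// Branching chat history: a flat map of messages plus a pointer to the
// currently selected leaf. Field names mirror the ones used in the diff.
const history = { messages: {}, currentId: null };

// Hypothetical helper (not in the commit): append a message under parentId
// and move currentId to it, as the hunk above does inline.
const addMessage = (role, content, parentId) => {
	const id = uuidv4();
	history.messages[id] = { id, parentId, childrenIds: [], role, content };
	if (parentId !== null) {
		history.messages[parentId].childrenIds = [
			...history.messages[parentId].childrenIds,
			id
		];
	}
	history.currentId = id;
	return id;
};

const userMessageId = addMessage('user', 'Hello!', null);
addMessage('assistant', '', userMessageId); // empty placeholder, filled as the reply arrives

Keeping childrenIds as an array is presumably what lets several regenerated replies hang off the same user message, with currentId selecting which branch is displayed.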
@@ -839,7 +853,7 @@
 							...messages
 						]
 							.filter((message) => message)
-							.map((message) => ({ ...message, done: undefined })),
+							.map((message) => ({ role: message.role, content: message.content })),
 						temperature: settings.temperature ?? undefined,
 						top_p: settings.top_p ?? undefined,
 						frequency_penalty: settings.repeat_penalty ?? undefined
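The last hunk changes the request payload: rather than spreading whole internal message objects (ids, parent/children links, the done flag) into the OpenAI request, each message is reduced to the role/content pair the chat completions endpoint expects. A standalone sketch of the resulting body, with placeholder settings, sample messages, and a model name that are not taken from the repository:

// Standalone illustration (not the commit's exact code): strip internal
// bookkeeping fields so only API-relevant keys are sent.
const settings = { temperature: 0.7 };
const messages = [
	{ id: 'u1', role: 'user', content: 'Hello!', childrenIds: ['a1'] },
	{ id: 'a1', role: 'assistant', content: 'Hi there.', parentId: 'u1', done: true }
];

const body = {
	model: 'gpt-3.5-turbo', // placeholder; the real value comes from settings
	messages: messages
		.filter((message) => message)
		.map((message) => ({ role: message.role, content: message.content })),
	temperature: settings.temperature ?? undefined,
	top_p: settings.top_p ?? undefined,
	frequency_penalty: settings.repeat_penalty ?? undefined
};

console.log(JSON.stringify(body, null, 2));

Dropping the extra keys keeps the request payload valid even as more bookkeeping fields are added to the internal message objects.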