forked from open-webui/open-webui

feat: abort openai text completion when stopping responses

parent c1d85f8a6f
commit c095a7c291

4 changed files with 20 additions and 8 deletions
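The change threads an AbortController through the OpenAI completion helper: the helper now returns the controller alongside the fetch Response, and callers invoke controller.abort() when the user stops a response. The sketch below is a minimal illustration of that pattern, not the repository's exact code; apart from generateOpenAIChatCompletion itself, every name (the token, URL, body values, and the streamUntilStopped caller) is an assumed placeholder.

// Minimal sketch of the abort pattern (illustrative, assumed code).
// The helper creates an AbortController, hands its signal to fetch,
// and returns the controller together with the response.
const generateOpenAIChatCompletion = async (
	token: string,
	body: object,
	url: string
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch(() => null);

	return [res, controller];
};

// Illustrative caller: read the streamed body until it ends, or abort the
// underlying request when a stop flag is raised.
const streamUntilStopped = async (stopRequested: () => boolean) => {
	const [res, controller] = await generateOpenAIChatCompletion(
		'example-token',
		{ model: 'gpt-3.5-turbo', stream: true, messages: [] },
		'https://api.openai.com/v1'
	);
	if (!res || !res.body) {
		return;
	}

	const reader = res.body.getReader();
	for (;;) {
		const { done } = await reader.read();
		if (done || stopRequested()) {
			if (stopRequested()) {
				// Cancels the in-flight HTTP request; the reason is surfaced
				// as the rejection value of any pending read().
				controller.abort('User: Stop Response');
			}
			break;
		}
	}
};

In the playground handlers below, the same controller.abort() call replaces the previous cancelOllamaRequest() round trip, so stopping an OpenAI completion no longer depends on an Ollama-specific endpoint.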
@@ -211,10 +211,12 @@ export const generateOpenAIChatCompletion = async (
 	token: string = '',
 	body: object,
 	url: string = OPENAI_API_BASE_URL
-) => {
+): Promise<[Response | null, AbortController]> => {
+	const controller = new AbortController();
 	let error = null;
 
 	const res = await fetch(`${url}/chat/completions`, {
+		signal: controller.signal,
 		method: 'POST',
 		headers: {
 			Authorization: `Bearer ${token}`,
@@ -231,7 +233,7 @@ export const generateOpenAIChatCompletion = async (
 		throw error;
 	}
 
-	return res;
+	return [res, controller];
 };
 
 export const synthesizeOpenAISpeech = async (

@@ -532,7 +532,7 @@
 
 		console.log(model);
 
-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -608,6 +608,11 @@
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
+
+					if (stopResponseFlag) {
+						controller.abort('User: Stop Response');
+					}
+
 					break;
 				}
 

@@ -544,7 +544,7 @@
 
 		console.log(docs);
 
-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -620,6 +620,11 @@
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
+
+					if (stopResponseFlag) {
+						controller.abort('User: Stop Response');
+					}
+
 					break;
 				}
 

@@ -67,7 +67,7 @@
 	const textCompletionHandler = async () => {
 		const model = $models.find((model) => model.id === selectedModelId);
 
-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -96,7 +96,7 @@
 				const { value, done } = await reader.read();
 				if (done || stopResponseFlag) {
 					if (stopResponseFlag) {
-						await cancelOllamaRequest(localStorage.token, currentRequestId);
+						controller.abort('User: Stop Response');
 					}
 
 					currentRequestId = null;
@@ -135,7 +135,7 @@
 	const chatCompletionHandler = async () => {
 		const model = $models.find((model) => model.id === selectedModelId);
 
-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -182,7 +182,7 @@
 				const { value, done } = await reader.read();
 				if (done || stopResponseFlag) {
 					if (stopResponseFlag) {
-						await cancelOllamaRequest(localStorage.token, currentRequestId);
+						controller.abort('User: Stop Response');
 					}
 
 					currentRequestId = null;