forked from open-webui/open-webui

Merge branch 'dev' into feat/add-i18n

commit b8902072fd

27 changed files with 802 additions and 493 deletions

@@ -237,53 +237,6 @@
 	const sendPrompt = async (prompt, parentId) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 
-		const docs = messages
-			.filter((message) => message?.files ?? null)
-			.map((message) =>
-				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
-			)
-			.flat(1);
-
-		console.log(docs);
-		if (docs.length > 0) {
-			processing = 'Reading';
-			const query = history.messages[parentId].content;
-
-			let relevantContexts = await Promise.all(
-				docs.map(async (doc) => {
-					if (doc.type === 'collection') {
-						return await queryCollection(localStorage.token, doc.collection_names, query).catch(
-							(error) => {
-								console.log(error);
-								return null;
-							}
-						);
-					} else {
-						return await queryDoc(localStorage.token, doc.collection_name, query).catch((error) => {
-							console.log(error);
-							return null;
-						});
-					}
-				})
-			);
-			relevantContexts = relevantContexts.filter((context) => context);
-
-			const contextString = relevantContexts.reduce((a, context, i, arr) => {
-				return `${a}${context.documents.join(' ')}\n`;
-			}, '');
-
-			console.log(contextString);
-
-			history.messages[parentId].raContent = await RAGTemplate(
-				localStorage.token,
-				contextString,
-				query
-			);
-			history.messages[parentId].contexts = relevantContexts;
-			await tick();
-			processing = '';
-		}
-
 		await Promise.all(
 			selectedModels.map(async (modelId) => {
 				const model = $models.filter((m) => m.id === modelId).at(0);
@@ -347,15 +300,25 @@
 			...messages
 		]
 			.filter((message) => message)
-			.map((message, idx, arr) => ({
-				role: message.role,
-				content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content,
-				...(message.files && {
-					images: message.files
-						.filter((file) => file.type === 'image')
-						.map((file) => file.url.slice(file.url.indexOf(',') + 1))
-				})
-			}));
+			.map((message, idx, arr) => {
+				// Prepare the base message object
+				const baseMessage = {
+					role: message.role,
+					content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content
+				};
+
+				// Extract and format image URLs if any exist
+				const imageUrls = message.files
+					?.filter((file) => file.type === 'image')
+					.map((file) => file.url.slice(file.url.indexOf(',') + 1));
+
+				// Add images array only if it contains elements
+				if (imageUrls && imageUrls.length > 0) {
+					baseMessage.images = imageUrls;
+				}
+
+				return baseMessage;
+			});
 
 		let lastImageIndex = -1;
 
@@ -373,6 +336,13 @@
 			}
 		});
 
+		const docs = messages
+			.filter((message) => message?.files ?? null)
+			.map((message) =>
+				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
+			)
+			.flat(1);
+
 		const [res, controller] = await generateChatCompletion(localStorage.token, {
 			model: model,
 			messages: messagesBody,
@@ -380,7 +350,8 @@
 				...($settings.options ?? {})
 			},
 			format: $settings.requestFormat ?? undefined,
-			keep_alive: $settings.keepAlive ?? undefined
+			keep_alive: $settings.keepAlive ?? undefined,
+			docs: docs.length > 0 ? docs : undefined
 		});
 
 		if (res && res.ok) {
@@ -546,6 +517,15 @@
 		const responseMessage = history.messages[responseMessageId];
 		scrollToBottom();
 
+		const docs = messages
+			.filter((message) => message?.files ?? null)
+			.map((message) =>
+				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
+			)
+			.flat(1);
+
+		console.log(docs);
+
 		const res = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
@@ -594,7 +574,8 @@
 				top_p: $settings?.options?.top_p ?? undefined,
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-				max_tokens: $settings?.options?.num_predict ?? undefined
+				max_tokens: $settings?.options?.num_predict ?? undefined,
+				docs: docs.length > 0 ? docs : undefined
 			},
 			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
 		);
@@ -711,7 +692,12 @@
 
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await setChatTitle(_chatId, userPrompt);
+
+			if ($settings?.titleAutoGenerateModel) {
+				await generateChatTitle(_chatId, userPrompt);
+			} else {
+				await setChatTitle(_chatId, userPrompt);
+			}
 		}
 	};
 

@@ -247,53 +247,6 @@
 	const sendPrompt = async (prompt, parentId) => {
 		const _chatId = JSON.parse(JSON.stringify($chatId));
 
-		const docs = messages
-			.filter((message) => message?.files ?? null)
-			.map((message) =>
-				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
-			)
-			.flat(1);
-
-		console.log(docs);
-		if (docs.length > 0) {
-			processing = 'Reading';
-			const query = history.messages[parentId].content;
-
-			let relevantContexts = await Promise.all(
-				docs.map(async (doc) => {
-					if (doc.type === 'collection') {
-						return await queryCollection(localStorage.token, doc.collection_names, query).catch(
-							(error) => {
-								console.log(error);
-								return null;
-							}
-						);
-					} else {
-						return await queryDoc(localStorage.token, doc.collection_name, query).catch((error) => {
-							console.log(error);
-							return null;
-						});
-					}
-				})
-			);
-			relevantContexts = relevantContexts.filter((context) => context);
-
-			const contextString = relevantContexts.reduce((a, context, i, arr) => {
-				return `${a}${context.documents.join(' ')}\n`;
-			}, '');
-
-			console.log(contextString);
-
-			history.messages[parentId].raContent = await RAGTemplate(
-				localStorage.token,
-				contextString,
-				query
-			);
-			history.messages[parentId].contexts = relevantContexts;
-			await tick();
-			processing = '';
-		}
-
 		await Promise.all(
 			selectedModels.map(async (modelId) => {
 				const model = $models.filter((m) => m.id === modelId).at(0);
@@ -357,15 +310,25 @@
 			...messages
 		]
 			.filter((message) => message)
-			.map((message, idx, arr) => ({
-				role: message.role,
-				content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content,
-				...(message.files && {
-					images: message.files
-						.filter((file) => file.type === 'image')
-						.map((file) => file.url.slice(file.url.indexOf(',') + 1))
-				})
-			}));
+			.map((message, idx, arr) => {
+				// Prepare the base message object
+				const baseMessage = {
+					role: message.role,
+					content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content
+				};
+
+				// Extract and format image URLs if any exist
+				const imageUrls = message.files
+					?.filter((file) => file.type === 'image')
+					.map((file) => file.url.slice(file.url.indexOf(',') + 1));
+
+				// Add images array only if it contains elements
+				if (imageUrls && imageUrls.length > 0) {
+					baseMessage.images = imageUrls;
+				}
+
+				return baseMessage;
+			});
 
 		let lastImageIndex = -1;
 
@@ -383,6 +346,13 @@
 			}
 		});
 
+		const docs = messages
+			.filter((message) => message?.files ?? null)
+			.map((message) =>
+				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
+			)
+			.flat(1);
+
 		const [res, controller] = await generateChatCompletion(localStorage.token, {
 			model: model,
 			messages: messagesBody,
@@ -390,7 +360,8 @@
 				...($settings.options ?? {})
 			},
 			format: $settings.requestFormat ?? undefined,
-			keep_alive: $settings.keepAlive ?? undefined
+			keep_alive: $settings.keepAlive ?? undefined,
+			docs: docs.length > 0 ? docs : undefined
 		});
 
 		if (res && res.ok) {
@@ -556,6 +527,15 @@
 		const responseMessage = history.messages[responseMessageId];
 		scrollToBottom();
 
+		const docs = messages
+			.filter((message) => message?.files ?? null)
+			.map((message) =>
+				message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
+			)
+			.flat(1);
+
+		console.log(docs);
+
 		const res = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
@@ -604,7 +584,8 @@
 				top_p: $settings?.options?.top_p ?? undefined,
 				num_ctx: $settings?.options?.num_ctx ?? undefined,
 				frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-				max_tokens: $settings?.options?.num_predict ?? undefined
+				max_tokens: $settings?.options?.num_predict ?? undefined,
+				docs: docs.length > 0 ? docs : undefined
 			},
 			model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
 		);
@@ -724,6 +705,7 @@
 			await setChatTitle(_chatId, userPrompt);
 		}
 	};
+
 	const stopResponse = () => {
 		stopResponseFlag = true;
 		console.log('stopResponse');

@@ -562,7 +562,9 @@
 
					<button
						class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
-						on:click={documentsImportInputElement.click}
+						on:click={() => {
+							documentsImportInputElement.click();
+						}}
					>
						<div class=" self-center mr-2 font-medium">{$i18n.t('Import Documents Mapping')}</div>
 

@@ -268,7 +268,9 @@
 
					<button
						class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
-						on:click={modelfilesImportInputElement.click}
+						on:click={() => {
+							modelfilesImportInputElement.click();
+						}}
					>
						<div class=" self-center mr-2 font-medium">{$i18n.t('Import Modelfiles')}</div>
 

@@ -269,7 +269,7 @@
 
 <div class="min-h-screen max-h-[100dvh] w-full flex justify-center dark:text-white">
 	<div class=" flex flex-col justify-between w-full overflow-y-auto h-[100dvh]">
-		<div class="max-w-2xl mx-auto w-full px-3 p-3 md:px-0 h-full">
+		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 my-10 h-full">
 			<div class=" flex flex-col h-full">
 				<div class="flex flex-col justify-between mb-2.5 gap-1">
 					<div class="flex justify-between items-center gap-2">

@@ -244,7 +244,9 @@
 
					<button
						class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
-						on:click={promptsImportInputElement.click}
+						on:click={() => {
+							promptsImportInputElement.click();
+						}}
					>
						<div class=" self-center mr-2 font-medium">{$i18n.t('Import Prompts')}</div>
 

Ased Mammad