forked from open-webui/open-webui

fix: pip dependency

parent e8904ac3f7
commit bfba72d486

3 changed files with 25 additions and 24 deletions
@@ -3,7 +3,7 @@
 	import { createEventDispatcher, onMount } from 'svelte';
 	const dispatch = createEventDispatcher();
 
-	import { getOllamaAPIUrl, updateOllamaAPIUrl } from '$lib/apis/ollama';
+	import { getOllamaAPIUrl, getOllamaVersion, updateOllamaAPIUrl } from '$lib/apis/ollama';
 	import { getOpenAIKey, getOpenAIUrl, updateOpenAIKey, updateOpenAIUrl } from '$lib/apis/openai';
 	import toast from 'svelte-french-toast';
 
@@ -24,11 +24,14 @@
 
 	const updateOllamaAPIUrlHandler = async () => {
 		API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);
-		const _models = await getModels('ollama');
 
-		if (_models.length > 0) {
+		const ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
+			return null;
+		});
+
+		if (ollamaVersion) {
 			toast.success('Server connection verified');
-			await models.set(_models);
+			await models.set(await getModels());
 		}
 	};
 
@@ -28,31 +28,29 @@
 
 	let selectedTab = 'general';
 
-	const getModels = async (type = 'all') => {
-		const models = [];
-		models.push(
-			...(await getOllamaModels(localStorage.token).catch((error) => {
-				toast.error(error);
-				return [];
-			}))
-		);
-
-		if (type === 'all') {
-			const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
+	const getModels = async () => {
+		let models = await Promise.all([
+			await getOllamaModels(localStorage.token).catch((error) => {
 				console.log(error);
 				return null;
-			});
-
-			models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
-
-			const liteLLMModels = await getLiteLLMModels(localStorage.token).catch((error) => {
+			}),
+			await getOpenAIModels(localStorage.token).catch((error) => {
 				console.log(error);
 				return null;
-			});
+			}),
+			await getLiteLLMModels(localStorage.token).catch((error) => {
+				console.log(error);
+				return null;
+			})
+		]);
 
-			models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
-		}
+		models = models
+			.filter((models) => models)
+			.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);
 
+		// models.push(...(ollamaModels ? [{ name: 'hr' }, ...ollamaModels] : []));
+		// models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
+		// models.push(...(liteLLMModels ? [{ name: 'hr' }, ...liteLLMModels] : []));
 		return models;
 	};
 </script>
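For reference, the rewritten getModels above collects the three backend results via Promise.all, lets any failed fetch resolve to null, filters the nulls out, and uses the reduce to place an { name: 'hr' } divider between the groups that remain. A minimal standalone sketch of that merge step, with made-up model names standing in for the real API responses:

const groups = [
	[{ name: 'llama2' }, { name: 'mistral' }], // e.g. the Ollama result
	null, // e.g. the OpenAI fetch failed and resolved to null
	[{ name: 'gpt-4-proxy' }] // e.g. the LiteLLM result
];

// Drop failed (null) groups, then concatenate the rest with an 'hr' divider
// between each pair of surviving groups (none after the last one).
const merged = groups
	.filter((g) => g)
	.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);

console.log(merged.map((m) => m.name));
// [ 'llama2', 'mistral', 'hr', 'gpt-4-proxy' ]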
Timothy J. Baek