From f6640c4e8b90b9e9a34bd103444f4f273f3e7ae1 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Wed, 3 Jan 2024 23:46:49 -0800
Subject: [PATCH] feat: sort model list by alphabetical order

---
 src/lib/apis/ollama/index.ts                 |  4 ++-
 src/lib/apis/openai/index.ts                 |  5 ++-
 src/lib/components/chat/SettingsModal.svelte | 35 ++++++--------------
 3 files changed, 17 insertions(+), 27 deletions(-)

diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts
index 198ea418..75a02a8b 100644
--- a/src/lib/apis/ollama/index.ts
+++ b/src/lib/apis/ollama/index.ts
@@ -67,7 +67,9 @@ export const getOllamaModels = async (
 		throw error;
 	}
 
-	return res?.models ?? [];
+	return (res?.models ?? []).sort((a, b) => {
+		return a.name.localeCompare(b.name);
+	});
 };
 
 export const generateTitle = async (
diff --git a/src/lib/apis/openai/index.ts b/src/lib/apis/openai/index.ts
index 89776268..c144ae89 100644
--- a/src/lib/apis/openai/index.ts
+++ b/src/lib/apis/openai/index.ts
@@ -29,5 +29,8 @@ export const getOpenAIModels = async (
 
 	return models
 		.map((model) => ({ name: model.id, external: true }))
-		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
+		.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
+		.sort((a, b) => {
+			return a.name.localeCompare(b.name);
+		});
 };
diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte
index 8ba3b1e0..7698407b 100644
--- a/src/lib/components/chat/SettingsModal.svelte
+++ b/src/lib/components/chat/SettingsModal.svelte
@@ -7,7 +7,7 @@
 	import { config, models, settings, user, chats } from '$lib/stores';
 	import { splitStream, getGravatarURL } from '$lib/utils';
 
-	import { getOllamaVersion } from '$lib/apis/ollama';
+	import { getOllamaVersion, getOllamaModels } from '$lib/apis/ollama';
 	import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
 	import {
 		WEB_UI_VERSION,
@@ -545,30 +545,15 @@
 	const getModels = async (url = '', type = 'all') => {
 		let models = [];
 
-		const res = await fetch(`${url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/tags`, {
-			method: 'GET',
-			headers: {
-				Accept: 'application/json',
-				'Content-Type': 'application/json',
-				...($settings.authHeader && { Authorization: $settings.authHeader }),
-				...($user && { Authorization: `Bearer ${localStorage.token}` })
-			}
-		})
-			.then(async (res) => {
-				if (!res.ok) throw await res.json();
-				return res.json();
-			})
-			.catch((error) => {
-				console.log(error);
-				if ('detail' in error) {
-					toast.error(error.detail);
-				} else {
-					toast.error('Server connection failed');
-				}
-				return null;
-			});
-		console.log(res);
-		models.push(...(res?.models ?? []));
+		models.push(
+			...(await getOllamaModels(
+				url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
+				localStorage.token
+			).catch((error) => {
+				toast.error(error);
+				return [];
+			}))
+		);
 
 		// If OpenAI API Key exists
 		if (type === 'all' && $settings.OPENAI_API_KEY) {