From ea721feea901928477e3aa38925dda110b66aa76 Mon Sep 17 00:00:00 2001 From: Anuraag Jain Date: Sat, 6 Jan 2024 12:10:41 +0200 Subject: [PATCH 01/14] feat: parallel model downloads --- package-lock.json | 11 +++ package.json | 1 + src/lib/components/chat/SettingsModal.svelte | 80 ++++++++++++++++---- 3 files changed, 79 insertions(+), 13 deletions(-) diff --git a/package-lock.json b/package-lock.json index af8790a0..9e32e95b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,7 @@ "version": "0.0.1", "dependencies": { "@sveltejs/adapter-node": "^1.3.1", + "async": "^3.2.5", "file-saver": "^2.0.5", "highlight.js": "^11.9.0", "idb": "^7.1.1", @@ -1208,6 +1209,11 @@ "node": ">=8" } }, + "node_modules/async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, "node_modules/autoprefixer": { "version": "10.4.16", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", @@ -4645,6 +4651,11 @@ "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", "dev": true }, + "async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==" + }, "autoprefixer": { "version": "10.4.16", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", diff --git a/package.json b/package.json index 9c3b6ddf..92e8fdc8 100644 --- a/package.json +++ b/package.json @@ -39,6 +39,7 @@ "type": "module", "dependencies": { "@sveltejs/adapter-node": "^1.3.1", + "async": "^3.2.5", "file-saver": "^2.0.5", "highlight.js": "^11.9.0", "idb": "^7.1.1", diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index a220bd4e..d98a8f39 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -6,6 +6,7 @@ import { onMount } from 'svelte'; import { config, models, settings, user, chats } from '$lib/stores'; import { splitStream, getGravatarURL } from '$lib/utils'; + import queue from 'async/queue'; import { getOllamaVersion } from '$lib/apis/ollama'; import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats'; @@ -38,6 +39,8 @@ let theme = 'dark'; let notificationEnabled = false; let system = ''; + const modelDownloadQueue = queue((task:{modelName: string}, cb) => pullModelHandlerProcessor({modelName: task.modelName, callback: cb}), 3); + let modelDownloadStatus: Record = {}; // Advanced let requestFormat = ''; @@ -224,8 +227,9 @@ authEnabled = !authEnabled; }; - const pullModelHandler = async () => { - modelTransferring = true; + const pullModelHandlerProcessor = async (opts:{modelName:string, callback: Function}) => { + console.log('Pull model name', opts.modelName); + const res = await fetch(`${API_BASE_URL}/pull`, { method: 'POST', headers: { @@ -234,7 +238,7 @@ ...($user && { Authorization: `Bearer ${localStorage.token}` }) }, body: JSON.stringify({ - name: modelTag + name: opts.modelName }) }); @@ -265,11 +269,9 @@ } if (data.status) { if (!data.digest) { - toast.success(data.status); - if (data.status === 'success') { const notification = new Notification(`Ollama`, { - body: `Model '${modelTag}' has been successfully downloaded.`, + body: `Model '${opts.modelName}' has been 
successfully downloaded.`, icon: '/favicon.png' }); } @@ -280,21 +282,48 @@ } else { pullProgress = 100; } + modelDownloadStatus[opts.modelName] = {pullProgress}; } } } } } catch (error) { - console.log(error); - toast.error(error); + console.error(error); + opts.callback({success:false, error, modelName: opts.modelName}); } } + opts.callback({success: true, modelName: opts.modelName}); + }; + + const pullModelHandler = async() => { + if(modelDownloadStatus[modelTag]){ + toast.error("Model already in queue for downloading."); + return; + } + if(Object.keys(modelDownloadStatus).length === 3){ + toast.error('Maximum of 3 models can be downloading simultaneously. Please try again later'); + return; + } + modelTransferring = true; + + modelDownloadQueue.push({modelName: modelTag},async (data:{modelName: string; success: boolean; error?: Error}) => { + const {modelName} = data; + // Remove the downloaded model + delete modelDownloadStatus[modelName]; + + if(!data.success){ + toast.error(`There was some issue in downloading the model ${modelName}`); + return; + } + + toast.success(`Model ${modelName} was successfully downloaded`); + models.set(await getModels()); + }); modelTag = ''; - modelTransferring = false; + modelTransferring = false; + } - models.set(await getModels()); - }; const calculateSHA256 = async (file) => { console.log(file); @@ -1248,7 +1277,7 @@ > - {#if pullProgress !== null} + + {#if Object.keys(modelDownloadStatus).length > 0} + + + + + + + + + {#each Object.entries(modelDownloadStatus) as [modelName, payload]} + + + + + {/each} + +
+				Model Name
+				Download progress
+				{modelName}
+				{ payload.pullProgress ?? 0}%
+	{/if}
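
The patch above caps concurrent pulls by routing them through a worker queue from the `async` package. A minimal, self-contained TypeScript sketch of that pattern follows; `pullModel` here is only an illustrative stand-in for the component's fetch-based handler, and the model name is an arbitrary example.

    import { queue } from 'async';

    const MAX_PARALLEL_DOWNLOADS = 3;

    // Illustrative stand-in for the component's fetch-based pull handler.
    async function pullModel(modelName: string): Promise<void> {
        console.log(`pulling ${modelName}`);
    }

    // Worker: pull one model, then signal completion (or failure) to the queue.
    const modelDownloadQueue = queue<{ modelName: string }>((task, done) => {
        pullModel(task.modelName)
            .then(() => done())
            .catch((err) => done(err));
    }, MAX_PARALLEL_DOWNLOADS);

    // Enqueue a download; at most MAX_PARALLEL_DOWNLOADS workers run at once,
    // remaining tasks wait in the queue.
    modelDownloadQueue.push({ modelName: 'mistral:latest' }, (err) => {
        if (err) {
            console.error('download failed', err);
        } else {
            console.log('download finished');
        }
    });

Capping the queue at three workers matches the guard in pullModelHandler, which rejects new requests once three downloads are already in flight.
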
From fd0dcec61d90ceee6f2b859cbacd0d23058cdcd8 Mon Sep 17 00:00:00 2001 From: Anuraag Jain Date: Sat, 6 Jan 2024 13:15:21 +0200 Subject: [PATCH 02/14] fix: progress bar colors for light theme --- src/lib/components/chat/SettingsModal.svelte | 34 ++++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index d98a8f39..f2cb4f17 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -277,12 +277,13 @@ } } else { digest = data.digest; + let downloadProgress = 0; if (data.completed) { - pullProgress = Math.round((data.completed / data.total) * 1000) / 10; + downloadProgress = Math.round((data.completed / data.total) * 1000) / 10; } else { - pullProgress = 100; + downloadProgress = 100; } - modelDownloadStatus[opts.modelName] = {pullProgress}; + modelDownloadStatus[opts.modelName] = {pullProgress: downloadProgress, digest: data.digest}; } } } @@ -1277,22 +1278,22 @@ >
+	-->
+	{#if Object.keys(modelDownloadStatus).length > 0}
@@ -1308,12 +1309,17 @@
 	{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
+			{payload.digest}
+ {/each} From fd42422d6c981f7a5205f639a80b5dc6b2310c2d Mon Sep 17 00:00:00 2001 From: Anuraag Jain Date: Sat, 6 Jan 2024 15:01:47 +0200 Subject: [PATCH 03/14] refac: pullModel api --- src/lib/apis/ollama/index.ts | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts index 10eddc1c..5b0f9870 100644 --- a/src/lib/apis/ollama/index.ts +++ b/src/lib/apis/ollama/index.ts @@ -249,8 +249,7 @@ export const deleteModel = async (token: string, tagName: string) => { }; export const pullModel = async (token: string, tagName: string) => { - let error = null; - +try { const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, { method: 'POST', headers: { @@ -260,14 +259,9 @@ export const pullModel = async (token: string, tagName: string) => { body: JSON.stringify({ name: tagName }) - }).catch((err) => { - error = err; - return null; - }); - - if (error) { - throw error; - } - + }) return res; +} catch (error) { + throw error; +} }; From 8f570bc2ee83a87ad3654cc5e5ae4094e5fa904f Mon Sep 17 00:00:00 2001 From: Anuraag Jain Date: Sat, 6 Jan 2024 15:06:57 +0200 Subject: [PATCH 04/14] refac: code cleanup --- src/lib/components/chat/SettingsModal.svelte | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index 3fe15bf1..4ef2f016 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -50,7 +50,8 @@ let theme = 'dark'; let notificationEnabled = false; let system = ''; - const modelDownloadQueue = queue((task:{modelName: string}, cb) => pullModelHandlerProcessor({modelName: task.modelName, callback: cb}), 3); + const MAX_PARALLEL_DOWNLOADS = 3; + const modelDownloadQueue = queue((task:{modelName: string}, cb) => pullModelHandlerProcessor({modelName: task.modelName, callback: cb}), MAX_PARALLEL_DOWNLOADS); let modelDownloadStatus: Record = {}; // Advanced @@ -1187,23 +1188,6 @@ target="_blank">click here. - - - - {#if Object.keys(modelDownloadStatus).length > 0}
-				{modelName}
-				{ payload.pullProgress ?? 0}%
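
The per-model progress figures stored in modelDownloadStatus throughout these patches are derived from Ollama's streaming pull response, which arrives as newline-delimited JSON. The TypeScript sketch below shows that parsing loop in isolation; the local `splitStream` is a simplified stand-in for the project's own helper in `$lib/utils`, and the field names (`status`, `digest`, `completed`, `total`) are the ones used in the patches.

    // Simplified stand-in for the project's splitStream helper: buffers text
    // and emits one chunk per separator-delimited segment.
    function splitStream(separator: string) {
        let buffer = '';
        return new TransformStream<string, string>({
            transform(chunk, controller) {
                buffer += chunk;
                const parts = buffer.split(separator);
                buffer = parts.pop() ?? '';
                parts.forEach((part) => controller.enqueue(part));
            },
            flush(controller) {
                if (buffer) controller.enqueue(buffer);
            }
        });
    }

    // Read the streamed /pull response and report a percentage per update line.
    async function trackPullProgress(res: Response, onProgress: (pct: number) => void) {
        const reader = res.body!
            .pipeThrough(new TextDecoderStream())
            .pipeThrough(splitStream('\n'))
            .getReader();

        while (true) {
            const { value, done } = await reader.read();
            if (done) break;
            for (const line of (value ?? '').split('\n')) {
                if (line === '') continue;
                const data = JSON.parse(line);
                if (data.error) throw data.error;
                // Layer-download updates carry a digest plus completed/total byte counts.
                if (data.digest) {
                    onProgress(
                        data.completed ? Math.round((data.completed / data.total) * 1000) / 10 : 100
                    );
                }
            }
        }
    }
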
From 3853261b40f04648379654f693cffbce82363347 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sat, 6 Jan 2024 13:02:09 -0800 Subject: [PATCH 05/14] refac --- src/lib/apis/ollama/index.ts | 33 +++- src/lib/components/chat/SettingsModal.svelte | 180 ++++++++----------- src/lib/utils/index.ts | 35 ++++ 3 files changed, 134 insertions(+), 114 deletions(-) diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts index 5b0f9870..9d3ac0a8 100644 --- a/src/lib/apis/ollama/index.ts +++ b/src/lib/apis/ollama/index.ts @@ -249,7 +249,8 @@ export const deleteModel = async (token: string, tagName: string) => { }; export const pullModel = async (token: string, tagName: string) => { -try { + let error = null; + const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, { method: 'POST', headers: { @@ -259,9 +260,31 @@ try { body: JSON.stringify({ name: tagName }) - }) + }).catch((err) => { + console.log(err); + error = err; + + if ('detail' in err) { + error = err.detail; + } + + return null; + }); + if (error) { + throw error; + } return res; -} catch (error) { - throw error; -} }; + +// export const pullModel = async (token: string, tagName: string) => { +// return await fetch(`${OLLAMA_API_BASE_URL}/pull`, { +// method: 'POST', +// headers: { +// 'Content-Type': 'text/event-stream', +// Authorization: `Bearer ${token}` +// }, +// body: JSON.stringify({ +// name: tagName +// }) +// }); +// }; diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index 4ef2f016..e0f04d82 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -51,7 +51,11 @@ let notificationEnabled = false; let system = ''; const MAX_PARALLEL_DOWNLOADS = 3; - const modelDownloadQueue = queue((task:{modelName: string}, cb) => pullModelHandlerProcessor({modelName: task.modelName, callback: cb}), MAX_PARALLEL_DOWNLOADS); + const modelDownloadQueue = queue( + (task: { modelName: string }, cb) => + pullModelHandlerProcessor({ modelName: task.modelName, callback: cb }), + MAX_PARALLEL_DOWNLOADS + ); let modelDownloadStatus: Record = {}; // Advanced @@ -250,11 +254,13 @@ saveSettings({ saveChatHistory: saveChatHistory }); }; - const pullModelHandlerProcessor = async (opts:{modelName:string, callback: Function}) => { + const pullModelHandlerProcessor = async (opts: { modelName: string; callback: Function }) => { + const res = await pullModel(localStorage.token, opts.modelName).catch((error) => { + opts.callback({ success: false, error, modelName: opts.modelName }); + return null; + }); - try { - const res = await pullModel(localStorage.token, opts.modelName); - + if (res) { const reader = res.body .pipeThrough(new TextDecoderStream()) .pipeThrough(splitStream('\n')) @@ -270,102 +276,70 @@ for (const line of lines) { if (line !== '') { let data = JSON.parse(line); - if (data.error) { - throw data.error; - } - if (data.detail) { - throw data.detail; - } - if (data.status) { - if (data.digest) { - let downloadProgress = 0; - if (data.completed) { - downloadProgress = Math.round((data.completed / data.total) * 1000) / 10; - } else { - downloadProgress = 100; - } - modelDownloadStatus[opts.modelName] = {pullProgress: downloadProgress, digest: data.digest}; + if (data.error) { + throw data.error; + } + if (data.detail) { + throw data.detail; + } + if (data.status) { + if (data.digest) { + let downloadProgress = 0; + if (data.completed) { + downloadProgress = Math.round((data.completed / data.total) * 1000) / 10; + } else { + 
downloadProgress = 100; + } + modelDownloadStatus[opts.modelName] = { + pullProgress: downloadProgress, + digest: data.digest + }; + } } - } } } } catch (error) { - console.log('Failed to read from data stream', error); - throw error; + console.log(error); + opts.callback({ success: false, error, modelName: opts.modelName }); } } - opts.callback({success: true, modelName: opts.modelName}); - } catch (error) { - console.error(error); - opts.callback({success:false, error, modelName: opts.modelName}); + opts.callback({ success: true, modelName: opts.modelName }); } + }; - - }; - - const pullModelHandler = async() => { - if(modelDownloadStatus[modelTag]){ - toast.error("Model already in queue for downloading."); + const pullModelHandler = async () => { + if (modelDownloadStatus[modelTag]) { + toast.error('Model already in queue for downloading.'); return; } - if(Object.keys(modelDownloadStatus).length === 3){ + if (Object.keys(modelDownloadStatus).length === 3) { toast.error('Maximum of 3 models can be downloading simultaneously. Please try again later'); return; } + modelTransferring = true; - modelDownloadQueue.push({modelName: modelTag},async (data:{modelName: string; success: boolean; error?: Error}) => { - const {modelName} = data; - // Remove the downloaded model - delete modelDownloadStatus[modelName]; + modelDownloadQueue.push( + { modelName: modelTag }, + async (data: { modelName: string; success: boolean; error?: Error }) => { + const { modelName } = data; + // Remove the downloaded model + delete modelDownloadStatus[modelName]; - if(!data.success){ - toast.error(`There was some issue in downloading the model ${modelName}`); - return; + console.log(data); + + if (!data.success) { + toast.error(data.error); + return; + } + + toast.success(`Model ${modelName} was successfully downloaded`); + models.set(await getModels()); } - - toast.success(`Model ${modelName} was successfully downloaded`); - models.set(await getModels()); - }); + ); modelTag = ''; - modelTransferring = false; - } - - - const calculateSHA256 = async (file) => { - console.log(file); - // Create a FileReader to read the file asynchronously - const reader = new FileReader(); - - // Define a promise to handle the file reading - const readFile = new Promise((resolve, reject) => { - reader.onload = () => resolve(reader.result); - reader.onerror = reject; - }); - - // Read the file as an ArrayBuffer - reader.readAsArrayBuffer(file); - - try { - // Wait for the FileReader to finish reading the file - const buffer = await readFile; - - // Convert the ArrayBuffer to a Uint8Array - const uint8Array = new Uint8Array(buffer); - - // Calculate the SHA-256 hash using Web Crypto API - const hashBuffer = await crypto.subtle.digest('SHA-256', uint8Array); - - // Convert the hash to a hexadecimal string - const hashArray = Array.from(new Uint8Array(hashBuffer)); - const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, '0')).join(''); - - return `sha256:${hashHex}`; - } catch (error) { - console.error('Error calculating SHA-256 hash:', error); - throw error; - } + modelTransferring = false; }; const uploadModelHandler = async () => { @@ -1190,35 +1164,23 @@ {#if Object.keys(modelDownloadStatus).length > 0} -
-		{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
-		{/each}
-			Model Name
-			Download progress
-			{modelName}
+	{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
+		{modelName}
-			{ payload.pullProgress ?? 0}%
+			class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
+			style="width: {Math.max(15, payload.pullProgress ?? 0)}%"
+		>
+			{payload.pullProgress ?? 0}%
+		{payload.digest}
-			{payload.digest}
-	{/if}
+	{/each}
+	{/if}
diff --git a/src/lib/utils/index.ts b/src/lib/utils/index.ts index 2029604e..d9f6fd7d 100644 --- a/src/lib/utils/index.ts +++ b/src/lib/utils/index.ts @@ -127,3 +127,38 @@ export const findWordIndices = (text) => { return matches; }; + +export const calculateSHA256 = async (file) => { + console.log(file); + // Create a FileReader to read the file asynchronously + const reader = new FileReader(); + + // Define a promise to handle the file reading + const readFile = new Promise((resolve, reject) => { + reader.onload = () => resolve(reader.result); + reader.onerror = reject; + }); + + // Read the file as an ArrayBuffer + reader.readAsArrayBuffer(file); + + try { + // Wait for the FileReader to finish reading the file + const buffer = await readFile; + + // Convert the ArrayBuffer to a Uint8Array + const uint8Array = new Uint8Array(buffer); + + // Calculate the SHA-256 hash using Web Crypto API + const hashBuffer = await crypto.subtle.digest('SHA-256', uint8Array); + + // Convert the hash to a hexadecimal string + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, '0')).join(''); + + return `sha256:${hashHex}`; + } catch (error) { + console.error('Error calculating SHA-256 hash:', error); + throw error; + } +}; From 84f0cb41bb8cac00ac42106a350d7bd56842bd6d Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sat, 6 Jan 2024 13:23:08 -0800 Subject: [PATCH 06/14] fix: restore status toast --- src/lib/components/chat/SettingsModal.svelte | 50 +++++++++++--------- 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index e0f04d82..108e631c 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -295,11 +295,16 @@ digest: data.digest }; } + } else { + toast.success(data.status); } } } } catch (error) { console.log(error); + if (typeof error !== 'string') { + error = error.message; + } opts.callback({ success: false, error, modelName: opts.modelName }); } } @@ -330,11 +335,10 @@ if (!data.success) { toast.error(data.error); - return; + } else { + toast.success(`Model ${modelName} was successfully downloaded`); + models.set(await getModels()); } - - toast.success(`Model ${modelName} was successfully downloaded`); - models.set(await getModels()); } ); @@ -1155,32 +1159,34 @@
 	To access the available model names for downloading, click here.
-	{#if Object.keys(modelDownloadStatus).length > 0}
-		{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
-			{modelName}
-			{payload.pullProgress ?? 0}%
-			{payload.digest}
+	{#if Object.keys(modelDownloadStatus).length > 0}
+		{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
+			{modelName}
+			{payload.pullProgress ?? 0}%
+			{payload.digest}
-		{/each}
-	{/if}
+		{/each}
+	{/if}
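
The result object that travels from pullModelHandlerProcessor to the queue's completion callback is only implied by the patches above; written out as illustrative TypeScript (the interface itself is not part of the codebase), together with the string-normalization step patch 06 adds before the error toast:

    // Shape passed to opts.callback and received by the push-callback
    // (field names from the patches; the interface is illustrative).
    interface PullResult {
        modelName: string;
        success: boolean;
        error?: unknown;
    }

    // Patch 06's normalization: toasts expect a string, so Error instances
    // are reduced to their message before being reported.
    function normalizeError(error: unknown): string {
        return typeof error === 'string' ? error : (error as Error).message;
    }

    function reportResult(result: PullResult) {
        if (!result.success) {
            console.error(normalizeError(result.error));
        } else {
            console.log(`Model ${result.modelName} was successfully downloaded`);
        }
    }
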
From cf23c231346b4543092f37367c48e8e5b9fb0141 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sat, 6 Jan 2024 13:24:11 -0800 Subject: [PATCH 07/14] fix --- src/lib/components/chat/SettingsModal.svelte | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index 108e631c..59d406c1 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -294,9 +294,9 @@ pullProgress: downloadProgress, digest: data.digest }; + } else { + toast.success(data.status); } - } else { - toast.success(data.status); } } } From e7d8d4937455e684ea4b60b9553be300dbc9f44f Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sat, 6 Jan 2024 13:27:02 -0800 Subject: [PATCH 08/14] fix: restore download notification --- src/lib/components/chat/SettingsModal.svelte | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index 59d406c1..d82b640b 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -337,6 +337,12 @@ toast.error(data.error); } else { toast.success(`Model ${modelName} was successfully downloaded`); + + const notification = new Notification(`Ollama`, { + body: `Model '${modelName}' has been successfully downloaded.`, + icon: '/favicon.png' + }); + models.set(await getModels()); } } From 560dfd80dd9f48d241b2a1a31a421999315b5365 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sat, 6 Jan 2024 13:30:43 -0800 Subject: [PATCH 09/14] refac: explicit var name for ambiguous term payload --- src/lib/components/chat/SettingsModal.svelte | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte index d82b640b..047125ff 100644 --- a/src/lib/components/chat/SettingsModal.svelte +++ b/src/lib/components/chat/SettingsModal.svelte @@ -1174,18 +1174,21 @@
 	{#if Object.keys(modelDownloadStatus).length > 0}
-		{#each Object.entries(modelDownloadStatus) as [modelName, payload]}
+		{#each Object.keys(modelDownloadStatus) as model}
-			{modelName}
+			{modelDownloadStatus[model].modelName}
-			{payload.pullProgress ?? 0}%
+			{modelDownloadStatus[model].pullProgress ?? 0}%
-			{payload.digest}
+			{modelDownloadStatus[model].digest}
From 1482119af7b799fa0e105bb0327a2737b07413fb Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Sat, 6 Jan 2024 13:31:35 -0800
Subject: [PATCH 10/14] fix

---
 src/lib/components/chat/SettingsModal.svelte | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte
index 047125ff..c5a430e8 100644
--- a/src/lib/components/chat/SettingsModal.svelte
+++ b/src/lib/components/chat/SettingsModal.svelte
@@ -1176,7 +1176,7 @@ {#if Object.keys(modelDownloadStatus).length > 0}
 	{#each Object.keys(modelDownloadStatus) as model}
-		{modelDownloadStatus[model].modelName}
+		{model}
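
After patches 09 and 10, the settings template iterates the status map by key and reads each entry's fields directly. A small TypeScript sketch of the resulting shape and access pattern follows; the explicit Record annotation and the sample entry are illustrative, not copied from the file.

    // Per-model download state kept by the settings modal
    // (field names from the patches; the explicit type is an assumption).
    type ModelDownloadStatus = Record<string, { pullProgress?: number; digest?: string }>;

    const modelDownloadStatus: ModelDownloadStatus = {
        'mistral:latest': { pullProgress: 42.5, digest: 'sha256:abc123' } // sample values
    };

    // Iterate by key, as the refactored template does.
    for (const model of Object.keys(modelDownloadStatus)) {
        const { pullProgress, digest } = modelDownloadStatus[model];
        console.log(`${model}: ${pullProgress ?? 0}% ${digest ?? ''}`);
    }
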
Date: Sat, 6 Jan 2024 13:35:25 -0800
Subject: [PATCH 11/14] refac

---
 src/lib/components/chat/SettingsModal.svelte | 82 ++++++++++----------
 1 file changed, 41 insertions(+), 41 deletions(-)

diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte
index c5a430e8..7c287827 100644
--- a/src/lib/components/chat/SettingsModal.svelte
+++ b/src/lib/components/chat/SettingsModal.svelte
@@ -1,12 +1,11 @@