diff --git a/src/lib/components/chat/Settings/Models.svelte b/src/lib/components/chat/Settings/Models.svelte
index a04939e1..0a23e837 100644
--- a/src/lib/components/chat/Settings/Models.svelte
+++ b/src/lib/components/chat/Settings/Models.svelte
@@ -14,6 +14,7 @@
 	import { splitStream } from '$lib/utils';
 	import { onMount } from 'svelte';
 
 	import { addLiteLLMModel, deleteLiteLLMModel, getLiteLLMModelInfo } from '$lib/apis/litellm';
+	import Tooltip from '$lib/components/common/Tooltip.svelte';
 
 	export let getModels: Function;
@@ -37,6 +38,10 @@
 	let OLLAMA_URLS = [];
 	let selectedOllamaUrlIdx: string | null = null;
+
+	let updateModelId = null;
+	let updateProgress = null;
+
 	let showExperimentalOllama = false;
 	let ollamaVersion = '';
 	const MAX_PARALLEL_DOWNLOADS = 3;
@@ -61,6 +66,71 @@
 	let deleteModelTag = '';
 
+	const updateModelsHandler = async () => {
+		for (const model of $models.filter(
+			(m) =>
+				m.size != null &&
+				(selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))
+		)) {
+			console.log(model);
+
+			updateModelId = model.id;
+			const res = await pullModel(localStorage.token, model.id, selectedOllamaUrlIdx).catch(
+				(error) => {
+					toast.error(error);
+					return null;
+				}
+			);
+
+			if (res) {
+				const reader = res.body
+					.pipeThrough(new TextDecoderStream())
+					.pipeThrough(splitStream('\n'))
+					.getReader();
+
+				while (true) {
+					try {
+						const { value, done } = await reader.read();
+						if (done) break;
+
+						let lines = value.split('\n');
+
+						for (const line of lines) {
+							if (line !== '') {
+								let data = JSON.parse(line);
+
+								console.log(data);
+								if (data.error) {
+									throw data.error;
+								}
+								if (data.detail) {
+									throw data.detail;
+								}
+								if (data.status) {
+									if (data.digest) {
+										updateProgress = 0;
+										if (data.completed) {
+											updateProgress = Math.round((data.completed / data.total) * 1000) / 10;
+										} else {
+											updateProgress = 100;
+										}
+									} else {
+										toast.success(data.status);
+									}
+								}
+							}
+						}
+					} catch (error) {
+						console.log(error);
+					}
+				}
+			}
+		}
+
+		updateModelId = null;
+		updateProgress = null;
+	};
+
 	const pullModelHandler = async () => {
 		const sanitizedModelTag = modelTag.trim();
 		if (modelDownloadStatus[sanitizedModelTag]) {
@@ -379,7 +449,7 @@
 			return [];
 		});
 
-		if (OLLAMA_URLS.length > 1) {
+		if (OLLAMA_URLS.length > 0) {
 			selectedOllamaUrlIdx = 0;
 		}
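For reference, a minimal sketch (not part of the patch) of how updateModelsHandler above turns a streamed pull status object into the percentage shown next to the model name. The { status, digest, completed, total } shape mirrors what the handler reads from each NDJSON line; treating a digest without byte counts as 100% follows the same branch as the handler, and exact field availability is an assumption about Ollama's pull response.

interface PullStatus {
	status: string;
	digest?: string;
	completed?: number;
	total?: number;
}

// Returns a percentage with one decimal place, or null for statuses that carry no layer progress.
function progressFor(data: PullStatus): number | null {
	if (!data.digest) {
		return null; // e.g. "pulling manifest" or "success": surfaced as a toast, not a percentage
	}
	if (data.completed && data.total) {
		return Math.round((data.completed / data.total) * 1000) / 10; // 1234 / 10000 -> 12.3
	}
	return 100; // digest present but no byte counts: the handler treats this as complete
}

// Example: the value rendered as "(12.3%)" next to Updating "llama2:latest".
console.log(progressFor({ status: 'downloading', digest: 'sha256:abc', completed: 1234, total: 10000 }));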
@@ -394,18 +464,51 @@
 			Manage Ollama Models
 
-			{#if OLLAMA_URLS.length > 1}
-
-
+			{#if OLLAMA_URLS.length > 0}
+
+
+
+
+
+
+
+
+
+
+
+
+				{#if updateModelId}
+					Updating "{updateModelId}" {updateProgress ? `(${updateProgress}%)` : ''}
+				{/if}
 			{/if}
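The instance selector added above picks which Ollama URL is being managed, and updateModelsHandler only walks models that belong to it. A rough sketch of that selection rule, with assumed field shapes (size marks a locally pulled model, urls lists the instance indexes that reported it), matching the filter at the top of the handler:

interface OllamaModel {
	id: string;
	size?: number | null; // present only for models that are actually pulled locally
	urls?: number[]; // indexes into OLLAMA_URLS that reported this model
}

const modelsToUpdate = (models: OllamaModel[], selectedUrlIdx: number | null) =>
	models.filter(
		(m) =>
			m.size != null &&
			(selectedUrlIdx === null ? true : (m.urls ?? []).includes(selectedUrlIdx))
	);

// Example: with instance 0 selected, only the model reported by that instance is updated.
const picked = modelsToUpdate(
	[
		{ id: 'llama2:latest', size: 3_825_819_519, urls: [0] },
		{ id: 'mistral:latest', size: 4_109_865_159, urls: [1] }
	],
	0
);
console.log(picked.map((m) => m.id)); // ['llama2:latest']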
@@ -470,12 +573,14 @@
-
-				To access the available model names for downloading, click here.
+
+
+					To access the available model names for downloading, click here.
+
+
 			{#if Object.keys(modelDownloadStatus).length > 0}