diff --git a/src/lib/apis/ollama/index.ts b/src/lib/apis/ollama/index.ts
index 2047fede..ba04e060 100644
--- a/src/lib/apis/ollama/index.ts
+++ b/src/lib/apis/ollama/index.ts
@@ -271,7 +271,7 @@ export const generateChatCompletion = async (token: string = '', body: object) =
 	return [res, controller];
 };
 
-export const cancelChatCompletion = async (token: string = '', requestId: string) => {
+export const cancelOllamaRequest = async (token: string = '', requestId: string) => {
 	let error = null;
 
 	const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
diff --git a/src/lib/components/chat/Settings/Models.svelte b/src/lib/components/chat/Settings/Models.svelte
index 2d0b1d7a..d961b72b 100644
--- a/src/lib/components/chat/Settings/Models.svelte
+++ b/src/lib/components/chat/Settings/Models.svelte
@@ -7,7 +7,8 @@
 		deleteModel,
 		getOllamaUrls,
 		getOllamaVersion,
-		pullModel
+		pullModel,
+		cancelOllamaRequest
 	} from '$lib/apis/ollama';
 	import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
 	import { WEBUI_NAME, models, user } from '$lib/stores';
@@ -364,12 +365,24 @@
 				for (const line of lines) {
 					if (line !== '') {
 						let data = JSON.parse(line);
+						console.log(data);
 						if (data.error) {
 							throw data.error;
 						}
 						if (data.detail) {
 							throw data.detail;
 						}
+
+						if (data.id) {
+							modelDownloadStatus[opts.modelName] = {
+								...modelDownloadStatus[opts.modelName],
+								requestId: data.id,
+								reader,
+								done: false
+							};
+							console.log(data);
+						}
+
 						if (data.status) {
 							if (data.digest) {
 								let downloadProgress = 0;
@@ -379,12 +392,17 @@
 									downloadProgress = 100;
 								}
 								modelDownloadStatus[opts.modelName] = {
-									reader,
+									...modelDownloadStatus[opts.modelName],
 									pullProgress: downloadProgress,
 									digest: data.digest
 								};
 							} else {
 								toast.success(data.status);
+
+								modelDownloadStatus[opts.modelName] = {
+									...modelDownloadStatus[opts.modelName],
+									done: data.status === 'success'
+								};
 							}
 						}
 					}
@@ -397,7 +415,14 @@
 				opts.callback({ success: false, error, modelName: opts.modelName });
 			}
 		}
-		opts.callback({ success: true, modelName: opts.modelName });
+
+		console.log(modelDownloadStatus[opts.modelName]);
+
+		if (modelDownloadStatus[opts.modelName].done) {
+			opts.callback({ success: true, modelName: opts.modelName });
+		} else {
+			opts.callback({ success: false, error: 'Download canceled', modelName: opts.modelName });
+		}
 	}
 };
 
@@ -467,10 +492,13 @@
 		ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
 		liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
 	});
-	const deleteModelPull = async (model: string) => {
-		const { reader } = modelDownloadStatus[model];
+
+	const cancelModelPullHandler = async (model: string) => {
+		const { reader, requestId } = modelDownloadStatus[model];
 		if (reader) {
 			await reader.cancel();
+
+			await cancelOllamaRequest(localStorage.token, requestId);
 			delete modelDownloadStatus[model];
 			await deleteModel(localStorage.token, model);
 			toast.success(`${model} download has been canceled`);
@@ -606,46 +634,58 @@
 	{#if Object.keys(modelDownloadStatus).length > 0}
 		{#each Object.keys(modelDownloadStatus) as model}
-			<div class="flex flex-col">
-				<div class="font-medium mb-1">{model}</div>
-				<div
-					class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
-					style="width: {modelDownloadStatus[model].pullProgress ?? 0}%"
-				>
-					{modelDownloadStatus[model].pullProgress ?? 0}%
-				</div>
-				<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
-					{modelDownloadStatus[model].digest}
-				</div>
-			</div>
+			{#if 'pullProgress' in modelDownloadStatus[model]}
+				<div class="flex flex-col">
+					<div class="font-medium mb-1">{model}</div>
+					<div class="flex flex-row justify-between space-x-4 pr-2">
+						<div class="flex-1">
+							<div
+								class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
+								style="width: {modelDownloadStatus[model].pullProgress ?? 0}%"
+							>
+								{modelDownloadStatus[model].pullProgress ?? 0}%
+							</div>
+						</div>
+
+						<button
+							class="text-gray-800 dark:text-gray-100"
+							on:click={() => {
+								cancelModelPullHandler(model);
+							}}
+						>
+							<!-- cancel (X) icon -->
+						</button>
+					</div>
+
+					{#if 'digest' in modelDownloadStatus[model]}
+						<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
+							{modelDownloadStatus[model].digest}
+						</div>
+					{/if}
+				</div>
+			{/if}
 		{/each}
 	{/if}
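// --- Illustrative sketch (editorial, not part of the diff) ---
// The hunks above turn each modelDownloadStatus entry into a small record that
// outlives a single progress chunk. The field names below come straight from
// the diff; the type and helper themselves are assumptions for illustration.
import { cancelOllamaRequest, deleteModel } from '$lib/apis/ollama';

type ModelDownloadEntry = {
	requestId?: string; // captured from the first streamed chunk's `data.id`
	reader?: ReadableStreamDefaultReader<Uint8Array>; // kept so the pull stream can be canceled
	pullProgress?: number; // percent complete while layers download
	digest?: string; // layer digest reported by Ollama
	done?: boolean; // true once a chunk reports status === 'success'
};

// Mirrors cancelModelPullHandler: stop consuming the NDJSON stream locally,
// then cancel server-side (the backend exposes OLLAMA_API_BASE_URL/cancel/{requestId},
// per the first hunk) and delete the partially pulled model.
async function cancelPull(token: string, model: string, entry: ModelDownloadEntry) {
	await entry.reader?.cancel();
	if (entry.requestId) {
		await cancelOllamaRequest(token, entry.requestId);
	}
	await deleteModel(token, model);
}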
diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte
index 0fca312a..417ddccd 100644
--- a/src/routes/(app)/+page.svelte
+++ b/src/routes/(app)/+page.svelte
@@ -19,7 +19,7 @@
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
-	import { generateChatCompletion, cancelChatCompletion, generateTitle } from '$lib/apis/ollama';
+	import { generateChatCompletion, cancelOllamaRequest, generateTitle } from '$lib/apis/ollama';
 	import {
 		addTagById,
 		createNewChat,
@@ -104,7 +104,7 @@
 	const initNewChat = async () => {
 		if (currentRequestId !== null) {
-			await cancelChatCompletion(localStorage.token, currentRequestId);
+			await cancelOllamaRequest(localStorage.token, currentRequestId);
 			currentRequestId = null;
 		}
 
 		window.history.replaceState(history.state, '', `/`);
@@ -372,7 +372,7 @@
 				if (stopResponseFlag) {
 					controller.abort('User: Stop Response');
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}
 
 				currentRequestId = null;
diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte
index faa15b4b..836fc90a 100644
--- a/src/routes/(app)/c/[id]/+page.svelte
+++ b/src/routes/(app)/c/[id]/+page.svelte
@@ -19,7 +19,7 @@
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
 
-	import { generateChatCompletion, generateTitle, cancelChatCompletion } from '$lib/apis/ollama';
+	import { generateChatCompletion, generateTitle, cancelOllamaRequest } from '$lib/apis/ollama';
 	import {
 		addTagById,
 		createNewChat,
@@ -382,7 +382,7 @@
 				if (stopResponseFlag) {
 					controller.abort('User: Stop Response');
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}
 
 				currentRequestId = null;
@@ -843,7 +843,7 @@
 			shareEnabled={messages.length > 0}
 			initNewChat={async () => {
 				if (currentRequestId !== null) {
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 
 					currentRequestId = null;
 				}
diff --git a/src/routes/(app)/playground/+page.svelte b/src/routes/(app)/playground/+page.svelte
index 737eff22..d8e9320d 100644
--- a/src/routes/(app)/playground/+page.svelte
+++ b/src/routes/(app)/playground/+page.svelte
@@ -13,7 +13,7 @@
 	} from '$lib/constants';
 	import { WEBUI_NAME, config, user, models, settings } from '$lib/stores';
 
-	import { cancelChatCompletion, generateChatCompletion } from '$lib/apis/ollama';
+	import { cancelOllamaRequest, generateChatCompletion } from '$lib/apis/ollama';
 	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
 	import { splitStream } from '$lib/utils';
@@ -52,7 +52,7 @@
 	// const cancelHandler = async () => {
 	// 	if (currentRequestId) {
-	// 		const res = await cancelChatCompletion(localStorage.token, currentRequestId);
+	// 		const res = await cancelOllamaRequest(localStorage.token, currentRequestId);
 	// 		currentRequestId = null;
 	// 		loading = false;
 	// 	}
@@ -95,7 +95,7 @@
 			const { value, done } = await reader.read();
 			if (done || stopResponseFlag) {
 				if (stopResponseFlag) {
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}
 
 				currentRequestId = null;
@@ -181,7 +181,7 @@
 			const { value, done } = await reader.read();
 			if (done || stopResponseFlag) {
 				if (stopResponseFlag) {
-					await cancelChatCompletion(localStorage.token, currentRequestId);
+					await cancelOllamaRequest(localStorage.token, currentRequestId);
 				}
 
 				currentRequestId = null;
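// --- Illustrative sketch (editorial, not part of the diff) ---
// The stop-response pattern the chat and playground pages above now share:
// abort the client-side fetch, then cancel the in-flight request server-side
// so Ollama stops generating. `cancelOllamaRequest` is the renamed helper
// from $lib/apis/ollama; the AbortController comes from generateChatCompletion,
// which returns [res, controller] per the first hunk. The wrapper function
// itself is an assumption for illustration.
import { cancelOllamaRequest } from '$lib/apis/ollama';

let currentRequestId: string | null = null;

async function stopResponse(controller: AbortController, token: string) {
	controller.abort('User: Stop Response'); // kill the client-side stream
	if (currentRequestId !== null) {
		// also cancel on the backend so the model stops generating server-side
		await cancelOllamaRequest(token, currentRequestId);
		currentRequestId = null;
	}
}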