From e008738f30081e4697a1b3c3278e2e51d47539ad Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Sat, 23 Mar 2024 13:12:54 -0700
Subject: [PATCH] feat: cancel download from backend

---
 backend/apps/ollama/main.py | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py
index 6f56f3cf..2283774e 100644
--- a/backend/apps/ollama/main.py
+++ b/backend/apps/ollama/main.py
@@ -234,11 +234,26 @@ async def pull_model(
     def get_request():
         nonlocal url
         nonlocal r
+
+        request_id = str(uuid.uuid4())
         try:
+            REQUEST_POOL.append(request_id)
 
             def stream_content():
-                for chunk in r.iter_content(chunk_size=8192):
-                    yield chunk
+                try:
+                    yield json.dumps({"id": request_id, "done": False}) + "\n"
+
+                    for chunk in r.iter_content(chunk_size=8192):
+                        if request_id in REQUEST_POOL:
+                            yield chunk
+                        else:
+                            print("User: canceled request")
+                            break
+                finally:
+                    if hasattr(r, "close"):
+                        r.close()
+                    if request_id in REQUEST_POOL:
+                        REQUEST_POOL.remove(request_id)
 
             r = requests.request(
                 method="POST",
@@ -259,6 +274,7 @@ async def pull_model(
 
     try:
         return await run_in_threadpool(get_request)
+
     except Exception as e:
         print(e)
         error_detail = "Open WebUI: Server Connection Error"
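
For readers following the change: the patch only shows the producer side, where each pull registers a `request_id` in the shared `REQUEST_POOL` list, yields that id as the first NDJSON line, and keeps streaming chunks only while the id is still registered. Below is a minimal, self-contained sketch of the same cancel-by-id pattern. The `DELETE /api/cancel/{request_id}` route and the demo chunk generator are assumptions for illustration and are not part of this patch; only the `REQUEST_POOL` bookkeeping and the streaming flow come from it.

```python
# Sketch of the cancel-by-request-id pattern, assuming a hypothetical
# /api/cancel/{request_id} route and a stand-in generator in place of
# r.iter_content(...). Not the actual Open WebUI endpoint layout.
import json
import time
import uuid

from fastapi import FastAPI
from fastapi.responses import StreamingResponse

app = FastAPI()
REQUEST_POOL = []  # ids of in-flight downloads; removing an id signals cancellation


@app.post("/api/pull")
def pull_model():
    request_id = str(uuid.uuid4())
    REQUEST_POOL.append(request_id)

    def stream_content():
        try:
            # First line tells the client which id to use when cancelling.
            yield json.dumps({"id": request_id, "done": False}) + "\n"
            for i in range(100):  # stand-in for r.iter_content(chunk_size=8192)
                if request_id not in REQUEST_POOL:
                    break  # the id was removed, so stop streaming
                yield json.dumps({"progress": i, "done": False}) + "\n"
                time.sleep(0.1)
        finally:
            # Mirror the patch's cleanup so completed or failed pulls
            # do not leave stale ids in the pool.
            if request_id in REQUEST_POOL:
                REQUEST_POOL.remove(request_id)

    return StreamingResponse(stream_content(), media_type="application/x-ndjson")


@app.delete("/api/cancel/{request_id}")
def cancel_request(request_id: str):
    # Removing the id makes the streaming loop above stop at its next chunk.
    if request_id in REQUEST_POOL:
        REQUEST_POOL.remove(request_id)
    return {"status": True}
```

A client reads the first NDJSON line to learn the id, then calls the cancel route with it; on the next iteration the generator notices the id is gone, breaks out of the loop, and the `finally` block closes out the request.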