diff --git a/backend/apps/web/routers/utils.py b/backend/apps/web/routers/utils.py
index 588ef5a6..b2ca409a 100644
--- a/backend/apps/web/routers/utils.py
+++ b/backend/apps/web/routers/utils.py
@@ -12,6 +12,8 @@ import json
 from utils.misc import calculate_sha256
 
 from config import OLLAMA_API_BASE_URL
+from constants import ERROR_MESSAGES
+
 
 router = APIRouter()
 
@@ -40,10 +42,7 @@ def parse_huggingface_url(hf_url):
         return None
 
 
-async def download_file_stream(url,
-                               file_path,
-                               file_name,
-                               chunk_size=1024 * 1024):
+async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
     done = False
 
     if os.path.exists(file_path):
@@ -57,8 +56,7 @@ async def download_file_stream(url,
 
     async with aiohttp.ClientSession(timeout=timeout) as session:
         async with session.get(url, headers=headers) as response:
-            total_size = int(response.headers.get("content-length",
-                                                  0)) + current_size
+            total_size = int(response.headers.get("content-length", 0)) + current_size
 
             with open(file_path, "ab+") as file:
                 async for data in response.content.iter_chunked(chunk_size):
@@ -91,7 +89,9 @@ async def download_file_stream(url,
 
 
 @router.get("/download")
-async def download(url: str, ):
+async def download(
+    url: str,
+):
     # url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
 
     file_name = parse_huggingface_url(url)
@@ -108,25 +108,30 @@ async def download(url: str, ):
 
 
 @router.post("/upload")
-async def upload(file: UploadFile = File(...)):
-    os.makedirs("./uploads", exist_ok=True)
-    file_path = os.path.join("./uploads", file.filename)
+def upload(file: UploadFile = File(...)):
+    os.makedirs("./data/uploads", exist_ok=True)
+    file_path = os.path.join("./data/uploads", file.filename)
 
-    async def file_write_stream():
-        total = 0
-        total_size = file.size
+    # Save file in chunks
+    with open(file_path, "wb+") as f:
+        for chunk in file.file:
+            f.write(chunk)
+
+    def file_process_stream():
+        total_size = os.path.getsize(file_path)
         chunk_size = 1024 * 1024
-
-        done = False
         try:
-            with open(file_path, "wb+") as f:
-                while True:
-                    chunk = file.file.read(chunk_size)
+            with open(file_path, "rb") as f:
+                total = 0
+                done = False
+
+                while not done:
+                    chunk = f.read(chunk_size)
                     if not chunk:
-                        break
-                    f.write(chunk)
+                        done = True
+                        continue
+
                     total += len(chunk)
-                    done = total_size == total
                     progress = round((total / total_size) * 100, 2)
 
                     res = {
@@ -134,7 +139,6 @@ async def upload(file: UploadFile = File(...)):
                         "total": total_size,
                         "completed": total,
                     }
-
                     yield f"data: {json.dumps(res)}\n\n"
 
                     if done:
@@ -152,14 +156,14 @@ async def upload(file: UploadFile = File(...)):
                             "name": file.filename,
                         }
                         os.remove(file_path)
-
                         yield f"data: {json.dumps(res)}\n\n"
                     else:
-                        raise "Ollama: Could not create blob, Please try again."
+                        raise Exception(
+                            "Ollama: Could not create blob, Please try again."
+                        )
         except Exception as e:
             res = {"error": str(e)}
             yield f"data: {json.dumps(res)}\n\n"
 
-    return StreamingResponse(file_write_stream(),
-                             media_type="text/event-stream")
+    return StreamingResponse(file_process_stream(), media_type="text/event-stream")
 
diff --git a/src/lib/components/chat/SettingsModal.svelte b/src/lib/components/chat/SettingsModal.svelte
index 03a02532..7f3bf0ca 100644
--- a/src/lib/components/chat/SettingsModal.svelte
+++ b/src/lib/components/chat/SettingsModal.svelte
@@ -363,7 +363,6 @@
 			fileResponse = await fetch(`${WEBUI_API_BASE_URL}/utils/upload`, {
 				method: 'POST',
 				headers: {
-					...($settings.authHeader && { Authorization: $settings.authHeader }),
 					...($user && { Authorization: `Bearer ${localStorage.token}` })
 				},
 				body: formData
@@ -375,7 +374,6 @@
 			fileResponse = await fetch(`${WEBUI_API_BASE_URL}/utils/download?url=${modelFileUrl}`, {
 				method: 'GET',
 				headers: {
-					...($settings.authHeader && { Authorization: $settings.authHeader }),
 					...($user && { Authorization: `Bearer ${localStorage.token}` })
 				}
 			}).catch((error) => {
@@ -1388,9 +1386,10 @@
 					{#if uploadProgress !== null}