forked from open-webui/open-webui
main #3
2 changed files with 62 additions and 2 deletions
@@ -16,6 +16,7 @@ from fastapi.concurrency import run_in_threadpool
 from pydantic import BaseModel, ConfigDict

 import os
+import copy
 import random
 import requests
 import json
@@ -1082,6 +1083,42 @@ def upload_model(file: UploadFile = File(...), url_idx: Optional[int] = None):
     return StreamingResponse(file_process_stream(), media_type="text/event-stream")


+# async def upload_model(file: UploadFile = File(), url_idx: Optional[int] = None):
+#     if url_idx == None:
+#         url_idx = 0
+#     url = app.state.OLLAMA_BASE_URLS[url_idx]
+
+#     file_location = os.path.join(UPLOAD_DIR, file.filename)
+#     total_size = file.size
+
+#     async def file_upload_generator(file):
+#         print(file)
+#         try:
+#             async with aiofiles.open(file_location, "wb") as f:
+#                 completed_size = 0
+#                 while True:
+#                     chunk = await file.read(1024 * 1024)
+#                     if not chunk:
+#                         break
+#                     await f.write(chunk)
+#                     completed_size += len(chunk)
+#                     progress = (completed_size / total_size) * 100
+
+#                     print(progress)
+#                     yield f'data: {json.dumps({"status": "uploading", "percentage": progress, "total": total_size, "completed": completed_size, "done": False})}\n'
+#         except Exception as e:
+#             print(e)
+#             yield f"data: {json.dumps({'status': 'error', 'message': str(e)})}\n"
+#         finally:
+#             await file.close()
+#             print("done")
+#             yield f'data: {json.dumps({"status": "completed", "percentage": 100, "total": total_size, "completed": completed_size, "done": True})}\n'
+
+#     return StreamingResponse(
+#         file_upload_generator(copy.deepcopy(file)), media_type="text/event-stream"
+#     )
+
+
 @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
 async def deprecated_proxy(path: str, request: Request, user=Depends(get_current_user)):
     url = app.state.OLLAMA_BASE_URLS[0]
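Review note on the commented-out handler above: it would likely not run if simply uncommented. `aiofiles` is never imported in this module, `file.size` can be `None` when the client omits `Content-Length`, and `copy.deepcopy` on a Starlette `UploadFile` (which wraps a live spooled temp file) is not a safe way to hand the upload to the generator. A self-contained sketch of the same idea, streaming upload progress as server-sent events, is below; the route path and `UPLOAD_DIR` value are placeholders for this sketch, not taken from the PR:

```python
# Minimal sketch of an SSE progress-reporting upload endpoint (FastAPI).
# Route path and UPLOAD_DIR are assumptions, not open-webui's real config.
import json
import os

from fastapi import FastAPI, File, UploadFile
from fastapi.responses import StreamingResponse

app = FastAPI()
UPLOAD_DIR = "./uploads"  # placeholder


@app.post("/models/upload")
async def upload_model(file: UploadFile = File(...)):
    os.makedirs(UPLOAD_DIR, exist_ok=True)
    file_location = os.path.join(UPLOAD_DIR, file.filename)

    async def event_stream():
        total_size = file.size or 0  # None if the client omits Content-Length
        completed_size = 0
        try:
            # Plain open() briefly blocks the event loop per chunk; the
            # commented-out code's aiofiles.open() would avoid that.
            with open(file_location, "wb") as f:
                while chunk := await file.read(1024 * 1024):
                    f.write(chunk)
                    completed_size += len(chunk)
                    progress = (completed_size / total_size) * 100 if total_size else 0
                    # SSE events are terminated by a blank line, hence "\n\n".
                    yield f'data: {json.dumps({"status": "uploading", "percentage": progress, "total": total_size, "completed": completed_size, "done": False})}\n\n'
            yield f'data: {json.dumps({"status": "completed", "percentage": 100, "total": total_size, "completed": completed_size, "done": True})}\n\n'
        except Exception as e:
            yield f'data: {json.dumps({"status": "error", "message": str(e)})}\n\n'
        finally:
            await file.close()

    return StreamingResponse(event_stream(), media_type="text/event-stream")
```

Note the payload key: this sketch emits `percentage`, while the frontend hunk below keys off `data.progress`, so whichever endpoint goes live would need its key to match what the client expects.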
@@ -66,7 +66,9 @@
 	let modelFileUrl = '';
 	let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop "</s>"\nPARAMETER stop "USER:"\nPARAMETER stop "ASSISTANT:"`;
 	let modelFileDigest = '';

 	let uploadProgress = null;
+	let uploadMessage = '';
+
 	let deleteModelTag = '';
@@ -186,7 +188,6 @@

 	const uploadModelHandler = async () => {
 		modelTransferring = true;
-		uploadProgress = 0;

 		let uploaded = false;
 		let fileResponse = null;
@@ -196,6 +197,8 @@
 		const file = modelInputFile ? modelInputFile[0] : null;

 		if (file) {
+			uploadMessage = 'Uploading...';
+
 			fileResponse = await uploadModel(localStorage.token, file, selectedOllamaUrlIdx).catch(
 				(error) => {
 					toast.error(error);
@@ -204,6 +207,7 @@
 				);
 			}
 		} else {
+			uploadProgress = 0;
 			fileResponse = await downloadModel(
 				localStorage.token,
 				modelFileUrl,
@@ -232,6 +236,9 @@
 					let data = JSON.parse(line.replace(/^data: /, ''));

 					if (data.progress) {
+						if (uploadMessage) {
+							uploadMessage = '';
+						}
 						uploadProgress = data.progress;
 					}

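The parse above (`line.replace(/^data: /, '')` then `JSON.parse`) shows the wire format: newline-delimited `data: {json}` events. For reference, a minimal Python client sketch that consumes the same stream; the URL and file name are assumptions:

```python
# Minimal client sketch: read "data: {...}" progress events from the
# streaming upload endpoint. URL and file path are placeholders.
import json

import requests

with open("model.gguf", "rb") as f:
    response = requests.post(
        "http://localhost:8080/models/upload",
        files={"file": f},
        stream=True,  # iterate the SSE body as it arrives instead of buffering
    )
    for line in response.iter_lines(decode_unicode=True):
        if not line:
            continue  # blank lines separate SSE events
        data = json.loads(line.removeprefix("data: "))
        # The Svelte code keys off data.progress; the commented-out backend
        # sketch emits "percentage" -- check both here.
        pct = data.get("progress", data.get("percentage"))
        if pct is not None:
            print(f"upload: {pct:.1f}%")
        if data.get("done"):
            print("done")
```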
@@ -816,7 +823,23 @@
 					>
 				</div>

-				{#if uploadProgress !== null}
+				{#if uploadMessage}
+					<div class="mt-2">
+						<div class=" mb-2 text-xs">{$i18n.t('Upload Progress')}</div>
+
+						<div class="w-full rounded-full dark:bg-gray-800">
+							<div
+								class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
+								style="width: 100%"
+							>
+								{uploadMessage}
+							</div>
+						</div>
+						<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
+							{modelFileDigest}
+						</div>
+					</div>
+				{:else if uploadProgress !== null}
 					<div class="mt-2">
 						<div class=" mb-2 text-xs">{$i18n.t('Upload Progress')}</div>