Merge pull request 'main' (!2) from open-webui/open-webui:main into main
Some checks failed
Release / release (push) Failing after 6s
Python CI / Format Backend (latest, 3.12.2) (push) Failing after 7s
Bun CI / Format & Build Frontend (push) Successful in 1m9s
Create and publish a Docker image / build-and-push-image (push) Successful in 31m21s

Reviewed-on: #2
Tibo De Peuter 2024-03-20 21:29:11 +01:00
commit 50a2f02906
84 changed files with 7498 additions and 987 deletions

View file

@@ -98,13 +98,14 @@ def merge_models_lists(model_lists):
     merged_models = {}
 
     for idx, model_list in enumerate(model_lists):
-        for model in model_list:
-            digest = model["digest"]
-            if digest not in merged_models:
-                model["urls"] = [idx]
-                merged_models[digest] = model
-            else:
-                merged_models[digest]["urls"].append(idx)
+        if model_list is not None:
+            for model in model_list:
+                digest = model["digest"]
+                if digest not in merged_models:
+                    model["urls"] = [idx]
+                    merged_models[digest] = model
+                else:
+                    merged_models[digest]["urls"].append(idx)
 
     return list(merged_models.values())
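
The new guard matters because fetch_url returns None for an unreachable Ollama backend, so the merge would otherwise crash iterating a missing list. A minimal sketch of the merged behavior, with made-up digests:

model_lists = [
    [{"digest": "abc", "name": "llama2"}],
    None,  # backend 1 was unreachable
    [{"digest": "abc", "name": "llama2"}],
]

merged_models = {}
for idx, model_list in enumerate(model_lists):
    if model_list is not None:  # skip backends that failed to respond
        for model in model_list:
            digest = model["digest"]
            if digest not in merged_models:
                model["urls"] = [idx]
                merged_models[digest] = model
            else:
                merged_models[digest]["urls"].append(idx)

print(list(merged_models.values()))
# [{'digest': 'abc', 'name': 'llama2', 'urls': [0, 2]}]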
@@ -116,11 +117,10 @@ async def get_all_models():
     print("get_all_models")
 
     tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
     responses = await asyncio.gather(*tasks)
-    responses = list(filter(lambda x: x is not None, responses))
 
     models = {
         "models": merge_models_lists(
-            map(lambda response: response["models"], responses)
+            map(lambda response: response["models"] if response else None, responses)
         )
     }
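
Dropping the filter call here is deliberate rather than cosmetic: merge_models_lists records backend indices positionally, so removing a failed response would shift every later backend's idx and point urls at the wrong entries of app.state.OLLAMA_BASE_URLS. Passing None placeholders through keeps the positions aligned, as a small sketch shows:

# Hypothetical: three backends, the middle one down.
responses = [{"models": []}, None, {"models": []}]

# Old behavior: filtering shifts backend 2 into slot 1.
filtered = [r for r in responses if r is not None]

# New behavior: keep a placeholder so enumerate() still yields the right idx.
model_lists = [r["models"] if r else None for r in responses]
print(model_lists)  # [[], None, []]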

View file

@@ -111,6 +111,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):
         headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEYS[idx]}"
         headers["Content-Type"] = "application/json"
 
+        r = None
         try:
             r = requests.post(
                 url=f"{app.state.OPENAI_API_BASE_URLS[idx]}/audio/speech",
@@ -143,7 +144,9 @@ async def speech(request: Request, user=Depends(get_verified_user)):
                 except:
                     error_detail = f"External: {e}"
 
-            raise HTTPException(status_code=r.status_code, detail=error_detail)
+            raise HTTPException(
+                status_code=r.status_code if r else 500, detail=error_detail
+            )
 
     except ValueError:
         raise HTTPException(status_code=401, detail=ERROR_MESSAGES.OPENAI_NOT_FOUND)
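
Initializing r before the try block means a connection-level failure, where requests.post raises before r is ever assigned, no longer produces an UnboundLocalError on top of the real error; the handler now degrades to a plain 500. A reduced sketch of the pattern (the function name and URL are illustrative):

import requests
from fastapi import HTTPException

def call_upstream(url: str) -> bytes:
    r = None
    try:
        r = requests.post(url, timeout=5)
        r.raise_for_status()
        return r.content
    except Exception as e:
        # r stays None if the connection itself failed. Note that requests
        # makes error-status responses falsy, so `r.status_code if r else 500`
        # also reports 500 when the upstream answered with a 4xx/5xx body.
        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=f"External: {e}",
        )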
@@ -165,14 +168,15 @@ def merge_models_lists(model_lists):
     merged_list = []
 
     for idx, models in enumerate(model_lists):
-        merged_list.extend(
-            [
-                {**model, "urlIdx": idx}
-                for model in models
-                if "api.openai.com" not in app.state.OPENAI_API_BASE_URLS[idx]
-                or "gpt" in model["id"]
-            ]
-        )
+        if models is not None and "error" not in models:
+            merged_list.extend(
+                [
+                    {**model, "urlIdx": idx}
+                    for model in models
+                    if "api.openai.com" not in app.state.OPENAI_API_BASE_URLS[idx]
+                    or "gpt" in model["id"]
+                ]
+            )
 
     return merged_list
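
The guard handles both failure shapes fetch_url can hand back: None for a dead connection and an {"error": ...} dict for an upstream refusal. The membership test works whether models is a dict (key lookup) or a list of model dicts (element lookup). Roughly, with hypothetical inputs:

# Slot 0 is api.openai.com; slot 1 errored out.
app_urls = ["https://api.openai.com/v1", "http://localhost:8080/v1"]
model_lists = [
    [{"id": "gpt-4"}, {"id": "whisper-1"}],
    {"error": "connection refused"},
]

merged_list = []
for idx, models in enumerate(model_lists):
    if models is not None and "error" not in models:
        merged_list.extend(
            {**model, "urlIdx": idx}
            for model in models
            if "api.openai.com" not in app_urls[idx] or "gpt" in model["id"]
        )

print(merged_list)  # [{'id': 'gpt-4', 'urlIdx': 0}]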
@@ -187,15 +191,24 @@ async def get_all_models():
         fetch_url(f"{url}/models", app.state.OPENAI_API_KEYS[idx])
         for idx, url in enumerate(app.state.OPENAI_API_BASE_URLS)
     ]
     responses = await asyncio.gather(*tasks)
-    responses = list(
-        filter(lambda x: x is not None and "error" not in x, responses)
-    )
     models = {
         "data": merge_models_lists(
-            list(map(lambda response: response["data"], responses))
+            list(
+                map(
+                    lambda response: (
+                        response["data"]
+                        if response and "data" in response
+                        else None
+                    ),
+                    responses,
+                )
+            )
         )
     }
     print(models)
     app.state.MODELS = {model["id"]: model for model in models["data"]}
     return models
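
Same index-preservation story as the Ollama change above: the filter is gone, so a failed backend leaves a None placeholder and urlIdx stays truthful, while the lambda guards the "data" lookup for error responses:

# Hypothetical responses: the second endpoint returned an error payload.
responses = [{"data": [{"id": "gpt-4"}]}, {"error": "401"}]

data_lists = list(
    map(
        lambda r: r["data"] if r and "data" in r else None,
        responses,
    )
)
print(data_lists)  # [[{'id': 'gpt-4'}], None]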
@@ -218,6 +231,9 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_current_use
         return models
     else:
         url = app.state.OPENAI_API_BASE_URLS[url_idx]
+
+        r = None
+
         try:
             r = requests.request(method="GET", url=f"{url}/models")
             r.raise_for_status()
@@ -290,6 +306,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
     headers["Authorization"] = f"Bearer {key}"
     headers["Content-Type"] = "application/json"
 
+    r = None
+
     try:
         r = requests.request(
             method=request.method,
@@ -322,4 +340,6 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
             except:
                 error_detail = f"External: {e}"
 
-        raise HTTPException(status_code=r.status_code, detail=error_detail)
+        raise HTTPException(
+            status_code=r.status_code if r else 500, detail=error_detail
+        )

View file

@@ -400,9 +400,9 @@ def get_loader(filename: str, file_content_type: str, file_path: str):
     elif file_ext in known_source_ext or (
         file_content_type and file_content_type.find("text/") >= 0
     ):
-        loader = TextLoader(file_path)
+        loader = TextLoader(file_path, autodetect_encoding=True)
     else:
-        loader = TextLoader(file_path)
+        loader = TextLoader(file_path, autodetect_encoding=True)
         known_type = False
 
     return loader, known_type
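
autodetect_encoding=True lets TextLoader fall back to charset detection instead of dying with a UnicodeDecodeError on non-UTF-8 uploads. A minimal sketch, assuming the langchain_community import path (older LangChain releases expose the same class under langchain.document_loaders):

from langchain_community.document_loaders import TextLoader

# A Latin-1 file that a strict UTF-8 read would reject.
with open("/tmp/latin1.txt", "wb") as f:
    f.write("café au lait".encode("latin-1"))

loader = TextLoader("/tmp/latin1.txt", autodetect_encoding=True)
docs = loader.load()  # detects the encoding rather than raising
print(docs[0].page_content)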

View file

@@ -91,9 +91,8 @@ def query_collection(
 def rag_template(template: str, context: str, query: str):
-    template = re.sub(r"\[context\]", context, template)
-    template = re.sub(r"\[query\]", query, template)
+    template = template.replace("[context]", context)
+    template = template.replace("[query]", query)
     return template
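
Switching from re.sub to str.replace is a correctness fix, not a style one: re.sub parses its replacement argument for escape sequences, so retrieved context containing backslashes can silently mangle the prompt or raise re.error. A quick demonstration:

import re

template = "Use [context] to answer [query]"
context = r"C:\new"  # backslashes are common in retrieved documents

# re.sub interprets the replacement: r"\n" becomes a newline here, and
# unknown escapes such as r"\d" raise re.error outright.
print(re.sub(r"\[context\]", context, template))

# str.replace inserts the text verbatim.
print(template.replace("[context]", context))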

View file

@@ -75,7 +75,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
             hashed = calculate_sha256(file)
             file.seek(0)
 
-            url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
+            url = f"{OLLAMA_BASE_URLS[0]}/api/blobs/sha256:{hashed}"
             response = requests.post(url, data=file)
 
             if response.ok:
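
Ollama serves its blob upload at POST /api/blobs/sha256:<digest>; the old URL omitted the /api prefix and hit a nonexistent route. Reduced to its essentials, hashing inline instead of via calculate_sha256 and assuming a local file and backend:

import hashlib
import requests

OLLAMA_BASE_URLS = ["http://localhost:11434"]  # hypothetical single backend

with open("model.gguf", "rb") as file:
    hashed = hashlib.sha256(file.read()).hexdigest()
    file.seek(0)
    # The digest-addressed blob endpoint lives under /api/.
    url = f"{OLLAMA_BASE_URLS[0]}/api/blobs/sha256:{hashed}"
    response = requests.post(url, data=file)
print(response.ok)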

View file

@@ -250,8 +250,10 @@ OPENAI_API_BASE_URLS = (
     OPENAI_API_BASE_URLS if OPENAI_API_BASE_URLS != "" else OPENAI_API_BASE_URL
 )
 
-OPENAI_API_BASE_URLS = [url.strip() for url in OPENAI_API_BASE_URLS.split(";")]
+OPENAI_API_BASE_URLS = [
+    url.strip() if url != "" else "https://api.openai.com/v1"
+    for url in OPENAI_API_BASE_URLS.split(";")
+]
 
 ####################################
 # WEBUI
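
The config change above gives empty segments of the semicolon-separated OPENAI_API_BASE_URLS value a sensible default instead of an empty string that later requests would choke on:

# Hypothetical env value: second slot left blank.
OPENAI_API_BASE_URLS = "http://localhost:8080/v1;"

urls = [
    url.strip() if url != "" else "https://api.openai.com/v1"
    for url in OPENAI_API_BASE_URLS.split(";")
]
print(urls)  # ['http://localhost:8080/v1', 'https://api.openai.com/v1']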