forked from open-webui/open-webui
main #2
3 changed files with 31 additions and 23 deletions
@@ -98,6 +98,7 @@ def merge_models_lists(model_lists):
     merged_models = {}

     for idx, model_list in enumerate(model_lists):
+        if model_list is not None:
             for model in model_list:
                 digest = model["digest"]
                 if digest not in merged_models:
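A minimal standalone sketch of the guarded merge on the Ollama side, assuming the usual merge-by-digest body for the lines the hunk does not show (the sample model data is invented):

```python
# Sketch of the guarded merge: a None entry (an Ollama host that could not be
# reached) is skipped instead of raising "TypeError: 'NoneType' object is not iterable".
# The merge body past the lines shown in the hunk is illustrative, not copied.
def merge_models_lists(model_lists):
    merged_models = {}

    for idx, model_list in enumerate(model_lists):
        if model_list is not None:
            for model in model_list:
                digest = model["digest"]
                if digest not in merged_models:
                    merged_models[digest] = {**model, "urls": [idx]}
                else:
                    merged_models[digest]["urls"].append(idx)

    return list(merged_models.values())


# One healthy host, one unreachable host (None) -> one merged entry, no crash.
print(merge_models_lists([
    [{"name": "llama2:latest", "digest": "abc123"}],
    None,
]))
```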
@@ -116,11 +117,10 @@ async def get_all_models():
     print("get_all_models")
     tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
     responses = await asyncio.gather(*tasks)
-    responses = list(filter(lambda x: x is not None, responses))

     models = {
         "models": merge_models_lists(
-            map(lambda response: response["models"], responses)
+            map(lambda response: response["models"] if response else None, responses)
         )
     }
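Dropping the up-front `filter` and mapping failed responses to `None` instead appears to keep each entry at the position of its server, so `idx` inside `merge_models_lists` still lines up with `app.state.OLLAMA_BASE_URLS`. A small illustration of that alignment (URLs and model data are placeholders):

```python
# Sketch: None placeholders preserve index alignment with the configured URLs.
# Filtering them out first would shift later entries, so idx could point at the
# wrong base URL when recording which server serves which model.
OLLAMA_BASE_URLS = ["http://ollama-a:11434", "http://ollama-b:11434"]  # placeholders

responses = [None, {"models": [{"name": "llama2:latest", "digest": "abc123"}]}]  # host A down

model_lists = [r["models"] if r else None for r in responses]  # mirrors the new map(...)

for idx, model_list in enumerate(model_lists):
    if model_list is not None:
        for model in model_list:
            print(model["name"], "is served by", OLLAMA_BASE_URLS[idx])
```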
@@ -168,6 +168,7 @@ def merge_models_lists(model_lists):
     merged_list = []

     for idx, models in enumerate(model_lists):
+        if models is not None and "error" not in models:
             merged_list.extend(
                 [
                     {**model, "urlIdx": idx}
@@ -190,15 +191,20 @@ async def get_all_models():
         fetch_url(f"{url}/models", app.state.OPENAI_API_KEYS[idx])
         for idx, url in enumerate(app.state.OPENAI_API_BASE_URLS)
     ]

     responses = await asyncio.gather(*tasks)
-    responses = list(
-        filter(lambda x: x is not None and "error" not in x, responses)
-    )
     models = {
         "data": merge_models_lists(
-            list(map(lambda response: response["data"], responses))
+            list(
+                map(
+                    lambda response: response["data"] if response else None,
+                    responses,
+                )
+            )
         )
     }

+    print(models)
     app.state.MODELS = {model["id"]: model for model in models["data"]}

     return models
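On the OpenAI side the guard also has to cope with endpoints that answer but return an error payload, hence the extra `"error" not in models` check. A rough sketch exercising just that guard (the three sample inputs are invented):

```python
# Sketch: merge per-endpoint model lists, skipping failed requests (None) and
# error payloads, while tagging each model with the index of its endpoint.
def merge_models_lists(model_lists):
    merged_list = []

    for idx, models in enumerate(model_lists):
        if models is not None and "error" not in models:
            merged_list.extend([{**model, "urlIdx": idx} for model in models])

    return merged_list


print(merge_models_lists([
    [{"id": "gpt-4"}],             # healthy endpoint
    {"error": "invalid api key"},  # endpoint answered with an error body
    None,                          # request failed entirely
]))
# -> [{'id': 'gpt-4', 'urlIdx': 0}]
```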
@@ -250,8 +250,10 @@ OPENAI_API_BASE_URLS = (
     OPENAI_API_BASE_URLS if OPENAI_API_BASE_URLS != "" else OPENAI_API_BASE_URL
 )

-OPENAI_API_BASE_URLS = [url.strip() for url in OPENAI_API_BASE_URLS.split(";")]
+OPENAI_API_BASE_URLS = [
+    url.strip() if url != "" else "https://api.openai.com/v1"
+    for url in OPENAI_API_BASE_URLS.split(";")
+]

 ####################################
 # WEBUI
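With the config change, an empty entry in the semicolon-separated `OPENAI_API_BASE_URLS` value falls back to the official endpoint instead of becoming an empty string. A quick sketch of the parsing (the raw value is an example):

```python
# Sketch: parse a semicolon-separated base-URL setting, substituting the
# official OpenAI endpoint for any empty segment.
raw = "https://example-proxy.local/v1;"  # example value; the second segment is empty

OPENAI_API_BASE_URLS = [
    url.strip() if url != "" else "https://api.openai.com/v1"
    for url in raw.split(";")
]

print(OPENAI_API_BASE_URLS)
# ['https://example-proxy.local/v1', 'https://api.openai.com/v1']
```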