forked from open-webui/open-webui
fix: error handling
parent e3fc97241d
commit be3ab88c88
2 changed files with 14 additions and 4 deletions

@@ -123,6 +123,7 @@ async def get_all_models():
             map(lambda response: response["models"], responses)
         )
     }
 
     app.state.MODELS = {model["model"]: model for model in models["models"]}
 
     return models
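
For orientation: the hunk above is the tail of get_all_models(), which folds the per-instance /api/tags responses into a single models payload and caches a name-to-model lookup on app.state.MODELS; the hunk itself appears to add only a blank line, and the behavioural change sits in the version endpoint below. A small standalone sketch of that merge shape, with a simplified stand-in for the app's merge_models_lists helper (the real helper is not shown in this diff, so the keep-first-occurrence behaviour here is an assumption):

    # Standalone sketch, not the app's code.
    def merge_models_lists(model_lists):
        # Assumption: keep the first occurrence of each model name across instances.
        merged = {}
        for models in model_lists:
            for model in models:
                merged.setdefault(model["model"], model)
        return list(merged.values())

    responses = [
        {"models": [{"model": "llama2:latest"}, {"model": "mistral:latest"}]},
        {"models": [{"model": "llama2:latest"}]},  # same model served by a second instance
    ]

    models = {"models": merge_models_lists(map(lambda r: r["models"], responses))}
    MODELS = {model["model"]: model for model in models["models"]}
    print(sorted(MODELS.keys()))  # ['llama2:latest', 'mistral:latest']
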
@@ -181,11 +182,19 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
         responses = await asyncio.gather(*tasks)
         responses = list(filter(lambda x: x is not None, responses))
 
-        lowest_version = min(
-            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
-        )
+        print(responses)
+
+        if len(responses) > 0:
+            lowest_version = min(
+                responses, key=lambda x: tuple(map(int, x["version"].split(".")))
+            )
 
-        return {"version": lowest_version["version"]}
+            return {"version": lowest_version["version"]}
+        else:
+            raise HTTPException(
+                status_code=500,
+                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
+            )
     else:
         url = app.state.OLLAMA_BASE_URLS[url_idx]
         try:
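
The substantive fix is in get_ollama_versions(): min() raises ValueError on an empty sequence, so if every configured Ollama base URL fails to respond, the old code blew up with an unhandled exception; the new len(responses) > 0 guard returns a clean HTTP 500 with ERROR_MESSAGES.OLLAMA_NOT_FOUND instead. A quick standalone sketch of the comparison the key function performs (the version strings are made-up examples, not real Ollama output):

    # Compare dotted versions numerically, so "0.1.9" sorts below "0.1.22"
    # (a plain string comparison would order them the other way).
    responses = [{"version": "0.1.22"}, {"version": "0.1.9"}]

    lowest_version = min(
        responses, key=lambda x: tuple(map(int, x["version"].split(".")))
    )
    print(lowest_version["version"])  # 0.1.9

    # Without the guard added in this commit, an empty list is fatal:
    #   min([], key=...)  ->  ValueError: min() arg is an empty sequence
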
@@ -52,3 +52,4 @@ class ERROR_MESSAGES(str, Enum):
 
     MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
     OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found"
+    OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"
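
On the constants side, the new entry follows the existing pattern: ERROR_MESSAGES mixes str into Enum, so plain-string members can be passed directly wherever a string is expected (e.g. as the detail of the HTTPException above), while the lambda entries are not enum members at all and stay callable as message factories. A trimmed-down sketch of that behaviour, limited to the members visible in this hunk:

    from enum import Enum

    class ERROR_MESSAGES(str, Enum):
        # Plain string assignments become str-valued enum members.
        OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"
        # Lambdas assigned in an Enum body are treated as methods, not members,
        # so they remain callable for parameterised messages.
        MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
        OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found"

    # The str mixin makes the member interchangeable with its value.
    assert ERROR_MESSAGES.OLLAMA_NOT_FOUND == "WebUI could not connect to Ollama"
    assert isinstance(ERROR_MESSAGES.OLLAMA_NOT_FOUND, str)
    print(ERROR_MESSAGES.MODEL_NOT_FOUND("llama2"))  # Model 'llama2' was not found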