forked from open-webui/open-webui
feat: improved backend error message
parent c87a80f9a5
commit d10e367664

1 changed file with 23 additions and 6 deletions
@@ -59,9 +59,11 @@ def proxy(path):
     else:
         pass
 
+    r = None
+
     try:
         # Make a request to the target server
-        target_response = requests.request(
+        r = requests.request(
             method=request.method,
             url=target_url,
             data=data,
@@ -69,22 +71,37 @@ def proxy(path):
             stream=True,  # Enable streaming for server-sent events
         )
 
-        target_response.raise_for_status()
+        r.raise_for_status()
 
         # Proxy the target server's response to the client
         def generate():
-            for chunk in target_response.iter_content(chunk_size=8192):
+            for chunk in r.iter_content(chunk_size=8192):
                 yield chunk
 
-        response = Response(generate(), status=target_response.status_code)
+        response = Response(generate(), status=r.status_code)
 
         # Copy headers from the target server's response to the client's response
-        for key, value in target_response.headers.items():
+        for key, value in r.headers.items():
             response.headers[key] = value
 
         return response
     except Exception as e:
-        return jsonify({"detail": "Server Connection Error", "message": str(e)}), 400
+        error_detail = "Ollama WebUI: Server Connection Error"
+        if r != None:
+            res = r.json()
+            if "error" in res:
+                error_detail = f"Ollama: {res['error']}"
+            print(res)
+
+        return (
+            jsonify(
+                {
+                    "detail": error_detail,
+                    "message": str(e),
+                }
+            ),
+            400,
+        )
 
 
 if __name__ == "__main__":
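
For context, below is a minimal, self-contained sketch of the error-handling pattern this commit introduces: the response handle `r` is bound to None before the try block so the except clause can distinguish "the upstream never answered" (connection refused, DNS failure) from "the upstream answered with an error body", and Ollama's own error text is surfaced to the client when present. The Flask app, route, and TARGET_BASE URL here are illustrative stand-ins rather than the repository's actual module layout, and the sketch uses the idiomatic `is not None` where the diff writes `!= None`.

import requests
from flask import Flask, Response, jsonify, request

app = Flask(__name__)
TARGET_BASE = "http://localhost:11434"  # assumed Ollama default; adjust as needed


@app.route("/<path:path>", methods=["GET", "POST"])  # illustrative route
def proxy(path):
    target_url = f"{TARGET_BASE}/{path}"
    data = request.get_data() or None

    # Bind r before the try block so the except clause can still
    # inspect the upstream response after raise_for_status() fires.
    r = None
    try:
        r = requests.request(
            method=request.method,
            url=target_url,
            data=data,
            stream=True,  # enable streaming for server-sent events
        )
        r.raise_for_status()

        # Stream the upstream body through to the client unchanged.
        def generate():
            for chunk in r.iter_content(chunk_size=8192):
                yield chunk

        response = Response(generate(), status=r.status_code)
        for key, value in r.headers.items():
            response.headers[key] = value
        return response
    except Exception as e:
        # Generic message for the no-response case.
        error_detail = "Ollama WebUI: Server Connection Error"
        if r is not None:
            # The upstream did answer: surface Ollama's own error text
            # when its body is a JSON object with an "error" field.
            res = r.json()
            if "error" in res:
                error_detail = f"Ollama: {res['error']}"
        return jsonify({"detail": error_detail, "message": str(e)}), 400

One caveat the sketch inherits from the diff: r.json() will itself raise if the upstream error body is not valid JSON, so a non-JSON error response would escape this handler.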