From 25987fe3c901ef386d2d84c96f878396565dce10 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Wed, 13 Dec 2023 17:37:29 -0800 Subject: [PATCH 1/3] feat: better error handling for ollama reverse proxy --- backend/apps/ollama/main.py | 39 +++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py index 53daefac..98048ca5 100644 --- a/backend/apps/ollama/main.py +++ b/backend/apps/ollama/main.py @@ -59,27 +59,32 @@ def proxy(path): else: pass - # Make a request to the target server - target_response = requests.request( - method=request.method, - url=target_url, - data=data, - headers=headers, - stream=True, # Enable streaming for server-sent events - ) + try: + # Make a request to the target server + target_response = requests.request( + method=request.method, + url=target_url, + data=data, + headers=headers, + stream=True, # Enable streaming for server-sent events + ) - # Proxy the target server's response to the client - def generate(): - for chunk in target_response.iter_content(chunk_size=8192): - yield chunk + target_response.raise_for_status() - response = Response(generate(), status=target_response.status_code) + # Proxy the target server's response to the client + def generate(): + for chunk in target_response.iter_content(chunk_size=8192): + yield chunk - # Copy headers from the target server's response to the client's response - for key, value in target_response.headers.items(): - response.headers[key] = value + response = Response(generate(), status=target_response.status_code) - return response + # Copy headers from the target server's response to the client's response + for key, value in target_response.headers.items(): + response.headers[key] = value + + return response + except Exception as e: + return jsonify({"detail": "Server Connection Error", "message": str(e)}), 400 if __name__ == "__main__": From 7db3f198653cdf9ad2b7d8065818d34542031b54 Mon 
Sep 17 00:00:00 2001 From: Burak Kakillioglu Date: Sun, 10 Dec 2023 17:27:26 -0500 Subject: [PATCH 2/3] Update TROUBLESHOOTING.md Suggests changing the Ollama Server URL to `/ollama/api`. --- TROUBLESHOOTING.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md index 679eb657..3377a898 100644 --- a/TROUBLESHOOTING.md +++ b/TROUBLESHOOTING.md @@ -20,9 +20,10 @@ This configuration allows Ollama to accept connections from any source. Ensure that the Ollama URL is correctly formatted in the application settings. Follow these steps: +- If your Ollama runs in a different host than Web UI make sure Ollama host address is provided when running Web UI container via `OLLAMA_API_BASE_URL` environment variable. [(e.g. OLLAMA_API_BASE_URL=http://192.168.1.1/api)](https://github.com/ollama-webui/ollama-webui#accessing-external-ollama-on-a-different-server) - Go to "Settings" within the Ollama WebUI. - Navigate to the "General" section. -- Verify that the Ollama URL is in the following format: `http://localhost:11434/api`. +- Verify that the Ollama Server URL is set to: `/ollama/api`. It is crucial to include the `/api` at the end of the URL to ensure that the Ollama Web UI can communicate with the server. From 43823d2521701c99fc5d039cc4afa00889e8bd22 Mon Sep 17 00:00:00 2001 From: Burak Kakillioglu Date: Wed, 13 Dec 2023 22:34:36 -0500 Subject: [PATCH 3/3] Update TROUBLESHOOTING.md --- TROUBLESHOOTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/TROUBLESHOOTING.md b/TROUBLESHOOTING.md index 3377a898..d0d8ce2d 100644 --- a/TROUBLESHOOTING.md +++ b/TROUBLESHOOTING.md @@ -20,7 +20,7 @@ This configuration allows Ollama to accept connections from any source. Ensure that the Ollama URL is correctly formatted in the application settings. 
Follow these steps: -- If your Ollama runs in a different host than Web UI make sure Ollama host address is provided when running Web UI container via `OLLAMA_API_BASE_URL` environment variable. [(e.g. OLLAMA_API_BASE_URL=http://192.168.1.1/api)](https://github.com/ollama-webui/ollama-webui#accessing-external-ollama-on-a-different-server) +- If your Ollama runs in a different host than Web UI make sure Ollama host address is provided when running Web UI container via `OLLAMA_API_BASE_URL` environment variable. [(e.g. OLLAMA_API_BASE_URL=http://192.168.1.1:11434/api)](https://github.com/ollama-webui/ollama-webui#accessing-external-ollama-on-a-different-server) - Go to "Settings" within the Ollama WebUI. - Navigate to the "General" section. - Verify that the Ollama Server URL is set to: `/ollama/api`.