forked from open-webui/open-webui
feat: backend reverse proxy
This commit is contained in:
parent 611b10a79d
commit 6a9bef755b
13 changed files with 179 additions and 13 deletions
55 backend/apps/ollama/main.py Normal file
@@ -0,0 +1,55 @@
from flask import Flask, request, Response
from flask_cors import CORS


import requests
import json


from config import OLLAMA_API_BASE_URL

app = Flask(__name__)
CORS(
    app
)  # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains

# Define the target server URL
TARGET_SERVER_URL = OLLAMA_API_BASE_URL


@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"])
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
def proxy(path):
    # Combine the base URL of the target server with the requested path
    target_url = f"{TARGET_SERVER_URL}/{path}"
    print(target_url)

    # Get data from the original request
    data = request.get_data()
    headers = dict(request.headers)

    # Make a request to the target server
    target_response = requests.request(
        method=request.method,
        url=target_url,
        data=data,
        headers=headers,
        stream=True,  # Enable streaming for server-sent events
    )

    # Proxy the target server's response to the client
    def generate():
        for chunk in target_response.iter_content(chunk_size=8192):
            yield chunk

    response = Response(generate(), status=target_response.status_code)

    # Copy headers from the target server's response to the client's response
    for key, value in target_response.headers.items():
        response.headers[key] = value

    return response


if __name__ == "__main__":
    app.run(debug=True)
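For reference, a minimal client sketch (not part of this commit) that exercises the proxy end to end, assuming the Flask app is started with python main.py on its default port 5000 and that OLLAMA_API_BASE_URL points at a running Ollama instance; the model name "llama2" is illustrative only:

import json
import requests

# Stream a generation through the reverse proxy; the proxy forwards
# POST /api/generate to OLLAMA_API_BASE_URL/api/generate and relays
# the newline-delimited JSON chunks back to this client.
with requests.post(
    "http://localhost:5000/api/generate",
    json={"model": "llama2", "prompt": "Hello"},
    stream=True,
) as r:
    for line in r.iter_lines():
        if line:
            print(json.loads(line).get("response", ""), end="", flush=True)

Note that the proxy copies every upstream response header to the client verbatim, including hop-by-hop headers such as Transfer-Encoding; whether those need to be filtered depends on the WSGI server used to serve the Flask app.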