open-webui/backend/main.py

from bs4 import BeautifulSoup
import json
import markdown
import time
import os
import sys
import requests

from fastapi import FastAPI, Request, Depends, status
from fastapi.staticfiles import StaticFiles
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware


from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.litellm.main import app as litellm_app, startup as litellm_app_startup
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app

from apps.web.main import app as webui_app

from pydantic import BaseModel
from typing import List

from utils.utils import get_admin_user
from apps.rag.utils import query_doc, query_collection, rag_template

from config import (
    WEBUI_NAME,
    ENV,
    VERSION,
    CHANGELOG,
    FRONTEND_BUILD_DIR,
    MODEL_FILTER_ENABLED,
    MODEL_FILTER_LIST,
)
from constants import ERROR_MESSAGES
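

# Static file handler for the built frontend: paths that 404 fall back to
# index.html so the SPA's client-side router can handle them.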
class SPAStaticFiles(StaticFiles):
    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code == 404:
                return await super().get_response("index.html", scope)
            else:
                raise ex


app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
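

# Run sub-app startup tasks with the main app (currently only LiteLLM's).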
@app.on_event("startup")
async def on_startup():
    await litellm_app_startup()

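
# Request middleware: when a chat completion POST carries a "docs" field,
# retrieve context from the referenced documents/collections and inject it
# into the last user message using the RAG prompt template.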
class RAGMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
        ):
            print(request.url.path)

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            # Example: Add a new key-value pair or modify existing ones
            # data["modified"] = True  # Example modification
            if "docs" in data:
                docs = data["docs"]
                print(docs)

                last_user_message_idx = None
                for i in range(len(data["messages"]) - 1, -1, -1):
                    if data["messages"][i]["role"] == "user":
                        last_user_message_idx = i
                        break

                user_message = data["messages"][last_user_message_idx]

                if isinstance(user_message["content"], list):
                    # Handle list content input
                    content_type = "list"
                    query = ""
                    for content_item in user_message["content"]:
                        if content_item["type"] == "text":
                            query = content_item["text"]
                            break
                elif isinstance(user_message["content"], str):
                    # Handle text content input
                    content_type = "text"
                    query = user_message["content"]
                else:
                    # Fallback in case the input does not match expected types
                    content_type = None
                    query = ""

                relevant_contexts = []

                for doc in docs:
                    context = None

                    try:
                        if doc["type"] == "collection":
                            context = query_collection(
                                collection_names=doc["collection_names"],
                                query=query,
                                k=rag_app.state.TOP_K,
                                embedding_function=rag_app.state.sentence_transformer_ef,
                            )
                        else:
                            context = query_doc(
                                collection_name=doc["collection_name"],
                                query=query,
                                k=rag_app.state.TOP_K,
                                embedding_function=rag_app.state.sentence_transformer_ef,
                            )
                    except Exception as e:
                        print(e)
                        context = None

                    relevant_contexts.append(context)

                context_string = ""
                for context in relevant_contexts:
                    if context:
                        context_string += " ".join(context["documents"][0]) + "\n"

                ra_content = rag_template(
                    template=rag_app.state.RAG_TEMPLATE,
                    context=context_string,
                    query=query,
                )

                if content_type == "list":
                    new_content = []
                    for content_item in user_message["content"]:
                        if content_item["type"] == "text":
                            # Update the text item's content with ra_content
                            new_content.append({"type": "text", "text": ra_content})
                        else:
                            # Keep other types of content as they are
                            new_content.append(content_item)
                    new_user_message = {**user_message, "content": new_content}
                else:
                    new_user_message = {
                        **user_message,
                        "content": ra_content,
                    }

                data["messages"][last_user_message_idx] = new_user_message
                del data["docs"]

                print(data["messages"])

            modified_body_bytes = json.dumps(data).encode("utf-8")

            # Create a new request with the modified body
            scope = request.scope
            scope["body"] = modified_body_bytes
            request = Request(scope, receive=lambda: self._receive(modified_body_bytes))

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(RAGMiddleware)

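
# Lightweight timing middleware: stamps each response with an
# X-Process-Time header (whole seconds).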
@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response

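
# Mount the feature sub-apps (each a separate FastAPI application) under their own path prefixes.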
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)

app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)


@app.get("/api/config")
async def get_app_config():
2024-02-23 09:30:26 +01:00
2024-02-22 03:12:01 +01:00
return {
"status": True,
2024-02-24 02:12:19 +01:00
"name": WEBUI_NAME,
2024-02-23 09:30:26 +01:00
"version": VERSION,
2024-02-22 03:12:01 +01:00
"images": images_app.state.ENABLED,
"default_models": webui_app.state.DEFAULT_MODELS,
"default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
}
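# Admin-only endpoints for reading and updating the model filter (whitelist) configuration.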
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    return {
        "enabled": app.state.MODEL_FILTER_ENABLED,
        "models": app.state.MODEL_FILTER_LIST,
    }


class ModelFilterConfigForm(BaseModel):
    enabled: bool
    models: List[str]


@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    app.state.MODEL_FILTER_ENABLED = form_data.enabled
    app.state.MODEL_FILTER_LIST = form_data.models

    # Keep the mounted Ollama and OpenAI sub-apps in sync with the new filter settings.
    ollama_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
    ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    openai_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
    openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    return {
        "enabled": app.state.MODEL_FILTER_ENABLED,
        "models": app.state.MODEL_FILTER_LIST,
    }


@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/changelog")
async def get_app_changelog():
    return CHANGELOG


@app.get("/api/version/updates")
async def get_app_latest_release_version():
try:
response = requests.get(
f"https://api.github.com/repos/open-webui/open-webui/releases/latest"
)
response.raise_for_status()
latest_version = response.json()["tag_name"]
2024-02-25 20:55:15 +01:00
return {"current": VERSION, "latest": latest_version[1:]}
2024-02-25 20:26:58 +01:00
except Exception as e:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
2024-02-25 20:55:15 +01:00
detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
2024-02-25 20:26:58 +01:00
)
2024-02-24 02:12:19 +01:00
app.mount("/static", StaticFiles(directory="static"), name="static")
app.mount("/cache", StaticFiles(directory="data/cache"), name="cache")

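
# Serve the compiled frontend build at the root as a catch-all; this mount is
# registered last so it does not shadow the API routes above.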
app.mount(
    "/",
    SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
    name="spa-static-files",
)