forked from open-webui/open-webui

Merge branch 'open-webui:main' into main

Commit c5e9ceeb06

5 changed files with 93 additions and 22 deletions
CHANGELOG.md:

@@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [0.1.114] - 2024-03-20
+
+### Added
+
+- **🔗 Webhook Integration**: Now you can subscribe to new user sign-up events via webhook. Simply navigate to the admin panel > admin settings > webhook URL.
+- **🛡️ Enhanced Model Filtering**: Alongside Ollama and OpenAI proxy model whitelisting, we've added model filtering functionality for the LiteLLM proxy.
+- **🌍 Expanded Language Support**: Spanish, Catalan, and Vietnamese languages are now available, with improvements made to others.
+
+### Fixed
+
+- **🔧 Input Field Spelling**: Resolved issue with spelling mistakes in input fields.
+- **🖊️ Light Mode Styling**: Fixed styling issue with light mode in document adding.
+
+### Changed
+
+- **🔄 Language Sorting**: Languages are now sorted alphabetically by their code for improved organization.
+
 ## [0.1.113] - 2024-03-18

 ### Added
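The webhook entry above only points at the admin setting (admin panel > admin settings > webhook URL). As a rough sketch of the receiving side, here is a minimal FastAPI endpoint you could point that URL at; the route path and the payload fields are illustrative assumptions, not Open WebUI's documented schema.

```python
# Hypothetical sign-up webhook receiver; run with e.g. `uvicorn receiver:app --port 8000`.
# The route path and payload shape below are assumptions for illustration only.
from fastapi import FastAPI, Request

app = FastAPI()


@app.post("/hooks/open-webui-signup")
async def handle_signup(request: Request):
    payload = await request.json()  # assumes the event arrives as a JSON body
    print("new sign-up event:", payload)  # e.g. forward to chat, log, or provision resources
    return {"ok": True}
```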
LiteLLM proxy app (Python):

@@ -1,11 +1,23 @@
 from litellm.proxy.proxy_server import ProxyConfig, initialize
 from litellm.proxy.proxy_server import app

-from fastapi import FastAPI, Request, Depends, status
+from fastapi import FastAPI, Request, Depends, status, Response
 from fastapi.responses import JSONResponse

+from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
+from starlette.responses import StreamingResponse
+import json
+
 from utils.utils import get_http_authorization_cred, get_current_user
 from config import ENV

+from config import (
+    MODEL_FILTER_ENABLED,
+    MODEL_FILTER_LIST,
+)
+

 proxy_config = ProxyConfig()
@@ -26,16 +38,58 @@ async def on_startup():
     await startup()


+app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
+app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
+
+
 @app.middleware("http")
 async def auth_middleware(request: Request, call_next):
     auth_header = request.headers.get("Authorization", "")
+    request.state.user = None

-    if ENV != "dev":
-        try:
-            user = get_current_user(get_http_authorization_cred(auth_header))
-            print(user)
-        except Exception as e:
-            return JSONResponse(status_code=400, content={"detail": str(e)})
+    try:
+        user = get_current_user(get_http_authorization_cred(auth_header))
+        print(user)
+        request.state.user = user
+    except Exception as e:
+        return JSONResponse(status_code=400, content={"detail": str(e)})

     response = await call_next(request)
     return response
+
+
+class ModifyModelsResponseMiddleware(BaseHTTPMiddleware):
+    async def dispatch(
+        self, request: Request, call_next: RequestResponseEndpoint
+    ) -> Response:
+
+        response = await call_next(request)
+        user = request.state.user
+
+        if "/models" in request.url.path:
+            if isinstance(response, StreamingResponse):
+                # Read the content of the streaming response
+                body = b""
+                async for chunk in response.body_iterator:
+                    body += chunk
+
+                data = json.loads(body.decode("utf-8"))
+
+                if app.state.MODEL_FILTER_ENABLED:
+                    if user and user.role == "user":
+                        data["data"] = list(
+                            filter(
+                                lambda model: model["id"]
+                                in app.state.MODEL_FILTER_LIST,
+                                data["data"],
+                            )
+                        )
+
+                # Modified Flag
+                data["modified"] = True
+                return JSONResponse(content=data)
+
+        return response
+
+
+app.add_middleware(ModifyModelsResponseMiddleware)
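Taken together, the hunks above make two changes: `auth_middleware` now stores the resolved user on `request.state.user`, and the new `ModifyModelsResponseMiddleware` rewrites `/models` responses for accounts with the `user` role so that only whitelisted model IDs remain. The rewrite step can be reproduced in isolation; this is a standalone sketch with invented model IDs, not the project's test code.

```python
# Standalone sketch of the filtering ModifyModelsResponseMiddleware applies to a
# /models payload when MODEL_FILTER_ENABLED is set and the requester's role is "user".
# Model IDs are invented for illustration.
model_filter_list = ["llama2", "mistral"]  # stand-in for app.state.MODEL_FILTER_LIST

upstream = {
    "data": [
        {"id": "llama2", "object": "model"},
        {"id": "mistral", "object": "model"},
        {"id": "gpt-4", "object": "model"},  # not whitelisted, dropped for "user" accounts
    ]
}

filtered = {
    "data": [m for m in upstream["data"] if m["id"] in model_filter_list],
    "modified": True,  # mirrors the "Modified Flag" the middleware sets on rewritten responses
}

assert [m["id"] for m in filtered["data"]] == ["llama2", "mistral"]
print(filtered)
```

Admins and requests without a resolved user keep the full upstream list, since the filter only runs when `user.role == "user"`.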
config.py:

@@ -298,7 +298,7 @@ USER_PERMISSIONS_CHAT_DELETION = (
 USER_PERMISSIONS = {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}}


-MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", False)
+MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", "False").lower() == "true"
 MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
 MODEL_FILTER_LIST = [model.strip() for model in MODEL_FILTER_LIST.split(";")]
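The replaced line fixes a common environment-variable pitfall: `os.environ.get("MODEL_FILTER_ENABLED", False)` returns the raw string whenever the variable is set, so even `MODEL_FILTER_ENABLED=false` was truthy. The new comparison lowercases the value and enables filtering only for "true". A quick sketch of how both settings parse (example values invented):

```python
# Sketch of the env-var parsing above; the values assigned here are invented examples.
import os

os.environ["MODEL_FILTER_ENABLED"] = "True"
os.environ["MODEL_FILTER_LIST"] = "llama2; mistral;gemma"

MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", "False").lower() == "true"
MODEL_FILTER_LIST = [m.strip() for m in os.environ.get("MODEL_FILTER_LIST", "").split(";")]

print(MODEL_FILTER_ENABLED)  # True  (any casing of "true" enables filtering)
print(MODEL_FILTER_LIST)     # ['llama2', 'mistral', 'gemma']
```

Note that an unset `MODEL_FILTER_LIST` parses to `[""]` rather than `[]`, since `"".split(";")` returns a single empty string; that behavior is unchanged by this diff.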
package.json:

@@ -1,6 +1,6 @@
 {
     "name": "open-webui",
-    "version": "0.1.113",
+    "version": "0.1.114",
     "private": true,
     "scripts": {
         "dev": "vite dev --host",
i18n language list (JSON):

@@ -4,8 +4,8 @@
     "title": "English (US)"
   },
   {
-    "code": "fa-IR",
-    "title": "فارسی (Farsi)"
+    "code": "ca-ES",
+    "title": "Catalan"
   },
   {
     "code": "de-DE",

@@ -16,13 +16,17 @@
     "title": "Spanish"
   },
   {
-    "code": "fr-FR",
-    "title": "French (France)"
+    "code": "fa-IR",
+    "title": "فارسی (Farsi)"
   },
   {
     "code": "fr-CA",
     "title": "French (Canada)"
   },
+  {
+    "code": "fr-FR",
+    "title": "French (France)"
+  },
   {
     "code": "ru-RU",
     "title": "Russian (Russia)"

@@ -32,19 +36,15 @@
     "title": "Ukrainian"
   },
   {
-    "code": "zh-TW",
-    "title": "Chinese (Traditional)"
+    "code": "vi-VN",
+    "title": "Tiếng Việt"
   },
   {
     "code": "zh-CN",
     "title": "Chinese (Simplified)"
   },
   {
-    "code": "vi-VN",
-    "title": "Tiếng Việt"
-  },
-  {
-    "code": "ca-ES",
-    "title": "Catalan"
+    "code": "zh-TW",
+    "title": "Chinese (Traditional)"
   }
 ]
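The reordering above implements the changelog's "Language Sorting" entry: entries are now listed alphabetically by code (ca-ES before de-DE, vi-VN before zh-CN, and so on). A small Python sketch of that ordering rule, using a shortened, illustrative subset of the list:

```python
# Illustrative sketch of the sorting rule: language entries ordered by their code.
languages = [
    {"code": "zh-TW", "title": "Chinese (Traditional)"},
    {"code": "ca-ES", "title": "Catalan"},
    {"code": "vi-VN", "title": "Tiếng Việt"},
    {"code": "fa-IR", "title": "فارسی (Farsi)"},
]

languages.sort(key=lambda lang: lang["code"])
print([lang["code"] for lang in languages])  # ['ca-ES', 'fa-IR', 'vi-VN', 'zh-TW']
```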