forked from open-webui/open-webui
Merge pull request #537 from CreatorGhost/fix-gpt-4-vision
Add max_tokens workaround for gpt-4-vision-preview model
commit 1abe5a5487
1 changed file with 29 additions and 20 deletions
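The change itself is small: in the proxy handler, any chat request whose model is gpt-4-vision-preview and that does not set max_tokens gets max_tokens = 4000 injected before the request is forwarded upstream. A minimal standalone sketch of that rewrite; the helper name and sample payload are illustrative and not part of the commit:

import json

def apply_vision_max_tokens_workaround(raw_body: bytes) -> bytes:
    # Mirror of the change below: decode, patch gpt-4-vision-preview requests, re-encode.
    body = json.loads(raw_body.decode("utf-8"))
    if body.get("model") == "gpt-4-vision-preview" and "max_tokens" not in body:
        body["max_tokens"] = 4000  # workaround until OpenAI fixes the issue with this model
    return json.dumps(body).encode("utf-8")

# A request without max_tokens gets the cap injected; other requests pass through unchanged.
raw = json.dumps({"model": "gpt-4-vision-preview", "messages": []}).encode("utf-8")
print(apply_vision_max_tokens_workaround(raw).decode())
# {"model": "gpt-4-vision-preview", "messages": [], "max_tokens": 4000}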
@@ -37,19 +37,16 @@ async def get_openai_url(user=Depends(get_current_user)):
     if user and user.role == "admin":
         return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
     else:
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


 @app.post("/url/update")
-async def update_openai_url(form_data: UrlUpdateForm,
-                            user=Depends(get_current_user)):
+async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_current_user)):
     if user and user.role == "admin":
         app.state.OPENAI_API_BASE_URL = form_data.url
         return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
     else:
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


 @app.get("/key")
@@ -57,19 +54,16 @@ async def get_openai_key(user=Depends(get_current_user)):
     if user and user.role == "admin":
         return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
     else:
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


 @app.post("/key/update")
-async def update_openai_key(form_data: KeyUpdateForm,
-                            user=Depends(get_current_user)):
+async def update_openai_key(form_data: KeyUpdateForm, user=Depends(get_current_user)):
     if user and user.role == "admin":
         app.state.OPENAI_API_KEY = form_data.key
         return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
     else:
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


 @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
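The two hunks above are formatting-only: the wrapped HTTPException calls and two-line function signatures are collapsed onto single lines, with no change in behavior. Both /url/update and /key/update stay admin-only. A hedged usage sketch, assuming the router is mounted under /openai/api and that get_current_user resolves a Bearer token to a user with role "admin" (neither assumption is visible in this diff):

import requests  # illustration only; any HTTP client works

BASE = "http://localhost:8080/openai/api"             # assumed mount point, not shown in this diff
HEADERS = {"Authorization": "Bearer <admin-token>"}   # must resolve to a user with role "admin"

# UrlUpdateForm carries the new base URL (read as form_data.url in the handler above).
r = requests.post(f"{BASE}/url/update", json={"url": "https://api.openai.com/v1"}, headers=HEADERS)
print(r.json())  # {"OPENAI_API_BASE_URL": "https://api.openai.com/v1"}

# KeyUpdateForm works the same way via form_data.key.
r = requests.post(f"{BASE}/key/update", json={"key": "sk-..."}, headers=HEADERS)
print(r.json())  # {"OPENAI_API_KEY": "sk-..."}

# Any non-admin caller receives HTTP 401 with ERROR_MESSAGES.ACCESS_PROHIBITED.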
@@ -78,15 +72,30 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
     print(target_url, app.state.OPENAI_API_KEY)

     if user.role not in ["user", "admin"]:
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
     if app.state.OPENAI_API_KEY == "":
-        raise HTTPException(status_code=401,
-                            detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
+        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)

     body = await request.body()
     # headers = dict(request.headers)
     # print(headers)

+    # TODO: Remove below after gpt-4-vision fix from Open AI
+    # Try to decode the body of the request from bytes to a UTF-8 string (Require add max_token to fix gpt-4-vision)
+    try:
+        body = body.decode("utf-8")
+        body = json.loads(body)
+
+        # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
+        # This is a workaround until OpenAI fixes the issue with this model
+        if body.get("model") == "gpt-4-vision-preview":
+            if "max_tokens" not in body:
+                body["max_tokens"] = 4000
+            print("Modified body_dict:", body)
+
+        # Convert the modified body back to JSON
+        body = json.dumps(body)
+    except json.JSONDecodeError as e:
+        print("Error loading request body into a dictionary:", e)
+        raise HTTPException(status_code=400, detail="Invalid JSON in request body")
+
     headers = {}
     headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
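After this hunk, every proxied request body that parses as JSON is decoded, optionally patched, and re-serialized before the Authorization header is attached, and a body that is not valid JSON is now answered with HTTP 400 instead of being forwarded untouched. A sample vision payload that the new code would patch; the image data is a placeholder:

import json

payload = {
    "model": "gpt-4-vision-preview",
    "messages": [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "Describe this image."},
                {"type": "image_url", "image_url": {"url": "data:image/png;base64,<...>"}},
            ],
        }
    ],
    # no max_tokens here: the proxy now adds "max_tokens": 4000 before forwarding
}

raw = json.dumps(payload).encode("utf-8")  # roughly what `await request.body()` returns
# The try-block above decodes this, injects "max_tokens": 4000, and re-serializes it.
# A body that fails json.loads() triggers HTTPException(400, "Invalid JSON in request body").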
@@ -125,8 +134,8 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):

         if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
             response_data["data"] = list(
-                filter(lambda model: "gpt" in model["id"],
-                       response_data["data"]))
+                filter(lambda model: "gpt" in model["id"], response_data["data"])
+            )

         return response_data
     except Exception as e:
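The last hunk only reflows the existing filter: when the configured base URL points at OpenAI and the path is "models", the listing is still reduced to GPT-family entries before being returned. The same expression applied to a small sample response:

# Same filter as in the hunk above, applied to a sample /models response.
response_data = {
    "data": [
        {"id": "gpt-3.5-turbo"},
        {"id": "gpt-4-vision-preview"},
        {"id": "dall-e-3"},
        {"id": "whisper-1"},
    ]
}

response_data["data"] = list(
    filter(lambda model: "gpt" in model["id"], response_data["data"])
)
print([m["id"] for m in response_data["data"]])
# ['gpt-3.5-turbo', 'gpt-4-vision-preview']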