Add workaround for the gpt-4-vision-preview model, which supports only 4k tokens

This commit is contained in:
Aditya Pratap Singh 2024-01-20 04:34:47 +05:30
parent 8662437a9f
commit 60afd6ecdd

View file

@ -99,10 +99,10 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
     print("Error loading request body into a dictionary:", e)
     raise HTTPException(status_code=400, detail="Invalid JSON in request body")
-# Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 10000
+# Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
 # This is a workaround until OpenAI fixes the issue with this model
 if body_dict.get("model") == "gpt-4-vision-preview":
-    body_dict["max_tokens"] = 10000
+    body_dict["max_tokens"] = 4000
 print("Modified body_dict:", body_dict)
 # Try to convert the modified body back to JSON