forked from open-webui/open-webui
fix: more edge cases
This commit is contained in:
parent f40147ce58
commit b6ab357e8c
2 changed files with 37 additions and 30 deletions
@@ -17,7 +17,8 @@ from apps.web.models.chats import (
 )
 
 from utils.utils import (
-    bearer_scheme, )
+    bearer_scheme,
+)
 from constants import ERROR_MESSAGES
 
 router = APIRouter()
@@ -29,7 +30,8 @@ router = APIRouter()
 
 @router.get("/", response_model=List[ChatTitleIdResponse])
 async def get_user_chats(
-        user=Depends(get_current_user), skip: int = 0, limit: int = 50):
+    user=Depends(get_current_user), skip: int = 0, limit: int = 50
+):
     return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
 
 
@@ -41,9 +43,8 @@ async def get_user_chats(
 @router.get("/all", response_model=List[ChatResponse])
 async def get_all_user_chats(user=Depends(get_current_user)):
     return [
-        ChatResponse(**{
-            **chat.model_dump(), "chat": json.loads(chat.chat)
-        }) for chat in Chats.get_all_chats_by_user_id(user.id)
+        ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
+        for chat in Chats.get_all_chats_by_user_id(user.id)
     ]
 
 
@@ -54,8 +55,14 @@ async def get_all_user_chats(user=Depends(get_current_user)):
 
 @router.post("/new", response_model=Optional[ChatResponse])
 async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
-    chat = Chats.insert_new_chat(user.id, form_data)
-    return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
+    try:
+        chat = Chats.insert_new_chat(user.id, form_data)
+        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
+    except Exception as e:
+        print(e)
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
+        )
 
 
 ############################
@@ -68,12 +75,11 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
     chat = Chats.get_chat_by_id_and_user_id(id, user.id)
 
     if chat:
-        return ChatResponse(**{
-            **chat.model_dump(), "chat": json.loads(chat.chat)
-        })
+        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
     else:
-        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
-                            detail=ERROR_MESSAGES.NOT_FOUND)
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
+        )
 
 
 ############################
@@ -82,17 +88,15 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
 
 
 @router.post("/{id}", response_model=Optional[ChatResponse])
-async def update_chat_by_id(id: str,
-                            form_data: ChatForm,
-                            user=Depends(get_current_user)):
+async def update_chat_by_id(
+    id: str, form_data: ChatForm, user=Depends(get_current_user)
+):
     chat = Chats.get_chat_by_id_and_user_id(id, user.id)
     if chat:
         updated_chat = {**json.loads(chat.chat), **form_data.chat}
 
         chat = Chats.update_chat_by_id(id, updated_chat)
-        return ChatResponse(**{
-            **chat.model_dump(), "chat": json.loads(chat.chat)
-        })
+        return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
     else:
         raise HTTPException(
             status_code=status.HTTP_401_UNAUTHORIZED,

@@ -216,11 +216,11 @@ const convertOpenAIMessages = (convo) => {
 		} else {
 			const new_chat = {
 				id: message_id,
-				parentId: messages.length > 0 ? message['parent'] : null,
+				parentId: messages.length > 0 && message['parent'] in mapping ? message['parent'] : null,
 				childrenIds: message['children'] || [],
 				role: message['message']?.['author']?.['role'] !== 'user' ? 'assistant' : 'user',
 				content: message['message']?.['content']?.['parts']?.[0] || '',
-				model: '',
+				model: 'gpt-3.5-turbo',
 				done: true,
 				context: null
 			};
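The parentId change is the main edge case in this hunk: a ChatGPT export can record a parent id that was pruned from `mapping`, and the old code copied that dangling id straight into the chat tree. The same hunk also stamps imported messages with 'gpt-3.5-turbo' instead of an empty model name. Below is a minimal standalone sketch of the new guard; the object shapes (`mapping`, `message`, `messages`) are assumptions modelled on the converter's locals, not lines from this commit.

// A pruned export: 'msg-0', the recorded parent of 'msg-1', is absent from mapping.
const mapping: Record<string, any> = {
	'msg-1': { parent: 'msg-0', children: [], message: null }
};
const message = mapping['msg-1'];
const messages = [{ id: 'earlier-message' }]; // at least one message already converted

// Old check: parentId becomes 'msg-0', an id that never appears in the history.
// New check: the missing parent is detected and the node is re-rooted at null.
const parentId =
	messages.length > 0 && message['parent'] in mapping ? message['parent'] : null;

console.log(parentId); // -> null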
@@ -236,11 +236,11 @@ const convertOpenAIMessages = (convo) => {
 			currentId: currentId,
 			messages: history // Need to convert this to not a list and instead a json object
 		},
-		models: [''],
+		models: ['gpt-3.5-turbo'],
 		messages: messages,
 		options: {},
 		timestamp: convo['create_time'],
-		title: convo['title']
+		title: convo['title'] ?? 'New Chat'
 	};
 	return chat;
 };
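This hunk applies the same placeholder model at the chat level and falls back to 'New Chat' when the export carries no title. A small sketch of the `??` fallback; the conversation objects are invented for illustration.

const titled = { title: 'Trip planning' };
const untitled = { title: null };

// Nullish coalescing only replaces null/undefined, so a real title is kept as-is.
console.log(titled.title ?? 'New Chat'); // -> 'Trip planning'
console.log(untitled.title ?? 'New Chat'); // -> 'New Chat'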
@@ -249,14 +249,17 @@ export const convertOpenAIChats = (_chats) => {
 	// Create a list of dictionaries with each conversation from import
 	const chats = [];
 	for (let convo of _chats) {
-		const chat = {
-			id: convo['id'],
-			user_id: '',
-			title: convo['title'],
-			chat: convertOpenAIMessages(convo),
-			timestamp: convo['timestamp']
-		};
-		chats.push(chat);
+		const chat = convertOpenAIMessages(convo);
+
+		if (Object.keys(chat.history.messages).length > 0) {
+			chats.push({
+				id: convo['id'],
+				user_id: '',
+				title: convo['title'],
+				chat: chat,
+				timestamp: convo['timestamp']
+			});
+		}
 	}
 	return chats;
 };
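The last hunk converts each conversation first and only imports it when the converted history actually contains messages, so conversations that come out empty are skipped instead of appearing as blank chats. A standalone sketch of that guard; the converted-chat shapes are assumed.

const emptyChat = { history: { messages: {} } };
const realChat = { history: { messages: { 'msg-1': { content: 'hello' } } } };

const keep = (chat: { history: { messages: Record<string, unknown> } }) =>
	Object.keys(chat.history.messages).length > 0;

console.log(keep(emptyChat)); // -> false: dropped from the import list
console.log(keep(realChat)); // -> true: imported as usual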