forked from open-webui/open-webui
feat: frontend file upload support
commit fef4725d56
parent 3229ec116c

6 changed files with 223 additions and 92 deletions
@@ -7,16 +7,18 @@
    import { page } from '$app/stores';

    import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
    import { copyToClipboard, splitStream } from '$lib/utils';

    import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
    import { copyToClipboard, splitStream } from '$lib/utils';
    import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
    import { queryVectorDB } from '$lib/apis/rag';
    import { generateOpenAIChatCompletion } from '$lib/apis/openai';

    import MessageInput from '$lib/components/chat/MessageInput.svelte';
    import Messages from '$lib/components/chat/Messages.svelte';
    import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
    import Navbar from '$lib/components/layout/Navbar.svelte';
    import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
    import { generateOpenAIChatCompletion } from '$lib/apis/openai';
    import { RAGTemplate } from '$lib/utils/rag';

    let stopResponseFlag = false;
    let autoScroll = true;
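The two RAG imports (`queryVectorDB`, `RAGTemplate`) are the new pieces this commit wires in. A minimal sketch, not part of the diff, of how they are exercised further down: `buildRagPrompt` is a hypothetical helper name, the argument order of `queryVectorDB` and the `documents` field mirror the calls shown below, and `RAGTemplate` is assumed to return a single augmented prompt string.

```js
import { queryVectorDB } from '$lib/apis/rag';
import { RAGTemplate } from '$lib/utils/rag';

// Hypothetical helper (not in the diff): query each uploaded file's vector
// collection for the user's question and fold the hits into one RAG prompt.
const buildRagPrompt = async (token, docs, query) => {
    const results = await Promise.all(
        // k = 4 nearest chunks per collection, as in the diff below
        docs.map((doc) => queryVectorDB(token, doc.collection_name, query, 4).catch(() => null))
    );
    const contextString = results
        .filter((r) => r)                      // drop failed lookups
        .map((r) => r.documents.join(' '))     // mirrors context.documents.join(' ') below
        .join('\n');
    return RAGTemplate(contextString, query);  // assumed to return one prompt string
};
```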
@@ -113,8 +115,103 @@
    // Ollama functions
    //////////////////////////

    const submitPrompt = async (userPrompt) => {
        console.log('submitPrompt', $chatId);

        if (selectedModels.includes('')) {
            toast.error('Model not selected');
        } else if (messages.length != 0 && messages.at(-1).done != true) {
            // Response not done
            console.log('wait');
        } else {
            // Reset chat message textarea height
            document.getElementById('chat-textarea').style.height = '';

            // Create user message
            let userMessageId = uuidv4();
            let userMessage = {
                id: userMessageId,
                parentId: messages.length !== 0 ? messages.at(-1).id : null,
                childrenIds: [],
                role: 'user',
                content: userPrompt,
                files: files.length > 0 ? files : undefined
            };

            // Add message to history and Set currentId to messageId
            history.messages[userMessageId] = userMessage;
            history.currentId = userMessageId;

            // Append messageId to childrenIds of parent message
            if (messages.length !== 0) {
                history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
            }

            // Wait until history/message have been updated
            await tick();

            // Create new chat if only one message in messages
            if (messages.length == 1) {
                if ($settings.saveChatHistory ?? true) {
                    chat = await createNewChat(localStorage.token, {
                        id: $chatId,
                        title: 'New Chat',
                        models: selectedModels,
                        system: $settings.system ?? undefined,
                        options: {
                            ...($settings.options ?? {})
                        },
                        messages: messages,
                        history: history,
                        timestamp: Date.now()
                    });
                    await chats.set(await getChatList(localStorage.token));
                    await chatId.set(chat.id);
                } else {
                    await chatId.set('local');
                }
                await tick();
            }

            // Reset chat input textarea
            prompt = '';
            files = [];

            // Send prompt
            await sendPrompt(userPrompt, userMessageId);
        }
    };
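submitPrompt keeps every message in a flat map keyed by id and linked through parentId/childrenIds, and, new in this commit, carries any uploads on the user message's `files` field. A sketch of the resulting shape after one upload-backed prompt; the field values are illustrative and the file object may carry more fields, but only `type` and `collection_name` are read back by sendPrompt below.

```js
// Illustrative snapshot of the history structure submitPrompt maintains.
const history = {
    currentId: 'msg-user-1', // id of the user message just added
    messages: {
        'msg-user-1': {
            id: 'msg-user-1',
            parentId: null,    // first message in the chat
            childrenIds: [],   // filled in once a response message is created
            role: 'user',
            content: 'Summarize the attached report',
            files: [{ type: 'file', collection_name: 'report-pdf' }] // shape assumed beyond these two fields
        }
    }
};
```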

    const sendPrompt = async (prompt, parentId) => {
        const _chatId = JSON.parse(JSON.stringify($chatId));

        // TODO: update below to include all ancestral files
        const docs = history.messages[parentId].files.filter((item) => item.type === 'file');

        if (docs.length > 0) {
            const query = history.messages[parentId].content;

            let relevantContexts = await Promise.all(
                docs.map(async (doc) => {
                    return await queryVectorDB(localStorage.token, doc.collection_name, query, 4).catch(
                        (error) => {
                            console.log(error);
                            return null;
                        }
                    );
                })
            );
            relevantContexts = relevantContexts.filter((context) => context);

            const contextString = relevantContexts.reduce((a, context, i, arr) => {
                return `${a}${context.documents.join(' ')}\n`;
            }, '');

            history.messages[parentId].raContent = RAGTemplate(contextString, query);
            history.messages[parentId].contexts = relevantContexts;
            await tick();
        }
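A standalone sketch of the context assembly above: each successful queryVectorDB result contributes its `documents` joined by spaces, results are concatenated line by line, and the combined string is folded into the message via RAGTemplate. The sample data is illustrative only.

```js
// Same reduce as in the diff, run on illustrative results.
const relevantContexts = [
    { documents: ['Q3 revenue grew 12%', 'churn fell to 2.1%'] },
    { documents: ['headcount was flat'] }
];

const contextString = relevantContexts.reduce((a, context) => {
    return `${a}${context.documents.join(' ')}\n`;
}, '');
// -> "Q3 revenue grew 12% churn fell to 2.1%\nheadcount was flat\n"
```

Note that submitPrompt sets `files` to undefined when no uploads are attached, so the `history.messages[parentId].files.filter(...)` call above may need a guard such as `(history.messages[parentId].files ?? [])`; the diff as extracted does not show one.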

        await Promise.all(
            selectedModels.map(async (model) => {
                console.log(model);
@@ -177,7 +274,7 @@
                .filter((message) => message)
                .map((message) => ({
                    role: message.role,
                    content: message.content,
                    content: message?.raContent ?? message.content,
                    ...(message.files && {
                        images: message.files
                            .filter((file) => file.type === 'image')
@@ -366,7 +463,7 @@
                        content: [
                            {
                                type: 'text',
                                text: message.content
                                text: message?.raContent ?? message.content
                            },
                            ...message.files
                                .filter((file) => file.type === 'image')
@@ -378,7 +475,7 @@
                            }))
                        ]
                    }
                : { content: message.content })
                : { content: message?.raContent ?? message.content })
        })),
        seed: $settings?.options?.seed ?? undefined,
        stop: $settings?.options?.stop ?? undefined,
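The three hunks above make the same change in the Ollama and OpenAI request builders: the content sent to the model prefers the RAG-augmented `raContent` whenever the retrieval step produced it. A reduced sketch of that fallback; the helper name is hypothetical.

```js
// Hypothetical helper showing the nullish-coalescing fallback used above.
const messageForModel = (message) => ({
    role: message.role,
    // use the retrieval-augmented content if sendPrompt produced one,
    // otherwise the user's original text
    content: message?.raContent ?? message.content
});
```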
@@ -494,73 +591,6 @@
        }
    };

    const submitPrompt = async (userPrompt) => {
        console.log('submitPrompt', $chatId);

        if (selectedModels.includes('')) {
            toast.error('Model not selected');
        } else if (messages.length != 0 && messages.at(-1).done != true) {
            // Response not done
            console.log('wait');
        } else {
            // Reset chat message textarea height
            document.getElementById('chat-textarea').style.height = '';

            // Create user message
            let userMessageId = uuidv4();
            let userMessage = {
                id: userMessageId,
                parentId: messages.length !== 0 ? messages.at(-1).id : null,
                childrenIds: [],
                role: 'user',
                content: userPrompt,
                files: files.length > 0 ? files : undefined
            };

            // Add message to history and Set currentId to messageId
            history.messages[userMessageId] = userMessage;
            history.currentId = userMessageId;

            // Append messageId to childrenIds of parent message
            if (messages.length !== 0) {
                history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
            }

            // Wait until history/message have been updated
            await tick();

            // Create new chat if only one message in messages
            if (messages.length == 1) {
                if ($settings.saveChatHistory ?? true) {
                    chat = await createNewChat(localStorage.token, {
                        id: $chatId,
                        title: 'New Chat',
                        models: selectedModels,
                        system: $settings.system ?? undefined,
                        options: {
                            ...($settings.options ?? {})
                        },
                        messages: messages,
                        history: history,
                        timestamp: Date.now()
                    });
                    await chats.set(await getChatList(localStorage.token));
                    await chatId.set(chat.id);
                } else {
                    await chatId.set('local');
                }
                await tick();
            }

            // Reset chat input textarea
            prompt = '';
            files = [];

            // Send prompt
            await sendPrompt(userPrompt, userMessageId);
        }
    };

    const stopResponse = () => {
        stopResponseFlag = true;
        console.log('stopResponse');