Merge pull request #915 from open-webui/dev

fix: continue generation

Commit 6b0eae9fad by Timothy Jaeryang Baek, 2024-02-25 18:48:40 -05:00, committed by GitHub
GPG signature: no known key found for this signature in database (key ID: B5690EEEBB952194)
3 changed files with 117 additions and 158 deletions

File 1 of 3:

@@ -281,52 +281,6 @@
             {showNextMessage}
             {copyToClipboard}
           />
-
-          {#if messages.length - 1 === messageIdx && processing !== ''}
-            <div class="flex my-2.5 ml-12 items-center w-fit space-x-2.5">
-              <div class=" dark:text-blue-100">
-                <svg
-                  class=" w-4 h-4 translate-y-[0.5px]"
-                  fill="currentColor"
-                  viewBox="0 0 24 24"
-                  xmlns="http://www.w3.org/2000/svg"
-                  ><style>
-                    .spinner_qM83 {
-                      animation: spinner_8HQG 1.05s infinite;
-                    }
-                    .spinner_oXPr {
-                      animation-delay: 0.1s;
-                    }
-                    .spinner_ZTLf {
-                      animation-delay: 0.2s;
-                    }
-                    @keyframes spinner_8HQG {
-                      0%,
-                      57.14% {
-                        animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
-                        transform: translate(0);
-                      }
-                      28.57% {
-                        animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
-                        transform: translateY(-6px);
-                      }
-                      100% {
-                        transform: translate(0);
-                      }
-                    }
-                  </style><circle class="spinner_qM83" cx="4" cy="12" r="2.5" /><circle
-                    class="spinner_qM83 spinner_oXPr"
-                    cx="12"
-                    cy="12"
-                    r="2.5"
-                  /><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="2.5" /></svg
-                >
-              </div>
-              <div class=" text-sm font-medium">
-                {processing}
-              </div>
-            </div>
-          {/if}
         {:else}
           <ResponseMessage
             {message}

File 2 of 3:

@@ -732,25 +732,26 @@
     responseMessage.done = false;
     await tick();

-    const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+    const model = $models.filter((m) => m.id === responseMessage.model).at(0);

-    if (modelTag?.external) {
-      await sendPromptOpenAI(
-        responseMessage.model,
-        history.messages[responseMessage.parentId].content,
-        responseMessage.id,
-        _chatId
-      );
-    } else if (modelTag) {
-      await sendPromptOllama(
-        responseMessage.model,
-        history.messages[responseMessage.parentId].content,
-        responseMessage.id,
-        _chatId
-      );
+    if (model) {
+      if (model?.external) {
+        await sendPromptOpenAI(
+          model,
+          history.messages[responseMessage.parentId].content,
+          responseMessage.id,
+          _chatId
+        );
+      } else
+        await sendPromptOllama(
+          model,
+          history.messages[responseMessage.parentId].content,
+          responseMessage.id,
+          _chatId
+        );
     } else {
-      toast.error(`Model ${model} not found`);
+      toast.error(`Model ${modelId} not found`);
     }
   };
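
Both copies of continueGeneration in this commit converge on the same pattern: resolve the stored model id against $models, then dispatch to the OpenAI-compatible sender or the Ollama sender, and surface a toast when the id no longer resolves. Below is a minimal TypeScript sketch of that resolve-then-dispatch flow under assumed, simplified types; ModelEntry, dispatchPrompt, and the stub senders are illustrative names, not the app's real API.

```ts
// Simplified model entry (assumed shape; the real $models entries carry more fields).
interface ModelEntry {
  id: string;
  name: string;
  external?: boolean; // true => served through an OpenAI-compatible endpoint
  source?: string; // e.g. 'litellm' (used later to pick a base URL)
}

type Sender = (model: ModelEntry, prompt: string) => Promise<void>;

// Hypothetical helper mirroring the right-hand side of the diff above.
async function dispatchPrompt(
  models: ModelEntry[],
  modelId: string,
  prompt: string,
  sendOpenAI: Sender,
  sendOllama: Sender,
  notFound: (id: string) => void
): Promise<void> {
  // Resolve by stable id, not display name, so renamed models still match.
  const model = models.find((m) => m.id === modelId);
  if (model) {
    if (model.external) {
      await sendOpenAI(model, prompt);
    } else {
      await sendOllama(model, prompt);
    }
  } else {
    notFound(modelId);
  }
}
```

Passing the resolved entry forward, rather than a bare name string as before, is what lets the OpenAI path later read per-model fields such as source when choosing a base URL.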

File 3 of 3:

@@ -238,7 +238,6 @@
       await sendPrompt(userPrompt, userMessageId);
     }
   };
-
   const sendPrompt = async (prompt, parentId) => {
     const _chatId = JSON.parse(JSON.stringify($chatId));
@@ -292,40 +291,41 @@
     }

     await Promise.all(
-      selectedModels.map(async (model) => {
-        console.log(model);
-        const modelTag = $models.filter((m) => m.name === model).at(0);
+      selectedModels.map(async (modelId) => {
+        const model = $models.filter((m) => m.id === modelId).at(0);

-        // Create response message
-        let responseMessageId = uuidv4();
-        let responseMessage = {
-          parentId: parentId,
-          id: responseMessageId,
-          childrenIds: [],
-          role: 'assistant',
-          content: '',
-          model: model,
-          timestamp: Math.floor(Date.now() / 1000) // Unix epoch
-        };
-
-        // Add message to history and Set currentId to messageId
-        history.messages[responseMessageId] = responseMessage;
-        history.currentId = responseMessageId;
-
-        // Append messageId to childrenIds of parent message
-        if (parentId !== null) {
-          history.messages[parentId].childrenIds = [
-            ...history.messages[parentId].childrenIds,
-            responseMessageId
-          ];
-        }
-
-        if (modelTag?.external) {
-          await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
-        } else if (modelTag) {
-          await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+        if (model) {
+          // Create response message
+          let responseMessageId = uuidv4();
+          let responseMessage = {
+            parentId: parentId,
+            id: responseMessageId,
+            childrenIds: [],
+            role: 'assistant',
+            content: '',
+            model: model.id,
+            timestamp: Math.floor(Date.now() / 1000) // Unix epoch
+          };
+
+          // Add message to history and Set currentId to messageId
+          history.messages[responseMessageId] = responseMessage;
+          history.currentId = responseMessageId;
+
+          // Append messageId to childrenIds of parent message
+          if (parentId !== null) {
+            history.messages[parentId].childrenIds = [
+              ...history.messages[parentId].childrenIds,
+              responseMessageId
+            ];
+          }
+
+          if (model?.external) {
+            await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+          } else if (model) {
+            await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+          }
         } else {
-          toast.error(`Model ${model} not found`);
+          toast.error(`Model ${modelId} not found`);
         }
       })
     );
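
In sendPrompt, the map callback now receives a model id, resolves it, and only builds the assistant placeholder message once the model is known; Promise.all still runs all selected models concurrently. A small sketch of that guarded creation and fan-out, using the same uuid library the page already imports (createResponseMessage and fanOut are hypothetical helper names, and the message shape is pared down to the fields in the diff):

```ts
import { v4 as uuidv4 } from 'uuid';

interface ResponseMessage {
  parentId: string | null;
  id: string;
  childrenIds: string[];
  role: 'assistant';
  content: string;
  model: string; // stores model.id, not the display name
  timestamp: number; // Unix epoch seconds
}

// Hypothetical helper mirroring the guarded message creation in the diff.
function createResponseMessage(parentId: string | null, modelId: string): ResponseMessage {
  return {
    parentId,
    id: uuidv4(),
    childrenIds: [],
    role: 'assistant',
    content: '',
    model: modelId,
    timestamp: Math.floor(Date.now() / 1000)
  };
}

// One response message per selected model, generated concurrently.
async function fanOut(selectedModels: string[], handle: (modelId: string) => Promise<void>) {
  await Promise.all(selectedModels.map((modelId) => handle(modelId)));
}
```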
@@ -334,6 +334,7 @@
   };

   const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+    model = model.id;
     const responseMessage = history.messages[responseMessageId];

     // Wait until history/message have been updated
@@ -543,57 +544,60 @@
   const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
     const responseMessage = history.messages[responseMessageId];
     scrollToBottom();

-    const res = await generateOpenAIChatCompletion(localStorage.token, {
-      model: model,
-      stream: true,
-      messages: [
-        $settings.system
-          ? {
-              role: 'system',
-              content: $settings.system
-            }
-          : undefined,
-        ...messages.filter((message) => !message.deleted)
-      ]
-        .filter((message) => message)
-        .map((message, idx, arr) => ({
-          role: message.role,
-          ...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
-            ? {
-                content: [
-                  {
-                    type: 'text',
-                    text:
-                      arr.length - 1 !== idx
-                        ? message.content
-                        : message?.raContent ?? message.content
-                  },
-                  ...message.files
-                    .filter((file) => file.type === 'image')
-                    .map((file) => ({
-                      type: 'image_url',
-                      image_url: {
-                        url: file.url
-                      }
-                    }))
-                ]
-              }
-            : {
-                content:
-                  arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
-              })
-        })),
-      seed: $settings?.options?.seed ?? undefined,
-      stop: $settings?.options?.stop ?? undefined,
-      temperature: $settings?.options?.temperature ?? undefined,
-      top_p: $settings?.options?.top_p ?? undefined,
-      num_ctx: $settings?.options?.num_ctx ?? undefined,
-      frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
-      max_tokens: $settings?.options?.num_predict ?? undefined
-    });
+    const res = await generateOpenAIChatCompletion(
+      localStorage.token,
+      {
+        model: model.id,
+        stream: true,
+        messages: [
+          $settings.system
+            ? {
+                role: 'system',
+                content: $settings.system
+              }
+            : undefined,
+          ...messages.filter((message) => !message.deleted)
+        ]
+          .filter((message) => message)
+          .map((message, idx, arr) => ({
+            role: message.role,
+            ...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+              ? {
+                  content: [
+                    {
+                      type: 'text',
+                      text:
+                        arr.length - 1 !== idx
+                          ? message.content
+                          : message?.raContent ?? message.content
+                    },
+                    ...message.files
+                      .filter((file) => file.type === 'image')
+                      .map((file) => ({
+                        type: 'image_url',
+                        image_url: {
+                          url: file.url
+                        }
+                      }))
+                  ]
+                }
+              : {
+                  content:
+                    arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
+                })
+          })),
+        seed: $settings?.options?.seed ?? undefined,
+        stop: $settings?.options?.stop ?? undefined,
+        temperature: $settings?.options?.temperature ?? undefined,
+        top_p: $settings?.options?.top_p ?? undefined,
+        num_ctx: $settings?.options?.num_ctx ?? undefined,
+        frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
+        max_tokens: $settings?.options?.num_predict ?? undefined
+      },
+      model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`
+    );

     if (res && res.ok) {
       const reader = res.body
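
The new third argument to generateOpenAIChatCompletion selects the backend per model: entries whose source is 'litellm' are sent to the LiteLLM proxy's /v1 route, everything else to the regular OpenAI-compatible base URL. A standalone TypeScript sketch of that routing ternary follows; the two constant values are placeholders, not the app's actual configuration.

```ts
// Placeholder base URLs (assumptions; the app imports its own constants).
const OPENAI_API_BASE_URL = 'https://api.openai.com/v1';
const LITELLM_API_BASE_URL = 'http://localhost:4000';

interface RoutableModel {
  id: string;
  source?: string; // 'litellm' for models registered through the LiteLLM proxy
}

// Mirrors the ternary in the diff: route by the model's source.
function resolveBaseUrl(model: RoutableModel): string {
  return model.source === 'litellm' ? `${LITELLM_API_BASE_URL}/v1` : `${OPENAI_API_BASE_URL}`;
}

// Usage: a LiteLLM-proxied model goes to the proxy, others to the default endpoint.
console.log(resolveBaseUrl({ id: 'mixtral', source: 'litellm' })); // http://localhost:4000/v1
console.log(resolveBaseUrl({ id: 'gpt-4' })); // https://api.openai.com/v1
```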
@@ -704,7 +708,6 @@
       await setChatTitle(_chatId, userPrompt);
     }
   };
-
   const stopResponse = () => {
     stopResponseFlag = true;
     console.log('stopResponse');
@@ -719,25 +722,26 @@
     responseMessage.done = false;
     await tick();

-    const modelTag = $models.filter((m) => m.name === responseMessage.model).at(0);
+    const model = $models.filter((m) => m.id === responseMessage.model).at(0);

-    if (modelTag?.external) {
-      await sendPromptOpenAI(
-        responseMessage.model,
-        history.messages[responseMessage.parentId].content,
-        responseMessage.id,
-        _chatId
-      );
-    } else if (modelTag) {
-      await sendPromptOllama(
-        responseMessage.model,
-        history.messages[responseMessage.parentId].content,
-        responseMessage.id,
-        _chatId
-      );
+    if (model) {
+      if (model?.external) {
+        await sendPromptOpenAI(
+          model,
+          history.messages[responseMessage.parentId].content,
+          responseMessage.id,
+          _chatId
+        );
+      } else
+        await sendPromptOllama(
+          model,
+          history.messages[responseMessage.parentId].content,
+          responseMessage.id,
+          _chatId
+        );
     } else {
-      toast.error(`Model ${model} not found`);
+      toast.error(`Model ${modelId} not found`);
     }
   };