diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte
index b2d2cbd4..1580e365 100644
--- a/src/routes/(app)/+page.svelte
+++ b/src/routes/(app)/+page.svelte
@@ -520,11 +520,6 @@
 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
 
-		// Wait until history/message have been updated
-		await tick();
-
-		scrollToBottom();
-
 		const docs = messages
 			.filter((message) => message?.files ?? null)
 			.map((message) =>
@@ -593,6 +588,11 @@
 				: `${OPENAI_API_BASE_URL}`
 		);
 
+		// Wait until history/message have been updated
+		await tick();
+
+		scrollToBottom();
+
 		if (res && res.ok) {
 			const reader = res.body
 				.pipeThrough(new TextDecoderStream())
diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte
index 1a304f6d..139fcd40 100644
--- a/src/routes/(app)/c/[id]/+page.svelte
+++ b/src/routes/(app)/c/[id]/+page.svelte
@@ -536,11 +536,6 @@
 	const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
 		const responseMessage = history.messages[responseMessageId];
 
-		// Wait until history/message have been updated
-		await tick();
-
-		scrollToBottom();
-
 		const docs = messages
 			.filter((message) => message?.files ?? null)
 			.map((message) =>
@@ -607,6 +602,11 @@
 				: `${OPENAI_API_BASE_URL}`
 		);
 
+		// Wait until history/message have been updated
+		await tick();
+
+		scrollToBottom();
+
 		if (res && res.ok) {
 			const reader = res.body
 				.pipeThrough(new TextDecoderStream())