feat: make chunk splitting a configurable option

Jun Siang Cheah 2024-04-21 10:45:07 +01:00
parent efa258c695
commit 67df928c7a
5 changed files with 41 additions and 7 deletions

View file

@@ -6,9 +6,14 @@ type TextStreamUpdate = {
 // createOpenAITextStream takes a ReadableStreamDefaultReader from an SSE response,
 // and returns an async generator that emits delta updates with large deltas chunked into random sized chunks
 export async function createOpenAITextStream(
-	messageStream: ReadableStreamDefaultReader
+	messageStream: ReadableStreamDefaultReader,
+	splitLargeDeltas: boolean
 ): Promise<AsyncGenerator<TextStreamUpdate>> {
-	return streamLargeDeltasAsRandomChunks(openAIStreamToIterator(messageStream));
+	let iterator = openAIStreamToIterator(messageStream);
+	if (splitLargeDeltas) {
+		iterator = streamLargeDeltasAsRandomChunks(iterator);
+	}
+	return iterator;
 }
 
 async function* openAIStreamToIterator(
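For context on what the new flag gates: `streamLargeDeltasAsRandomChunks` (defined further down in this file, not shown in the diff) is the wrapper that breaks one large delta into many small ones. A minimal sketch of such a wrapper, assuming a `{ done, value }` shape for `TextStreamUpdate` and an arbitrary 1-3 character chunk size; both are assumptions for illustration, not the file's exact code:

```ts
// Sketch only: the field names and chunk sizing below are assumed, not copied from the repo.
type TextStreamUpdate = { done: boolean; value: string };

async function* streamLargeDeltasAsRandomChunks(
	iterator: AsyncGenerator<TextStreamUpdate>
): AsyncGenerator<TextStreamUpdate> {
	for await (const update of iterator) {
		if (update.done) {
			yield update;
			return;
		}
		let remaining = update.value;
		while (remaining.length > 0) {
			// Re-emit the delta as small, randomly sized slices so long responses
			// appear to stream smoothly instead of arriving in one large block.
			const chunkSize = Math.min(Math.floor(Math.random() * 3) + 1, remaining.length);
			yield { done: false, value: remaining.slice(0, chunkSize) };
			remaining = remaining.slice(chunkSize);
		}
	}
}
```

With `splitLargeDeltas` set to `false`, the raw iterator is returned unchanged and each SSE delta is emitted exactly once.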

View file

@@ -17,11 +17,17 @@
 	let titleAutoGenerateModelExternal = '';
 	let fullScreenMode = false;
 	let titleGenerationPrompt = '';
+	let splitLargeChunks = false;
 
 	// Interface
 	let promptSuggestions = [];
 	let showUsername = false;
 
+	const toggleSplitLargeChunks = async () => {
+		splitLargeChunks = !splitLargeChunks;
+		saveSettings({ splitLargeChunks: splitLargeChunks });
+	};
+
 	const toggleFullScreenMode = async () => {
 		fullScreenMode = !fullScreenMode;
 		saveSettings({ fullScreenMode: fullScreenMode });
@@ -197,6 +203,28 @@
 					</button>
 				</div>
 			</div>
+
+			<div>
+				<div class=" py-0.5 flex w-full justify-between">
+					<div class=" self-center text-xs font-medium">
+						{$i18n.t('Fluidly stream large external response chunks')}
+					</div>
+
+					<button
+						class="p-1 px-3 text-xs flex rounded transition"
+						on:click={() => {
+							toggleSplitLargeChunks();
+						}}
+						type="button"
+					>
+						{#if splitLargeChunks === true}
+							<span class="ml-2 self-center">{$i18n.t('On')}</span>
+						{:else}
+							<span class="ml-2 self-center">{$i18n.t('Off')}</span>
+						{/if}
+					</button>
+				</div>
+			</div>
 		</div>
 
 		<hr class=" dark:border-gray-700" />
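How the toggle reaches the streaming code: `saveSettings` merges the patch into the shared settings store, and the chat pages read `$settings.splitLargeChunks` when opening a stream (see the call-site hunks below). A rough sketch of that round trip, assuming the store is a Svelte writable and that persistence happens elsewhere; every name other than `splitLargeChunks` and `createOpenAITextStream` is illustrative:

```ts
import { get, writable } from 'svelte/store';

// Assumed shape: the real settings object has many more fields.
type Settings = { splitLargeChunks?: boolean };

const settings = writable<Settings>({});

// Assumed behaviour: merge the patch into the store (the real helper also persists it).
const saveSettings = (patch: Partial<Settings>) => {
	settings.update((current) => ({ ...current, ...patch }));
};

// Settings page: the toggle flips the flag and saves it.
saveSettings({ splitLargeChunks: true });

// Chat page: the saved flag decides whether deltas get re-chunked.
const splitLargeChunks = get(settings).splitLargeChunks ?? false;
// await createOpenAITextStream(reader, splitLargeChunks);
```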

View file

@@ -152,6 +152,7 @@
 	"File Mode": "",
 	"File not found.": "",
 	"Fingerprint spoofing detected: Unable to use initials as avatar. Defaulting to default profile image.": "",
+	"Fluidly stream large external response chunks": "",
 	"Focus chat input": "",
 	"Format your variables using square brackets like this:": "",
 	"From (Base Model)": "",

View file

@@ -600,7 +600,7 @@
 				.pipeThrough(splitStream('\n'))
 				.getReader();
 
-			const textStream = await createOpenAITextStream(reader);
+			const textStream = await createOpenAITextStream(reader, $settings.splitLargeChunks);
 			console.log(textStream);
 
 			for await (const update of textStream) {
View file

@@ -552,9 +552,9 @@
 				messages: [
 					$settings.system
 						? {
 								role: 'system',
 								content: $settings.system
 							}
 						: undefined,
 					...messages
 				]
@@ -612,7 +612,7 @@
 					.pipeThrough(splitStream('\n'))
 					.getReader();
 
-				const textStream = await createOpenAITextStream(reader);
+				const textStream = await createOpenAITextStream(reader, $settings.splitLargeChunks);
 				console.log(textStream);
 
 				for await (const update of textStream) {