Merge pull request #1630 from cheahjs/feat/split-large-chunks

feat: split large openai responses into smaller chunks
This commit is contained in:
Timothy Jaeryang Baek 2024-04-22 13:56:26 -07:00 committed by GitHub
commit 2d7d6cfffc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 121 additions and 52 deletions

View file

@@ -17,11 +17,17 @@
let titleAutoGenerateModelExternal = '';
let fullScreenMode = false;
let titleGenerationPrompt = '';
let splitLargeChunks = false;
// Interface
let promptSuggestions = [];
let showUsername = false;
// Flip the splitLargeChunks preference and persist it alongside the other settings.
const toggleSplitLargeChunks = async () => {
	splitLargeChunks = splitLargeChunks ? false : true;
	saveSettings({ splitLargeChunks });
};
const toggleFullScreenMode = async () => {
fullScreenMode = !fullScreenMode;
saveSettings({ fullScreenMode: fullScreenMode });
@@ -197,6 +203,28 @@
</button>
</div>
</div>
<!-- Settings row: On/Off toggle for splitLargeChunks; label is translated via $i18n,
     and clicking the button calls toggleSplitLargeChunks() which also persists the value -->
<div>
<div class=" py-0.5 flex w-full justify-between">
<div class=" self-center text-xs font-medium">
{$i18n.t('Fluidly stream large external response chunks')}
</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleSplitLargeChunks();
}}
type="button"
>
{#if splitLargeChunks === true}
<span class="ml-2 self-center">{$i18n.t('On')}</span>
{:else}
<span class="ml-2 self-center">{$i18n.t('Off')}</span>
{/if}
</button>
</div>
</div>
</div>
<hr class=" dark:border-gray-700" />