forked from open-webui/open-webui
feat: split large openai responses into smaller chunks
parent a4083f43cb
commit efa258c695
3 changed files with 90 additions and 55 deletions
src/lib/apis/streaming/index.ts (new file, 65 lines)

@@ -0,0 +1,65 @@
+type TextStreamUpdate = {
+	done: boolean;
+	value: string;
+};
+
+// createOpenAITextStream takes a ReadableStreamDefaultReader from an SSE response,
+// and returns an async generator that emits delta updates with large deltas chunked into random sized chunks
+export async function createOpenAITextStream(
+	messageStream: ReadableStreamDefaultReader
+): Promise<AsyncGenerator<TextStreamUpdate>> {
+	return streamLargeDeltasAsRandomChunks(openAIStreamToIterator(messageStream));
+}
+
+async function* openAIStreamToIterator(
+	reader: ReadableStreamDefaultReader
+): AsyncGenerator<TextStreamUpdate> {
+	while (true) {
+		const { value, done } = await reader.read();
+		if (done) {
+			yield { done: true, value: '' };
+			break;
+		}
+		const lines = value.split('\n');
+		for (const line of lines) {
+			if (line !== '') {
+				console.log(line);
+				if (line === 'data: [DONE]') {
+					yield { done: true, value: '' };
+				} else {
+					const data = JSON.parse(line.replace(/^data: /, ''));
+					console.log(data);
+
+					yield { done: false, value: data.choices[0].delta.content ?? '' };
+				}
+			}
+		}
+	}
+}
+
+// streamLargeDeltasAsRandomChunks will chunk large deltas (length > 5) into random sized chunks between 1-3 characters
+// This is to simulate a more fluid streaming, even though some providers may send large chunks of text at once
+async function* streamLargeDeltasAsRandomChunks(
+	iterator: AsyncGenerator<TextStreamUpdate>
+): AsyncGenerator<TextStreamUpdate> {
+	for await (const textStreamUpdate of iterator) {
+		if (textStreamUpdate.done) {
+			yield textStreamUpdate;
+			return;
+		}
+		let content = textStreamUpdate.value;
+		if (content.length < 5) {
+			yield { done: false, value: content };
+			continue;
+		}
+		while (content != '') {
+			const chunkSize = Math.min(Math.floor(Math.random() * 3) + 1, content.length);
+			const chunk = content.slice(0, chunkSize);
+			yield { done: false, value: chunk };
+			await sleep(5);
+			content = content.slice(chunkSize);
+		}
+	}
+}
+
+const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
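For context, here is a minimal consumption sketch that is not part of the commit: it assumes a fetch() Response whose body is decoded and piped through the repo's splitStream('\n') helper, the same wiring shown in the component hunks below, and then drains the generator. The streamCompletion name, the onDelta callback, and the '$lib/utils' import path for splitStream are illustrative assumptions.

// Hypothetical usage sketch, not part of the diff above.
import { splitStream } from '$lib/utils'; // assumed export path for the existing helper
import { createOpenAITextStream } from '$lib/apis/streaming';

async function streamCompletion(res: Response, onDelta: (text: string) => void) {
	if (!res.body) {
		throw new Error('Response has no body to stream');
	}
	// Decode the SSE bytes and split them into newline-delimited events,
	// matching the reader the chat components hand to createOpenAITextStream.
	const reader = res.body
		.pipeThrough(new TextDecoderStream())
		.pipeThrough(splitStream('\n'))
		.getReader();

	const textStream = await createOpenAITextStream(reader);
	for await (const update of textStream) {
		if (update.done) {
			break;
		}
		onDelta(update.value); // large deltas arrive here as 1-3 character chunks
	}
}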
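A rough, hypothetical smoke test of the chunking behaviour, again not part of the commit: it fakes the line-split reader by hand and checks that a delta longer than five characters comes back as chunks of at most three characters that reassemble into the original text. mockReader and checkChunking are made-up names for illustration.

// Hypothetical smoke test for the chunking behaviour, not part of the diff above.
import { createOpenAITextStream } from '$lib/apis/streaming';

// Minimal stand-in for the reader produced by splitStream('\n') + getReader():
// yields one SSE payload line plus the [DONE] sentinel, then reports done.
function mockReader(lines: string[]): ReadableStreamDefaultReader {
	let sent = false;
	return {
		read: async () => {
			if (sent) return { done: true, value: undefined };
			sent = true;
			return { done: false, value: lines.join('\n') };
		}
	} as unknown as ReadableStreamDefaultReader;
}

async function checkChunking() {
	const delta = 'Hello, streaming world!';
	const payload = `data: ${JSON.stringify({ choices: [{ delta: { content: delta } }] })}`;

	const stream = await createOpenAITextStream(mockReader([payload, 'data: [DONE]']));
	let rebuilt = '';
	for await (const update of stream) {
		if (update.done) break;
		console.assert(update.value.length <= 3, 'large deltas should arrive as 1-3 character chunks');
		rebuilt += update.value;
	}
	console.assert(rebuilt === delta, 'chunks should reassemble the original delta');
}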
@@ -39,6 +39,7 @@
 	import { RAGTemplate } from '$lib/utils/rag';
 	import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
 	import { WEBUI_BASE_URL } from '$lib/constants';
+	import { createOpenAITextStream } from '$lib/apis/streaming';

 	const i18n = getContext('i18n');

@@ -599,38 +600,22 @@
 					.pipeThrough(splitStream('\n'))
 					.getReader();

-				while (true) {
-					const { value, done } = await reader.read();
+				const textStream = await createOpenAITextStream(reader);
+				console.log(textStream);
+
+				for await (const update of textStream) {
+					const { value, done } = update;
 					if (done || stopResponseFlag || _chatId !== $chatId) {
 						responseMessage.done = true;
 						messages = messages;
 						break;
 					}

-					try {
-						let lines = value.split('\n');
-
-						for (const line of lines) {
-							if (line !== '') {
-								console.log(line);
-								if (line === 'data: [DONE]') {
-									responseMessage.done = true;
-									messages = messages;
-								} else {
-									let data = JSON.parse(line.replace(/^data: /, ''));
-									console.log(data);
-
-									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
-										continue;
-									} else {
-										responseMessage.content += data.choices[0].delta.content ?? '';
-										messages = messages;
-									}
-								}
-							}
-						}
-					} catch (error) {
-						console.log(error);
+					if (responseMessage.content == '' && value == '\n') {
+						continue;
+					} else {
+						responseMessage.content += value;
+						messages = messages;
 					}

 					if ($settings.notificationEnabled && !document.hasFocus()) {
@@ -42,6 +42,7 @@
 		OLLAMA_API_BASE_URL,
 		WEBUI_BASE_URL
 	} from '$lib/constants';
+	import { createOpenAITextStream } from '$lib/apis/streaming';

 	const i18n = getContext('i18n');

@@ -551,9 +552,9 @@
 				messages: [
 					$settings.system
 						? {
-								role: 'system',
-								content: $settings.system
-						  }
+								role: 'system',
+								content: $settings.system
+						  }
 						: undefined,
 					...messages
 				]

@@ -611,38 +612,22 @@
 					.pipeThrough(splitStream('\n'))
 					.getReader();

-				while (true) {
-					const { value, done } = await reader.read();
+				const textStream = await createOpenAITextStream(reader);
+				console.log(textStream);
+
+				for await (const update of textStream) {
+					const { value, done } = update;
 					if (done || stopResponseFlag || _chatId !== $chatId) {
 						responseMessage.done = true;
 						messages = messages;
 						break;
 					}

-					try {
-						let lines = value.split('\n');
-
-						for (const line of lines) {
-							if (line !== '') {
-								console.log(line);
-								if (line === 'data: [DONE]') {
-									responseMessage.done = true;
-									messages = messages;
-								} else {
-									let data = JSON.parse(line.replace(/^data: /, ''));
-									console.log(data);
-
-									if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
-										continue;
-									} else {
-										responseMessage.content += data.choices[0].delta.content ?? '';
-										messages = messages;
-									}
-								}
-							}
-						}
-					} catch (error) {
-						console.log(error);
+					if (responseMessage.content == '' && value == '\n') {
+						continue;
+					} else {
+						responseMessage.content += value;
+						messages = messages;
 					}

 					if ($settings.notificationEnabled && !document.hasFocus()) {