Merge pull request #1798 from cheahjs/feat/abort-openai-responses-on-stop

feat: abort openai text completion when stopping responses
Timothy Jaeryang Baek 2024-04-27 14:15:14 -07:00 committed by GitHub
commit 2f8164d75f
4 changed files with 20 additions and 8 deletions
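
The pattern behind the diff below: the OpenAI completion helper now owns an AbortController, passes its signal to fetch, and returns the controller together with the Response, so callers can cancel the in-flight streamed request when the user presses Stop. A minimal sketch of that shape, using illustrative names (streamChatCompletion, payload) rather than the repository's exact code:

// Minimal sketch, assuming a browser-style environment with global fetch/AbortController.
// Error handling is simplified compared to the real helper.
const streamChatCompletion = async (
	token: string,
	body: object,
	url: string
): Promise<[Response | null, AbortController]> => {
	const controller = new AbortController();
	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal, // aborting the controller cancels the request and its body stream
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch(() => null);
	return [res, controller];
};

Each call site then destructures the tuple and calls controller.abort('User: Stop Response') when the stop flag is set, as the file diffs below show.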


@@ -211,10 +211,12 @@ export const generateOpenAIChatCompletion = async (
 	token: string = '',
 	body: object,
 	url: string = OPENAI_API_BASE_URL
-) => {
+): Promise<[Response | null, AbortController]> => {
+	const controller = new AbortController();
 	let error = null;

 	const res = await fetch(`${url}/chat/completions`, {
+		signal: controller.signal,
 		method: 'POST',
 		headers: {
 			Authorization: `Bearer ${token}`,
@@ -231,7 +233,7 @@ export const generateOpenAIChatCompletion = async (
 		throw error;
 	}

-	return res;
+	return [res, controller];
 };

 export const synthesizeOpenAISpeech = async (
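
Returning the controller alongside the Response (rather than, say, storing it in module state) gives every call site its own cancel handle without changing the helper's error path. Aborting also errors the response body stream, so any read that is still pending rejects with the abort reason; the call sites below avoid that by aborting only when they are about to break out of the read loop.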


@@ -532,7 +532,7 @@

 		console.log(model);

-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -608,6 +608,11 @@
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
+
+					if (stopResponseFlag) {
+						controller.abort('User: Stop Response');
+					}
+
 					break;
 				}



@@ -544,7 +544,7 @@

 		console.log(docs);

-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -620,6 +620,11 @@
 				if (done || stopResponseFlag || _chatId !== $chatId) {
 					responseMessage.done = true;
 					messages = messages;
+
+					if (stopResponseFlag) {
+						controller.abort('User: Stop Response');
+					}
+
 					break;
 				}



@@ -67,7 +67,7 @@
 	const textCompletionHandler = async () => {
 		const model = $models.find((model) => model.id === selectedModelId);

-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -96,7 +96,7 @@
 			const { value, done } = await reader.read();
 			if (done || stopResponseFlag) {
 				if (stopResponseFlag) {
-					await cancelOllamaRequest(localStorage.token, currentRequestId);
+					controller.abort('User: Stop Response');
 				}

 				currentRequestId = null;
@@ -135,7 +135,7 @@
 	const chatCompletionHandler = async () => {
 		const model = $models.find((model) => model.id === selectedModelId);

-		const res = await generateOpenAIChatCompletion(
+		const [res, controller] = await generateOpenAIChatCompletion(
 			localStorage.token,
 			{
 				model: model.id,
@@ -182,7 +182,7 @@
 			const { value, done } = await reader.read();
 			if (done || stopResponseFlag) {
 				if (stopResponseFlag) {
-					await cancelOllamaRequest(localStorage.token, currentRequestId);
+					controller.abort('User: Stop Response');
 				}

 				currentRequestId = null;
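
In the playground handlers the abort call replaces the Ollama-specific cancelOllamaRequest path for OpenAI completions, while currentRequestId is still cleared, presumably because the Ollama code path continues to rely on it. As in the chat pages, the handlers abort and then immediately break, so the now-errored reader is never read again.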