forked from open-webui/open-webui
feat: title auto generation for external models
This commit is contained in:
parent
a1fc2f4df0
commit
3edc547389
3 changed files with 122 additions and 30 deletions
@@ -263,3 +263,53 @@ export const synthesizeOpenAISpeech = async (
 	return res;
 };
 
+export const generateTitle = async (
+	token: string = '',
+	template: string,
+	model: string,
+	prompt: string,
+	url: string = OPENAI_API_BASE_URL
+) => {
+	let error = null;
+
+	template = template.replace(/{{prompt}}/g, prompt);
+
+	console.log(template);
+
+	const res = await fetch(`${url}/chat/completions`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			model: model,
+			messages: [
+				{
+					role: 'user',
+					content: template
+				}
+			],
+			stream: false
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res?.choices[0]?.message?.content ?? 'New Chat';
+};
+
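For orientation, a minimal sketch of how this new helper ends up being invoked (the model ID and user message below are illustrative; the real call site is added in the chat page hunks at the end of this diff):

// Illustrative call only; it mirrors the call site added further down and is not code from this commit.
const title = await generateTitle(
	localStorage.token,
	"Create a concise, 3-5 word phrase as a header for the following query: {{prompt}}",
	'gpt-3.5-turbo', // hypothetical external model ID
	'How do I run Open WebUI behind a reverse proxy?', // the user's first message
	OPENAI_API_BASE_URL // or `${LITELLM_API_BASE_URL}/v1`, or `${OLLAMA_API_BASE_URL}/v1`
);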
@@ -1,7 +1,7 @@
 <script lang="ts">
 	import { getBackendConfig } from '$lib/apis';
 	import { setDefaultPromptSuggestions } from '$lib/apis/configs';
-	import { config, models, user } from '$lib/stores';
+	import { config, models, settings, user } from '$lib/stores';
 	import { createEventDispatcher, onMount, getContext } from 'svelte';
 	import { toast } from 'svelte-sonner';
 	const dispatch = createEventDispatcher();
@@ -14,6 +14,7 @@
 	let titleAutoGenerate = true;
 	let responseAutoCopy = false;
 	let titleAutoGenerateModel = '';
+	let titleAutoGenerateModelExternal = '';
 	let fullScreenMode = false;
 	let titleGenerationPrompt = '';
 
@@ -33,7 +34,12 @@
 
 	const toggleTitleAutoGenerate = async () => {
 		titleAutoGenerate = !titleAutoGenerate;
-		saveSettings({ titleAutoGenerate: titleAutoGenerate });
+		saveSettings({
+			title: {
+				...$settings.title,
+				auto: titleAutoGenerate
+			}
+		});
 	};
 
 	const toggleResponseAutoCopy = async () => {
@@ -65,8 +71,13 @@
 		}
 
 		saveSettings({
-			titleAutoGenerateModel: titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined,
-			titleGenerationPrompt: titleGenerationPrompt ? titleGenerationPrompt : undefined
+			title: {
+				...$settings.title,
+				model: titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined,
+				modelExternal:
+					titleAutoGenerateModelExternal !== '' ? titleAutoGenerateModelExternal : undefined,
+				prompt: titleGenerationPrompt ? titleGenerationPrompt : undefined
+			}
 		});
 	};
 
@@ -77,16 +88,18 @@
 
 		let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
 
-		titleAutoGenerate = settings.titleAutoGenerate ?? true;
-		responseAutoCopy = settings.responseAutoCopy ?? false;
-		showUsername = settings.showUsername ?? false;
-		fullScreenMode = settings.fullScreenMode ?? false;
-		titleAutoGenerateModel = settings.titleAutoGenerateModel ?? '';
+		titleAutoGenerate = settings?.title?.auto ?? true;
+		titleAutoGenerateModel = settings?.title?.model ?? '';
+		titleAutoGenerateModelExternal = settings?.title?.modelExternal ?? '';
 		titleGenerationPrompt =
-			settings.titleGenerationPrompt ??
+			settings?.title?.prompt ??
 			$i18n.t(
 				"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 			) + ' {{prompt}}';
+
+		responseAutoCopy = settings.responseAutoCopy ?? false;
+		showUsername = settings.showUsername ?? false;
+		fullScreenMode = settings.fullScreenMode ?? false;
 	});
 </script>
 
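Taken together, the settings hunks above migrate the flat title fields into a nested `title` object in the persisted settings. Sketched as a TypeScript type (the field names come from this diff; the type declaration itself is illustrative and not part of the commit):

// Illustrative shape of the persisted title settings after this change.
type TitleSettings = {
	auto?: boolean; // was settings.titleAutoGenerate
	model?: string; // was settings.titleAutoGenerateModel (local/Ollama models)
	modelExternal?: string; // new: model used when the active chat model is external
	prompt?: string; // was settings.titleGenerationPrompt
};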
@@ -190,8 +203,9 @@
 
 			<div>
 				<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Set Title Auto-Generation Model')}</div>
-				<div class="flex w-full">
-					<div class="flex-1 mr-2">
+				<div class="flex w-full gap-2 pr-2">
+					<div class="flex-1">
+						<div class=" text-xs mb-1">Local Models</div>
 						<select
 							class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
 							bind:value={titleAutoGenerateModel}
@@ -207,6 +221,24 @@
 							{/each}
 						</select>
 					</div>
 
+					<div class="flex-1">
+						<div class=" text-xs mb-1">External Models</div>
+						<select
+							class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
+							bind:value={titleAutoGenerateModelExternal}
+							placeholder={$i18n.t('Select a model')}
+						>
+							<option value="" selected>{$i18n.t('Current Model')}</option>
+							{#each $models as model}
+								{#if model.name !== 'hr'}
+									<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
+										{model.name}
+									</option>
+								{/if}
+							{/each}
+						</select>
+					</div>
+				</div>
 
 				<div class="mt-3 mr-2">
@@ -19,7 +19,7 @@
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
-	import { generateChatCompletion, cancelOllamaRequest, generateTitle } from '$lib/apis/ollama';
+	import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
 	import {
 		addTagById,
 		createNewChat,
@@ -30,14 +30,14 @@
 		updateChatById
 	} from '$lib/apis/chats';
 	import { queryCollection, queryDoc } from '$lib/apis/rag';
-	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+	import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';
 	import Messages from '$lib/components/chat/Messages.svelte';
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { RAGTemplate } from '$lib/utils/rag';
-	import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
+	import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
 	import { WEBUI_BASE_URL } from '$lib/constants';
 
 	const i18n = getContext('i18n');
@@ -511,7 +511,8 @@
 
 		if (messages.length == 2 && messages.at(1).content !== '') {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await generateChatTitle(_chatId, userPrompt);
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
 
@@ -696,11 +697,8 @@
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
 
-			if ($settings?.titleAutoGenerateModel) {
-				await generateChatTitle(_chatId, userPrompt);
-			} else {
-				await setChatTitle(_chatId, userPrompt);
-			}
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
 
@@ -754,23 +752,35 @@
 		}
 	};
 
-	const generateChatTitle = async (_chatId, userPrompt) => {
-		if ($settings.titleAutoGenerate ?? true) {
+	const generateChatTitle = async (userPrompt) => {
+		if ($settings?.title?.auto ?? true) {
+			const model = $models.find((model) => model.id === selectedModels[0]);
+
+			const titleModelId =
+				model?.external ?? false
+					? $settings?.title?.modelExternal ?? selectedModels[0]
+					: $settings?.title?.model ?? selectedModels[0];
+			const titleModel = $models.find((model) => model.id === titleModelId);
+
+			console.log(titleModel);
 			const title = await generateTitle(
 				localStorage.token,
-				$settings?.titleGenerationPrompt ??
+				$settings?.title?.prompt ??
 					$i18n.t(
 						"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 					) + ' {{prompt}}',
-				$settings?.titleAutoGenerateModel ?? selectedModels[0],
-				userPrompt
+				titleModelId,
+				userPrompt,
+				titleModel?.external ?? false
+					? titleModel.source === 'litellm'
+						? `${LITELLM_API_BASE_URL}/v1`
+						: `${OPENAI_API_BASE_URL}`
+					: `${OLLAMA_API_BASE_URL}/v1`
 			);
 
-			if (title) {
-				await setChatTitle(_chatId, title);
-			}
+			return title;
 		} else {
-			await setChatTitle(_chatId, `${userPrompt}`);
+			return `${userPrompt}`;
 		}
 	};
 
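The endpoint routing introduced in the last hunk can be read as a small rule; a sketch using the constants imported above (`titleApiBaseUrl` is a hypothetical helper name, not code from this commit):

// Sketch: which /chat/completions base URL the title request is sent to.
const titleApiBaseUrl = (titleModel?: { external?: boolean; source?: string }) =>
	(titleModel?.external ?? false)
		? titleModel?.source === 'litellm'
			? `${LITELLM_API_BASE_URL}/v1`
			: OPENAI_API_BASE_URL
		: `${OLLAMA_API_BASE_URL}/v1`;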