Forked from open-webui/open-webui

feat: title auto generation for external models

parent: a1fc2f4df0
commit: 3edc547389

3 changed files with 122 additions and 30 deletions
@@ -263,3 +263,53 @@ export const synthesizeOpenAISpeech = async (
 	return res;
 };
+
+export const generateTitle = async (
+	token: string = '',
+	template: string,
+	model: string,
+	prompt: string,
+	url: string = OPENAI_API_BASE_URL
+) => {
+	let error = null;
+
+	template = template.replace(/{{prompt}}/g, prompt);
+
+	console.log(template);
+
+	const res = await fetch(`${url}/chat/completions`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			Authorization: `Bearer ${token}`
+		},
+		body: JSON.stringify({
+			model: model,
+			messages: [
+				{
+					role: 'user',
+					content: template
+				}
+			],
+			stream: false
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res?.choices[0]?.message?.content ?? 'New Chat';
+};
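A minimal call-site sketch for the new helper. The chat model id, template, and user prompt below are illustrative placeholders, not values from this commit; the token and base URL mirror how the rest of the client code calls the API.

// Hypothetical usage: ask an OpenAI-compatible endpoint to name a chat.
const title = await generateTitle(
	localStorage.token,
	'Create a short header for the following query: {{prompt}}',
	'gpt-3.5-turbo',
	'How do I configure a reverse proxy with nginx?',
	OPENAI_API_BASE_URL
);
// generateTitle resolves to the first choice's message content,
// or to 'New Chat' if the request fails or returns nothing.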
@@ -1,7 +1,7 @@
 <script lang="ts">
 	import { getBackendConfig } from '$lib/apis';
 	import { setDefaultPromptSuggestions } from '$lib/apis/configs';
-	import { config, models, user } from '$lib/stores';
+	import { config, models, settings, user } from '$lib/stores';
 	import { createEventDispatcher, onMount, getContext } from 'svelte';
 	import { toast } from 'svelte-sonner';
 	const dispatch = createEventDispatcher();
@@ -14,6 +14,7 @@
 	let titleAutoGenerate = true;
 	let responseAutoCopy = false;
 	let titleAutoGenerateModel = '';
+	let titleAutoGenerateModelExternal = '';
 	let fullScreenMode = false;
 	let titleGenerationPrompt = '';
 
@@ -33,7 +34,12 @@
 
 	const toggleTitleAutoGenerate = async () => {
 		titleAutoGenerate = !titleAutoGenerate;
-		saveSettings({ titleAutoGenerate: titleAutoGenerate });
+		saveSettings({
+			title: {
+				...$settings.title,
+				auto: titleAutoGenerate
+			}
+		});
 	};
 
 	const toggleResponseAutoCopy = async () => {
@@ -65,8 +71,13 @@
 		}
 
 		saveSettings({
-			titleAutoGenerateModel: titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined,
-			titleGenerationPrompt: titleGenerationPrompt ? titleGenerationPrompt : undefined
+			title: {
+				...$settings.title,
+				model: titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined,
+				modelExternal:
+					titleAutoGenerateModelExternal !== '' ? titleAutoGenerateModelExternal : undefined,
+				prompt: titleGenerationPrompt ? titleGenerationPrompt : undefined
+			}
 		});
 	};
 
@@ -77,16 +88,18 @@
 
 		let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
 
-		titleAutoGenerate = settings.titleAutoGenerate ?? true;
-		responseAutoCopy = settings.responseAutoCopy ?? false;
-		showUsername = settings.showUsername ?? false;
-		fullScreenMode = settings.fullScreenMode ?? false;
-		titleAutoGenerateModel = settings.titleAutoGenerateModel ?? '';
+		titleAutoGenerate = settings?.title?.auto ?? true;
+		titleAutoGenerateModel = settings?.title?.model ?? '';
+		titleAutoGenerateModelExternal = settings?.title?.modelExternal ?? '';
 		titleGenerationPrompt =
-			settings.titleGenerationPrompt ??
+			settings?.title?.prompt ??
 			$i18n.t(
 				"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 			) + ' {{prompt}}';
+
+		responseAutoCopy = settings.responseAutoCopy ?? false;
+		showUsername = settings.showUsername ?? false;
+		fullScreenMode = settings.fullScreenMode ?? false;
 	});
 </script>
 
@@ -190,8 +203,9 @@
 
 		<div>
 			<div class=" mb-2.5 text-sm font-medium">{$i18n.t('Set Title Auto-Generation Model')}</div>
-			<div class="flex w-full">
-				<div class="flex-1 mr-2">
+			<div class="flex w-full gap-2 pr-2">
+				<div class="flex-1">
+					<div class=" text-xs mb-1">Local Models</div>
 					<select
 						class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
 						bind:value={titleAutoGenerateModel}
@@ -207,6 +221,24 @@
 						{/each}
 					</select>
 				</div>
+
+				<div class="flex-1">
+					<div class=" text-xs mb-1">External Models</div>
+					<select
+						class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
+						bind:value={titleAutoGenerateModelExternal}
+						placeholder={$i18n.t('Select a model')}
+					>
+						<option value="" selected>{$i18n.t('Current Model')}</option>
+						{#each $models as model}
+							{#if model.name !== 'hr'}
+								<option value={model.name} class="bg-gray-100 dark:bg-gray-700">
+									{model.name}
+								</option>
+							{/if}
+						{/each}
+					</select>
+				</div>
 			</div>
 
 			<div class="mt-3 mr-2">
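The interface settings above now persist a nested `title` object in place of the old flat keys. A sketch of the shape that ends up in localStorage under `settings`, as read back in onMount(); the concrete values are illustrative only.

// Shape implied by the diff above; field values are hypothetical examples.
const settingsExample = {
	title: {
		auto: true,                     // titleAutoGenerate toggle
		model: 'llama2:latest',         // local title model id (example)
		modelExternal: 'gpt-3.5-turbo', // external title model id (example)
		prompt: "Create a concise, 3-5 word phrase as a header ... {{prompt}}"
	}
};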
@@ -19,7 +19,7 @@
 	} from '$lib/stores';
 	import { copyToClipboard, splitStream } from '$lib/utils';
 
-	import { generateChatCompletion, cancelOllamaRequest, generateTitle } from '$lib/apis/ollama';
+	import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
 	import {
 		addTagById,
 		createNewChat,
@@ -30,14 +30,14 @@
 		updateChatById
 	} from '$lib/apis/chats';
 	import { queryCollection, queryDoc } from '$lib/apis/rag';
-	import { generateOpenAIChatCompletion } from '$lib/apis/openai';
+	import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
 
 	import MessageInput from '$lib/components/chat/MessageInput.svelte';
 	import Messages from '$lib/components/chat/Messages.svelte';
 	import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
 	import Navbar from '$lib/components/layout/Navbar.svelte';
 	import { RAGTemplate } from '$lib/utils/rag';
-	import { LITELLM_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
+	import { LITELLM_API_BASE_URL, OLLAMA_API_BASE_URL, OPENAI_API_BASE_URL } from '$lib/constants';
 	import { WEBUI_BASE_URL } from '$lib/constants';
 
 	const i18n = getContext('i18n');
@@ -511,7 +511,8 @@
 
 		if (messages.length == 2 && messages.at(1).content !== '') {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await generateChatTitle(_chatId, userPrompt);
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
 
@@ -696,11 +697,8 @@
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
 
-			if ($settings?.titleAutoGenerateModel) {
-				await generateChatTitle(_chatId, userPrompt);
-			} else {
-				await setChatTitle(_chatId, userPrompt);
-			}
+			const _title = await generateChatTitle(userPrompt);
+			await setChatTitle(_chatId, _title);
 		}
 	};
 
@@ -754,23 +752,35 @@
 		}
 	};
 
-	const generateChatTitle = async (_chatId, userPrompt) => {
-		if ($settings.titleAutoGenerate ?? true) {
+	const generateChatTitle = async (userPrompt) => {
+		if ($settings?.title?.auto ?? true) {
+			const model = $models.find((model) => model.id === selectedModels[0]);
+
+			const titleModelId =
+				model?.external ?? false
+					? $settings?.title?.modelExternal ?? selectedModels[0]
+					: $settings?.title?.model ?? selectedModels[0];
+			const titleModel = $models.find((model) => model.id === titleModelId);
+
+			console.log(titleModel);
 			const title = await generateTitle(
 				localStorage.token,
-				$settings?.titleGenerationPrompt ??
+				$settings?.title?.prompt ??
 					$i18n.t(
 						"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
 					) + ' {{prompt}}',
-				$settings?.titleAutoGenerateModel ?? selectedModels[0],
-				userPrompt
+				titleModelId,
+				userPrompt,
+				titleModel?.external ?? false
+					? titleModel.source === 'litellm'
+						? `${LITELLM_API_BASE_URL}/v1`
+						: `${OPENAI_API_BASE_URL}`
+					: `${OLLAMA_API_BASE_URL}/v1`
 			);
 
-			if (title) {
-				await setChatTitle(_chatId, title);
-			}
+			return title;
 		} else {
-			await setChatTitle(_chatId, `${userPrompt}`);
+			return `${userPrompt}`;
 		}
 	};
 
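generateChatTitle now returns the title instead of setting it, and routes the request by where the chosen title model lives. A condensed sketch of that flow, restating the branch in the diff rather than adding code to the commit; the `/v1` paths are the OpenAI-compatible endpoints the helper posts `chat/completions` requests to.

// Condensed routing from the diff above (illustrative, not additional commit code):
const titleUrl = titleModel?.external
	? titleModel.source === 'litellm'
		? `${LITELLM_API_BASE_URL}/v1`  // LiteLLM proxy, OpenAI-compatible
		: OPENAI_API_BASE_URL           // direct OpenAI-compatible backend
	: `${OLLAMA_API_BASE_URL}/v1`;      // Ollama's OpenAI-compatible endpoint

const _title = await generateChatTitle(userPrompt); // now returns a string
await setChatTitle(_chatId, _title);                // caller applies the title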