fix: chat general

Timothy J. Baek 2024-01-02 16:06:11 -08:00
parent cb93038abf
commit 092884fec5
4 changed files with 20 additions and 6 deletions

@@ -298,7 +298,7 @@
     id="chat-textarea"
     class=" dark:bg-gray-800 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
         ? ''
-        : ' pl-4'} rounded-xl resize-none"
+        : ' pl-4'} rounded-xl resize-none h-[48px]"
     placeholder={speechRecognitionListening ? 'Listening...' : 'Send a message'}
     bind:value={prompt}
     on:keypress={(e) => {

@ -1,5 +1,6 @@
<script lang="ts"> <script lang="ts">
import { models, showSettings, settings } from '$lib/stores'; import { models, showSettings, settings } from '$lib/stores';
import { onMount, tick } from 'svelte';
import toast from 'svelte-french-toast'; import toast from 'svelte-french-toast';
export let selectedModels = ['']; export let selectedModels = [''];
@ -15,6 +16,12 @@
localStorage.setItem('settings', JSON.stringify($settings)); localStorage.setItem('settings', JSON.stringify($settings));
toast.success('Default model updated'); toast.success('Default model updated');
}; };
$: if (selectedModels.length > 0 && $models.length > 0) {
selectedModels = selectedModels.map((model) =>
$models.map((m) => m.name).includes(model) ? model : ''
);
}
</script> </script>
<div class="flex flex-col my-2"> <div class="flex flex-col my-2">
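
Note (not part of the commit): the reactive block added above clears any selected model name that no longer exists in $models, so a stale default restored from localStorage falls back to the empty placeholder instead of pointing at a missing model. A minimal stand-alone sketch of the same mapping, using a plain array in place of the $models store (the sample model names are made up):

    // Hypothetical stand-ins for the $models store value and the bound selection.
    const models = [{ name: 'llama2:latest' }, { name: 'mistral:latest' }];
    let selectedModels = ['llama2:latest', 'gpt-4']; // 'gpt-4' is no longer available

    // Same logic as the reactive statement: keep known names, blank out unknown ones.
    if (selectedModels.length > 0 && models.length > 0) {
        selectedModels = selectedModels.map((model) =>
            models.map((m) => m.name).includes(model) ? model : ''
        );
    }

    console.log(selectedModels); // ['llama2:latest', '']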

@@ -109,10 +109,14 @@
     await Promise.all(
         selectedModels.map(async (model) => {
             console.log(model);
-            if ($models.filter((m) => m.name === model)[0].external) {
+            const modelTag = $models.filter((m) => m.name === model).at(0);
+            if (modelTag?.external) {
                 await sendPromptOpenAI(model, prompt, parentId, _chatId);
-            } else {
+            } else if (modelTag) {
                 await sendPromptOllama(model, prompt, parentId, _chatId);
+            } else {
+                toast.error(`Model ${model} not found`);
             }
         })
     );
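
Note (not part of the commit): the old code indexed the filter result with [0], which throws a TypeError when no model matches the selected name; the new lookup uses .at(0) plus optional chaining, so a missing model falls through to the toast.error branch instead of crashing the send. A minimal sketch of the routing behaviour, using a plain array and console.error as stand-ins for the $models store and toast.error:

    interface Model { name: string; external?: boolean; }

    const models: Model[] = [
        { name: 'llama2:latest' },
        { name: 'gpt-3.5-turbo', external: true }
    ];

    function route(model: string): string {
        // .at(0) returns undefined instead of throwing when nothing matches,
        // which makes the optional chain below safe for unknown names.
        const modelTag = models.filter((m) => m.name === model).at(0);
        if (modelTag?.external) {
            return 'openai'; // the app would call sendPromptOpenAI here
        } else if (modelTag) {
            return 'ollama'; // the app would call sendPromptOllama here
        } else {
            console.error(`Model ${model} not found`); // toast.error in the app
            return 'not-found';
        }
    }

    console.log(route('gpt-3.5-turbo')); // 'openai'
    console.log(route('missing-model')); // logs the error, returns 'not-found'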

@@ -136,17 +136,20 @@
     await Promise.all(
         selectedModels.map(async (model) => {
             console.log(model);
-            if ($models.filter((m) => m.name === model)[0].external) {
+            const modelTag = $models.filter((m) => m.name === model).at(0);
+            if (modelTag?.external) {
                 await sendPromptOpenAI(model, prompt, parentId, _chatId);
-            } else {
+            } else if (modelTag) {
                 await sendPromptOllama(model, prompt, parentId, _chatId);
+            } else {
+                toast.error(`Model ${model} not found`);
             }
         })
     );

     await chats.set(await getChatList(localStorage.token));
 };

 const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
     // Create response message
     let responseMessageId = uuidv4();