forked from open-webui/open-webui
Commit d7ae3b1fd6
7 changed files with 333 additions and 246 deletions
@@ -35,6 +35,8 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
 
 - 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.
 
+- 🔄 **Multi-Modal Support**: Seamlessly engage with models that support multimodal interactions, including images (e.g., LLava).
+
 - 🧩 **Modelfile Builder**: Easily create Ollama modelfiles via the web UI. Create and add characters/agents, customize chat elements, and import modelfiles effortlessly through [OllamaHub](https://ollamahub.com/) integration.
 
 - ⚙️ **Many Models Conversations**: Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
@@ -234,7 +236,6 @@ See [TROUBLESHOOTING.md](/TROUBLESHOOTING.md) for information on how to troubles
 
 Here are some exciting tasks on our roadmap:
 
-- 🔄 **Multi-Modal Support**: Seamlessly engage with models that support multimodal interactions, including images (e.g., LLava).
 - 📚 **RAG Integration**: Experience first-class retrieval augmented generation support, enabling chat with your documents.
 - 🔐 **Access Control**: Securely manage requests to Ollama by utilizing the backend as a reverse proxy gateway, ensuring only authenticated users can send specific requests.
 - 🧪 **Research-Centric Features**: Empower researchers in the fields of LLM and HCI with a comprehensive web UI for conducting user studies. Stay tuned for ongoing feature enhancements (e.g., surveys, analytics, and participant tracking) to facilitate their research.
BIN demo.gif (binary file not shown): 2.5 MiB before, 6.3 MiB after.
@@ -14,7 +14,7 @@
 
 export let files = [];
 
-export let fileUploadEnabled = false;
+export let fileUploadEnabled = true;
 export let speechRecognitionEnabled = true;
 export let speechRecognitionListening = false;
 
@@ -84,11 +84,12 @@
 };
 </script>
 
-<div class="fixed bottom-0 w-full bg-white dark:bg-gray-800">
-<div class=" absolute right-0 left-0 bottom-0 mb-20">
-<div class="max-w-3xl px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0">
+<div class="fixed bottom-0 w-full">
+<div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
 {#if messages.length == 0 && suggestionPrompts.length !== 0}
+<div class="max-w-3xl">
 <Suggestions {suggestionPrompts} {submitPrompt} />
+</div>
 {/if}
 
 {#if autoScroll === false && messages.length > 0}
@@ -116,8 +117,7 @@
 </div>
 {/if}
 </div>
-</div>
-<div>
+<div class="bg-white dark:bg-gray-800">
 <div class="max-w-3xl px-2.5 -mb-0.5 mx-auto inset-x-0">
 <div class="bg-gradient-to-t from-white dark:from-gray-800 from-40% pb-2">
 <input
@@ -136,6 +136,7 @@
 }
 ];
 inputFiles = null;
+filesInputElement.value = '';
 };
 
 if (
@@ -15,6 +15,7 @@
 export let sendPrompt: Function;
 export let regenerateResponse: Function;
 
+export let bottomPadding = false;
 export let autoScroll;
 export let selectedModels;
 export let history = {};
@@ -31,6 +32,13 @@
 })();
 }
 
+$: if (autoScroll && bottomPadding) {
+(async () => {
+await tick();
+window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
+})();
+}
+
 const speakMessage = (message) => {
 const speak = new SpeechSynthesisUtterance(message);
 speechSynthesis.speak(speak);
@@ -184,7 +192,8 @@
 parentId: history.messages[messageId].parentId,
 childrenIds: [],
 role: 'user',
-content: userPrompt
+content: userPrompt,
+...(history.messages[messageId].files && { files: history.messages[messageId].files })
 };
 
 let messageParentId = history.messages[messageId].parentId;
@@ -425,6 +434,18 @@
 class="prose chat-{message.role} w-full max-w-full dark:prose-invert prose-headings:my-0 prose-p:my-0 prose-p:-mb-4 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-img:my-0 prose-ul:-my-4 prose-ol:-my-4 prose-li:-my-3 prose-ul:-mb-6 prose-ol:-mb-6 prose-li:-mb-4 whitespace-pre-line"
 >
 {#if message.role == 'user'}
+{#if message.files}
+<div class="my-3 w-full flex overflow-x-auto space-x-2">
+{#each message.files as file}
+<div>
+{#if file.type === 'image'}
+<img src={file.url} alt="input" class=" max-h-96 rounded-lg" />
+{/if}
+</div>
+{/each}
+</div>
+{/if}
+
 {#if message?.edit === true}
 <div class=" w-full">
 <textarea
@@ -458,17 +479,6 @@
 </div>
 {:else}
 <div class="w-full">
-{#if message.files}
-<div class="my-3">
-{#each message.files as file}
-<div>
-{#if file.type === 'image'}
-<img src={file.url} alt="input" class=" max-h-96" />
-{/if}
-</div>
-{/each}
-</div>
-{/if}
 <pre id="user-message">{message.content}</pre>
 
 <div class=" flex justify-start space-x-1">
@@ -655,7 +665,32 @@
 </div>
 {:else}
 <div class="w-full">
+{#if message?.error === true}
+<div
+class="flex mt-2 mb-4 space-x-2 border px-4 py-3 border-red-800 bg-red-800/30 font-medium rounded-lg"
+>
+<svg
+xmlns="http://www.w3.org/2000/svg"
+fill="none"
+viewBox="0 0 24 24"
+stroke-width="1.5"
+stroke="currentColor"
+class="w-5 h-5 self-center"
+>
+<path
+stroke-linecap="round"
+stroke-linejoin="round"
+d="M12 9v3.75m9-.75a9 9 0 11-18 0 9 9 0 0118 0zm-9 3.75h.008v.008H12v-.008z"
+/>
+</svg>
+
+<div class=" self-center">
+{message.content}
+</div>
+</div>
+{:else}
 {@html marked(message.content.replace('\\\\', '\\\\\\'))}
+{/if}
 
 {#if message.done}
 <div class=" flex justify-start space-x-1 -mt-2">
@@ -864,4 +899,8 @@
 </div>
 </div>
 {/each}
+
+{#if bottomPadding}
+<div class=" mb-10" />
+{/if}
 {/if}
@@ -751,11 +751,20 @@
 <div class=" mb-2.5 text-sm font-medium">Delete a model</div>
 <div class="flex w-full">
 <div class="flex-1 mr-2">
-<input
+<select
 class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
-placeholder="Enter model tag (e.g. mistral:7b)"
 bind:value={deleteModelTag}
-/>
+placeholder="Select a model"
+>
+{#if !deleteModelTag}
+<option value="" disabled selected>Select a model</option>
+{/if}
+{#each $models.filter((m) => m.size != null) as model}
+<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
+>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
+>
+{/each}
+</select>
 </div>
 <button
 class="px-3 bg-red-700 hover:bg-red-800 text-gray-100 rounded transition"
@@ -50,6 +50,10 @@
 messages = [];
 }
 
+$: if (files) {
+console.log(files);
+}
+
 onMount(async () => {
 await chatId.set(uuidv4());
 
@@ -106,7 +110,6 @@
 const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
 console.log('sendPromptOllama');
 let responseMessageId = uuidv4();
-
 let responseMessage = {
 parentId: parentId,
 id: responseMessageId,
@@ -126,38 +129,8 @@
 }
 
 await tick();
-
 window.scrollTo({ top: document.body.scrollHeight });
-
-// const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/generate`, {
-// method: 'POST',
-// headers: {
-// 'Content-Type': 'text/event-stream',
-// ...($settings.authHeader && { Authorization: $settings.authHeader }),
-// ...($user && { Authorization: `Bearer ${localStorage.token}` })
-// },
-// body: JSON.stringify({
-// model: model,
-// prompt: userPrompt,
-// system: $settings.system ?? undefined,
-// options: {
-// seed: $settings.seed ?? undefined,
-// temperature: $settings.temperature ?? undefined,
-// repeat_penalty: $settings.repeat_penalty ?? undefined,
-// top_k: $settings.top_k ?? undefined,
-// top_p: $settings.top_p ?? undefined,
-// num_ctx: $settings.num_ctx ?? undefined,
-// ...($settings.options ?? {})
-// },
-// format: $settings.requestFormat ?? undefined,
-// context:
-// history.messages[parentId] !== null &&
-// history.messages[parentId].parentId in history.messages
-// ? history.messages[history.messages[parentId].parentId]?.context ?? undefined
-// : undefined
-// })
-// });
 
 const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
 method: 'POST',
 headers: {
@@ -177,7 +150,15 @@
 ...messages
 ]
 .filter((message) => message)
-.map((message) => ({ role: message.role, content: message.content })),
+.map((message) => ({
+role: message.role,
+content: message.content,
+...(message.files && {
+images: message.files
+.filter((file) => file.type === 'image')
+.map((file) => file.url.slice(file.url.indexOf(',') + 1))
+})
+})),
 options: {
 seed: $settings.seed ?? undefined,
 temperature: $settings.temperature ?? undefined,
@@ -189,8 +170,12 @@
 },
 format: $settings.requestFormat ?? undefined
 })
+}).catch((err) => {
+console.log(err);
+return null;
 });
 
+if (res && res.ok) {
 const reader = res.body
 .pipeThrough(new TextDecoderStream())
 .pipeThrough(splitStream('\n'))
@@ -266,6 +251,27 @@
 history: history
 });
 }
+} else {
+if (res !== null) {
+const error = await res.json();
+console.log(error);
+if ('detail' in error) {
+toast.error(error.detail);
+responseMessage.content = error.detail;
+} else {
+toast.error(error.error);
+responseMessage.content = error.error;
+}
+} else {
+toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
+responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
+}
+
+responseMessage.error = true;
+responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
+responseMessage.done = true;
+messages = messages;
+}
 
 stopResponseFlag = false;
 await tick();
@@ -325,7 +331,27 @@
 ...messages
 ]
 .filter((message) => message)
-.map((message) => ({ role: message.role, content: message.content })),
+.map((message) => ({
+role: message.role,
+...(message.files
+? {
+content: [
+{
+type: 'text',
+text: message.content
+},
+...message.files
+.filter((file) => file.type === 'image')
+.map((file) => ({
+type: 'image_url',
+image_url: {
+url: file.url
+}
+}))
+]
+}
+: { content: message.content })
+})),
 temperature: $settings.temperature ?? undefined,
 top_p: $settings.top_p ?? undefined,
 num_ctx: $settings.num_ctx ?? undefined,
@@ -554,6 +580,7 @@
 bind:history
 bind:messages
 bind:autoScroll
+bottomPadding={files.length > 0}
 {sendPrompt}
 {regenerateResponse}
 />
@@ -561,8 +588,8 @@
 </div>
 
 <MessageInput
-bind:prompt
 bind:files
+bind:prompt
 bind:autoScroll
 suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
 {
@@ -51,17 +51,6 @@
 messages = [];
 }
 
-// onMount(async () => {
-// let chat = await loadChat();
-
-// await tick();
-// if (chat) {
-// loaded = true;
-// } else {
-// await goto('/');
-// }
-// });
-
 $: if ($page.params.id) {
 (async () => {
 let chat = await loadChat();
@@ -133,7 +122,6 @@
 const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
 console.log('sendPromptOllama');
 let responseMessageId = uuidv4();
-
 let responseMessage = {
 parentId: parentId,
 id: responseMessageId,
@@ -153,38 +141,8 @@
 }
 
 await tick();
-
 window.scrollTo({ top: document.body.scrollHeight });
-
-// const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/generate`, {
-// method: 'POST',
-// headers: {
-// 'Content-Type': 'text/event-stream',
-// ...($settings.authHeader && { Authorization: $settings.authHeader }),
-// ...($user && { Authorization: `Bearer ${localStorage.token}` })
-// },
-// body: JSON.stringify({
-// model: model,
-// prompt: userPrompt,
-// system: $settings.system ?? undefined,
-// options: {
-// seed: $settings.seed ?? undefined,
-// temperature: $settings.temperature ?? undefined,
-// repeat_penalty: $settings.repeat_penalty ?? undefined,
-// top_k: $settings.top_k ?? undefined,
-// top_p: $settings.top_p ?? undefined,
-// num_ctx: $settings.num_ctx ?? undefined,
-// ...($settings.options ?? {})
-// },
-// format: $settings.requestFormat ?? undefined,
-// context:
-// history.messages[parentId] !== null &&
-// history.messages[parentId].parentId in history.messages
-// ? history.messages[history.messages[parentId].parentId]?.context ?? undefined
-// : undefined
-// })
-// });
 
 const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
 method: 'POST',
 headers: {
@@ -204,7 +162,15 @@
 ...messages
 ]
 .filter((message) => message)
-.map((message) => ({ role: message.role, content: message.content })),
+.map((message) => ({
+role: message.role,
+content: message.content,
+...(message.files && {
+images: message.files
+.filter((file) => file.type === 'image')
+.map((file) => file.url.slice(file.url.indexOf(',') + 1))
+})
+})),
 options: {
 seed: $settings.seed ?? undefined,
 temperature: $settings.temperature ?? undefined,
@@ -216,8 +182,12 @@
 },
 format: $settings.requestFormat ?? undefined
 })
+}).catch((err) => {
+console.log(err);
+return null;
 });
 
+if (res && res.ok) {
 const reader = res.body
 .pipeThrough(new TextDecoderStream())
 .pipeThrough(splitStream('\n'))
@@ -293,6 +263,27 @@
 history: history
 });
 }
+} else {
+if (res !== null) {
+const error = await res.json();
+console.log(error);
+if ('detail' in error) {
+toast.error(error.detail);
+responseMessage.content = error.detail;
+} else {
+toast.error(error.error);
+responseMessage.content = error.error;
+}
+} else {
+toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
+responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
+}
+
+responseMessage.error = true;
+responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
+responseMessage.done = true;
+messages = messages;
+}
 
 stopResponseFlag = false;
 await tick();
@@ -352,7 +343,27 @@
 ...messages
 ]
 .filter((message) => message)
-.map((message) => ({ role: message.role, content: message.content })),
+.map((message) => ({
+role: message.role,
+...(message.files
+? {
+content: [
+{
+type: 'text',
+text: message.content
+},
+...message.files
+.filter((file) => file.type === 'image')
+.map((file) => ({
+type: 'image_url',
+image_url: {
+url: file.url
+}
+}))
+]
+}
+: { content: message.content })
+})),
 temperature: $settings.temperature ?? undefined,
 top_p: $settings.top_p ?? undefined,
 num_ctx: $settings.num_ctx ?? undefined,
@@ -367,12 +378,9 @@
 
 while (true) {
 const { value, done } = await reader.read();
-if (done || stopResponseFlag) {
-if (stopResponseFlag) {
+if (done || stopResponseFlag || _chatId !== $chatId) {
 responseMessage.done = true;
 messages = messages;
-}
-
 break;
 }
 
@@ -585,6 +593,7 @@
 bind:history
 bind:messages
 bind:autoScroll
+bottomPadding={files.length > 0}
 {sendPrompt}
 {regenerateResponse}
 />
@@ -592,6 +601,7 @@
 </div>
 
 <MessageInput
+bind:files
 bind:prompt
 bind:autoScroll
 suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
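The recurring change in both chat routes above is how uploaded image files are attached to outgoing requests: the Ollama /chat payload carries raw base64 strings in an images array, while the second request shape uses OpenAI-style image_url content parts alongside the text. Below is a minimal TypeScript sketch of that mapping, assuming the files entries carry base64 data URLs as in the Svelte code above; the ChatFile type and the helper names are illustrative only, not part of this commit.

// Illustrative sketch, not part of the diff: mapping uploaded image files
// onto the two request shapes seen in the hunks above.
type ChatFile = { type: string; url: string };

// Ollama /chat expects raw base64 strings in an `images` array,
// so the data-URL prefix ("data:image/png;base64,") is stripped.
const toOllamaImages = (files: ChatFile[]): string[] =>
	files
		.filter((file) => file.type === 'image')
		.map((file) => file.url.slice(file.url.indexOf(',') + 1));

// The OpenAI-style payload keeps the full data URL as an image_url content part
// next to the text part.
const toOpenAIContent = (content: string, files: ChatFile[]) => [
	{ type: 'text', text: content },
	...files
		.filter((file) => file.type === 'image')
		.map((file) => ({ type: 'image_url', image_url: { url: file.url } }))
];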