forked from open-webui/open-webui
commit 5ce421e7fa
11 changed files with 58 additions and 30 deletions
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.1.112] - 2024-03-15
+
+### Fixed
+
+- 🗨️ Resolved chat malfunction after image generation.
+- 🎨 Fixed various RAG issues.
+- 🧪 Rectified experimental broken GGUF upload logic.
+
 ## [0.1.111] - 2024-03-10
 
 ### Added
@@ -293,6 +293,7 @@ def generate_image(
             "size": form_data.size if form_data.size else app.state.IMAGE_SIZE,
             "response_format": "b64_json",
         }
 
         r = requests.post(
             url=f"https://api.openai.com/v1/images/generations",
             json=data,
@@ -300,7 +301,6 @@ def generate_image(
         )
 
         r.raise_for_status()
 
         res = r.json()
 
         images = []
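For reference, the request above asks the OpenAI images endpoint for base64-encoded output rather than hosted URLs. Below is a minimal standalone sketch of that request/decode cycle; the API key source, model name, and output filenames are illustrative placeholders, not values taken from this repository.

```python
import base64
import os

import requests

# Illustrative payload; open-webui assembles the equivalent from app.state settings.
data = {
    "model": "dall-e-2",            # assumed model name
    "prompt": "a watercolor fox",
    "n": 1,
    "size": "512x512",
    "response_format": "b64_json",  # return base64 image data instead of URLs
}

r = requests.post(
    url="https://api.openai.com/v1/images/generations",
    headers={"Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}"},
    json=data,
    timeout=60,
)
r.raise_for_status()

# Each entry in the response's "data" list carries the image bytes as base64
# under the "b64_json" key.
for i, item in enumerate(r.json()["data"]):
    with open(f"generated_{i}.png", "wb") as f:
        f.write(base64.b64decode(item["b64_json"]))
```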
@@ -356,7 +356,10 @@ def generate_image(
         return images
 
     except Exception as e:
-        print(e)
-        if r:
-            print(r.json())
-        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))
+        error = e
+        if r != None:
+            data = r.json()
+            if "error" in data:
+                error = data["error"]["message"]
+
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(error))
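The rewritten handler surfaces the provider's own error message instead of printing the exception and re-raising a generic one. A hedged, self-contained rendering of that pattern is below; the helper name, endpoint, and plain `str(error)` detail are stand-ins for illustration, not this repository's code.

```python
import requests
from fastapi import HTTPException


def post_json_or_400(url: str, payload: dict) -> dict:
    """POST a JSON payload and turn failures into a readable HTTP 400."""
    r = None
    try:
        r = requests.post(url, json=payload, timeout=30)
        r.raise_for_status()
        return r.json()
    except Exception as e:
        error = e
        # Prefer the JSON error body the provider returned (if any) over the
        # bare "400 Client Error" text produced by raise_for_status().
        if r is not None:
            try:
                data = r.json()
                if "error" in data:
                    error = data["error"]["message"]
            except ValueError:
                pass  # non-JSON body; keep the original exception
        raise HTTPException(status_code=400, detail=str(error))
```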
@@ -123,6 +123,7 @@ async def get_all_models():
                 map(lambda response: response["models"], responses)
             )
         }
 
         app.state.MODELS = {model["model"]: model for model in models["models"]}
 
         return models
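get_all_models fans out to every configured Ollama host, merges the returned model lists, and indexes them by name for quick lookup. A rough sketch of that aggregation is below, assuming placeholder hosts and an /api/tags-style response; the exact field names can differ between Ollama versions, so treat the shapes as illustrative.

```python
import asyncio

import aiohttp

# Placeholder hosts; open-webui reads these from its OLLAMA_BASE_URLS config.
OLLAMA_BASE_URLS = ["http://localhost:11434", "http://gpu-box:11434"]


async def fetch_models(session: aiohttp.ClientSession, base_url: str):
    """Return the host's model list, or None if the host is unreachable."""
    try:
        async with session.get(f"{base_url}/api/tags") as resp:
            return (await resp.json())["models"]
    except Exception:
        return None


async def get_all_models():
    async with aiohttp.ClientSession() as session:
        responses = await asyncio.gather(
            *(fetch_models(session, url) for url in OLLAMA_BASE_URLS)
        )

    responses = [r for r in responses if r is not None]

    # Flatten the per-host lists and index by model name; later hosts win on duplicates.
    models = {"models": [m for host_models in responses for m in host_models]}
    by_name = {model["model"]: model for model in models["models"]}
    return models, by_name


models, by_name = asyncio.run(get_all_models())
```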
@@ -181,11 +182,17 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
         responses = await asyncio.gather(*tasks)
         responses = list(filter(lambda x: x is not None, responses))
 
-        lowest_version = min(
-            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
-        )
-
-        return {"version": lowest_version["version"]}
+        if len(responses) > 0:
+            lowest_version = min(
+                responses, key=lambda x: tuple(map(int, x["version"].split(".")))
+            )
+
+            return {"version": lowest_version["version"]}
+        else:
+            raise HTTPException(
+                status_code=500,
+                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
+            )
     else:
         url = app.state.OLLAMA_BASE_URLS[url_idx]
         try:
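With several hosts configured, the UI reports the lowest Ollama version among them, and the comparison has to be numeric: as strings, "0.1.9" sorts above "0.1.28". A small self-contained illustration with made-up version payloads:

```python
# Made-up payloads in the shape of Ollama's /api/version response.
responses = [{"version": "0.1.28"}, {"version": "0.1.9"}, {"version": "0.2.0"}]

if len(responses) > 0:
    # Splitting "0.1.9" into the tuple (0, 1, 9) compares versions
    # component-wise, which plain string comparison would get wrong.
    lowest_version = min(
        responses, key=lambda x: tuple(map(int, x["version"].split(".")))
    )
    print(lowest_version["version"])  # -> 0.1.9
else:
    raise RuntimeError("no Ollama host answered the version probe")
```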
@@ -91,9 +91,8 @@ def query_collection(
 
 
 def rag_template(template: str, context: str, query: str):
-    template = re.sub(r"\[context\]", context, template)
-    template = re.sub(r"\[query\]", query, template)
+    template = template.replace("[context]", context)
+    template = template.replace("[query]", query)
 
     return template
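This swap is the substance of the "Fixed various RAG issues" entry: re.sub parses backslashes in the replacement text (group references like \1, escape sequences), so retrieved context containing backslashes could raise re.error or silently corrupt the prompt, while str.replace substitutes literally. A quick demonstration with a made-up template:

```python
import re

template = "Use the following context to answer the query.\n[context]\n\nQuery: [query]"
context = r"the report says \1 million users signed up"  # backslash came from retrieved text
query = "How many users signed up?"

# re.sub treats "\1" in the replacement as a group reference and raises
# re.error: invalid group reference 1 ...
try:
    re.sub(r"\[context\]", context, template)
except re.error as exc:
    print("re.sub failed:", exc)

# str.replace is a literal substitution, so the backslash survives untouched.
prompt = template.replace("[context]", context).replace("[query]", query)
print(prompt)
```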
@@ -75,7 +75,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
             hashed = calculate_sha256(file)
             file.seek(0)
 
-            url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
+            url = f"{OLLAMA_BASE_URLS[0]}/api/blobs/sha256:{hashed}"
             response = requests.post(url, data=file)
 
             if response.ok:
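This path change is the GGUF upload fix from the changelog: Ollama's blob registration endpoint lives under /api/, so the old URL missed it and the upload failed before a model could be created from the blob. Below is a standalone sketch of hashing a local GGUF file and registering it; the host and filename are placeholders.

```python
import hashlib

import requests

OLLAMA_BASE_URL = "http://localhost:11434"  # placeholder host


def calculate_sha256(file_obj, chunk_size=1024 * 1024) -> str:
    """Stream-hash a file object so large GGUF files never sit fully in memory."""
    sha = hashlib.sha256()
    for chunk in iter(lambda: file_obj.read(chunk_size), b""):
        sha.update(chunk)
    return sha.hexdigest()


with open("model.gguf", "rb") as file:  # hypothetical local GGUF file
    hashed = calculate_sha256(file)
    file.seek(0)

    # Ollama expects the digest in the path and the raw bytes as the body;
    # a matching digest is answered with 201 Created.
    url = f"{OLLAMA_BASE_URL}/api/blobs/sha256:{hashed}"
    response = requests.post(url, data=file)
    response.raise_for_status()
```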
@@ -52,3 +52,4 @@ class ERROR_MESSAGES(str, Enum):
 
     MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
     OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found"
+    OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"
@@ -1,6 +1,6 @@
 {
 	"name": "open-webui",
-	"version": "0.1.111",
+	"version": "0.1.112",
 	"private": true,
 	"scripts": {
 		"dev": "vite dev --host",
@@ -116,11 +116,13 @@
 				class="flex flex-col h-full justify-between space-y-3 text-sm"
 				on:submit|preventDefault={async () => {
 					loading = true;
-					await updateOpenAIKey(localStorage.token, OPENAI_API_KEY);
+
+					if (imageGenerationEngine === 'openai') {
+						await updateOpenAIKey(localStorage.token, OPENAI_API_KEY);
+					}
 
 					await updateDefaultImageGenerationModel(localStorage.token, selectedModel);
-
-					await updateDefaultImageGenerationModel(localStorage.token, selectedModel);
 
 					await updateImageSize(localStorage.token, imageSize).catch((error) => {
 						toast.error(error);
 						return null;
@@ -140,7 +140,9 @@
 	};
 
 	const scrollToBottom = () => {
-		messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		if (messagesContainerElement) {
+			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		}
 	};
 
 	//////////////////////////
@@ -308,7 +310,7 @@
 				.map((file) => file.url.slice(file.url.indexOf(',') + 1));
 
 			// Add images array only if it contains elements
-			if (imageUrls && imageUrls.length > 0) {
+			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
 				baseMessage.images = imageUrls;
 			}
 
@@ -532,7 +534,8 @@
 			.filter((message) => message)
 			.map((message, idx, arr) => ({
 				role: message.role,
-				...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+				...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
+				message.role === 'user'
 					? {
 							content: [
 								{
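Both chat components now attach images only to user-role messages: after an image generation, the generated files hang off the assistant message, and echoing those back upstream is what broke the chat. A rough Python rendering of the rule the Svelte code applies when building the request body (the history and field names are illustrative only):

```python
# Illustrative chat history; "images" holds the base64 payloads the UI keeps per message.
history = [
    {"role": "user", "content": "draw a cat", "images": ["aGVsbG8="]},
    {"role": "assistant", "content": "Here you go!", "images": ["Z2VuZXJhdGVk"]},
    {"role": "user", "content": "Now describe it."},
]


def build_messages(history):
    """Shape the history for the backend, keeping images on user turns only."""
    messages = []
    for msg in history:
        base = {"role": msg["role"], "content": msg["content"]}
        images = msg.get("images") or []
        # Generated images live on assistant messages; sending them back to
        # the model is exactly what the 0.1.112 fix avoids.
        if images and msg["role"] == "user":
            base["images"] = images
        messages.append(base)
    return messages


print(build_messages(history))
```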
@@ -160,7 +160,9 @@
 	};
 
 	const scrollToBottom = () => {
-		messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		if (messagesContainerElement) {
+			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		}
 	};
 
 	//////////////////////////
@@ -321,7 +323,7 @@
 				.map((file) => file.url.slice(file.url.indexOf(',') + 1));
 
 			// Add images array only if it contains elements
-			if (imageUrls && imageUrls.length > 0) {
+			if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
 				baseMessage.images = imageUrls;
 			}
 
@@ -545,7 +547,8 @@
 			.filter((message) => message)
 			.map((message, idx, arr) => ({
 				role: message.role,
-				...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+				...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
+				message.role === 'user'
 					? {
 							content: [
 								{
@@ -688,7 +691,12 @@
 
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await setChatTitle(_chatId, userPrompt);
+
+			if ($settings?.titleAutoGenerateModel) {
+				await generateChatTitle(_chatId, userPrompt);
+			} else {
+				await setChatTitle(_chatId, userPrompt);
+			}
 		}
 	};
 
@@ -3,16 +3,13 @@
 @tailwind utilities;
 
 @layer base {
-	html {
+	html, pre {
 		font-family: -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto, Ubuntu,
 			Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
 			'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
 	}
 
 	pre {
-		font-family: -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto, Ubuntu,
-			Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
-			'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
 		white-space: pre-wrap;
 	}
 }