forked from open-webui/open-webui
Compare commits
13 commits
af55e94c4f...f7ec002fe6

Commits:
f7ec002fe6
8524e28589
7144e07122
336868c487
c7c81f538d
75607541c6
3790790a18
5a567ce4d0
7f78e58488
cc4b82a3f3
97842d037e
26a187f5ac
be3ab88c88
8 changed files with 52 additions and 26 deletions
.github/workflows/docker-build.yaml (7 changes)
@@ -12,13 +12,16 @@ on:
 # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
 env:
-  REGISTRY: ghcr.io
+  REGISTRY: git.depeuter.dev
   IMAGE_NAME: ${{ github.repository }}
+  RUNNER_TOOL_CACHE: /toolcache
 
 # There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
 jobs:
   build-and-push-image:
     runs-on: ubuntu-latest
+    container:
+      image: catthehacker/ubuntu:act-latest
     # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
     permissions:
       contents: read
@@ -39,7 +42,7 @@ jobs:
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          password: ${{ secrets.CI_TOKEN }}
 
       - name: Extract metadata for Docker images
         id: meta
.github/workflows/format-backend.yaml (6 changes)
@@ -8,15 +8,19 @@ jobs:
     name: 'Format Backend'
     env:
       PUBLIC_API_BASE_URL: ''
+      RUNNER_TOOL_CACHE: /toolcache
     runs-on: ubuntu-latest
     strategy:
       matrix:
         node-version:
           - latest
+        python-version: [ '3.12.2' ]
     steps:
       - uses: actions/checkout@v4
       - name: Use Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
       - name: Use Bun
         uses: oven-sh/setup-bun@v1
       - name: Install dependencies
@@ -293,6 +293,7 @@ def generate_image(
             "size": form_data.size if form_data.size else app.state.IMAGE_SIZE,
+            "response_format": "b64_json",
         }
 
         r = requests.post(
             url=f"https://api.openai.com/v1/images/generations",
             json=data,
@@ -300,7 +301,6 @@ def generate_image(
         )
 
         r.raise_for_status()
-
         res = r.json()
 
         images = []
@@ -356,7 +356,10 @@ def generate_image(
         return images
 
     except Exception as e:
-        print(e)
-        if r:
-            print(r.json())
-        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(e))
+        error = e
+
+        if r != None:
+            data = r.json()
+            if "error" in data:
+                error = data["error"]["message"]
+
+        raise HTTPException(status_code=400, detail=ERROR_MESSAGES.DEFAULT(error))
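
The new error handling above keeps the original exception but prefers the error message embedded in the response body when one is available. A minimal, self-contained sketch of that pattern, assuming a requests.Response in r and an OpenAI-style error payload; the helper name call_openai_images and the Authorization header handling are illustrative only, not taken from the diff:

import requests
from fastapi import HTTPException

def call_openai_images(data: dict, api_key: str) -> dict:
    # Hypothetical helper mirroring the pattern in the hunk above: remember the
    # raw exception, but surface the API's own message ("error" -> "message")
    # if a response body was received before the failure.
    r = None
    try:
        r = requests.post(
            url="https://api.openai.com/v1/images/generations",
            headers={"Authorization": f"Bearer {api_key}"},
            json={**data, "response_format": "b64_json"},
        )
        r.raise_for_status()
        return r.json()
    except Exception as e:
        error = e
        if r is not None:
            body = r.json()
            if "error" in body:
                error = body["error"]["message"]
        raise HTTPException(status_code=400, detail=str(error))
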
@@ -123,6 +123,7 @@ async def get_all_models():
             map(lambda response: response["models"], responses)
         )
     }
 
+    app.state.MODELS = {model["model"]: model for model in models["models"]}
 
     return models
@@ -181,11 +182,17 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
         responses = await asyncio.gather(*tasks)
         responses = list(filter(lambda x: x is not None, responses))
 
-        lowest_version = min(
-            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
-        )
-
-        return {"version": lowest_version["version"]}
+        if len(responses) > 0:
+            lowest_version = min(
+                responses, key=lambda x: tuple(map(int, x["version"].split(".")))
+            )
+
+            return {"version": lowest_version["version"]}
+        else:
+            raise HTTPException(
+                status_code=500,
+                detail=ERROR_MESSAGES.OLLAMA_NOT_FOUND,
+            )
     else:
         url = app.state.OLLAMA_BASE_URLS[url_idx]
         try:
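
The lowest-version selection above compares version strings numerically rather than lexically, by splitting on "." and mapping each component to an integer. A small standalone illustration of that key function (the sample data is invented for the example):

# Compare version strings as integer tuples, so "0.1.10" sorts after "0.1.9".
responses = [{"version": "0.1.22"}, {"version": "0.1.9"}, {"version": "0.1.10"}]

lowest_version = min(
    responses, key=lambda x: tuple(map(int, x["version"].split(".")))
)
print(lowest_version["version"])  # prints 0.1.9
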
@@ -52,3 +52,4 @@ class ERROR_MESSAGES(str, Enum):
 
     MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
     OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found"
+    OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama"
@@ -140,7 +140,9 @@
 	};
 
 	const scrollToBottom = () => {
-		messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		if (messagesContainerElement) {
+			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		}
 	};
 
 	//////////////////////////
@@ -308,7 +310,7 @@
 			.map((file) => file.url.slice(file.url.indexOf(',') + 1));
 
 		// Add images array only if it contains elements
-		if (imageUrls && imageUrls.length > 0) {
+		if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
 			baseMessage.images = imageUrls;
 		}
 
@@ -532,7 +534,8 @@
 			.filter((message) => message)
 			.map((message, idx, arr) => ({
 				role: message.role,
-				...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+				...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
+				message.role === 'user'
 					? {
 							content: [
 								{
@@ -160,7 +160,9 @@
 	};
 
 	const scrollToBottom = () => {
-		messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		if (messagesContainerElement) {
+			messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
+		}
 	};
 
 	//////////////////////////
@@ -321,7 +323,7 @@
 			.map((file) => file.url.slice(file.url.indexOf(',') + 1));
 
 		// Add images array only if it contains elements
-		if (imageUrls && imageUrls.length > 0) {
+		if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
 			baseMessage.images = imageUrls;
 		}
 
@@ -545,7 +547,8 @@
 			.filter((message) => message)
 			.map((message, idx, arr) => ({
 				role: message.role,
-				...(message.files?.filter((file) => file.type === 'image').length > 0 ?? false
+				...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
+				message.role === 'user'
 					? {
 							content: [
 								{
@@ -688,8 +691,13 @@
 
 		if (messages.length == 2) {
 			window.history.replaceState(history.state, '', `/c/${_chatId}`);
-			await setChatTitle(_chatId, userPrompt);
+
+			if ($settings?.titleAutoGenerateModel) {
+				await generateChatTitle(_chatId, userPrompt);
+			} else {
+				await setChatTitle(_chatId, userPrompt);
+			}
 		}
 	};
 
 	const stopResponse = () => {
@@ -3,16 +3,13 @@
 @tailwind utilities;
 
 @layer base {
-	html {
+	html, pre {
 		font-family: -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto, Ubuntu,
 			Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
 			'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
 	}
 
 	pre {
-		font-family: -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto, Ubuntu,
-			Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
-			'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
 		white-space: pre-wrap;
 	}
 }