forked from open-webui/open-webui
commit 92c98eda2e
85 changed files with 4506 additions and 879 deletions
@@ -4,6 +4,7 @@ module.exports = {
		'eslint:recommended',
		'plugin:@typescript-eslint/recommended',
		'plugin:svelte/recommended',
		'plugin:cypress/recommended',
		'prettier'
	],
	parser: '@typescript-eslint/parser',
.github/pull_request_template.md (vendored, 30 changes)

@@ -2,14 +2,16 @@

- [ ] **Description:** Briefly describe the changes in this pull request.
- [ ] **Changelog:** Ensure a changelog entry following the format of [Keep a Changelog](https://keepachangelog.com/) is added at the bottom of the PR description.
- [ ] **Documentation:** Have you updated relevant documentation?
- [ ] **Documentation:** Have you updated relevant documentation [Open WebUI Docs](https://github.com/open-webui/docs), or other documentation sources?
- [ ] **Dependencies:** Are there any new dependencies? Have you updated the dependency versions in the documentation?
- [ ] **Testing:** Have you written and run sufficient tests for the changes?
- [ ] **Code Review:** Have you self-reviewed your code and addressed any coding standard issues?

---

## Description

[Insert a brief description of the changes made in this pull request]
[Insert a brief description of the changes made in this pull request, including any relevant motivation and impact.]

---

@@ -17,16 +19,32 @@

### Added

- [List any new features or additions]
- [List any new features, functionalities, or additions]

### Fixed

- [List any fixes or corrections]
- [List any fixes, corrections, or bug fixes]

### Changed

- [List any changes or updates]
- [List any changes, updates, refactorings, or optimizations]

### Removed

- [List any removed features or files]
- [List any removed features, files, or deprecated functionalities]

### Security

- [List any new or updated security-related changes, including vulnerability fixes]

### Breaking Changes

- [List any breaking changes affecting compatibility or functionality]

---

### Additional Information

- [Insert any additional context, notes, or explanations for the changes]

- [Reference any related issues, commits, or other relevant information]
.github/workflows/integration-test.yml (vendored, new file, 55 lines)

@@ -0,0 +1,55 @@
name: Integration Test

on:
  push:
    branches:
      - main
      - dev
  pull_request:
    branches:
      - main
      - dev

jobs:
  cypress-run:
    name: Run Cypress Integration Tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4

      - name: Build and run Compose Stack
        run: |
          docker compose up --detach --build

      - name: Preload Ollama model
        run: |
          docker exec ollama ollama pull qwen:0.5b-chat-v1.5-q2_K

      - name: Cypress run
        uses: cypress-io/github-action@v6
        with:
          browser: chrome
          wait-on: 'http://localhost:3000'
          config: baseUrl=http://localhost:3000

      - uses: actions/upload-artifact@v4
        if: always()
        name: Upload Cypress videos
        with:
          name: cypress-videos
          path: cypress/videos
          if-no-files-found: ignore

      - name: Extract Compose logs
        if: always()
        run: |
          docker compose logs > compose-logs.txt

      - uses: actions/upload-artifact@v4
        if: always()
        name: Upload Compose logs
        with:
          name: compose-logs
          path: compose-logs.txt
          if-no-files-found: ignore
.gitignore (vendored, 6 changes)

@@ -297,4 +297,8 @@ dist
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
.pnp.*

# cypress artifacts
cypress/videos
cypress/screenshots
CHANGELOG.md (24 changes)

@@ -5,6 +5,30 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.122] - 2024-04-27

### Added

- **🌟 Enhanced RAG Pipeline**: Now with hybrid searching via 'BM25', reranking powered by 'CrossEncoder', and configurable relevance score thresholds.
- **🛢️ External Database Support**: Seamlessly connect to custom SQLite or Postgres databases using the 'DATABASE_URL' environment variable.
- **🌐 Remote ChromaDB Support**: Introducing the capability to connect to remote ChromaDB servers.
- **👨💼 Improved Admin Panel**: Admins can now conveniently check users' chat lists and last active status directly from the admin panel.
- **🎨 Splash Screen**: Introducing a loading splash screen for a smoother user experience.
- **🌍 Language Support Expansion**: Added support for Bangla (bn-BD), along with enhancements to Chinese, Spanish, and Ukrainian translations.
- **💻 Improved LaTeX Rendering Performance**: Enjoy faster rendering times for LaTeX equations.
- **🔧 More Environment Variables**: Explore additional environment variables in our documentation (https://docs.openwebui.com), including the 'ENABLE_LITELLM' option to manage memory usage.

### Fixed

- **🔧 Ollama Compatibility**: Resolved errors occurring when Ollama server version isn't an integer, such as SHA builds or RCs.
- **🐛 Various OpenAI API Issues**: Addressed several issues related to the OpenAI API.
- **🛑 Stop Sequence Issue**: Fixed the problem where the stop sequence with a backslash '\' was not functioning.
- **🔤 Font Fallback**: Corrected font fallback issue.

### Changed

- **⌨️ Prompt Input Behavior on Mobile**: Enter key prompt submission disabled on mobile devices for improved user experience.

## [0.1.121] - 2024-04-24

### Fixed
Dockerfile (12 changes)

@@ -8,8 +8,9 @@ ARG USE_CUDA_VER=cu121
# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers
# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard
# for better performance and multilingual support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB)
# IMPORTANT: If you change the default model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! You need to re-embed them.
# IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! You need to re-embed them.
ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
ARG USE_RERANKING_MODEL=""

######## WebUI frontend ########
FROM --platform=$BUILDPLATFORM node:21-alpine3.19 as build

@@ -30,6 +31,7 @@ ARG USE_CUDA
ARG USE_OLLAMA
ARG USE_CUDA_VER
ARG USE_EMBEDDING_MODEL
ARG USE_RERANKING_MODEL

## Basis ##
ENV ENV=prod \

@@ -38,7 +40,8 @@ ENV ENV=prod \
    USE_OLLAMA_DOCKER=${USE_OLLAMA} \
    USE_CUDA_DOCKER=${USE_CUDA} \
    USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \
    USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL}
    USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \
    USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL}

## Basis URL Config ##
ENV OLLAMA_BASE_URL="/ollama" \

@@ -62,8 +65,11 @@ ENV WHISPER_MODEL="base" \

## RAG Embedding model settings ##
ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \
    RAG_EMBEDDING_MODEL_DIR="/app/backend/data/cache/embedding/models" \
    RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \
    SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models"

## Hugging Face download cache ##
ENV HF_HOME="/app/backend/data/cache/embedding/models"
#### Other models ##########################################################

WORKDIR /app/backend
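The embedding cache settings above (RAG_EMBEDDING_MODEL_DIR, SENTENCE_TRANSFORMERS_HOME, HF_HOME) all point at the same directory inside the image. A minimal sketch, not part of this commit, of how sentence-transformers typically picks that cache up at runtime; the paths are illustrative:

```python
# Illustrative sketch: the env vars must be set before sentence_transformers is imported.
import os

os.environ["SENTENCE_TRANSFORMERS_HOME"] = "/app/backend/data/cache/embedding/models"
os.environ["HF_HOME"] = "/app/backend/data/cache/embedding/models"

from sentence_transformers import SentenceTransformer

# The default embedding model from USE_EMBEDDING_MODEL is loaded from (or downloaded
# into) the shared cache directory above.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
print(model.encode("hello world").shape)
```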
Makefile (22 changes)

@@ -1,27 +1,33 @@

ifneq ($(shell which docker-compose 2>/dev/null),)
    DOCKER_COMPOSE := docker-compose
else
    DOCKER_COMPOSE := docker compose
endif

install:
	@docker-compose up -d
	$(DOCKER_COMPOSE) up -d

remove:
	@chmod +x confirm_remove.sh
	@./confirm_remove.sh


start:
	@docker-compose start
	$(DOCKER_COMPOSE) start
startAndBuild:
	docker-compose up -d --build
	$(DOCKER_COMPOSE) up -d --build

stop:
	@docker-compose stop
	$(DOCKER_COMPOSE) stop

update:
	# Calls the LLM update script
	chmod +x update_ollama_models.sh
	@./update_ollama_models.sh
	@git pull
	@docker-compose down
	$(DOCKER_COMPOSE) down
	# Make sure the ollama-webui container is stopped before rebuilding
	@docker stop open-webui || true
	@docker-compose up --build -d
	@docker-compose start
	$(DOCKER_COMPOSE) up --build -d
	$(DOCKER_COMPOSE) start
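The Makefile change above picks the legacy docker-compose binary when it is on PATH and falls back to the Compose v2 plugin otherwise. The same detection, written as a small Python helper purely for illustration (not part of this commit):

```python
import shutil

def compose_command() -> list[str]:
    # Prefer the standalone docker-compose binary if it exists,
    # otherwise use the Compose v2 plugin ("docker compose").
    if shutil.which("docker-compose"):
        return ["docker-compose"]
    return ["docker", "compose"]

# Example: the argument list behind `make install`
print(compose_command() + ["up", "-d"])
```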
@@ -32,11 +32,15 @@ import logging
from config import (
    SRC_LOG_LEVELS,
    CACHE_DIR,
    IMAGE_GENERATION_ENGINE,
    ENABLE_IMAGE_GENERATION,
    AUTOMATIC1111_BASE_URL,
    COMFYUI_BASE_URL,
    IMAGES_OPENAI_API_BASE_URL,
    IMAGES_OPENAI_API_KEY,
    IMAGE_GENERATION_MODEL,
    IMAGE_SIZE,
    IMAGE_STEPS,
)


@@ -55,21 +59,21 @@ app.add_middleware(
    allow_headers=["*"],
)

app.state.ENGINE = ""
app.state.ENGINE = IMAGE_GENERATION_ENGINE
app.state.ENABLED = ENABLE_IMAGE_GENERATION

app.state.OPENAI_API_BASE_URL = IMAGES_OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = IMAGES_OPENAI_API_KEY

app.state.MODEL = ""
app.state.MODEL = IMAGE_GENERATION_MODEL


app.state.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
app.state.COMFYUI_BASE_URL = COMFYUI_BASE_URL


app.state.IMAGE_SIZE = "512x512"
app.state.IMAGE_STEPS = 50
app.state.IMAGE_SIZE = IMAGE_SIZE
app.state.IMAGE_STEPS = IMAGE_STEPS


@app.get("/config")
@@ -21,12 +21,15 @@ from utils.utils import get_verified_user, get_current_user, get_admin_user
|
|||
from config import SRC_LOG_LEVELS, ENV
|
||||
from constants import MESSAGES
|
||||
|
||||
import os
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["LITELLM"])
|
||||
|
||||
|
||||
from config import (
|
||||
MODEL_FILTER_ENABLED,
|
||||
ENABLE_LITELLM,
|
||||
ENABLE_MODEL_FILTER,
|
||||
MODEL_FILTER_LIST,
|
||||
DATA_DIR,
|
||||
LITELLM_PROXY_PORT,
|
||||
|
@@ -57,11 +60,20 @@ LITELLM_CONFIG_DIR = f"{DATA_DIR}/litellm/config.yaml"
|
|||
with open(LITELLM_CONFIG_DIR, "r") as file:
|
||||
litellm_config = yaml.safe_load(file)
|
||||
|
||||
|
||||
app.state.ENABLE = ENABLE_LITELLM
|
||||
app.state.CONFIG = litellm_config
|
||||
|
||||
# Global variable to store the subprocess reference
|
||||
background_process = None
|
||||
|
||||
CONFLICT_ENV_VARS = [
|
||||
# Uvicorn uses PORT, so LiteLLM might use it as well
|
||||
"PORT",
|
||||
# LiteLLM uses DATABASE_URL for Prisma connections
|
||||
"DATABASE_URL",
|
||||
]
|
||||
|
||||
|
||||
async def run_background_process(command):
|
||||
global background_process
|
||||
|
@@ -70,9 +82,11 @@ async def run_background_process(command):
|
|||
try:
|
||||
# Log the command to be executed
|
||||
log.info(f"Executing command: {command}")
|
||||
# Filter environment variables known to conflict with litellm
|
||||
env = {k: v for k, v in os.environ.items() if k not in CONFLICT_ENV_VARS}
|
||||
# Execute the command and create a subprocess
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
*command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||
*command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
|
||||
)
|
||||
background_process = process
|
||||
log.info("Subprocess started successfully.")
|
||||
|
@@ -130,7 +144,7 @@ async def startup_event():
|
|||
asyncio.create_task(start_litellm_background())
|
||||
|
||||
|
||||
app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
|
||||
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
|
||||
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
|
||||
|
||||
|
||||
|
@@ -198,49 +212,56 @@ async def update_config(form_data: LiteLLMConfigForm, user=Depends(get_admin_use
|
|||
@app.get("/models")
|
||||
@app.get("/v1/models")
|
||||
async def get_models(user=Depends(get_current_user)):
|
||||
while not background_process:
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
url = f"http://localhost:{LITELLM_PROXY_PORT}/v1"
|
||||
r = None
|
||||
try:
|
||||
r = requests.request(method="GET", url=f"{url}/models")
|
||||
r.raise_for_status()
|
||||
if app.state.ENABLE:
|
||||
while not background_process:
|
||||
await asyncio.sleep(0.1)
|
||||
|
||||
data = r.json()
|
||||
url = f"http://localhost:{LITELLM_PROXY_PORT}/v1"
|
||||
r = None
|
||||
try:
|
||||
r = requests.request(method="GET", url=f"{url}/models")
|
||||
r.raise_for_status()
|
||||
|
||||
if app.state.MODEL_FILTER_ENABLED:
|
||||
if user and user.role == "user":
|
||||
data["data"] = list(
|
||||
filter(
|
||||
lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
|
||||
data["data"],
|
||||
data = r.json()
|
||||
|
||||
if app.state.ENABLE_MODEL_FILTER:
|
||||
if user and user.role == "user":
|
||||
data["data"] = list(
|
||||
filter(
|
||||
lambda model: model["id"] in app.state.MODEL_FILTER_LIST,
|
||||
data["data"],
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return data
|
||||
except Exception as e:
|
||||
return data
|
||||
except Exception as e:
|
||||
|
||||
log.exception(e)
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
if r is not None:
|
||||
try:
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"External: {res['error']}"
|
||||
except:
|
||||
error_detail = f"External: {e}"
|
||||
log.exception(e)
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
if r is not None:
|
||||
try:
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"External: {res['error']}"
|
||||
except:
|
||||
error_detail = f"External: {e}"
|
||||
|
||||
return {
|
||||
"data": [
|
||||
{
|
||||
"id": model["model_name"],
|
||||
"object": "model",
|
||||
"created": int(time.time()),
|
||||
"owned_by": "openai",
|
||||
}
|
||||
for model in app.state.CONFIG["model_list"]
|
||||
],
|
||||
"object": "list",
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"data": [
|
||||
{
|
||||
"id": model["model_name"],
|
||||
"object": "model",
|
||||
"created": int(time.time()),
|
||||
"owned_by": "openai",
|
||||
}
|
||||
for model in app.state.CONFIG["model_list"]
|
||||
],
|
||||
"data": [],
|
||||
"object": "list",
|
||||
}
|
||||
|
||||
|
|
|
@@ -16,6 +16,7 @@ from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel, ConfigDict

import os
import re
import copy
import random
import requests

@@ -36,7 +37,7 @@ from utils.utils import decode_token, get_current_user, get_admin_user
from config import (
    SRC_LOG_LEVELS,
    OLLAMA_BASE_URLS,
    MODEL_FILTER_ENABLED,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    UPLOAD_DIR,
)

@@ -55,7 +56,7 @@ app.add_middleware(
)


app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS

@@ -168,7 +169,7 @@ async def get_ollama_tags(
    if url_idx == None:
        models = await get_all_models()

        if app.state.MODEL_FILTER_ENABLED:
        if app.state.ENABLE_MODEL_FILTER:
            if user.role == "user":
                models["models"] = list(
                    filter(

@@ -216,7 +217,9 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
    if len(responses) > 0:
        lowest_version = min(
            responses,
            key=lambda x: tuple(map(int, x["version"].split("-")[0].split("."))),
            key=lambda x: tuple(
                map(int, re.sub(r"^v|-.*", "", x["version"]).split("."))
            ),
        )

        return {"version": lowest_version["version"]}
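The new sort key above strips a leading "v" and everything from the first "-" onward before comparing versions numerically, which is what lets non-integer Ollama version strings (release candidates, SHA-suffixed builds) through. A small worked example, not part of this commit:

```python
import re

for version in ["0.1.32", "v0.1.32", "0.1.33-rc2", "0.1.30-sha.abc123"]:
    cleaned = re.sub(r"^v|-.*", "", version)
    # The cleaned string is purely numeric, so it is safe to turn into a comparable tuple.
    print(version, "->", tuple(map(int, cleaned.split("."))))
```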
@@ -24,7 +24,7 @@ from config import (
    OPENAI_API_BASE_URLS,
    OPENAI_API_KEYS,
    CACHE_DIR,
    MODEL_FILTER_ENABLED,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
)
from typing import List, Optional

@@ -45,7 +45,7 @@ app.add_middleware(
    allow_headers=["*"],
)

app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS

@@ -225,7 +225,7 @@ async def get_all_models():
async def get_models(url_idx: Optional[int] = None, user=Depends(get_current_user)):
    if url_idx == None:
        models = await get_all_models()
        if app.state.MODEL_FILTER_ENABLED:
        if app.state.ENABLE_MODEL_FILTER:
            if user.role == "user":
                models["data"] = list(
                    filter(
@@ -39,8 +39,6 @@ import json
|
|||
|
||||
import sentence_transformers
|
||||
|
||||
from apps.ollama.main import generate_ollama_embeddings, GenerateEmbeddingsForm
|
||||
|
||||
from apps.web.models.documents import (
|
||||
Documents,
|
||||
DocumentForm,
|
||||
|
@@ -48,9 +46,12 @@ from apps.web.models.documents import (
|
|||
)
|
||||
|
||||
from apps.rag.utils import (
|
||||
query_embeddings_doc,
|
||||
query_embeddings_collection,
|
||||
generate_openai_embeddings,
|
||||
get_model_path,
|
||||
get_embedding_function,
|
||||
query_doc,
|
||||
query_doc_with_hybrid_search,
|
||||
query_collection,
|
||||
query_collection_with_hybrid_search,
|
||||
)
|
||||
|
||||
from utils.misc import (
|
||||
|
@@ -60,13 +61,22 @@ from utils.misc import (
|
|||
extract_folders_after_data_docs,
|
||||
)
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
|
||||
from config import (
|
||||
SRC_LOG_LEVELS,
|
||||
UPLOAD_DIR,
|
||||
DOCS_DIR,
|
||||
RAG_TOP_K,
|
||||
RAG_RELEVANCE_THRESHOLD,
|
||||
RAG_EMBEDDING_ENGINE,
|
||||
RAG_EMBEDDING_MODEL,
|
||||
RAG_EMBEDDING_MODEL_AUTO_UPDATE,
|
||||
RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
|
||||
ENABLE_RAG_HYBRID_SEARCH,
|
||||
RAG_RERANKING_MODEL,
|
||||
PDF_EXTRACT_IMAGES,
|
||||
RAG_RERANKING_MODEL_AUTO_UPDATE,
|
||||
RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
|
||||
RAG_OPENAI_API_BASE_URL,
|
||||
RAG_OPENAI_API_KEY,
|
||||
DEVICE_TYPE,
|
||||
|
@@ -83,31 +93,75 @@ log.setLevel(SRC_LOG_LEVELS["RAG"])
|
|||
|
||||
app = FastAPI()
|
||||
|
||||
app.state.TOP_K = RAG_TOP_K
|
||||
app.state.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD
|
||||
|
||||
app.state.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
|
||||
|
||||
app.state.TOP_K = 4
|
||||
app.state.CHUNK_SIZE = CHUNK_SIZE
|
||||
app.state.CHUNK_OVERLAP = CHUNK_OVERLAP
|
||||
|
||||
|
||||
app.state.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
|
||||
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
|
||||
app.state.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
|
||||
app.state.RAG_TEMPLATE = RAG_TEMPLATE
|
||||
|
||||
app.state.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
|
||||
app.state.OPENAI_API_KEY = RAG_OPENAI_API_KEY
|
||||
|
||||
app.state.PDF_EXTRACT_IMAGES = False
|
||||
app.state.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES
|
||||
|
||||
if app.state.RAG_EMBEDDING_ENGINE == "":
|
||||
app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer(
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
device=DEVICE_TYPE,
|
||||
trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
|
||||
)
|
||||
|
||||
def update_embedding_model(
|
||||
embedding_model: str,
|
||||
update_model: bool = False,
|
||||
):
|
||||
if embedding_model and app.state.RAG_EMBEDDING_ENGINE == "":
|
||||
app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer(
|
||||
get_model_path(embedding_model, update_model),
|
||||
device=DEVICE_TYPE,
|
||||
trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
|
||||
)
|
||||
else:
|
||||
app.state.sentence_transformer_ef = None
|
||||
|
||||
|
||||
def update_reranking_model(
|
||||
reranking_model: str,
|
||||
update_model: bool = False,
|
||||
):
|
||||
if reranking_model:
|
||||
app.state.sentence_transformer_rf = sentence_transformers.CrossEncoder(
|
||||
get_model_path(reranking_model, update_model),
|
||||
device=DEVICE_TYPE,
|
||||
trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
|
||||
)
|
||||
else:
|
||||
app.state.sentence_transformer_rf = None
|
||||
|
||||
|
||||
update_embedding_model(
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
RAG_EMBEDDING_MODEL_AUTO_UPDATE,
|
||||
)
|
||||
|
||||
update_reranking_model(
|
||||
app.state.RAG_RERANKING_MODEL,
|
||||
RAG_RERANKING_MODEL_AUTO_UPDATE,
|
||||
)
|
||||
|
||||
|
||||
app.state.EMBEDDING_FUNCTION = get_embedding_function(
|
||||
app.state.RAG_EMBEDDING_ENGINE,
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
app.state.sentence_transformer_ef,
|
||||
app.state.OPENAI_API_KEY,
|
||||
app.state.OPENAI_API_BASE_URL,
|
||||
)
|
||||
|
||||
origins = ["*"]
|
||||
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=origins,
|
||||
|
@@ -134,6 +188,7 @@ async def get_status():
|
|||
"template": app.state.RAG_TEMPLATE,
|
||||
"embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
|
||||
"embedding_model": app.state.RAG_EMBEDDING_MODEL,
|
||||
"reranking_model": app.state.RAG_RERANKING_MODEL,
|
||||
}
|
||||
|
||||
|
||||
|
@@ -150,6 +205,11 @@ async def get_embedding_config(user=Depends(get_admin_user)):
|
|||
}
|
||||
|
||||
|
||||
@app.get("/reranking")
|
||||
async def get_reraanking_config(user=Depends(get_admin_user)):
|
||||
return {"status": True, "reranking_model": app.state.RAG_RERANKING_MODEL}
|
||||
|
||||
|
||||
class OpenAIConfigForm(BaseModel):
|
||||
url: str
|
||||
key: str
|
||||
|
@@ -170,22 +230,22 @@ async def update_embedding_config(
|
|||
)
|
||||
try:
|
||||
app.state.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
|
||||
app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model
|
||||
|
||||
if app.state.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
|
||||
app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model
|
||||
app.state.sentence_transformer_ef = None
|
||||
|
||||
if form_data.openai_config != None:
|
||||
app.state.OPENAI_API_BASE_URL = form_data.openai_config.url
|
||||
app.state.OPENAI_API_KEY = form_data.openai_config.key
|
||||
else:
|
||||
sentence_transformer_ef = sentence_transformers.SentenceTransformer(
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
device=DEVICE_TYPE,
|
||||
trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
|
||||
)
|
||||
app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model
|
||||
app.state.sentence_transformer_ef = sentence_transformer_ef
|
||||
|
||||
update_embedding_model(app.state.RAG_EMBEDDING_MODEL, True)
|
||||
|
||||
app.state.EMBEDDING_FUNCTION = get_embedding_function(
|
||||
app.state.RAG_EMBEDDING_ENGINE,
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
app.state.sentence_transformer_ef,
|
||||
app.state.OPENAI_API_KEY,
|
||||
app.state.OPENAI_API_BASE_URL,
|
||||
)
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
|
@@ -196,7 +256,6 @@ async def update_embedding_config(
|
|||
"key": app.state.OPENAI_API_KEY,
|
||||
},
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
log.exception(f"Problem updating embedding model: {e}")
|
||||
raise HTTPException(
|
||||
|
@@ -205,6 +264,34 @@ async def update_embedding_config(
|
|||
)
|
||||
|
||||
|
||||
class RerankingModelUpdateForm(BaseModel):
|
||||
reranking_model: str
|
||||
|
||||
|
||||
@app.post("/reranking/update")
|
||||
async def update_reranking_config(
|
||||
form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
|
||||
):
|
||||
log.info(
|
||||
f"Updating reranking model: {app.state.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
|
||||
)
|
||||
try:
|
||||
app.state.RAG_RERANKING_MODEL = form_data.reranking_model
|
||||
|
||||
update_reranking_model(app.state.RAG_RERANKING_MODEL, True)
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
"reranking_model": app.state.RAG_RERANKING_MODEL,
|
||||
}
|
||||
except Exception as e:
|
||||
log.exception(f"Problem updating reranking model: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=ERROR_MESSAGES.DEFAULT(e),
|
||||
)
|
||||
|
||||
|
||||
@app.get("/config")
|
||||
async def get_rag_config(user=Depends(get_admin_user)):
|
||||
return {
|
||||
|
@@ -257,12 +344,16 @@ async def get_query_settings(user=Depends(get_admin_user)):
|
|||
"status": True,
|
||||
"template": app.state.RAG_TEMPLATE,
|
||||
"k": app.state.TOP_K,
|
||||
"r": app.state.RELEVANCE_THRESHOLD,
|
||||
"hybrid": app.state.ENABLE_RAG_HYBRID_SEARCH,
|
||||
}
|
||||
|
||||
|
||||
class QuerySettingsForm(BaseModel):
|
||||
k: Optional[int] = None
|
||||
r: Optional[float] = None
|
||||
template: Optional[str] = None
|
||||
hybrid: Optional[bool] = None
|
||||
|
||||
|
||||
@app.post("/query/settings/update")
|
||||
|
@@ -271,13 +362,23 @@ async def update_query_settings(
|
|||
):
|
||||
app.state.RAG_TEMPLATE = form_data.template if form_data.template else RAG_TEMPLATE
|
||||
app.state.TOP_K = form_data.k if form_data.k else 4
|
||||
return {"status": True, "template": app.state.RAG_TEMPLATE}
|
||||
app.state.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0
|
||||
app.state.ENABLE_RAG_HYBRID_SEARCH = form_data.hybrid if form_data.hybrid else False
|
||||
return {
|
||||
"status": True,
|
||||
"template": app.state.RAG_TEMPLATE,
|
||||
"k": app.state.TOP_K,
|
||||
"r": app.state.RELEVANCE_THRESHOLD,
|
||||
"hybrid": app.state.ENABLE_RAG_HYBRID_SEARCH,
|
||||
}
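With the handler above, the query-settings update now round-trips the relevance threshold and the hybrid-search flag as well as the template and top-k. A hedged sketch of calling it; the base URL, mount path, and token are placeholders, not taken from this diff:

```python
import requests

resp = requests.post(
    "http://localhost:8080/rag/api/v1/query/settings/update",  # assumed mount point
    headers={"Authorization": "Bearer <admin-token>"},          # placeholder credentials
    json={"template": None, "k": 4, "r": 0.1, "hybrid": True},
)
# Expected echo per the handler above: status, template, k, r and hybrid.
print(resp.json())
```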
|
||||
|
||||
|
||||
class QueryDocForm(BaseModel):
|
||||
collection_name: str
|
||||
query: str
|
||||
k: Optional[int] = None
|
||||
r: Optional[float] = None
|
||||
hybrid: Optional[bool] = None
|
||||
|
||||
|
||||
@app.post("/query/doc")
|
||||
|
@@ -286,34 +387,22 @@ def query_doc_handler(
|
|||
user=Depends(get_current_user),
|
||||
):
|
||||
try:
|
||||
if app.state.RAG_EMBEDDING_ENGINE == "":
|
||||
query_embeddings = app.state.sentence_transformer_ef.encode(
|
||||
form_data.query
|
||||
).tolist()
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "ollama":
|
||||
query_embeddings = generate_ollama_embeddings(
|
||||
GenerateEmbeddingsForm(
|
||||
**{
|
||||
"model": app.state.RAG_EMBEDDING_MODEL,
|
||||
"prompt": form_data.query,
|
||||
}
|
||||
)
|
||||
if app.state.ENABLE_RAG_HYBRID_SEARCH:
|
||||
return query_doc_with_hybrid_search(
|
||||
collection_name=form_data.collection_name,
|
||||
query=form_data.query,
|
||||
embeddings_function=app.state.EMBEDDING_FUNCTION,
|
||||
reranking_function=app.state.sentence_transformer_rf,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
r=form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD,
|
||||
)
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "openai":
|
||||
query_embeddings = generate_openai_embeddings(
|
||||
model=app.state.RAG_EMBEDDING_MODEL,
|
||||
text=form_data.query,
|
||||
key=app.state.OPENAI_API_KEY,
|
||||
url=app.state.OPENAI_API_BASE_URL,
|
||||
else:
|
||||
return query_doc(
|
||||
collection_name=form_data.collection_name,
|
||||
query=form_data.query,
|
||||
embeddings_function=app.state.EMBEDDING_FUNCTION,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
)
|
||||
|
||||
return query_embeddings_doc(
|
||||
collection_name=form_data.collection_name,
|
||||
query=form_data.query,
|
||||
query_embeddings=query_embeddings,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
raise HTTPException(
|
||||
|
@@ -326,6 +415,8 @@ class QueryCollectionsForm(BaseModel):
|
|||
collection_names: List[str]
|
||||
query: str
|
||||
k: Optional[int] = None
|
||||
r: Optional[float] = None
|
||||
hybrid: Optional[bool] = None
|
||||
|
||||
|
||||
@app.post("/query/collection")
|
||||
|
@@ -334,33 +425,23 @@ def query_collection_handler(
|
|||
user=Depends(get_current_user),
|
||||
):
|
||||
try:
|
||||
if app.state.RAG_EMBEDDING_ENGINE == "":
|
||||
query_embeddings = app.state.sentence_transformer_ef.encode(
|
||||
form_data.query
|
||||
).tolist()
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "ollama":
|
||||
query_embeddings = generate_ollama_embeddings(
|
||||
GenerateEmbeddingsForm(
|
||||
**{
|
||||
"model": app.state.RAG_EMBEDDING_MODEL,
|
||||
"prompt": form_data.query,
|
||||
}
|
||||
)
|
||||
if app.state.ENABLE_RAG_HYBRID_SEARCH:
|
||||
return query_collection_with_hybrid_search(
|
||||
collection_names=form_data.collection_names,
|
||||
query=form_data.query,
|
||||
embeddings_function=app.state.EMBEDDING_FUNCTION,
|
||||
reranking_function=app.state.sentence_transformer_rf,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
r=form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD,
|
||||
)
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "openai":
|
||||
query_embeddings = generate_openai_embeddings(
|
||||
model=app.state.RAG_EMBEDDING_MODEL,
|
||||
text=form_data.query,
|
||||
key=app.state.OPENAI_API_KEY,
|
||||
url=app.state.OPENAI_API_BASE_URL,
|
||||
else:
|
||||
return query_collection(
|
||||
collection_names=form_data.collection_names,
|
||||
query=form_data.query,
|
||||
embeddings_function=app.state.EMBEDDING_FUNCTION,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
)
|
||||
|
||||
return query_embeddings_collection(
|
||||
collection_names=form_data.collection_names,
|
||||
query_embeddings=query_embeddings,
|
||||
k=form_data.k if form_data.k else app.state.TOP_K,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
raise HTTPException(
|
||||
|
@@ -427,8 +508,6 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b
|
|||
log.info(f"store_docs_in_vector_db {docs} {collection_name}")
|
||||
|
||||
texts = [doc.page_content for doc in docs]
|
||||
texts = list(map(lambda x: x.replace("\n", " "), texts))
|
||||
|
||||
metadatas = [doc.metadata for doc in docs]
|
||||
|
||||
try:
|
||||
|
@@ -440,27 +519,16 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b
|
|||
|
||||
collection = CHROMA_CLIENT.create_collection(name=collection_name)
|
||||
|
||||
if app.state.RAG_EMBEDDING_ENGINE == "":
|
||||
embeddings = app.state.sentence_transformer_ef.encode(texts).tolist()
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "ollama":
|
||||
embeddings = [
|
||||
generate_ollama_embeddings(
|
||||
GenerateEmbeddingsForm(
|
||||
**{"model": app.state.RAG_EMBEDDING_MODEL, "prompt": text}
|
||||
)
|
||||
)
|
||||
for text in texts
|
||||
]
|
||||
elif app.state.RAG_EMBEDDING_ENGINE == "openai":
|
||||
embeddings = [
|
||||
generate_openai_embeddings(
|
||||
model=app.state.RAG_EMBEDDING_MODEL,
|
||||
text=text,
|
||||
key=app.state.OPENAI_API_KEY,
|
||||
url=app.state.OPENAI_API_BASE_URL,
|
||||
)
|
||||
for text in texts
|
||||
]
|
||||
embedding_func = get_embedding_function(
|
||||
app.state.RAG_EMBEDDING_ENGINE,
|
||||
app.state.RAG_EMBEDDING_MODEL,
|
||||
app.state.sentence_transformer_ef,
|
||||
app.state.OPENAI_API_KEY,
|
||||
app.state.OPENAI_API_BASE_URL,
|
||||
)
|
||||
|
||||
embedding_texts = list(map(lambda x: x.replace("\n", " "), texts))
|
||||
embeddings = embedding_func(embedding_texts)
|
||||
|
||||
for batch in create_batches(
|
||||
api=CHROMA_CLIENT,
|
||||
|
|
|
@@ -1,3 +1,4 @@
|
|||
import os
|
||||
import logging
|
||||
import requests
|
||||
|
||||
|
@@ -8,6 +9,16 @@ from apps.ollama.main import (
|
|||
GenerateEmbeddingsForm,
|
||||
)
|
||||
|
||||
from huggingface_hub import snapshot_download
|
||||
|
||||
from langchain_core.documents import Document
|
||||
from langchain_community.retrievers import BM25Retriever
|
||||
from langchain.retrievers import (
|
||||
ContextualCompressionRetriever,
|
||||
EnsembleRetriever,
|
||||
)
|
||||
|
||||
from typing import Optional
|
||||
from config import SRC_LOG_LEVELS, CHROMA_CLIENT
|
||||
|
||||
|
||||
|
@@ -15,88 +26,164 @@ log.setLevel(SRC_LOG_LEVELS["RAG"])
|
|||
log.setLevel(SRC_LOG_LEVELS["RAG"])
|
||||
|
||||
|
||||
def query_embeddings_doc(collection_name: str, query: str, query_embeddings, k: int):
|
||||
def query_doc(
|
||||
collection_name: str,
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
):
|
||||
try:
|
||||
# if you use docker use the model from the environment variable
|
||||
log.info(f"query_embeddings_doc {query_embeddings}")
|
||||
collection = CHROMA_CLIENT.get_collection(name=collection_name)
|
||||
|
||||
query_embeddings = embedding_function(query)
|
||||
result = collection.query(
|
||||
query_embeddings=[query_embeddings],
|
||||
n_results=k,
|
||||
)
|
||||
|
||||
log.info(f"query_embeddings_doc:result {result}")
|
||||
log.info(f"query_doc:result {result}")
|
||||
return result
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
|
||||
def merge_and_sort_query_results(query_results, k):
|
||||
def query_doc_with_hybrid_search(
|
||||
collection_name: str,
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
reranking_function,
|
||||
r: int,
|
||||
):
|
||||
try:
|
||||
collection = CHROMA_CLIENT.get_collection(name=collection_name)
|
||||
documents = collection.get() # get all documents
|
||||
|
||||
bm25_retriever = BM25Retriever.from_texts(
|
||||
texts=documents.get("documents"),
|
||||
metadatas=documents.get("metadatas"),
|
||||
)
|
||||
bm25_retriever.k = k
|
||||
|
||||
chroma_retriever = ChromaRetriever(
|
||||
collection=collection,
|
||||
embedding_function=embedding_function,
|
||||
top_n=k,
|
||||
)
|
||||
|
||||
ensemble_retriever = EnsembleRetriever(
|
||||
retrievers=[bm25_retriever, chroma_retriever], weights=[0.5, 0.5]
|
||||
)
|
||||
|
||||
compressor = RerankCompressor(
|
||||
embedding_function=embedding_function,
|
||||
reranking_function=reranking_function,
|
||||
r_score=r,
|
||||
top_n=k,
|
||||
)
|
||||
|
||||
compression_retriever = ContextualCompressionRetriever(
|
||||
base_compressor=compressor, base_retriever=ensemble_retriever
|
||||
)
|
||||
|
||||
result = compression_retriever.invoke(query)
|
||||
result = {
|
||||
"distances": [[d.metadata.get("score") for d in result]],
|
||||
"documents": [[d.page_content for d in result]],
|
||||
"metadatas": [[d.metadata for d in result]],
|
||||
}
|
||||
log.info(f"query_doc_with_hybrid_search:result {result}")
|
||||
return result
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
|
||||
def merge_and_sort_query_results(query_results, k, reverse=False):
|
||||
# Initialize lists to store combined data
|
||||
combined_ids = []
|
||||
combined_distances = []
|
||||
combined_metadatas = []
|
||||
combined_documents = []
|
||||
combined_metadatas = []
|
||||
|
||||
# Combine data from each dictionary
|
||||
for data in query_results:
|
||||
combined_ids.extend(data["ids"][0])
|
||||
combined_distances.extend(data["distances"][0])
|
||||
combined_metadatas.extend(data["metadatas"][0])
|
||||
combined_documents.extend(data["documents"][0])
|
||||
combined_metadatas.extend(data["metadatas"][0])
|
||||
|
||||
# Create a list of tuples (distance, id, metadata, document)
|
||||
combined = list(
|
||||
zip(combined_distances, combined_ids, combined_metadatas, combined_documents)
|
||||
)
|
||||
# Create a list of tuples (distance, document, metadata)
|
||||
combined = list(zip(combined_distances, combined_documents, combined_metadatas))
|
||||
|
||||
# Sort the list based on distances
|
||||
combined.sort(key=lambda x: x[0])
|
||||
combined.sort(key=lambda x: x[0], reverse=reverse)
|
||||
|
||||
# Unzip the sorted list
|
||||
sorted_distances, sorted_ids, sorted_metadatas, sorted_documents = zip(*combined)
|
||||
# We don't have anything :-(
|
||||
if not combined:
|
||||
sorted_distances = []
|
||||
sorted_documents = []
|
||||
sorted_metadatas = []
|
||||
else:
|
||||
# Unzip the sorted list
|
||||
sorted_distances, sorted_documents, sorted_metadatas = zip(*combined)
|
||||
|
||||
# Slicing the lists to include only k elements
|
||||
sorted_distances = list(sorted_distances)[:k]
|
||||
sorted_ids = list(sorted_ids)[:k]
|
||||
sorted_metadatas = list(sorted_metadatas)[:k]
|
||||
sorted_documents = list(sorted_documents)[:k]
|
||||
# Slicing the lists to include only k elements
|
||||
sorted_distances = list(sorted_distances)[:k]
|
||||
sorted_documents = list(sorted_documents)[:k]
|
||||
sorted_metadatas = list(sorted_metadatas)[:k]
|
||||
|
||||
# Create the output dictionary
|
||||
merged_query_results = {
|
||||
"ids": [sorted_ids],
|
||||
result = {
|
||||
"distances": [sorted_distances],
|
||||
"metadatas": [sorted_metadatas],
|
||||
"documents": [sorted_documents],
|
||||
"embeddings": None,
|
||||
"uris": None,
|
||||
"data": None,
|
||||
"metadatas": [sorted_metadatas],
|
||||
}
|
||||
|
||||
return merged_query_results
|
||||
return result
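A short worked example, not from the commit, of what the reworked merge does: per-collection result sets are flattened, sorted by distance, and trimmed to k entries.

```python
a = {
    "distances": [[0.2, 0.8]],
    "documents": [["doc a1", "doc a2"]],
    "metadatas": [[{"name": "a1"}, {"name": "a2"}]],
}
b = {
    "distances": [[0.5]],
    "documents": [["doc b1"]],
    "metadatas": [[{"name": "b1"}]],
}

# Ascending by distance (the non-hybrid path); the hybrid path passes reverse=True
# because its scores are similarities rather than distances.
merged = merge_and_sort_query_results([a, b], k=2)
print(merged["documents"])  # [['doc a1', 'doc b1']]
```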
|
||||
|
||||
|
||||
def query_embeddings_collection(
|
||||
collection_names: List[str], query: str, query_embeddings, k: int
|
||||
def query_collection(
|
||||
collection_names: List[str],
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
):
|
||||
results = []
|
||||
for collection_name in collection_names:
|
||||
try:
|
||||
result = query_doc(
|
||||
collection_name=collection_name,
|
||||
query=query,
|
||||
k=k,
|
||||
embedding_function=embedding_function,
|
||||
)
|
||||
results.append(result)
|
||||
except:
|
||||
pass
|
||||
return merge_and_sort_query_results(results, k=k)
|
||||
|
||||
|
||||
def query_collection_with_hybrid_search(
|
||||
collection_names: List[str],
|
||||
query: str,
|
||||
embedding_function,
|
||||
k: int,
|
||||
reranking_function,
|
||||
r: float,
|
||||
):
|
||||
|
||||
results = []
|
||||
log.info(f"query_embeddings_collection {query_embeddings}")
|
||||
|
||||
for collection_name in collection_names:
|
||||
try:
|
||||
result = query_embeddings_doc(
|
||||
result = query_doc_with_hybrid_search(
|
||||
collection_name=collection_name,
|
||||
query=query,
|
||||
query_embeddings=query_embeddings,
|
||||
embedding_function=embedding_function,
|
||||
k=k,
|
||||
reranking_function=reranking_function,
|
||||
r=r,
|
||||
)
|
||||
results.append(result)
|
||||
except:
|
||||
pass
|
||||
|
||||
return merge_and_sort_query_results(results, k)
|
||||
return merge_and_sort_query_results(results, k=k, reverse=True)
|
||||
|
||||
|
||||
def rag_template(template: str, context: str, query: str):
|
||||
|
@@ -105,20 +192,53 @@ def rag_template(template: str, context: str, query: str):
|
|||
return template
|
||||
|
||||
|
||||
def rag_messages(
|
||||
docs,
|
||||
messages,
|
||||
template,
|
||||
k,
|
||||
def get_embedding_function(
|
||||
embedding_engine,
|
||||
embedding_model,
|
||||
embedding_function,
|
||||
openai_key,
|
||||
openai_url,
|
||||
):
|
||||
log.debug(
|
||||
f"docs: {docs} {messages} {embedding_engine} {embedding_model} {embedding_function} {openai_key} {openai_url}"
|
||||
)
|
||||
if embedding_engine == "":
|
||||
return lambda query: embedding_function.encode(query).tolist()
|
||||
elif embedding_engine in ["ollama", "openai"]:
|
||||
if embedding_engine == "ollama":
|
||||
func = lambda query: generate_ollama_embeddings(
|
||||
GenerateEmbeddingsForm(
|
||||
**{
|
||||
"model": embedding_model,
|
||||
"prompt": query,
|
||||
}
|
||||
)
|
||||
)
|
||||
elif embedding_engine == "openai":
|
||||
func = lambda query: generate_openai_embeddings(
|
||||
model=embedding_model,
|
||||
text=query,
|
||||
key=openai_key,
|
||||
url=openai_url,
|
||||
)
|
||||
|
||||
def generate_multiple(query, f):
|
||||
if isinstance(query, list):
|
||||
return [f(q) for q in query]
|
||||
else:
|
||||
return f(query)
|
||||
|
||||
return lambda query: generate_multiple(query, func)
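Whatever engine is selected, the factory above hands back a single callable, so callers such as app.state.EMBEDDING_FUNCTION never branch on the engine again. A hedged usage sketch; the model object and names here are assumptions, not part of the diff:

```python
from sentence_transformers import SentenceTransformer

st_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")  # assumed local model

embed = get_embedding_function(
    "",                                         # "" selects the local sentence-transformers path
    "sentence-transformers/all-MiniLM-L6-v2",
    st_model,
    "",                                         # OpenAI key, unused for the local engine
    "",                                         # OpenAI URL, unused for the local engine
)

vector = embed("What is hybrid search?")          # one embedding for a single string
vectors = embed(["first chunk", "second chunk"])  # encode() also accepts a batch here
print(len(vector), len(vectors))
```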
|
||||
|
||||
|
||||
def rag_messages(
|
||||
docs,
|
||||
messages,
|
||||
template,
|
||||
embedding_function,
|
||||
k,
|
||||
reranking_function,
|
||||
r,
|
||||
hybrid_search,
|
||||
):
|
||||
log.debug(f"docs: {docs} {messages} {embedding_function} {reranking_function}")
|
||||
|
||||
last_user_message_idx = None
|
||||
for i in range(len(messages) - 1, -1, -1):
|
||||
|
@@ -145,62 +265,65 @@ def rag_messages(
|
|||
content_type = None
|
||||
query = ""
|
||||
|
||||
extracted_collections = []
|
||||
relevant_contexts = []
|
||||
|
||||
for doc in docs:
|
||||
context = None
|
||||
|
||||
try:
|
||||
collection = doc.get("collection_name")
|
||||
if collection:
|
||||
collection = [collection]
|
||||
else:
|
||||
collection = doc.get("collection_names", [])
|
||||
|
||||
collection = set(collection).difference(extracted_collections)
|
||||
if not collection:
|
||||
log.debug(f"skipping {doc} as it has already been extracted")
|
||||
continue
|
||||
|
||||
try:
|
||||
if doc["type"] == "text":
|
||||
context = doc["content"]
|
||||
else:
|
||||
if embedding_engine == "":
|
||||
query_embeddings = embedding_function.encode(query).tolist()
|
||||
elif embedding_engine == "ollama":
|
||||
query_embeddings = generate_ollama_embeddings(
|
||||
GenerateEmbeddingsForm(
|
||||
**{
|
||||
"model": embedding_model,
|
||||
"prompt": query,
|
||||
}
|
||||
)
|
||||
)
|
||||
elif embedding_engine == "openai":
|
||||
query_embeddings = generate_openai_embeddings(
|
||||
model=embedding_model,
|
||||
text=query,
|
||||
key=openai_key,
|
||||
url=openai_url,
|
||||
)
|
||||
|
||||
if doc["type"] == "collection":
|
||||
context = query_embeddings_collection(
|
||||
collection_names=doc["collection_names"],
|
||||
if hybrid_search:
|
||||
context = query_collection_with_hybrid_search(
|
||||
collection_names=(
|
||||
doc["collection_names"]
|
||||
if doc["type"] == "collection"
|
||||
else [doc["collection_name"]]
|
||||
),
|
||||
query=query,
|
||||
query_embeddings=query_embeddings,
|
||||
embedding_function=embedding_function,
|
||||
k=k,
|
||||
reranking_function=reranking_function,
|
||||
r=r,
|
||||
)
|
||||
else:
|
||||
context = query_embeddings_doc(
|
||||
collection_name=doc["collection_name"],
|
||||
context = query_collection(
|
||||
collection_names=(
|
||||
doc["collection_names"]
|
||||
if doc["type"] == "collection"
|
||||
else [doc["collection_name"]]
|
||||
),
|
||||
query=query,
|
||||
query_embeddings=query_embeddings,
|
||||
embedding_function=embedding_function,
|
||||
k=k,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
context = None
|
||||
|
||||
relevant_contexts.append(context)
|
||||
if context:
|
||||
relevant_contexts.append(context)
|
||||
|
||||
log.debug(f"relevant_contexts: {relevant_contexts}")
|
||||
extracted_collections.extend(collection)
|
||||
|
||||
context_string = ""
|
||||
for context in relevant_contexts:
|
||||
if context:
|
||||
context_string += " ".join(context["documents"][0]) + "\n"
|
||||
items = context["documents"][0]
|
||||
context_string += "\n\n".join(items)
|
||||
context_string = context_string.strip()
|
||||
|
||||
ra_content = rag_template(
|
||||
template=template,
|
||||
|
@@ -208,6 +331,8 @@ def rag_messages(
|
|||
query=query,
|
||||
)
|
||||
|
||||
log.debug(f"ra_content: {ra_content}")
|
||||
|
||||
if content_type == "list":
|
||||
new_content = []
|
||||
for content_item in user_message["content"]:
|
||||
|
@@ -229,6 +354,44 @@ def rag_messages(
|
|||
return messages
|
||||
|
||||
|
||||
def get_model_path(model: str, update_model: bool = False):
|
||||
# Construct huggingface_hub kwargs with local_files_only to return the snapshot path
|
||||
cache_dir = os.getenv("SENTENCE_TRANSFORMERS_HOME")
|
||||
|
||||
local_files_only = not update_model
|
||||
|
||||
snapshot_kwargs = {
|
||||
"cache_dir": cache_dir,
|
||||
"local_files_only": local_files_only,
|
||||
}
|
||||
|
||||
log.debug(f"model: {model}")
|
||||
log.debug(f"snapshot_kwargs: {snapshot_kwargs}")
|
||||
|
||||
# Inspiration from upstream sentence_transformers
|
||||
if (
|
||||
os.path.exists(model)
|
||||
or ("\\" in model or model.count("/") > 1)
|
||||
and local_files_only
|
||||
):
|
||||
# If fully qualified path exists, return input, else set repo_id
|
||||
return model
|
||||
elif "/" not in model:
|
||||
# Set valid repo_id for model short-name
|
||||
model = "sentence-transformers" + "/" + model
|
||||
|
||||
snapshot_kwargs["repo_id"] = model
|
||||
|
||||
# Attempt to query the huggingface_hub library to determine the local path and/or to update
|
||||
try:
|
||||
model_repo_path = snapshot_download(**snapshot_kwargs)
|
||||
log.debug(f"model_repo_path: {model_repo_path}")
|
||||
return model_repo_path
|
||||
except Exception as e:
|
||||
log.exception(f"Cannot determine model snapshot path: {e}")
|
||||
return model
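Two hedged examples, not from the commit, of how get_model_path resolves names; the actual results depend on what is already in the local Hugging Face cache:

```python
# With update_model=False only the local cache is consulted (local_files_only=True);
# if no snapshot is cached, the original name is returned unchanged.
print(get_model_path("sentence-transformers/all-MiniLM-L6-v2", update_model=False))

# A bare short name is first expanded into the sentence-transformers namespace,
# and update_model=True lets huggingface_hub download or refresh the snapshot.
print(get_model_path("all-MiniLM-L6-v2", update_model=True))
```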
|
||||
|
||||
|
||||
def generate_openai_embeddings(
|
||||
model: str, text: str, key: str, url: str = "https://api.openai.com/v1"
|
||||
):
|
||||
|
@@ -250,3 +413,99 @@ def generate_openai_embeddings(
|
|||
except Exception as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
|
||||
from typing import Any
|
||||
|
||||
from langchain_core.retrievers import BaseRetriever
|
||||
from langchain_core.callbacks import CallbackManagerForRetrieverRun
|
||||
|
||||
|
||||
class ChromaRetriever(BaseRetriever):
|
||||
collection: Any
|
||||
embedding_function: Any
|
||||
top_n: int
|
||||
|
||||
def _get_relevant_documents(
|
||||
self,
|
||||
query: str,
|
||||
*,
|
||||
run_manager: CallbackManagerForRetrieverRun,
|
||||
) -> List[Document]:
|
||||
query_embeddings = self.embedding_function(query)
|
||||
|
||||
results = self.collection.query(
|
||||
query_embeddings=[query_embeddings],
|
||||
n_results=self.top_n,
|
||||
)
|
||||
|
||||
ids = results["ids"][0]
|
||||
metadatas = results["metadatas"][0]
|
||||
documents = results["documents"][0]
|
||||
|
||||
return [
|
||||
Document(
|
||||
metadata=metadatas[idx],
|
||||
page_content=documents[idx],
|
||||
)
|
||||
for idx in range(len(ids))
|
||||
]
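A hypothetical usage sketch of ChromaRetriever; the collection name, texts, and model are assumptions, not from the diff. The class simply wraps a Chroma collection query so it can participate in the ensemble retriever used by the hybrid search path:

```python
import chromadb
from sentence_transformers import SentenceTransformer

st = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
embed = lambda text: st.encode(text).tolist()

client = chromadb.Client()                                    # in-memory client for the sketch
collection = client.get_or_create_collection("docs-example")  # hypothetical collection name
collection.add(
    ids=["1", "2"],
    documents=["alpha document", "beta document"],
    embeddings=[embed("alpha document"), embed("beta document")],
)

retriever = ChromaRetriever(collection=collection, embedding_function=embed, top_n=2)
print(retriever.invoke("alpha"))  # langchain Documents built from the nearest hits
```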
|
||||
|
||||
|
||||
import operator
|
||||
|
||||
from typing import Optional, Sequence
|
||||
|
||||
from langchain_core.documents import BaseDocumentCompressor, Document
|
||||
from langchain_core.callbacks import Callbacks
|
||||
from langchain_core.pydantic_v1 import Extra
|
||||
|
||||
from sentence_transformers import util
|
||||
|
||||
|
||||
class RerankCompressor(BaseDocumentCompressor):
|
||||
embedding_function: Any
|
||||
reranking_function: Any
|
||||
r_score: float
|
||||
top_n: int
|
||||
|
||||
class Config:
|
||||
extra = Extra.forbid
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
def compress_documents(
|
||||
self,
|
||||
documents: Sequence[Document],
|
||||
query: str,
|
||||
callbacks: Optional[Callbacks] = None,
|
||||
) -> Sequence[Document]:
|
||||
if self.reranking_function:
|
||||
scores = self.reranking_function.predict(
|
||||
[(query, doc.page_content) for doc in documents]
|
||||
)
|
||||
else:
|
||||
query_embedding = self.embedding_function(query)
|
||||
document_embedding = self.embedding_function(
|
||||
[doc.page_content for doc in documents]
|
||||
)
|
||||
scores = util.cos_sim(query_embedding, document_embedding)[0]
|
||||
|
||||
docs_with_scores = list(zip(documents, scores.tolist()))
|
||||
if self.r_score:
|
||||
docs_with_scores = [
|
||||
(d, s) for d, s in docs_with_scores if s >= self.r_score
|
||||
]
|
||||
|
||||
reverse = self.reranking_function is not None
|
||||
result = sorted(docs_with_scores, key=operator.itemgetter(1), reverse=reverse)
|
||||
|
||||
final_results = []
|
||||
for doc, doc_score in result[: self.top_n]:
|
||||
metadata = doc.metadata
|
||||
metadata["score"] = doc_score
|
||||
doc = Document(
|
||||
page_content=doc.page_content,
|
||||
metadata=metadata,
|
||||
)
|
||||
final_results.append(doc)
|
||||
return final_results
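A hypothetical sketch of RerankCompressor on its own; the model and threshold are assumptions. Without a reranking_function it falls back to cosine similarity between the query and document embeddings, and r_score filters out low-relevance hits before the top_n cut:

```python
from langchain_core.documents import Document
from sentence_transformers import SentenceTransformer

st = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
embed = lambda text: st.encode(text).tolist()

compressor = RerankCompressor(
    embedding_function=embed,
    reranking_function=None,   # use the cosine-similarity fallback path
    r_score=0.2,               # assumed relevance threshold
    top_n=2,
)
docs = [Document(page_content=t) for t in ["alpha beta", "gamma delta", "alpha gamma"]]
for doc in compressor.compress_documents(docs, query="alpha"):
    print(doc.metadata["score"], doc.page_content)
```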
|
||||
|
|
|
@@ -1,6 +1,7 @@
from peewee import *
from peewee_migrate import Router
from config import SRC_LOG_LEVELS, DATA_DIR
from playhouse.db_url import connect
from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL
import os
import logging

@@ -11,12 +12,12 @@ log.setLevel(SRC_LOG_LEVELS["DB"])
if os.path.exists(f"{DATA_DIR}/ollama.db"):
    # Rename the file
    os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
    log.info("File renamed successfully.")
    log.info("Database migrated from Ollama-WebUI successfully.")
else:
    pass


DB = SqliteDatabase(f"{DATA_DIR}/webui.db")
DB = connect(DATABASE_URL)
log.info(f"Connected to a {DB.__class__.__name__} database.")
router = Router(DB, migrate_dir="apps/web/internal/migrations", logger=log)
router.run()
DB.connect(reuse_if_open=True)
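The switch above from a hard-coded SqliteDatabase to playhouse.db_url.connect() is what makes the DATABASE_URL setting work for both engines. A hedged sketch; the URLs are illustrative only:

```python
from playhouse.db_url import connect

# connect() inspects the URL scheme and returns the matching peewee database class.
sqlite_db = connect("sqlite:///data/webui.db")                   # -> SqliteDatabase
postgres_db = connect("postgres://user:pass@db:5432/webui")      # -> PostgresqlDatabase

print(sqlite_db.__class__.__name__, postgres_db.__class__.__name__)
```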
||||
|
|
|
@@ -37,6 +37,18 @@ with suppress(ImportError):
|
|||
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your migrations here."""
|
||||
|
||||
# We perform different migrations for SQLite and other databases
|
||||
# This is because SQLite is very loose with enforcing its schema, and trying to migrate other databases like SQLite
|
||||
# will require per-database SQL queries.
|
||||
# Instead, we assume that because external DB support was added at a later date, it is safe to assume a newer base
|
||||
# schema instead of trying to migrate from an older schema.
|
||||
if isinstance(database, pw.SqliteDatabase):
|
||||
migrate_sqlite(migrator, database, fake=fake)
|
||||
else:
|
||||
migrate_external(migrator, database, fake=fake)
|
||||
|
||||
|
||||
def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
@migrator.create_model
|
||||
class Auth(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
|
@@ -129,6 +141,99 @@ def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
|||
table_name = "user"
|
||||
|
||||
|
||||
def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
@migrator.create_model
|
||||
class Auth(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
email = pw.CharField(max_length=255)
|
||||
password = pw.TextField()
|
||||
active = pw.BooleanField()
|
||||
|
||||
class Meta:
|
||||
table_name = "auth"
|
||||
|
||||
@migrator.create_model
|
||||
class Chat(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
title = pw.TextField()
|
||||
chat = pw.TextField()
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "chat"
|
||||
|
||||
@migrator.create_model
|
||||
class ChatIdTag(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
tag_name = pw.CharField(max_length=255)
|
||||
chat_id = pw.CharField(max_length=255)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "chatidtag"
|
||||
|
||||
@migrator.create_model
|
||||
class Document(pw.Model):
|
||||
id = pw.AutoField()
|
||||
collection_name = pw.CharField(max_length=255, unique=True)
|
||||
name = pw.CharField(max_length=255, unique=True)
|
||||
title = pw.TextField()
|
||||
filename = pw.TextField()
|
||||
content = pw.TextField(null=True)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "document"
|
||||
|
||||
@migrator.create_model
|
||||
class Modelfile(pw.Model):
|
||||
id = pw.AutoField()
|
||||
tag_name = pw.CharField(max_length=255, unique=True)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
modelfile = pw.TextField()
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "modelfile"
|
||||
|
||||
@migrator.create_model
|
||||
class Prompt(pw.Model):
|
||||
id = pw.AutoField()
|
||||
command = pw.CharField(max_length=255, unique=True)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
title = pw.TextField()
|
||||
content = pw.TextField()
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "prompt"
|
||||
|
||||
@migrator.create_model
|
||||
class Tag(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
name = pw.CharField(max_length=255)
|
||||
user_id = pw.CharField(max_length=255)
|
||||
data = pw.TextField(null=True)
|
||||
|
||||
class Meta:
|
||||
table_name = "tag"
|
||||
|
||||
@migrator.create_model
|
||||
class User(pw.Model):
|
||||
id = pw.CharField(max_length=255, unique=True)
|
||||
name = pw.CharField(max_length=255)
|
||||
email = pw.CharField(max_length=255)
|
||||
role = pw.CharField(max_length=255)
|
||||
profile_image_url = pw.TextField()
|
||||
timestamp = pw.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
table_name = "user"
|
||||
|
||||
|
||||
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your rollback migrations here."""
|
||||
|
||||
|
|
|
@@ -37,6 +37,13 @@ with suppress(ImportError):
|
|||
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your migrations here."""
|
||||
|
||||
if isinstance(database, pw.SqliteDatabase):
|
||||
migrate_sqlite(migrator, database, fake=fake)
|
||||
else:
|
||||
migrate_external(migrator, database, fake=fake)
|
||||
|
||||
|
||||
def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
# Adding fields created_at and updated_at to the 'chat' table
|
||||
migrator.add_fields(
|
||||
"chat",
|
||||
|
@@ -60,9 +67,40 @@ def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
|||
)
|
||||
|
||||
|
||||
def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
# Adding fields created_at and updated_at to the 'chat' table
|
||||
migrator.add_fields(
|
||||
"chat",
|
||||
created_at=pw.BigIntegerField(null=True), # Allow null for transition
|
||||
updated_at=pw.BigIntegerField(null=True), # Allow null for transition
|
||||
)
|
||||
|
||||
# Populate the new fields from an existing 'timestamp' field
|
||||
migrator.sql(
|
||||
"UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL"
|
||||
)
|
||||
|
||||
# Now that the data has been copied, remove the original 'timestamp' field
|
||||
migrator.remove_fields("chat", "timestamp")
|
||||
|
||||
# Update the fields to be not null now that they are populated
|
||||
migrator.change_fields(
|
||||
"chat",
|
||||
created_at=pw.BigIntegerField(null=False),
|
||||
updated_at=pw.BigIntegerField(null=False),
|
||||
)
|
||||
|
||||
|
||||
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your rollback migrations here."""
|
||||
|
||||
if isinstance(database, pw.SqliteDatabase):
|
||||
rollback_sqlite(migrator, database, fake=fake)
|
||||
else:
|
||||
rollback_external(migrator, database, fake=fake)
|
||||
|
||||
|
||||
def rollback_sqlite(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
# Recreate the timestamp field initially allowing null values for safe transition
|
||||
migrator.add_fields("chat", timestamp=pw.DateTimeField(null=True))
|
||||
|
||||
|
@@ -75,3 +113,18 @@ def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
|
|||
|
||||
# Finally, alter the timestamp field to not allow nulls if that was the original setting
|
||||
migrator.change_fields("chat", timestamp=pw.DateTimeField(null=False))
|
||||
|
||||
|
||||
def rollback_external(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
# Recreate the timestamp field initially allowing null values for safe transition
|
||||
migrator.add_fields("chat", timestamp=pw.BigIntegerField(null=True))
|
||||
|
||||
# Copy the earliest created_at date back into the new timestamp field
|
||||
# This assumes created_at was originally a copy of timestamp
|
||||
migrator.sql("UPDATE chat SET timestamp = created_at")
|
||||
|
||||
# Remove the created_at and updated_at fields
|
||||
migrator.remove_fields("chat", "created_at", "updated_at")
|
||||
|
||||
# Finally, alter the timestamp field to not allow nulls if that was the original setting
|
||||
migrator.change_fields("chat", timestamp=pw.BigIntegerField(null=False))
|
||||
|
|
|
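As a hedged aside (not part of this diff): migrations like the SQLite/external split above are applied by peewee-migrate's Router. A minimal sketch follows; the migrate_dir value is an assumption, while the DB handle import matches the models elsewhere in this PR.

# Sketch only: applying the migrations above with peewee-migrate.
from peewee_migrate import Router

from apps.web.internal.db import DB  # same handle the models use

router = Router(DB, migrate_dir="apps/web/internal/migrations")  # directory path assumed
router.run()  # applies any migration files not yet recorded in the migratehistory table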
@ -0,0 +1,130 @@
|
|||
"""Peewee migrations -- 006_migrate_timestamps_and_charfields.py.
|
||||
|
||||
Some examples (model - class or model name)::
|
||||
|
||||
> Model = migrator.orm['table_name'] # Return model in current state by name
|
||||
> Model = migrator.ModelClass # Return model in current state by name
|
||||
|
||||
> migrator.sql(sql) # Run custom SQL
|
||||
> migrator.run(func, *args, **kwargs) # Run python function with the given args
|
||||
> migrator.create_model(Model) # Create a model (could be used as decorator)
|
||||
> migrator.remove_model(model, cascade=True) # Remove a model
|
||||
> migrator.add_fields(model, **fields) # Add fields to a model
|
||||
> migrator.change_fields(model, **fields) # Change fields
|
||||
> migrator.remove_fields(model, *field_names, cascade=True)
|
||||
> migrator.rename_field(model, old_field_name, new_field_name)
|
||||
> migrator.rename_table(model, new_table_name)
|
||||
> migrator.add_index(model, *col_names, unique=False)
|
||||
> migrator.add_not_null(model, *field_names)
|
||||
> migrator.add_default(model, field_name, default)
|
||||
> migrator.add_constraint(model, name, sql)
|
||||
> migrator.drop_index(model, *col_names)
|
||||
> migrator.drop_not_null(model, *field_names)
|
||||
> migrator.drop_constraints(model, *constraints)
|
||||
|
||||
"""
|
||||
|
||||
from contextlib import suppress
|
||||
|
||||
import peewee as pw
|
||||
from peewee_migrate import Migrator
|
||||
|
||||
|
||||
with suppress(ImportError):
|
||||
import playhouse.postgres_ext as pw_pext
|
||||
|
||||
|
||||
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your migrations here."""
|
||||
|
||||
# Alter the tables with timestamps
|
||||
migrator.change_fields(
|
||||
"chatidtag",
|
||||
timestamp=pw.BigIntegerField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"document",
|
||||
timestamp=pw.BigIntegerField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"modelfile",
|
||||
timestamp=pw.BigIntegerField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"prompt",
|
||||
timestamp=pw.BigIntegerField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"user",
|
||||
timestamp=pw.BigIntegerField(),
|
||||
)
|
||||
# Alter the tables with varchar to text where necessary
|
||||
migrator.change_fields(
|
||||
"auth",
|
||||
password=pw.TextField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"chat",
|
||||
title=pw.TextField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"document",
|
||||
title=pw.TextField(),
|
||||
filename=pw.TextField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"prompt",
|
||||
title=pw.TextField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"user",
|
||||
profile_image_url=pw.TextField(),
|
||||
)
|
||||
|
||||
|
||||
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your rollback migrations here."""
|
||||
|
||||
if isinstance(database, pw.SqliteDatabase):
|
||||
# Alter the tables with timestamps
|
||||
migrator.change_fields(
|
||||
"chatidtag",
|
||||
timestamp=pw.DateField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"document",
|
||||
timestamp=pw.DateField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"modelfile",
|
||||
timestamp=pw.DateField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"prompt",
|
||||
timestamp=pw.DateField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"user",
|
||||
timestamp=pw.DateField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"auth",
|
||||
password=pw.CharField(max_length=255),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"chat",
|
||||
title=pw.CharField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"document",
|
||||
title=pw.CharField(),
|
||||
filename=pw.CharField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"prompt",
|
||||
title=pw.CharField(),
|
||||
)
|
||||
migrator.change_fields(
|
||||
"user",
|
||||
profile_image_url=pw.CharField(),
|
||||
)
|
|
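A brief, non-authoritative sketch of working with the BigIntegerField epoch timestamps this migration standardizes on; the helper names are illustrative and not part of the codebase.

import time
from datetime import datetime, timezone

def now_epoch() -> int:
    # Same pattern as the int(time.time()) calls used when rows are inserted elsewhere in this PR.
    return int(time.time())

def epoch_to_utc(ts: int) -> datetime:
    # Convenience for logs/debugging; the UI formats these values client-side.
    return datetime.fromtimestamp(ts, tz=timezone.utc)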
@ -0,0 +1,79 @@
|
|||
"""Peewee migrations -- 002_add_local_sharing.py.
|
||||
|
||||
Some examples (model - class or model name)::
|
||||
|
||||
> Model = migrator.orm['table_name'] # Return model in current state by name
|
||||
> Model = migrator.ModelClass # Return model in current state by name
|
||||
|
||||
> migrator.sql(sql) # Run custom SQL
|
||||
> migrator.run(func, *args, **kwargs) # Run python function with the given args
|
||||
> migrator.create_model(Model) # Create a model (could be used as decorator)
|
||||
> migrator.remove_model(model, cascade=True) # Remove a model
|
||||
> migrator.add_fields(model, **fields) # Add fields to a model
|
||||
> migrator.change_fields(model, **fields) # Change fields
|
||||
> migrator.remove_fields(model, *field_names, cascade=True)
|
||||
> migrator.rename_field(model, old_field_name, new_field_name)
|
||||
> migrator.rename_table(model, new_table_name)
|
||||
> migrator.add_index(model, *col_names, unique=False)
|
||||
> migrator.add_not_null(model, *field_names)
|
||||
> migrator.add_default(model, field_name, default)
|
||||
> migrator.add_constraint(model, name, sql)
|
||||
> migrator.drop_index(model, *col_names)
|
||||
> migrator.drop_not_null(model, *field_names)
|
||||
> migrator.drop_constraints(model, *constraints)
|
||||
|
||||
"""
|
||||
|
||||
from contextlib import suppress
|
||||
|
||||
import peewee as pw
|
||||
from peewee_migrate import Migrator
|
||||
|
||||
|
||||
with suppress(ImportError):
|
||||
import playhouse.postgres_ext as pw_pext
|
||||
|
||||
|
||||
def migrate(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your migrations here."""
|
||||
|
||||
# Adding fields created_at and updated_at to the 'user' table
|
||||
migrator.add_fields(
|
||||
"user",
|
||||
created_at=pw.BigIntegerField(null=True), # Allow null for transition
|
||||
updated_at=pw.BigIntegerField(null=True), # Allow null for transition
|
||||
last_active_at=pw.BigIntegerField(null=True), # Allow null for transition
|
||||
)
|
||||
|
||||
# Populate the new fields from an existing 'timestamp' field
|
||||
migrator.sql(
|
||||
"UPDATE user SET created_at = timestamp, updated_at = timestamp, last_active_at = timestamp WHERE timestamp IS NOT NULL"
|
||||
)
|
||||
|
||||
# Now that the data has been copied, remove the original 'timestamp' field
|
||||
migrator.remove_fields("user", "timestamp")
|
||||
|
||||
# Update the fields to be not null now that they are populated
|
||||
migrator.change_fields(
|
||||
"user",
|
||||
created_at=pw.BigIntegerField(null=False),
|
||||
updated_at=pw.BigIntegerField(null=False),
|
||||
last_active_at=pw.BigIntegerField(null=False),
|
||||
)
|
||||
|
||||
|
||||
def rollback(migrator: Migrator, database: pw.Database, *, fake=False):
|
||||
"""Write your rollback migrations here."""
|
||||
|
||||
# Recreate the timestamp field initially allowing null values for safe transition
|
||||
migrator.add_fields("user", timestamp=pw.BigIntegerField(null=True))
|
||||
|
||||
# Copy the earliest created_at date back into the new timestamp field
|
||||
# This assumes created_at was originally a copy of timestamp
|
||||
migrator.sql("UPDATE user SET timestamp = created_at")
|
||||
|
||||
# Remove the created_at, updated_at, and last_active_at fields
|
||||
migrator.remove_fields("user", "created_at", "updated_at", "last_active_at")
|
||||
|
||||
# Finally, alter the timestamp field to not allow nulls if that was the original setting
|
||||
migrator.change_fields("user", timestamp=pw.BigIntegerField(null=False))
|
|
@ -23,7 +23,7 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])
|
|||
class Auth(Model):
|
||||
id = CharField(unique=True)
|
||||
email = CharField()
|
||||
password = CharField()
|
||||
password = TextField()
|
||||
active = BooleanField()
|
||||
|
||||
class Meta:
|
||||
|
|
|
@ -17,11 +17,11 @@ from apps.web.internal.db import DB
|
|||
class Chat(Model):
|
||||
id = CharField(unique=True)
|
||||
user_id = CharField()
|
||||
title = CharField()
|
||||
title = TextField()
|
||||
chat = TextField() # Save Chat JSON as Text
|
||||
|
||||
created_at = DateTimeField()
|
||||
updated_at = DateTimeField()
|
||||
created_at = BigIntegerField()
|
||||
updated_at = BigIntegerField()
|
||||
|
||||
share_id = CharField(null=True, unique=True)
|
||||
archived = BooleanField(default=False)
|
||||
|
@ -191,7 +191,7 @@ class ChatTable:
|
|||
except:
|
||||
return None
|
||||
|
||||
def get_archived_chat_lists_by_user_id(
|
||||
def get_archived_chat_list_by_user_id(
|
||||
self, user_id: str, skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
return [
|
||||
|
@ -204,7 +204,7 @@ class ChatTable:
|
|||
# .offset(skip)
|
||||
]
|
||||
|
||||
def get_chat_lists_by_user_id(
|
||||
def get_chat_list_by_user_id(
|
||||
self, user_id: str, skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
return [
|
||||
|
@ -217,7 +217,7 @@ class ChatTable:
|
|||
# .offset(skip)
|
||||
]
|
||||
|
||||
def get_chat_lists_by_chat_ids(
|
||||
def get_chat_list_by_chat_ids(
|
||||
self, chat_ids: List[str], skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
return [
|
||||
|
@ -228,20 +228,6 @@ class ChatTable:
|
|||
.order_by(Chat.updated_at.desc())
|
||||
]
|
||||
|
||||
def get_all_chats(self) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select().order_by(Chat.updated_at.desc())
|
||||
]
|
||||
|
||||
def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select()
|
||||
.where(Chat.user_id == user_id)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
]
|
||||
|
||||
def get_chat_by_id(self, id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
chat = Chat.get(Chat.id == id)
|
||||
|
@ -271,9 +257,28 @@ class ChatTable:
|
|||
def get_chats(self, skip: int = 0, limit: int = 50) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select().limit(limit).offset(skip)
|
||||
for chat in Chat.select().order_by(Chat.updated_at.desc())
|
||||
# .limit(limit).offset(skip)
|
||||
]
|
||||
|
||||
def get_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select()
|
||||
.where(Chat.user_id == user_id)
|
||||
.order_by(Chat.updated_at.desc())
|
||||
# .limit(limit).offset(skip)
|
||||
]
|
||||
|
||||
def delete_chat_by_id(self, id: str) -> bool:
|
||||
try:
|
||||
query = Chat.delete().where((Chat.id == id))
|
||||
query.execute() # Remove the rows, return number of rows removed.
|
||||
|
||||
return True and self.delete_shared_chat_by_chat_id(id)
|
||||
except:
|
||||
return False
|
||||
|
||||
def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
|
||||
try:
|
||||
query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id))
|
||||
|
|
|
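For orientation, a hedged usage sketch of the renamed ChatTable accessors as the routers later in this PR call them; the import path and the Chats singleton are assumptions based on the surrounding hunks.

from apps.web.models.chats import Chats  # assumed module path

titles = Chats.get_chat_list_by_user_id("some-user-id", skip=0, limit=50)
archived = Chats.get_archived_chat_list_by_user_id("some-user-id")
full = Chats.get_chats_by_user_id("some-user-id")  # full ChatModel rows, newest first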
@ -25,11 +25,11 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])
|
|||
class Document(Model):
|
||||
collection_name = CharField(unique=True)
|
||||
name = CharField(unique=True)
|
||||
title = CharField()
|
||||
filename = CharField()
|
||||
title = TextField()
|
||||
filename = TextField()
|
||||
content = TextField(null=True)
|
||||
user_id = CharField()
|
||||
timestamp = DateField()
|
||||
timestamp = BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
|
|
@ -20,7 +20,7 @@ class Modelfile(Model):
|
|||
tag_name = CharField(unique=True)
|
||||
user_id = CharField()
|
||||
modelfile = TextField()
|
||||
timestamp = DateField()
|
||||
timestamp = BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
|
|
@ -19,9 +19,9 @@ import json
|
|||
class Prompt(Model):
|
||||
command = CharField(unique=True)
|
||||
user_id = CharField()
|
||||
title = CharField()
|
||||
title = TextField()
|
||||
content = TextField()
|
||||
timestamp = DateField()
|
||||
timestamp = BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
|
|
@ -35,7 +35,7 @@ class ChatIdTag(Model):
|
|||
tag_name = CharField()
|
||||
chat_id = CharField()
|
||||
user_id = CharField()
|
||||
timestamp = DateField()
|
||||
timestamp = BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
|
|
@ -18,8 +18,12 @@ class User(Model):
|
|||
name = CharField()
|
||||
email = CharField()
|
||||
role = CharField()
|
||||
profile_image_url = CharField()
|
||||
timestamp = DateField()
|
||||
profile_image_url = TextField()
|
||||
|
||||
last_active_at = BigIntegerField()
|
||||
updated_at = BigIntegerField()
|
||||
created_at = BigIntegerField()
|
||||
|
||||
api_key = CharField(null=True, unique=True)
|
||||
|
||||
class Meta:
|
||||
|
@ -32,7 +36,11 @@ class UserModel(BaseModel):
|
|||
email: str
|
||||
role: str = "pending"
|
||||
profile_image_url: str
|
||||
timestamp: int # timestamp in epoch
|
||||
|
||||
last_active_at: int # timestamp in epoch
|
||||
updated_at: int # timestamp in epoch
|
||||
created_at: int # timestamp in epoch
|
||||
|
||||
api_key: Optional[str] = None
|
||||
|
||||
|
||||
|
@ -73,7 +81,9 @@ class UsersTable:
|
|||
"email": email,
|
||||
"role": role,
|
||||
"profile_image_url": profile_image_url,
|
||||
"timestamp": int(time.time()),
|
||||
"last_active_at": int(time.time()),
|
||||
"created_at": int(time.time()),
|
||||
"updated_at": int(time.time()),
|
||||
}
|
||||
)
|
||||
result = User.create(**user.model_dump())
|
||||
|
@ -137,6 +147,16 @@ class UsersTable:
|
|||
except:
|
||||
return None
|
||||
|
||||
def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]:
|
||||
try:
|
||||
query = User.update(last_active_at=int(time.time())).where(User.id == id)
|
||||
query.execute()
|
||||
|
||||
user = User.get(User.id == id)
|
||||
return UserModel(**model_to_dict(user))
|
||||
except:
|
||||
return None
|
||||
|
||||
def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]:
|
||||
try:
|
||||
query = User.update(**updated).where(User.id == id)
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import logging
|
||||
|
||||
from fastapi import Request
|
||||
from fastapi import Depends, HTTPException, status
|
||||
|
||||
|
|
|
@ -36,15 +36,49 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])
|
|||
router = APIRouter()
|
||||
|
||||
############################
|
||||
# GetChats
|
||||
# GetChatList
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chats(
|
||||
@router.get("/list", response_model=List[ChatTitleIdResponse])
|
||||
async def get_session_user_chat_list(
|
||||
user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
|
||||
return Chats.get_chat_list_by_user_id(user.id, skip, limit)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteAllChats
|
||||
############################
|
||||
|
||||
|
||||
@router.delete("/", response_model=bool)
|
||||
async def delete_all_user_chats(request: Request, user=Depends(get_current_user)):
|
||||
|
||||
if (
|
||||
user.role == "user"
|
||||
and not request.app.state.USER_PERMISSIONS["chat"]["deletion"]
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
result = Chats.delete_chats_by_user_id(user.id)
|
||||
return result
|
||||
|
||||
|
||||
############################
|
||||
# GetUserChatList
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/list/user/{user_id}", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chat_list_by_user_id(
|
||||
user_id: str, user=Depends(get_admin_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
return Chats.get_chat_list_by_user_id(user_id, skip, limit)
|
||||
|
||||
|
||||
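A hedged client-side sketch of the list endpoints above; the /api/v1 prefix comes from app.mount() later in this PR, and the host, token, and user id values are placeholders.

import requests

BASE = "http://localhost:8080/api/v1"
headers = {"Authorization": "Bearer <token>"}

# Session user's chat titles/ids, paged via skip/limit query parameters
requests.get(f"{BASE}/chats/list", params={"skip": 0, "limit": 50}, headers=headers)

# Admin only: another user's chat list
requests.get(f"{BASE}/chats/list/user/<user-id>", headers=headers)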
############################
|
||||
|
@ -53,22 +87,22 @@ async def get_user_chats(
|
|||
|
||||
|
||||
@router.get("/archived", response_model=List[ChatTitleIdResponse])
|
||||
async def get_archived_user_chats(
|
||||
async def get_archived_session_user_chat_list(
|
||||
user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
return Chats.get_archived_chat_lists_by_user_id(user.id, skip, limit)
|
||||
return Chats.get_archived_chat_list_by_user_id(user.id, skip, limit)
|
||||
|
||||
|
||||
############################
|
||||
# GetAllChats
|
||||
# GetChats
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/all", response_model=List[ChatResponse])
|
||||
async def get_all_user_chats(user=Depends(get_current_user)):
|
||||
async def get_user_chats(user=Depends(get_current_user)):
|
||||
return [
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
for chat in Chats.get_all_chats_by_user_id(user.id)
|
||||
for chat in Chats.get_chats_by_user_id(user.id)
|
||||
]
|
||||
|
||||
|
||||
|
@ -86,7 +120,7 @@ async def get_all_user_chats_in_db(user=Depends(get_admin_user)):
|
|||
)
|
||||
return [
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
for chat in Chats.get_all_chats()
|
||||
for chat in Chats.get_chats()
|
||||
]
|
||||
|
||||
|
||||
|
@ -107,45 +141,6 @@ async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
|
|||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetAllTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/all", response_model=List[TagModel])
|
||||
async def get_all_tags(user=Depends(get_current_user)):
|
||||
try:
|
||||
tags = Tags.get_tags_by_user_id(user.id)
|
||||
return tags
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetChatsByTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/tag/{tag_name}", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chats_by_tag_name(
|
||||
tag_name: str, user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
chat_ids = [
|
||||
chat_id_tag.chat_id
|
||||
for chat_id_tag in Tags.get_chat_ids_by_tag_name_and_user_id(tag_name, user.id)
|
||||
]
|
||||
|
||||
chats = Chats.get_chat_lists_by_chat_ids(chat_ids, skip, limit)
|
||||
|
||||
if len(chats) == 0:
|
||||
Tags.delete_tag_by_tag_name_and_user_id(tag_name, user.id)
|
||||
|
||||
return chats
|
||||
|
||||
|
||||
############################
|
||||
# GetChatById
|
||||
############################
|
||||
|
@ -193,17 +188,18 @@ async def update_chat_by_id(
|
|||
@router.delete("/{id}", response_model=bool)
|
||||
async def delete_chat_by_id(request: Request, id: str, user=Depends(get_current_user)):
|
||||
|
||||
if (
|
||||
user.role == "user"
|
||||
and not request.app.state.USER_PERMISSIONS["chat"]["deletion"]
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
if user.role == "admin":
|
||||
result = Chats.delete_chat_by_id(id)
|
||||
return result
|
||||
else:
|
||||
if not request.app.state.USER_PERMISSIONS["chat"]["deletion"]:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
result = Chats.delete_chat_by_id_and_user_id(id, user.id)
|
||||
return result
|
||||
result = Chats.delete_chat_by_id_and_user_id(id, user.id)
|
||||
return result
|
||||
|
||||
|
||||
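A hedged sketch of the reworked deletion rule above as seen from a client: admins can delete any chat, while regular users need the chat deletion permission and can only delete their own. Paths and tokens are placeholders.

import requests

resp = requests.delete(
    "http://localhost:8080/api/v1/chats/<chat-id>",
    headers={"Authorization": "Bearer <token>"},
)
# Admin tokens delete any chat; user tokens get 401 when
# USER_PERMISSIONS["chat"]["deletion"] is disabled, otherwise only their own chat is removed.
print(resp.status_code, resp.json() if resp.ok else resp.text)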
############################
|
||||
|
@ -303,6 +299,45 @@ async def get_shared_chat_by_id(share_id: str, user=Depends(get_current_user)):
|
|||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetAllTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/all", response_model=List[TagModel])
|
||||
async def get_all_tags(user=Depends(get_current_user)):
|
||||
try:
|
||||
tags = Tags.get_tags_by_user_id(user.id)
|
||||
return tags
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetChatsByTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/tag/{tag_name}", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chat_list_by_tag_name(
|
||||
tag_name: str, user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
chat_ids = [
|
||||
chat_id_tag.chat_id
|
||||
for chat_id_tag in Tags.get_chat_ids_by_tag_name_and_user_id(tag_name, user.id)
|
||||
]
|
||||
|
||||
chats = Chats.get_chat_list_by_chat_ids(chat_ids, skip, limit)
|
||||
|
||||
if len(chats) == 0:
|
||||
Tags.delete_tag_by_tag_name_and_user_id(tag_name, user.id)
|
||||
|
||||
return chats
|
||||
|
||||
|
||||
############################
|
||||
# GetChatTagsById
|
||||
############################
|
||||
|
@ -383,24 +418,3 @@ async def delete_all_chat_tags_by_id(id: str, user=Depends(get_current_user)):
|
|||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteAllChats
|
||||
############################
|
||||
|
||||
|
||||
@router.delete("/", response_model=bool)
|
||||
async def delete_all_user_chats(request: Request, user=Depends(get_current_user)):
|
||||
|
||||
if (
|
||||
user.role == "user"
|
||||
and not request.app.state.USER_PERMISSIONS["chat"]["deletion"]
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
result = Chats.delete_chats_by_user_id(user.id)
|
||||
return result
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from fastapi import APIRouter, UploadFile, File, Response
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from peewee import SqliteDatabase
|
||||
from starlette.responses import StreamingResponse, FileResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
@ -7,7 +8,7 @@ from pydantic import BaseModel
|
|||
from fpdf import FPDF
|
||||
import markdown
|
||||
|
||||
|
||||
from apps.web.internal.db import DB
|
||||
from utils.utils import get_admin_user
|
||||
from utils.misc import calculate_sha256, get_gravatar_url
|
||||
|
||||
|
@ -96,8 +97,13 @@ async def download_db(user=Depends(get_admin_user)):
|
|||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
if not isinstance(DB, SqliteDatabase):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.DB_NOT_SQLITE,
|
||||
)
|
||||
return FileResponse(
|
||||
f"{DATA_DIR}/webui.db",
|
||||
DB.database,
|
||||
media_type="application/octet-stream",
|
||||
filename="webui.db",
|
||||
)
|
||||
|
|
|
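Why the SqliteDatabase guard above matters, as a hedged sketch: with DATABASE_URL support, DB may be a Postgres or MySQL handle, whose .database attribute is a database name rather than a file that FileResponse could serve. The connection strings below are illustrative and assume the backend resolves the URL with peewee's playhouse.db_url.

from peewee import SqliteDatabase
from playhouse.db_url import connect

db = connect("sqlite:///data/webui.db")
print(isinstance(db, SqliteDatabase), db.database)   # True  data/webui.db (a real file on disk)

# db = connect("postgresql://webui:secret@db:5432/webui")  # example only
# isinstance(db, SqliteDatabase) -> False; db.database -> "webui" (not a downloadable file)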
@ -71,6 +71,9 @@ except ImportError:
|
|||
log.warning("dotenv not installed, skipping...")
|
||||
|
||||
WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
|
||||
if WEBUI_NAME != "Open WebUI":
|
||||
WEBUI_NAME += " (Open WebUI)"
|
||||
|
||||
WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
|
||||
|
||||
####################################
|
||||
|
@ -195,9 +198,6 @@ if CUSTOM_NAME:
|
|||
except Exception as e:
|
||||
log.exception(e)
|
||||
pass
|
||||
else:
|
||||
if WEBUI_NAME != "Open WebUI":
|
||||
WEBUI_NAME += " (Open WebUI)"
|
||||
|
||||
|
||||
####################################
|
||||
|
@ -220,7 +220,7 @@ Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)
|
|||
# Docs DIR
|
||||
####################################
|
||||
|
||||
DOCS_DIR = f"{DATA_DIR}/docs"
|
||||
DOCS_DIR = os.getenv("DOCS_DIR", f"{DATA_DIR}/docs")
|
||||
Path(DOCS_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
|
@ -375,8 +375,7 @@ USER_PERMISSIONS_CHAT_DELETION = (
|
|||
|
||||
USER_PERMISSIONS = {"chat": {"deletion": USER_PERMISSIONS_CHAT_DELETION}}
|
||||
|
||||
|
||||
MODEL_FILTER_ENABLED = os.environ.get("MODEL_FILTER_ENABLED", "False").lower() == "true"
|
||||
ENABLE_MODEL_FILTER = os.environ.get("ENABLE_MODEL_FILTER", "False").lower() == "true"
|
||||
MODEL_FILTER_LIST = os.environ.get("MODEL_FILTER_LIST", "")
|
||||
MODEL_FILTER_LIST = [model.strip() for model in MODEL_FILTER_LIST.split(";")]
|
||||
|
||||
|
@ -418,19 +417,57 @@ if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
|
|||
####################################
|
||||
|
||||
CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db"
|
||||
CHROMA_TENANT = os.environ.get("CHROMA_TENANT", chromadb.DEFAULT_TENANT)
|
||||
CHROMA_DATABASE = os.environ.get("CHROMA_DATABASE", chromadb.DEFAULT_DATABASE)
|
||||
CHROMA_HTTP_HOST = os.environ.get("CHROMA_HTTP_HOST", "")
|
||||
CHROMA_HTTP_PORT = int(os.environ.get("CHROMA_HTTP_PORT", "8000"))
|
||||
# Comma-separated list of header=value pairs
|
||||
CHROMA_HTTP_HEADERS = os.environ.get("CHROMA_HTTP_HEADERS", "")
|
||||
if CHROMA_HTTP_HEADERS:
|
||||
CHROMA_HTTP_HEADERS = dict(
|
||||
[pair.split("=") for pair in CHROMA_HTTP_HEADERS.split(",")]
|
||||
)
|
||||
else:
|
||||
CHROMA_HTTP_HEADERS = None
|
||||
CHROMA_HTTP_SSL = os.environ.get("CHROMA_HTTP_SSL", "false").lower() == "true"
|
||||
# This uses the model defined in the Dockerfile ENV variable. If you don't use Docker or Docker-based deployments such as k8s, the default embedding model (sentence-transformers/all-MiniLM-L6-v2) will be used.
|
||||
|
||||
RAG_TOP_K = int(os.environ.get("RAG_TOP_K", "5"))
|
||||
RAG_RELEVANCE_THRESHOLD = float(os.environ.get("RAG_RELEVANCE_THRESHOLD", "0.0"))
|
||||
|
||||
ENABLE_RAG_HYBRID_SEARCH = (
|
||||
os.environ.get("ENABLE_RAG_HYBRID_SEARCH", "").lower() == "true"
|
||||
)
|
||||
|
||||
RAG_EMBEDDING_ENGINE = os.environ.get("RAG_EMBEDDING_ENGINE", "")
|
||||
|
||||
PDF_EXTRACT_IMAGES = os.environ.get("PDF_EXTRACT_IMAGES", "False").lower() == "true"
|
||||
|
||||
RAG_EMBEDDING_MODEL = os.environ.get(
|
||||
"RAG_EMBEDDING_MODEL", "sentence-transformers/all-MiniLM-L6-v2"
|
||||
)
|
||||
log.info(f"Embedding model set: {RAG_EMBEDDING_MODEL}"),
|
||||
|
||||
RAG_EMBEDDING_MODEL_AUTO_UPDATE = (
|
||||
os.environ.get("RAG_EMBEDDING_MODEL_AUTO_UPDATE", "").lower() == "true"
|
||||
)
|
||||
|
||||
RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE = (
|
||||
os.environ.get("RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
|
||||
)
|
||||
|
||||
RAG_RERANKING_MODEL = os.environ.get("RAG_RERANKING_MODEL", "")
|
||||
if not RAG_RERANKING_MODEL == "":
|
||||
log.info(f"Reranking model set: {RAG_RERANKING_MODEL}"),
|
||||
|
||||
RAG_RERANKING_MODEL_AUTO_UPDATE = (
|
||||
os.environ.get("RAG_RERANKING_MODEL_AUTO_UPDATE", "").lower() == "true"
|
||||
)
|
||||
|
||||
RAG_RERANKING_MODEL_TRUST_REMOTE_CODE = (
|
||||
os.environ.get("RAG_RERANKING_MODEL_TRUST_REMOTE_CODE", "").lower() == "true"
|
||||
)
|
||||
|
||||
# Device type for embedding models: "cpu" (default), "cuda" (NVIDIA GPU required) or "mps" (Apple silicon); choosing the right one can improve performance.
|
||||
USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
|
||||
|
||||
|
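A hedged configuration sketch for the new remote-Chroma settings; hostnames and header values are examples only and would be set before the server starts (e.g. in docker-compose). They are shown here just to illustrate the header=value,header=value format parsed above.

# CHROMA_HTTP_HOST=chroma.internal      # non-empty -> HttpClient is used instead of PersistentClient
# CHROMA_HTTP_PORT=8000
# CHROMA_HTTP_SSL=true
# CHROMA_HTTP_HEADERS=Authorization=Bearer abc123,X-Tenant=team-a

headers = "Authorization=Bearer abc123,X-Tenant=team-a"
print(dict(pair.split("=") for pair in headers.split(",")))
# {'Authorization': 'Bearer abc123', 'X-Tenant': 'team-a'}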
@ -439,16 +476,28 @@ if USE_CUDA.lower() == "true":
|
|||
else:
|
||||
DEVICE_TYPE = "cpu"
|
||||
|
||||
if CHROMA_HTTP_HOST != "":
|
||||
CHROMA_CLIENT = chromadb.HttpClient(
|
||||
host=CHROMA_HTTP_HOST,
|
||||
port=CHROMA_HTTP_PORT,
|
||||
headers=CHROMA_HTTP_HEADERS,
|
||||
ssl=CHROMA_HTTP_SSL,
|
||||
tenant=CHROMA_TENANT,
|
||||
database=CHROMA_DATABASE,
|
||||
settings=Settings(allow_reset=True, anonymized_telemetry=False),
|
||||
)
|
||||
else:
|
||||
CHROMA_CLIENT = chromadb.PersistentClient(
|
||||
path=CHROMA_DATA_PATH,
|
||||
settings=Settings(allow_reset=True, anonymized_telemetry=False),
|
||||
tenant=CHROMA_TENANT,
|
||||
database=CHROMA_DATABASE,
|
||||
)
|
||||
|
||||
CHROMA_CLIENT = chromadb.PersistentClient(
|
||||
path=CHROMA_DATA_PATH,
|
||||
settings=Settings(allow_reset=True, anonymized_telemetry=False),
|
||||
)
|
||||
CHUNK_SIZE = 1500
|
||||
CHUNK_OVERLAP = 100
|
||||
CHUNK_SIZE = int(os.environ.get("CHUNK_SIZE", "1500"))
|
||||
CHUNK_OVERLAP = int(os.environ.get("CHUNK_OVERLAP", "100"))
|
||||
|
||||
|
||||
RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
|
||||
DEFAULT_RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
|
||||
<context>
|
||||
[context]
|
||||
</context>
|
||||
|
@ -458,10 +507,12 @@ When answer to user:
|
|||
- If you don't know or are not sure, ask for clarification.
|
||||
Avoid mentioning that you obtained the information from the context.
|
||||
And answer according to the language of the user's question.
|
||||
|
||||
|
||||
Given the context information, answer the query.
|
||||
Query: [query]"""
|
||||
|
||||
RAG_TEMPLATE = os.environ.get("RAG_TEMPLATE", DEFAULT_RAG_TEMPLATE)
|
||||
|
||||
RAG_OPENAI_API_BASE_URL = os.getenv("RAG_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL)
|
||||
RAG_OPENAI_API_KEY = os.getenv("RAG_OPENAI_API_KEY", OPENAI_API_KEY)
|
||||
|
||||
|
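A hedged example of overriding the prompt through the new RAG_TEMPLATE variable; it assumes the backend substitutes the same [context] and [query] placeholders used by DEFAULT_RAG_TEMPLATE above, and the wording is illustrative.

CUSTOM_RAG_TEMPLATE = """Answer using only the information inside the context tags.
<context>
[context]
</context>
If the context does not contain the answer, say so.
Query: [query]"""

# export RAG_TEMPLATE="$CUSTOM_RAG_TEMPLATE" before starting the backend (shell shown as a comment only)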
@ -480,18 +531,25 @@ WHISPER_MODEL_AUTO_UPDATE = (
|
|||
# Images
|
||||
####################################
|
||||
|
||||
IMAGE_GENERATION_ENGINE = os.getenv("IMAGE_GENERATION_ENGINE", "")
|
||||
|
||||
ENABLE_IMAGE_GENERATION = (
|
||||
os.environ.get("ENABLE_IMAGE_GENERATION", "").lower() == "true"
|
||||
)
|
||||
AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "")
|
||||
COMFYUI_BASE_URL = os.getenv("COMFYUI_BASE_URL", "")
|
||||
|
||||
COMFYUI_BASE_URL = os.getenv("COMFYUI_BASE_URL", "")
|
||||
|
||||
IMAGES_OPENAI_API_BASE_URL = os.getenv(
|
||||
"IMAGES_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL
|
||||
)
|
||||
IMAGES_OPENAI_API_KEY = os.getenv("IMAGES_OPENAI_API_KEY", OPENAI_API_KEY)
|
||||
|
||||
IMAGE_SIZE = os.getenv("IMAGE_SIZE", "512x512")
|
||||
|
||||
IMAGE_STEPS = int(os.getenv("IMAGE_STEPS", 50))
|
||||
|
||||
IMAGE_GENERATION_MODEL = os.getenv("IMAGE_GENERATION_MODEL", "")
|
||||
|
||||
####################################
|
||||
# Audio
|
||||
|
@ -504,7 +562,17 @@ AUDIO_OPENAI_API_KEY = os.getenv("AUDIO_OPENAI_API_KEY", OPENAI_API_KEY)
|
|||
# LiteLLM
|
||||
####################################
|
||||
|
||||
|
||||
ENABLE_LITELLM = os.environ.get("ENABLE_LITELLM", "True").lower() == "true"
|
||||
|
||||
LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))
|
||||
if LITELLM_PROXY_PORT < 0 or LITELLM_PROXY_PORT > 65535:
|
||||
raise ValueError("Invalid port number for LITELLM_PROXY_PORT")
|
||||
LITELLM_PROXY_HOST = os.getenv("LITELLM_PROXY_HOST", "127.0.0.1")
|
||||
|
||||
|
||||
####################################
|
||||
# Database
|
||||
####################################
|
||||
|
||||
DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db")
|
||||
|
|
|
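Hedged DATABASE_URL examples for the new setting: the SQLite form mirrors the default above, while the Postgres/MySQL forms assume the backend resolves the URL with peewee's playhouse.db_url and rely on the psycopg2-binary / pymysql packages added to requirements.txt.

SQLITE_URL = "sqlite:///data/webui.db"                     # default, a local file
POSTGRES_URL = "postgresql://webui:secret@db:5432/webui"   # example only
MYSQL_URL = "mysql://webui:secret@db:3306/webui"           # example only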
@ -69,3 +69,5 @@ class ERROR_MESSAGES(str, Enum):
|
|||
CREATE_API_KEY_ERROR = "Oops! Something went wrong while creating your API key. Please try again later. If the issue persists, contact support for assistance."
|
||||
|
||||
EMPTY_CONTENT = "The content provided is empty. Please ensure that there is text or data present before proceeding."
|
||||
|
||||
DB_NOT_SQLITE = "This feature is only available when running with SQLite databases."
|
||||
|
|
|
@ -47,7 +47,8 @@ from config import (
|
|||
FRONTEND_BUILD_DIR,
|
||||
CACHE_DIR,
|
||||
STATIC_DIR,
|
||||
MODEL_FILTER_ENABLED,
|
||||
ENABLE_LITELLM,
|
||||
ENABLE_MODEL_FILTER,
|
||||
MODEL_FILTER_LIST,
|
||||
GLOBAL_LOG_LEVEL,
|
||||
SRC_LOG_LEVELS,
|
||||
|
@ -89,7 +90,7 @@ https://github.com/open-webui/open-webui
|
|||
|
||||
app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)
|
||||
|
||||
app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
|
||||
app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
|
||||
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
|
||||
|
||||
app.state.WEBHOOK_URL = WEBHOOK_URL
|
||||
|
@ -116,15 +117,14 @@ class RAGMiddleware(BaseHTTPMiddleware):
|
|||
if "docs" in data:
|
||||
data = {**data}
|
||||
data["messages"] = rag_messages(
|
||||
data["docs"],
|
||||
data["messages"],
|
||||
rag_app.state.RAG_TEMPLATE,
|
||||
rag_app.state.TOP_K,
|
||||
rag_app.state.RAG_EMBEDDING_ENGINE,
|
||||
rag_app.state.RAG_EMBEDDING_MODEL,
|
||||
rag_app.state.sentence_transformer_ef,
|
||||
rag_app.state.OPENAI_API_KEY,
|
||||
rag_app.state.OPENAI_API_BASE_URL,
|
||||
docs=data["docs"],
|
||||
messages=data["messages"],
|
||||
template=rag_app.state.RAG_TEMPLATE,
|
||||
embedding_function=rag_app.state.EMBEDDING_FUNCTION,
|
||||
k=rag_app.state.TOP_K,
|
||||
reranking_function=rag_app.state.sentence_transformer_rf,
|
||||
r=rag_app.state.RELEVANCE_THRESHOLD,
|
||||
hybrid_search=rag_app.state.ENABLE_RAG_HYBRID_SEARCH,
|
||||
)
|
||||
del data["docs"]
|
||||
|
||||
|
@ -176,7 +176,8 @@ async def check_url(request: Request, call_next):
|
|||
|
||||
@app.on_event("startup")
|
||||
async def on_startup():
|
||||
asyncio.create_task(start_litellm_background())
|
||||
if ENABLE_LITELLM:
|
||||
asyncio.create_task(start_litellm_background())
|
||||
|
||||
|
||||
app.mount("/api/v1", webui_app)
|
||||
|
@ -215,7 +216,7 @@ async def get_app_config():
|
|||
@app.get("/api/config/model/filter")
|
||||
async def get_model_filter_config(user=Depends(get_admin_user)):
|
||||
return {
|
||||
"enabled": app.state.MODEL_FILTER_ENABLED,
|
||||
"enabled": app.state.ENABLE_MODEL_FILTER,
|
||||
"models": app.state.MODEL_FILTER_LIST,
|
||||
}
|
||||
|
||||
|
@ -229,20 +230,20 @@ class ModelFilterConfigForm(BaseModel):
|
|||
async def update_model_filter_config(
|
||||
form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
|
||||
):
|
||||
app.state.MODEL_FILTER_ENABLED = form_data.enabled
|
||||
app.state.ENABLE_MODEL_FILTER = form_data.enabled
|
||||
app.state.MODEL_FILTER_LIST = form_data.models
|
||||
|
||||
ollama_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
|
||||
ollama_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
|
||||
ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST
|
||||
|
||||
openai_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
|
||||
openai_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
|
||||
openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST
|
||||
|
||||
litellm_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
|
||||
litellm_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
|
||||
litellm_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST
|
||||
|
||||
return {
|
||||
"enabled": app.state.MODEL_FILTER_ENABLED,
|
||||
"enabled": app.state.ENABLE_MODEL_FILTER,
|
||||
"models": app.state.MODEL_FILTER_LIST,
|
||||
}
|
||||
|
||||
|
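A hedged sketch of driving the renamed model filter at runtime through the admin endpoint above; it assumes the update route is a POST on the same /api/config/model/filter path, and the host, token, and model name are placeholders.

import requests

resp = requests.post(
    "http://localhost:8080/api/config/model/filter",
    headers={"Authorization": "Bearer <admin-token>"},
    json={"enabled": True, "models": ["llama3:latest"]},
)
print(resp.json())  # e.g. {"enabled": True, "models": ["llama3:latest"]}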
@ -326,4 +327,5 @@ app.mount(
|
|||
|
||||
@app.on_event("shutdown")
|
||||
async def shutdown_event():
|
||||
await shutdown_litellm_background()
|
||||
if ENABLE_LITELLM:
|
||||
await shutdown_litellm_background()
|
||||
|
|
|
@ -15,6 +15,8 @@ requests
|
|||
aiohttp
|
||||
peewee
|
||||
peewee-migrate
|
||||
psycopg2-binary
|
||||
pymysql
|
||||
bcrypt
|
||||
|
||||
litellm==1.35.17
|
||||
|
|
|
@ -6,6 +6,7 @@ cd "$SCRIPT_DIR" || exit
|
|||
KEY_FILE=.webui_secret_key
|
||||
|
||||
PORT="${PORT:-8080}"
|
||||
HOST="${HOST:-0.0.0.0}"
|
||||
if test "$WEBUI_SECRET_KEY $WEBUI_JWT_SECRET_KEY" = " "; then
|
||||
echo "No WEBUI_SECRET_KEY provided"
|
||||
|
||||
|
@ -29,4 +30,4 @@ if [ "$USE_CUDA_DOCKER" = "true" ]; then
|
|||
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib/python3.11/site-packages/torch/lib:/usr/local/lib/python3.11/site-packages/nvidia/cudnn/lib"
|
||||
fi
|
||||
|
||||
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'
|
||||
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
|
||||
|
|
|
@ -7,7 +7,7 @@ SET "SCRIPT_DIR=%~dp0"
|
|||
cd /d "%SCRIPT_DIR%" || exit /b
|
||||
|
||||
SET "KEY_FILE=.webui_secret_key"
|
||||
SET "PORT=%PORT:8080%"
|
||||
IF "%PORT%"=="" SET PORT=8080
|
||||
SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%"
|
||||
SET "WEBUI_JWT_SECRET_KEY=%WEBUI_JWT_SECRET_KEY%"
|
||||
|
||||
|
|
|
@ -89,6 +89,8 @@ def get_current_user(
|
|||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.INVALID_TOKEN,
|
||||
)
|
||||
else:
|
||||
Users.update_user_last_active_by_id(user.id)
|
||||
return user
|
||||
else:
|
||||
raise HTTPException(
|
||||
|
@ -99,11 +101,15 @@ def get_current_user(
|
|||
|
||||
def get_current_user_by_api_key(api_key: str):
|
||||
user = Users.get_user_by_api_key(api_key)
|
||||
|
||||
if user is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.INVALID_TOKEN,
|
||||
)
|
||||
else:
|
||||
Users.update_user_last_active_by_id(user.id)
|
||||
|
||||
return user
|
||||
|
||||
|
||||
|
|
|
@ -2,7 +2,12 @@
|
|||
echo "Warning: This will remove all containers and volumes, including persistent data. Do you want to continue? [Y/N]"
|
||||
read ans
|
||||
if [ "$ans" == "Y" ] || [ "$ans" == "y" ]; then
|
||||
docker-compose down -v
|
||||
command docker-compose 2>/dev/null
|
||||
if [ "$?" == "0" ]; then
|
||||
docker-compose down -v
|
||||
else
|
||||
docker compose down -v
|
||||
fi
|
||||
else
|
||||
echo "Operation cancelled."
|
||||
fi
|
||||
|
|
8
cypress.config.ts
Normal file
|
@ -0,0 +1,8 @@
|
|||
import { defineConfig } from 'cypress';
|
||||
|
||||
export default defineConfig({
|
||||
e2e: {
|
||||
baseUrl: 'http://localhost:8080'
|
||||
},
|
||||
video: true
|
||||
});
|
46
cypress/e2e/chat.cy.ts
Normal file
|
@ -0,0 +1,46 @@
|
|||
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
|
||||
/// <reference path="../support/index.d.ts" />
|
||||
|
||||
// These tests run through the chat flow.
|
||||
describe('Settings', () => {
|
||||
// Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames
|
||||
after(() => {
|
||||
// eslint-disable-next-line cypress/no-unnecessary-waiting
|
||||
cy.wait(2000);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Login as the admin user
|
||||
cy.loginAdmin();
|
||||
// Visit the home page
|
||||
cy.visit('/');
|
||||
});
|
||||
|
||||
context('Ollama', () => {
|
||||
it('user can select a model', () => {
|
||||
// Click on the model selector
|
||||
cy.get('button[aria-label="Select a model"]').click();
|
||||
// Select the first model
|
||||
cy.get('div[role="option"][data-value]').first().click();
|
||||
});
|
||||
|
||||
it('user can perform text chat', () => {
|
||||
// Click on the model selector
|
||||
cy.get('button[aria-label="Select a model"]').click();
|
||||
// Select the first model
|
||||
cy.get('div[role="option"][data-value]').first().click();
|
||||
// Type a message
|
||||
cy.get('#chat-textarea').type('Hi, what can you do? A single sentence only please.', {
|
||||
force: true
|
||||
});
|
||||
// Send the message
|
||||
cy.get('button[type="submit"]').click();
|
||||
// User's message should be visible
|
||||
cy.get('.chat-user').should('exist');
|
||||
// Wait for the response
|
||||
cy.get('.chat-assistant', { timeout: 120_000 }) // .chat-assistant is created after the first token is received
|
||||
.find('div[aria-label="Generation Info"]', { timeout: 120_000 }) // Generation Info is created after the stop token is received
|
||||
.should('exist');
|
||||
});
|
||||
});
|
||||
});
|
52
cypress/e2e/registration.cy.ts
Normal file
|
@ -0,0 +1,52 @@
|
|||
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
|
||||
/// <reference path="../support/index.d.ts" />
|
||||
import { adminUser } from '../support/e2e';
|
||||
|
||||
// These tests assume the following defaults:
|
||||
// 1. Either no users exist in the database, or the test admin user is already an admin
|
||||
// 2. Language is set to English
|
||||
// 3. The default role for new users is 'pending'
|
||||
describe('Registration and Login', () => {
|
||||
// Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames
|
||||
after(() => {
|
||||
// eslint-disable-next-line cypress/no-unnecessary-waiting
|
||||
cy.wait(2000);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
cy.visit('/');
|
||||
});
|
||||
|
||||
it('should register a new user as pending', () => {
|
||||
const userName = `Test User - ${Date.now()}`;
|
||||
const userEmail = `cypress-${Date.now()}@example.com`;
|
||||
// Toggle from sign in to sign up
|
||||
cy.contains('Sign up').click();
|
||||
// Fill out the form
|
||||
cy.get('input[autocomplete="name"]').type(userName);
|
||||
cy.get('input[autocomplete="email"]').type(userEmail);
|
||||
cy.get('input[type="password"]').type('password');
|
||||
// Submit the form
|
||||
cy.get('button[type="submit"]').click();
|
||||
// Wait until the user is redirected to the home page
|
||||
cy.contains(userName);
|
||||
// Expect the user to be pending
|
||||
cy.contains('Check Again');
|
||||
});
|
||||
|
||||
it('can login with the admin user', () => {
|
||||
// Fill out the form
|
||||
cy.get('input[autocomplete="email"]').type(adminUser.email);
|
||||
cy.get('input[type="password"]').type(adminUser.password);
|
||||
// Submit the form
|
||||
cy.get('button[type="submit"]').click();
|
||||
// Wait until the user is redirected to the home page
|
||||
cy.contains(adminUser.name);
|
||||
// Dismiss the changelog dialog if it is visible
|
||||
cy.getAllLocalStorage().then((ls) => {
|
||||
if (!ls['version']) {
|
||||
cy.get('button').contains("Okay, Let's Go!").click();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
88
cypress/e2e/settings.cy.ts
Normal file
|
@ -0,0 +1,88 @@
|
|||
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
|
||||
/// <reference path="../support/index.d.ts" />
|
||||
import { adminUser } from '../support/e2e';
|
||||
|
||||
// These tests run through the various settings pages, ensuring that the user can interact with them as expected
|
||||
describe('Settings', () => {
|
||||
// Wait for 2 seconds after all tests to fix an issue with Cypress's video recording missing the last few frames
|
||||
after(() => {
|
||||
// eslint-disable-next-line cypress/no-unnecessary-waiting
|
||||
cy.wait(2000);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Login as the admin user
|
||||
cy.loginAdmin();
|
||||
// Visit the home page
|
||||
cy.visit('/');
|
||||
// Open the sidebar if it is not already open
|
||||
cy.get('[aria-label="Open sidebar"]').then(() => {
|
||||
cy.get('button[id="sidebar-toggle-button"]').click();
|
||||
});
|
||||
// Click on the profile link
|
||||
cy.get('button').contains(adminUser.name).click();
|
||||
// Click on the settings link
|
||||
cy.get('button').contains('Settings').click();
|
||||
});
|
||||
|
||||
context('General', () => {
|
||||
it('user can open the General modal and hit save', () => {
|
||||
cy.get('button').contains('General').click();
|
||||
cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Connections', () => {
|
||||
it('user can open the Connections modal and hit save', () => {
|
||||
cy.get('button').contains('Connections').click();
|
||||
cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Models', () => {
|
||||
it('user can open the Models modal', () => {
|
||||
cy.get('button').contains('Models').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Interface', () => {
|
||||
it('user can open the Interface modal and hit save', () => {
|
||||
cy.get('button').contains('Interface').click();
|
||||
cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Audio', () => {
|
||||
it('user can open the Audio modal and hit save', () => {
|
||||
cy.get('button').contains('Audio').click();
|
||||
cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Images', () => {
|
||||
it('user can open the Images modal and hit save', () => {
|
||||
cy.get('button').contains('Images').click();
|
||||
// Currently fails because the backend requires a valid URL
|
||||
// cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Chats', () => {
|
||||
it('user can open the Chats modal', () => {
|
||||
cy.get('button').contains('Chats').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('Account', () => {
|
||||
it('user can open the Account modal and hit save', () => {
|
||||
cy.get('button').contains('Account').click();
|
||||
cy.get('button').contains('Save').click();
|
||||
});
|
||||
});
|
||||
|
||||
context('About', () => {
|
||||
it('user can open the About modal', () => {
|
||||
cy.get('button').contains('About').click();
|
||||
});
|
||||
});
|
||||
});
|
73
cypress/support/e2e.ts
Normal file
|
@ -0,0 +1,73 @@
|
|||
/// <reference types="cypress" />
|
||||
|
||||
export const adminUser = {
|
||||
name: 'Admin User',
|
||||
email: 'admin@example.com',
|
||||
password: 'password'
|
||||
};
|
||||
|
||||
const login = (email: string, password: string) => {
|
||||
return cy.session(
|
||||
email,
|
||||
() => {
|
||||
// Visit auth page
|
||||
cy.visit('/auth');
|
||||
// Fill out the form
|
||||
cy.get('input[autocomplete="email"]').type(email);
|
||||
cy.get('input[type="password"]').type(password);
|
||||
// Submit the form
|
||||
cy.get('button[type="submit"]').click();
|
||||
// Wait until the user is redirected to the home page
|
||||
cy.get('#chat-search').should('exist');
|
||||
// Get the current version to skip the changelog dialog
|
||||
if (localStorage.getItem('version') === null) {
|
||||
cy.get('button').contains("Okay, Let's Go!").click();
|
||||
}
|
||||
},
|
||||
{
|
||||
validate: () => {
|
||||
cy.request({
|
||||
method: 'GET',
|
||||
url: '/api/v1/auths/',
|
||||
headers: {
|
||||
Authorization: 'Bearer ' + localStorage.getItem('token')
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
};
|
||||
|
||||
const register = (name: string, email: string, password: string) => {
|
||||
return cy
|
||||
.request({
|
||||
method: 'POST',
|
||||
url: '/api/v1/auths/signup',
|
||||
body: {
|
||||
name: name,
|
||||
email: email,
|
||||
password: password
|
||||
},
|
||||
failOnStatusCode: false
|
||||
})
|
||||
.then((response) => {
|
||||
expect(response.status).to.be.oneOf([200, 400]);
|
||||
});
|
||||
};
|
||||
|
||||
const registerAdmin = () => {
|
||||
return register(adminUser.name, adminUser.email, adminUser.password);
|
||||
};
|
||||
|
||||
const loginAdmin = () => {
|
||||
return login(adminUser.email, adminUser.password);
|
||||
};
|
||||
|
||||
Cypress.Commands.add('login', (email, password) => login(email, password));
|
||||
Cypress.Commands.add('register', (name, email, password) => register(name, email, password));
|
||||
Cypress.Commands.add('registerAdmin', () => registerAdmin());
|
||||
Cypress.Commands.add('loginAdmin', () => loginAdmin());
|
||||
|
||||
before(() => {
|
||||
cy.registerAdmin();
|
||||
});
|
11
cypress/support/index.d.ts
vendored
Normal file
|
@ -0,0 +1,11 @@
|
|||
// load the global Cypress types
|
||||
/// <reference types="cypress" />
|
||||
|
||||
declare namespace Cypress {
|
||||
interface Chainable {
|
||||
login(email: string, password: string): Chainable<Element>;
|
||||
register(name: string, email: string, password: string): Chainable<Element>;
|
||||
registerAdmin(): Chainable<Element>;
|
||||
loginAdmin(): Chainable<Element>;
|
||||
}
|
||||
}
|
7
cypress/tsconfig.json
Normal file
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"inlineSourceMap": true,
|
||||
"sourceMap": false
|
||||
}
|
||||
}
|
1769
package-lock.json
generated
File diff suppressed because it is too large
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "open-webui",
|
||||
"version": "0.1.121",
|
||||
"version": "0.1.122",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "vite dev --host",
|
||||
|
@ -14,7 +14,8 @@
|
|||
"lint:backend": "pylint backend/",
|
||||
"format": "prettier --plugin-search-dir --write '**/*.{js,ts,svelte,css,md,html,json}'",
|
||||
"format:backend": "black . --exclude \"/venv/\"",
|
||||
"i18n:parse": "i18next --config i18next-parser.config.ts && prettier --write 'src/lib/i18n/**/*.{js,json}'"
|
||||
"i18n:parse": "i18next --config i18next-parser.config.ts && prettier --write 'src/lib/i18n/**/*.{js,json}'",
|
||||
"cy:open": "cypress open"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sveltejs/adapter-auto": "^2.0.0",
|
||||
|
@ -25,8 +26,10 @@
|
|||
"@typescript-eslint/eslint-plugin": "^6.17.0",
|
||||
"@typescript-eslint/parser": "^6.17.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"cypress": "^13.8.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"eslint-plugin-cypress": "^3.0.2",
|
||||
"eslint-plugin-svelte": "^2.30.0",
|
||||
"i18next-parser": "^8.13.0",
|
||||
"postcss": "^8.4.31",
|
||||
|
@ -53,7 +56,6 @@
|
|||
"i18next-resources-to-backend": "^1.2.0",
|
||||
"idb": "^7.1.1",
|
||||
"js-sha256": "^0.10.1",
|
||||
"jspdf": "^2.5.1",
|
||||
"katex": "^0.16.9",
|
||||
"marked": "^9.1.0",
|
||||
"svelte-sonner": "^0.3.19",
|
||||
|
|
|
@ -82,6 +82,7 @@ usage() {
|
|||
echo "Examples:"
|
||||
echo " $0 --drop"
|
||||
echo " $0 --enable-gpu[count=1]"
|
||||
echo " $0 --enable-gpu[count=all]"
|
||||
echo " $0 --enable-api[port=11435]"
|
||||
echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]"
|
||||
echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]"
|
||||
|
@ -160,7 +161,7 @@ else
|
|||
if [[ $enable_gpu == true ]]; then
|
||||
# Validate and process command-line arguments
|
||||
if [[ -n $gpu_count ]]; then
|
||||
if ! [[ $gpu_count =~ ^[0-9]+$ ]]; then
|
||||
if ! [[ $gpu_count =~ ^([0-9]+|all)$ ]]; then
|
||||
echo "Invalid GPU count: $gpu_count"
|
||||
exit 1
|
||||
fi
|
||||
|
|
37
src/app.html
|
@ -43,9 +43,46 @@
|
|||
})();
|
||||
</script>
|
||||
|
||||
<title>Open WebUI</title>
|
||||
|
||||
%sveltekit.head%
|
||||
</head>
|
||||
<body data-sveltekit-preload-data="hover">
|
||||
<div style="display: contents">%sveltekit.body%</div>
|
||||
|
||||
<div
|
||||
id="splash-screen"
|
||||
style="
|
||||
position: fixed;
|
||||
z-index: 100;
|
||||
background: #fff;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
"
|
||||
>
|
||||
<style type="text/css" nonce="">
|
||||
html {
|
||||
overflow-y: scroll !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
<img
|
||||
style="
|
||||
position: absolute;
|
||||
width: 6rem;
|
||||
height: 6rem;
|
||||
top: 46%;
|
||||
left: 50%;
|
||||
margin: -40px 0 0 -40px;
|
||||
"
|
||||
src="/logo.svg"
|
||||
/>
|
||||
|
||||
<!-- <span style="position: absolute; bottom: 32px; left: 50%; margin: -36px 0 0 -36px">
|
||||
Footer content
|
||||
</span> -->
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -62,6 +62,37 @@ export const getChatList = async (token: string = '') => {
|
|||
return res;
|
||||
};
|
||||
|
||||
export const getChatListByUserId = async (token: string = '', userId: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/list/user/${userId}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getArchivedChatList = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
|
|
|
@ -211,10 +211,12 @@ export const generateOpenAIChatCompletion = async (
|
|||
token: string = '',
|
||||
body: object,
|
||||
url: string = OPENAI_API_BASE_URL
|
||||
) => {
|
||||
): Promise<[Response | null, AbortController]> => {
|
||||
const controller = new AbortController();
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${url}/chat/completions`, {
|
||||
signal: controller.signal,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
|
@ -231,7 +233,7 @@ export const generateOpenAIChatCompletion = async (
|
|||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
return [res, controller];
|
||||
};
|
||||
|
||||
export const synthesizeOpenAISpeech = async (
|
||||
|
|
|
@ -123,6 +123,7 @@ export const getQuerySettings = async (token: string) => {
|
|||
|
||||
type QuerySettings = {
|
||||
k: number | null;
|
||||
r: number | null;
|
||||
template: string | null;
|
||||
};
|
||||
|
||||
|
@ -413,3 +414,64 @@ export const updateEmbeddingConfig = async (token: string, payload: EmbeddingMod
|
|||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getRerankingConfig = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/reranking`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
type RerankingModelUpdateForm = {
|
||||
reranking_model: string;
|
||||
};
|
||||
|
||||
export const updateRerankingConfig = async (token: string, payload: RerankingModelUpdateForm) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/reranking/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...payload
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
|
|
@ -26,16 +26,28 @@ async function* openAIStreamToIterator(
|
|||
break;
|
||||
}
|
||||
const lines = value.split('\n');
|
||||
for (const line of lines) {
|
||||
for (let line of lines) {
|
||||
if (line.endsWith('\r')) {
|
||||
// Remove trailing \r
|
||||
line = line.slice(0, -1);
|
||||
}
|
||||
if (line !== '') {
|
||||
console.log(line);
|
||||
if (line === 'data: [DONE]') {
|
||||
yield { done: true, value: '' };
|
||||
} else if (line.startsWith(':')) {
|
||||
// Events starting with : are comments https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#event_stream_format
|
||||
// OpenRouter sends heartbeats like ": OPENROUTER PROCESSING"
|
||||
continue;
|
||||
} else {
|
||||
const data = JSON.parse(line.replace(/^data: /, ''));
|
||||
console.log(data);
|
||||
try {
|
||||
const data = JSON.parse(line.replace(/^data: /, ''));
|
||||
console.log(data);
|
||||
|
||||
yield { done: false, value: data.choices[0].delta.content ?? '' };
|
||||
yield { done: false, value: data.choices?.[0]?.delta?.content ?? '' };
|
||||
} catch (e) {
|
||||
console.error('Error extracting delta from SSE event:', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -83,9 +83,9 @@ export const downloadDatabase = async (token: string) => {
			Authorization: `Bearer ${token}`
		}
	})
		.then((response) => {
		.then(async (response) => {
			if (!response.ok) {
				throw new Error('Network response was not ok');
				throw await response.json();
			}
			return response.blob();
		})

@@ -100,7 +100,11 @@ export const downloadDatabase = async (token: string) => {
		})
		.catch((err) => {
			console.log(err);
			error = err;
			error = err.detail;
			return null;
		});

	if (error) {
		throw error;
	}
};
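The lines between the two hunks (turning the response blob into a saved file) are outside this diff; a typical browser-side continuation looks roughly like the sketch below, with the helper name and filename purely illustrative:

	const saveBlob = (blob: Blob, filename = 'webui.db') => {
		// Save the blob by clicking a temporary object-URL link
		const url = window.URL.createObjectURL(blob);
		const a = document.createElement('a');
		a.href = url;
		a.download = filename; // illustrative filename
		document.body.appendChild(a);
		a.click();
		a.remove();
		window.URL.revokeObjectURL(url);
	};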
@@ -110,7 +110,7 @@
			localStorage.version = $config.version;
			show = false;
		}}
		class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
		class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
	>
		<span class="relative">{$i18n.t("Okay, Let's Go!")}</span>
	</button>
@@ -86,7 +86,7 @@

	<div class="text-xs text-gray-500">
		{$i18n.t('Created at')}
		{dayjs(selectedUser.timestamp * 1000).format($i18n.t('MMMM DD, YYYY'))}
		{dayjs(selectedUser.created_at * 1000).format($i18n.t('MMMM DD, YYYY'))}
	</div>
	</div>
	</div>

@@ -139,7 +139,7 @@

	<div class="flex justify-end pt-3 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			{$i18n.t('Save')}
@@ -2,6 +2,7 @@
	import { downloadDatabase } from '$lib/apis/utils';
	import { onMount, getContext } from 'svelte';
	import { config } from '$lib/stores';
	import { toast } from 'svelte-sonner';

	const i18n = getContext('i18n');

@@ -32,7 +33,9 @@
		on:click={() => {
			// exportAllUserChats();

			downloadDatabase(localStorage.token);
			downloadDatabase(localStorage.token).catch((error) => {
				toast.error(error);
			});
		}}
	>
		<div class=" self-center mr-3">

@@ -59,10 +62,11 @@

	<!-- <div class="flex justify-end pt-3 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			Save
		</button>

	</div> -->
</form>
@@ -159,7 +159,7 @@

	<div class="flex justify-end pt-3 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			{$i18n.t('Save')}

@@ -190,7 +190,7 @@

	<div class="flex justify-end pt-3 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			{$i18n.t('Save')}
141	src/lib/components/admin/UserChatsModal.svelte	Normal file
@@ -0,0 +1,141 @@
<script lang="ts">
	import { toast } from 'svelte-sonner';
	import dayjs from 'dayjs';
	import { getContext, createEventDispatcher } from 'svelte';

	const dispatch = createEventDispatcher();

	import Modal from '$lib/components/common/Modal.svelte';
	import { getChatListByUserId, deleteChatById, getArchivedChatList } from '$lib/apis/chats';
	import Tooltip from '$lib/components/common/Tooltip.svelte';

	const i18n = getContext('i18n');

	export let show = false;
	export let user;

	let chats = [];

	const deleteChatHandler = async (chatId) => {
		const res = await deleteChatById(localStorage.token, chatId).catch((error) => {
			toast.error(error);
		});

		chats = await getChatListByUserId(localStorage.token, user.id);
	};

	$: if (show) {
		(async () => {
			if (user.id) {
				chats = await getChatListByUserId(localStorage.token, user.id);
			}
		})();
	}
</script>

<Modal size="lg" bind:show>
	<div>
		<div class=" flex justify-between dark:text-gray-300 px-5 py-4">
			<div class=" text-lg font-medium self-center capitalize">
				{$i18n.t("{{user}}'s Chats", { user: user.name })}
			</div>
			<button
				class="self-center"
				on:click={() => {
					show = false;
				}}
			>
				<svg
					xmlns="http://www.w3.org/2000/svg"
					viewBox="0 0 20 20"
					fill="currentColor"
					class="w-5 h-5"
				>
					<path
						d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
					/>
				</svg>
			</button>
		</div>
		<hr class=" dark:border-gray-850" />

		<div class="flex flex-col md:flex-row w-full px-5 py-4 md:space-x-4 dark:text-gray-200">
			<div class=" flex flex-col w-full sm:flex-row sm:justify-center sm:space-x-6">
				{#if chats.length > 0}
					<div class="text-left text-sm w-full mb-4 max-h-[22rem] overflow-y-scroll">
						<div class="relative overflow-x-auto">
							<table class="w-full text-sm text-left text-gray-600 dark:text-gray-400 table-auto">
								<thead
									class="text-xs text-gray-700 uppercase bg-transparent dark:text-gray-200 border-b-2 dark:border-gray-800"
								>
									<tr>
										<th scope="col" class="px-3 py-2"> {$i18n.t('Name')} </th>
										<th scope="col" class="px-3 py-2 hidden md:flex"> {$i18n.t('Created At')} </th>
										<th scope="col" class="px-3 py-2 text-right" />
									</tr>
								</thead>
								<tbody>
									{#each chats as chat, idx}
										<tr
											class="bg-transparent {idx !== chats.length - 1 &&
												'border-b'} dark:bg-gray-900 dark:border-gray-850 text-xs"
										>
											<td class="px-3 py-1 w-2/3">
												<a href="/s/{chat.id}" target="_blank">
													<div class=" underline line-clamp-1">
														{chat.title}
													</div>
												</a>
											</td>

											<td class=" px-3 py-1 hidden md:flex h-[2.5rem]">
												<div class="my-auto">
													{dayjs(chat.created_at * 1000).format($i18n.t('MMMM DD, YYYY HH:mm'))}
												</div>
											</td>

											<td class="px-3 py-1 text-right">
												<div class="flex justify-end w-full">
													<Tooltip content="Delete Chat">
														<button
															class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
															on:click={async () => {
																deleteChatHandler(chat.id);
															}}
														>
															<svg
																xmlns="http://www.w3.org/2000/svg"
																fill="none"
																viewBox="0 0 24 24"
																stroke-width="1.5"
																stroke="currentColor"
																class="w-4 h-4"
															>
																<path
																	stroke-linecap="round"
																	stroke-linejoin="round"
																	d="m14.74 9-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 0 1-2.244 2.077H8.084a2.25 2.25 0 0 1-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 0 0-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 0 1 3.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 0 0-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 0 0-7.5 0"
																/>
															</svg>
														</button>
													</Tooltip>
												</div>
											</td>
										</tr>
									{/each}
								</tbody>
							</table>
						</div>
						<!-- {#each chats as chat}
							<div>
								{JSON.stringify(chat)}
							</div>
						{/each} -->
					</div>
				{:else}
					<div class="text-left text-sm w-full mb-8">You have no archived conversations.</div>
				{/if}
			</div>
		</div>
	</div>
</Modal>
@@ -13,6 +13,7 @@
	import Models from './MessageInput/Models.svelte';
	import { transcribeAudio } from '$lib/apis/audio';
	import Tooltip from '../common/Tooltip.svelte';
	import Page from '../../../routes/(app)/+page.svelte';

	const i18n = getContext('i18n');

@@ -688,11 +689,13 @@
							: $i18n.t('Send a Message')}
						bind:value={prompt}
						on:keypress={(e) => {
							if (e.keyCode == 13 && !e.shiftKey) {
								e.preventDefault();
							}
							if (prompt !== '' && e.keyCode == 13 && !e.shiftKey) {
								submitPrompt(prompt, user);
							if (window.innerWidth > 1024) {
								if (e.keyCode == 13 && !e.shiftKey) {
									e.preventDefault();
								}
								if (prompt !== '' && e.keyCode == 13 && !e.shiftKey) {
									submitPrompt(prompt, user);
								}
							}
						}}
						on:keydown={async (e) => {

@@ -756,7 +759,11 @@
								...document.getElementsByClassName('selected-command-option-button')
							]?.at(-1);

							commandOptionButton?.click();
							if (commandOptionButton) {
								commandOptionButton?.click();
							} else {
								document.getElementById('send-message-button')?.click();
							}
						}

						if (['/', '#', '@'].includes(prompt.charAt(0)) && e.key === 'Tab') {

@@ -895,6 +902,7 @@

						<Tooltip content={$i18n.t('Send message')}>
							<button
								id="send-message-button"
								class="{prompt !== ''
									? 'bg-black text-white hover:bg-gray-900 dark:bg-white dark:text-black dark:hover:bg-gray-100 '
									: 'text-white bg-gray-100 dark:text-gray-900 dark:bg-gray-800 disabled'} transition rounded-full p-1.5 self-center"
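The keypress handler above now gates Enter-to-send on viewport width, so narrow (likely touch) screens no longer submit on Enter. The intent, expressed as a small predicate for reference (the helper name is illustrative, not part of the diff):

	const shouldSubmitOnEnter = (e: KeyboardEvent, prompt: string) =>
		window.innerWidth > 1024 && e.keyCode === 13 && !e.shiftKey && prompt !== '';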
@@ -133,8 +133,9 @@
	};

	const renderLatex = () => {
		let chatMessageElements = document.getElementsByClassName('chat-assistant');
		// let lastChatMessageElement = chatMessageElements[chatMessageElements.length - 1];
		let chatMessageElements = document
			.getElementById(`message-${message.id}`)
			?.getElementsByClassName('chat-assistant');

		for (const element of chatMessageElements) {
			auto_render(element, {

@@ -322,7 +323,7 @@
</script>

{#key message.id}
	<div class=" flex w-full message-{message.id}">
	<div class=" flex w-full message-{message.id}" id="message-{message.id}">
		<ProfileImage
			src={modelfiles[message.model]?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`}
		/>

@@ -376,7 +377,7 @@

					<div class=" mt-2 mb-1 flex justify-center space-x-2 text-sm font-medium">
						<button
							class="px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded-lg"
							class="px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg-lg"
							on:click={() => {
								editMessageConfirmHandler();
							}}
@@ -193,7 +193,7 @@
					<div class=" mt-2 mb-1 flex justify-center space-x-2 text-sm font-medium">
						<button
							id="save-edit-message-button"
							class="px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded-lg"
							class="px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg-lg"
							on:click={() => {
								editMessageConfirmHandler();
							}}

@@ -125,7 +125,7 @@

	<div class="flex justify-end pt-3 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			on:click={() => {
				saveSettings({
					options: {

@@ -147,8 +147,8 @@
					<option value="dark">🌑 {$i18n.t('Dark')}</option>
					<option value="oled-dark">🌃 {$i18n.t('OLED Dark')}</option>
					<option value="light">☀️ {$i18n.t('Light')}</option>
					<option value="rose-pine dark">🪻 {$i18n.t('Rosé Pine')}</option>
					<option value="rose-pine-dawn light">🌷 {$i18n.t('Rosé Pine Dawn')}</option>
					<!-- <option value="rose-pine dark">🪻 {$i18n.t('Rosé Pine')}</option>
					<option value="rose-pine-dawn light">🌷 {$i18n.t('Rosé Pine Dawn')}</option> -->
				</select>
			</div>
		</div>

@@ -161,7 +161,7 @@

	<div class="flex justify-end pt-5 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			{$i18n.t('Save')}

@@ -158,7 +158,7 @@

	<div class="flex justify-end pt-5 text-sm font-medium">
		<button
			class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
			class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
			type="submit"
		>
			{$i18n.t('Save')}
@@ -8,7 +8,9 @@
		updateQuerySettings,
		resetVectorDB,
		getEmbeddingConfig,
		updateEmbeddingConfig
		updateEmbeddingConfig,
		getRerankingConfig,
		updateRerankingConfig
	} from '$lib/apis/rag';

	import { documents, models } from '$lib/stores';

@@ -23,11 +25,13 @@

	let scanDirLoading = false;
	let updateEmbeddingModelLoading = false;
	let updateRerankingModelLoading = false;

	let showResetConfirm = false;

	let embeddingEngine = '';
	let embeddingModel = '';
	let rerankingModel = '';

	let OpenAIKey = '';
	let OpenAIUrl = '';

@@ -38,7 +42,9 @@

	let querySettings = {
		template: '',
		k: 4
		r: 0.0,
		k: 4,
		hybrid: false
	};

	const scanHandler = async () => {

@@ -115,6 +121,29 @@
		}
	};

	const rerankingModelUpdateHandler = async () => {
		console.log('Update reranking model attempt:', rerankingModel);

		updateRerankingModelLoading = true;
		const res = await updateRerankingConfig(localStorage.token, {
			reranking_model: rerankingModel
		}).catch(async (error) => {
			toast.error(error);
			await setRerankingConfig();
			return null;
		});
		updateRerankingModelLoading = false;

		if (res) {
			console.log('rerankingModelUpdateHandler:', res);
			if (res.status === true) {
				toast.success($i18n.t('Reranking model set to "{{reranking_model}}"', res), {
					duration: 1000 * 10
				});
			}
		}
	};

	const submitHandler = async () => {
		const res = await updateRAGConfig(localStorage.token, {
			pdf_extract_images: pdfExtractImages,

@@ -138,6 +167,20 @@
		}
	};

	const setRerankingConfig = async () => {
		const rerankingConfig = await getRerankingConfig(localStorage.token);

		if (rerankingConfig) {
			rerankingModel = rerankingConfig.reranking_model;
		}
	};

	const toggleHybridSearch = async () => {
		querySettings.hybrid = !querySettings.hybrid;

		querySettings = await updateQuerySettings(localStorage.token, querySettings);
	};

	onMount(async () => {
		const res = await getRAGConfig(localStorage.token);

@@ -149,6 +192,7 @@
		}

		await setEmbeddingConfig();
		await setRerankingConfig();

		querySettings = await getQuerySettings(localStorage.token);
	});
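`rerankingModelUpdateHandler` keys its success toast off `res.status` and interpolates `reranking_model` straight from the response object, so the backend reply is presumably shaped along these lines (inferred from the handler above, not from backend code in this diff):

	// Illustrative response from POST {RAG_API_BASE_URL}/reranking/update
	const res = {
		status: true,
		reranking_model: 'the-model-that-was-set'
	};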
@ -165,6 +209,24 @@
|
|||
<div>
|
||||
<div class=" mb-2 text-sm font-medium">{$i18n.t('General Settings')}</div>
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">{$i18n.t('Hybrid Search')}</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded transition"
|
||||
on:click={() => {
|
||||
toggleHybridSearch();
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if querySettings.hybrid === true}
|
||||
<span class="ml-2 self-center">{$i18n.t('On')}</span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center">{$i18n.t('Off')}</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">{$i18n.t('Embedding Model Engine')}</div>
|
||||
<div class="flex items-center relative">
|
||||
|
@ -349,6 +411,75 @@
|
|||
|
||||
<hr class=" dark:border-gray-700 my-3" />
|
||||
|
||||
{#if querySettings.hybrid === true}
|
||||
<div class=" ">
|
||||
<div class=" mb-2 text-sm font-medium">{$i18n.t('Update Reranking Model')}</div>
|
||||
|
||||
<div class="flex w-full">
|
||||
<div class="flex-1 mr-2">
|
||||
<input
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder={$i18n.t('Update reranking model (e.g. {{model}})', {
|
||||
model: rerankingModel.slice(-40)
|
||||
})}
|
||||
bind:value={rerankingModel}
|
||||
/>
|
||||
</div>
|
||||
<button
|
||||
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
|
||||
on:click={() => {
|
||||
rerankingModelUpdateHandler();
|
||||
}}
|
||||
disabled={updateRerankingModelLoading}
|
||||
>
|
||||
{#if updateRerankingModelLoading}
|
||||
<div class="self-center">
|
||||
<svg
|
||||
class=" w-4 h-4"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
><style>
|
||||
.spinner_ajPY {
|
||||
transform-origin: center;
|
||||
animation: spinner_AtaB 0.75s infinite linear;
|
||||
}
|
||||
@keyframes spinner_AtaB {
|
||||
100% {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
</style><path
|
||||
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
|
||||
opacity=".25"
|
||||
/><path
|
||||
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
|
||||
class="spinner_ajPY"
|
||||
/></svg
|
||||
>
|
||||
</div>
|
||||
{:else}
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
d="M8.75 2.75a.75.75 0 0 0-1.5 0v5.69L5.03 6.22a.75.75 0 0 0-1.06 1.06l3.5 3.5a.75.75 0 0 0 1.06 0l3.5-3.5a.75.75 0 0 0-1.06-1.06L8.75 8.44V2.75Z"
|
||||
/>
|
||||
<path
|
||||
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
|
||||
/>
|
||||
</svg>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-700 my-3" />
|
||||
{/if}
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{$i18n.t('Scan for documents from {{path}}', { path: '/data/docs' })}
|
||||
|
@ -473,6 +604,28 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
{#if querySettings.hybrid === true}
|
||||
<div class=" flex">
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class="self-center text-xs font-medium flex-1">
|
||||
{$i18n.t('Relevance Threshold')}
|
||||
</div>
|
||||
|
||||
<div class="self-center p-3">
|
||||
<input
|
||||
class=" w-full rounded-lg py-1.5 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
type="number"
|
||||
step="0.01"
|
||||
placeholder={$i18n.t('Enter Relevance Threshold')}
|
||||
bind:value={querySettings.r}
|
||||
autocomplete="off"
|
||||
min="0.0"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div>
|
||||
<div class=" mb-2.5 text-sm font-medium">{$i18n.t('RAG Template')}</div>
|
||||
<textarea
|
||||
|
@ -581,7 +734,7 @@
|
|||
</div>
|
||||
<div class="flex justify-end pt-3 text-sm font-medium">
|
||||
<button
|
||||
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
|
||||
class=" px-4 py-2 bg-emerald-700 hover:bg-emerald-800 text-gray-100 transition rounded-lg"
|
||||
type="submit"
|
||||
>
|
||||
{$i18n.t('Save')}
|
||||
|
|
19	src/lib/components/icons/ChatBubble.svelte	Normal file
|
@ -0,0 +1,19 @@
|
|||
<script lang="ts">
|
||||
export let className = 'size-4';
|
||||
export let strokeWidth = '1.5';
|
||||
</script>
|
||||
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width={strokeWidth}
|
||||
stroke="currentColor"
|
||||
class={className}
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="M8.625 12a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Zm0 0H8.25m4.125 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Zm0 0H12m4.125 0a.375.375 0 1 1-.75 0 .375.375 0 0 1 .75 0Zm0 0h-.375M21 12c0 4.556-4.03 8.25-9 8.25a9.764 9.764 0 0 1-2.555-.337A5.972 5.972 0 0 1 5.41 20.97a5.969 5.969 0 0 1-.474-.065 4.48 4.48 0 0 0 .978-2.025c.09-.457-.133-.901-.467-1.226C3.93 16.178 3 14.189 3 12c0-4.556 4.03-8.25 9-8.25s9 3.694 9 8.25Z"
|
||||
/>
|
||||
</svg>
|
19	src/lib/components/icons/ChatBubbles.svelte	Normal file
|
@ -0,0 +1,19 @@
|
|||
<script lang="ts">
|
||||
export let className = 'size-4';
|
||||
export let strokeWidth = '1.5';
|
||||
</script>
|
||||
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width={strokeWidth}
|
||||
stroke="currentColor"
|
||||
class={className}
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="M20.25 8.511c.884.284 1.5 1.128 1.5 2.097v4.286c0 1.136-.847 2.1-1.98 2.193-.34.027-.68.052-1.02.072v3.091l-3-3c-1.354 0-2.694-.055-4.02-.163a2.115 2.115 0 0 1-.825-.242m9.345-8.334a2.126 2.126 0 0 0-.476-.095 48.64 48.64 0 0 0-8.048 0c-1.131.094-1.976 1.057-1.976 2.192v4.286c0 .837.46 1.58 1.155 1.951m9.345-8.334V6.637c0-1.621-1.152-3.026-2.76-3.235A48.455 48.455 0 0 0 11.25 3c-2.115 0-4.198.137-6.24.402-1.608.209-2.76 1.614-2.76 3.235v6.226c0 1.621 1.152 3.026 2.76 3.235.577.075 1.157.14 1.74.194V21l4.155-4.155"
|
||||
/>
|
||||
</svg>
|
|
@@ -4,8 +4,6 @@
	import fileSaver from 'file-saver';
	const { saveAs } = fileSaver;

	import { jsPDF } from 'jspdf';

	import { showSettings } from '$lib/stores';
	import { flyAndScale } from '$lib/utils/transitions';

@@ -29,6 +29,7 @@
	import ArchiveBox from '../icons/ArchiveBox.svelte';
	import ArchivedChatsModal from './Sidebar/ArchivedChatsModal.svelte';

	const BREAKPOINT = 1024;
	let show = false;
	let navElement;

@@ -49,9 +50,7 @@
	let isEditing = false;

	onMount(async () => {
		if (window.innerWidth > 1024) {
			show = true;
		}
		show = window.innerWidth > BREAKPOINT;
		await chats.set(await getChatList(localStorage.token));

		let touchstartX = 0;

@@ -79,12 +78,20 @@
			checkDirection();
		};

		const onResize = () => {
			if (show && window.innerWidth < BREAKPOINT) {
				show = false;
			}
		};

		document.addEventListener('touchstart', onTouchStart);
		document.addEventListener('touchend', onTouchEnd);
		window.addEventListener('resize', onResize);

		return () => {
			document.removeEventListener('touchstart', onTouchStart);
			document.removeEventListener('touchend', onTouchEnd);
			document.removeEventListener('resize', onResize);
		};
	});
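One detail worth flagging in the cleanup above: the resize listener is registered on `window` but removed from `document`, so it likely never gets detached when the component is destroyed. A corrected cleanup would presumably look like this (a suggestion, not part of the diff):

	return () => {
		document.removeEventListener('touchstart', onTouchStart);
		document.removeEventListener('touchend', onTouchEnd);
		window.removeEventListener('resize', onResize); // match the addEventListener target
	};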
||||
|
||||
|
@ -172,7 +179,7 @@
|
|||
<div class="px-2 flex justify-center space-x-2">
|
||||
<a
|
||||
id="sidebar-new-chat-button"
|
||||
class="flex-grow flex justify-between rounded-xl px-4 py-2 hover:bg-gray-200 dark:hover:bg-gray-900 transition"
|
||||
class="flex-grow flex justify-between rounded-xl px-4 py-2 hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
href="/"
|
||||
on:click={async () => {
|
||||
selectedChatId = null;
|
||||
|
@ -217,7 +224,7 @@
|
|||
{#if $user?.role === 'admin'}
|
||||
<div class="px-2 flex justify-center mt-0.5">
|
||||
<a
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-200 dark:hover:bg-gray-900 transition"
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
href="/modelfiles"
|
||||
on:click={() => {
|
||||
selectedChatId = null;
|
||||
|
@ -249,7 +256,7 @@
|
|||
|
||||
<div class="px-2 flex justify-center">
|
||||
<a
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-200 dark:hover:bg-gray-900 transition"
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
href="/prompts"
|
||||
on:click={() => {
|
||||
selectedChatId = null;
|
||||
|
@ -281,7 +288,7 @@
|
|||
|
||||
<div class="px-2 flex justify-center mb-1">
|
||||
<a
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-200 dark:hover:bg-gray-900 transition"
|
||||
class="flex-grow flex space-x-3 rounded-xl px-3.5 py-2 hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
href="/documents"
|
||||
on:click={() => {
|
||||
selectedChatId = null;
|
||||
|
@ -328,7 +335,7 @@
|
|||
|
||||
<div class="mt-3">
|
||||
<button
|
||||
class="flex justify-center items-center space-x-1.5 px-3 py-2.5 rounded-lg text-xs bg-gray-200 hover:bg-gray-300 transition text-gray-800 font-medium w-full"
|
||||
class="flex justify-center items-center space-x-1.5 px-3 py-2.5 rounded-lg text-xs bg-gray-100 hover:bg-gray-200 transition text-gray-800 font-medium w-full"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
saveSettings({
|
||||
|
@ -438,7 +445,7 @@
|
|||
class=" w-full flex justify-between rounded-xl px-3 py-2 {chat.id === $chatId ||
|
||||
chat.id === chatTitleEditId ||
|
||||
chat.id === chatDeleteId
|
||||
? 'bg-gray-300 dark:bg-gray-900'
|
||||
? 'bg-gray-200 dark:bg-gray-900'
|
||||
: chat.id === selectedChatId
|
||||
? 'bg-gray-100 dark:bg-gray-950'
|
||||
: 'group-hover:bg-gray-100 dark:group-hover:bg-gray-950'} whitespace-nowrap text-ellipsis"
|
||||
|
@ -450,7 +457,7 @@
|
|||
class=" w-full flex justify-between rounded-xl px-3 py-2 {chat.id === $chatId ||
|
||||
chat.id === chatTitleEditId ||
|
||||
chat.id === chatDeleteId
|
||||
? 'bg-gray-300 dark:bg-gray-900'
|
||||
? 'bg-gray-200 dark:bg-gray-900'
|
||||
: chat.id === selectedChatId
|
||||
? 'bg-gray-100 dark:bg-gray-950'
|
||||
: ' group-hover:bg-gray-100 dark:group-hover:bg-gray-950'} whitespace-nowrap text-ellipsis"
|
||||
|
@ -473,14 +480,14 @@
|
|||
|
||||
<div
|
||||
class="
|
||||
|
||||
|
||||
{chat.id === $chatId || chat.id === chatTitleEditId || chat.id === chatDeleteId
|
||||
? 'from-gray-300 dark:from-gray-900'
|
||||
? 'from-gray-200 dark:from-gray-900'
|
||||
: chat.id === selectedChatId
|
||||
? 'from-gray-100 dark:from-gray-950'
|
||||
: 'invisible group-hover:visible from-gray-100 dark:from-gray-950'}
|
||||
absolute right-[10px] top-[10px] pr-2 pl-5 bg-gradient-to-l from-80%
|
||||
|
||||
|
||||
to-transparent"
|
||||
>
|
||||
{#if chatTitleEditId === chat.id}
|
||||
|
@ -628,7 +635,7 @@
|
|||
<div class="flex flex-col">
|
||||
{#if $user !== undefined}
|
||||
<button
|
||||
class=" flex rounded-xl py-3 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-900 transition"
|
||||
class=" flex rounded-xl py-3 px-3.5 w-full hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
on:click={() => {
|
||||
showDropdown = !showDropdown;
|
||||
}}
|
||||
|
|
|
@ -69,9 +69,9 @@
|
|||
{#if chats.length > 0}
|
||||
<div class="text-left text-sm w-full mb-4 max-h-[22rem] overflow-y-scroll">
|
||||
<div class="relative overflow-x-auto">
|
||||
<table class="w-full text-sm text-left text-gray-500 dark:text-gray-400 table-auto">
|
||||
<table class="w-full text-sm text-left text-gray-600 dark:text-gray-400 table-auto">
|
||||
<thead
|
||||
class="text-xs text-gray-700 uppercase bg-transparent dark:text-gray-200 border-b-2 border-gray-800"
|
||||
class="text-xs text-gray-700 uppercase bg-transparent dark:text-gray-200 border-b-2 dark:border-gray-800"
|
||||
>
|
||||
<tr>
|
||||
<th scope="col" class="px-3 py-2"> {$i18n.t('Name')} </th>
|
||||
|
@ -82,7 +82,7 @@
|
|||
<tbody>
|
||||
{#each chats as chat, idx}
|
||||
<tr
|
||||
class="bg-white {idx !== chats.length - 1 &&
|
||||
class="bg-transparent {idx !== chats.length - 1 &&
|
||||
'border-b'} dark:bg-gray-900 dark:border-gray-850 text-xs"
|
||||
>
|
||||
<td class="px-3 py-1 w-2/3">
|
||||
|
|
373	src/lib/i18n/locales/bn-BD/translation.json	Normal file
|
@ -0,0 +1,373 @@
|
|||
{
|
||||
"'s', 'm', 'h', 'd', 'w' or '-1' for no expiration.": "'s', 'm', 'h', 'd', 'w' অথবা অনির্দিষ্টকাল মেয়াদের জন্য '-1' ",
|
||||
"(Beta)": "(পরিক্ষামূলক)",
|
||||
"(e.g. `sh webui.sh --api`)": "(যেমন `sh webui.sh --api`)",
|
||||
"(latest)": "(সর্বশেষ)",
|
||||
"{{modelName}} is thinking...": "{{modelName}} চিন্তা করছে...",
|
||||
"{{webUIName}} Backend Required": "{{webUIName}} ব্যাকএন্ড আবশ্যক",
|
||||
"a user": "একজন ব্যাবহারকারী",
|
||||
"About": "সম্পর্কে",
|
||||
"Account": "একাউন্ট",
|
||||
"Action": "একশন",
|
||||
"Add a model": "একটি মডেল যোগ করুন",
|
||||
"Add a model tag name": "একটি মডেল ট্যাগ যোগ করুন",
|
||||
"Add a short description about what this modelfile does": "এই মডেলফাইলটির সম্পর্কে সংক্ষিপ্ত বিবরণ যোগ করুন",
|
||||
"Add a short title for this prompt": "এই প্রম্পটের জন্য একটি সংক্ষিপ্ত টাইটেল যোগ করুন",
|
||||
"Add a tag": "একটি ট্যাগ যোগ করুন",
|
||||
"Add Docs": "ডকুমেন্ট যোগ করুন",
|
||||
"Add Files": "ফাইল যোগ করুন",
|
||||
"Add message": "মেসেজ যোগ করুন",
|
||||
"add tags": "ট্যাগ যোগ করুন",
|
||||
"Adjusting these settings will apply changes universally to all users.": "এই সেটিংগুলো পরিবর্তন করলে তা সব ইউজারের উপরেই প্রয়োগ করা হবে",
|
||||
"admin": "এডমিন",
|
||||
"Admin Panel": "এডমিন প্যানেল",
|
||||
"Admin Settings": "এডমিন সেটিংস",
|
||||
"Advanced Parameters": "এডভান্সড প্যারামিটার্স",
|
||||
"all": "সব",
|
||||
"All Users": "সব ইউজার",
|
||||
"Allow": "অনুমোদন",
|
||||
"Allow Chat Deletion": "চ্যাট ডিলিট করতে দিন",
|
||||
"alphanumeric characters and hyphens": "ইংরেজি অক্ষর, সংখ্যা এবং হাইফেন",
|
||||
"Already have an account?": "আগে থেকেই একাউন্ট আছে?",
|
||||
"an assistant": "একটা এসিস্ট্যান্ট",
|
||||
"and": "এবং",
|
||||
"API Base URL": "এপিআই বেজ ইউআরএল",
|
||||
"API Key": "এপিআই কোড",
|
||||
"API RPM": "এপিআই আরপিএম",
|
||||
"are allowed - Activate this command by typing": "অনুমোদিত - কমান্ডটি চালু করার জন্য লিখুন",
|
||||
"Are you sure?": "আপনি নিশ্চিত?",
|
||||
"Audio": "অডিও",
|
||||
"Auto-playback response": "রেসপন্স অটো-প্লেব্যাক",
|
||||
"Auto-send input after 3 sec.": "৩ সেকেন্ড পর ইনপুট সংয়ক্রিয়ভাবে পাঠান",
|
||||
"AUTOMATIC1111 Base URL": "AUTOMATIC1111 বেজ ইউআরএল",
|
||||
"AUTOMATIC1111 Base URL is required.": "AUTOMATIC1111 বেজ ইউআরএল আবশ্যক",
|
||||
"available!": "উপলব্ধ!",
|
||||
"Back": "পেছনে",
|
||||
"Builder Mode": "বিল্ডার মোড",
|
||||
"Cancel": "বাতিল",
|
||||
"Categories": "ক্যাটাগরিসমূহ",
|
||||
"Change Password": "পাসওয়ার্ড পরিবর্তন করুন",
|
||||
"Chat": "চ্যাট",
|
||||
"Chat History": "চ্যাট হিস্টোরি",
|
||||
"Chat History is off for this browser.": "এই ব্রাউজারের জন্য চ্যাট হিস্টোরি বন্ধ আছে",
|
||||
"Chats": "চ্যাটসমূহ",
|
||||
"Check Again": "আবার চেক করুন",
|
||||
"Check for updates": "নতুন আপডেট আছে কিনা চেক করুন",
|
||||
"Checking for updates...": "নতুন আপডেট আছে কিনা চেক করা হচ্ছে...",
|
||||
"Choose a model before saving...": "সেভ করার আগে একটি মডেল নির্বাচন করুন",
|
||||
"Chunk Overlap": "চাঙ্ক ওভারল্যাপ",
|
||||
"Chunk Params": "চাঙ্ক প্যারামিটার্স",
|
||||
"Chunk Size": "চাঙ্ক সাইজ",
|
||||
"Click here for help.": "সাহায্যের জন্য এখানে ক্লিক করুন",
|
||||
"Click here to check other modelfiles.": "অন্যান্য মডেলফাইল চেক করার জন্য এখানে ক্লিক করুন",
|
||||
"Click here to select": "নির্বাচন করার জন্য এখানে ক্লিক করুন",
|
||||
"Click here to select documents.": "ডকুমেন্টগুলো নির্বাচন করার জন্য এখানে ক্লিক করুন",
|
||||
"click here.": "এখানে ক্লিক করুন",
|
||||
"Click on the user role button to change a user's role.": "ইউজারের পদবি পরিবর্তন করার জন্য ইউজারের পদবি বাটনে ক্লিক করুন",
|
||||
"Close": "বন্ধ",
|
||||
"Collection": "সংগ্রহ",
|
||||
"Command": "কমান্ড",
|
||||
"Confirm Password": "পাসওয়ার্ড নিশ্চিত করুন",
|
||||
"Connections": "কানেকশনগুলো",
|
||||
"Content": "বিষয়বস্তু",
|
||||
"Context Length": "কনটেক্সটের দৈর্ঘ্য",
|
||||
"Conversation Mode": "কথোপকথন মোড",
|
||||
"Copy last code block": "সর্বশেষ কোড ব্লক কপি করুন",
|
||||
"Copy last response": "সর্বশেষ রেসপন্স কপি করুন",
|
||||
"Copying to clipboard was successful!": "ক্লিপবোর্ডে কপি করা সফল হয়েছে",
|
||||
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':": "'title' শব্দটি ব্যবহার না করে নিম্মোক্ত অনুসন্ধানের জন্য সংক্ষেপে সর্বোচ্চ ৩-৫ শব্দের একটি হেডার তৈরি করুন",
|
||||
"Create a modelfile": "একটি মডেলফাইল তৈরি করুন",
|
||||
"Create Account": "একাউন্ট তৈরি করুন",
|
||||
"Created at": "নির্মানকাল",
|
||||
"Created by": "নির্মাতা",
|
||||
"Current Model": "বর্তমান মডেল",
|
||||
"Current Password": "বর্তমান পাসওয়ার্ড",
|
||||
"Custom": "কাস্টম",
|
||||
"Customize Ollama models for a specific purpose": "নির্দিষ্ট উদ্দেশ্যে Ollama মডেল পরিবর্তন করুন",
|
||||
"Dark": "ডার্ক",
|
||||
"Database": "ডেটাবেজ",
|
||||
"DD/MM/YYYY HH:mm": "DD/MM/YYYY HH:mm",
|
||||
"Default": "ডিফল্ট",
|
||||
"Default (Automatic1111)": "ডিফল্ট (Automatic1111)",
|
||||
"Default (Web API)": "ডিফল্ট (Web API)",
|
||||
"Default model updated": "ডিফল্ট মডেল আপডেট হয়েছে",
|
||||
"Default Prompt Suggestions": "ডিফল্ট প্রম্পট সাজেশন",
|
||||
"Default User Role": "ইউজারের ডিফল্ট পদবি",
|
||||
"delete": "মুছে ফেলুন",
|
||||
"Delete a model": "একটি মডেল মুছে ফেলুন",
|
||||
"Delete chat": "চ্যাট মুছে ফেলুন",
|
||||
"Delete Chats": "চ্যাটগুলো মুছে ফেলুন",
|
||||
"Deleted {{deleteModelTag}}": "{{deleteModelTag}} মুছে ফেলা হয়েছে",
|
||||
"Deleted {tagName}": "{tagName} মুছে ফেলা হয়েছে",
|
||||
"Description": "বিবরণ",
|
||||
"Notifications": "নোটিফিকেশনসমূহ",
|
||||
"Disabled": "অক্ষম",
|
||||
"Discover a modelfile": "একটি মডেলফাইল খুঁজে বের করুন",
|
||||
"Discover a prompt": "একটি প্রম্পট খুঁজে বের করুন",
|
||||
"Discover, download, and explore custom prompts": "কাস্টম প্রম্পটগুলো আবিস্কার, ডাউনলোড এবং এক্সপ্লোর করুন",
|
||||
"Discover, download, and explore model presets": "মডেল প্রিসেটগুলো আবিস্কার, ডাউনলোড এবং এক্সপ্লোর করুন",
|
||||
"Display the username instead of You in the Chat": "চ্যাটে 'আপনি'-র পরবর্তে ইউজারনেম দেখান",
|
||||
"Document": "ডকুমেন্ট",
|
||||
"Document Settings": "ডকুমেন্ট সেটিংসমূহ",
|
||||
"Documents": "ডকুমেন্টসমূহ",
|
||||
"does not make any external connections, and your data stays securely on your locally hosted server.": "কোন এক্সটার্নাল কানেকশন তৈরি করে না, এবং আপনার ডেটা আর লোকালি হোস্টেড সার্ভারেই নিরাপদে থাকে।",
|
||||
"Don't Allow": "অনুমোদন দেবেন না",
|
||||
"Don't have an account?": "একাউন্ট নেই?",
|
||||
"Download as a File": "ফাইল হিসেবে ডাউনলোড করুন",
|
||||
"Download Database": "ডেটাবেজ ডাউনলোড করুন",
|
||||
"Drop any files here to add to the conversation": "আলোচনায় যুক্ত করার জন্য যে কোন ফাইল এখানে ড্রপ করুন",
|
||||
"e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.": "যেমন '30s','10m'. সময়ের অনুমোদিত অনুমোদিত এককগুলি হচ্ছে 's', 'm', 'h'.",
|
||||
"Edit Doc": "ডকুমেন্ট এডিট করুন",
|
||||
"Edit User": "ইউজার এডিট করুন",
|
||||
"Email": "ইমেইল",
|
||||
"Embedding model: {{embedding_model}}": "এমবেডিং মডেল: {{embedding_model}}",
|
||||
"Enable Chat History": "চ্যাট হিস্টোরি চালু করুন",
|
||||
"Enable New Sign Ups": "নতুন সাইনআপ চালু করুন",
|
||||
"Enabled": "চালু করা হয়েছে",
|
||||
"Enter {{role}} message here": "{{role}} মেসেজ এখানে লিখুন",
|
||||
"Enter API Key": "API Key লিখুন",
|
||||
"Enter Chunk Overlap": "চাঙ্ক ওভারল্যাপ লিখুন",
|
||||
"Enter Chunk Size": "চাংক সাইজ লিখুন",
|
||||
"Enter Image Size (e.g. 512x512)": "ছবির মাপ লিখুন (যেমন 512x512)",
|
||||
"Enter LiteLLM API Base URL (litellm_params.api_base)": "LiteLLM এপিআই বেজ ইউআরএল লিখুন (litellm_params.api_base)",
|
||||
"Enter LiteLLM API Key (litellm_params.api_key)": "LiteLLM এপিআই কোড লিখুন (litellm_params.api_key)",
|
||||
"Enter LiteLLM API RPM (litellm_params.rpm)": "LiteLLM এপিআই RPM দিন (litellm_params.rpm)",
|
||||
"Enter LiteLLM Model (litellm_params.model)": "LiteLLM মডেল দিন (litellm_params.model)",
|
||||
"Enter Max Tokens (litellm_params.max_tokens)": "সর্বোচ্চ টোকেন সংখ্যা দিন (litellm_params.max_tokens)",
|
||||
"Enter model tag (e.g. {{modelTag}})": "মডেল ট্যাগ লিখুন (e.g. {{modelTag}})",
|
||||
"Enter Number of Steps (e.g. 50)": "ধাপের সংখ্যা দিন (যেমন: 50)",
|
||||
"Enter stop sequence": "স্টপ সিকোয়েন্স লিখুন",
|
||||
"Enter Top K": "Top K লিখুন",
|
||||
"Enter URL (e.g. http://127.0.0.1:7860/)": "ইউআরএল দিন (যেমন http://127.0.0.1:7860/)",
|
||||
"Enter Your Email": "আপনার ইমেইল লিখুন",
|
||||
"Enter Your Full Name": "আপনার পূর্ণ নাম লিখুন",
|
||||
"Enter Your Password": "আপনার পাসওয়ার্ড লিখুন",
|
||||
"Experimental": "পরিক্ষামূলক",
|
||||
"Export All Chats (All Users)": "সব চ্যাট এক্সপোর্ট করুন (সব ইউজারের)",
|
||||
"Export Chats": "চ্যাটগুলো এক্সপোর্ট করুন",
|
||||
"Export Documents Mapping": "ডকুমেন্টসমূহ ম্যাপিং এক্সপোর্ট করুন",
|
||||
"Export Modelfiles": "মডেলফাইলগুলো এক্সপোর্ট করুন",
|
||||
"Export Prompts": "প্রম্পটগুলো একপোর্ট করুন",
|
||||
"Failed to read clipboard contents": "ক্লিপবোর্ডের বিষয়বস্তু পড়া সম্ভব হয়নি",
|
||||
"File Mode": "ফাইল মোড",
|
||||
"File not found.": "ফাইল পাওয়া যায়নি",
|
||||
"Fingerprint spoofing detected: Unable to use initials as avatar. Defaulting to default profile image.": "ফিঙ্গারপ্রিন্ট স্পুফিং ধরা পড়েছে: অ্যাভাটার হিসেবে নামের আদ্যক্ষর ব্যবহার করা যাচ্ছে না। ডিফল্ট প্রোফাইল পিকচারে ফিরিয়ে নেয়া হচ্ছে।",
|
||||
"Fluidly stream large external response chunks": "বড় এক্সটার্নাল রেসপন্স চাঙ্কগুলো মসৃণভাবে প্রবাহিত করুন",
|
||||
"Focus chat input": "চ্যাট ইনপুট ফোকাস করুন",
|
||||
"Format your variables using square brackets like this:": "আপনার ভেরিয়বলগুলো এভাবে স্কয়ার ব্রাকেটের মাধ্যমে সাজান",
|
||||
"From (Base Model)": "উৎস (বেজ মডেল)",
|
||||
"Full Screen Mode": "ফুলস্ক্রিন মোড",
|
||||
"General": "সাধারণ",
|
||||
"General Settings": "সাধারণ সেটিংসমূহ",
|
||||
"Hello, {{name}}": "হ্যালো, {{name}}",
|
||||
"Hide": "লুকান",
|
||||
"Hide Additional Params": "অতিরিক্ত প্যারামিটাগুলো লুকান",
|
||||
"How can I help you today?": "আপনাকে আজ কিভাবে সাহায্য করতে পারি?",
|
||||
"Image Generation (Experimental)": "ইমেজ জেনারেশন (পরিক্ষামূলক)",
|
||||
"Image Generation Engine": "ইমেজ জেনারেশন ইঞ্জিন",
|
||||
"Image Settings": "ছবির সেটিংসমূহ",
|
||||
"Images": "ছবিসমূহ",
|
||||
"Import Chats": "চ্যাটগুলি ইমপোর্ট করুন",
|
||||
"Import Documents Mapping": "ডকুমেন্টসমূহ ম্যাপিং ইমপোর্ট করুন",
|
||||
"Import Modelfiles": "মডেলফাইলগুলো ইমপোর্ট করুন",
|
||||
"Import Prompts": "প্রম্পটগুলো ইমপোর্ট করুন",
|
||||
"Include `--api` flag when running stable-diffusion-webui": "stable-diffusion-webui চালু করার সময় `--api` ফ্ল্যাগ সংযুক্ত করুন",
|
||||
"Interface": "ইন্টারফেস",
|
||||
"join our Discord for help.": "সাহায্যের জন্য আমাদের Discord-এ যুক্ত হোন",
|
||||
"JSON": "JSON",
|
||||
"JWT Expiration": "JWT-র মেয়াদ",
|
||||
"JWT Token": "JWT টোকেন",
|
||||
"Keep Alive": "সচল রাখুন",
|
||||
"Keyboard shortcuts": "কিবোর্ড শর্টকাটসমূহ",
|
||||
"Language": "ভাষা",
|
||||
"Light": "লাইট",
|
||||
"Listening...": "শুনছে...",
|
||||
"LLMs can make mistakes. Verify important information.": "LLM ভুল করতে পারে। গুরুত্বপূর্ণ তথ্য যাচাই করে নিন।",
|
||||
"Made by OpenWebUI Community": "OpenWebUI কমিউনিটিকর্তৃক নির্মিত",
|
||||
"Make sure to enclose them with": "এটা দিয়ে বন্ধনী দিতে ভুলবেন না",
|
||||
"Manage LiteLLM Models": "LiteLLM মডেল ব্যবস্থাপনা করুন",
|
||||
"Manage Models": "মডেলসমূহ ব্যবস্থাপনা করুন",
|
||||
"Manage Ollama Models": "Ollama মডেলসূহ ব্যবস্থাপনা করুন",
|
||||
"Max Tokens": "সর্বোচ্চ টোকন",
|
||||
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "একসঙ্গে সর্বোচ্চ তিনটি মডেল ডাউনলোড করা যায়। দয়া করে পরে আবার চেষ্টা করুন।",
|
||||
"Mirostat": "Mirostat",
|
||||
"Mirostat Eta": "Mirostat Eta",
|
||||
"Mirostat Tau": "Mirostat Tau",
|
||||
"MMMM DD, YYYY": "MMMM DD, YYYY",
|
||||
"Model '{{modelName}}' has been successfully downloaded.": "'{{modelName}}' মডেল সফলভাবে ডাউনলোড হয়েছে।",
|
||||
"Model '{{modelTag}}' is already in queue for downloading.": "{{modelTag}} ডাউনলোডের জন্য আগে থেকেই অপেক্ষমান আছে।",
|
||||
"Model {{embedding_model}} update complete!": "{{embedding_model}} মডেল আপডেট হয়ে গেছে!",
|
||||
"Model {{embedding_model}} update failed or not required!": "{{embedding_model}} মডেল আপডেট ব্যর্থ হয়েছে অথবা প্রয়োজন নেই",
|
||||
"Model {{modelId}} not found": "{{modelId}} মডেল পাওয়া যায়নি",
|
||||
"Model {{modelName}} already exists.": "{{modelName}} মডেল আগে থেকেই আছে",
|
||||
"Model filesystem path detected. Model shortname is required for update, cannot continue.": "মডেল ফাইলসিস্টেম পাথ পাওয়া গেছে। আপডেটের জন্য মডেলের শর্টনেম আবশ্যক, এগিয়ে যাওয়া যাচ্ছে না।",
|
||||
"Model Name": "মডেলের নাম",
|
||||
"Model not selected": "মডেল নির্বাচন করা হয়নি",
|
||||
"Model Tag Name": "মডেলের ট্যাগ নাম",
|
||||
"Model Whitelisting": "মডেল হোয়াইটলিস্টিং",
|
||||
"Model(s) Whitelisted": "হোয়াইটলিস্টেড মডেল(সমূহ)",
|
||||
"Modelfile": "মডেলফাইল",
|
||||
"Modelfile Advanced Settings": "মডেলফাইল এডভান্সড সেটিসমূহ",
|
||||
"Modelfile Content": "মডেলফাইল কনটেন্ট",
|
||||
"Modelfiles": "মডেলফাইলসমূহ",
|
||||
"Models": "মডেলসমূহ",
|
||||
"My Documents": "আমার ডকুমেন্টসমূহ",
|
||||
"My Modelfiles": "আমার মডেলফাইলসমূহ",
|
||||
"My Prompts": "আমার প্রম্পটসমূহ",
|
||||
"Name": "নাম",
|
||||
"Name Tag": "নামের ট্যাগ",
|
||||
"Name your modelfile": "আপনার মডেলফাইলের নাম দিন",
|
||||
"New Chat": "নতুন চ্যাট",
|
||||
"New Password": "নতুন পাসওয়ার্ড",
|
||||
"Not sure what to add?": "কী যুক্ত করতে হবে নিশ্চিত না?",
|
||||
"Not sure what to write? Switch to": "কী লিখতে হবে নিশ্চিত না? পরিবর্তন করুন:",
|
||||
"Off": "বন্ধ",
|
||||
"Okay, Let's Go!": "ঠিক আছে, চলুন যাই!",
|
||||
"Ollama Base URL": "Ollama বেজ ইউআরএল",
|
||||
"Ollama Version": "Ollama ভার্সন",
|
||||
"On": "চালু",
|
||||
"Only": "শুধুমাত্র",
|
||||
"Only alphanumeric characters and hyphens are allowed in the command string.": "কমান্ড স্ট্রিং-এ শুধুমাত্র ইংরেজি অক্ষর, সংখ্যা এবং হাইফেন ব্যবহার করা যাবে।",
|
||||
"Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.": "আহা! আরেকটু ধৈর্য্য ধরুন! আপনার ফাইলগুলো এখনো প্রোসেস চলছে, আমরা ওগুলোকে সেরা প্রক্রিয়াজাত করছি। তৈরি হয়ে গেলে আপনাকে জানিয়ে দেয়া হবে।",
|
||||
"Oops! Looks like the URL is invalid. Please double-check and try again.": "ওহ, মনে হচ্ছে ইউআরএলটা ইনভ্যালিড। দয়া করে আর চেক করে চেষ্টা করুন।",
|
||||
"Oops! You're using an unsupported method (frontend only). Please serve the WebUI from the backend.": "আপনি একটা আনসাপোর্টেড পদ্ধতি (শুধু ফ্রন্টএন্ড) ব্যবহার করছেন। দয়া করে WebUI ব্যাকএন্ড থেকে চালনা করুন।",
|
||||
"Open": "খোলা",
|
||||
"Open AI": "Open AI",
|
||||
"Open AI (Dall-E)": "Open AI (Dall-E)",
|
||||
"Open new chat": "নতুন চ্যাট খুলুন",
|
||||
"OpenAI API": "OpenAI এপিআই",
|
||||
"OpenAI API Key": "OpenAI এপিআই কোড",
|
||||
"OpenAI API Key is required.": "OpenAI API কোড আবশ্যক",
|
||||
"or": "অথবা",
|
||||
"Parameters": "প্যারামিটারসমূহ",
|
||||
"Password": "পাসওয়ার্ড",
|
||||
"PDF Extract Images (OCR)": "পিডিএফ এর ছবি থেকে লেখা বের করুন (OCR)",
|
||||
"pending": "অপেক্ষমান",
|
||||
"Permission denied when accessing microphone: {{error}}": "মাইক্রোফোন ব্যবহারের অনুমতি পাওয়া যায়নি: {{error}}",
|
||||
"Playground": "খেলাঘর",
|
||||
"Profile": "প্রোফাইল",
|
||||
"Prompt Content": "প্রম্পট কন্টেন্ট",
|
||||
"Prompt suggestions": "প্রম্পট সাজেশনসমূহ",
|
||||
"Prompts": "প্রম্পটসমূহ",
|
||||
"Pull a model from Ollama.com": "Ollama.com থেকে একটি টেনে আনুন আনুন",
|
||||
"Pull Progress": "Pull চলমান",
|
||||
"Query Params": "Query প্যারামিটারসমূহ",
|
||||
"RAG Template": "RAG টেম্পলেট",
|
||||
"Raw Format": "Raw ফরম্যাট",
|
||||
"Record voice": "ভয়েস রেকর্ড করুন",
|
||||
"Redirecting you to OpenWebUI Community": "আপনাকে OpenWebUI কমিউনিটিতে পাঠানো হচ্ছে",
|
||||
"Release Notes": "রিলিজ নোটসমূহ",
|
||||
"Repeat Last N": "রিপিট Last N",
|
||||
"Repeat Penalty": "রিপিট প্যানাল্টি",
|
||||
"Request Mode": "রিকোয়েস্ট মোড",
|
||||
"Reset Vector Storage": "ভেক্টর স্টোরেজ রিসেট করুন",
|
||||
"Response AutoCopy to Clipboard": "রেসপন্সগুলো স্বয়ংক্রিভাবে ক্লিপবোর্ডে কপি হবে",
|
||||
"Role": "পদবি",
|
||||
"Rosé Pine": "রোজ পাইন",
|
||||
"Rosé Pine Dawn": "ভোরের রোজ পাইন",
|
||||
"Save": "সংরক্ষণ",
|
||||
"Save & Create": "সংরক্ষণ এবং তৈরি করুন",
|
||||
"Save & Submit": "সংরক্ষণ এবং সাবমিট করুন",
|
||||
"Save & Update": "সংরক্ষণ এবং আপডেট করুন",
|
||||
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through": "মাধ্যমে",
|
||||
"Scan": "স্ক্যান",
|
||||
"Scan complete!": "স্ক্যান সম্পন্ন হয়েছে!",
|
||||
"Scan for documents from {{path}}": "ডকুমেন্টসমূহের জন্য {{path}} স্ক্যান করুন",
|
||||
"Search": "অনুসন্ধান",
|
||||
"Search Documents": "ডকুমেন্টসমূহ অনুসন্ধান করুন",
|
||||
"Search Prompts": "প্রম্পটসমূহ অনুসন্ধান করুন",
|
||||
"See readme.md for instructions": "নির্দেশিকার জন্য readme.md দেখুন",
|
||||
"See what's new": "নতুন কী আছে দেখুন",
|
||||
"Seed": "সীড",
|
||||
"Select a mode": "একটি মডেল নির্বাচন করুন",
|
||||
"Select a model": "একটি মডেল নির্বাচন করুন",
|
||||
"Select an Ollama instance": "একটি Ollama ইন্সট্যান্স নির্বাচন করুন",
|
||||
"Send a Message": "একটি মেসেজ পাঠান",
|
||||
"Send message": "মেসেজ পাঠান",
|
||||
"Server connection verified": "সার্ভার কানেকশন যাচাই করা হয়েছে",
|
||||
"Set as default": "ডিফল্ট হিসেবে নির্ধারণ করুন",
|
||||
"Set Default Model": "ডিফল্ট মডেল নির্ধারণ করুন",
|
||||
"Set Image Size": "ছবির সাইজ নির্ধারণ করুন",
|
||||
"Set Steps": "পরবর্তী ধাপসমূহ",
|
||||
"Set Title Auto-Generation Model": "শিরোনাম অটোজেনারেশন মডেন নির্ধারণ করুন",
|
||||
"Set Voice": "কন্ঠস্বর নির্ধারণ করুন",
|
||||
"Settings": "সেটিংসমূহ",
|
||||
"Settings saved successfully!": "সেটিংগুলো সফলভাবে সংরক্ষিত হয়েছে",
|
||||
"Share to OpenWebUI Community": "OpenWebUI কমিউনিটিতে শেয়ার করুন",
|
||||
"short-summary": "সংক্ষিপ্ত বিবরণ",
|
||||
"Show": "দেখান",
|
||||
"Show Additional Params": "অতিরিক্ত প্যারামিটারগুলো দেখান",
|
||||
"Show shortcuts": "শর্টকাটগুলো দেখান",
|
||||
"sidebar": "সাইডবার",
|
||||
"Sign in": "সাইন ইন",
|
||||
"Sign Out": "সাইন আউট",
|
||||
"Sign up": "সাইন আপ",
|
||||
"Speech recognition error: {{error}}": "স্পিচ রিকগনিশনে সমস্যা: {{error}}",
|
||||
"Speech-to-Text Engine": "স্পিচ-টু-টেক্সট ইঞ্জিন",
|
||||
"SpeechRecognition API is not supported in this browser.": "এই ব্রাউজার স্পিচরিকগনিশন এপিআই সাপোর্ট করে না।",
|
||||
"Stop Sequence": "সিকোয়েন্স থামান",
|
||||
"STT Settings": "STT সেটিংস",
|
||||
"Submit": "সাবমিট",
|
||||
"Success": "সফল",
|
||||
"Successfully updated.": "সফলভাবে আপডেট হয়েছে",
|
||||
"Sync All": "সব সিংক্রোনাইজ করুন",
|
||||
"System": "সিস্টেম",
|
||||
"System Prompt": "সিস্টেম প্রম্পট",
|
||||
"Tags": "ট্যাগসমূহ",
|
||||
"Temperature": "তাপমাত্রা",
|
||||
"Template": "টেম্পলেট",
|
||||
"Text Completion": "লেখা সম্পন্নকরণ",
|
||||
"Text-to-Speech Engine": "টেক্সট-টু-স্পিচ ইঞ্জিন",
|
||||
"Tfs Z": "Tfs Z",
|
||||
"Theme": "থিম",
|
||||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "এটা নিশ্চিত করে যে, আপনার গুরুত্বপূর্ণ আলোচনা নিরাপদে আপনার ব্যাকএন্ড ডেটাবেজে সংরক্ষিত আছে। ধন্যবাদ!",
|
||||
"This setting does not sync across browsers or devices.": "এই সেটিং অন্যন্য ব্রাউজার বা ডিভাইসের সাথে সিঙ্ক্রোনাইজ নয় না।",
|
||||
"Tip: Update multiple variable slots consecutively by pressing the tab key in the chat input after each replacement.": "পরামর্শ: একাধিক ভেরিয়েবল স্লট একের পর এক রিপ্লেস করার জন্য চ্যাট ইনপুটে কিবোর্ডের Tab বাটন ব্যবহার করুন।",
|
||||
"Title": "শিরোনাম",
|
||||
"Title Auto-Generation": "স্বয়ংক্রিয় শিরোনামগঠন",
|
||||
"Title Generation Prompt": "শিরোনামগঠন প্রম্পট",
|
||||
"to": "প্রতি",
|
||||
"To access the available model names for downloading,": "ডাউনলোডের জন্য এভেইলএবল মডেলের নামগুলো এক্সেস করতে,",
|
||||
"To access the GGUF models available for downloading,": "ডাউলোডের জন্য এভেইলএবল GGUF মডেলগুলো এক্সেস করতে,",
|
||||
"to chat input.": "চ্যাট ইনপুটে",
|
||||
"Toggle settings": "সেটিংস টোগল",
|
||||
"Toggle sidebar": "সাইডবার টোগল",
|
||||
"Top K": "Top K",
|
||||
"Top P": "Top P",
|
||||
"Trouble accessing Ollama?": "Ollama এক্সেস করতে সমস্যা হচ্ছে?",
|
||||
"TTS Settings": "TTS সেটিংসমূহ",
|
||||
"Type Hugging Face Resolve (Download) URL": "Hugging Face থেকে ডাউনলোড করার ইউআরএল টাইপ করুন",
|
||||
"Uh-oh! There was an issue connecting to {{provider}}.": "ওহ-হো! {{provider}} এর সাথে কানেকশনে সমস্যা হয়েছে।",
|
||||
"Understand that updating or changing your embedding model requires reset of the vector database and re-import of all documents. You have been warned!": "জেনে রাখুন, এমবেডিং মডেল আপডেট বা পরিবর্তন করতে হলে ভেক্টর ডেটাবেজ রিসেট করতে হবে এবং সব ডকুমেন্ট আবার নতুন করে ইমপোর্ট করতে হবে। এ বিষয়ে আপনাকে আগেই সাবধান করা হলো।",
|
||||
"Unknown File Type '{{file_type}}', but accepting and treating as plain text": "অপরিচিত ফাইল ফরম্যাট '{{file_type}}', তবে প্লেইন টেক্সট হিসেবে গ্রহণ করা হলো",
|
||||
"Update": "আপডেট",
|
||||
"Update embedding model {{embedding_model}}": "{{embedding_model}} এমবেডিং মডেল আপডেট করুন",
|
||||
"Update password": "পাসওয়ার্ড আপডেট করুন",
|
||||
"Upload a GGUF model": "একটি GGUF মডেল আপলোড করুন",
|
||||
"Upload files": "ফাইলগুলো আপলোড করুন",
|
||||
"Upload Progress": "আপলোড হচ্ছে",
|
||||
"URL Mode": "ইউআরএল মোড",
|
||||
"Use '#' in the prompt input to load and select your documents.": "আপনার ডকুমেন্টসমূহ নির্বাচন করার জন্য আপনার প্রম্পট ইনপুটে '# ব্যবহার করুন।",
|
||||
"Use Gravatar": "Gravatar ব্যবহার করুন",
|
||||
"Use Initials": "নামের আদ্যক্ষর ব্যবহার করুন",
|
||||
"user": "ব্যবহারকারী",
|
||||
"User Permissions": "ইউজার পারমিশনসমূহ",
|
||||
"Users": "ব্যাবহারকারীগণ",
|
||||
"Utilize": "ইউটিলাইজ",
|
||||
"Valid time units:": "সময়ের গ্রহণযোগ্য এককসমূহ:",
|
||||
"variable": "ভেরিয়েবল",
|
||||
"variable to have them replaced with clipboard content.": "ক্লিপবোর্ডের কন্টেন্ট দিয়ে যেই ভেরিয়েবল রিপ্লেস করা যাবে।",
|
||||
"Version": "ভার্সন",
|
||||
"Web": "ওয়েব",
|
||||
"WebUI Add-ons": "WebUI এড-অনসমূহ",
|
||||
"WebUI Settings": "WebUI সেটিংসমূহ",
|
||||
"WebUI will make requests to": "WebUI যেখানে রিকোয়েস্ট পাঠাবে",
|
||||
"What’s New in": "এতে নতুন কী",
|
||||
"When history is turned off, new chats on this browser won't appear in your history on any of your devices.": "যদি হিস্টোরি বন্ধ থাকে তাহলে এই ব্রাউজারের নতুন চ্যাটগুলো আপনার কোন ডিভাইসের হিস্টোরিতেই দেখা যাবে না।",
|
||||
"Whisper (Local)": "Whisper (লোকাল)",
|
||||
"Write a prompt suggestion (e.g. Who are you?)": "একটি প্রম্পট সাজেশন লিখুন (যেমন Who are you?)",
|
||||
"Write a summary in 50 words that summarizes [topic or keyword].": "৫০ শব্দের মধ্যে [topic or keyword] এর একটি সারসংক্ষেপ লিখুন।",
|
||||
"You": "আপনি",
|
||||
"You're a helpful assistant.": "আপনি একজন উপকারী এসিস্ট্যান্ট",
|
||||
"You're now logged in.": "আপনি এখন লগইন করা অবস্থায় আছেন"
|
||||
}
|
|
@ -44,7 +44,7 @@
|
|||
"available!": "¡disponible!",
|
||||
"Back": "Vuelve atrás",
|
||||
"Builder Mode": "Modo de Constructor",
|
||||
"Cancel": "Cancela",
|
||||
"Cancel": "Cancelar",
|
||||
"Categories": "Categorías",
|
||||
"Change Password": "Cambia la Contraseña",
|
||||
"Chat": "Chat",
|
||||
|
@ -152,6 +152,7 @@
|
|||
"File Mode": "Modo de archivo",
|
||||
"File not found.": "Archivo no encontrado.",
|
||||
"Fingerprint spoofing detected: Unable to use initials as avatar. Defaulting to default profile image.": "Se detectó suplantación de huellas: No se pueden usar las iniciales como avatar. Por defecto se utiliza la imagen de perfil predeterminada.",
|
||||
"Fluidly stream large external response chunks": "Transmita con fluidez grandes fragmentos de respuesta externa",
|
||||
"Focus chat input": "Enfoca la entrada del chat",
|
||||
"Format your variables using square brackets like this:": "Formatee sus variables usando corchetes así:",
|
||||
"From (Base Model)": "Desde (Modelo Base)",
|
||||
|
|
|
@ -7,6 +7,10 @@
|
|||
"code": "bg-BG",
|
||||
"title": "Bulgarian (BG)"
|
||||
},
|
||||
{
|
||||
"code": "bn-BD",
|
||||
"title": "Banlga (বাংলা)"
|
||||
},
|
||||
{
|
||||
"code": "ca-ES",
|
||||
"title": "Catalan"
|
||||
|
|
|
@ -120,6 +120,7 @@
|
|||
"Edit Doc": "Belgeyi Düzenle",
|
||||
"Edit User": "Kullanıcıyı Düzenle",
|
||||
"Email": "E-posta",
|
||||
"Embedding model: {{embedding_model}}": "Gömme modeli: {{embedding_model}}",
|
||||
"Enable Chat History": "Sohbet Geçmişini Etkinleştir",
|
||||
"Enable New Sign Ups": "Yeni Kayıtları Etkinleştir",
|
||||
"Enabled": "Etkin",
|
||||
|
@ -150,6 +151,8 @@
|
|||
"Failed to read clipboard contents": "Pano içeriği okunamadı",
|
||||
"File Mode": "Dosya Modu",
|
||||
"File not found.": "Dosya bulunamadı.",
|
||||
"Fingerprint spoofing detected: Unable to use initials as avatar. Defaulting to default profile image.": "Parmak izi sahteciliği tespit edildi: Avatar olarak baş harfler kullanılamıyor. Varsayılan profil resmine dönülüyor.",
|
||||
"Fluidly stream large external response chunks": "Büyük harici yanıt chunklarını akıcı bir şekilde yayınlayın",
|
||||
"Focus chat input": "Sohbet girişine odaklan",
|
||||
"Format your variables using square brackets like this:": "Değişkenlerinizi şu şekilde kare parantezlerle biçimlendirin:",
|
||||
"From (Base Model)": "(Temel Model)'den",
|
||||
|
@ -193,8 +196,11 @@
|
|||
"MMMM DD, YYYY": "DD MMMM YYYY",
|
||||
"Model '{{modelName}}' has been successfully downloaded.": "'{{modelName}}' başarıyla indirildi.",
|
||||
"Model '{{modelTag}}' is already in queue for downloading.": "'{{modelTag}}' zaten indirme sırasında.",
|
||||
"Model {{embedding_model}} update complete!": "Model {{embedding_model}} güncellemesi tamamlandı!",
|
||||
"Model {{embedding_model}} update failed or not required!": "Model {{embedding_model}} güncellemesi başarısız oldu veya gerekli değil!",
|
||||
"Model {{modelId}} not found": "{{modelId}} bulunamadı",
|
||||
"Model {{modelName}} already exists.": "{{modelName}} zaten mevcut.",
|
||||
"Model filesystem path detected. Model shortname is required for update, cannot continue.": "Model dosya sistemi yolu algılandı. Güncelleme için model kısa adı gerekli, devam edilemiyor.",
|
||||
"Model Name": "Model Adı",
|
||||
"Model not selected": "Model seçilmedi",
|
||||
"Model Tag Name": "Model Etiket Adı",
|
||||
|
@ -332,7 +338,10 @@
|
|||
"TTS Settings": "TTS Ayarları",
|
||||
"Type Hugging Face Resolve (Download) URL": "Hugging Face Resolve (Download) URL'sini Yazın",
|
||||
"Uh-oh! There was an issue connecting to {{provider}}.": "Ah! {{provider}}'a bağlanırken bir sorun oluştu.",
|
||||
"Understand that updating or changing your embedding model requires reset of the vector database and re-import of all documents. You have been warned!": "Gömme modelinizi güncellemenin veya değiştirmenin, vektör veritabanının sıfırlanmasını ve tüm belgelerin yeniden içe aktarılmasını gerektirdiğini anlayın. Uyarıldın!",
|
||||
"Unknown File Type '{{file_type}}', but accepting and treating as plain text": "Bilinmeyen Dosya Türü '{{file_type}}', ancak düz metin olarak kabul ediliyor ve işleniyor",
|
||||
"Update": "Güncelleme",
|
||||
"Update embedding model {{embedding_model}}": "Gömme modelini güncelle: {{embedding_model}}",
|
||||
"Update password": "Parolayı Güncelle",
|
||||
"Upload a GGUF model": "Bir GGUF modeli yükle",
|
||||
"Upload files": "Dosyaları Yükle",
|
||||
|
@ -340,6 +349,7 @@
|
|||
"URL Mode": "URL Modu",
|
||||
"Use '#' in the prompt input to load and select your documents.": "Belgelerinizi yüklemek ve seçmek için promptda '#' kullanın.",
|
||||
"Use Gravatar": "Gravatar Kullan",
|
||||
"Use Initials": "Baş Harfleri Kullan",
|
||||
"user": "kullanıcı",
|
||||
"User Permissions": "Kullanıcı İzinleri",
|
||||
"Users": "Kullanıcılar",
|
||||
|
|
|
@@ -341,7 +341,7 @@
"Use '#' in the prompt input to load and select your documents.": "Для введення промтів до веб-сторінок (URL) або вибору документів, будь ласка, використовуйте символ '#'.",
"Use Gravatar": "Змінити аватар",
"user": "користувач",
"User Permissions": "Дозволи користувача",
"User Permissions": "Права користувача",
"Users": "Користувачі",
"Utilize": "Використовувати",
"Valid time units:": "Дійсні одиниці часу:",

@@ -5,20 +5,20 @@
"(latest)": "",
"{{modelName}} is thinking...": "{{modelName}} 正在思考...",
"{{webUIName}} Backend Required": "需要 {{webUIName}} 后端",
"a user": "",
"a user": "用户",
"About": "关于",
"Account": "账户",
"Action": "操作",
"Add a model": "添加模型",
"Add a model tag name": "添加模型标签名称",
"Add a short description about what this modelfile does": "添加关于此模型文件功能的简短描述",
"Add a short title for this prompt": "为这个提示添加一个简短的标题",
"Add a tag": "",
"Add a short description about what this modelfile does": "为这个模型文件添加一段简短的描述",
"Add a short title for this prompt": "为这个提示词添加一个简短的标题",
"Add a tag": "添加标签",
"Add Docs": "添加文档",
"Add Files": "添加文件",
"Add message": "添加消息",
"add tags": "添加标签",
"Adjusting these settings will apply changes universally to all users.": "调整这些设置将对所有用户普遍应用更改。",
"Adjusting these settings will apply changes universally to all users.": "调整这些设置将会对所有用户应用更改。",
"admin": "管理员",
"Admin Panel": "管理员面板",
"Admin Settings": "管理员设置",
@@ -26,15 +26,15 @@
"all": "所有",
"All Users": "所有用户",
"Allow": "允许",
"Allow Chat Deletion": "允许删除聊天",
"Allow Chat Deletion": "允许删除聊天记录",
"alphanumeric characters and hyphens": "字母数字字符和连字符",
"Already have an account?": "已经有账户了吗?",
"an assistant": "",
"an assistant": "助手",
"and": "和",
"API Base URL": "API 基础 URL",
"API Key": "API 密钥",
"API RPM": "API RPM",
"are allowed - Activate this command by typing": "被允许 - 通过输入激活此命令",
"are allowed - Activate this command by typing": "允许 - 通过输入来激活这个命令",
"Are you sure?": "你确定吗?",
"Audio": "音频",
"Auto-playback response": "自动播放回应",
@@ -43,21 +43,21 @@
"AUTOMATIC1111 Base URL is required.": "需要 AUTOMATIC1111 基础 URL。",
"available!": "可用!",
"Back": "返回",
"Builder Mode": "构建者模式",
"Builder Mode": "构建模式",
"Cancel": "取消",
"Categories": "分类",
"Change Password": "更改密码",
"Chat": "聊天",
"Chat History": "聊天历史",
"Chat History is off for this browser.": "此浏览器已关闭聊天历史。",
"Chat History is off for this browser.": "此浏览器已关闭聊天历史功能。",
"Chats": "聊天",
"Check Again": "再次检查",
"Check for updates": "检查更新",
"Checking for updates...": "正在检查更新...",
"Choose a model before saving...": "保存前选择一个模型...",
"Chunk Overlap": "块重叠",
"Chunk Params": "块参数",
"Chunk Size": "块大小",
"Chunk Overlap": "块重叠(Chunk Overlap)",
"Chunk Params": "块参数(Chunk Params)",
"Chunk Size": "块大小(Chunk Size)",
"Click here for help.": "点击这里获取帮助。",
"Click here to check other modelfiles.": "点击这里检查其他模型文件。",
"Click here to select": "点击这里选择",
@@ -83,7 +83,7 @@
"Current Model": "当前模型",
"Current Password": "当前密码",
"Custom": "自定义",
"Customize Ollama models for a specific purpose": "为特定目的定制Ollama模型",
"Customize Ollama models for a specific purpose": "定制特定用途的Ollama模型",
"Dark": "暗色",
"Database": "数据库",
"DD/MM/YYYY HH:mm": "DD/MM/YYYY HH:mm",
@@ -106,7 +106,7 @@
"Discover a prompt": "探索提示词",
"Discover, download, and explore custom prompts": "发现、下载并探索自定义提示词",
"Discover, download, and explore model presets": "发现、下载并探索模型预设",
"Display the username instead of You in the Chat": "在聊天中显示用户名而不是“您”",
"Display the username instead of You in the Chat": "在聊天中显示用户名而不是“你”",
"Document": "文档",
"Document Settings": "文档设置",
"Documents": "文档",
@@ -115,7 +115,7 @@
"Don't have an account?": "没有账户?",
"Download as a File": "下载为文件",
"Download Database": "下载数据库",
"Drop any files here to add to the conversation": "将任何文件拖到这里以添加到对话中",
"Drop any files here to add to the conversation": "拖动文件到此处以添加到对话中",
"e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.": "例如 '30s','10m'。有效的时间单位是's', 'm', 'h'。",
"Edit Doc": "编辑文档",
"Edit User": "编辑用户",
@@ -123,21 +123,21 @@
"Enable Chat History": "启用聊天历史",
"Enable New Sign Ups": "启用新注册",
"Enabled": "启用",
"Enter {{role}} message here": "",
"Enter API Key": "",
"Enter Chunk Overlap": "",
"Enter Chunk Size": "",
"Enter Image Size (e.g. 512x512)": "",
"Enter LiteLLM API Base URL (litellm_params.api_base)": "",
"Enter LiteLLM API Key (litellm_params.api_key)": "",
"Enter LiteLLM API RPM (litellm_params.rpm)": "",
"Enter LiteLLM Model (litellm_params.model)": "",
"Enter Max Tokens (litellm_params.max_tokens)": "",
"Enter model tag (e.g. {{modelTag}})": "",
"Enter Number of Steps (e.g. 50)": "",
"Enter {{role}} message here": "在此处输入 {{role}} 信息",
"Enter API Key": "输入API密匙",
"Enter Chunk Overlap": "输入块重叠(Chunk Overlap)",
"Enter Chunk Size": "输入块大小(Chunk Size)",
"Enter Image Size (e.g. 512x512)": "输入图片大小(例如 512x512)",
"Enter LiteLLM API Base URL (litellm_params.api_base)": "输入 LiteLLM API 基本 URL (litellm_params.api_base)",
"Enter LiteLLM API Key (litellm_params.api_key)": "输入 LiteLLM API 密匙 (litellm_params.api_key)",
"Enter LiteLLM API RPM (litellm_params.rpm)": "输入 LiteLLM API 速率限制 (litellm_params.rpm)",
"Enter LiteLLM Model (litellm_params.model)": "输入 LiteLLM 模型 (litellm_params.model)",
"Enter Max Tokens (litellm_params.max_tokens)": "输入模型的 Max Tokens (litellm_params.max_tokens)",
"Enter model tag (e.g. {{modelTag}})": "输入模型标签(例如{{modelTag}})",
"Enter Number of Steps (e.g. 50)": "输入步数(例如50)",
"Enter stop sequence": "输入停止序列",
"Enter Top K": "",
"Enter URL (e.g. http://127.0.0.1:7860/)": "",
"Enter Top K": "输入 Top K",
"Enter URL (e.g. http://127.0.0.1:7860/)": "输入 URL (例如 http://127.0.0.1:7860/)",
"Enter Your Email": "输入您的电子邮件",
"Enter Your Full Name": "输入您的全名",
"Enter Your Password": "输入您的密码",
@@ -159,7 +159,7 @@
"Hello, {{name}}": "你好,{{name}}",
"Hide": "隐藏",
"Hide Additional Params": "隐藏额外参数",
"How can I help you today?": "今天我如何能帮到你?",
"How can I help you today?": "我今天能帮你做什么?",
"Image Generation (Experimental)": "图像生成(实验性)",
"Image Generation Engine": "图像生成引擎",
"Image Settings": "图像设置",
@@ -179,14 +179,14 @@
"Language": "语言",
"Light": "浅色",
"Listening...": "监听中...",
"LLMs can make mistakes. Verify important information.": "大型语言模型可能会犯错。验证重要信息。",
"LLMs can make mistakes. Verify important information.": "LLM可能会生成错误信息,请验证重要信息。",
"Made by OpenWebUI Community": "由OpenWebUI社区制作",
"Make sure to enclose them with": "确保将它们包含在内",
"Manage LiteLLM Models": "管理LiteLLM模型",
"Manage Models": "",
"Manage Models": "管理模型",
"Manage Ollama Models": "管理Ollama模型",
"Max Tokens": "最大令牌数",
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "最多可以同时下载3个模型。请稍后再试。",
"Maximum of 3 models can be downloaded simultaneously. Please try again later.": "最多可以同时下载3个模型,请稍后重试。",
"Mirostat": "Mirostat",
"Mirostat Eta": "Mirostat Eta",
"Mirostat Tau": "Mirostat Tau",
@@ -198,8 +198,8 @@
"Model Name": "模型名称",
"Model not selected": "未选择模型",
"Model Tag Name": "模型标签名称",
"Model Whitelisting": "",
"Model(s) Whitelisted": "",
"Model Whitelisting": "白名单模型",
"Model(s) Whitelisted": "模型已加入白名单",
"Modelfile": "模型文件",
"Modelfile Advanced Settings": "模型文件高级设置",
"Modelfile Content": "模型文件内容",
@@ -207,7 +207,7 @@
"Models": "模型",
"My Documents": "我的文档",
"My Modelfiles": "我的模型文件",
"My Prompts": "我的提示",
"My Prompts": "我的提示词",
"Name": "名称",
"Name Tag": "名称标签",
"Name your modelfile": "命名你的模型文件",
@@ -221,7 +221,7 @@
"Ollama Version": "Ollama 版本",
"On": "开",
"Only": "仅",
"Only alphanumeric characters and hyphens are allowed in the command string.": "命令字符串中只允许使用字母数字字符和连字符。",
"Only alphanumeric characters and hyphens are allowed in the command string.": "命令字符串中只允许使用英文字母,数字(0-9)以及连字符(-)。",
"Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.": "哎呀!请稍等!您的文件仍在处理中。我们正在将它们做得尽善尽美,请耐心等待,一旦准备好我们会通知您。",
"Oops! Looks like the URL is invalid. Please double-check and try again.": "哎呀!看起来 URL 无效。请仔细检查后再试一次。",
"Oops! You're using an unsupported method (frontend only). Please serve the WebUI from the backend.": "哎呀!您正在使用不支持的方法(仅限前端)。请从后端提供 WebUI。",
@@ -235,10 +235,10 @@
"or": "或",
"Parameters": "参数",
"Password": "密码",
"PDF Extract Images (OCR)": "",
"PDF Extract Images (OCR)": "PDF图像处理(使用OCR)",
"pending": "待定",
"Permission denied when accessing microphone: {{error}}": "访问麦克风时权限被拒绝:{{error}}",
"Playground": "游乐场",
"Playground": "Playground",
"Profile": "个人资料",
"Prompt Content": "提示词内容",
"Prompt suggestions": "提示词建议",
@@ -255,7 +255,7 @@
"Repeat Penalty": "重复惩罚",
"Request Mode": "请求模式",
"Reset Vector Storage": "重置向量存储",
"Response AutoCopy to Clipboard": "响应自动复制到剪贴板",
"Response AutoCopy to Clipboard": "自动复制回答到剪贴板",
"Role": "角色",
"Rosé Pine": "Rosé Pine",
"Rosé Pine Dawn": "Rosé Pine Dawn",
@@ -263,7 +263,7 @@
"Save & Create": "保存并创建",
"Save & Submit": "保存并提交",
"Save & Update": "保存并更新",
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through": "直接将聊天记录保存到浏览器存储中不再受支持。请点击下面的按钮下载并删除您的聊天记录。别担心,您可以通过轻松地将聊天记录重新导入到后端",
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through": "不再支持直接将聊天记录保存到浏览器存储中。请点击下面的按钮下载并删除您的聊天记录。别担心,您可以通过轻松地将聊天记录重新导入到后端",
"Scan": "扫描",
"Scan complete!": "扫描完成!",
"Scan for documents from {{path}}": "从 {{path}} 扫描文档",
@@ -275,10 +275,10 @@
"Seed": "种子",
"Select a mode": "选择一个模式",
"Select a model": "选择一个模型",
"Select an Ollama instance": "",
"Select an Ollama instance": "选择一个Ollama实例",
"Send a Message": "发送消息",
"Send message": "发送消息",
"Server connection verified": "服务器连接已验证",
"Server connection verified": "已验证服务器连接",
"Set as default": "设为默认",
"Set Default Model": "设置默认模型",
"Set Image Size": "设置图片大小",
@@ -286,7 +286,7 @@
"Set Title Auto-Generation Model": "设置标题自动生成模型",
"Set Voice": "设置声音",
"Settings": "设置",
"Settings saved successfully!": "",
"Settings saved successfully!": "设置已保存",
"Share to OpenWebUI Community": "分享到OpenWebUI社区",
"short-summary": "简短总结",
"Show": "显示",
@@ -316,10 +316,10 @@
"Theme": "主题",
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "这确保了您宝贵的对话被安全保存到后端数据库中。谢谢!",
"This setting does not sync across browsers or devices.": "此设置不会在浏览器或设备之间同步。",
"Tip: Update multiple variable slots consecutively by pressing the tab key in the chat input after each replacement.": "提示:在每次替换后,在聊天输入中按Tab键可以连续更新多个变量槽。",
"Tip: Update multiple variable slots consecutively by pressing the tab key in the chat input after each replacement.": "提示:在每次替换后,在聊天输入中按Tab键可以连续更新多个变量。",
"Title": "标题",
"Title Auto-Generation": "标题自动生成",
"Title Generation Prompt": "标题生成提示",
"Title Generation Prompt": "自动生成标题的提示词",
"to": "到",
"To access the available model names for downloading,": "要访问可下载的模型名称,",
"To access the GGUF models available for downloading,": "要访问可下载的GGUF模型,",
@@ -331,9 +331,9 @@
"Trouble accessing Ollama?": "访问Ollama时遇到问题?",
"TTS Settings": "文本转语音设置",
"Type Hugging Face Resolve (Download) URL": "输入Hugging Face解析(下载)URL",
"Uh-oh! There was an issue connecting to {{provider}}.": "哦哦!连接到{{provider}}时出现问题。",
"Unknown File Type '{{file_type}}', but accepting and treating as plain text": "未知文件类型'{{file_type}}',但接受并视为纯文本",
"Update password": "",
"Uh-oh! There was an issue connecting to {{provider}}.": "哎呀!连接到{{provider}}时出现问题。",
"Unknown File Type '{{file_type}}', but accepting and treating as plain text": "未知文件类型'{{file_type}}',将视为纯文本进行处理",
"Update password": "更新密码",
"Upload a GGUF model": "上传一个GGUF模型",
"Upload files": "上传文件",
"Upload Progress": "上传进度",
@@ -347,17 +347,17 @@
"Valid time units:": "有效时间单位:",
"variable": "变量",
"variable to have them replaced with clipboard content.": "变量将被剪贴板内容替换。",
"Version": "",
"Version": "版本",
"Web": "网页",
"WebUI Add-ons": "WebUI 插件",
"WebUI Settings": "WebUI 设置",
"WebUI will make requests to": "",
"WebUI will make requests to": "WebUI将请求",
"What’s New in": "最新变化",
"When history is turned off, new chats on this browser won't appear in your history on any of your devices.": "当历史记录被关闭时,这个浏览器上的新聊天不会出现在你任何设备的历史记录中。",
"Whisper (Local)": "私语(本地)",
"Whisper (Local)": "Whisper(本地)",
"Write a prompt suggestion (e.g. Who are you?)": "写一个提示建议(例如:你是谁?)",
"Write a summary in 50 words that summarizes [topic or keyword].": "用50个字写一个总结[主题或关键词]。",
"You": "你",
"You're a helpful assistant.": "你是一个有帮助的助手。",
"You're now logged in.": "你现在已经登录了。"
"You're now logged in.": "已登录。"
}

@@ -7,11 +7,11 @@

import { goto } from '$app/navigation';

import { getOllamaModels, getOllamaVersion } from '$lib/apis/ollama';
import { getModels as _getModels } from '$lib/utils';
import { getOllamaVersion } from '$lib/apis/ollama';
import { getModelfiles } from '$lib/apis/modelfiles';
import { getPrompts } from '$lib/apis/prompts';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';

import { getDocs } from '$lib/apis/documents';
import { getAllChatTags } from '$lib/apis/chats';

@@ -47,26 +47,7 @@
let showShortcuts = false;

const getModels = async () => {
let models = await Promise.all([
await getOllamaModels(localStorage.token).catch((error) => {
console.log(error);
return null;
}),
await getOpenAIModels(localStorage.token).catch((error) => {
console.log(error);
return null;
}),
await getLiteLLMModels(localStorage.token).catch((error) => {
console.log(error);
return null;
})
]);

models = models
.filter((models) => models)
.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);

return models;
return _getModels(localStorage.token);
};
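The per-page `getModels` implementation above is replaced by a shared helper imported from `$lib/utils` (aliased to `_getModels`). The diff does not show that utility itself, so the sketch below is an assumption: it simply centralizes the logic removed here, with the `{ name: 'hr' }` separator entry and the three API calls carried over from the deleted block.

```ts
// Hypothetical sketch of the shared helper in $lib/utils, mirroring the removed inline code.
import { getOllamaModels } from '$lib/apis/ollama';
import { getOpenAIModels } from '$lib/apis/openai';
import { getLiteLLMModels } from '$lib/apis/litellm';

export const getModels = async (token: string) => {
	// Fetch every backend in parallel; a failing backend contributes null instead of throwing.
	let models = await Promise.all([
		getOllamaModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		getOpenAIModels(token).catch((error) => {
			console.log(error);
			return null;
		}),
		getLiteLLMModels(token).catch((error) => {
			console.log(error);
			return null;
		})
	]);

	// Drop failed sources and insert an 'hr' separator entry between the remaining lists.
	models = models
		.filter((m) => m)
		.reduce((a, e, i, arr) => a.concat(e, ...(i < arr.length - 1 ? [{ name: 'hr' }] : [])), []);

	return models;
};
```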

const setOllamaVersion = async (version: string = '') => {
|
||||
|
@ -195,128 +176,128 @@
|
|||
});
|
||||
</script>
|
||||
|
||||
{#if loaded}
|
||||
<div class=" hidden lg:flex fixed bottom-0 right-0 px-3 py-3 z-10">
|
||||
<Tooltip content="Help" placement="left">
|
||||
<button
|
||||
id="show-shortcuts-button"
|
||||
bind:this={showShortcutsButtonElement}
|
||||
class="text-gray-600 dark:text-gray-300 bg-gray-300/20 w-6 h-6 flex items-center justify-center text-xs rounded-full"
|
||||
on:click={() => {
|
||||
showShortcuts = !showShortcuts;
|
||||
}}
|
||||
>
|
||||
?
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<ShortcutsModal bind:show={showShortcuts} />
|
||||
|
||||
<div class="app relative">
|
||||
{#if !['user', 'admin'].includes($user.role)}
|
||||
<div class="fixed w-full h-full flex z-50">
|
||||
<div
|
||||
class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
|
||||
>
|
||||
<div class="m-auto pb-44 flex flex-col justify-center">
|
||||
<div class="max-w-md">
|
||||
<div class="text-center dark:text-white text-2xl font-medium z-50">
|
||||
Account Activation Pending<br /> Contact Admin for WebUI Access
|
||||
</div>
|
||||
|
||||
<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
|
||||
Your account status is currently pending activation. To access the WebUI, please
|
||||
reach out to the administrator. Admins can manage user statuses from the Admin
|
||||
Panel.
|
||||
</div>
|
||||
|
||||
<div class=" mt-6 mx-auto relative group w-fit">
|
||||
<button
|
||||
class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 transition font-medium text-sm"
|
||||
on:click={async () => {
|
||||
location.href = '/';
|
||||
}}
|
||||
>
|
||||
{$i18n.t('Check Again')}
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="text-xs text-center w-full mt-2 text-gray-400 underline"
|
||||
on:click={async () => {
|
||||
localStorage.removeItem('token');
|
||||
location.href = '/auth';
|
||||
}}>{$i18n.t('Sign Out')}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{:else if localDBChats.length > 0}
|
||||
<div class="fixed w-full h-full flex z-50">
|
||||
<div
|
||||
class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
|
||||
>
|
||||
<div class="m-auto pb-44 flex flex-col justify-center">
|
||||
<div class="max-w-md">
|
||||
<div class="text-center dark:text-white text-2xl font-medium z-50">
|
||||
Important Update<br /> Action Required for Chat Log Storage
|
||||
</div>
|
||||
|
||||
<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
|
||||
{$i18n.t(
|
||||
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through"
|
||||
)}
|
||||
<span class="font-semibold dark:text-white"
|
||||
>{$i18n.t('Settings')} > {$i18n.t('Chats')} > {$i18n.t('Import Chats')}</span
|
||||
>. {$i18n.t(
|
||||
'This ensures that your valuable conversations are securely saved to your backend database. Thank you!'
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div class=" mt-6 mx-auto relative group w-fit">
|
||||
<button
|
||||
class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 transition font-medium text-sm"
|
||||
on:click={async () => {
|
||||
let blob = new Blob([JSON.stringify(localDBChats)], {
|
||||
type: 'application/json'
|
||||
});
|
||||
saveAs(blob, `chat-export-${Date.now()}.json`);
|
||||
|
||||
const tx = DB.transaction('chats', 'readwrite');
|
||||
await Promise.all([tx.store.clear(), tx.done]);
|
||||
await deleteDB('Chats');
|
||||
|
||||
localDBChats = [];
|
||||
}}
|
||||
>
|
||||
Download & Delete
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="text-xs text-center w-full mt-2 text-gray-400 underline"
|
||||
on:click={async () => {
|
||||
localDBChats = [];
|
||||
}}>{$i18n.t('Close')}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div
|
||||
class=" text-gray-700 dark:text-gray-100 bg-white dark:bg-gray-900 min-h-screen overflow-auto flex flex-row"
|
||||
<div class=" hidden lg:flex fixed bottom-0 right-0 px-3 py-3 z-10">
|
||||
<Tooltip content="Help" placement="left">
|
||||
<button
|
||||
id="show-shortcuts-button"
|
||||
bind:this={showShortcutsButtonElement}
|
||||
class="text-gray-600 dark:text-gray-300 bg-gray-300/20 w-6 h-6 flex items-center justify-center text-xs rounded-full"
|
||||
on:click={() => {
|
||||
showShortcuts = !showShortcuts;
|
||||
}}
|
||||
>
|
||||
?
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<ShortcutsModal bind:show={showShortcuts} />
|
||||
|
||||
<div class="app relative">
|
||||
<div
|
||||
class=" text-gray-700 dark:text-gray-100 bg-white dark:bg-gray-900 min-h-screen overflow-auto flex flex-row"
|
||||
>
|
||||
{#if loaded}
|
||||
{#if !['user', 'admin'].includes($user.role)}
|
||||
<div class="fixed w-full h-full flex z-50">
|
||||
<div
|
||||
class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
|
||||
>
|
||||
<div class="m-auto pb-44 flex flex-col justify-center">
|
||||
<div class="max-w-md">
|
||||
<div class="text-center dark:text-white text-2xl font-medium z-50">
|
||||
Account Activation Pending<br /> Contact Admin for WebUI Access
|
||||
</div>
|
||||
|
||||
<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
|
||||
Your account status is currently pending activation. To access the WebUI, please
|
||||
reach out to the administrator. Admins can manage user statuses from the Admin
|
||||
Panel.
|
||||
</div>
|
||||
|
||||
<div class=" mt-6 mx-auto relative group w-fit">
|
||||
<button
|
||||
class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 text-gray-700 transition font-medium text-sm"
|
||||
on:click={async () => {
|
||||
location.href = '/';
|
||||
}}
|
||||
>
|
||||
{$i18n.t('Check Again')}
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="text-xs text-center w-full mt-2 text-gray-400 underline"
|
||||
on:click={async () => {
|
||||
localStorage.removeItem('token');
|
||||
location.href = '/auth';
|
||||
}}>{$i18n.t('Sign Out')}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{:else if localDBChats.length > 0}
|
||||
<div class="fixed w-full h-full flex z-50">
|
||||
<div
|
||||
class="absolute w-full h-full backdrop-blur-md bg-white/20 dark:bg-gray-900/50 flex justify-center"
|
||||
>
|
||||
<div class="m-auto pb-44 flex flex-col justify-center">
|
||||
<div class="max-w-md">
|
||||
<div class="text-center dark:text-white text-2xl font-medium z-50">
|
||||
Important Update<br /> Action Required for Chat Log Storage
|
||||
</div>
|
||||
|
||||
<div class=" mt-4 text-center text-sm dark:text-gray-200 w-full">
|
||||
{$i18n.t(
|
||||
"Saving chat logs directly to your browser's storage is no longer supported. Please take a moment to download and delete your chat logs by clicking the button below. Don't worry, you can easily re-import your chat logs to the backend through"
|
||||
)}
|
||||
<span class="font-semibold dark:text-white"
|
||||
>{$i18n.t('Settings')} > {$i18n.t('Chats')} > {$i18n.t('Import Chats')}</span
|
||||
>. {$i18n.t(
|
||||
'This ensures that your valuable conversations are securely saved to your backend database. Thank you!'
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div class=" mt-6 mx-auto relative group w-fit">
|
||||
<button
|
||||
class="relative z-20 flex px-5 py-2 rounded-full bg-white border border-gray-100 dark:border-none hover:bg-gray-100 transition font-medium text-sm"
|
||||
on:click={async () => {
|
||||
let blob = new Blob([JSON.stringify(localDBChats)], {
|
||||
type: 'application/json'
|
||||
});
|
||||
saveAs(blob, `chat-export-${Date.now()}.json`);
|
||||
|
||||
const tx = DB.transaction('chats', 'readwrite');
|
||||
await Promise.all([tx.store.clear(), tx.done]);
|
||||
await deleteDB('Chats');
|
||||
|
||||
localDBChats = [];
|
||||
}}
|
||||
>
|
||||
Download & Delete
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="text-xs text-center w-full mt-2 text-gray-400 underline"
|
||||
on:click={async () => {
|
||||
localDBChats = [];
|
||||
}}>{$i18n.t('Close')}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<Sidebar />
|
||||
<SettingsModal bind:show={$showSettings} />
|
||||
<ChangelogModal bind:show={$showChangelog} />
|
||||
<slot />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.loading {
|
||||
|
|
|
@@ -351,7 +351,13 @@
model: model,
messages: messagesBody,
options: {
...($settings.options ?? {})
...($settings.options ?? {}),
stop:
$settings?.options?.stop ?? undefined
? $settings.options.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined
},
format: $settings.requestFormat ?? undefined,
keep_alive: $settings.keepAlive ?? undefined,
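The new `stop` option takes the stop sequences stored in `$settings.options.stop` and turns them back into raw strings before they are sent to Ollama. A small standalone sketch of that round trip follows; the stored values are illustrative, chosen on the assumption that the UI keeps them URL-encoded and JSON-escaped, which is what the `decodeURIComponent(JSON.parse(...))` call implies.

```ts
// Illustrative only: example stored values, assumed to be URL-encoded / escaped strings.
const storedStop = ['%3C%7Cim_end%7C%3E', '\\nUser:'];

const stop = storedStop.map((str) =>
	// Wrap in quotes so JSON.parse resolves escape sequences like \n,
	// then decode any percent-encoded characters (e.g. <|im_end|>).
	decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
);

console.log(stop); // [ '<|im_end|>', '\nUser:' ]
```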
@@ -532,7 +538,7 @@

console.log(model);

const res = await generateOpenAIChatCompletion(
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
@@ -576,7 +582,12 @@
})
})),
seed: $settings?.options?.seed ?? undefined,
stop: $settings?.options?.stop ?? undefined,
stop:
$settings?.options?.stop ?? undefined
? $settings?.options?.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
temperature: $settings?.options?.temperature ?? undefined,
top_p: $settings?.options?.top_p ?? undefined,
num_ctx: $settings?.options?.num_ctx ?? undefined,
@@ -608,6 +619,11 @@
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;

if (stopResponseFlag) {
controller.abort('User: Stop Response');
}

break;
}
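`generateOpenAIChatCompletion` now returns a `[response, controller]` pair so the caller can cancel a streamed completion when the user presses stop. The helper itself is not part of this diff, so the following is only a sketch of the shape implied by the call sites; the endpoint path and error handling are assumptions, not the project's actual code.

```ts
// Rough shape implied by `const [res, controller] = await generateOpenAIChatCompletion(...)`.
export const generateOpenAIChatCompletion = async (
	token: string,
	body: object,
	url = 'https://api.openai.com/v1'
): Promise<[Response | null, AbortController]> => {
	// The controller is returned so the UI can abort the stream mid-flight,
	// e.g. controller.abort('User: Stop Response') when the stop button is pressed.
	const controller = new AbortController();

	const res = await fetch(`${url}/chat/completions`, {
		signal: controller.signal,
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify(body)
	}).catch((err) => {
		console.log(err);
		return null;
	});

	return [res, controller];
};
```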

@@ -5,6 +5,8 @@
import { onMount, getContext } from 'svelte';

import dayjs from 'dayjs';
import relativeTime from 'dayjs/plugin/relativeTime';
dayjs.extend(relativeTime);

import { toast } from 'svelte-sonner';
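`dayjs` is extended with the `relativeTime` plugin so the admin user list can render a human-readable "last active" value (used further down as `dayjs(user.last_active_at * 1000).fromNow()`). A minimal usage sketch, with the timestamp value invented for illustration:

```ts
import dayjs from 'dayjs';
import relativeTime from 'dayjs/plugin/relativeTime';

dayjs.extend(relativeTime);

// last_active_at is a Unix timestamp in seconds, so multiply by 1000 for dayjs.
const lastActiveAt = Math.floor(Date.now() / 1000) - 3 * 60 * 60; // example value: 3 hours ago
console.log(dayjs(lastActiveAt * 1000).fromNow()); // "3 hours ago"
console.log(dayjs(lastActiveAt * 1000).format('MMMM DD, YYYY')); // e.g. "March 08, 2024"
```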
@@ -13,6 +15,9 @@
import EditUserModal from '$lib/components/admin/EditUserModal.svelte';
import SettingsModal from '$lib/components/admin/SettingsModal.svelte';
import Pagination from '$lib/components/common/Pagination.svelte';
import ChatBubbles from '$lib/components/icons/ChatBubbles.svelte';
import Tooltip from '$lib/components/common/Tooltip.svelte';
import UserChatsModal from '$lib/components/admin/UserChatsModal.svelte';

const i18n = getContext('i18n');

@@ -25,6 +30,8 @@
let page = 1;

let showSettingsModal = false;

let showUserChatsModal = false;
let showEditUserModal = false;

const updateRoleHandler = async (id, role) => {
@@ -84,6 +91,7 @@
/>
{/key}

<UserChatsModal bind:show={showUserChatsModal} user={selectedUser} />
<SettingsModal bind:show={showSettingsModal} />

<div class="min-h-screen max-h-[100dvh] w-full flex justify-center dark:text-white">
@@ -157,7 +165,10 @@
<th scope="col" class="px-3 py-2"> {$i18n.t('Role')} </th>
<th scope="col" class="px-3 py-2"> {$i18n.t('Name')} </th>
<th scope="col" class="px-3 py-2"> {$i18n.t('Email')} </th>
<th scope="col" class="px-3 py-2"> {$i18n.t('Last Active')} </th>

<th scope="col" class="px-3 py-2"> {$i18n.t('Created at')} </th>

<th scope="col" class="px-3 py-2 text-right" />
</tr>
</thead>
@ -215,55 +226,75 @@
|
|||
<td class=" px-3 py-2"> {user.email} </td>
|
||||
|
||||
<td class=" px-3 py-2">
|
||||
{dayjs(user.timestamp * 1000).format($i18n.t('MMMM DD, YYYY'))}
|
||||
{dayjs(user.last_active_at * 1000).fromNow()}
|
||||
</td>
|
||||
|
||||
<td class=" px-3 py-2">
|
||||
{dayjs(user.created_at * 1000).format($i18n.t('MMMM DD, YYYY'))}
|
||||
</td>
|
||||
|
||||
<td class="px-3 py-2 text-right">
|
||||
<div class="flex justify-end w-full">
|
||||
<button
|
||||
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
|
||||
on:click={async () => {
|
||||
showEditUserModal = !showEditUserModal;
|
||||
selectedUser = user;
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
<Tooltip content="Chats">
|
||||
<button
|
||||
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
|
||||
on:click={async () => {
|
||||
showUserChatsModal = !showUserChatsModal;
|
||||
selectedUser = user;
|
||||
}}
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m16.862 4.487 1.687-1.688a1.875 1.875 0 1 1 2.652 2.652L6.832 19.82a4.5 4.5 0 0 1-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 0 1 1.13-1.897L16.863 4.487Zm0 0L19.5 7.125"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
<ChatBubbles />
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<button
|
||||
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
|
||||
on:click={async () => {
|
||||
deleteUserHandler(user.id);
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
<Tooltip content="Edit User">
|
||||
<button
|
||||
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
|
||||
on:click={async () => {
|
||||
showEditUserModal = !showEditUserModal;
|
||||
selectedUser = user;
|
||||
}}
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m14.74 9-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 0 1-2.244 2.077H8.084a2.25 2.25 0 0 1-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 0 0-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 0 1 3.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 0 0-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 0 0-7.5 0"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m16.862 4.487 1.687-1.688a1.875 1.875 0 1 1 2.652 2.652L6.832 19.82a4.5 4.5 0 0 1-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 0 1 1.13-1.897L16.863 4.487Zm0 0L19.5 7.125"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Delete User">
|
||||
<button
|
||||
class="self-center w-fit text-sm px-2 py-2 hover:bg-black/5 dark:hover:bg-white/5 rounded-xl"
|
||||
on:click={async () => {
|
||||
deleteUserHandler(user.id);
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m14.74 9-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 0 1-2.244 2.077H8.084a2.25 2.25 0 0 1-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 0 0-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 0 1 3.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 0 0-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 0 0-7.5 0"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
|
|
|
@@ -365,7 +365,13 @@
model: model,
messages: messagesBody,
options: {
...($settings.options ?? {})
...($settings.options ?? {}),
stop:
$settings?.options?.stop ?? undefined
? $settings.options.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined
},
format: $settings.requestFormat ?? undefined,
keep_alive: $settings.keepAlive ?? undefined,
@@ -544,7 +550,7 @@

console.log(docs);

const res = await generateOpenAIChatCompletion(
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
@@ -588,7 +594,12 @@
})
})),
seed: $settings?.options?.seed ?? undefined,
stop: $settings?.options?.stop ?? undefined,
stop:
$settings?.options?.stop ?? undefined
? $settings.options.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
temperature: $settings?.options?.temperature ?? undefined,
top_p: $settings?.options?.top_p ?? undefined,
num_ctx: $settings?.options?.num_ctx ?? undefined,
@@ -620,6 +631,11 @@
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;

if (stopResponseFlag) {
controller.abort('User: Stop Response');
}

break;
}

@@ -28,7 +28,7 @@
});

if (success) {
toast.success($i18n.t(`Deleted {tagName}`, { tagName }));
toast.success($i18n.t(`Deleted {{tagName}}`, { tagName }));
}

return success;
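The toast fix switches from single to double braces, which is the interpolation syntax that i18next (the library presumably behind `$i18n`, given the `initI18n` setup elsewhere in this PR) expects; with `{tagName}` the placeholder would be printed literally instead of being substituted. A minimal sketch of the difference, assuming a plain i18next instance:

```ts
import i18next from 'i18next';

await i18next.init({
	lng: 'en',
	resources: { en: { translation: { 'Deleted {{tagName}}': 'Deleted {{tagName}}' } } }
});

const tagName = 'work';
// Double braces are i18next's default interpolation markers:
console.log(i18next.t('Deleted {{tagName}}', { tagName })); // "Deleted work"
// A single-brace key is treated as literal text, so nothing gets substituted:
console.log(i18next.t('Deleted {tagName}', { tagName })); // "Deleted {tagName}"
```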
@@ -67,7 +67,7 @@
const textCompletionHandler = async () => {
const model = $models.find((model) => model.id === selectedModelId);

const res = await generateOpenAIChatCompletion(
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
@@ -96,7 +96,7 @@
const { value, done } = await reader.read();
if (done || stopResponseFlag) {
if (stopResponseFlag) {
await cancelOllamaRequest(localStorage.token, currentRequestId);
controller.abort('User: Stop Response');
}

currentRequestId = null;
@@ -135,7 +135,7 @@
const chatCompletionHandler = async () => {
const model = $models.find((model) => model.id === selectedModelId);

const res = await generateOpenAIChatCompletion(
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
@@ -182,7 +182,7 @@
const { value, done } = await reader.read();
if (done || stopResponseFlag) {
if (stopResponseFlag) {
await cancelOllamaRequest(localStorage.token, currentRequestId);
controller.abort('User: Stop Response');
}

currentRequestId = null;

@@ -7,9 +7,11 @@
import { getBackendConfig } from '$lib/apis';
import { getSessionUser } from '$lib/apis/auths';

import '../app.css';
import '../tailwind.css';
import '../app.css';

import 'tippy.js/dist/tippy.css';

import { WEBUI_BASE_URL } from '$lib/constants';
import i18n, { initI18n } from '$lib/i18n';
@@ -60,6 +62,8 @@
}

await tick();

document.getElementById('splash-screen')?.remove();
loaded = true;
});
</script>
@@ -68,8 +72,10 @@
<title>{$WEBUI_NAME}</title>
<link rel="icon" href="{WEBUI_BASE_URL}/static/favicon.png" />

<link rel="stylesheet" type="text/css" href="/themes/rosepine.css" />
<link rel="stylesheet" type="text/css" href="/themes/rosepine-dawn.css" />
<!-- rosepine themes have been disabled as it's not up to date with our latest version. -->
<!-- feel free to make a PR to fix if anyone wants to see it return -->
<!-- <link rel="stylesheet" type="text/css" href="/themes/rosepine.css" />
<link rel="stylesheet" type="text/css" href="/themes/rosepine-dawn.css" /> -->
</svelte:head>

{#if loaded}

@@ -215,6 +215,8 @@

<style>
.font-mona {
font-family: 'Mona Sans';
font-family: 'Mona Sans', -apple-system, 'Arimo', ui-sans-serif, system-ui, 'Segoe UI', Roboto,
Ubuntu, Cantarell, 'Noto Sans', sans-serif, 'Helvetica Neue', Arial, 'Apple Color Emoji',
'Segoe UI Emoji', 'Segoe UI Symbol', 'Noto Color Emoji';
}
</style>