open-webui/backend/config.py

import os
import chromadb
from chromadb import Settings
from secrets import token_bytes
from base64 import b64encode
from constants import ERROR_MESSAGES
from pathlib import Path
try:
    from dotenv import load_dotenv, find_dotenv

    load_dotenv(find_dotenv("../.env"))
except ImportError:
    print("dotenv not installed, skipping...")

####################################
# ENV (dev,test,prod)
####################################
ENV = os.environ.get("ENV", "dev")
####################################
# DATA/FRONTEND BUILD DIR
####################################
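# DATA_DIR holds persistent state (uploads, cache, docs, vector DB);
# FRONTEND_BUILD_DIR is where the compiled frontend is expected to live.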
DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))
####################################
# File Upload DIR
####################################
UPLOAD_DIR = f"{DATA_DIR}/uploads"
Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)
####################################
# Cache DIR
####################################
CACHE_DIR = f"{DATA_DIR}/cache"
Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)
####################################
# Docs DIR
####################################
DOCS_DIR = f"{DATA_DIR}/docs"
Path(DOCS_DIR).mkdir(parents=True, exist_ok=True)
####################################
# OLLAMA_API_BASE_URL
####################################
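# Base URL of the Ollama API that the backend talks to; in a Docker "prod"
# setup, the relative "/ollama/api" value is rewritten to host.docker.internal below.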
OLLAMA_API_BASE_URL = os.environ.get(
    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)

if ENV == "prod":
    if OLLAMA_API_BASE_URL == "/ollama/api":
        OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"

####################################
# OPENAI_API
####################################
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
if OPENAI_API_BASE_URL == "":
    OPENAI_API_BASE_URL = "https://api.openai.com/v1"

####################################
# WEBUI
####################################
# os.environ.get returns a string when the variable is set, so compare explicitly;
# otherwise a value like ENABLE_SIGNUP=false would still evaluate as truthy.
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "True").lower() == "true"
DEFAULT_MODELS = os.environ.get("DEFAULT_MODELS", None)
DEFAULT_PROMPT_SUGGESTIONS = os.environ.get(
    "DEFAULT_PROMPT_SUGGESTIONS",
    [
        {
            "title": ["Help me study", "vocabulary for a college entrance exam"],
            "content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.",
        },
        {
            "title": ["Give me ideas", "for what to do with my kids' art"],
            "content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
        },
        {
            "title": ["Tell me a fun fact", "about the Roman Empire"],
            "content": "Tell me a random fun fact about the Roman Empire",
        },
        {
            "title": ["Show me a code snippet", "of a website's sticky header"],
            "content": "Show me a code snippet of a website's sticky header in CSS and JavaScript.",
        },
    ],
)

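# New accounts are created with the "pending" role (presumably activated by an admin);
# USER_PERMISSIONS gates per-feature actions such as chat deletion.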
DEFAULT_USER_ROLE = "pending"
USER_PERMISSIONS = {"chat": {"deletion": True}}
####################################
# WEBUI_VERSION
####################################
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")
####################################
# WEBUI_AUTH (Required for security)
####################################
WEBUI_AUTH = True
####################################
# WEBUI_SECRET_KEY
####################################
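# Secret used to sign authentication tokens; WEBUI_JWT_SECRET_KEY is the
# deprecated legacy name and is kept only as a fallback.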
WEBUI_SECRET_KEY = os.environ.get(
    "WEBUI_SECRET_KEY",
    os.environ.get(
        "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
    ),  # DEPRECATED: remove at next major version
)

if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)

####################################
# RAG
####################################
CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db"
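# Sentence-transformers model presumably used to embed documents stored in the vector DB.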
EMBED_MODEL = "all-MiniLM-L6-v2"
CHROMA_CLIENT = chromadb.PersistentClient(
    path=CHROMA_DATA_PATH,
    settings=Settings(allow_reset=True, anonymized_telemetry=False),
)

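# Chunk size and overlap (character counts) used by the RAG text splitter.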
CHUNK_SIZE = 1500
CHUNK_OVERLAP = 100
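# [context] and [query] below are placeholders substituted at query time with
# the retrieved documents and the user's question.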
RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
<context>
[context]
</context>
When answer to user:
- If you don't know, just say that you don't know.
- If you don't know when you are not sure, ask for clarification.
Avoid mentioning that you obtained the information from the context.
And answer according to the language of the user's question.
Given the context information, answer the query.
Query: [query]"""
2024-02-11 09:17:50 +01:00
####################################
# Transcribe
####################################
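# Whisper model name/size for local speech-to-text; downloaded models are cached under WHISPER_MODEL_DIR.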
WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base")
WHISPER_MODEL_DIR = os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models")