forked from open-webui/open-webui
commit 437d7ff6f5
155 changed files with 13322 additions and 5133 deletions
@@ -1,4 +1,7 @@
.github
.DS_Store
docs
kubernetes
node_modules
/.svelte-kit
/package
@@ -4,3 +4,9 @@ OLLAMA_API_BASE_URL='http://localhost:11434/api'

OPENAI_API_BASE_URL=''
OPENAI_API_KEY=''

# AUTOMATIC1111_BASE_URL="http://localhost:7860"

# DO NOT TRACK
SCARF_NO_ANALYTICS=true
DO_NOT_TRACK=true
1 .gitattributes vendored Normal file

@@ -0,0 +1 @@
*.sh text eol=lf
32 .github/pull_request_template.md vendored Normal file

@@ -0,0 +1,32 @@
## Pull Request Checklist

- [ ] **Description:** Briefly describe the changes in this pull request.
- [ ] **Changelog:** Ensure a changelog entry following the format of [Keep a Changelog](https://keepachangelog.com/) is added at the bottom of the PR description.
- [ ] **Documentation:** Have you updated relevant documentation?
- [ ] **Dependencies:** Are there any new dependencies? Have you updated the dependency versions in the documentation?

---

## Description

[Insert a brief description of the changes made in this pull request]

---

### Changelog Entry

### Added

- [List any new features or additions]

### Fixed

- [List any fixes or corrections]

### Changed

- [List any changes or updates]

### Removed

- [List any removed features or files]
49 .github/workflows/build-release.yml vendored Normal file

@@ -0,0 +1,49 @@
name: Release

on:
  push:
    branches:
      - main # or whatever branch you want to use

jobs:
  release:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      - name: Check for changes in package.json
        run: |
          git diff --cached --diff-filter=d package.json || {
            echo "No changes to package.json"
            exit 1
          }

      - name: Get version number from package.json
        id: get_version
        run: |
          VERSION=$(jq -r '.version' package.json)
          echo "::set-output name=version::$VERSION"

      - name: Create GitHub release
        uses: actions/github-script@v5
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const release = await github.rest.repos.createRelease({
              owner: context.repo.owner,
              repo: context.repo.repo,
              tag_name: `v${{ steps.get_version.outputs.version }}`,
              name: `v${{ steps.get_version.outputs.version }}`,
              body: 'Automatically created new release',
            })
            console.log(`Created release ${release.data.html_url}`)

      - name: Upload package to GitHub release
        uses: actions/upload-artifact@v3
        with:
          name: package
          path: .
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
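
Editor's note: the `actions/github-script` step above drives the GitHub REST API. A rough out-of-band equivalent, assuming the `requests` library and a personal access token in `GITHUB_TOKEN` (an illustration, not part of the workflow itself):

```python
# Illustrative only: read the version from package.json and create the
# corresponding tagged release via the GitHub REST API.
import json
import os

import requests

with open("package.json") as f:
    version = json.load(f)["version"]

resp = requests.post(
    "https://api.github.com/repos/open-webui/open-webui/releases",
    headers={"Authorization": f"token {os.environ['GITHUB_TOKEN']}"},
    json={
        "tag_name": f"v{version}",
        "name": f"v{version}",
        "body": "Automatically created new release",
    },
)
resp.raise_for_status()
print(resp.json()["html_url"])
```
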
16 .github/workflows/docker-build.yaml vendored

@@ -40,15 +40,21 @@ jobs:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
      - name: Extract metadata (tags, labels) for Docker

      - name: Extract metadata for Docker images
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
          # This configuration dynamically generates tags based on the branch, tag, commit, and custom suffix for lite version.
          tags: |
            type=ref,event=branch
            type=ref,event=tag
            type=sha,prefix=git-
            type=semver,pattern={{version}}
          flavor: |
            latest=${{ github.ref == 'refs/heads/main' }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
25 CHANGELOG.md Normal file

@@ -0,0 +1,25 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.102] - 2024-02-22

### Added

- **🖼️ Image Generation**: Generate images using the AUTOMATIC1111/stable-diffusion-webui API. You can set this up in Settings > Images.
- **📝 Change title generation prompt**: Change the prompt used to generate titles for your chats. You can set this up in Settings > Interface.
- **🤖 Change embedding model**: Change the embedding model used to generate embeddings for your chats in the Dockerfile. Use any sentence-transformer model from huggingface.co.
- **📢 CHANGELOG.md/Popup**: This popup will show you the latest changes.

## [0.1.101] - 2024-02-22

### Fixed

- LaTeX output formatting issue (#828)

### Changed

- Instead of the previous 1.0.0-alpha.101 scheme, we switched to semantic versioning as a way to respect global conventions.
59 Dockerfile

@@ -5,7 +5,8 @@ FROM node:alpine as build
WORKDIR /app

# wget embedding model weight from alpine (does not exist from slim-buster)
RUN wget "https://chroma-onnx-models.s3.amazonaws.com/all-MiniLM-L6-v2/onnx.tar.gz"
RUN wget "https://chroma-onnx-models.s3.amazonaws.com/all-MiniLM-L6-v2/onnx.tar.gz" -O - | \
    tar -xzf - -C /app

COPY package.json package-lock.json ./
RUN npm ci

@@ -17,35 +18,65 @@ RUN npm run build
FROM python:3.11-slim-bookworm as base

ENV ENV=prod
ENV PORT ""

ENV OLLAMA_API_BASE_URL "/ollama/api"

ENV OPENAI_API_BASE_URL ""
ENV OPENAI_API_KEY ""

ENV WEBUI_JWT_SECRET_KEY "SECRET_KEY"
ENV WEBUI_SECRET_KEY ""

WORKDIR /app
ENV SCARF_NO_ANALYTICS true
ENV DO_NOT_TRACK true

# copy embedding weight from build
RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
COPY --from=build /app/onnx.tar.gz /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
######## Preloaded models ########
# whisper TTS Settings
ENV WHISPER_MODEL="base"
ENV WHISPER_MODEL_DIR="/app/backend/data/cache/whisper/models"

RUN cd /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2 &&\
    tar -xzf onnx.tar.gz
# RAG Embedding Model Settings
# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers
# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard
# for better performance and multilingual support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB)
# IMPORTANT: If you switch away from the default model (all-MiniLM-L6-v2), or back to it, you cannot use RAG Chat with documents that were already loaded in the WebUI! You need to re-embed them.
ENV RAG_EMBEDDING_MODEL="all-MiniLM-L6-v2"
# device type for whisper tts and embedding models - "cpu" (default), "cuda" (NVIDIA GPU and CUDA required) or "mps" (Apple Silicon) - choosing the right one can lead to better performance
ENV RAG_EMBEDDING_MODEL_DEVICE_TYPE="cpu"
ENV RAG_EMBEDDING_MODEL_DIR="/app/backend/data/cache/embedding/models"
ENV SENTENCE_TRANSFORMERS_HOME $RAG_EMBEDDING_MODEL_DIR

# copy built frontend files
COPY --from=build /app/build /app/build
######## Preloaded models ########

WORKDIR /app/backend

# install python dependencies
COPY ./backend/requirements.txt ./requirements.txt

RUN pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
RUN pip3 install -r requirements.txt
RUN pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir
RUN pip3 install -r requirements.txt --no-cache-dir

# RUN python -c "from sentence_transformers import SentenceTransformer; model = SentenceTransformer('all-MiniLM-L6-v2')"
# Install pandoc and netcat
# RUN python -c "import pypandoc; pypandoc.download_pandoc()"
RUN apt-get update \
    && apt-get install -y pandoc netcat-openbsd \
    && rm -rf /var/lib/apt/lists/*

# preload embedding model
RUN python -c "import os; from chromadb.utils import embedding_functions; sentence_transformer_ef = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=os.environ['RAG_EMBEDDING_MODEL'], device=os.environ['RAG_EMBEDDING_MODEL_DEVICE_TYPE'])"
# preload tts model
RUN python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='auto', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"

# copy embedding weight from build
RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2
COPY --from=build /app/onnx /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2/onnx

# copy built frontend files
COPY --from=build /app/build /app/build
COPY --from=build /app/CHANGELOG.md /app/CHANGELOG.md
COPY --from=build /app/package.json /app/package.json

# copy backend files
COPY ./backend .

CMD [ "sh", "start.sh"]
CMD [ "bash", "start.sh"]
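
Editor's note: the two `# preload` RUN lines above bake the embedding and Whisper models into the image at build time, keyed by the ENV values defined earlier. A minimal runtime sketch of how those same settings are picked up again (assuming the `chromadb` and `faster_whisper` packages from `backend/requirements.txt`; this is an illustration, not the project's own startup code):

```python
# Illustrative sketch: reuse the models cached during `docker build` by
# constructing them with the same ENV-driven settings as the Dockerfile.
import os

from chromadb.utils import embedding_functions
from faster_whisper import WhisperModel

# Same model/device as RAG_EMBEDDING_MODEL / RAG_EMBEDDING_MODEL_DEVICE_TYPE,
# so the sentence-transformer weights are found under SENTENCE_TRANSFORMERS_HOME
# instead of being downloaded again.
embedder = embedding_functions.SentenceTransformerEmbeddingFunction(
    model_name=os.environ.get("RAG_EMBEDDING_MODEL", "all-MiniLM-L6-v2"),
    device=os.environ.get("RAG_EMBEDDING_MODEL_DEVICE_TYPE", "cpu"),
)

# Same idea for Whisper: download_root matches WHISPER_MODEL_DIR, so the
# speech-to-text model preloaded at build time is loaded from disk.
stt_model = WhisperModel(
    os.environ.get("WHISPER_MODEL", "base"),
    device="auto",
    compute_type="int8",
    download_root=os.environ.get("WHISPER_MODEL_DIR", "/app/backend/data/cache/whisper/models"),
)
```
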
@@ -1,4 +1,4 @@
### Installing Both Ollama and Ollama Web UI Using Kustomize
### Installing Both Ollama and Open WebUI Using Kustomize

For cpu-only pod

@@ -12,7 +12,7 @@ For gpu-enabled pod
kubectl apply -k ./kubernetes/manifest
```

### Installing Both Ollama and Ollama Web UI Using Helm
### Installing Both Ollama and Open WebUI Using Helm

Package Helm file first
243 README.md

@@ -1,23 +1,19 @@
|
|||
# Ollama Web UI: A User-Friendly Web Interface for Chat Interactions 👋
|
||||
# Open WebUI (Formerly Ollama WebUI) 👋
|
||||
|
||||
![GitHub stars](https://img.shields.io/github/stars/ollama-webui/ollama-webui?style=social)
|
||||
![GitHub forks](https://img.shields.io/github/forks/ollama-webui/ollama-webui?style=social)
|
||||
![GitHub watchers](https://img.shields.io/github/watchers/ollama-webui/ollama-webui?style=social)
|
||||
![GitHub repo size](https://img.shields.io/github/repo-size/ollama-webui/ollama-webui)
|
||||
![GitHub language count](https://img.shields.io/github/languages/count/ollama-webui/ollama-webui)
|
||||
![GitHub top language](https://img.shields.io/github/languages/top/ollama-webui/ollama-webui)
|
||||
![GitHub last commit](https://img.shields.io/github/last-commit/ollama-webui/ollama-webui?color=red)
|
||||
![GitHub stars](https://img.shields.io/github/stars/open-webui/open-webui?style=social)
|
||||
![GitHub forks](https://img.shields.io/github/forks/open-webui/open-webui?style=social)
|
||||
![GitHub watchers](https://img.shields.io/github/watchers/open-webui/open-webui?style=social)
|
||||
![GitHub repo size](https://img.shields.io/github/repo-size/open-webui/open-webui)
|
||||
![GitHub language count](https://img.shields.io/github/languages/count/open-webui/open-webui)
|
||||
![GitHub top language](https://img.shields.io/github/languages/top/open-webui/open-webui)
|
||||
![GitHub last commit](https://img.shields.io/github/last-commit/open-webui/open-webui?color=red)
|
||||
![Hits](https://hits.seeyoufarm.com/api/count/incr/badge.svg?url=https%3A%2F%2Fgithub.com%2Follama-webui%2Follama-wbui&count_bg=%2379C83D&title_bg=%23555555&icon=&icon_color=%23E7E7E7&title=hits&edge_flat=false)
|
||||
[![Discord](https://img.shields.io/badge/Discord-Ollama_Web_UI-blue?logo=discord&logoColor=white)](https://discord.gg/5rJgQTnV4s)
|
||||
[![Discord](https://img.shields.io/badge/Discord-Open_WebUI-blue?logo=discord&logoColor=white)](https://discord.gg/5rJgQTnV4s)
|
||||
[![](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/tjbck)
|
||||
|
||||
ChatGPT-Style Web Interface for Ollama 🦙
|
||||
User-friendly WebUI for LLMs, supported LLM runners include Ollama and OpenAI-compatible APIs. For more information, be sure to check out our [Open WebUI Documentation](https://docs.openwebui.com/).
|
||||
|
||||
**Disclaimer:** _ollama-webui is a community-driven project and is not affiliated with the Ollama team in any way. This initiative is independent, and any inquiries or feedback should be directed to [our community on Discord](https://discord.gg/5rJgQTnV4s). We kindly request users to refrain from contacting or harassing the Ollama team regarding this project._
|
||||
|
||||
![Ollama Web UI Demo](./demo.gif)
|
||||
|
||||
Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you can discover, download, and explore customized Modelfiles for Ollama! 🦙🔍
|
||||
![Open WebUI Demo](./demo.gif)
|
||||
|
||||
## Features ⭐
|
||||
|
||||
|
@ -33,12 +29,16 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
|
|||
|
||||
- ✒️🔢 **Full Markdown and LaTeX Support**: Elevate your LLM experience with comprehensive Markdown and LaTeX capabilities for enriched interaction.
|
||||
|
||||
- 📚 **Local RAG Integration (Alpha)**: Immerse yourself in cutting-edge Retrieval Augmented Generation support, revolutionizing your chat experience by seamlessly incorporating document interactions. In its alpha phase, expect occasional issues as we actively refine and enhance this feature to ensure optimal performance and reliability.
|
||||
- 📚 **Local RAG Integration**: Dive into the future of chat interactions with the groundbreaking Retrieval Augmented Generation (RAG) support. This feature seamlessly integrates document interactions into your chat experience. You can load documents directly into the chat or add files to your document library, effortlessly accessing them using `#` command in the prompt. In its alpha phase, occasional issues may arise as we actively refine and enhance this feature to ensure optimal performance and reliability.
|
||||
|
||||
- 📜 **Prompt Preset Support**: Instantly access preset prompts using the '/' command in the chat input. Load predefined conversation starters effortlessly and expedite your interactions. Effortlessly import prompts through [OllamaHub](https://ollamahub.com/) integration.
|
||||
- 🌐 **Web Browsing Capability**: Seamlessly integrate websites into your chat experience using the `#` command followed by the URL. This feature allows you to incorporate web content directly into your conversations, enhancing the richness and depth of your interactions.
|
||||
|
||||
- 📜 **Prompt Preset Support**: Instantly access preset prompts using the `/` command in the chat input. Load predefined conversation starters effortlessly and expedite your interactions. Effortlessly import prompts through [Open WebUI Community](https://openwebui.com/) integration.
|
||||
|
||||
- 👍👎 **RLHF Annotation**: Empower your messages by rating them with thumbs up and thumbs down, facilitating the creation of datasets for Reinforcement Learning from Human Feedback (RLHF). Utilize your messages to train or fine-tune models, all while ensuring the confidentiality of locally saved data.
|
||||
|
||||
- 🏷️ **Conversation Tagging**: Effortlessly categorize and locate specific chats for quick reference and streamlined data collection.
|
||||
|
||||
- 📥🗑️ **Download/Delete Models**: Easily download or remove models directly from the web UI.
|
||||
|
||||
- ⬆️ **GGUF File Model Creation**: Effortlessly create Ollama models by uploading GGUF files directly from the web UI. Streamlined process with options to upload from your machine or download GGUF files from Hugging Face.
|
||||
|
@ -47,10 +47,12 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
|
|||
|
||||
- 🔄 **Multi-Modal Support**: Seamlessly engage with models that support multimodal interactions, including images (e.g., LLava).
|
||||
|
||||
- 🧩 **Modelfile Builder**: Easily create Ollama modelfiles via the web UI. Create and add characters/agents, customize chat elements, and import modelfiles effortlessly through [OllamaHub](https://ollamahub.com/) integration.
|
||||
- 🧩 **Modelfile Builder**: Easily create Ollama modelfiles via the web UI. Create and add characters/agents, customize chat elements, and import modelfiles effortlessly through [Open WebUI Community](https://openwebui.com/) integration.
|
||||
|
||||
- ⚙️ **Many Models Conversations**: Effortlessly engage with various models simultaneously, harnessing their unique strengths for optimal responses. Enhance your experience by leveraging a diverse set of models in parallel.
|
||||
|
||||
- 💬 **Collaborative Chat**: Harness the collective intelligence of multiple models by seamlessly orchestrating group conversations. Use the `@` command to specify the model, enabling dynamic and diverse dialogues within your chat interface. Immerse yourself in the collective intelligence woven into your chat environment.
|
||||
|
||||
- 🤝 **OpenAI API Integration**: Effortlessly integrate OpenAI-compatible API for versatile conversations alongside Ollama models. Customize the API Base URL to link with **LMStudio, Mistral, OpenRouter, and more**.
|
||||
|
||||
- 🔄 **Regeneration History Access**: Easily revisit and explore your entire regeneration history.
|
||||
|
@ -67,193 +69,65 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
|
|||
|
||||
- 🔐 **Role-Based Access Control (RBAC)**: Ensure secure access with restricted permissions; only authorized individuals can access your Ollama, and exclusive model creation/pulling rights are reserved for administrators.
|
||||
|
||||
- 🔒 **Backend Reverse Proxy Support**: Bolster security through direct communication between Ollama Web UI backend and Ollama. This key feature eliminates the need to expose Ollama over LAN. Requests made to the '/ollama/api' route from the web UI are seamlessly redirected to Ollama from the backend, enhancing overall system security.
|
||||
- 🔒 **Backend Reverse Proxy Support**: Bolster security through direct communication between Open WebUI backend and Ollama. This key feature eliminates the need to expose Ollama over LAN. Requests made to the '/ollama/api' route from the web UI are seamlessly redirected to Ollama from the backend, enhancing overall system security.
|
||||
|
||||
- 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.
|
||||
- 🌟 **Continuous Updates**: We are committed to improving Open WebUI with regular updates and new features.
|
||||
|
||||
## 🔗 Also Check Out OllamaHub!
|
||||
## 🔗 Also Check Out Open WebUI Community!
|
||||
|
||||
Don't forget to explore our sibling project, [OllamaHub](https://ollamahub.com/), where you can discover, download, and explore customized Modelfiles. OllamaHub offers a wide range of exciting possibilities for enhancing your chat interactions with Ollama! 🚀
|
||||
Don't forget to explore our sibling project, [Open WebUI Community](https://openwebui.com/), where you can discover, download, and explore customized Modelfiles. Open WebUI Community offers a wide range of exciting possibilities for enhancing your chat interactions with Open WebUI! 🚀
|
||||
|
||||
## How to Install 🚀
|
||||
|
||||
🌟 **Important Note on User Roles and Privacy:**
|
||||
> [!NOTE]
|
||||
> Please note that for certain Docker environments, additional configurations might be needed. If you encounter any connection issues, our detailed guide on [Open WebUI Documentation](https://docs.openwebui.com/) is ready to assist you.
|
||||
|
||||
- **Admin Creation:** The very first account to sign up on the Ollama Web UI will be granted **Administrator privileges**. This account will have comprehensive control over the platform, including user management and system settings.
|
||||
|
||||
- **User Registrations:** All subsequent users signing up will initially have their accounts set to **Pending** status by default. These accounts will require approval from the Administrator to gain access to the platform functionalities.
|
||||
|
||||
- **Privacy and Data Security:** We prioritize your privacy and data security above all. Please be reassured that all data entered into the Ollama Web UI is stored locally on your device. Our system is designed to be privacy-first, ensuring that no external requests are made, and your data does not leave your local environment. We are committed to maintaining the highest standards of data privacy and security, ensuring that your information remains confidential and under your control.
|
||||
|
||||
### Installing Ollama Web UI Only
|
||||
|
||||
#### Prerequisites
|
||||
|
||||
Make sure you have the latest version of Ollama installed before proceeding with the installation. You can find the latest version of Ollama at [https://ollama.ai/](https://ollama.ai/).
|
||||
|
||||
##### Checking Ollama
|
||||
|
||||
After installing Ollama, verify that Ollama is running by accessing the following link in your web browser: [http://127.0.0.1:11434/](http://127.0.0.1:11434/). Note that the port number may differ based on your system configuration.
|
||||
|
||||
#### Using Docker 🐳
|
||||
|
||||
**Important:** When using Docker to install Ollama Web UI, make sure to include the `-v ollama-webui:/app/backend/data` in your Docker command. This step is crucial as it ensures your database is properly mounted and prevents any loss of data.
|
||||
|
||||
If Ollama is hosted on your local machine and accessible at [http://127.0.0.1:11434/](http://127.0.0.1:11434/), run the following command:
|
||||
|
||||
```bash
|
||||
docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
|
||||
```
|
||||
|
||||
Alternatively, if you prefer to build the container yourself, use the following command:
|
||||
|
||||
```bash
|
||||
docker build -t ollama-webui .
|
||||
docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
|
||||
```
|
||||
|
||||
Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000) and accessible over LAN (or Network). Enjoy! 😄
|
||||
|
||||
#### Accessing External Ollama on a Different Server
|
||||
|
||||
Change `OLLAMA_API_BASE_URL` environment variable to match the external Ollama Server url:
|
||||
|
||||
```bash
|
||||
docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
|
||||
```
|
||||
|
||||
Alternatively, if you prefer to build the container yourself, use the following command:
|
||||
|
||||
```bash
|
||||
docker build -t ollama-webui .
|
||||
docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
|
||||
```
|
||||
|
||||
### Installing Both Ollama and Ollama Web UI
|
||||
|
||||
#### Using Docker Compose
|
||||
|
||||
If you don't have Ollama installed yet, you can use the provided Docker Compose file for a hassle-free installation. Simply run the following command:
|
||||
|
||||
```bash
|
||||
docker compose up -d --build
|
||||
```
|
||||
|
||||
This command will install both Ollama and Ollama Web UI on your system.
|
||||
|
||||
##### Enable GPU
|
||||
|
||||
Use the additional Docker Compose file designed to enable GPU support by running the following command:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.yaml -f docker-compose.gpu.yaml up -d --build
|
||||
```
|
||||
|
||||
##### Expose Ollama API outside the container stack
|
||||
|
||||
Deploy the service with an additional Docker Compose file designed for API exposure:
|
||||
|
||||
```bash
|
||||
docker compose -f docker-compose.yaml -f docker-compose.api.yaml up -d --build
|
||||
```
|
||||
|
||||
#### Using Provided `run-compose.sh` Script (Linux)
|
||||
|
||||
Also available on Windows under any Docker-enabled WSL2 Linux distro (you have to enable it from Docker Desktop).
|
||||
|
||||
Simply run the following command to grant execute permission to script:
|
||||
|
||||
```bash
|
||||
chmod +x run-compose.sh
|
||||
```
|
||||
|
||||
##### For CPU only container
|
||||
|
||||
```bash
|
||||
./run-compose.sh
|
||||
```
|
||||
|
||||
##### Enable GPU
|
||||
|
||||
For a GPU-enabled container, you must have the GPU driver set up for Docker; this mostly works with NVIDIA, so see the official install guide: [nvidia-container-toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html).
Warning! A GPU-enabled installation has only been tested on Linux with an NVIDIA GPU; full functionality is not guaranteed on Windows, macOS, or with a different GPU.
|
||||
|
||||
```bash
|
||||
./run-compose.sh --enable-gpu
|
||||
```
|
||||
|
||||
Note that both of the above commands use the latest production Docker image from the repository; to build the latest local version, append the `--build` parameter, for example:
|
||||
|
||||
```bash
|
||||
./run-compose.sh --enable-gpu --build
|
||||
```
|
||||
|
||||
#### Using Alternative Methods (Kustomize or Helm)
|
||||
|
||||
See [INSTALLATION.md](/INSTALLATION.md) for information on how to install and/or join our [Ollama Web UI Discord community](https://discord.gg/5rJgQTnV4s).
|
||||
|
||||
## How to Install Without Docker
|
||||
|
||||
While we strongly recommend using our convenient Docker container installation for optimal support, we understand that some situations may require a non-Docker setup, especially for development purposes. Please note that non-Docker installations are not officially supported, and you might need to troubleshoot on your own.
|
||||
|
||||
### Project Components
|
||||
|
||||
The Ollama Web UI consists of two primary components: the frontend and the backend (which serves as a reverse proxy, handling static frontend files, and additional features). Both need to be running concurrently for the development environment.
|
||||
### Quick Start with Docker 🐳
|
||||
|
||||
> [!IMPORTANT]
|
||||
> The backend is required for proper functionality
|
||||
> When using Docker to install Open WebUI, make sure to include the `-v open-webui:/app/backend/data` in your Docker command. This step is crucial as it ensures your database is properly mounted and prevents any loss of data.
|
||||
|
||||
### Requirements 📦
|
||||
- **If Ollama is on your computer**, use this command:
|
||||
|
||||
- 🐰 [Bun](https://bun.sh) >= 1.0.21 or 🐢 [Node.js](https://nodejs.org/en) >= 20.10
|
||||
- 🐍 [Python](https://python.org) >= 3.11
|
||||
```bash
|
||||
docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
|
||||
```
|
||||
|
||||
### Build and Install 🛠️
|
||||
- **If Ollama is on a Different Server**, use this command:
|
||||
|
||||
Run the following commands to install:
|
||||
- To connect to Ollama on another server, change the `OLLAMA_API_BASE_URL` to the server's URL:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/ollama-webui/ollama-webui.git
|
||||
cd ollama-webui/
|
||||
```bash
|
||||
docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
|
||||
```
|
||||
|
||||
# Copying required .env file
|
||||
cp -RPp example.env .env
|
||||
- After installation, you can access Open WebUI at [http://localhost:3000](http://localhost:3000). Enjoy! 😄
|
||||
|
||||
# Building Frontend Using Node
|
||||
npm i
|
||||
npm run build
|
||||
#### Troubleshooting
|
||||
|
||||
# or Building Frontend Using Bun
|
||||
# bun install
|
||||
# bun run build
|
||||
Encountering connection issues? Our [Open WebUI Documentation](https://docs.openwebui.com/getting-started/troubleshooting/) has got you covered. For further assistance and to join our vibrant community, visit the [Open WebUI Discord](https://discord.gg/5rJgQTnV4s).
|
||||
|
||||
# Serving Frontend with the Backend
|
||||
cd ./backend
|
||||
pip install -r requirements.txt -U
|
||||
sh start.sh
|
||||
### Other Installation Methods
|
||||
|
||||
We offer various installation alternatives, including non-Docker methods, Docker Compose, Kustomize, and Helm. Visit our [Open WebUI Documentation](https://docs.openwebui.com/getting-started/) or join our [Discord community](https://discord.gg/5rJgQTnV4s) for comprehensive guidance.
|
||||
|
||||
### Keeping Your Docker Installation Up-to-Date
|
||||
|
||||
In case you want to update your local Docker installation to the latest version, you can do it with [Watchtower](https://containrrr.dev/watchtower/):
|
||||
|
||||
```bash
|
||||
docker run --rm --volume /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower --run-once open-webui
|
||||
```
|
||||
|
||||
You should have the Ollama Web UI up and running at http://localhost:8080/. Enjoy! 😄
|
||||
In the last part of the command, replace `open-webui` with your container name if it is different.
|
||||
|
||||
## Troubleshooting
|
||||
### Moving from Ollama WebUI to Open WebUI
|
||||
|
||||
See [TROUBLESHOOTING.md](/TROUBLESHOOTING.md) for information on how to troubleshoot and/or join our [Ollama Web UI Discord community](https://discord.gg/5rJgQTnV4s).
|
||||
Check our Migration Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/migration/).
|
||||
|
||||
## What's Next? 🚀
|
||||
## What's Next? 🌟
|
||||
|
||||
### Roadmap 📝
|
||||
|
||||
Here are some exciting tasks on our roadmap:
|
||||
|
||||
- 🌐 **Web Browsing Capability**: Experience the convenience of seamlessly integrating web content directly into your chat. Easily browse and share information without leaving the conversation.
|
||||
- 🔄 **Function Calling**: Empower your interactions by running code directly within the chat. Execute functions and commands effortlessly, enhancing the functionality of your conversations.
|
||||
- ⚙️ **Custom Python Backend Actions**: Empower your Ollama Web UI by creating or downloading custom Python backend actions. Unleash the full potential of your web interface with tailored actions that suit your specific needs, enhancing functionality and versatility.
|
||||
- 🧠 **Long-Term Memory**: Witness the power of persistent memory in our agents. Enjoy conversations that feel continuous as agents remember and reference past interactions, creating a more cohesive and personalized user experience.
|
||||
- 🧪 **Research-Centric Features**: Empower researchers in the fields of LLM and HCI with a comprehensive web UI for conducting user studies. Stay tuned for ongoing feature enhancements (e.g., surveys, analytics, and participant tracking) to facilitate their research.
|
||||
- 📈 **User Study Tools**: Providing specialized tools, like heat maps and behavior tracking modules, to empower researchers in capturing and analyzing user behavior patterns with precision and accuracy.
|
||||
- 📚 **Enhanced Documentation**: Elevate your setup and customization experience with improved, comprehensive documentation.
|
||||
|
||||
Feel free to contribute and help us make Ollama Web UI even better! 🙌
|
||||
Discover upcoming features on our roadmap in the [Open WebUI Documentation](https://docs.openwebui.com/roadmap/).
|
||||
|
||||
## Supporters ✨
|
||||
|
||||
|
@ -265,7 +139,7 @@ A big shoutout to our amazing supporters who's helping to make this project poss
|
|||
|
||||
### Acknowledgments
|
||||
|
||||
Special thanks to [Prof. Lawrence Kim @ SFU](https://www.lhkim.com/) and [Prof. Nick Vincent @ SFU](https://www.nickmvincent.com/) for their invaluable support and guidance in shaping this project into a research endeavor. Grateful for your mentorship throughout the journey! 🙌
|
||||
Special thanks to [Prof. Lawrence Kim](https://www.lhkim.com/) and [Prof. Nick Vincent](https://www.nickmvincent.com/) for their invaluable support and guidance in shaping this project into a research endeavor. Grateful for your mentorship throughout the journey! 🙌
|
||||
|
||||
## License 📜
|
||||
|
||||
|
@ -274,9 +148,8 @@ This project is licensed under the [MIT License](LICENSE) - see the [LICENSE](LI
|
|||
## Support 💬
|
||||
|
||||
If you have any questions, suggestions, or need assistance, please open an issue or join our
|
||||
[Ollama Web UI Discord community](https://discord.gg/5rJgQTnV4s) or
|
||||
[Ollama Discord community](https://discord.gg/ollama) to connect with us! 🤝
|
||||
[Open WebUI Discord community](https://discord.gg/5rJgQTnV4s) to connect with us! 🤝
|
||||
|
||||
---
|
||||
|
||||
Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Ollama Web UI even more amazing together! 💪
|
||||
Created by [Timothy J. Baek](https://github.com/tjbck) - Let's make Open WebUI even more amazing together! 💪
|
||||
|
|
|
@@ -1,32 +1,32 @@
# Ollama Web UI Troubleshooting Guide
# Open WebUI Troubleshooting Guide

## Understanding the Ollama WebUI Architecture
## Understanding the Open WebUI Architecture

The Ollama WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues.
The Open WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues.

- **How it Works**: The Ollama WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Ollama WebUI backend via the `/ollama/api` route. From there, the backend forwards the request to the Ollama API, using the URL specified in the `OLLAMA_API_BASE_URL` environment variable. Therefore, a request made to `/ollama/api` in the WebUI is effectively the same as making a request to `OLLAMA_API_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
- **How it Works**: The Open WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Open WebUI backend via the `/ollama/api` route. From there, the backend forwards the request to the Ollama API, using the URL specified in the `OLLAMA_API_BASE_URL` environment variable. Therefore, a request made to `/ollama/api` in the WebUI is effectively the same as making a request to `OLLAMA_API_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.

- **Security Benefits**: This design prevents direct exposure of the Ollama API to the frontend, safeguarding against potential CORS (Cross-Origin Resource Sharing) issues and unauthorized access. Requiring authentication to access the Ollama API further enhances this security layer.
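
Editor's note: a rough sketch of that path mapping, assuming FastAPI and the `requests` library (illustrative only, not the repository's actual proxy code):

```python
# Illustrative only: /ollama/api/<path> on the backend is forwarded to
# OLLAMA_API_BASE_URL/<path>, so /ollama/api/tags maps to .../tags upstream.
import os

import requests
from fastapi import FastAPI, Request

app = FastAPI()
OLLAMA_API_BASE_URL = os.environ.get("OLLAMA_API_BASE_URL", "http://localhost:11434/api")


@app.api_route("/ollama/api/{path:path}", methods=["GET", "POST"])
async def forward_to_ollama(path: str, request: Request):
    # Rebuild the upstream URL from the environment variable and forward the body.
    upstream_url = f"{OLLAMA_API_BASE_URL}/{path}"
    upstream = requests.request(request.method, upstream_url, data=await request.body())
    return upstream.json()
```
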
## Ollama WebUI: Server Connection Error
## Open WebUI: Server Connection Error

If you're experiencing connection issues, it's often because the WebUI docker container cannot reach the Ollama server at 127.0.0.1:11434 (host.docker.internal:11434) inside the container. Use the `--network=host` flag in your docker command to resolve this. Note that the port changes from 3000 to 8080, resulting in the link: `http://localhost:8080`.

**Example Docker Command**:

```bash
docker run -d --network=host -v ollama-webui:/app/backend/data -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name ollama-webui --restart always ghcr.io/ollama-webui/ollama-webui:main
docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name open-webui --restart always ghcr.io/open-webui/open-webui:main
```

### General Connection Errors

**Ensure Ollama Version is Up-to-Date**: Always start by checking that you have the latest version of Ollama. Visit [Ollama's official site](https://ollama.ai/) for the latest updates.
**Ensure Ollama Version is Up-to-Date**: Always start by checking that you have the latest version of Ollama. Visit [Ollama's official site](https://ollama.com/) for the latest updates.

**Troubleshooting Steps**:

1. **Verify Ollama URL Format**:
   - When running the Web UI container, ensure the `OLLAMA_API_BASE_URL` is correctly set, including the `/api` suffix (e.g., `http://192.168.1.1:11434/api` for different host setups).
   - In the Ollama WebUI, navigate to "Settings" > "General".
   - In the Open WebUI, navigate to "Settings" > "General".
   - Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]/api` (e.g., `http://localhost:11434/api`), including the `/api` suffix.

By following these enhanced troubleshooting steps, connection issues should be effectively resolved. For further assistance or queries, feel free to reach out to us on our community Discord.
2 backend/.gitignore vendored

@@ -7,3 +7,5 @@ uploads
_test
Pipfile
data/*
!data/config.json
.webui_secret_key
80 backend/apps/audio/main.py Normal file

@@ -0,0 +1,80 @@
import os
from fastapi import (
    FastAPI,
    Request,
    Depends,
    HTTPException,
    status,
    UploadFile,
    File,
    Form,
)
from fastapi.middleware.cors import CORSMiddleware
from faster_whisper import WhisperModel

from constants import ERROR_MESSAGES
from utils.utils import (
    decode_token,
    get_current_user,
    get_verified_user,
    get_admin_user,
)
from utils.misc import calculate_sha256

from config import CACHE_DIR, UPLOAD_DIR, WHISPER_MODEL, WHISPER_MODEL_DIR

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.post("/transcribe")
def transcribe(
    file: UploadFile = File(...),
    user=Depends(get_current_user),
):
    print(file.content_type)

    if file.content_type not in ["audio/mpeg", "audio/wav"]:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED,
        )

    try:
        filename = file.filename
        file_path = f"{UPLOAD_DIR}/{filename}"
        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)
            f.close()

        model = WhisperModel(
            WHISPER_MODEL,
            device="auto",
            compute_type="int8",
            download_root=WHISPER_MODEL_DIR,
        )

        segments, info = model.transcribe(file_path, beam_size=5)
        print(
            "Detected language '%s' with probability %f"
            % (info.language, info.language_probability)
        )

        transcript = "".join([segment.text for segment in list(segments)])

        return {"text": transcript.strip()}

    except Exception as e:
        print(e)

        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
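
Editor's note: a hypothetical client call against the `/transcribe` route above. The prefix under which this sub-app is mounted is not shown in this diff, so the URL and token handling here are assumptions:

```python
# Hypothetical usage of the transcription endpoint (mount prefix assumed).
import requests

url = "http://localhost:8080/audio/api/v1/transcribe"  # assumed mount point
headers = {"Authorization": "Bearer <your-session-token>"}  # the route requires an authenticated user

with open("sample.wav", "rb") as audio:
    r = requests.post(
        url,
        headers=headers,
        files={"file": ("sample.wav", audio, "audio/wav")},  # only audio/wav or audio/mpeg are accepted
    )

print(r.json())  # {"text": "..."}
```
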
193 backend/apps/images/main.py Normal file

@@ -0,0 +1,193 @@
|
|||
import re
|
||||
import requests
|
||||
from fastapi import (
|
||||
FastAPI,
|
||||
Request,
|
||||
Depends,
|
||||
HTTPException,
|
||||
status,
|
||||
UploadFile,
|
||||
File,
|
||||
Form,
|
||||
)
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from faster_whisper import WhisperModel
|
||||
|
||||
from constants import ERROR_MESSAGES
|
||||
from utils.utils import (
|
||||
get_current_user,
|
||||
get_admin_user,
|
||||
)
|
||||
from utils.misc import calculate_sha256
|
||||
from typing import Optional
|
||||
from pydantic import BaseModel
|
||||
from config import AUTOMATIC1111_BASE_URL
|
||||
|
||||
app = FastAPI()
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.state.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
|
||||
app.state.ENABLED = app.state.AUTOMATIC1111_BASE_URL != ""
|
||||
app.state.IMAGE_SIZE = "512x512"
|
||||
|
||||
|
||||
@app.get("/enabled", response_model=bool)
|
||||
async def get_enable_status(request: Request, user=Depends(get_admin_user)):
|
||||
return app.state.ENABLED
|
||||
|
||||
|
||||
@app.get("/enabled/toggle", response_model=bool)
|
||||
async def toggle_enabled(request: Request, user=Depends(get_admin_user)):
|
||||
try:
|
||||
r = requests.head(app.state.AUTOMATIC1111_BASE_URL)
|
||||
app.state.ENABLED = not app.state.ENABLED
|
||||
return app.state.ENABLED
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
|
||||
|
||||
|
||||
class UrlUpdateForm(BaseModel):
|
||||
url: str
|
||||
|
||||
|
||||
@app.get("/url")
|
||||
async def get_openai_url(user=Depends(get_admin_user)):
|
||||
return {"AUTOMATIC1111_BASE_URL": app.state.AUTOMATIC1111_BASE_URL}
|
||||
|
||||
|
||||
@app.post("/url/update")
|
||||
async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
|
||||
|
||||
if form_data.url == "":
|
||||
app.state.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
|
||||
else:
|
||||
app.state.AUTOMATIC1111_BASE_URL = form_data.url.strip("/")
|
||||
|
||||
return {
|
||||
"AUTOMATIC1111_BASE_URL": app.state.AUTOMATIC1111_BASE_URL,
|
||||
"status": True,
|
||||
}
|
||||
|
||||
|
||||
class ImageSizeUpdateForm(BaseModel):
|
||||
size: str
|
||||
|
||||
|
||||
@app.get("/size")
|
||||
async def get_image_size(user=Depends(get_admin_user)):
|
||||
return {"IMAGE_SIZE": app.state.IMAGE_SIZE}
|
||||
|
||||
|
||||
@app.post("/size/update")
|
||||
async def update_image_size(
|
||||
form_data: ImageSizeUpdateForm, user=Depends(get_admin_user)
|
||||
):
|
||||
pattern = r"^\d+x\d+$" # Regular expression pattern
|
||||
if re.match(pattern, form_data.size):
|
||||
app.state.IMAGE_SIZE = form_data.size
|
||||
return {
|
||||
"IMAGE_SIZE": app.state.IMAGE_SIZE,
|
||||
"status": True,
|
||||
}
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=ERROR_MESSAGES.INCORRECT_FORMAT(" (e.g., 512x512)."),
|
||||
)
|
||||
|
||||
|
||||
@app.get("/models")
|
||||
def get_models(user=Depends(get_current_user)):
|
||||
try:
|
||||
r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/sd-models")
|
||||
models = r.json()
|
||||
return models
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
|
||||
|
||||
|
||||
@app.get("/models/default")
|
||||
async def get_default_model(user=Depends(get_admin_user)):
|
||||
try:
|
||||
r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options")
|
||||
options = r.json()
|
||||
|
||||
return {"model": options["sd_model_checkpoint"]}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
|
||||
|
||||
|
||||
class UpdateModelForm(BaseModel):
|
||||
model: str
|
||||
|
||||
|
||||
def set_model_handler(model: str):
|
||||
r = requests.get(url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options")
|
||||
options = r.json()
|
||||
|
||||
if model != options["sd_model_checkpoint"]:
|
||||
options["sd_model_checkpoint"] = model
|
||||
r = requests.post(
|
||||
url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/options", json=options
|
||||
)
|
||||
|
||||
return options
|
||||
|
||||
|
||||
@app.post("/models/default/update")
|
||||
def update_default_model(
|
||||
form_data: UpdateModelForm,
|
||||
user=Depends(get_current_user),
|
||||
):
|
||||
return set_model_handler(form_data.model)
|
||||
|
||||
|
||||
class GenerateImageForm(BaseModel):
|
||||
model: Optional[str] = None
|
||||
prompt: str
|
||||
n: int = 1
|
||||
size: str = "512x512"
|
||||
negative_prompt: Optional[str] = None
|
||||
|
||||
|
||||
@app.post("/generations")
|
||||
def generate_image(
|
||||
form_data: GenerateImageForm,
|
||||
user=Depends(get_current_user),
|
||||
):
|
||||
|
||||
print(form_data)
|
||||
|
||||
try:
|
||||
if form_data.model:
|
||||
set_model_handler(form_data.model)
|
||||
|
||||
width, height = tuple(map(int, app.state.IMAGE_SIZE.split("x")))
|
||||
|
||||
data = {
|
||||
"prompt": form_data.prompt,
|
||||
"batch_size": form_data.n,
|
||||
"width": width,
|
||||
"height": height,
|
||||
}
|
||||
|
||||
if form_data.negative_prompt != None:
|
||||
data["negative_prompt"] = form_data.negative_prompt
|
||||
|
||||
print(data)
|
||||
|
||||
r = requests.post(
|
||||
url=f"{app.state.AUTOMATIC1111_BASE_URL}/sdapi/v1/txt2img",
|
||||
json=data,
|
||||
)
|
||||
|
||||
return r.json()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise HTTPException(status_code=r.status_code, detail=ERROR_MESSAGES.DEFAULT(e))
|
|
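
Editor's note: a hypothetical client call against the `/generations` route of `backend/apps/images/main.py` above, which proxies to an AUTOMATIC1111/stable-diffusion-webui instance; the mount prefix and token are assumptions:

```python
# Hypothetical usage of the image generation endpoint (mount prefix assumed).
import requests

url = "http://localhost:8080/images/api/v1/generations"  # assumed mount point
headers = {"Authorization": "Bearer <your-session-token>"}

payload = {
    "prompt": "a watercolor painting of a lighthouse at dusk",
    "n": 1,
    "negative_prompt": "blurry, low quality",
}

r = requests.post(url, json=payload, headers=headers)
print(r.json())  # AUTOMATIC1111 txt2img response for the configured IMAGE_SIZE
```
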
@ -1,15 +1,16 @@
|
|||
from fastapi import FastAPI, Request, Response, HTTPException, Depends
|
||||
from fastapi import FastAPI, Request, Response, HTTPException, Depends, status
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import StreamingResponse
|
||||
from fastapi.concurrency import run_in_threadpool
|
||||
|
||||
import requests
|
||||
import json
|
||||
import uuid
|
||||
from pydantic import BaseModel
|
||||
|
||||
from apps.web.models.users import Users
|
||||
from constants import ERROR_MESSAGES
|
||||
from utils.utils import decode_token, get_current_user
|
||||
from utils.utils import decode_token, get_current_user, get_admin_user
|
||||
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
|
||||
|
||||
app = FastAPI()
|
||||
|
@ -26,12 +27,12 @@ app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
|
|||
# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
|
||||
|
||||
|
||||
REQUEST_POOL = []
|
||||
|
||||
|
||||
@app.get("/url")
|
||||
async def get_ollama_api_url(user=Depends(get_current_user)):
|
||||
if user and user.role == "admin":
|
||||
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
|
||||
else:
|
||||
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
async def get_ollama_api_url(user=Depends(get_admin_user)):
|
||||
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
|
||||
|
||||
|
||||
class UrlUpdateForm(BaseModel):
|
||||
|
@ -39,12 +40,17 @@ class UrlUpdateForm(BaseModel):
|
|||
|
||||
|
||||
@app.post("/url/update")
|
||||
async def update_ollama_api_url(
|
||||
form_data: UrlUpdateForm, user=Depends(get_current_user)
|
||||
):
|
||||
if user and user.role == "admin":
|
||||
app.state.OLLAMA_API_BASE_URL = form_data.url
|
||||
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
|
||||
async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
|
||||
app.state.OLLAMA_API_BASE_URL = form_data.url
|
||||
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
|
||||
|
||||
|
||||
@app.get("/cancel/{request_id}")
|
||||
async def cancel_ollama_request(request_id: str, user=Depends(get_current_user)):
|
||||
if user:
|
||||
if request_id in REQUEST_POOL:
|
||||
REQUEST_POOL.remove(request_id)
|
||||
return True
|
||||
else:
|
||||
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
|
||||
|
@ -60,21 +66,45 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
|
|||
if path in ["pull", "delete", "push", "copy", "create"]:
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
headers.pop("Host", None)
|
||||
headers.pop("Authorization", None)
|
||||
headers.pop("Origin", None)
|
||||
headers.pop("Referer", None)
|
||||
headers.pop("host", None)
|
||||
headers.pop("authorization", None)
|
||||
headers.pop("origin", None)
|
||||
headers.pop("referer", None)
|
||||
|
||||
r = None
|
||||
|
||||
def get_request():
|
||||
nonlocal r
|
||||
|
||||
request_id = str(uuid.uuid4())
|
||||
try:
|
||||
REQUEST_POOL.append(request_id)
|
||||
|
||||
def stream_content():
|
||||
try:
|
||||
if path in ["chat"]:
|
||||
yield json.dumps({"id": request_id, "done": False}) + "\n"
|
||||
|
||||
for chunk in r.iter_content(chunk_size=8192):
|
||||
if request_id in REQUEST_POOL:
|
||||
yield chunk
|
||||
else:
|
||||
print("User: canceled request")
|
||||
break
|
||||
finally:
|
||||
if hasattr(r, "close"):
|
||||
r.close()
|
||||
REQUEST_POOL.remove(request_id)
|
||||
|
||||
r = requests.request(
|
||||
method=request.method,
|
||||
url=target_url,
|
||||
|
@ -85,8 +115,10 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
|
|||
|
||||
r.raise_for_status()
|
||||
|
||||
# r.close()
|
||||
|
||||
return StreamingResponse(
|
||||
r.iter_content(chunk_size=8192),
|
||||
stream_content(),
|
||||
status_code=r.status_code,
|
||||
headers=dict(r.headers),
|
||||
)
|
||||
|
@ -96,7 +128,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
|
|||
try:
|
||||
return await run_in_threadpool(get_request)
|
||||
except Exception as e:
|
||||
error_detail = "Ollama WebUI: Server Connection Error"
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
if r is not None:
|
||||
try:
|
||||
res = r.json()
|
||||
|
|
|
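
Editor's note: the hunks above add a `REQUEST_POOL` so that streamed `/chat` responses first emit a line containing a request id, and a call to `/cancel/{request_id}` removes that id and stops the stream. A hypothetical client-side flow (the `/ollama/api` prefix follows the route described in the README; the token and model name are assumptions):

```python
# Hypothetical client flow for cancelling a streamed chat generation.
import json

import requests

base = "http://localhost:8080/ollama/api"
headers = {"Authorization": "Bearer <your-session-token>"}

with requests.post(
    f"{base}/chat",
    json={"model": "llama2", "messages": [{"role": "user", "content": "Hello"}]},
    headers=headers,
    stream=True,
) as r:
    lines = r.iter_lines()
    first = json.loads(next(lines))  # {"id": "<request-id>", "done": false}
    request_id = first["id"]

    # ...later, abort the generation; the backend drops the id from REQUEST_POOL
    # and the streaming loop stops at the next chunk.
    requests.get(f"{base}/cancel/{request_id}", headers=headers)
```
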
@ -61,7 +61,7 @@ async def update_ollama_api_url(
|
|||
# yield line
|
||||
# except Exception as e:
|
||||
# print(e)
|
||||
# error_detail = "Ollama WebUI: Server Connection Error"
|
||||
# error_detail = "Open WebUI: Server Connection Error"
|
||||
# yield json.dumps({"error": error_detail, "message": str(e)}).encode()
|
||||
|
||||
|
||||
|
@ -110,7 +110,7 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
|
|||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
error_detail = "Ollama WebUI: Server Connection Error"
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
|
||||
if response is not None:
|
||||
try:
|
||||
|
|
|
@ -1,15 +1,24 @@
|
|||
from fastapi import FastAPI, Request, Response, HTTPException, Depends
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import StreamingResponse, JSONResponse
|
||||
from fastapi.responses import StreamingResponse, JSONResponse, FileResponse
|
||||
|
||||
import requests
|
||||
import json
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
from apps.web.models.users import Users
|
||||
from constants import ERROR_MESSAGES
|
||||
from utils.utils import decode_token, get_current_user
|
||||
from config import OPENAI_API_BASE_URL, OPENAI_API_KEY
|
||||
from utils.utils import (
|
||||
decode_token,
|
||||
get_current_user,
|
||||
get_verified_user,
|
||||
get_admin_user,
|
||||
)
|
||||
from config import OPENAI_API_BASE_URL, OPENAI_API_KEY, CACHE_DIR
|
||||
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
|
||||
app = FastAPI()
|
||||
app.add_middleware(
|
||||
|
@ -33,60 +42,114 @@ class KeyUpdateForm(BaseModel):
|
|||
|
||||
|
||||
@app.get("/url")
|
||||
async def get_openai_url(user=Depends(get_current_user)):
|
||||
if user and user.role == "admin":
|
||||
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
|
||||
else:
|
||||
raise HTTPException(status_code=401,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
async def get_openai_url(user=Depends(get_admin_user)):
|
||||
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
|
||||
|
||||
|
||||
@app.post("/url/update")
|
||||
async def update_openai_url(form_data: UrlUpdateForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user and user.role == "admin":
|
||||
app.state.OPENAI_API_BASE_URL = form_data.url
|
||||
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
|
||||
else:
|
||||
raise HTTPException(status_code=401,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
|
||||
app.state.OPENAI_API_BASE_URL = form_data.url
|
||||
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
|
||||
|
||||
|
||||
@app.get("/key")
|
||||
async def get_openai_key(user=Depends(get_current_user)):
|
||||
if user and user.role == "admin":
|
||||
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
|
||||
else:
|
||||
raise HTTPException(status_code=401,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
async def get_openai_key(user=Depends(get_admin_user)):
|
||||
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
|
||||
|
||||
|
||||
@app.post("/key/update")
|
||||
async def update_openai_key(form_data: KeyUpdateForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user and user.role == "admin":
|
||||
app.state.OPENAI_API_KEY = form_data.key
|
||||
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
|
||||
else:
|
||||
raise HTTPException(status_code=401,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
async def update_openai_key(form_data: KeyUpdateForm, user=Depends(get_admin_user)):
|
||||
app.state.OPENAI_API_KEY = form_data.key
|
||||
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
|
||||
|
||||
|
||||
@app.post("/audio/speech")
|
||||
async def speech(request: Request, user=Depends(get_verified_user)):
|
||||
target_url = f"{app.state.OPENAI_API_BASE_URL}/audio/speech"
|
||||
|
||||
if app.state.OPENAI_API_KEY == "":
|
||||
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
|
||||
|
||||
body = await request.body()
|
||||
|
||||
name = hashlib.sha256(body).hexdigest()
|
||||
|
||||
SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/")
|
||||
SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
file_path = SPEECH_CACHE_DIR.joinpath(f"{name}.mp3")
|
||||
file_body_path = SPEECH_CACHE_DIR.joinpath(f"{name}.json")
|
||||
|
||||
# Check if the file already exists in the cache
|
||||
if file_path.is_file():
|
||||
return FileResponse(file_path)
|
||||
|
||||
headers = {}
|
||||
headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
|
||||
headers["Content-Type"] = "application/json"
|
||||
|
||||
try:
|
||||
print("openai")
|
||||
r = requests.post(
|
||||
url=target_url,
|
||||
data=body,
|
||||
headers=headers,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
r.raise_for_status()
|
||||
|
||||
# Save the streaming content to a file
|
||||
with open(file_path, "wb") as f:
|
||||
for chunk in r.iter_content(chunk_size=8192):
|
||||
f.write(chunk)
|
||||
|
||||
with open(file_body_path, "w") as f:
|
||||
json.dump(json.loads(body.decode("utf-8")), f)
|
||||
|
||||
# Return the saved file
|
||||
return FileResponse(file_path)
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
error_detail = "Open WebUI: Server Connection Error"
|
||||
if r is not None:
|
||||
try:
|
||||
res = r.json()
|
||||
if "error" in res:
|
||||
error_detail = f"External: {res['error']}"
|
||||
except:
|
||||
error_detail = f"External: {e}"
|
||||
|
||||
raise HTTPException(status_code=r.status_code, detail=error_detail)
|
||||
|
||||
|
||||
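To make the caching behaviour above concrete, here is a minimal client-side sketch. The `/openai/audio/speech` prefix, port, and token value are assumptions about how this router is mounted, not values taken from this diff: the first call streams audio from OpenAI and writes `{sha256(body)}.mp3` into the cache, and an identical request body is then served straight from disk.

```python
import requests

payload = {"model": "tts-1", "voice": "alloy", "input": "Hello from Open WebUI"}
headers = {"Authorization": "Bearer <webui-token>"}  # hypothetical user token

# First call: proxied to OpenAI and cached on disk.
r1 = requests.post("http://localhost:8080/openai/audio/speech", json=payload, headers=headers)

# Second call with the same body: served from the on-disk cache.
r2 = requests.post("http://localhost:8080/openai/audio/speech", json=payload, headers=headers)
open("speech.mp3", "wb").write(r2.content)
```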
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
    target_url = f"{app.state.OPENAI_API_BASE_URL}/{path}"
    print(target_url, app.state.OPENAI_API_KEY)

    if user.role not in ["user", "admin"]:
        raise HTTPException(status_code=401,
                            detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
    if app.state.OPENAI_API_KEY == "":
        raise HTTPException(status_code=401,
                            detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)

    body = await request.body()
    # headers = dict(request.headers)
    # print(headers)

    # TODO: Remove below after gpt-4-vision fix from Open AI
    # Try to decode the body of the request from bytes to a UTF-8 string (Require add max_token to fix gpt-4-vision)
    try:
        body = body.decode("utf-8")
        body = json.loads(body)

        # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
        # This is a workaround until OpenAI fixes the issue with this model
        if body.get("model") == "gpt-4-vision-preview":
            if "max_tokens" not in body:
                body["max_tokens"] = 4000
            print("Modified body_dict:", body)

        # Convert the modified body back to JSON
        body = json.dumps(body)
    except json.JSONDecodeError as e:
        print("Error loading request body into a dictionary:", e)

    headers = {}
    headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"

@@ -121,17 +184,15 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):

        response_data = r.json()

        print(type(response_data))

        if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
            response_data["data"] = list(
                filter(lambda model: "gpt" in model["id"],
                       response_data["data"]))
                filter(lambda model: "gpt" in model["id"], response_data["data"])
            )

        return response_data
    except Exception as e:
        print(e)
        error_detail = "Ollama WebUI: Server Connection Error"
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()

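The `/models` passthrough above keeps only entries whose `id` contains `"gpt"`. A tiny illustration with a made-up response payload (not real API output):

```python
response_data = {"data": [{"id": "gpt-4"}, {"id": "whisper-1"}, {"id": "gpt-3.5-turbo"}]}

response_data["data"] = list(
    filter(lambda model: "gpt" in model["id"], response_data["data"])
)
# -> [{"id": "gpt-4"}, {"id": "gpt-3.5-turbo"}]
```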
@ -1,6 +1,5 @@
|
|||
from fastapi import (
|
||||
FastAPI,
|
||||
Request,
|
||||
Depends,
|
||||
HTTPException,
|
||||
status,
|
||||
|
@ -11,7 +10,11 @@ from fastapi import (
|
|||
from fastapi.middleware.cors import CORSMiddleware
|
||||
import os, shutil
|
||||
|
||||
# from chromadb.utils import embedding_functions
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from sentence_transformers import SentenceTransformer
|
||||
from chromadb.utils import embedding_functions
|
||||
|
||||
from langchain_community.document_loaders import (
|
||||
WebBaseLoader,
|
||||
|
@ -19,29 +22,71 @@ from langchain_community.document_loaders import (
|
|||
PyPDFLoader,
|
||||
CSVLoader,
|
||||
Docx2txtLoader,
|
||||
UnstructuredEPubLoader,
|
||||
UnstructuredWordDocumentLoader,
|
||||
UnstructuredMarkdownLoader,
|
||||
UnstructuredXMLLoader,
|
||||
UnstructuredRSTLoader,
|
||||
UnstructuredExcelLoader,
|
||||
)
|
||||
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
||||
from langchain_community.vectorstores import Chroma
|
||||
from langchain.chains import RetrievalQA
|
||||
|
||||
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional
|
||||
|
||||
import mimetypes
|
||||
import uuid
|
||||
import time
|
||||
import json
|
||||
|
||||
|
||||
from apps.web.models.documents import (
|
||||
Documents,
|
||||
DocumentForm,
|
||||
DocumentResponse,
|
||||
)
|
||||
|
||||
from utils.misc import (
|
||||
calculate_sha256,
|
||||
calculate_sha256_string,
|
||||
sanitize_filename,
|
||||
extract_folders_after_data_docs,
|
||||
)
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
from config import (
|
||||
UPLOAD_DIR,
|
||||
DOCS_DIR,
|
||||
RAG_EMBEDDING_MODEL,
|
||||
RAG_EMBEDDING_MODEL_DEVICE_TYPE,
|
||||
CHROMA_CLIENT,
|
||||
CHUNK_SIZE,
|
||||
CHUNK_OVERLAP,
|
||||
RAG_TEMPLATE,
|
||||
)
|
||||
|
||||
from utils.misc import calculate_sha256
|
||||
from utils.utils import get_current_user
|
||||
from config import UPLOAD_DIR, EMBED_MODEL, CHROMA_CLIENT, CHUNK_SIZE, CHUNK_OVERLAP
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
# EMBEDDING_FUNC = embedding_functions.SentenceTransformerEmbeddingFunction(
|
||||
# model_name=EMBED_MODEL
|
||||
# )
|
||||
#
|
||||
# if RAG_EMBEDDING_MODEL:
|
||||
# sentence_transformer_ef = SentenceTransformer(
|
||||
# model_name_or_path=RAG_EMBEDDING_MODEL,
|
||||
# cache_folder=RAG_EMBEDDING_MODEL_DIR,
|
||||
# device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
|
||||
# )
|
||||
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
app.state.CHUNK_SIZE = CHUNK_SIZE
|
||||
app.state.CHUNK_OVERLAP = CHUNK_OVERLAP
|
||||
app.state.RAG_TEMPLATE = RAG_TEMPLATE
|
||||
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
|
||||
app.state.sentence_transformer_ef = (
|
||||
embedding_functions.SentenceTransformerEmbeddingFunction(
|
||||
model_name=app.state.RAG_EMBEDDING_MODEL,
|
||||
device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
origins = ["*"]
|
||||
|
||||
app.add_middleware(
|
||||
|
@ -63,7 +108,7 @@ class StoreWebForm(CollectionNameForm):
|
|||
|
||||
def store_data_in_vector_db(data, collection_name) -> bool:
|
||||
text_splitter = RecursiveCharacterTextSplitter(
|
||||
chunk_size=CHUNK_SIZE, chunk_overlap=CHUNK_OVERLAP
|
||||
chunk_size=app.state.CHUNK_SIZE, chunk_overlap=app.state.CHUNK_OVERLAP
|
||||
)
|
||||
docs = text_splitter.split_documents(data)
|
||||
|
||||
|
@ -71,7 +116,10 @@ def store_data_in_vector_db(data, collection_name) -> bool:
|
|||
metadatas = [doc.metadata for doc in docs]
|
||||
|
||||
try:
|
||||
collection = CHROMA_CLIENT.create_collection(name=collection_name)
|
||||
collection = CHROMA_CLIENT.create_collection(
|
||||
name=collection_name,
|
||||
embedding_function=app.state.sentence_transformer_ef,
|
||||
)
|
||||
|
||||
collection.add(
|
||||
documents=texts, metadatas=metadatas, ids=[str(uuid.uuid1()) for _ in texts]
|
||||
|
@ -87,22 +135,112 @@ def store_data_in_vector_db(data, collection_name) -> bool:
|
|||
|
||||
@app.get("/")
|
||||
async def get_status():
|
||||
return {"status": True}
|
||||
return {
|
||||
"status": True,
|
||||
"chunk_size": app.state.CHUNK_SIZE,
|
||||
"chunk_overlap": app.state.CHUNK_OVERLAP,
|
||||
"template": app.state.RAG_TEMPLATE,
|
||||
"embedding_model": app.state.RAG_EMBEDDING_MODEL,
|
||||
}
|
||||
|
||||
|
||||
@app.get("/query/{collection_name}")
|
||||
def query_collection(
|
||||
collection_name: str,
|
||||
query: str,
|
||||
k: Optional[int] = 4,
|
||||
@app.get("/embedding/model")
|
||||
async def get_embedding_model(user=Depends(get_admin_user)):
|
||||
return {
|
||||
"status": True,
|
||||
"embedding_model": app.state.RAG_EMBEDDING_MODEL,
|
||||
}
|
||||
|
||||
|
||||
class EmbeddingModelUpdateForm(BaseModel):
|
||||
embedding_model: str
|
||||
|
||||
|
||||
@app.post("/embedding/model/update")
|
||||
async def update_embedding_model(
|
||||
form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
|
||||
):
|
||||
app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model
|
||||
app.state.sentence_transformer_ef = (
|
||||
embedding_functions.SentenceTransformerEmbeddingFunction(
|
||||
model_name=app.state.RAG_EMBEDDING_MODEL,
|
||||
device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
|
||||
)
|
||||
)
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
"embedding_model": app.state.RAG_EMBEDDING_MODEL,
|
||||
}
|
||||
|
||||
|
||||
@app.get("/chunk")
|
||||
async def get_chunk_params(user=Depends(get_admin_user)):
|
||||
return {
|
||||
"status": True,
|
||||
"chunk_size": app.state.CHUNK_SIZE,
|
||||
"chunk_overlap": app.state.CHUNK_OVERLAP,
|
||||
}
|
||||
|
||||
|
||||
class ChunkParamUpdateForm(BaseModel):
|
||||
chunk_size: int
|
||||
chunk_overlap: int
|
||||
|
||||
|
||||
@app.post("/chunk/update")
|
||||
async def update_chunk_params(
|
||||
form_data: ChunkParamUpdateForm, user=Depends(get_admin_user)
|
||||
):
|
||||
app.state.CHUNK_SIZE = form_data.chunk_size
|
||||
app.state.CHUNK_OVERLAP = form_data.chunk_overlap
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
"chunk_size": app.state.CHUNK_SIZE,
|
||||
"chunk_overlap": app.state.CHUNK_OVERLAP,
|
||||
}
|
||||
|
||||
|
||||
@app.get("/template")
|
||||
async def get_rag_template(user=Depends(get_current_user)):
|
||||
return {
|
||||
"status": True,
|
||||
"template": app.state.RAG_TEMPLATE,
|
||||
}
|
||||
|
||||
|
||||
class RAGTemplateForm(BaseModel):
|
||||
template: str
|
||||
|
||||
|
||||
@app.post("/template/update")
|
||||
async def update_rag_template(form_data: RAGTemplateForm, user=Depends(get_admin_user)):
|
||||
# TODO: check template requirements
|
||||
app.state.RAG_TEMPLATE = (
|
||||
form_data.template if form_data.template != "" else RAG_TEMPLATE
|
||||
)
|
||||
return {"status": True, "template": app.state.RAG_TEMPLATE}
|
||||
|
||||
|
||||
class QueryDocForm(BaseModel):
|
||||
collection_name: str
|
||||
query: str
|
||||
k: Optional[int] = 4
|
||||
|
||||
|
||||
@app.post("/query/doc")
|
||||
def query_doc(
|
||||
form_data: QueryDocForm,
|
||||
user=Depends(get_current_user),
|
||||
):
|
||||
try:
|
||||
# if you use docker use the model from the environment variable
|
||||
collection = CHROMA_CLIENT.get_collection(
|
||||
name=collection_name,
|
||||
name=form_data.collection_name,
|
||||
embedding_function=app.state.sentence_transformer_ef,
|
||||
)
|
||||
result = collection.query(query_texts=[query], n_results=k)
|
||||
|
||||
result = collection.query(query_texts=[form_data.query], n_results=form_data.k)
|
||||
return result
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
@ -112,14 +250,99 @@ def query_collection(
|
|||
)
|
||||
|
||||
|
||||
class QueryCollectionsForm(BaseModel):
    collection_names: List[str]
    query: str
    k: Optional[int] = 4


def merge_and_sort_query_results(query_results, k):
    # Initialize lists to store combined data
    combined_ids = []
    combined_distances = []
    combined_metadatas = []
    combined_documents = []

    # Combine data from each dictionary
    for data in query_results:
        combined_ids.extend(data["ids"][0])
        combined_distances.extend(data["distances"][0])
        combined_metadatas.extend(data["metadatas"][0])
        combined_documents.extend(data["documents"][0])

    # Create a list of tuples (distance, id, metadata, document)
    combined = list(
        zip(combined_distances, combined_ids, combined_metadatas, combined_documents)
    )

    # Sort the list based on distances
    combined.sort(key=lambda x: x[0])

    # Unzip the sorted list
    sorted_distances, sorted_ids, sorted_metadatas, sorted_documents = zip(*combined)

    # Slicing the lists to include only k elements
    sorted_distances = list(sorted_distances)[:k]
    sorted_ids = list(sorted_ids)[:k]
    sorted_metadatas = list(sorted_metadatas)[:k]
    sorted_documents = list(sorted_documents)[:k]

    # Create the output dictionary
    merged_query_results = {
        "ids": [sorted_ids],
        "distances": [sorted_distances],
        "metadatas": [sorted_metadatas],
        "documents": [sorted_documents],
        "embeddings": None,
        "uris": None,
        "data": None,
    }

    return merged_query_results
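For clarity, a small worked example of the merge step. The inputs below are made-up Chroma-style result dicts, not data from this repository: two per-collection results are flattened, sorted by ascending distance, and trimmed to `k` hits.

```python
result_a = {
    "ids": [["a1", "a2"]],
    "distances": [[0.12, 0.40]],
    "metadatas": [[{"source": "a.pdf"}, {"source": "a.pdf"}]],
    "documents": [["chunk a1", "chunk a2"]],
}
result_b = {
    "ids": [["b1"]],
    "distances": [[0.25]],
    "metadatas": [[{"source": "b.md"}]],
    "documents": [["chunk b1"]],
}

merged = merge_and_sort_query_results([result_a, result_b], k=2)
# merged["ids"] == [["a1", "b1"]]          -- the two closest chunks across both collections
# merged["distances"] == [[0.12, 0.25]]
```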
|
||||
|
||||
|
||||
@app.post("/query/collection")
|
||||
def query_collection(
|
||||
form_data: QueryCollectionsForm,
|
||||
user=Depends(get_current_user),
|
||||
):
|
||||
results = []
|
||||
|
||||
for collection_name in form_data.collection_names:
|
||||
try:
|
||||
# if you use docker use the model from the environment variable
|
||||
collection = CHROMA_CLIENT.get_collection(
|
||||
name=collection_name,
|
||||
embedding_function=app.state.sentence_transformer_ef,
|
||||
)
|
||||
|
||||
result = collection.query(
|
||||
query_texts=[form_data.query], n_results=form_data.k
|
||||
)
|
||||
results.append(result)
|
||||
except:
|
||||
pass
|
||||
|
||||
return merge_and_sort_query_results(results, form_data.k)
|
||||
|
||||
|
||||
@app.post("/web")
|
||||
def store_web(form_data: StoreWebForm, user=Depends(get_current_user)):
|
||||
# "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
|
||||
try:
|
||||
loader = WebBaseLoader(form_data.url)
|
||||
data = loader.load()
|
||||
store_data_in_vector_db(data, form_data.collection_name)
|
||||
return {"status": True, "collection_name": form_data.collection_name}
|
||||
|
||||
collection_name = form_data.collection_name
|
||||
if collection_name == "":
|
||||
collection_name = calculate_sha256_string(form_data.url)[:63]
|
||||
|
||||
store_data_in_vector_db(data, collection_name)
|
||||
return {
|
||||
"status": True,
|
||||
"collection_name": collection_name,
|
||||
"filename": form_data.url,
|
||||
}
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise HTTPException(
|
||||
|
@ -128,6 +351,87 @@ def store_web(form_data: StoreWebForm, user=Depends(get_current_user)):
|
|||
)
|
||||
|
||||
|
||||
def get_loader(filename: str, file_content_type: str, file_path: str):
    file_ext = filename.split(".")[-1].lower()
    known_type = True

    known_source_ext = [
        "go", "py", "java", "sh", "bat", "ps1", "cmd", "js", "ts", "css",
        "cpp", "hpp", "h", "c", "cs", "sql", "log", "ini", "pl", "pm",
        "r", "dart", "dockerfile", "env", "php", "hs", "hsc", "lua",
        "nginxconf", "conf", "m", "mm", "plsql", "perl", "rb", "rs",
        "db2", "scala", "bash", "swift", "vue", "svelte",
    ]

    if file_ext == "pdf":
        loader = PyPDFLoader(file_path)
    elif file_ext == "csv":
        loader = CSVLoader(file_path)
    elif file_ext == "rst":
        loader = UnstructuredRSTLoader(file_path, mode="elements")
    elif file_ext == "xml":
        loader = UnstructuredXMLLoader(file_path)
    elif file_ext == "md":
        loader = UnstructuredMarkdownLoader(file_path)
    elif file_content_type == "application/epub+zip":
        loader = UnstructuredEPubLoader(file_path)
    elif (
        file_content_type
        == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        or file_ext in ["doc", "docx"]
    ):
        loader = Docx2txtLoader(file_path)
    elif file_content_type in [
        "application/vnd.ms-excel",
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    ] or file_ext in ["xls", "xlsx"]:
        loader = UnstructuredExcelLoader(file_path)
    elif file_ext in known_source_ext or (
        file_content_type and file_content_type.find("text/") >= 0
    ):
        loader = TextLoader(file_path)
    else:
        loader = TextLoader(file_path)
        known_type = False

    return loader, known_type
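A quick illustration of how the resolver behaves. The filenames, content types, and paths here are hypothetical inputs, not values from the repository: a Markdown upload gets the Unstructured loader, while an unrecognised extension falls back to `TextLoader` with `known_type` set to `False`.

```python
loader, known_type = get_loader(
    "notes.md", "text/markdown", "/app/backend/data/uploads/notes.md"
)
# -> UnstructuredMarkdownLoader, known_type == True

loader, known_type = get_loader(
    "dump.bin", "application/octet-stream", "/app/backend/data/uploads/dump.bin"
)
# -> TextLoader fallback, known_type == False
```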
|
||||
|
||||
|
||||
@app.post("/doc")
|
||||
def store_doc(
|
||||
collection_name: Optional[str] = Form(None),
|
||||
|
@ -136,17 +440,7 @@ def store_doc(
|
|||
):
|
||||
# "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
|
||||
|
||||
if file.content_type not in [
|
||||
"application/pdf",
|
||||
"text/plain",
|
||||
"text/csv",
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||
]:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED,
|
||||
)
|
||||
|
||||
print(file.content_type)
|
||||
try:
|
||||
filename = file.filename
|
||||
file_path = f"{UPLOAD_DIR}/{filename}"
|
||||
|
@ -160,23 +454,17 @@ def store_doc(
|
|||
collection_name = calculate_sha256(f)[:63]
|
||||
f.close()
|
||||
|
||||
if file.content_type == "application/pdf":
|
||||
loader = PyPDFLoader(file_path)
|
||||
elif (
|
||||
file.content_type
|
||||
== "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
|
||||
):
|
||||
loader = Docx2txtLoader(file_path)
|
||||
elif file.content_type == "text/plain":
|
||||
loader = TextLoader(file_path)
|
||||
elif file.content_type == "text/csv":
|
||||
loader = CSVLoader(file_path)
|
||||
|
||||
loader, known_type = get_loader(file.filename, file.content_type, file_path)
|
||||
data = loader.load()
|
||||
result = store_data_in_vector_db(data, collection_name)
|
||||
|
||||
if result:
|
||||
return {"status": True, "collection_name": collection_name}
|
||||
return {
|
||||
"status": True,
|
||||
"collection_name": collection_name,
|
||||
"filename": filename,
|
||||
"known_type": known_type,
|
||||
}
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
|
@ -184,45 +472,96 @@ def store_doc(
|
|||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.DEFAULT(e),
|
||||
)
|
||||
if "No pandoc was found" in str(e):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.DEFAULT(e),
|
||||
)
|
||||
|
||||
|
||||
@app.get("/reset/db")
|
||||
def reset_vector_db(user=Depends(get_current_user)):
|
||||
if user.role == "admin":
|
||||
CHROMA_CLIENT.reset()
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/reset")
|
||||
def reset(user=Depends(get_current_user)) -> bool:
|
||||
if user.role == "admin":
|
||||
folder = f"{UPLOAD_DIR}"
|
||||
for filename in os.listdir(folder):
|
||||
file_path = os.path.join(folder, filename)
|
||||
try:
|
||||
if os.path.isfile(file_path) or os.path.islink(file_path):
|
||||
os.unlink(file_path)
|
||||
elif os.path.isdir(file_path):
|
||||
shutil.rmtree(file_path)
|
||||
except Exception as e:
|
||||
print("Failed to delete %s. Reason: %s" % (file_path, e))
|
||||
|
||||
@app.get("/scan")
|
||||
def scan_docs_dir(user=Depends(get_admin_user)):
|
||||
for path in Path(DOCS_DIR).rglob("./**/*"):
|
||||
try:
|
||||
CHROMA_CLIENT.reset()
|
||||
if path.is_file() and not path.name.startswith("."):
|
||||
tags = extract_folders_after_data_docs(path)
|
||||
filename = path.name
|
||||
file_content_type = mimetypes.guess_type(path)
|
||||
|
||||
f = open(path, "rb")
|
||||
collection_name = calculate_sha256(f)[:63]
|
||||
f.close()
|
||||
|
||||
loader, known_type = get_loader(
|
||||
filename, file_content_type[0], str(path)
|
||||
)
|
||||
data = loader.load()
|
||||
|
||||
result = store_data_in_vector_db(data, collection_name)
|
||||
|
||||
if result:
|
||||
sanitized_filename = sanitize_filename(filename)
|
||||
doc = Documents.get_doc_by_name(sanitized_filename)
|
||||
|
||||
if doc == None:
|
||||
doc = Documents.insert_new_doc(
|
||||
user.id,
|
||||
DocumentForm(
|
||||
**{
|
||||
"name": sanitized_filename,
|
||||
"title": filename,
|
||||
"collection_name": collection_name,
|
||||
"filename": filename,
|
||||
"content": (
|
||||
json.dumps(
|
||||
{
|
||||
"tags": list(
|
||||
map(
|
||||
lambda name: {"name": name},
|
||||
tags,
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
if len(tags)
|
||||
else "{}"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
return True
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
@app.get("/reset/db")
|
||||
def reset_vector_db(user=Depends(get_admin_user)):
|
||||
CHROMA_CLIENT.reset()
|
||||
|
||||
|
||||
@app.get("/reset")
|
||||
def reset(user=Depends(get_admin_user)) -> bool:
|
||||
folder = f"{UPLOAD_DIR}"
|
||||
for filename in os.listdir(folder):
|
||||
file_path = os.path.join(folder, filename)
|
||||
try:
|
||||
if os.path.isfile(file_path) or os.path.islink(file_path):
|
||||
os.unlink(file_path)
|
||||
elif os.path.isdir(file_path):
|
||||
shutil.rmtree(file_path)
|
||||
except Exception as e:
|
||||
print("Failed to delete %s. Reason: %s" % (file_path, e))
|
||||
|
||||
try:
|
||||
CHROMA_CLIENT.reset()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
return True
|
||||
|
|
|
@@ -1,4 +1,6 @@
from peewee import *
from config import DATA_DIR

DB = SqliteDatabase("./data/ollama.db")
DB = SqliteDatabase(f"{DATA_DIR}/ollama.db")
DB.connect()
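The database path now comes from `DATA_DIR` instead of the hard-coded `./data`. A minimal sketch of how such a setting is typically resolved in `config.py`; the exact environment variable name and default are assumptions, not taken from this diff:

```python
import os
from pathlib import Path

# Assumed pattern: DATA_DIR falls back to ./data when the env var is unset.
DATA_DIR = str(Path(os.environ.get("DATA_DIR", "./data")).resolve())
```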
|
|
@@ -1,15 +1,38 @@
from fastapi import FastAPI, Depends
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware

from apps.web.routers import auths, users, chats, modelfiles, prompts, configs, utils
from config import WEBUI_VERSION, WEBUI_AUTH
from apps.web.routers import (
    auths,
    users,
    chats,
    documents,
    modelfiles,
    prompts,
    configs,
    utils,
)
from config import (
    WEBUI_VERSION,
    WEBUI_AUTH,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    ENABLE_SIGNUP,
    USER_PERMISSIONS,
)

app = FastAPI()

origins = ["*"]

app.state.ENABLE_SIGNUP = True
app.state.DEFAULT_MODELS = None
app.state.ENABLE_SIGNUP = ENABLE_SIGNUP
app.state.JWT_EXPIRES_IN = "-1"

app.state.DEFAULT_MODELS = DEFAULT_MODELS
app.state.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE
app.state.USER_PERMISSIONS = USER_PERMISSIONS


app.add_middleware(
    CORSMiddleware,

@ -22,9 +45,8 @@ app.add_middleware(
|
|||
app.include_router(auths.router, prefix="/auths", tags=["auths"])
|
||||
app.include_router(users.router, prefix="/users", tags=["users"])
|
||||
app.include_router(chats.router, prefix="/chats", tags=["chats"])
|
||||
app.include_router(modelfiles.router,
|
||||
prefix="/modelfiles",
|
||||
tags=["modelfiles"])
|
||||
app.include_router(documents.router, prefix="/documents", tags=["documents"])
|
||||
app.include_router(modelfiles.router, prefix="/modelfiles", tags=["modelfiles"])
|
||||
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
|
||||
|
||||
app.include_router(configs.router, prefix="/configs", tags=["configs"])
|
||||
|
@ -35,7 +57,7 @@ app.include_router(utils.router, prefix="/utils", tags=["utils"])
|
|||
async def get_status():
|
||||
return {
|
||||
"status": True,
|
||||
"version": WEBUI_VERSION,
|
||||
"auth": WEBUI_AUTH,
|
||||
"default_models": app.state.DEFAULT_MODELS,
|
||||
"default_prompt_suggestions": app.state.DEFAULT_PROMPT_SUGGESTIONS,
|
||||
}
|
||||
|
|
|
@@ -5,12 +5,7 @@ import uuid
from peewee import *

from apps.web.models.users import UserModel, Users
from utils.utils import (
    verify_password,
    get_password_hash,
    bearer_scheme,
    create_token,
)
from utils.utils import verify_password

from apps.web.internal.db import DB

@@ -63,6 +58,15 @@ class SigninForm(BaseModel):
    password: str


class ProfileImageUrlForm(BaseModel):
    profile_image_url: str


class UpdateProfileForm(BaseModel):
    profile_image_url: str
    name: str


class UpdatePasswordForm(BaseModel):
    password: str
    new_password: str

@ -60,23 +60,23 @@ class ChatTitleIdResponse(BaseModel):
|
|||
|
||||
|
||||
class ChatTable:
|
||||
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
db.create_tables([Chat])
|
||||
|
||||
def insert_new_chat(self, user_id: str,
|
||||
form_data: ChatForm) -> Optional[ChatModel]:
|
||||
def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
|
||||
id = str(uuid.uuid4())
|
||||
chat = ChatModel(
|
||||
**{
|
||||
"id": id,
|
||||
"user_id": user_id,
|
||||
"title": form_data.chat["title"] if "title" in
|
||||
form_data.chat else "New Chat",
|
||||
"title": (
|
||||
form_data.chat["title"] if "title" in form_data.chat else "New Chat"
|
||||
),
|
||||
"chat": json.dumps(form_data.chat),
|
||||
"timestamp": int(time.time()),
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
result = Chat.create(**chat.model_dump())
|
||||
return chat if result else None
|
||||
|
@ -109,25 +109,43 @@ class ChatTable:
|
|||
except:
|
||||
return None
|
||||
|
||||
def get_chat_lists_by_user_id(self,
|
||||
user_id: str,
|
||||
skip: int = 0,
|
||||
limit: int = 50) -> List[ChatModel]:
|
||||
def get_chat_lists_by_user_id(
|
||||
self, user_id: str, skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
|
||||
Chat.user_id == user_id).order_by(Chat.timestamp.desc())
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select()
|
||||
.where(Chat.user_id == user_id)
|
||||
.order_by(Chat.timestamp.desc())
|
||||
# .limit(limit)
|
||||
# .offset(skip)
|
||||
]
|
||||
|
||||
def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
def get_chat_lists_by_chat_ids(
|
||||
self, chat_ids: List[str], skip: int = 0, limit: int = 50
|
||||
) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
|
||||
Chat.user_id == user_id).order_by(Chat.timestamp.desc())
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select()
|
||||
.where(Chat.id.in_(chat_ids))
|
||||
.order_by(Chat.timestamp.desc())
|
||||
]
|
||||
|
||||
def get_chat_by_id_and_user_id(self, id: str,
|
||||
user_id: str) -> Optional[ChatModel]:
|
||||
def get_all_chats(self) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select().order_by(Chat.timestamp.desc())
|
||||
]
|
||||
|
||||
def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
|
||||
return [
|
||||
ChatModel(**model_to_dict(chat))
|
||||
for chat in Chat.select()
|
||||
.where(Chat.user_id == user_id)
|
||||
.order_by(Chat.timestamp.desc())
|
||||
]
|
||||
|
||||
def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
|
||||
try:
|
||||
chat = Chat.get(Chat.id == id, Chat.user_id == user_id)
|
||||
return ChatModel(**model_to_dict(chat))
|
||||
|
@ -142,8 +160,7 @@ class ChatTable:
|
|||
|
||||
def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
|
||||
try:
|
||||
query = Chat.delete().where((Chat.id == id)
|
||||
& (Chat.user_id == user_id))
|
||||
query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id))
|
||||
query.execute() # Remove the rows, return number of rows removed.
|
||||
|
||||
return True
|
||||
|
|
backend/apps/web/models/documents.py (new file, 154 lines)

@@ -0,0 +1,154 @@
|
|||
from pydantic import BaseModel
|
||||
from peewee import *
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
from typing import List, Union, Optional
|
||||
import time
|
||||
|
||||
from utils.utils import decode_token
|
||||
from utils.misc import get_gravatar_url
|
||||
|
||||
from apps.web.internal.db import DB
|
||||
|
||||
import json
|
||||
|
||||
####################
|
||||
# Documents DB Schema
|
||||
####################
|
||||
|
||||
|
||||
class Document(Model):
|
||||
collection_name = CharField(unique=True)
|
||||
name = CharField(unique=True)
|
||||
title = CharField()
|
||||
filename = CharField()
|
||||
content = TextField(null=True)
|
||||
user_id = CharField()
|
||||
timestamp = DateField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
||||
|
||||
class DocumentModel(BaseModel):
|
||||
collection_name: str
|
||||
name: str
|
||||
title: str
|
||||
filename: str
|
||||
content: Optional[str] = None
|
||||
user_id: str
|
||||
timestamp: int # timestamp in epoch
|
||||
|
||||
|
||||
####################
|
||||
# Forms
|
||||
####################
|
||||
|
||||
|
||||
class DocumentResponse(BaseModel):
|
||||
collection_name: str
|
||||
name: str
|
||||
title: str
|
||||
filename: str
|
||||
content: Optional[dict] = None
|
||||
user_id: str
|
||||
timestamp: int # timestamp in epoch
|
||||
|
||||
|
||||
class DocumentUpdateForm(BaseModel):
|
||||
name: str
|
||||
title: str
|
||||
|
||||
|
||||
class DocumentForm(DocumentUpdateForm):
|
||||
collection_name: str
|
||||
filename: str
|
||||
content: Optional[str] = None
|
||||
|
||||
|
||||
class DocumentsTable:
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
self.db.create_tables([Document])
|
||||
|
||||
def insert_new_doc(
|
||||
self, user_id: str, form_data: DocumentForm
|
||||
) -> Optional[DocumentModel]:
|
||||
document = DocumentModel(
|
||||
**{
|
||||
**form_data.model_dump(),
|
||||
"user_id": user_id,
|
||||
"timestamp": int(time.time()),
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
result = Document.create(**document.model_dump())
|
||||
if result:
|
||||
return document
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
def get_doc_by_name(self, name: str) -> Optional[DocumentModel]:
|
||||
try:
|
||||
document = Document.get(Document.name == name)
|
||||
return DocumentModel(**model_to_dict(document))
|
||||
except:
|
||||
return None
|
||||
|
||||
def get_docs(self) -> List[DocumentModel]:
|
||||
return [
|
||||
DocumentModel(**model_to_dict(doc))
|
||||
for doc in Document.select()
|
||||
# .limit(limit).offset(skip)
|
||||
]
|
||||
|
||||
def update_doc_by_name(
|
||||
self, name: str, form_data: DocumentUpdateForm
|
||||
) -> Optional[DocumentModel]:
|
||||
try:
|
||||
query = Document.update(
|
||||
title=form_data.title,
|
||||
name=form_data.name,
|
||||
timestamp=int(time.time()),
|
||||
).where(Document.name == name)
|
||||
query.execute()
|
||||
|
||||
doc = Document.get(Document.name == form_data.name)
|
||||
return DocumentModel(**model_to_dict(doc))
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
def update_doc_content_by_name(
|
||||
self, name: str, updated: dict
|
||||
) -> Optional[DocumentModel]:
|
||||
try:
|
||||
doc = self.get_doc_by_name(name)
|
||||
doc_content = json.loads(doc.content if doc.content else "{}")
|
||||
doc_content = {**doc_content, **updated}
|
||||
|
||||
query = Document.update(
|
||||
content=json.dumps(doc_content),
|
||||
timestamp=int(time.time()),
|
||||
).where(Document.name == name)
|
||||
query.execute()
|
||||
|
||||
doc = Document.get(Document.name == name)
|
||||
return DocumentModel(**model_to_dict(doc))
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
def delete_doc_by_name(self, name: str) -> bool:
|
||||
try:
|
||||
query = Document.delete().where((Document.name == name))
|
||||
query.execute() # Remove the rows, return number of rows removed.
|
||||
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
|
||||
Documents = DocumentsTable(DB)
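A short usage sketch for the new `Documents` table; the ids, names, and collection value below are illustrative, not taken from the repository: register a document and then merge tag metadata into its JSON `content` field.

```python
doc = Documents.insert_new_doc(
    "user-123",  # hypothetical user id
    DocumentForm(
        name="project-notes",
        title="Project Notes.md",
        collection_name="0f3a...",  # sha256-derived collection name, truncated
        filename="Project Notes.md",
        content=None,
    ),
)

# Later, attach tags; the helper merges the dict into the stored JSON content.
Documents.update_doc_content_by_name("project-notes", {"tags": [{"name": "notes"}]})
```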
|
backend/apps/web/models/tags.py (new file, 206 lines)
|
@ -0,0 +1,206 @@
|
|||
from pydantic import BaseModel
|
||||
from typing import List, Union, Optional
|
||||
from peewee import *
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
import json
|
||||
import uuid
|
||||
import time
|
||||
|
||||
from apps.web.internal.db import DB
|
||||
|
||||
####################
|
||||
# Tag DB Schema
|
||||
####################
|
||||
|
||||
|
||||
class Tag(Model):
|
||||
id = CharField(unique=True)
|
||||
name = CharField()
|
||||
user_id = CharField()
|
||||
data = TextField(null=True)
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
||||
|
||||
class ChatIdTag(Model):
|
||||
id = CharField(unique=True)
|
||||
tag_name = CharField()
|
||||
chat_id = CharField()
|
||||
user_id = CharField()
|
||||
timestamp = DateField()
|
||||
|
||||
class Meta:
|
||||
database = DB
|
||||
|
||||
|
||||
class TagModel(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
user_id: str
|
||||
data: Optional[str] = None
|
||||
|
||||
|
||||
class ChatIdTagModel(BaseModel):
|
||||
id: str
|
||||
tag_name: str
|
||||
chat_id: str
|
||||
user_id: str
|
||||
timestamp: int
|
||||
|
||||
|
||||
####################
|
||||
# Forms
|
||||
####################
|
||||
|
||||
|
||||
class ChatIdTagForm(BaseModel):
|
||||
tag_name: str
|
||||
chat_id: str
|
||||
|
||||
|
||||
class TagChatIdsResponse(BaseModel):
|
||||
chat_ids: List[str]
|
||||
|
||||
|
||||
class ChatTagsResponse(BaseModel):
|
||||
tags: List[str]
|
||||
|
||||
|
||||
class TagTable:
|
||||
def __init__(self, db):
|
||||
self.db = db
|
||||
db.create_tables([Tag, ChatIdTag])
|
||||
|
||||
def insert_new_tag(self, name: str, user_id: str) -> Optional[TagModel]:
|
||||
id = str(uuid.uuid4())
|
||||
tag = TagModel(**{"id": id, "user_id": user_id, "name": name})
|
||||
try:
|
||||
result = Tag.create(**tag.model_dump())
|
||||
if result:
|
||||
return tag
|
||||
else:
|
||||
return None
|
||||
except Exception as e:
|
||||
return None
|
||||
|
||||
def get_tag_by_name_and_user_id(
|
||||
self, name: str, user_id: str
|
||||
) -> Optional[TagModel]:
|
||||
try:
|
||||
tag = Tag.get(Tag.name == name, Tag.user_id == user_id)
|
||||
return TagModel(**model_to_dict(tag))
|
||||
except Exception as e:
|
||||
return None
|
||||
|
||||
def add_tag_to_chat(
|
||||
self, user_id: str, form_data: ChatIdTagForm
|
||||
) -> Optional[ChatIdTagModel]:
|
||||
tag = self.get_tag_by_name_and_user_id(form_data.tag_name, user_id)
|
||||
if tag == None:
|
||||
tag = self.insert_new_tag(form_data.tag_name, user_id)
|
||||
|
||||
id = str(uuid.uuid4())
|
||||
chatIdTag = ChatIdTagModel(
|
||||
**{
|
||||
"id": id,
|
||||
"user_id": user_id,
|
||||
"chat_id": form_data.chat_id,
|
||||
"tag_name": tag.name,
|
||||
"timestamp": int(time.time()),
|
||||
}
|
||||
)
|
||||
try:
|
||||
result = ChatIdTag.create(**chatIdTag.model_dump())
|
||||
if result:
|
||||
return chatIdTag
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
return None
|
||||
|
||||
def get_tags_by_user_id(self, user_id: str) -> List[TagModel]:
|
||||
tag_names = [
|
||||
ChatIdTagModel(**model_to_dict(chat_id_tag)).tag_name
|
||||
for chat_id_tag in ChatIdTag.select()
|
||||
.where(ChatIdTag.user_id == user_id)
|
||||
.order_by(ChatIdTag.timestamp.desc())
|
||||
]
|
||||
|
||||
return [
|
||||
TagModel(**model_to_dict(tag))
|
||||
for tag in Tag.select().where(Tag.name.in_(tag_names))
|
||||
]
|
||||
|
||||
def get_tags_by_chat_id_and_user_id(
|
||||
self, chat_id: str, user_id: str
|
||||
) -> List[TagModel]:
|
||||
tag_names = [
|
||||
ChatIdTagModel(**model_to_dict(chat_id_tag)).tag_name
|
||||
for chat_id_tag in ChatIdTag.select()
|
||||
.where((ChatIdTag.user_id == user_id) & (ChatIdTag.chat_id == chat_id))
|
||||
.order_by(ChatIdTag.timestamp.desc())
|
||||
]
|
||||
|
||||
return [
|
||||
TagModel(**model_to_dict(tag))
|
||||
for tag in Tag.select().where(Tag.name.in_(tag_names))
|
||||
]
|
||||
|
||||
def get_chat_ids_by_tag_name_and_user_id(
|
||||
self, tag_name: str, user_id: str
|
||||
) -> Optional[ChatIdTagModel]:
|
||||
return [
|
||||
ChatIdTagModel(**model_to_dict(chat_id_tag))
|
||||
for chat_id_tag in ChatIdTag.select()
|
||||
.where((ChatIdTag.user_id == user_id) & (ChatIdTag.tag_name == tag_name))
|
||||
.order_by(ChatIdTag.timestamp.desc())
|
||||
]
|
||||
|
||||
def count_chat_ids_by_tag_name_and_user_id(
|
||||
self, tag_name: str, user_id: str
|
||||
) -> int:
|
||||
return (
|
||||
ChatIdTag.select()
|
||||
.where((ChatIdTag.tag_name == tag_name) & (ChatIdTag.user_id == user_id))
|
||||
.count()
|
||||
)
|
||||
|
||||
def delete_tag_by_tag_name_and_chat_id_and_user_id(
|
||||
self, tag_name: str, chat_id: str, user_id: str
|
||||
) -> bool:
|
||||
try:
|
||||
query = ChatIdTag.delete().where(
|
||||
(ChatIdTag.tag_name == tag_name)
|
||||
& (ChatIdTag.chat_id == chat_id)
|
||||
& (ChatIdTag.user_id == user_id)
|
||||
)
|
||||
res = query.execute() # Remove the rows, return number of rows removed.
|
||||
print(res)
|
||||
|
||||
tag_count = self.count_chat_ids_by_tag_name_and_user_id(tag_name, user_id)
|
||||
if tag_count == 0:
|
||||
# Remove tag item from Tag col as well
|
||||
query = Tag.delete().where(
|
||||
(Tag.name == tag_name) & (Tag.user_id == user_id)
|
||||
)
|
||||
query.execute() # Remove the rows, return number of rows removed.
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
print("delete_tag", e)
|
||||
return False
|
||||
|
||||
def delete_tags_by_chat_id_and_user_id(self, chat_id: str, user_id: str) -> bool:
|
||||
tags = self.get_tags_by_chat_id_and_user_id(chat_id, user_id)
|
||||
|
||||
for tag in tags:
|
||||
self.delete_tag_by_tag_name_and_chat_id_and_user_id(
|
||||
tag.tag_name, chat_id, user_id
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
Tags = TagTable(DB)
|
|
@ -65,7 +65,7 @@ class UsersTable:
|
|||
"name": name,
|
||||
"email": email,
|
||||
"role": role,
|
||||
"profile_image_url": get_gravatar_url(email),
|
||||
"profile_image_url": "/user.png",
|
||||
"timestamp": int(time.time()),
|
||||
}
|
||||
)
|
||||
|
@ -92,7 +92,8 @@ class UsersTable:
|
|||
def get_users(self, skip: int = 0, limit: int = 50) -> List[UserModel]:
|
||||
return [
|
||||
UserModel(**model_to_dict(user))
|
||||
for user in User.select().limit(limit).offset(skip)
|
||||
for user in User.select()
|
||||
# .limit(limit).offset(skip)
|
||||
]
|
||||
|
||||
def get_num_users(self) -> Optional[int]:
|
||||
|
@ -108,6 +109,20 @@ class UsersTable:
|
|||
except:
|
||||
return None
|
||||
|
||||
def update_user_profile_image_url_by_id(
|
||||
self, id: str, profile_image_url: str
|
||||
) -> Optional[UserModel]:
|
||||
try:
|
||||
query = User.update(profile_image_url=profile_image_url).where(
|
||||
User.id == id
|
||||
)
|
||||
query.execute()
|
||||
|
||||
user = User.get(User.id == id)
|
||||
return UserModel(**model_to_dict(user))
|
||||
except:
|
||||
return None
|
||||
|
||||
def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]:
|
||||
try:
|
||||
query = User.update(**updated).where(User.id == id)
|
||||
|
|
|
@ -3,14 +3,16 @@ from fastapi import Depends, FastAPI, HTTPException, status
|
|||
from datetime import datetime, timedelta
|
||||
from typing import List, Union
|
||||
|
||||
from fastapi import APIRouter
|
||||
from fastapi import APIRouter, status
|
||||
from pydantic import BaseModel
|
||||
import time
|
||||
import uuid
|
||||
import re
|
||||
|
||||
from apps.web.models.auths import (
|
||||
SigninForm,
|
||||
SignupForm,
|
||||
UpdateProfileForm,
|
||||
UpdatePasswordForm,
|
||||
UserResponse,
|
||||
SigninResponse,
|
||||
|
@ -18,8 +20,13 @@ from apps.web.models.auths import (
|
|||
)
|
||||
from apps.web.models.users import Users
|
||||
|
||||
from utils.utils import get_password_hash, get_current_user, create_token
|
||||
from utils.misc import get_gravatar_url, validate_email_format
|
||||
from utils.utils import (
|
||||
get_password_hash,
|
||||
get_current_user,
|
||||
get_admin_user,
|
||||
create_token,
|
||||
)
|
||||
from utils.misc import parse_duration, validate_email_format
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
@ -40,14 +47,37 @@ async def get_session_user(user=Depends(get_current_user)):
|
|||
}
|
||||
|
||||
|
||||
############################
|
||||
# Update Profile
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/update/profile", response_model=UserResponse)
|
||||
async def update_profile(
|
||||
form_data: UpdateProfileForm, session_user=Depends(get_current_user)
|
||||
):
|
||||
if session_user:
|
||||
user = Users.update_user_by_id(
|
||||
session_user.id,
|
||||
{"profile_image_url": form_data.profile_image_url, "name": form_data.name},
|
||||
)
|
||||
if user:
|
||||
return user
|
||||
else:
|
||||
raise HTTPException(400, detail=ERROR_MESSAGES.DEFAULT())
|
||||
else:
|
||||
raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
|
||||
|
||||
|
||||
############################
|
||||
# Update Password
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/update/password", response_model=bool)
|
||||
async def update_password(form_data: UpdatePasswordForm,
|
||||
session_user=Depends(get_current_user)):
|
||||
async def update_password(
|
||||
form_data: UpdatePasswordForm, session_user=Depends(get_current_user)
|
||||
):
|
||||
if session_user:
|
||||
user = Auths.authenticate_user(session_user.email, form_data.password)
|
||||
|
||||
|
@ -66,10 +96,13 @@ async def update_password(form_data: UpdatePasswordForm,
|
|||
|
||||
|
||||
@router.post("/signin", response_model=SigninResponse)
|
||||
async def signin(form_data: SigninForm):
|
||||
async def signin(request: Request, form_data: SigninForm):
|
||||
user = Auths.authenticate_user(form_data.email.lower(), form_data.password)
|
||||
if user:
|
||||
token = create_token(data={"email": user.email})
|
||||
token = create_token(
|
||||
data={"id": user.id},
|
||||
expires_delta=parse_duration(request.app.state.JWT_EXPIRES_IN),
|
||||
)
|
||||
|
||||
return {
|
||||
"token": token,
|
||||
|
@ -91,41 +124,50 @@ async def signin(form_data: SigninForm):
|
|||
|
||||
@router.post("/signup", response_model=SigninResponse)
|
||||
async def signup(request: Request, form_data: SignupForm):
|
||||
if request.app.state.ENABLE_SIGNUP:
|
||||
if validate_email_format(form_data.email.lower()):
|
||||
if not Users.get_user_by_email(form_data.email.lower()):
|
||||
try:
|
||||
role = "admin" if Users.get_num_users() == 0 else "pending"
|
||||
hashed = get_password_hash(form_data.password)
|
||||
user = Auths.insert_new_auth(form_data.email.lower(),
|
||||
hashed, form_data.name, role)
|
||||
if not request.app.state.ENABLE_SIGNUP:
|
||||
raise HTTPException(
|
||||
status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
|
||||
)
|
||||
|
||||
if user:
|
||||
token = create_token(data={"email": user.email})
|
||||
# response.set_cookie(key='token', value=token, httponly=True)
|
||||
if not validate_email_format(form_data.email.lower()):
|
||||
raise HTTPException(
|
||||
status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT
|
||||
)
|
||||
|
||||
return {
|
||||
"token": token,
|
||||
"token_type": "Bearer",
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
"profile_image_url": user.profile_image_url,
|
||||
}
|
||||
else:
|
||||
raise HTTPException(
|
||||
500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
|
||||
except Exception as err:
|
||||
raise HTTPException(500,
|
||||
detail=ERROR_MESSAGES.DEFAULT(err))
|
||||
else:
|
||||
raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
|
||||
if Users.get_user_by_email(form_data.email.lower()):
|
||||
raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
|
||||
|
||||
try:
|
||||
role = (
|
||||
"admin"
|
||||
if Users.get_num_users() == 0
|
||||
else request.app.state.DEFAULT_USER_ROLE
|
||||
)
|
||||
hashed = get_password_hash(form_data.password)
|
||||
user = Auths.insert_new_auth(
|
||||
form_data.email.lower(), hashed, form_data.name, role
|
||||
)
|
||||
|
||||
if user:
|
||||
token = create_token(
|
||||
data={"id": user.id},
|
||||
expires_delta=parse_duration(request.app.state.JWT_EXPIRES_IN),
|
||||
)
|
||||
# response.set_cookie(key='token', value=token, httponly=True)
|
||||
|
||||
return {
|
||||
"token": token,
|
||||
"token_type": "Bearer",
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
"profile_image_url": user.profile_image_url,
|
||||
}
|
||||
else:
|
||||
raise HTTPException(400,
|
||||
detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT)
|
||||
else:
|
||||
raise HTTPException(400, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
|
||||
raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
|
||||
except Exception as err:
|
||||
raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err))
|
||||
|
||||
|
||||
############################
|
||||
|
@ -134,23 +176,64 @@ async def signup(request: Request, form_data: SignupForm):
|
|||
|
||||
|
||||
@router.get("/signup/enabled", response_model=bool)
|
||||
async def get_sign_up_status(request: Request, user=Depends(get_current_user)):
|
||||
if user.role == "admin":
|
||||
return request.app.state.ENABLE_SIGNUP
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
async def get_sign_up_status(request: Request, user=Depends(get_admin_user)):
|
||||
return request.app.state.ENABLE_SIGNUP
|
||||
|
||||
|
||||
@router.get("/signup/enabled/toggle", response_model=bool)
|
||||
async def toggle_sign_up(request: Request, user=Depends(get_current_user)):
|
||||
if user.role == "admin":
|
||||
request.app.state.ENABLE_SIGNUP = not request.app.state.ENABLE_SIGNUP
|
||||
return request.app.state.ENABLE_SIGNUP
|
||||
async def toggle_sign_up(request: Request, user=Depends(get_admin_user)):
|
||||
request.app.state.ENABLE_SIGNUP = not request.app.state.ENABLE_SIGNUP
|
||||
return request.app.state.ENABLE_SIGNUP
|
||||
|
||||
|
||||
############################
|
||||
# Default User Role
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/signup/user/role")
|
||||
async def get_default_user_role(request: Request, user=Depends(get_admin_user)):
|
||||
return request.app.state.DEFAULT_USER_ROLE
|
||||
|
||||
|
||||
class UpdateRoleForm(BaseModel):
|
||||
role: str
|
||||
|
||||
|
||||
@router.post("/signup/user/role")
|
||||
async def update_default_user_role(
|
||||
request: Request, form_data: UpdateRoleForm, user=Depends(get_admin_user)
|
||||
):
|
||||
if form_data.role in ["pending", "user", "admin"]:
|
||||
request.app.state.DEFAULT_USER_ROLE = form_data.role
|
||||
return request.app.state.DEFAULT_USER_ROLE
|
||||
|
||||
|
||||
############################
|
||||
# JWT Expiration
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/token/expires")
|
||||
async def get_token_expires_duration(request: Request, user=Depends(get_admin_user)):
|
||||
return request.app.state.JWT_EXPIRES_IN
|
||||
|
||||
|
||||
class UpdateJWTExpiresDurationForm(BaseModel):
|
||||
duration: str
|
||||
|
||||
|
||||
@router.post("/token/expires/update")
|
||||
async def update_token_expires_duration(
|
||||
request: Request,
|
||||
form_data: UpdateJWTExpiresDurationForm,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
pattern = r"^(-1|0|(-?\d+(\.\d+)?)(ms|s|m|h|d|w))$"
|
||||
|
||||
# Check if the input string matches the pattern
|
||||
if re.match(pattern, form_data.duration):
|
||||
request.app.state.JWT_EXPIRES_IN = form_data.duration
|
||||
return request.app.state.JWT_EXPIRES_IN
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
return request.app.state.JWT_EXPIRES_IN
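The accepted duration strings ("-1", "0", or a number with an ms/s/m/h/d/w suffix) are handed to `parse_duration` from `utils.misc` when tokens are created at signin and signup. That helper is not shown in this diff; the following is only a minimal sketch of the behaviour implied by the regex, assuming "-1" and "0" mean a non-expiring token:

```python
import re
from datetime import timedelta
from typing import Optional


def parse_duration(duration: str) -> Optional[timedelta]:
    # Sketch only: "-1" / "0" -> no expiry (None); otherwise number + unit.
    if duration in ("-1", "0"):
        return None
    match = re.fullmatch(r"(-?\d+(?:\.\d+)?)(ms|s|m|h|d|w)", duration)
    if not match:
        raise ValueError(f"invalid duration: {duration}")
    value, unit = float(match.group(1)), match.group(2)
    factors = {"ms": 0.001, "s": 1, "m": 60, "h": 3600, "d": 86400, "w": 604800}
    return timedelta(seconds=value * factors[unit])


# e.g. parse_duration("30m") -> timedelta(seconds=1800)
```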
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from fastapi import Depends, Request, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
from utils.utils import get_current_user
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
|
@ -16,8 +16,15 @@ from apps.web.models.chats import (
|
|||
Chats,
|
||||
)
|
||||
|
||||
from utils.utils import (
|
||||
bearer_scheme, )
|
||||
|
||||
from apps.web.models.tags import (
|
||||
TagModel,
|
||||
ChatIdTagModel,
|
||||
ChatIdTagForm,
|
||||
ChatTagsResponse,
|
||||
Tags,
|
||||
)
|
||||
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
@ -29,7 +36,8 @@ router = APIRouter()
|
|||
|
||||
@router.get("/", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chats(
|
||||
user=Depends(get_current_user), skip: int = 0, limit: int = 50):
|
||||
user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
|
||||
|
||||
|
||||
|
@ -41,9 +49,21 @@ async def get_user_chats(
|
|||
@router.get("/all", response_model=List[ChatResponse])
|
||||
async def get_all_user_chats(user=Depends(get_current_user)):
|
||||
return [
|
||||
ChatResponse(**{
|
||||
**chat.model_dump(), "chat": json.loads(chat.chat)
|
||||
}) for chat in Chats.get_all_chats_by_user_id(user.id)
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
for chat in Chats.get_all_chats_by_user_id(user.id)
|
||||
]
|
||||
|
||||
|
||||
############################
|
||||
# GetAllChatsInDB
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/all/db", response_model=List[ChatResponse])
|
||||
async def get_all_user_chats_in_db(user=Depends(get_admin_user)):
|
||||
return [
|
||||
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
for chat in Chats.get_all_chats()
|
||||
]
|
||||
|
||||
|
||||
|
@ -54,8 +74,50 @@ async def get_all_user_chats(user=Depends(get_current_user)):
|
|||
|
||||
@router.post("/new", response_model=Optional[ChatResponse])
|
||||
async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
|
||||
chat = Chats.insert_new_chat(user.id, form_data)
|
||||
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
try:
|
||||
chat = Chats.insert_new_chat(user.id, form_data)
|
||||
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetAllTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/all", response_model=List[TagModel])
|
||||
async def get_all_tags(user=Depends(get_current_user)):
|
||||
try:
|
||||
tags = Tags.get_tags_by_user_id(user.id)
|
||||
return tags
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetChatsByTags
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/tags/tag/{tag_name}", response_model=List[ChatTitleIdResponse])
|
||||
async def get_user_chats_by_tag_name(
|
||||
tag_name: str, user=Depends(get_current_user), skip: int = 0, limit: int = 50
|
||||
):
|
||||
chat_ids = [
|
||||
chat_id_tag.chat_id
|
||||
for chat_id_tag in Tags.get_chat_ids_by_tag_name_and_user_id(tag_name, user.id)
|
||||
]
|
||||
|
||||
print(chat_ids)
|
||||
|
||||
return Chats.get_chat_lists_by_chat_ids(chat_ids, skip, limit)
|
||||
|
||||
|
||||
############################
|
||||
|
@ -68,12 +130,11 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
|
|||
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
|
||||
|
||||
if chat:
|
||||
return ChatResponse(**{
|
||||
**chat.model_dump(), "chat": json.loads(chat.chat)
|
||||
})
|
||||
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
else:
|
||||
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.NOT_FOUND)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
|
@ -82,17 +143,15 @@ async def get_chat_by_id(id: str, user=Depends(get_current_user)):
|
|||
|
||||
|
||||
@router.post("/{id}", response_model=Optional[ChatResponse])
|
||||
async def update_chat_by_id(id: str,
|
||||
form_data: ChatForm,
|
||||
user=Depends(get_current_user)):
|
||||
async def update_chat_by_id(
|
||||
id: str, form_data: ChatForm, user=Depends(get_current_user)
|
||||
):
|
||||
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
|
||||
if chat:
|
||||
updated_chat = {**json.loads(chat.chat), **form_data.chat}
|
||||
|
||||
chat = Chats.update_chat_by_id(id, updated_chat)
|
||||
return ChatResponse(**{
|
||||
**chat.model_dump(), "chat": json.loads(chat.chat)
|
||||
})
|
||||
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
|
@ -106,11 +165,103 @@ async def update_chat_by_id(id: str,
|
|||
|
||||
|
||||
@router.delete("/{id}", response_model=bool)
|
||||
async def delete_chat_by_id(id: str, user=Depends(get_current_user)):
|
||||
async def delete_chat_by_id(request: Request, id: str, user=Depends(get_current_user)):
|
||||
|
||||
if (
|
||||
user.role == "user"
|
||||
and not request.app.state.USER_PERMISSIONS["chat"]["deletion"]
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
result = Chats.delete_chat_by_id_and_user_id(id, user.id)
|
||||
return result
|
||||
|
||||
|
||||
############################
|
||||
# GetChatTagsById
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/{id}/tags", response_model=List[TagModel])
|
||||
async def get_chat_tags_by_id(id: str, user=Depends(get_current_user)):
|
||||
tags = Tags.get_tags_by_chat_id_and_user_id(id, user.id)
|
||||
|
||||
if tags != None:
|
||||
return tags
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# AddChatTagById
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/{id}/tags", response_model=Optional[ChatIdTagModel])
|
||||
async def add_chat_tag_by_id(
|
||||
id: str, form_data: ChatIdTagForm, user=Depends(get_current_user)
|
||||
):
|
||||
tags = Tags.get_tags_by_chat_id_and_user_id(id, user.id)
|
||||
|
||||
if form_data.tag_name not in tags:
|
||||
tag = Tags.add_tag_to_chat(user.id, form_data)
|
||||
|
||||
if tag:
|
||||
return tag
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.NOT_FOUND,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.DEFAULT()
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteChatTagById
|
||||
############################
|
||||
|
||||
|
||||
@router.delete("/{id}/tags", response_model=Optional[bool])
|
||||
async def delete_chat_tag_by_id(
|
||||
id: str, form_data: ChatIdTagForm, user=Depends(get_current_user)
|
||||
):
|
||||
result = Tags.delete_tag_by_tag_name_and_chat_id_and_user_id(
|
||||
form_data.tag_name, id, user.id
|
||||
)
|
||||
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteAllChatTagsById
|
||||
############################
|
||||
|
||||
|
||||
@router.delete("/{id}/tags/all", response_model=Optional[bool])
|
||||
async def delete_all_chat_tags_by_id(id: str, user=Depends(get_current_user)):
|
||||
result = Tags.delete_tags_by_chat_id_and_user_id(id, user.id)
|
||||
|
||||
if result:
|
||||
return result
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteAllChats
|
||||
############################
|
||||
|
|
|
@ -10,7 +10,7 @@ import uuid
|
|||
|
||||
from apps.web.models.users import Users
|
||||
|
||||
from utils.utils import get_password_hash, get_current_user, create_token
|
||||
from utils.utils import get_password_hash, get_current_user, get_admin_user, create_token
|
||||
from utils.misc import get_gravatar_url, validate_email_format
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
|
@ -21,20 +21,35 @@ class SetDefaultModelsForm(BaseModel):
|
|||
models: str
|
||||
|
||||
|
||||
class PromptSuggestion(BaseModel):
|
||||
title: List[str]
|
||||
content: str
|
||||
|
||||
|
||||
class SetDefaultSuggestionsForm(BaseModel):
|
||||
suggestions: List[PromptSuggestion]
|
||||
|
||||
|
||||
############################
|
||||
# SetDefaultModels
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/default/models", response_model=str)
|
||||
async def set_global_default_models(request: Request,
|
||||
form_data: SetDefaultModelsForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user.role == "admin":
|
||||
request.app.state.DEFAULT_MODELS = form_data.models
|
||||
return request.app.state.DEFAULT_MODELS
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
async def set_global_default_models(
|
||||
request: Request, form_data: SetDefaultModelsForm, user=Depends(get_admin_user)
|
||||
):
|
||||
request.app.state.DEFAULT_MODELS = form_data.models
|
||||
return request.app.state.DEFAULT_MODELS
|
||||
|
||||
|
||||
|
||||
@router.post("/default/suggestions", response_model=List[PromptSuggestion])
|
||||
async def set_global_default_suggestions(
|
||||
request: Request,
|
||||
form_data: SetDefaultSuggestionsForm,
|
||||
user=Depends(get_admin_user),
|
||||
):
|
||||
data = form_data.model_dump()
|
||||
request.app.state.DEFAULT_PROMPT_SUGGESTIONS = data["suggestions"]
|
||||
return request.app.state.DEFAULT_PROMPT_SUGGESTIONS
|
||||
|
|
158
backend/apps/web/routers/documents.py
Normal file
158
backend/apps/web/routers/documents.py
Normal file
|
@ -0,0 +1,158 @@
|
|||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
|
||||
from fastapi import APIRouter
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
|
||||
from apps.web.models.documents import (
|
||||
Documents,
|
||||
DocumentForm,
|
||||
DocumentUpdateForm,
|
||||
DocumentModel,
|
||||
DocumentResponse,
|
||||
)
|
||||
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
############################
|
||||
# GetDocuments
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/", response_model=List[DocumentResponse])
|
||||
async def get_documents(user=Depends(get_current_user)):
|
||||
docs = [
|
||||
DocumentResponse(
|
||||
**{
|
||||
**doc.model_dump(),
|
||||
"content": json.loads(doc.content if doc.content else "{}"),
|
||||
}
|
||||
)
|
||||
for doc in Documents.get_docs()
|
||||
]
|
||||
return docs
|
||||
|
||||
|
||||
############################
|
||||
# CreateNewDoc
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/create", response_model=Optional[DocumentResponse])
|
||||
async def create_new_doc(form_data: DocumentForm, user=Depends(get_admin_user)):
|
||||
doc = Documents.get_doc_by_name(form_data.name)
|
||||
if doc == None:
|
||||
doc = Documents.insert_new_doc(user.id, form_data)
|
||||
|
||||
if doc:
|
||||
return DocumentResponse(
|
||||
**{
|
||||
**doc.model_dump(),
|
||||
"content": json.loads(doc.content if doc.content else "{}"),
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.FILE_EXISTS,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.NAME_TAG_TAKEN,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetDocByName
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/name/{name}", response_model=Optional[DocumentResponse])
|
||||
async def get_doc_by_name(name: str, user=Depends(get_current_user)):
|
||||
doc = Documents.get_doc_by_name(name)
|
||||
|
||||
if doc:
|
||||
return DocumentResponse(
|
||||
**{
|
||||
**doc.model_dump(),
|
||||
"content": json.loads(doc.content if doc.content else "{}"),
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.NOT_FOUND,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# TagDocByName
|
||||
############################
|
||||
|
||||
|
||||
class TagItem(BaseModel):
|
||||
name: str
|
||||
|
||||
|
||||
class TagDocumentForm(BaseModel):
|
||||
name: str
|
||||
tags: List[dict]
|
||||
|
||||
|
||||
@router.post("/name/{name}/tags", response_model=Optional[DocumentResponse])
|
||||
async def tag_doc_by_name(form_data: TagDocumentForm, user=Depends(get_current_user)):
|
||||
doc = Documents.update_doc_content_by_name(form_data.name, {"tags": form_data.tags})
|
||||
|
||||
if doc:
|
||||
return DocumentResponse(
|
||||
**{
|
||||
**doc.model_dump(),
|
||||
"content": json.loads(doc.content if doc.content else "{}"),
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.NOT_FOUND,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# UpdateDocByName
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/name/{name}/update", response_model=Optional[DocumentResponse])
|
||||
async def update_doc_by_name(
|
||||
name: str, form_data: DocumentUpdateForm, user=Depends(get_admin_user)
|
||||
):
|
||||
doc = Documents.update_doc_by_name(name, form_data)
|
||||
if doc:
|
||||
return DocumentResponse(
|
||||
**{
|
||||
**doc.model_dump(),
|
||||
"content": json.loads(doc.content if doc.content else "{}"),
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.NAME_TAG_TAKEN,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteDocByName
|
||||
############################
|
||||
|
||||
|
||||
@router.delete("/name/{name}/delete", response_model=bool)
|
||||
async def delete_doc_by_name(name: str, user=Depends(get_admin_user)):
|
||||
result = Documents.delete_doc_by_name(name)
|
||||
return result
|
|
@ -13,7 +13,7 @@ from apps.web.models.modelfiles import (
|
|||
ModelfileResponse,
|
||||
)
|
||||
|
||||
from utils.utils import get_current_user
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
@ -37,13 +37,7 @@ async def get_modelfiles(skip: int = 0,
|
|||
|
||||
@router.post("/create", response_model=Optional[ModelfileResponse])
|
||||
async def create_new_modelfile(form_data: ModelfileForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
user=Depends(get_admin_user)):
|
||||
modelfile = Modelfiles.insert_new_modelfile(user.id, form_data)
|
||||
|
||||
if modelfile:
|
||||
|
@ -91,12 +85,7 @@ async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
|
|||
|
||||
@router.post("/update", response_model=Optional[ModelfileResponse])
|
||||
async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
user=Depends(get_admin_user)):
|
||||
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
|
||||
if modelfile:
|
||||
updated_modelfile = {
|
||||
|
@ -127,12 +116,6 @@ async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm,
|
|||
|
||||
@router.delete("/delete", response_model=bool)
|
||||
async def delete_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
|
||||
user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
user=Depends(get_admin_user)):
|
||||
result = Modelfiles.delete_modelfile_by_tag_name(form_data.tag_name)
|
||||
return result
|
||||
|
|
|
@ -8,7 +8,7 @@ import json
|
|||
|
||||
from apps.web.models.prompts import Prompts, PromptForm, PromptModel
|
||||
|
||||
from utils.utils import get_current_user
|
||||
from utils.utils import get_current_user, get_admin_user
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
@ -29,29 +29,21 @@ async def get_prompts(user=Depends(get_current_user)):
|
|||
|
||||
|
||||
@router.post("/create", response_model=Optional[PromptModel])
|
||||
async def create_new_prompt(form_data: PromptForm, user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
async def create_new_prompt(form_data: PromptForm, user=Depends(get_admin_user)):
|
||||
prompt = Prompts.get_prompt_by_command(form_data.command)
|
||||
if prompt == None:
|
||||
prompt = Prompts.insert_new_prompt(user.id, form_data)
|
||||
|
||||
if prompt:
|
||||
return prompt
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.DEFAULT(),
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.COMMAND_TAKEN,
|
||||
detail=ERROR_MESSAGES.DEFAULT(),
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.COMMAND_TAKEN,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
|
@ -79,14 +71,8 @@ async def get_prompt_by_command(command: str, user=Depends(get_current_user)):
|
|||
|
||||
@router.post("/command/{command}/update", response_model=Optional[PromptModel])
|
||||
async def update_prompt_by_command(
|
||||
command: str, form_data: PromptForm, user=Depends(get_current_user)
|
||||
command: str, form_data: PromptForm, user=Depends(get_admin_user)
|
||||
):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
prompt = Prompts.update_prompt_by_command(f"/{command}", form_data)
|
||||
if prompt:
|
||||
return prompt
|
||||
|
@ -103,12 +89,6 @@ async def update_prompt_by_command(
|
|||
|
||||
|
||||
@router.delete("/command/{command}/delete", response_model=bool)
|
||||
async def delete_prompt_by_command(command: str, user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
async def delete_prompt_by_command(command: str, user=Depends(get_admin_user)):
|
||||
result = Prompts.delete_prompt_by_command(f"/{command}")
|
||||
return result
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from fastapi import Response
|
||||
from fastapi import Response, Request
|
||||
from fastapi import Depends, FastAPI, HTTPException, status
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Union, Optional
|
||||
|
@ -11,7 +11,7 @@ import uuid
|
|||
from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
|
||||
from apps.web.models.auths import Auths
|
||||
|
||||
from utils.utils import get_current_user, get_password_hash
|
||||
from utils.utils import get_current_user, get_password_hash, get_admin_user
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
router = APIRouter()
|
||||
|
@ -22,37 +22,43 @@ router = APIRouter()
|
|||
|
||||
|
||||
@router.get("/", response_model=List[UserModel])
|
||||
async def get_users(skip: int = 0, limit: int = 50, user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
async def get_users(skip: int = 0, limit: int = 50, user=Depends(get_admin_user)):
|
||||
return Users.get_users(skip, limit)
|
||||
|
||||
|
||||
############################
|
||||
# User Permissions
|
||||
############################
|
||||
|
||||
|
||||
@router.get("/permissions/user")
|
||||
async def get_user_permissions(request: Request, user=Depends(get_admin_user)):
|
||||
return request.app.state.USER_PERMISSIONS
|
||||
|
||||
|
||||
@router.post("/permissions/user")
|
||||
async def update_user_permissions(
|
||||
request: Request, form_data: dict, user=Depends(get_admin_user)
|
||||
):
|
||||
request.app.state.USER_PERMISSIONS = form_data
|
||||
return request.app.state.USER_PERMISSIONS
|
||||
|
||||
|
||||
############################
|
||||
# UpdateUserRole
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/update/role", response_model=Optional[UserModel])
|
||||
async def update_user_role(
|
||||
form_data: UserRoleUpdateForm, user=Depends(get_current_user)
|
||||
):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
async def update_user_role(form_data: UserRoleUpdateForm, user=Depends(get_admin_user)):
|
||||
|
||||
if user.id != form_data.id:
|
||||
return Users.update_user_role_by_id(form_data.id, form_data.role)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
|
@ -62,14 +68,8 @@ async def update_user_role(
|
|||
|
||||
@router.post("/{user_id}/update", response_model=Optional[UserModel])
|
||||
async def update_user_by_id(
|
||||
user_id: str, form_data: UserUpdateForm, session_user=Depends(get_current_user)
|
||||
user_id: str, form_data: UserUpdateForm, session_user=Depends(get_admin_user)
|
||||
):
|
||||
if session_user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
|
||||
user = Users.get_user_by_id(user_id)
|
||||
|
||||
if user:
|
||||
|
@ -98,18 +98,17 @@ async def update_user_by_id(
|
|||
|
||||
if updated_user:
|
||||
return updated_user
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.DEFAULT(),
|
||||
)
|
||||
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.USER_NOT_FOUND,
|
||||
detail=ERROR_MESSAGES.DEFAULT(),
|
||||
)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=ERROR_MESSAGES.USER_NOT_FOUND,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# DeleteUserById
|
||||
|
@ -117,25 +116,19 @@ async def update_user_by_id(
|
|||
|
||||
|
||||
@router.delete("/{user_id}", response_model=bool)
|
||||
async def delete_user_by_id(user_id: str, user=Depends(get_current_user)):
|
||||
if user.role == "admin":
|
||||
if user.id != user_id:
|
||||
result = Auths.delete_auth_by_id(user_id)
|
||||
async def delete_user_by_id(user_id: str, user=Depends(get_admin_user)):
|
||||
if user.id != user_id:
|
||||
result = Auths.delete_auth_by_id(user_id)
|
||||
|
||||
if result:
|
||||
return True
|
||||
|
||||
if result:
|
||||
return True
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=ERROR_MESSAGES.DELETE_USER_ERROR,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=ERROR_MESSAGES.DELETE_USER_ERROR,
|
||||
)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
|
|
@ -9,9 +9,11 @@ import os
|
|||
import aiohttp
|
||||
import json
|
||||
|
||||
from utils.misc import calculate_sha256
|
||||
from utils.misc import calculate_sha256, get_gravatar_url
|
||||
|
||||
from config import OLLAMA_API_BASE_URL, DATA_DIR, UPLOAD_DIR
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
from config import OLLAMA_API_BASE_URL
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
@ -40,10 +42,7 @@ def parse_huggingface_url(hf_url):
|
|||
return None
|
||||
|
||||
|
||||
async def download_file_stream(url,
|
||||
file_path,
|
||||
file_name,
|
||||
chunk_size=1024 * 1024):
|
||||
async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
|
||||
done = False
|
||||
|
||||
if os.path.exists(file_path):
|
||||
|
@ -57,8 +56,7 @@ async def download_file_stream(url,
|
|||
|
||||
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||
async with session.get(url, headers=headers) as response:
|
||||
total_size = int(response.headers.get("content-length",
|
||||
0)) + current_size
|
||||
total_size = int(response.headers.get("content-length", 0)) + current_size
|
||||
|
||||
with open(file_path, "ab+") as file:
|
||||
async for data in response.content.iter_chunked(chunk_size):
|
||||
|
@ -91,13 +89,14 @@ async def download_file_stream(url,
|
|||
|
||||
|
||||
@router.get("/download")
|
||||
async def download(url: str, ):
|
||||
async def download(
|
||||
url: str,
|
||||
):
|
||||
# url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
|
||||
file_name = parse_huggingface_url(url)
|
||||
|
||||
if file_name:
|
||||
os.makedirs("./uploads", exist_ok=True)
|
||||
file_path = os.path.join("./uploads", f"{file_name}")
|
||||
file_path = f"{UPLOAD_DIR}/{file_name}"
|
||||
|
||||
return StreamingResponse(
|
||||
download_file_stream(url, file_path, file_name),
|
||||
|
@ -108,25 +107,29 @@ async def download(url: str, ):
|
|||
|
||||
|
||||
@router.post("/upload")
|
||||
async def upload(file: UploadFile = File(...)):
|
||||
os.makedirs("./uploads", exist_ok=True)
|
||||
file_path = os.path.join("./uploads", file.filename)
|
||||
def upload(file: UploadFile = File(...)):
|
||||
file_path = f"{UPLOAD_DIR}/{file.filename}"
|
||||
|
||||
async def file_write_stream():
|
||||
total = 0
|
||||
total_size = file.size
|
||||
# Save file in chunks
|
||||
with open(file_path, "wb+") as f:
|
||||
for chunk in file.file:
|
||||
f.write(chunk)
|
||||
|
||||
def file_process_stream():
|
||||
total_size = os.path.getsize(file_path)
|
||||
chunk_size = 1024 * 1024
|
||||
|
||||
done = False
|
||||
try:
|
||||
with open(file_path, "wb+") as f:
|
||||
while True:
|
||||
chunk = file.file.read(chunk_size)
|
||||
with open(file_path, "rb") as f:
|
||||
total = 0
|
||||
done = False
|
||||
|
||||
while not done:
|
||||
chunk = f.read(chunk_size)
|
||||
if not chunk:
|
||||
break
|
||||
f.write(chunk)
|
||||
done = True
|
||||
continue
|
||||
|
||||
total += len(chunk)
|
||||
done = total_size == total
|
||||
progress = round((total / total_size) * 100, 2)
|
||||
|
||||
res = {
|
||||
|
@ -134,7 +137,6 @@ async def upload(file: UploadFile = File(...)):
|
|||
"total": total_size,
|
||||
"completed": total,
|
||||
}
|
||||
|
||||
yield f"data: {json.dumps(res)}\n\n"
|
||||
|
||||
if done:
|
||||
|
@ -152,14 +154,21 @@ async def upload(file: UploadFile = File(...)):
|
|||
"name": file.filename,
|
||||
}
|
||||
os.remove(file_path)
|
||||
|
||||
yield f"data: {json.dumps(res)}\n\n"
|
||||
else:
|
||||
raise "Ollama: Could not create blob, Please try again."
|
||||
raise Exception(
|
||||
"Ollama: Could not create blob, Please try again."
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
res = {"error": str(e)}
|
||||
yield f"data: {json.dumps(res)}\n\n"
|
||||
|
||||
return StreamingResponse(file_write_stream(),
|
||||
media_type="text/event-stream")
|
||||
return StreamingResponse(file_process_stream(), media_type="text/event-stream")
|
||||
|
||||
|
||||
@router.get("/gravatar")
|
||||
async def get_gravatar(
|
||||
email: str,
|
||||
):
|
||||
return get_gravatar_url(email)
|
||||
|
|
|
@ -1,30 +1,28 @@
|
|||
from dotenv import load_dotenv, find_dotenv
|
||||
import os
|
||||
|
||||
|
||||
import chromadb
|
||||
from chromadb import Settings
|
||||
from base64 import b64encode
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from pathlib import Path
|
||||
import json
|
||||
import markdown
|
||||
import requests
|
||||
import shutil
|
||||
|
||||
from secrets import token_bytes
|
||||
from base64 import b64encode
|
||||
|
||||
from constants import ERROR_MESSAGES
|
||||
|
||||
|
||||
from pathlib import Path
|
||||
try:
|
||||
from dotenv import load_dotenv, find_dotenv
|
||||
|
||||
load_dotenv(find_dotenv("../.env"))
|
||||
|
||||
|
||||
####################################
|
||||
# File Upload
|
||||
####################################
|
||||
|
||||
|
||||
UPLOAD_DIR = "./data/uploads"
|
||||
Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)
|
||||
load_dotenv(find_dotenv("../.env"))
|
||||
except ImportError:
|
||||
print("dotenv not installed, skipping...")
|
||||
|
||||
WEBUI_NAME = "Open WebUI"
|
||||
shutil.copyfile("../build/favicon.png", "./static/favicon.png")
|
||||
|
||||
####################################
|
||||
# ENV (dev,test,prod)
|
||||
|
@ -32,6 +30,141 @@ Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)
|
|||
|
||||
ENV = os.environ.get("ENV", "dev")
|
||||
|
||||
try:
|
||||
with open(f"../package.json", "r") as f:
|
||||
PACKAGE_DATA = json.load(f)
|
||||
except:
|
||||
PACKAGE_DATA = {"version": "0.0.0"}
|
||||
|
||||
VERSION = PACKAGE_DATA["version"]
|
||||
|
||||
|
||||
# Function to parse each section
|
||||
def parse_section(section):
|
||||
items = []
|
||||
for li in section.find_all("li"):
|
||||
# Extract raw HTML string
|
||||
raw_html = str(li)
|
||||
|
||||
# Extract text without HTML tags
|
||||
text = li.get_text(separator=" ", strip=True)
|
||||
|
||||
# Split into title and content
|
||||
parts = text.split(": ", 1)
|
||||
title = parts[0].strip() if len(parts) > 1 else ""
|
||||
content = parts[1].strip() if len(parts) > 1 else text
|
||||
|
||||
items.append({"title": title, "content": content, "raw": raw_html})
|
||||
return items
|
||||
|
||||
|
||||
try:
|
||||
with open("../CHANGELOG.md", "r") as file:
|
||||
changelog_content = file.read()
|
||||
except:
|
||||
changelog_content = ""
|
||||
|
||||
# Convert markdown content to HTML
|
||||
html_content = markdown.markdown(changelog_content)
|
||||
|
||||
# Parse the HTML content
|
||||
soup = BeautifulSoup(html_content, "html.parser")
|
||||
|
||||
# Initialize JSON structure
|
||||
changelog_json = {}
|
||||
|
||||
# Iterate over each version
|
||||
for version in soup.find_all("h2"):
|
||||
version_number = version.get_text().strip().split(" - ")[0][1:-1] # Remove brackets
|
||||
date = version.get_text().strip().split(" - ")[1]
|
||||
|
||||
version_data = {"date": date}
|
||||
|
||||
# Find the next sibling that is a h3 tag (section title)
|
||||
current = version.find_next_sibling()
|
||||
|
||||
print(current)
|
||||
|
||||
while current and current.name != "h2":
|
||||
if current.name == "h3":
|
||||
section_title = current.get_text().lower() # e.g., "added", "fixed"
|
||||
section_items = parse_section(current.find_next_sibling("ul"))
|
||||
version_data[section_title] = section_items
|
||||
|
||||
# Move to the next element
|
||||
current = current.find_next_sibling()
|
||||
|
||||
changelog_json[version_number] = version_data
|
||||
|
||||
|
||||
CHANGELOG = changelog_json
|
||||
|
||||
|
||||
####################################
|
||||
# CUSTOM_NAME
|
||||
####################################
|
||||
|
||||
CUSTOM_NAME = os.environ.get("CUSTOM_NAME", "")
|
||||
if CUSTOM_NAME:
|
||||
try:
|
||||
r = requests.get(f"https://api.openwebui.com/api/v1/custom/{CUSTOM_NAME}")
|
||||
data = r.json()
|
||||
if r.ok:
|
||||
if "logo" in data:
|
||||
url = (
|
||||
f"https://api.openwebui.com{data['logo']}"
|
||||
if data["logo"][0] == "/"
|
||||
else data["logo"]
|
||||
)
|
||||
|
||||
r = requests.get(url, stream=True)
|
||||
if r.status_code == 200:
|
||||
with open("./static/favicon.png", "wb") as f:
|
||||
r.raw.decode_content = True
|
||||
shutil.copyfileobj(r.raw, f)
|
||||
|
||||
WEBUI_NAME = data["name"]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
pass
|
||||
|
||||
|
||||
####################################
|
||||
# DATA/FRONTEND BUILD DIR
|
||||
####################################
|
||||
|
||||
DATA_DIR = str(Path(os.getenv("DATA_DIR", "./data")).resolve())
|
||||
FRONTEND_BUILD_DIR = str(Path(os.getenv("FRONTEND_BUILD_DIR", "../build")))
|
||||
|
||||
try:
|
||||
with open(f"{DATA_DIR}/config.json", "r") as f:
|
||||
CONFIG_DATA = json.load(f)
|
||||
except:
|
||||
CONFIG_DATA = {}
|
||||
|
||||
####################################
|
||||
# File Upload DIR
|
||||
####################################
|
||||
|
||||
UPLOAD_DIR = f"{DATA_DIR}/uploads"
|
||||
Path(UPLOAD_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
####################################
|
||||
# Cache DIR
|
||||
####################################
|
||||
|
||||
CACHE_DIR = f"{DATA_DIR}/cache"
|
||||
Path(CACHE_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
####################################
|
||||
# Docs DIR
|
||||
####################################
|
||||
|
||||
DOCS_DIR = f"{DATA_DIR}/docs"
|
||||
Path(DOCS_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
####################################
|
||||
# OLLAMA_API_BASE_URL
|
||||
####################################
|
||||
|
@ -54,11 +187,50 @@ OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
|
|||
if OPENAI_API_BASE_URL == "":
|
||||
OPENAI_API_BASE_URL = "https://api.openai.com/v1"
|
||||
|
||||
|
||||
####################################
|
||||
# WEBUI
|
||||
####################################
|
||||
|
||||
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", True)
|
||||
DEFAULT_MODELS = os.environ.get("DEFAULT_MODELS", None)
|
||||
|
||||
|
||||
DEFAULT_PROMPT_SUGGESTIONS = (
|
||||
CONFIG_DATA["ui"]["prompt_suggestions"]
|
||||
if "ui" in CONFIG_DATA
|
||||
and "prompt_suggestions" in CONFIG_DATA["ui"]
|
||||
and type(CONFIG_DATA["ui"]["prompt_suggestions"]) is list
|
||||
else [
|
||||
{
|
||||
"title": ["Help me study", "vocabulary for a college entrance exam"],
|
||||
"content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.",
|
||||
},
|
||||
{
|
||||
"title": ["Give me ideas", "for what to do with my kids' art"],
|
||||
"content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
|
||||
},
|
||||
{
|
||||
"title": ["Tell me a fun fact", "about the Roman Empire"],
|
||||
"content": "Tell me a random fun fact about the Roman Empire",
|
||||
},
|
||||
{
|
||||
"title": ["Show me a code snippet", "of a website's sticky header"],
|
||||
"content": "Show me a code snippet of a website's sticky header in CSS and JavaScript.",
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_USER_ROLE = os.getenv("DEFAULT_USER_ROLE", "pending")
|
||||
USER_PERMISSIONS = {"chat": {"deletion": True}}
|
||||
|
||||
|
||||
####################################
|
||||
# WEBUI_VERSION
|
||||
####################################
|
||||
|
||||
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.50")
|
||||
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.100")
|
||||
|
||||
####################################
|
||||
# WEBUI_AUTH (Required for security)
|
||||
|
@ -67,22 +239,62 @@ WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.50")
|
|||
WEBUI_AUTH = True
|
||||
|
||||
####################################
|
||||
# WEBUI_JWT_SECRET_KEY
|
||||
# WEBUI_SECRET_KEY
|
||||
####################################
|
||||
|
||||
WEBUI_JWT_SECRET_KEY = os.environ.get("WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t")
|
||||
WEBUI_SECRET_KEY = os.environ.get(
|
||||
"WEBUI_SECRET_KEY",
|
||||
os.environ.get(
|
||||
"WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
|
||||
), # DEPRECATED: remove at next major version
|
||||
)
|
||||
|
||||
if WEBUI_AUTH and WEBUI_JWT_SECRET_KEY == "":
|
||||
if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
|
||||
raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
|
||||
|
||||
####################################
|
||||
# RAG
|
||||
####################################
|
||||
|
||||
CHROMA_DATA_PATH = "./data/vector_db"
|
||||
EMBED_MODEL = "all-MiniLM-L6-v2"
|
||||
CHROMA_DATA_PATH = f"{DATA_DIR}/vector_db"
|
||||
# this uses the model defined in the Dockerfile ENV variable. If you don't use Docker or Docker-based deployments such as k8s, the default embedding model will be used (all-MiniLM-L6-v2)
|
||||
RAG_EMBEDDING_MODEL = os.environ.get("RAG_EMBEDDING_MODEL", "all-MiniLM-L6-v2")
|
||||
# device type for embedding models - "cpu" (default), "cuda" (NVIDIA GPU required) or "mps" (Apple Silicon) - choosing the right one can lead to better performance
|
||||
RAG_EMBEDDING_MODEL_DEVICE_TYPE = os.environ.get(
|
||||
"RAG_EMBEDDING_MODEL_DEVICE_TYPE", "cpu"
|
||||
)
|
||||
CHROMA_CLIENT = chromadb.PersistentClient(
|
||||
path=CHROMA_DATA_PATH, settings=Settings(allow_reset=True)
|
||||
path=CHROMA_DATA_PATH,
|
||||
settings=Settings(allow_reset=True, anonymized_telemetry=False),
|
||||
)
|
||||
CHUNK_SIZE = 1500
|
||||
CHUNK_OVERLAP = 100
|
||||
|
||||
|
||||
RAG_TEMPLATE = """Use the following context as your learned knowledge, inside <context></context> XML tags.
|
||||
<context>
|
||||
[context]
|
||||
</context>
|
||||
|
||||
When answering the user:
|
||||
- If you don't know, just say that you don't know.
|
||||
- If you are not sure, ask for clarification.
|
||||
Avoid mentioning that you obtained the information from the context.
|
||||
Answer in the language of the user's question.
|
||||
|
||||
Given the context information, answer the query.
|
||||
Query: [query]"""
|
||||
|
||||
####################################
|
||||
# Transcribe
|
||||
####################################
|
||||
|
||||
WHISPER_MODEL = os.getenv("WHISPER_MODEL", "base")
|
||||
WHISPER_MODEL_DIR = os.getenv("WHISPER_MODEL_DIR", f"{CACHE_DIR}/whisper/models")
|
||||
|
||||
|
||||
####################################
|
||||
# Images
|
||||
####################################
|
||||
|
||||
AUTOMATIC1111_BASE_URL = os.getenv("AUTOMATIC1111_BASE_URL", "")
|
||||
|
|
|
@ -18,6 +18,9 @@ class ERROR_MESSAGES(str, Enum):
|
|||
"Uh-oh! This username is already registered. Please choose another username."
|
||||
)
|
||||
COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
|
||||
FILE_EXISTS = "Uh-oh! This file is already registered. Please choose another file."
|
||||
|
||||
NAME_TAG_TAKEN = "Uh-oh! This name tag is already registered. Please choose another name tag string."
|
||||
INVALID_TOKEN = (
|
||||
"Your session has expired or the token is invalid. Please sign in again."
|
||||
)
|
||||
|
@ -39,3 +42,8 @@ class ERROR_MESSAGES(str, Enum):
|
|||
USER_NOT_FOUND = "We could not find what you're looking for :/"
|
||||
API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature."
|
||||
MALICIOUS = "Unusual activities detected, please try again in a few minutes."
|
||||
|
||||
PANDOC_NOT_INSTALLED = "Pandoc is not installed on the server. Please contact your administrator for assistance."
|
||||
INCORRECT_FORMAT = (
|
||||
lambda err="": f"Invalid format. Please use the correct format{err if err else ''}"
|
||||
)
|
||||
|
|
34
backend/data/config.json
Normal file
34
backend/data/config.json
Normal file
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"ui": {
|
||||
"prompt_suggestions": [
|
||||
{
|
||||
"title": [
|
||||
"Help me study",
|
||||
"vocabulary for a college entrance exam"
|
||||
],
|
||||
"content": "Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option."
|
||||
},
|
||||
{
|
||||
"title": [
|
||||
"Give me ideas",
|
||||
"for what to do with my kids' art"
|
||||
],
|
||||
"content": "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter."
|
||||
},
|
||||
{
|
||||
"title": [
|
||||
"Tell me a fun fact",
|
||||
"about the Roman Empire"
|
||||
],
|
||||
"content": "Tell me a random fun fact about the Roman Empire"
|
||||
},
|
||||
{
|
||||
"title": [
|
||||
"Show me a code snippet",
|
||||
"of a website's sticky header"
|
||||
],
|
||||
"content": "Show me a code snippet of a website's sticky header in CSS and JavaScript."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -1 +1,2 @@
|
|||
uvicorn main:app --port 8080 --host 0.0.0.0 --forwarded-allow-ips '*' --reload
|
||||
PORT="${PORT:-8080}"
|
||||
uvicorn main:app --port $PORT --host 0.0.0.0 --forwarded-allow-ips '*' --reload
|
|
@ -1,5 +1,9 @@
|
|||
from bs4 import BeautifulSoup
|
||||
import json
|
||||
import markdown
|
||||
import time
|
||||
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi import HTTPException
|
||||
|
@ -10,11 +14,13 @@ from starlette.exceptions import HTTPException as StarletteHTTPException
|
|||
|
||||
from apps.ollama.main import app as ollama_app
|
||||
from apps.openai.main import app as openai_app
|
||||
|
||||
from apps.web.main import app as webui_app
|
||||
from apps.audio.main import app as audio_app
|
||||
from apps.images.main import app as images_app
|
||||
from apps.rag.main import app as rag_app
|
||||
|
||||
from config import ENV
|
||||
from apps.web.main import app as webui_app
|
||||
|
||||
from config import WEBUI_NAME, ENV, VERSION, CHANGELOG, FRONTEND_BUILD_DIR
|
||||
|
||||
|
||||
class SPAStaticFiles(StaticFiles):
|
||||
|
@ -55,7 +61,35 @@ app.mount("/api/v1", webui_app)
|
|||
|
||||
app.mount("/ollama/api", ollama_app)
|
||||
app.mount("/openai/api", openai_app)
|
||||
|
||||
app.mount("/images/api/v1", images_app)
|
||||
app.mount("/audio/api/v1", audio_app)
|
||||
app.mount("/rag/api/v1", rag_app)
|
||||
|
||||
|
||||
app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files")
|
||||
@app.get("/api/config")
|
||||
async def get_app_config():
|
||||
|
||||
return {
|
||||
"status": True,
|
||||
"name": WEBUI_NAME,
|
||||
"version": VERSION,
|
||||
"images": images_app.state.ENABLED,
|
||||
"default_models": webui_app.state.DEFAULT_MODELS,
|
||||
"default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
|
||||
}
|
||||
|
||||
|
||||
@app.get("/api/changelog")
|
||||
async def get_app_changelog():
|
||||
return CHANGELOG
|
||||
|
||||
|
||||
app.mount("/static", StaticFiles(directory="static"), name="static")
|
||||
|
||||
|
||||
app.mount(
|
||||
"/",
|
||||
SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
|
||||
name="spa-static-files",
|
||||
)
|
||||
|
|
|
@ -22,6 +22,15 @@ chromadb
|
|||
sentence_transformers
|
||||
pypdf
|
||||
docx2txt
|
||||
unstructured
|
||||
markdown
|
||||
pypandoc
|
||||
pandas
|
||||
openpyxl
|
||||
pyxlsb
|
||||
xlrd
|
||||
|
||||
faster-whisper
|
||||
|
||||
PyJWT
|
||||
pyjwt[crypto]
|
||||
|
|
|
@ -1,3 +1,22 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
uvicorn main:app --host 0.0.0.0 --port 8080 --forwarded-allow-ips '*'
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
cd "$SCRIPT_DIR" || exit
|
||||
|
||||
KEY_FILE=.webui_secret_key
|
||||
|
||||
PORT="${PORT:-8080}"
|
||||
if test "$WEBUI_SECRET_KEY $WEBUI_JWT_SECRET_KEY" = " "; then
|
||||
echo No WEBUI_SECRET_KEY provided
|
||||
|
||||
if ! [ -e "$KEY_FILE" ]; then
|
||||
echo Generating WEBUI_SECRET_KEY
|
||||
# Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one.
|
||||
echo $(head -c 12 /dev/random | base64) > $KEY_FILE
|
||||
fi
|
||||
|
||||
echo Loading WEBUI_SECRET_KEY from $KEY_FILE
|
||||
WEBUI_SECRET_KEY=`cat $KEY_FILE`
|
||||
fi
|
||||
|
||||
WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn main:app --host 0.0.0.0 --port "$PORT" --forwarded-allow-ips '*'
|
32
backend/start_windows.bat
Normal file
32
backend/start_windows.bat
Normal file
|
@ -0,0 +1,32 @@
|
|||
:: This method is not recommended, and we recommend you use the `start.sh` file with WSL instead.
|
||||
@echo off
|
||||
SETLOCAL ENABLEDELAYEDEXPANSION
|
||||
|
||||
:: Get the directory of the current script
|
||||
SET "SCRIPT_DIR=%~dp0"
|
||||
cd /d "%SCRIPT_DIR%" || exit /b
|
||||
|
||||
SET "KEY_FILE=.webui_secret_key"
|
||||
SET "PORT=%PORT:8080%"
|
||||
SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%"
|
||||
SET "WEBUI_JWT_SECRET_KEY=%WEBUI_JWT_SECRET_KEY%"
|
||||
|
||||
:: Check if WEBUI_SECRET_KEY and WEBUI_JWT_SECRET_KEY are not set
|
||||
IF "%WEBUI_SECRET_KEY%%WEBUI_JWT_SECRET_KEY%" == " " (
|
||||
echo No WEBUI_SECRET_KEY provided
|
||||
|
||||
IF NOT EXIST "%KEY_FILE%" (
|
||||
echo Generating WEBUI_SECRET_KEY
|
||||
:: Generate a random value to use as a WEBUI_SECRET_KEY in case the user didn't provide one
|
||||
SET /p WEBUI_SECRET_KEY=<nul
|
||||
FOR /L %%i IN (1,1,12) DO SET /p WEBUI_SECRET_KEY=<!random!>>%KEY_FILE%
|
||||
echo WEBUI_SECRET_KEY generated
|
||||
)
|
||||
|
||||
echo Loading WEBUI_SECRET_KEY from %KEY_FILE%
|
||||
SET /p WEBUI_SECRET_KEY=<%KEY_FILE%
|
||||
)
|
||||
|
||||
:: Execute uvicorn
|
||||
SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%"
|
||||
uvicorn main:app --host 0.0.0.0 --port "%PORT%" --forwarded-allow-ips '*'
|
BIN
backend/static/favicon.png
Normal file
BIN
backend/static/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 6 KiB |
|
@ -1,5 +1,8 @@
|
|||
from pathlib import Path
|
||||
import hashlib
|
||||
import re
|
||||
from datetime import timedelta
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def get_gravatar_url(email):
|
||||
|
@ -24,7 +27,85 @@ def calculate_sha256(file):
|
|||
return sha256.hexdigest()
|
||||
|
||||
|
||||
def calculate_sha256_string(string):
|
||||
# Create a new SHA-256 hash object
|
||||
sha256_hash = hashlib.sha256()
|
||||
# Update the hash object with the bytes of the input string
|
||||
sha256_hash.update(string.encode("utf-8"))
|
||||
# Get the hexadecimal representation of the hash
|
||||
hashed_string = sha256_hash.hexdigest()
|
||||
return hashed_string
|
||||
|
||||
|
||||
def validate_email_format(email: str) -> bool:
|
||||
if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def sanitize_filename(file_name):
|
||||
# Convert to lowercase
|
||||
lower_case_file_name = file_name.lower()
|
||||
|
||||
# Remove special characters using regular expression
|
||||
sanitized_file_name = re.sub(r"[^\w\s]", "", lower_case_file_name)
|
||||
|
||||
# Replace spaces with dashes
|
||||
final_file_name = re.sub(r"\s+", "-", sanitized_file_name)
|
||||
|
||||
return final_file_name
|
||||
|
||||
|
||||
def extract_folders_after_data_docs(path):
|
||||
# Convert the path to a Path object if it's not already
|
||||
path = Path(path)
|
||||
|
||||
# Extract parts of the path
|
||||
parts = path.parts
|
||||
|
||||
# Find the index of '/data/docs' in the path
|
||||
try:
|
||||
index_data_docs = parts.index("data") + 1
|
||||
index_docs = parts.index("docs", index_data_docs) + 1
|
||||
except ValueError:
|
||||
return []
|
||||
|
||||
# Exclude the filename and accumulate folder names
|
||||
tags = []
|
||||
|
||||
folders = parts[index_docs:-1]
|
||||
for idx, part in enumerate(folders):
|
||||
tags.append("/".join(folders[: idx + 1]))
|
||||
|
||||
return tags
|
||||
|
||||
|
||||
def parse_duration(duration: str) -> Optional[timedelta]:
|
||||
if duration == "-1" or duration == "0":
|
||||
return None
|
||||
|
||||
# Regular expression to find number and unit pairs
|
||||
pattern = r"(-?\d+(\.\d+)?)(ms|s|m|h|d|w)"
|
||||
matches = re.findall(pattern, duration)
|
||||
|
||||
if not matches:
|
||||
raise ValueError("Invalid duration string")
|
||||
|
||||
total_duration = timedelta()
|
||||
|
||||
for number, _, unit in matches:
|
||||
number = float(number)
|
||||
if unit == "ms":
|
||||
total_duration += timedelta(milliseconds=number)
|
||||
elif unit == "s":
|
||||
total_duration += timedelta(seconds=number)
|
||||
elif unit == "m":
|
||||
total_duration += timedelta(minutes=number)
|
||||
elif unit == "h":
|
||||
total_duration += timedelta(hours=number)
|
||||
elif unit == "d":
|
||||
total_duration += timedelta(days=number)
|
||||
elif unit == "w":
|
||||
total_duration += timedelta(weeks=number)
|
||||
|
||||
return total_duration
|
||||
|
|
|
@ -14,14 +14,14 @@ import config
|
|||
logging.getLogger("passlib").setLevel(logging.ERROR)
|
||||
|
||||
|
||||
JWT_SECRET_KEY = config.WEBUI_JWT_SECRET_KEY
|
||||
SESSION_SECRET = config.WEBUI_SECRET_KEY
|
||||
ALGORITHM = "HS256"
|
||||
|
||||
##############
|
||||
# Auth Utils
|
||||
##############
|
||||
|
||||
bearer_scheme = HTTPBearer()
|
||||
bearer_security = HTTPBearer()
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
|
||||
|
||||
|
@ -42,13 +42,13 @@ def create_token(data: dict, expires_delta: Union[timedelta, None] = None) -> st
|
|||
expire = datetime.utcnow() + expires_delta
|
||||
payload.update({"exp": expire})
|
||||
|
||||
encoded_jwt = jwt.encode(payload, JWT_SECRET_KEY, algorithm=ALGORITHM)
|
||||
encoded_jwt = jwt.encode(payload, SESSION_SECRET, algorithm=ALGORITHM)
|
||||
return encoded_jwt
|
||||
|
||||
|
||||
def decode_token(token: str) -> Optional[dict]:
|
||||
try:
|
||||
decoded = jwt.decode(token, JWT_SECRET_KEY, options={"verify_signature": False})
|
||||
decoded = jwt.decode(token, SESSION_SECRET, algorithms=[ALGORITHM])
|
||||
return decoded
|
||||
except Exception as e:
|
||||
return None
|
||||
|
@ -58,10 +58,12 @@ def extract_token_from_auth_header(auth_header: str):
|
|||
return auth_header[len("Bearer ") :]
|
||||
|
||||
|
||||
def get_current_user(auth_token: HTTPAuthorizationCredentials = Depends(HTTPBearer())):
|
||||
def get_current_user(
|
||||
auth_token: HTTPAuthorizationCredentials = Depends(bearer_security),
|
||||
):
|
||||
data = decode_token(auth_token.credentials)
|
||||
if data != None and "email" in data:
|
||||
user = Users.get_user_by_email(data["email"])
|
||||
if data != None and "id" in data:
|
||||
user = Users.get_user_by_id(data["id"])
|
||||
if user is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
|
@ -73,3 +75,21 @@ def get_current_user(auth_token: HTTPAuthorizationCredentials = Depends(HTTPBear
|
|||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.UNAUTHORIZED,
|
||||
)
|
||||
|
||||
|
||||
def get_verified_user(user=Depends(get_current_user)):
|
||||
if user.role not in {"user", "admin"}:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
def get_admin_user(user=Depends(get_current_user)):
|
||||
if user.role != "admin":
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
|
||||
)
|
||||
return user
|
||||
|
|
BIN
bun.lockb
BIN
bun.lockb
Binary file not shown.
BIN
demo.gif
BIN
demo.gif
Binary file not shown.
Before Width: | Height: | Size: 5.9 MiB After Width: | Height: | Size: 6.1 MiB |
|
@ -10,26 +10,27 @@ services:
|
|||
restart: unless-stopped
|
||||
image: ollama/ollama:latest
|
||||
|
||||
ollama-webui:
|
||||
open-webui:
|
||||
build:
|
||||
context: .
|
||||
args:
|
||||
OLLAMA_API_BASE_URL: '/ollama/api'
|
||||
dockerfile: Dockerfile
|
||||
image: ghcr.io/ollama-webui/ollama-webui:main
|
||||
container_name: ollama-webui
|
||||
image: ghcr.io/open-webui/open-webui:main
|
||||
container_name: open-webui
|
||||
volumes:
|
||||
- ollama-webui:/app/backend/data
|
||||
- open-webui:/app/backend/data
|
||||
depends_on:
|
||||
- ollama
|
||||
ports:
|
||||
- ${OLLAMA_WEBUI_PORT-3000}:8080
|
||||
environment:
|
||||
- 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
|
||||
- 'WEBUI_SECRET_KEY='
|
||||
extra_hosts:
|
||||
- host.docker.internal:host-gateway
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
ollama: {}
|
||||
ollama-webui: {}
|
||||
open-webui: {}
|
||||
|
|
61
docs/CONTRIBUTING.md
Normal file
61
docs/CONTRIBUTING.md
Normal file
|
@ -0,0 +1,61 @@
|
|||
# Contributing to Open WebUI
|
||||
|
||||
🚀 **Welcome, Contributors!** 🚀
|
||||
|
||||
Your interest in contributing to Open WebUI is greatly appreciated. This document is here to guide you through the process, ensuring your contributions enhance the project effectively. Let's make Open WebUI even better, together!
|
||||
|
||||
## 📌 Key Points
|
||||
|
||||
### 🦙 Ollama vs. Open WebUI
|
||||
|
||||
It's crucial to distinguish between Ollama and Open WebUI:
|
||||
|
||||
- **Open WebUI** focuses on providing an intuitive and responsive web interface for chat interactions.
|
||||
- **Ollama** is the underlying technology that powers these interactions.
|
||||
|
||||
If your issue or contribution pertains directly to the core Ollama technology, please direct it to the appropriate [Ollama project repository](https://ollama.com/). Open WebUI's repository is dedicated to the web interface aspect only.
|
||||
|
||||
### 🚨 Reporting Issues
|
||||
|
||||
Noticed something off? Have an idea? Check our [Issues tab](https://github.com/open-webui/open-webui/issues) to see if it's already been reported or suggested. If not, feel free to open a new issue. When reporting an issue, please follow our issue templates. These templates are designed to ensure that all necessary details are provided from the start, enabling us to address your concerns more efficiently.
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> - **Template Compliance:** Please be aware that failure to follow the provided issue template, or not providing the requested information at all, will likely result in your issue being closed without further consideration. This approach is critical for maintaining the manageability and integrity of issue tracking.
|
||||
>
|
||||
> - **Detail is Key:** To ensure your issue is understood and can be effectively addressed, it's imperative to include comprehensive details. Descriptions should be clear, including steps to reproduce, expected outcomes, and actual results. Lack of sufficient detail may hinder our ability to resolve your issue.
|
||||
|
||||
### 🧭 Scope of Support
|
||||
|
||||
We've noticed an uptick in issues not directly related to Open WebUI but rather to the environment it's run in, especially Docker setups. While we strive to support Docker deployment, understanding Docker fundamentals is crucial for a smooth experience.
|
||||
|
||||
- **Docker Deployment Support**: Open WebUI supports Docker deployment. Familiarity with Docker is assumed. For Docker basics, please refer to the [official Docker documentation](https://docs.docker.com/get-started/overview/).
|
||||
|
||||
- **Advanced Configurations**: Setting up reverse proxies for HTTPS and managing Docker deployments requires foundational knowledge. There are numerous online resources available to learn these skills. Ensuring you have this knowledge will greatly enhance your experience with Open WebUI and similar projects.
|
||||
|
||||
## 💡 Contributing
|
||||
|
||||
Looking to contribute? Great! Here's how you can help:
|
||||
|
||||
### 🛠 Pull Requests
|
||||
|
||||
We welcome pull requests. Before submitting one, please:
|
||||
|
||||
1. Discuss your idea or issue in the [issues section](https://github.com/open-webui/open-webui/issues).
|
||||
2. Follow the project's coding standards and include tests for new features.
|
||||
3. Update documentation as necessary.
|
||||
4. Write clear, descriptive commit messages.
|
||||
|
||||
### 📚 Documentation & Tutorials
|
||||
|
||||
Help us make Open WebUI more accessible by improving documentation, writing tutorials, or creating guides on setting up and optimizing the web UI.
|
||||
|
||||
### 🤔 Questions & Feedback
|
||||
|
||||
Got questions or feedback? Join our [Discord community](https://discord.gg/5rJgQTnV4s) or open an issue. We're here to help!
|
||||
|
||||
## 🙏 Thank You!
|
||||
|
||||
Your contributions, big or small, make a significant impact on Open WebUI. We're excited to see what you bring to the project!
|
||||
|
||||
Together, let's create an even more powerful tool for the community. 🌟
|
20
docs/SECURITY.md
Normal file
20
docs/SECURITY.md
Normal file
|
@ -0,0 +1,20 @@
|
|||
# Security Policy
|
||||
|
||||
Our primary goal is to ensure the protection and confidentiality of sensitive data stored by users on open-webui.
|
||||
|
||||
## Supported Versions
|
||||
|
||||
| Version | Supported |
|
||||
| ------- | ------------------ |
|
||||
| main | :white_check_mark: |
|
||||
| others | :x: |
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you discover a security issue within our system, please notify us immediately via a pull request or contact us on discord.
|
||||
|
||||
## Product Security
|
||||
|
||||
We regularly audit our internal processes and system's architecture for vulnerabilities using a combination of automated and manual testing techniques.
|
||||
|
||||
We are planning on implementing SAST and SCA scans in our project soon.
|
199
docs/apache.md
Normal file
199
docs/apache.md
Normal file
|
@ -0,0 +1,199 @@
|
|||
# Hosting UI and Models separately
|
||||
|
||||
Sometimes it's beneficial to host Ollama separately from the UI, while retaining the RAG and RBAC support features shared across users:
|
||||
|
||||
# Open WebUI Configuration
|
||||
|
||||
## UI Configuration
|
||||
|
||||
For the UI configuration, you can set up the Apache VirtualHost as follows:
|
||||
|
||||
```
|
||||
# Assuming you have a website hosting this UI at "server.com"
|
||||
<VirtualHost 192.168.1.100:80>
|
||||
ServerName server.com
|
||||
DocumentRoot /home/server/public_html
|
||||
|
||||
ProxyPass / http://server.com:3000/ nocanon
|
||||
ProxyPassReverse / http://server.com:3000/
|
||||
|
||||
</VirtualHost>
|
||||
```
|
||||
|
||||
Enable the site first before you can request SSL:
|
||||
|
||||
`a2ensite server.com.conf` # this will enable the site. a2ensite is short for "Apache 2 Enable Site"
|
||||
|
||||
```
|
||||
# For SSL
|
||||
<VirtualHost 192.168.1.100:443>
|
||||
ServerName server.com
|
||||
DocumentRoot /home/server/public_html
|
||||
|
||||
ProxyPass / http://server.com:3000/ nocanon
|
||||
ProxyPassReverse / http://server.com:3000/
|
||||
|
||||
SSLEngine on
|
||||
SSLCertificateFile /etc/ssl/virtualmin/170514456861234/ssl.cert
|
||||
SSLCertificateKeyFile /etc/ssl/virtualmin/170514456861234/ssl.key
|
||||
SSLProtocol all -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
|
||||
|
||||
SSLProxyEngine on
|
||||
SSLCACertificateFile /etc/ssl/virtualmin/170514456865864/ssl.ca
|
||||
</VirtualHost>
|
||||
|
||||
```
|
||||
|
||||
I'm using virtualmin here for my SSL clusters, but you can also use certbot directly or your preferred SSL method. To use SSL:
|
||||
|
||||
### Prerequisites.
|
||||
|
||||
Run the following commands:
|
||||
|
||||
`snap install certbot --classic`
|
||||
`apt install python3-certbot-apache` (this will install the Apache plugin).
|
||||
|
||||
Navigate to the apache sites-available directory:
|
||||
|
||||
`cd /etc/apache2/sites-available/`
|
||||
|
||||
Create server.com.conf if it has not already been created, containing the above `<VirtualHost>` configuration (it should match your case; modify as necessary). Use the one without SSL:
|
||||
|
||||
Once it's created, run `certbot --apache -d server.com`; this will request and create SSL keys for you, as well as create the server.com.le-ssl.conf
|
||||
|
||||
# Configuring Ollama Server
|
||||
|
||||
On your latest installation of Ollama, make sure that you have set up your API server per the official Ollama reference:
|
||||
|
||||
[Ollama FAQ](https://github.com/jmorganca/ollama/blob/main/docs/faq.md)
|
||||
|
||||
### TL;DR
|
||||
|
||||
The guide doesn't seem to match the current updated service file on Linux, so we will address it here:
|
||||
|
||||
Unless you're compiling Ollama from source, installing with the standard install script `curl https://ollama.com/install.sh | sh` creates a file called `ollama.service` in /etc/systemd/system. You can use nano to edit the file:
|
||||
|
||||
```
|
||||
sudo nano /etc/systemd/system/ollama.service
|
||||
```
|
||||
|
||||
Add the following lines:
|
||||
|
||||
```
|
||||
Environment="OLLAMA_HOST=0.0.0.0:11434" # this line is mandatory. You can also specify
|
||||
```
|
||||
|
||||
For instance:
|
||||
|
||||
```
|
||||
[Unit]
|
||||
Description=Ollama Service
|
||||
After=network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/local/bin/ollama serve
|
||||
Environment="OLLAMA_HOST=0.0.0.0:11434" # this line is mandatory. You can also specify 192.168.254.109:DIFFERENT_PORT, format
|
||||
Environment="OLLAMA_ORIGINS=http://192.168.254.106:11434,https://models.server.city" # this line is optional
|
||||
User=ollama
|
||||
Group=ollama
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
Environment="PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/s>
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
```
|
||||
|
||||
Save the file by pressing CTRL+S, then press CTRL+X
|
||||
|
||||
When your computer restarts, the Ollama server will be listening on the IP:PORT you specified, in this case 0.0.0.0:11434 (i.e. 192.168.254.106:11434, or whatever your local IP address is). Make sure your router is correctly configured to reach that machine by forwarding port 11434 to your local server's IP.
|
||||
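As a quick sanity check before adding the Apache proxy, you can confirm the API is reachable from another machine on your network (the IP below is just the example address used in this guide; substitute your own):

```
# From another machine on the LAN; should return the list of locally available models as JSON
curl http://192.168.254.106:11434/api/tags
```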
|
||||
# Ollama Model Configuration
|
||||
|
||||
## For the Ollama model configuration, use the following Apache VirtualHost setup:
|
||||
|
||||
Navigate to the apache sites-available directory:
|
||||
|
||||
`cd /etc/apache2/sites-available/`
|
||||
|
||||
`nano models.server.city.conf` # match this with your ollama server domain
|
||||
|
||||
Add the following VirtualHost containing this example (modify as needed):
|
||||
|
||||
```
|
||||
|
||||
# Assuming you have a website hosting this UI at "models.server.city"
|
||||
<IfModule mod_ssl.c>
|
||||
<VirtualHost 192.168.254.109:443>
|
||||
DocumentRoot "/var/www/html/"
|
||||
ServerName models.server.city
|
||||
<Directory "/var/www/html/">
|
||||
Options None
|
||||
Require all granted
|
||||
</Directory>
|
||||
|
||||
ProxyRequests Off
|
||||
ProxyPreserveHost On
|
||||
ProxyAddHeaders On
|
||||
SSLProxyEngine on
|
||||
|
||||
ProxyPass / http://server.city:1000/ nocanon # or port 11434
|
||||
ProxyPassReverse / http://server.city:1000/ # or port 11434
|
||||
|
||||
SSLCertificateFile /etc/letsencrypt/live/models.server.city/fullchain.pem
|
||||
SSLCertificateKeyFile /etc/letsencrypt/live/models.server.city/privkey.pem
|
||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||
</VirtualHost>
|
||||
</IfModule>
|
||||
```
|
||||
|
||||
You may need to enable the site first (if you haven't done so yet) before you can request SSL:
|
||||
|
||||
`a2ensite models.server.city.conf`
#### For the SSL part of the Ollama server

Run the following commands.

Navigate to the Apache sites-available directory:

`cd /etc/apache2/sites-available/`

`certbot --apache -d server.com`

The plain-HTTP VirtualHost should end up looking like this, with the rewrite rules redirecting traffic to HTTPS:

```
<VirtualHost 192.168.254.109:80>
    DocumentRoot "/var/www/html/"
    ServerName models.server.city
    <Directory "/var/www/html/">
        Options None
        Require all granted
    </Directory>

    ProxyRequests Off
    ProxyPreserveHost On
    ProxyAddHeaders On
    SSLProxyEngine on

    ProxyPass / http://server.city:1000/ nocanon # or port 11434
    ProxyPassReverse / http://server.city:1000/ # or port 11434

    RewriteEngine on
    RewriteCond %{SERVER_NAME} =models.server.city
    RewriteRule ^ https://%{SERVER_NAME}%{REQUEST_URI} [END,NE,R=permanent]
</VirtualHost>
```

Don't forget to restart/reload Apache with `systemctl reload apache2`.

Open your site at https://server.com!
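If you'd rather verify from the command line first, a quick request over HTTPS should come back without certificate errors (using the example domain from above):

```
# Check that the site is served over HTTPS with a valid certificate.
curl -I https://models.server.city
```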
**Congratulations**, your _**OpenAI-like, ChatGPT-style UI**_ is now serving AI with RAG, RBAC and multimodal features! Download some Ollama models if you haven't done so yet!

If you encounter any misconfiguration or errors, please file an issue or join the discussion. There are a lot of friendly developers here to assist you.

Let's make this UI much more user friendly for everyone!

Thanks for making open-webui your UI choice for AI!

This doc was written by **Bob Reyes**, your **Open-WebUI** fan from the Philippines.
@ -0,0 +1 @@
|
|||
values-minikube.yaml
|
|
@ -1,5 +1,21 @@
|
|||
apiVersion: v2
|
||||
name: ollama-webui
|
||||
description: "Ollama Web UI: A User-Friendly Web Interface for Chat Interactions 👋"
|
||||
name: open-webui
|
||||
version: 1.0.0
|
||||
icon: https://raw.githubusercontent.com/ollama-webui/ollama-webui/main/static/favicon.png
|
||||
appVersion: "latest"
|
||||
|
||||
home: https://www.openwebui.com/
|
||||
icon: https://raw.githubusercontent.com/open-webui/open-webui/main/static/favicon.png
|
||||
|
||||
description: "Open WebUI: A User-Friendly Web Interface for Chat Interactions 👋"
|
||||
keywords:
|
||||
- llm
|
||||
- chat
|
||||
- web-ui
|
||||
|
||||
sources:
|
||||
- https://github.com/open-webui/open-webui/tree/main/kubernetes/helm
|
||||
- https://hub.docker.com/r/ollama/ollama
|
||||
- https://github.com/open-webui/open-webui/pkgs/container/open-webui
|
||||
|
||||
annotations:
|
||||
licenses: MIT
|
||||
|
|
47
kubernetes/helm/templates/_helpers.tpl
Normal file
|
@ -0,0 +1,47 @@
|
|||
{{- define "open-webui.name" -}}
|
||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
||||
{{- end -}}
|
||||
|
||||
{{- define "ollama.name" -}}
|
||||
ollama
|
||||
{{- end -}}
|
||||
|
||||
{{- define "ollama.url" -}}
|
||||
{{- printf "http://%s.%s.svc.cluster.local:%d/api" (include "ollama.name" .) (.Release.Namespace) (.Values.ollama.service.port | int) }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "chart.name" -}}
|
||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "base.labels" -}}
|
||||
helm.sh/chart: {{ include "chart.name" . }}
|
||||
{{- if .Chart.AppVersion }}
|
||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
||||
{{- end }}
|
||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "base.selectorLabels" -}}
|
||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
||||
{{- end -}}
|
||||
|
||||
{{- define "open-webui.selectorLabels" -}}
|
||||
{{ include "base.selectorLabels" . }}
|
||||
app.kubernetes.io/component: {{ .Chart.Name }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "open-webui.labels" -}}
|
||||
{{ include "base.labels" . }}
|
||||
{{ include "open-webui.selectorLabels" . }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "ollama.selectorLabels" -}}
|
||||
{{ include "base.selectorLabels" . }}
|
||||
app.kubernetes.io/component: {{ include "ollama.name" . }}
|
||||
{{- end }}
|
||||
|
||||
{{- define "ollama.labels" -}}
|
||||
{{ include "base.labels" . }}
|
||||
{{ include "ollama.selectorLabels" . }}
|
||||
{{- end }}
|
|
@ -1,4 +0,0 @@
|
|||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: {{ .Values.namespace }}
|
|
@ -1,13 +1,21 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ollama-service
|
||||
namespace: {{ .Values.namespace }}
|
||||
name: {{ include "ollama.name" . }}
|
||||
labels:
|
||||
{{- include "ollama.labels" . | nindent 4 }}
|
||||
{{- with .Values.ollama.service.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
type: {{ .Values.ollama.service.type }}
|
||||
selector:
|
||||
app: ollama
|
||||
{{- include "ollama.selectorLabels" . | nindent 4 }}
|
||||
{{- with .Values.ollama.service }}
|
||||
type: {{ .type }}
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: {{ .Values.ollama.servicePort }}
|
||||
targetPort: {{ .Values.ollama.servicePort }}
|
||||
name: http
|
||||
port: {{ .port }}
|
||||
targetPort: http
|
||||
{{- end }}
|
||||
|
|
|
@ -1,24 +1,43 @@
|
|||
apiVersion: apps/v1
|
||||
kind: StatefulSet
|
||||
metadata:
|
||||
name: ollama
|
||||
namespace: {{ .Values.namespace }}
|
||||
name: {{ include "ollama.name" . }}
|
||||
labels:
|
||||
{{- include "ollama.labels" . | nindent 4 }}
|
||||
{{- with .Values.ollama.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
serviceName: "ollama"
|
||||
serviceName: {{ include "ollama.name" . }}
|
||||
replicas: {{ .Values.ollama.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ollama
|
||||
{{- include "ollama.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: ollama
|
||||
{{- include "ollama.labels" . | nindent 8 }}
|
||||
{{- with .Values.ollama.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
enableServiceLinks: false
|
||||
automountServiceAccountToken: false
|
||||
{{- with .Values.ollama.runtimeClassName }}
|
||||
runtimeClassName: {{ . }}
|
||||
{{- end }}
|
||||
containers:
|
||||
- name: ollama
|
||||
image: {{ .Values.ollama.image }}
|
||||
- name: {{ include "ollama.name" . }}
|
||||
{{- with .Values.ollama.image }}
|
||||
image: {{ .repository }}:{{ .tag }}
|
||||
imagePullPolicy: {{ .pullPolicy }}
|
||||
{{- end }}
|
||||
tty: true
|
||||
ports:
|
||||
- containerPort: {{ .Values.ollama.servicePort }}
|
||||
- name: http
|
||||
containerPort: {{ .Values.ollama.service.containerPort }}
|
||||
env:
|
||||
{{- if .Values.ollama.gpu.enabled }}
|
||||
- name: PATH
|
||||
|
@ -27,29 +46,51 @@ spec:
|
|||
value: /usr/local/nvidia/lib:/usr/local/nvidia/lib64
|
||||
- name: NVIDIA_DRIVER_CAPABILITIES
|
||||
value: compute,utility
|
||||
{{- end}}
|
||||
{{- if .Values.ollama.resources }}
|
||||
resources: {{- toYaml .Values.ollama.resources | nindent 10 }}
|
||||
{{- end }}
|
||||
{{- with .Values.ollama.resources }}
|
||||
resources: {{- toYaml . | nindent 10 }}
|
||||
{{- end }}
|
||||
volumeMounts:
|
||||
- name: ollama-volume
|
||||
- name: data
|
||||
mountPath: /root/.ollama
|
||||
tty: true
|
||||
{{- with .Values.ollama.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- with .Values.ollama.tolerations }}
|
||||
tolerations:
|
||||
{{- if .Values.ollama.gpu.enabled }}
|
||||
- key: nvidia.com/gpu
|
||||
operator: Exists
|
||||
effect: NoSchedule
|
||||
{{- end }}
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
volumes:
|
||||
{{- if and .Values.ollama.persistence.enabled .Values.ollama.persistence.existingClaim }}
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: {{ .Values.ollama.persistence.existingClaim }}
|
||||
{{- else if not .Values.ollama.persistence.enabled }}
|
||||
- name: data
|
||||
emptyDir: {}
|
||||
{{- else if and .Values.ollama.persistence.enabled (not .Values.ollama.persistence.existingClaim) }}
|
||||
[]
|
||||
volumeClaimTemplates:
|
||||
- metadata:
|
||||
name: ollama-volume
|
||||
name: data
|
||||
labels:
|
||||
{{- include "ollama.selectorLabels" . | nindent 8 }}
|
||||
{{- with .Values.ollama.persistence.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
accessModes:
|
||||
{{- range .Values.ollama.persistence.accessModes }}
|
||||
- {{ . | quote }}
|
||||
{{- end }}
|
||||
resources:
|
||||
requests:
|
||||
storage: {{ .Values.ollama.volumeSize }}
|
||||
storage: {{ .Values.ollama.persistence.size | quote }}
|
||||
storageClass: {{ .Values.ollama.persistence.storageClass }}
|
||||
{{- with .Values.ollama.persistence.selector }}
|
||||
selector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
|
|
@ -1,38 +1,62 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ollama-webui-deployment
|
||||
namespace: {{ .Values.namespace }}
|
||||
name: {{ include "open-webui.name" . }}
|
||||
labels:
|
||||
{{- include "open-webui.labels" . | nindent 4 }}
|
||||
{{- with .Values.webui.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
replicas: 1
|
||||
replicas: {{ .Values.webui.replicaCount }}
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ollama-webui
|
||||
{{- include "open-webui.selectorLabels" . | nindent 6 }}
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: ollama-webui
|
||||
{{- include "open-webui.labels" . | nindent 8 }}
|
||||
{{- with .Values.webui.podAnnotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
enableServiceLinks: false
|
||||
automountServiceAccountToken: false
|
||||
containers:
|
||||
- name: ollama-webui
|
||||
image: {{ .Values.webui.image }}
|
||||
- name: {{ .Chart.Name }}
|
||||
{{- with .Values.webui.image }}
|
||||
image: {{ .repository }}:{{ .tag | default $.Chart.AppVersion }}
|
||||
imagePullPolicy: {{ .pullPolicy }}
|
||||
{{- end }}
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
{{- if .Values.webui.resources }}
|
||||
resources: {{- toYaml .Values.webui.resources | nindent 10 }}
|
||||
- name: http
|
||||
containerPort: {{ .Values.webui.service.containerPort }}
|
||||
{{- with .Values.webui.resources }}
|
||||
resources: {{- toYaml . | nindent 10 }}
|
||||
{{- end }}
|
||||
volumeMounts:
|
||||
- name: webui-volume
|
||||
- name: data
|
||||
mountPath: /app/backend/data
|
||||
env:
|
||||
- name: OLLAMA_API_BASE_URL
|
||||
value: "http://ollama-service.{{ .Values.namespace }}.svc.cluster.local:{{ .Values.ollama.servicePort }}/api"
|
||||
value: {{ include "ollama.url" . | quote }}
|
||||
tty: true
|
||||
{{- with .Values.webui.nodeSelector }}
|
||||
nodeSelector:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
volumes:
|
||||
- name: webui-volume
|
||||
{{- if and .Values.webui.persistence.enabled .Values.webui.persistence.existingClaim }}
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: ollama-webui-pvc
|
||||
claimName: {{ .Values.webui.persistence.existingClaim }}
|
||||
{{- else if not .Values.webui.persistence.enabled }}
|
||||
- name: data
|
||||
emptyDir: {}
|
||||
{{- else if and .Values.webui.persistence.enabled (not .Values.webui.persistence.existingClaim) }}
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: {{ include "open-webui.name" . }}
|
||||
{{- end }}
|
||||
|
|
|
@ -2,13 +2,23 @@
|
|||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: ollama-webui-ingress
|
||||
namespace: {{ .Values.namespace }}
|
||||
{{- if .Values.webui.ingress.annotations }}
|
||||
name: {{ include "open-webui.name" . }}
|
||||
labels:
|
||||
{{- include "open-webui.labels" . | nindent 4 }}
|
||||
{{- with .Values.webui.ingress.annotations }}
|
||||
annotations:
|
||||
{{ toYaml .Values.webui.ingress.annotations | trimSuffix "\n" | indent 4 }}
|
||||
{{- end }}
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
{{- with .Values.webui.ingress.class }}
|
||||
ingressClassName: {{ . }}
|
||||
{{- end }}
|
||||
{{- if .Values.webui.ingress.tls }}
|
||||
tls:
|
||||
- hosts:
|
||||
- {{ .Values.webui.ingress.host | quote }}
|
||||
secretName: {{ default (printf "%s-tls" .Release.Name) .Values.webui.ingress.existingSecret }}
|
||||
{{- end }}
|
||||
rules:
|
||||
- host: {{ .Values.webui.ingress.host }}
|
||||
http:
|
||||
|
@ -17,7 +27,7 @@ spec:
|
|||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: ollama-webui-service
|
||||
name: {{ include "open-webui.name" . }}
|
||||
port:
|
||||
number: {{ .Values.webui.servicePort }}
|
||||
name: http
|
||||
{{- end }}
|
||||
|
|
|
@ -1,12 +1,25 @@
|
|||
{{- if and .Values.webui.persistence.enabled (not .Values.webui.persistence.existingClaim) }}
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: {{ include "open-webui.name" . }}
|
||||
labels:
|
||||
app: ollama-webui
|
||||
name: ollama-webui-pvc
|
||||
namespace: {{ .Values.namespace }}
|
||||
{{- include "open-webui.selectorLabels" . | nindent 4 }}
|
||||
{{- with .Values.webui.persistence.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 8 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
accessModes:
|
||||
{{- range .Values.webui.persistence.accessModes }}
|
||||
- {{ . | quote }}
|
||||
{{- end }}
|
||||
resources:
|
||||
requests:
|
||||
storage: {{ .Values.webui.volumeSize }}
|
||||
storage: {{ .Values.webui.persistence.size }}
|
||||
storageClass: {{ .Values.webui.persistence.storageClass }}
|
||||
{{- with .Values.webui.persistence.selector }}
|
||||
selector:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
|
|
@ -1,15 +1,24 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ollama-webui-service
|
||||
namespace: {{ .Values.namespace }}
|
||||
name: {{ include "open-webui.name" . }}
|
||||
labels:
|
||||
{{- include "open-webui.labels" . | nindent 4 }}
|
||||
{{- with .Values.webui.service.annotations }}
|
||||
annotations:
|
||||
{{- toYaml . | nindent 4 }}
|
||||
{{- end }}
|
||||
spec:
|
||||
type: {{ .Values.webui.service.type }} # Default: NodePort # Use LoadBalancer if you're on a cloud that supports it
|
||||
selector:
|
||||
app: ollama-webui
|
||||
{{- include "open-webui.selectorLabels" . | nindent 4 }}
|
||||
{{- with .Values.webui.service }}
|
||||
type: {{ .type }}
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: {{ .Values.webui.servicePort }}
|
||||
targetPort: {{ .Values.webui.servicePort }}
|
||||
# If using NodePort, you can optionally specify the nodePort:
|
||||
# nodePort: 30000
|
||||
- protocol: TCP
|
||||
name: http
|
||||
port: {{ .port }}
|
||||
targetPort: http
|
||||
{{- if .nodePort }}
|
||||
nodePort: {{ .nodePort | int }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
|
27
kubernetes/helm/values-minikube.yaml
Normal file
|
@ -0,0 +1,27 @@
|
|||
ollama:
|
||||
resources:
|
||||
requests:
|
||||
cpu: "2000m"
|
||||
memory: "2Gi"
|
||||
limits:
|
||||
cpu: "4000m"
|
||||
memory: "4Gi"
|
||||
nvidia.com/gpu: "0"
|
||||
service:
|
||||
type: ClusterIP
|
||||
gpu:
|
||||
enabled: false
|
||||
|
||||
webui:
|
||||
resources:
|
||||
requests:
|
||||
cpu: "500m"
|
||||
memory: "500Mi"
|
||||
limits:
|
||||
cpu: "1000m"
|
||||
memory: "1Gi"
|
||||
ingress:
|
||||
enabled: true
|
||||
host: open-webui.minikube.local
|
||||
service:
|
||||
type: NodePort
|
|
@ -1,38 +1,72 @@
|
|||
namespace: ollama-namespace
|
||||
nameOverride: ""
|
||||
|
||||
ollama:
|
||||
annotations: {}
|
||||
podAnnotations: {}
|
||||
replicaCount: 1
|
||||
image: ollama/ollama:latest
|
||||
servicePort: 11434
|
||||
resources:
|
||||
limits:
|
||||
cpu: "2000m"
|
||||
memory: "2Gi"
|
||||
nvidia.com/gpu: "0"
|
||||
volumeSize: 1Gi
|
||||
image:
|
||||
repository: ollama/ollama
|
||||
tag: latest
|
||||
pullPolicy: Always
|
||||
resources: {}
|
||||
persistence:
|
||||
enabled: true
|
||||
size: 30Gi
|
||||
existingClaim: ""
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
storageClass: ""
|
||||
selector: {}
|
||||
annotations: {}
|
||||
nodeSelector: {}
|
||||
# -- If using a special runtime container such as nvidia, set it here.
|
||||
runtimeClassName: ""
|
||||
tolerations:
|
||||
- key: nvidia.com/gpu
|
||||
operator: Exists
|
||||
effect: NoSchedule
|
||||
service:
|
||||
type: ClusterIP
|
||||
annotations: {}
|
||||
port: 80
|
||||
containerPort: 11434
|
||||
gpu:
|
||||
# -- Enable additional ENV values to help Ollama discover GPU usage
|
||||
enabled: false
|
||||
|
||||
webui:
|
||||
annotations: {}
|
||||
podAnnotations: {}
|
||||
replicaCount: 1
|
||||
image:
|
||||
repository: ghcr.io/open-webui/open-webui
|
||||
tag: ""
|
||||
pullPolicy: Always
|
||||
resources: {}
|
||||
ingress:
|
||||
enabled: false
|
||||
class: ""
|
||||
# -- Use appropriate annotations for your Ingress controller, e.g., for NGINX:
|
||||
# nginx.ingress.kubernetes.io/rewrite-target: /
|
||||
annotations: {}
|
||||
host: ""
|
||||
tls: false
|
||||
existingSecret: ""
|
||||
persistence:
|
||||
enabled: true
|
||||
size: 2Gi
|
||||
existingClaim: ""
|
||||
# -- If using multiple replicas, you must update accessModes to ReadWriteMany
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
storageClass: ""
|
||||
selector: {}
|
||||
annotations: {}
|
||||
nodeSelector: {}
|
||||
tolerations: []
|
||||
service:
|
||||
type: ClusterIP
|
||||
gpu:
|
||||
enabled: false
|
||||
|
||||
webui:
|
||||
replicaCount: 1
|
||||
image: ghcr.io/ollama-webui/ollama-webui:main
|
||||
servicePort: 8080
|
||||
resources:
|
||||
limits:
|
||||
cpu: "500m"
|
||||
memory: "500Mi"
|
||||
ingress:
|
||||
enabled: true
|
||||
annotations:
|
||||
# Use appropriate annotations for your Ingress controller, e.g., for NGINX:
|
||||
# nginx.ingress.kubernetes.io/rewrite-target: /
|
||||
host: ollama.minikube.local
|
||||
volumeSize: 1Gi
|
||||
nodeSelector: {}
|
||||
tolerations: []
|
||||
service:
|
||||
type: NodePort
|
||||
annotations: {}
|
||||
port: 80
|
||||
containerPort: 8080
|
||||
nodePort: ""
|
||||
|
|
|
@ -2,7 +2,7 @@ apiVersion: v1
|
|||
kind: Service
|
||||
metadata:
|
||||
name: ollama-service
|
||||
namespace: ollama-namespace
|
||||
namespace: open-webui
|
||||
spec:
|
||||
selector:
|
||||
app: ollama
|
||||
|
|
|
@ -2,7 +2,7 @@ apiVersion: apps/v1
|
|||
kind: StatefulSet
|
||||
metadata:
|
||||
name: ollama
|
||||
namespace: ollama-namespace
|
||||
namespace: open-webui
|
||||
spec:
|
||||
serviceName: "ollama"
|
||||
replicas: 1
|
||||
|
@ -20,9 +20,13 @@ spec:
|
|||
ports:
|
||||
- containerPort: 11434
|
||||
resources:
|
||||
limits:
|
||||
requests:
|
||||
cpu: "2000m"
|
||||
memory: "2Gi"
|
||||
limits:
|
||||
cpu: "4000m"
|
||||
memory: "4Gi"
|
||||
nvidia.com/gpu: "0"
|
||||
volumeMounts:
|
||||
- name: ollama-volume
|
||||
mountPath: /root/.ollama
|
||||
|
@ -34,4 +38,4 @@ spec:
|
|||
accessModes: [ "ReadWriteOnce" ]
|
||||
resources:
|
||||
requests:
|
||||
storage: 1Gi
|
||||
storage: 30Gi
|
|
@ -1,4 +1,4 @@
|
|||
apiVersion: v1
|
||||
kind: Namespace
|
||||
metadata:
|
||||
name: ollama-namespace
|
||||
name: open-webui
|
|
@ -1,28 +1,38 @@
|
|||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ollama-webui-deployment
|
||||
namespace: ollama-namespace
|
||||
name: open-webui-deployment
|
||||
namespace: open-webui
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ollama-webui
|
||||
app: open-webui
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: ollama-webui
|
||||
app: open-webui
|
||||
spec:
|
||||
containers:
|
||||
- name: ollama-webui
|
||||
image: ghcr.io/ollama-webui/ollama-webui:main
|
||||
- name: open-webui
|
||||
image: ghcr.io/open-webui/open-webui:main
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
resources:
|
||||
limits:
|
||||
requests:
|
||||
cpu: "500m"
|
||||
memory: "500Mi"
|
||||
limits:
|
||||
cpu: "1000m"
|
||||
memory: "1Gi"
|
||||
env:
|
||||
- name: OLLAMA_API_BASE_URL
|
||||
value: "http://ollama-service.ollama-namespace.svc.cluster.local:11434/api"
|
||||
value: "http://ollama-service.open-webui.svc.cluster.local:11434/api"
|
||||
tty: true
|
||||
volumeMounts:
|
||||
- name: webui-volume
|
||||
mountPath: /app/backend/data
|
||||
volumes:
|
||||
- name: webui-volume
|
||||
persistentVolumeClaim:
|
||||
claimName: ollama-webui-pvc
|
|
@ -1,20 +1,20 @@
|
|||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: ollama-webui-ingress
|
||||
namespace: ollama-namespace
|
||||
name: open-webui-ingress
|
||||
namespace: open-webui
|
||||
#annotations:
|
||||
# Use appropriate annotations for your Ingress controller, e.g., for NGINX:
|
||||
# nginx.ingress.kubernetes.io/rewrite-target: /
|
||||
spec:
|
||||
rules:
|
||||
- host: ollama.minikube.local
|
||||
- host: open-webui.minikube.local
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: ollama-webui-service
|
||||
name: open-webui-service
|
||||
port:
|
||||
number: 8080
|
||||
|
|
12
kubernetes/manifest/base/webui-pvc.yaml
Normal file
|
@ -0,0 +1,12 @@
|
|||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
labels:
|
||||
app: ollama-webui
|
||||
name: ollama-webui-pvc
|
||||
namespace: open-webui
|
||||
spec:
|
||||
accessModes: ["ReadWriteOnce"]
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
|
@ -1,12 +1,12 @@
|
|||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ollama-webui-service
|
||||
namespace: ollama-namespace
|
||||
name: open-webui-service
|
||||
namespace: open-webui
|
||||
spec:
|
||||
type: NodePort # Use LoadBalancer if you're on a cloud that supports it
|
||||
selector:
|
||||
app: ollama-webui
|
||||
app: open-webui
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 8080
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
resources:
|
||||
- base/ollama-namespace.yaml
|
||||
- base/open-webui.yaml
|
||||
- base/ollama-service.yaml
|
||||
- base/ollama-statefulset.yaml
|
||||
- base/webui-deployment.yaml
|
||||
- base/webui-service.yaml
|
||||
- base/webui-ingress.yaml
|
||||
- base/webui-pvc.yaml
|
||||
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
|
|
@ -2,7 +2,7 @@ apiVersion: apps/v1
|
|||
kind: StatefulSet
|
||||
metadata:
|
||||
name: ollama
|
||||
namespace: ollama-namespace
|
||||
namespace: open-webui
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
|
|
477
package-lock.json
generated
|
@ -1,14 +1,15 @@
|
|||
{
|
||||
"name": "ollama-webui",
|
||||
"version": "0.0.1",
|
||||
"name": "open-webui",
|
||||
"version": "v1.0.0-alpha.101",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "ollama-webui",
|
||||
"version": "0.0.1",
|
||||
"name": "open-webui",
|
||||
"version": "v1.0.0-alpha.101",
|
||||
"dependencies": {
|
||||
"@sveltejs/adapter-node": "^1.3.1",
|
||||
"async": "^3.2.5",
|
||||
"dayjs": "^1.11.10",
|
||||
"file-saver": "^2.0.5",
|
||||
"highlight.js": "^11.9.0",
|
||||
|
@ -37,6 +38,7 @@
|
|||
"prettier-plugin-svelte": "^2.10.1",
|
||||
"svelte": "^4.0.5",
|
||||
"svelte-check": "^3.4.3",
|
||||
"svelte-confetti": "^1.3.2",
|
||||
"tailwindcss": "^3.3.3",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^5.0.0",
|
||||
|
@ -76,51 +78,6 @@
|
|||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-arm": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz",
|
||||
"integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/android-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/darwin-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz",
|
||||
|
@ -136,276 +93,6 @@
|
|||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/darwin-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/freebsd-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/freebsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-arm": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz",
|
||||
"integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-ia32": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz",
|
||||
"integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-loong64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz",
|
||||
"integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==",
|
||||
"cpu": [
|
||||
"loong64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-mips64el": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz",
|
||||
"integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==",
|
||||
"cpu": [
|
||||
"mips64el"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-ppc64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz",
|
||||
"integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-riscv64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz",
|
||||
"integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==",
|
||||
"cpu": [
|
||||
"riscv64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-s390x": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz",
|
||||
"integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/netbsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"netbsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/openbsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"openbsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/sunos-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"sunos"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-ia32": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz",
|
||||
"integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/win32-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@eslint-community/eslint-utils": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
|
||||
|
@ -1250,6 +937,11 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/async": {
|
||||
"version": "3.2.5",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz",
|
||||
"integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg=="
|
||||
},
|
||||
"node_modules/autoprefixer": {
|
||||
"version": "10.4.16",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz",
|
||||
|
@ -3483,6 +3175,15 @@
|
|||
"svelte": "^3.55.0 || ^4.0.0-next.0 || ^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/svelte-confetti": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/svelte-confetti/-/svelte-confetti-1.3.2.tgz",
|
||||
"integrity": "sha512-R+JwFTC7hIgWVA/OuXrkj384B7CMoceb0t9VacyW6dORTQg0pWojVBB8Bo3tM30cLEQE48Fekzqgx+XSzHESMA==",
|
||||
"dev": true,
|
||||
"peerDependencies": {
|
||||
"svelte": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/svelte-eslint-parser": {
|
||||
"version": "0.33.1",
|
||||
"resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-0.33.1.tgz",
|
||||
|
@ -4022,138 +3723,12 @@
|
|||
"@jridgewell/trace-mapping": "^0.3.9"
|
||||
}
|
||||
},
|
||||
"@esbuild/android-arm": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz",
|
||||
"integrity": "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/android-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/android-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/darwin-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/darwin-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/freebsd-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/freebsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-arm": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz",
|
||||
"integrity": "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-ia32": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz",
|
||||
"integrity": "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-loong64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz",
|
||||
"integrity": "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-mips64el": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz",
|
||||
"integrity": "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-ppc64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz",
|
||||
"integrity": "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-riscv64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz",
|
||||
"integrity": "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-s390x": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz",
|
||||
"integrity": "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/linux-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/netbsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/openbsd-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/sunos-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/win32-arm64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz",
|
||||
"integrity": "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/win32-ia32": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz",
|
||||
"integrity": "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==",
|
||||
"optional": true
|
||||
},
|
||||
"@esbuild/win32-x64": {
|
||||
"version": "0.18.20",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz",
|
||||
"integrity": "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==",
|
||||
"optional": true
|
||||
},
|
||||
"@eslint-community/eslint-utils": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
|
||||
|
@ -4735,6 +4310,11 @@
|
|||
"integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
|
||||
"dev": true
|
||||
},
|
||||
"async": {
|
||||
"version": "3.2.5",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz",
|
||||
"integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg=="
|
||||
},
|
||||
"autoprefixer": {
|
||||
"version": "10.4.16",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz",
|
||||
|
@ -6282,6 +5862,13 @@
|
|||
"typescript": "^5.0.3"
|
||||
}
|
||||
},
|
||||
"svelte-confetti": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/svelte-confetti/-/svelte-confetti-1.3.2.tgz",
|
||||
"integrity": "sha512-R+JwFTC7hIgWVA/OuXrkj384B7CMoceb0t9VacyW6dORTQg0pWojVBB8Bo3tM30cLEQE48Fekzqgx+XSzHESMA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
},
|
||||
"svelte-eslint-parser": {
|
||||
"version": "0.33.1",
|
||||
"resolved": "https://registry.npmjs.org/svelte-eslint-parser/-/svelte-eslint-parser-0.33.1.tgz",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "ollama-webui",
|
||||
"version": "0.0.1",
|
||||
"name": "open-webui",
|
||||
"version": "0.1.102",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "vite dev --host",
|
||||
|
@ -32,6 +32,7 @@
|
|||
"prettier-plugin-svelte": "^2.10.1",
|
||||
"svelte": "^4.0.5",
|
||||
"svelte-check": "^3.4.3",
|
||||
"svelte-confetti": "^1.3.2",
|
||||
"tailwindcss": "^3.3.3",
|
||||
"tslib": "^2.4.1",
|
||||
"typescript": "^5.0.0",
|
||||
|
@ -40,6 +41,7 @@
|
|||
"type": "module",
|
||||
"dependencies": {
|
||||
"@sveltejs/adapter-node": "^1.3.1",
|
||||
"async": "^3.2.5",
|
||||
"dayjs": "^1.11.10",
|
||||
"file-saver": "^2.0.5",
|
||||
"highlight.js": "^11.9.0",
|
||||
|
|
|
@ -11,8 +11,8 @@ TICK='\u2713'
|
|||
|
||||
# Detect GPU driver
|
||||
get_gpu_driver() {
|
||||
# Detect NVIDIA GPUs
|
||||
if lspci | grep -i nvidia >/dev/null; then
|
||||
# Detect NVIDIA GPUs using lspci or nvidia-smi
|
||||
if lspci | grep -i nvidia >/dev/null || nvidia-smi >/dev/null 2>&1; then
|
||||
echo "nvidia"
|
||||
return
|
||||
fi
|
||||
|
@ -181,6 +181,9 @@ else
|
|||
DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.data.yaml"
|
||||
export OLLAMA_DATA_DIR=$data_dir # Set OLLAMA_DATA_DIR environment variable
|
||||
fi
|
||||
if [[ -n $webui_port ]]; then
|
||||
export OLLAMA_WEBUI_PORT=$webui_port # Set OLLAMA_WEBUI_PORT environment variable
|
||||
fi
|
||||
DEFAULT_COMPOSE_COMMAND+=" up -d"
|
||||
DEFAULT_COMPOSE_COMMAND+=" --remove-orphans"
|
||||
DEFAULT_COMPOSE_COMMAND+=" --force-recreate"
|
||||
|
|
|
@ -1,7 +1,19 @@
|
|||
#!/bin/bash
|
||||
|
||||
host_port=11434
|
||||
container_port=11434
|
||||
|
||||
read -r -p "Do you want ollama in Docker with GPU support? (y/n): " use_gpu
|
||||
|
||||
docker rm -f ollama || true
|
||||
docker pull ollama/ollama
|
||||
# CPU Only
|
||||
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
|
||||
# GPU Support
|
||||
# docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
|
||||
docker pull ollama/ollama:latest
|
||||
|
||||
docker_args="-d -v ollama:/root/.ollama -p $host_port:$container_port --name ollama ollama/ollama"
|
||||
|
||||
if [ "$use_gpu" = "y" ]; then
|
||||
docker_args="--gpus=all $docker_args"
|
||||
fi
|
||||
|
||||
docker run $docker_args
|
||||
|
||||
docker image prune -f
|
22
run.sh
|
@ -1,5 +1,19 @@
|
|||
docker build -t ollama-webui .
|
||||
docker stop ollama-webui || true
|
||||
docker rm ollama-webui || true
|
||||
docker run -d -p 3000:8080 --add-host=host.docker.internal:host-gateway -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
|
||||
#!/bin/bash
|
||||
|
||||
image_name="open-webui"
|
||||
container_name="open-webui"
|
||||
host_port=3000
|
||||
container_port=8080
|
||||
|
||||
docker build -t "$image_name" .
|
||||
docker stop "$container_name" &>/dev/null || true
|
||||
docker rm "$container_name" &>/dev/null || true
|
||||
|
||||
docker run -d -p "$host_port":"$container_port" \
|
||||
--add-host=host.docker.internal:host-gateway \
|
||||
-v "${image_name}:/app/backend/data" \
|
||||
--name "$container_name" \
|
||||
--restart always \
|
||||
"$image_name"
|
||||
|
||||
docker image prune -f
|
|
@ -37,8 +37,8 @@ math {
|
|||
}
|
||||
|
||||
::-webkit-scrollbar {
|
||||
height: 0.45rem;
|
||||
width: 0.35rem;
|
||||
height: 0.4rem;
|
||||
width: 0.4rem;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
|
|
31
src/lib/apis/audio/index.ts
Normal file
|
@ -0,0 +1,31 @@
|
|||
import { AUDIO_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const transcribeAudio = async (token: string, file: File) => {
|
||||
const data = new FormData();
|
||||
data.append('file', file);
|
||||
|
||||
let error = null;
|
||||
const res = await fetch(`${AUDIO_API_BASE_URL}/transcribe`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: data
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
|
@ -89,6 +89,37 @@ export const userSignUp = async (name: string, email: string, password: string)
|
|||
return res;
|
||||
};
|
||||
|
||||
export const updateUserProfile = async (token: string, name: string, profileImageUrl: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/update/profile`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: name,
|
||||
profile_image_url: profileImageUrl
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateUserPassword = async (token: string, password: string, newPassword: string) => {
|
||||
let error = null;
|
||||
|
||||
|
@ -147,6 +178,63 @@ export const getSignUpEnabledStatus = async (token: string) => {
|
|||
return res;
|
||||
};
|
||||
|
||||
export const getDefaultUserRole = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/user/role`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateDefaultUserRole = async (token: string, role: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/user/role`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
role: role
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const toggleSignUpEnabledStatus = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
|
@ -173,3 +261,60 @@ export const toggleSignUpEnabledStatus = async (token: string) => {
|
|||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getJWTExpiresDuration = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/token/expires`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateJWTExpiresDuration = async (token: string, duration: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/token/expires/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
duration: duration
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
|
|
@ -93,6 +93,99 @@ export const getAllChats = async (token: string) => {
|
|||
return res;
|
||||
};
|
||||
|
||||
export const getAllUserChats = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/all/db`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getAllChatTags = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/tags/all`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getChatListByTagName = async (token: string = '', tagName: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/chats/tags/tag/${tagName}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getChatById = async (token: string, id: string) => {
|
||||
let error = null;
|
||||
|
||||
|
@ -170,6 +263,141 @@ export const deleteChatById = async (token: string, id: string) => {
|
|||
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.then((json) => {
			return json;
		})
		.catch((err) => {
			error = err.detail;

			console.log(err);
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const getTagsById = async (token: string, id: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'GET',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.then((json) => {
			return json;
		})
		.catch((err) => {
			error = err;

			console.log(err);
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const addTagById = async (token: string, id: string, tagName: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			tag_name: tagName,
			chat_id: id
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.then((json) => {
			return json;
		})
		.catch((err) => {
			error = err;

			console.log(err);
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};

export const deleteTagById = async (token: string, id: string, tagName: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags`, {
		method: 'DELETE',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		},
		body: JSON.stringify({
			tag_name: tagName,
			chat_id: id
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.then((json) => {
			return json;
		})
		.catch((err) => {
			error = err;

			console.log(err);
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};
export const deleteTagsById = async (token: string, id: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/${id}/tags/all`, {
		method: 'DELETE',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			...(token && { authorization: `Bearer ${token}` })
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
|
|
|
@@ -29,3 +29,33 @@ export const setDefaultModels = async (token: string, models: string) => {
|
|||
|
||||
	return res;
};

export const setDefaultPromptSuggestions = async (token: string, promptSuggestions: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/configs/default/suggestions`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			suggestions: promptSuggestions
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = err.detail;
			return null;
		});

	if (error) {
		throw error;
	}

	return res;
};
|
||||
|
|
218
src/lib/apis/documents/index.ts
Normal file
|
@@ -0,0 +1,218 @@
|
|||
import { WEBUI_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const createNewDoc = async (
|
||||
token: string,
|
||||
collection_name: string,
|
||||
filename: string,
|
||||
name: string,
|
||||
title: string
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/create`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
collection_name: collection_name,
|
||||
filename: filename,
|
||||
name: name,
|
||||
title: title
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getDocs = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getDocByName = async (token: string, name: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/name/${name}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
type DocUpdateForm = {
|
||||
name: string;
|
||||
title: string;
|
||||
};
|
||||
|
||||
export const updateDocByName = async (token: string, name: string, form: DocUpdateForm) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/name/${name}/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: form.name,
|
||||
title: form.title
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
type TagDocForm = {
|
||||
name: string;
|
||||
tags: string[];
|
||||
};
|
||||
|
||||
export const tagDocByName = async (token: string, name: string, form: TagDocForm) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/name/${name}/tags`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: form.name,
|
||||
tags: form.tags
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const deleteDocByName = async (token: string, name: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/documents/name/${name}/delete`, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.then((json) => {
|
||||
return json;
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
|
||||
console.log(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
333
src/lib/apis/images/index.ts
Normal file
|
@@ -0,0 +1,333 @@
|
|||
import { IMAGES_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const getImageGenerationEnabledStatus = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/enabled`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const toggleImageGenerationEnabledStatus = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/enabled/toggle`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getAUTOMATIC1111Url = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/url`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.AUTOMATIC1111_BASE_URL;
|
||||
};
|
||||
|
||||
export const updateAUTOMATIC1111Url = async (token: string = '', url: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/url/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: url
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.AUTOMATIC1111_BASE_URL;
|
||||
};
|
||||
|
||||
export const getImageSize = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/size`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.IMAGE_SIZE;
|
||||
};
|
||||
|
||||
export const updateImageSize = async (token: string = '', size: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/size/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
size: size
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.IMAGE_SIZE;
|
||||
};
|
||||
|
||||
export const getDiffusionModels = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/models`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getDefaultDiffusionModel = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/models/default`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.model;
|
||||
};
|
||||
|
||||
export const updateDefaultDiffusionModel = async (token: string = '', model: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/models/default/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res.model;
|
||||
};
|
||||
|
||||
export const imageGenerations = async (token: string = '', prompt: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${IMAGES_API_BASE_URL}/generations`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
prompt: prompt
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
} else {
|
||||
error = 'Server connection failed';
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
|
@@ -1,9 +1,31 @@
|
|||
import { WEBUI_API_BASE_URL } from '$lib/constants';
|
||||
import { WEBUI_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const getBackendConfig = async () => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/`, {
|
||||
const res = await fetch(`${WEBUI_BASE_URL}/api/config`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err;
|
||||
return null;
|
||||
});
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getChangelog = async () => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_BASE_URL}/api/changelog`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
|
|
|
@@ -133,9 +133,19 @@ export const getOllamaModels = async (token: string = '') => {
|
|||
});
|
||||
};
|
||||
|
||||
export const generateTitle = async (token: string = '', model: string, prompt: string) => {
|
||||
// TODO: migrate to backend
|
||||
export const generateTitle = async (
|
||||
token: string = '',
|
||||
template: string,
|
||||
model: string,
|
||||
prompt: string
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
template = template.replace(/{{prompt}}/g, prompt);
|
||||
|
||||
console.log(template);
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
@@ -144,7 +154,7 @@ export const generateTitle = async (token: string = '', model: string, prompt: s
|
|||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
prompt: `Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': ${prompt}`,
|
||||
prompt: template,
|
||||
stream: false
|
||||
})
|
||||
})
|
||||
|
@@ -167,10 +177,50 @@ export const generateTitle = async (token: string = '', model: string, prompt: s
|
|||
return res?.response ?? 'New Chat';
|
||||
};
|
||||
|
||||
export const generatePrompt = async (token: string = '', model: string, conversation: string) => {
|
||||
let error = null;
|
||||
|
||||
if (conversation === '') {
|
||||
conversation = '[no existing conversation]';
|
||||
}
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
prompt: `Conversation:
|
||||
${conversation}
|
||||
|
||||
As USER in the conversation above, your task is to continue the conversation. Remember, Your responses should be crafted as if you're a human conversing in a natural, realistic manner, keeping in mind the context and flow of the dialogue. Please generate a fitting response to the last message in the conversation, or if there is no existing conversation, initiate one as a normal person would.
|
||||
|
||||
Response:
|
||||
`
|
||||
})
|
||||
}).catch((err) => {
|
||||
console.log(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const generateChatCompletion = async (token: string = '', body: object) => {
|
||||
let controller = new AbortController();
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
|
||||
signal: controller.signal,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
|
@@ -186,6 +236,27 @@ export const generateChatCompletion = async (token: string = '', body: object) =
|
|||
throw error;
|
||||
}
|
||||
|
||||
	return [res, controller];
};

export const cancelChatCompletion = async (token: string = '', requestId: string) => {
	let error = null;

	const res = await fetch(`${OLLAMA_API_BASE_URL}/cancel/${requestId}`, {
		method: 'GET',
		headers: {
			'Content-Type': 'text/event-stream',
			Authorization: `Bearer ${token}`
		}
	}).catch((err) => {
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
|
||||
|
||||
|
@@ -261,13 +332,30 @@ export const pullModel = async (token: string, tagName: string) => {
|
|||
name: tagName
|
||||
})
|
||||
}).catch((err) => {
|
||||
console.log(err);
|
||||
error = err;
|
||||
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
// export const pullModel = async (token: string, tagName: string) => {
|
||||
// return await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
|
||||
// method: 'POST',
|
||||
// headers: {
|
||||
// 'Content-Type': 'text/event-stream',
|
||||
// Authorization: `Bearer ${token}`
|
||||
// },
|
||||
// body: JSON.stringify({
|
||||
// name: tagName
|
||||
// })
|
||||
// });
|
||||
// };
|
||||
|
|
|
@@ -229,3 +229,34 @@ export const generateOpenAIChatCompletion = async (token: string = '', body: obj
|
|||
|
||||
	return res;
};

export const synthesizeOpenAISpeech = async (
	token: string = '',
	speaker: string = 'alloy',
	text: string = ''
) => {
	let error = null;

	const res = await fetch(`${OPENAI_API_BASE_URL}/audio/speech`, {
		method: 'POST',
		headers: {
			Authorization: `Bearer ${token}`,
			'Content-Type': 'application/json'
		},
		body: JSON.stringify({
			model: 'tts-1',
			input: text,
			voice: speaker
		})
	}).catch((err) => {
		console.log(err);
		error = err;
		return null;
	});

	if (error) {
		throw error;
	}

	return res;
};
|
||||
|
|
|
@@ -1,5 +1,120 @@
|
|||
import { RAG_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const getChunkParams = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/chunk`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateChunkParams = async (token: string, size: number, overlap: number) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/chunk/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
chunk_size: size,
|
||||
chunk_overlap: overlap
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const getRAGTemplate = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/template`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res?.template ?? '';
|
||||
};
|
||||
|
||||
export const updateRAGTemplate = async (token: string, template: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/template/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
template: template
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const uploadDocToVectorDB = async (token: string, collection_name: string, file: File) => {
|
||||
const data = new FormData();
|
||||
data.append('file', file);
|
||||
|
@@ -64,30 +179,90 @@ export const uploadWebToVectorDB = async (token: string, collection_name: string
|
|||
return res;
|
||||
};
|
||||
|
||||
export const queryVectorDB = async (
|
||||
export const queryDoc = async (
|
||||
token: string,
|
||||
collection_name: string,
|
||||
query: string,
|
||||
k: number
|
||||
) => {
|
||||
let error = null;
|
||||
const searchParams = new URLSearchParams();
|
||||
|
||||
searchParams.set('query', query);
|
||||
if (k) {
|
||||
searchParams.set('k', k.toString());
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/query/doc`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
collection_name: collection_name,
|
||||
query: query,
|
||||
k: k
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const res = await fetch(
|
||||
`${RAG_API_BASE_URL}/query/${collection_name}/?${searchParams.toString()}`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
}
|
||||
return res;
|
||||
};
|
||||
|
||||
export const queryCollection = async (
|
||||
token: string,
|
||||
collection_names: string,
|
||||
query: string,
|
||||
k: number
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/query/collection`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
collection_names: collection_names,
|
||||
query: query,
|
||||
k: k
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const scanDocs = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${RAG_API_BASE_URL}/scan`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
authorization: `Bearer ${token}`
|
||||
}
|
||||
)
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
|
|
|
@@ -1,5 +1,62 @@
|
|||
import { WEBUI_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const getUserPermissions = async (token: string) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/users/permissions/user`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
}
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateUserPermissions = async (token: string, permissions: object) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_API_BASE_URL}/users/permissions/user`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...permissions
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.detail;
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
return res;
|
||||
};
|
||||
|
||||
export const updateUserRole = async (token: string, id: string, role: string) => {
|
||||
let error = null;
|
||||
|
||||
|
|
23
src/lib/apis/utils/index.ts
Normal file
|
@@ -0,0 +1,23 @@
|
|||
import { WEBUI_API_BASE_URL } from '$lib/constants';

export const getGravatarUrl = async (email: string) => {
	let error = null;

	const res = await fetch(`${WEBUI_API_BASE_URL}/utils/gravatar?email=${email}`, {
		method: 'GET',
		headers: {
			'Content-Type': 'application/json'
		}
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			error = err;
			return null;
		});

	return res;
};
|
8
src/lib/components/AddFilesPlaceholder.svelte
Normal file
|
@@ -0,0 +1,8 @@
|
|||
<div class=" text-center text-6xl mb-3">📄</div>
<div class="text-center dark:text-white text-2xl font-semibold z-50">Add Files</div>

<slot
	><div class=" mt-2 text-center text-sm dark:text-gray-200 w-full">
		Drop any files here to add to the conversation
	</div>
</slot>
|
115
src/lib/components/ChangelogModal.svelte
Normal file
|
@@ -0,0 +1,115 @@
|
|||
<script lang="ts">
|
||||
import { onMount } from 'svelte';
|
||||
import { Confetti } from 'svelte-confetti';
|
||||
|
||||
import { WEBUI_NAME, config } from '$lib/stores';
|
||||
|
||||
import { WEBUI_VERSION } from '$lib/constants';
|
||||
import { getChangelog } from '$lib/apis';
|
||||
|
||||
import Modal from './common/Modal.svelte';
|
||||
|
||||
export let show = false;
|
||||
|
||||
let changelog = null;
|
||||
|
||||
onMount(async () => {
|
||||
const res = await getChangelog();
|
||||
changelog = res;
|
||||
});
|
||||
</script>
|
||||
|
||||
<Modal bind:show>
|
||||
<div class="px-5 py-4 dark:text-gray-300">
|
||||
<div class="flex justify-between items-start">
|
||||
<div class="text-xl font-bold">
|
||||
What’s New in {$WEBUI_NAME}
|
||||
<Confetti x={[-1, -0.25]} y={[0, 0.5]} />
|
||||
</div>
|
||||
<button
|
||||
class="self-center"
|
||||
on:click={() => {
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-5 h-5"
|
||||
>
|
||||
<path
|
||||
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
<div class="flex items-center mt-1">
|
||||
<div class="text-sm dark:text-gray-200">Release Notes</div>
|
||||
<div class="flex self-center w-[1px] h-6 mx-2.5 bg-gray-200 dark:bg-gray-700" />
|
||||
<div class="text-sm dark:text-gray-200">
|
||||
v{WEBUI_VERSION}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-800" />
|
||||
|
||||
<div class=" w-full p-4 px-5">
|
||||
<div class=" overflow-y-scroll max-h-80">
|
||||
<div class="mb-3">
|
||||
{#if changelog}
|
||||
{#each Object.keys(changelog) as version}
|
||||
<div class=" mb-3 pr-2">
|
||||
<div class="font-bold text-xl mb-1 dark:text-white">
|
||||
v{version} - {changelog[version].date}
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-800 my-2" />
|
||||
|
||||
{#each Object.keys(changelog[version]).filter((section) => section !== 'date') as section}
|
||||
<div class="">
|
||||
<div
|
||||
class="font-bold uppercase text-xs {section === 'added'
|
||||
? 'text-white bg-blue-600'
|
||||
: section === 'fixed'
|
||||
? 'text-white bg-green-600'
|
||||
: section === 'changed'
|
||||
? 'text-white bg-yellow-600'
|
||||
: section === 'removed'
|
||||
? 'text-white bg-red-600'
|
||||
: ''} w-fit px-3 rounded-full my-2.5"
|
||||
>
|
||||
{section}
|
||||
</div>
|
||||
|
||||
<div class="my-2.5 px-1.5">
|
||||
{#each Object.keys(changelog[version][section]) as item}
|
||||
<div class="text-sm mb-2">
|
||||
<div class="font-semibold uppercase">
|
||||
{changelog[version][section][item].title}
|
||||
</div>
|
||||
<div class="mb-2 mt-1">{changelog[version][section][item].content}</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex justify-end pt-3 text-sm font-medium">
|
||||
<button
|
||||
on:click={() => {
|
||||
localStorage.version = $config.version;
|
||||
show = false;
|
||||
}}
|
||||
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
|
||||
>
|
||||
<span class="relative">Okay, Let's Go!</span>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
140
src/lib/components/admin/Settings/General.svelte
Normal file
|
@@ -0,0 +1,140 @@
|
|||
<script lang="ts">
|
||||
import {
|
||||
getDefaultUserRole,
|
||||
getJWTExpiresDuration,
|
||||
getSignUpEnabledStatus,
|
||||
toggleSignUpEnabledStatus,
|
||||
updateDefaultUserRole,
|
||||
updateJWTExpiresDuration
|
||||
} from '$lib/apis/auths';
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
export let saveHandler: Function;
|
||||
let signUpEnabled = true;
|
||||
let defaultUserRole = 'pending';
|
||||
let JWTExpiresIn = '';
|
||||
|
||||
const toggleSignUpEnabled = async () => {
|
||||
signUpEnabled = await toggleSignUpEnabledStatus(localStorage.token);
|
||||
};
|
||||
|
||||
const updateDefaultUserRoleHandler = async (role) => {
|
||||
defaultUserRole = await updateDefaultUserRole(localStorage.token, role);
|
||||
};
|
||||
|
||||
const updateJWTExpiresDurationHandler = async (duration) => {
|
||||
JWTExpiresIn = await updateJWTExpiresDuration(localStorage.token, duration);
|
||||
};
|
||||
|
||||
onMount(async () => {
|
||||
signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
|
||||
defaultUserRole = await getDefaultUserRole(localStorage.token);
|
||||
JWTExpiresIn = await getJWTExpiresDuration(localStorage.token);
|
||||
});
|
||||
</script>
|
||||
|
||||
<form
|
||||
class="flex flex-col h-full justify-between space-y-3 text-sm"
|
||||
on:submit|preventDefault={() => {
|
||||
// console.log('submit');
|
||||
updateJWTExpiresDurationHandler(JWTExpiresIn);
|
||||
saveHandler();
|
||||
}}
|
||||
>
|
||||
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
|
||||
<div>
|
||||
<div class=" mb-2 text-sm font-medium">General Settings</div>
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">Enable New Sign Ups</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded transition"
|
||||
on:click={() => {
|
||||
toggleSignUpEnabled();
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if signUpEnabled}
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
d="M11.5 1A3.5 3.5 0 0 0 8 4.5V7H2.5A1.5 1.5 0 0 0 1 8.5v5A1.5 1.5 0 0 0 2.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 9.5 7V4.5a2 2 0 1 1 4 0v1.75a.75.75 0 0 0 1.5 0V4.5A3.5 3.5 0 0 0 11.5 1Z"
|
||||
/>
|
||||
</svg>
|
||||
<span class="ml-2 self-center">Enabled</span>
|
||||
{:else}
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M8 1a3.5 3.5 0 0 0-3.5 3.5V7A1.5 1.5 0 0 0 3 8.5v5A1.5 1.5 0 0 0 4.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 11.5 7V4.5A3.5 3.5 0 0 0 8 1Zm2 6V4.5a2 2 0 1 0-4 0V7h4Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
|
||||
<span class="ml-2 self-center">Disabled</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">Default User Role</div>
|
||||
<div class="flex items-center relative">
|
||||
<select
|
||||
class="w-fit pr-8 rounded py-2 px-2 text-xs bg-transparent outline-none text-right"
|
||||
bind:value={defaultUserRole}
|
||||
placeholder="Select a theme"
|
||||
on:change={(e) => {
|
||||
updateDefaultUserRoleHandler(e.target.value);
|
||||
}}
|
||||
>
|
||||
<option value="pending">Pending</option>
|
||||
<option value="user">User</option>
|
||||
<option value="admin">Admin</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-700 my-3" />
|
||||
|
||||
<div class=" w-full justify-between">
|
||||
<div class="flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">JWT Expiration</div>
|
||||
</div>
|
||||
|
||||
<div class="flex mt-2 space-x-2">
|
||||
<input
|
||||
class="w-full rounded py-1.5 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none border border-gray-100 dark:border-gray-600"
|
||||
type="text"
|
||||
placeholder={`e.g.) "30m","1h", "10d". `}
|
||||
bind:value={JWTExpiresIn}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
|
||||
Valid time units: <span class=" text-gray-300 font-medium"
|
||||
>'s', 'm', 'h', 'd', 'w' or '-1' for no expiration.</span
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex justify-end pt-3 text-sm font-medium">
|
||||
<button
|
||||
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
|
||||
type="submit"
|
||||
>
|
||||
Save
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
82
src/lib/components/admin/Settings/Users.svelte
Normal file
|
@@ -0,0 +1,82 @@
|
|||
<script lang="ts">
|
||||
import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
|
||||
import { getUserPermissions, updateUserPermissions } from '$lib/apis/users';
|
||||
import { onMount } from 'svelte';
|
||||
|
||||
export let saveHandler: Function;
|
||||
|
||||
let permissions = {
|
||||
chat: {
|
||||
deletion: true
|
||||
}
|
||||
};
|
||||
|
||||
onMount(async () => {
|
||||
permissions = await getUserPermissions(localStorage.token);
|
||||
});
|
||||
</script>
|
||||
|
||||
<form
|
||||
class="flex flex-col h-full justify-between space-y-3 text-sm"
|
||||
on:submit|preventDefault={async () => {
|
||||
// console.log('submit');
|
||||
await updateUserPermissions(localStorage.token, permissions);
|
||||
saveHandler();
|
||||
}}
|
||||
>
|
||||
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
|
||||
<div>
|
||||
<div class=" mb-2 text-sm font-medium">User Permissions</div>
|
||||
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">Allow Chat Deletion</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded transition"
|
||||
on:click={() => {
|
||||
permissions.chat.deletion = !permissions.chat.deletion;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if permissions.chat.deletion}
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
d="M11.5 1A3.5 3.5 0 0 0 8 4.5V7H2.5A1.5 1.5 0 0 0 1 8.5v5A1.5 1.5 0 0 0 2.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 9.5 7V4.5a2 2 0 1 1 4 0v1.75a.75.75 0 0 0 1.5 0V4.5A3.5 3.5 0 0 0 11.5 1Z"
|
||||
/>
|
||||
</svg>
|
||||
<span class="ml-2 self-center">Allow</span>
|
||||
{:else}
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M8 1a3.5 3.5 0 0 0-3.5 3.5V7A1.5 1.5 0 0 0 3 8.5v5A1.5 1.5 0 0 0 4.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 11.5 7V4.5A3.5 3.5 0 0 0 8 1Zm2 6V4.5a2 2 0 1 0-4 0V7h4Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
|
||||
<span class="ml-2 self-center">Don't Allow</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex justify-end pt-3 text-sm font-medium">
|
||||
<button
|
||||
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
|
||||
type="submit"
|
||||
>
|
||||
Save
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
107
src/lib/components/admin/SettingsModal.svelte
Normal file
|
@@ -0,0 +1,107 @@
|
|||
<script>
|
||||
import Modal from '../common/Modal.svelte';
|
||||
|
||||
import General from './Settings/General.svelte';
|
||||
import Users from './Settings/Users.svelte';
|
||||
|
||||
export let show = false;
|
||||
|
||||
let selectedTab = 'general';
|
||||
</script>
|
||||
|
||||
<Modal bind:show>
|
||||
<div>
|
||||
<div class=" flex justify-between dark:text-gray-300 px-5 py-4">
|
||||
<div class=" text-lg font-medium self-center">Admin Settings</div>
|
||||
<button
|
||||
class="self-center"
|
||||
on:click={() => {
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-5 h-5"
|
||||
>
|
||||
<path
|
||||
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
<hr class=" dark:border-gray-800" />
|
||||
|
||||
<div class="flex flex-col md:flex-row w-full p-4 md:space-x-4">
|
||||
<div
|
||||
class="tabs flex flex-row overflow-x-auto space-x-1 md:space-x-0 md:space-y-1 md:flex-col flex-1 md:flex-none md:w-40 dark:text-gray-200 text-xs text-left mb-3 md:mb-0"
|
||||
>
|
||||
<button
|
||||
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
|
||||
'general'
|
||||
? 'bg-gray-200 dark:bg-gray-700'
|
||||
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'general';
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M6.955 1.45A.5.5 0 0 1 7.452 1h1.096a.5.5 0 0 1 .497.45l.17 1.699c.484.12.94.312 1.356.562l1.321-1.081a.5.5 0 0 1 .67.033l.774.775a.5.5 0 0 1 .034.67l-1.08 1.32c.25.417.44.873.561 1.357l1.699.17a.5.5 0 0 1 .45.497v1.096a.5.5 0 0 1-.45.497l-1.699.17c-.12.484-.312.94-.562 1.356l1.082 1.322a.5.5 0 0 1-.034.67l-.774.774a.5.5 0 0 1-.67.033l-1.322-1.08c-.416.25-.872.44-1.356.561l-.17 1.699a.5.5 0 0 1-.497.45H7.452a.5.5 0 0 1-.497-.45l-.17-1.699a4.973 4.973 0 0 1-1.356-.562L4.108 13.37a.5.5 0 0 1-.67-.033l-.774-.775a.5.5 0 0 1-.034-.67l1.08-1.32a4.971 4.971 0 0 1-.561-1.357l-1.699-.17A.5.5 0 0 1 1 8.548V7.452a.5.5 0 0 1 .45-.497l1.699-.17c.12-.484.312-.94.562-1.356L2.629 4.107a.5.5 0 0 1 .034-.67l.774-.774a.5.5 0 0 1 .67-.033L5.43 3.71a4.97 4.97 0 0 1 1.356-.561l.17-1.699ZM6 8c0 .538.212 1.026.558 1.385l.057.057a2 2 0 0 0 2.828-2.828l-.058-.056A2 2 0 0 0 6 8Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class=" self-center">General</div>
|
||||
</button>
|
||||
|
||||
<button
|
||||
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
|
||||
'users'
|
||||
? 'bg-gray-200 dark:bg-gray-700'
|
||||
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'users';
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
d="M8 8a2.5 2.5 0 1 0 0-5 2.5 2.5 0 0 0 0 5ZM3.156 11.763c.16-.629.44-1.21.813-1.72a2.5 2.5 0 0 0-2.725 1.377c-.136.287.102.58.418.58h1.449c.01-.077.025-.156.045-.237ZM12.847 11.763c.02.08.036.16.046.237h1.446c.316 0 .554-.293.417-.579a2.5 2.5 0 0 0-2.722-1.378c.374.51.653 1.09.813 1.72ZM14 7.5a1.5 1.5 0 1 1-3 0 1.5 1.5 0 0 1 3 0ZM3.5 9a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3ZM5 13c-.552 0-1.013-.455-.876-.99a4.002 4.002 0 0 1 7.753 0c.136.535-.324.99-.877.99H5Z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class=" self-center">Users</div>
|
||||
</button>
|
||||
</div>
|
||||
<div class="flex-1 md:min-h-[380px]">
|
||||
{#if selectedTab === 'general'}
|
||||
<General
|
||||
saveHandler={() => {
|
||||
show = false;
|
||||
}}
|
||||
/>
|
||||
{:else if selectedTab === 'users'}
|
||||
<Users
|
||||
saveHandler={() => {
|
||||
show = false;
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
|
@@ -2,11 +2,16 @@
|
|||
import toast from 'svelte-french-toast';
|
||||
import { onMount, tick } from 'svelte';
|
||||
import { settings } from '$lib/stores';
|
||||
import { calculateSHA256, findWordIndices } from '$lib/utils';
|
||||
import { blobToFile, calculateSHA256, findWordIndices } from '$lib/utils';
|
||||
|
||||
import Prompts from './MessageInput/PromptCommands.svelte';
|
||||
import Suggestions from './MessageInput/Suggestions.svelte';
|
||||
import { uploadDocToVectorDB } from '$lib/apis/rag';
|
||||
import { uploadDocToVectorDB, uploadWebToVectorDB } from '$lib/apis/rag';
|
||||
import AddFilesPlaceholder from '../AddFilesPlaceholder.svelte';
|
||||
import { SUPPORTED_FILE_TYPE, SUPPORTED_FILE_EXTENSIONS } from '$lib/constants';
|
||||
import Documents from './MessageInput/Documents.svelte';
|
||||
import Models from './MessageInput/Models.svelte';
|
||||
import { transcribeAudio } from '$lib/apis/audio';
|
||||
|
||||
export let submitPrompt: Function;
|
||||
export let stopResponse: Function;
|
||||
|
@@ -15,78 +20,205 @@
|
|||
export let autoScroll = true;
|
||||
|
||||
let filesInputElement;
|
||||
|
||||
let promptsElement;
|
||||
let documentsElement;
|
||||
let modelsElement;
|
||||
|
||||
let inputFiles;
|
||||
let dragged = false;
|
||||
|
||||
let user = null;
|
||||
let chatInputPlaceholder = '';
|
||||
|
||||
export let files = [];
|
||||
|
||||
export let fileUploadEnabled = true;
|
||||
export let speechRecognitionEnabled = true;
|
||||
export let speechRecognitionListening = false;
|
||||
|
||||
export let prompt = '';
|
||||
export let messages = [];
|
||||
|
||||
let speechRecognition;
|
||||
|
||||
$: if (prompt) {
|
||||
const chatInput = document.getElementById('chat-textarea');
|
||||
|
||||
if (chatInput) {
|
||||
chatInput.style.height = '';
|
||||
chatInput.style.height = Math.min(chatInput.scrollHeight, 200) + 'px';
|
||||
}
|
||||
}
|
||||
|
||||
let mediaRecorder;
|
||||
let audioChunks = [];
|
||||
let isRecording = false;
|
||||
const MIN_DECIBELS = -45;
|
||||
|
||||
const scrollToBottom = () => {
|
||||
const element = document.getElementById('messages-container');
|
||||
element.scrollTop = element.scrollHeight;
|
||||
};
|
||||
|
||||
const startRecording = async () => {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
||||
mediaRecorder = new MediaRecorder(stream);
|
||||
mediaRecorder.onstart = () => {
|
||||
isRecording = true;
|
||||
console.log('Recording started');
|
||||
};
|
||||
mediaRecorder.ondataavailable = (event) => audioChunks.push(event.data);
|
||||
mediaRecorder.onstop = async () => {
|
||||
isRecording = false;
|
||||
console.log('Recording stopped');
|
||||
|
||||
// Create a blob from the audio chunks
|
||||
const audioBlob = new Blob(audioChunks, { type: 'audio/wav' });
|
||||
|
||||
const file = blobToFile(audioBlob, 'recording.wav');
|
||||
|
||||
const res = await transcribeAudio(localStorage.token, file).catch((error) => {
|
||||
toast.error(error);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res) {
|
||||
prompt = res.text;
|
||||
await tick();
|
||||
|
||||
const inputElement = document.getElementById('chat-textarea');
|
||||
inputElement?.focus();
|
||||
|
||||
if (prompt !== '' && $settings?.speechAutoSend === true) {
|
||||
submitPrompt(prompt, user);
|
||||
}
|
||||
}
|
||||
|
||||
// saveRecording(audioBlob);
|
||||
audioChunks = [];
|
||||
};
|
||||
|
||||
// Start recording
|
||||
mediaRecorder.start();
|
||||
|
||||
// Monitor silence
|
||||
monitorSilence(stream);
|
||||
};
|
||||
|
||||
const monitorSilence = (stream) => {
|
||||
const audioContext = new AudioContext();
|
||||
const audioStreamSource = audioContext.createMediaStreamSource(stream);
|
||||
const analyser = audioContext.createAnalyser();
|
||||
analyser.minDecibels = MIN_DECIBELS;
|
||||
audioStreamSource.connect(analyser);
|
||||
|
||||
const bufferLength = analyser.frequencyBinCount;
|
||||
const domainData = new Uint8Array(bufferLength);
|
||||
|
||||
let lastSoundTime = Date.now();
|
||||
|
||||
const detectSound = () => {
|
||||
analyser.getByteFrequencyData(domainData);
|
||||
|
||||
if (domainData.some((value) => value > 0)) {
|
||||
lastSoundTime = Date.now();
|
||||
}
|
||||
|
||||
if (isRecording && Date.now() - lastSoundTime > 3000) {
|
||||
mediaRecorder.stop();
|
||||
audioContext.close();
|
||||
return;
|
||||
}
|
||||
|
||||
window.requestAnimationFrame(detectSound);
|
||||
};
|
||||
|
||||
window.requestAnimationFrame(detectSound);
|
||||
};
|
||||
|
||||
const saveRecording = (blob) => {
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement('a');
|
||||
document.body.appendChild(a);
|
||||
a.style = 'display: none';
|
||||
a.href = url;
|
||||
a.download = 'recording.wav';
|
||||
a.click();
|
||||
window.URL.revokeObjectURL(url);
|
||||
};
|
||||
|
||||
const speechRecognitionHandler = () => {
|
||||
// Check if SpeechRecognition is supported
|
||||
|
||||
if (speechRecognitionListening) {
|
||||
speechRecognition.stop();
|
||||
if (isRecording) {
|
||||
if (speechRecognition) {
|
||||
speechRecognition.stop();
|
||||
}
|
||||
|
||||
if (mediaRecorder) {
|
||||
mediaRecorder.stop();
|
||||
}
|
||||
} else {
|
||||
if ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window) {
|
||||
// Create a SpeechRecognition object
|
||||
speechRecognition = new (window.SpeechRecognition || window.webkitSpeechRecognition)();
|
||||
isRecording = true;
|
||||
|
||||
// Set continuous to true for continuous recognition
|
||||
speechRecognition.continuous = true;
|
||||
|
||||
// Set the timeout for turning off the recognition after inactivity (in milliseconds)
|
||||
const inactivityTimeout = 3000; // 3 seconds
|
||||
|
||||
let timeoutId;
|
||||
// Start recognition
|
||||
speechRecognition.start();
|
||||
speechRecognitionListening = true;
|
||||
|
||||
// Event triggered when speech is recognized
|
||||
speechRecognition.onresult = function (event) {
|
||||
// Clear the inactivity timeout
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
// Handle recognized speech
|
||||
console.log(event);
|
||||
const transcript = event.results[Object.keys(event.results).length - 1][0].transcript;
|
||||
prompt = `${prompt}${transcript}`;
|
||||
|
||||
// Restart the inactivity timeout
|
||||
timeoutId = setTimeout(() => {
|
||||
console.log('Speech recognition turned off due to inactivity.');
|
||||
speechRecognition.stop();
|
||||
}, inactivityTimeout);
|
||||
};
|
||||
|
||||
// Event triggered when recognition is ended
|
||||
speechRecognition.onend = function () {
|
||||
// Restart recognition after it ends
|
||||
console.log('recognition ended');
|
||||
speechRecognitionListening = false;
|
||||
if (prompt !== '' && $settings?.speechAutoSend === true) {
|
||||
submitPrompt(prompt);
|
||||
}
|
||||
};
|
||||
|
||||
// Event triggered when an error occurs
|
||||
speechRecognition.onerror = function (event) {
|
||||
console.log(event);
|
||||
toast.error(`Speech recognition error: ${event.error}`);
|
||||
speechRecognitionListening = false;
|
||||
};
|
||||
if (($settings?.audio?.STTEngine ?? '') !== '') {
|
||||
startRecording();
|
||||
} else {
|
||||
toast.error('SpeechRecognition API is not supported in this browser.');
|
||||
if ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window) {
|
||||
// Create a SpeechRecognition object
|
||||
speechRecognition = new (window.SpeechRecognition || window.webkitSpeechRecognition)();
|
||||
|
||||
// Set continuous to true for continuous recognition
|
||||
speechRecognition.continuous = true;
|
||||
|
||||
// Set the timeout for turning off the recognition after inactivity (in milliseconds)
|
||||
const inactivityTimeout = 3000; // 3 seconds
|
||||
|
||||
let timeoutId;
|
||||
// Start recognition
|
||||
speechRecognition.start();
|
||||
|
||||
// Event triggered when speech is recognized
|
||||
speechRecognition.onresult = async (event) => {
|
||||
// Clear the inactivity timeout
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
// Handle recognized speech
|
||||
console.log(event);
|
||||
const transcript = event.results[Object.keys(event.results).length - 1][0].transcript;
|
||||
|
||||
prompt = `${prompt}${transcript}`;
|
||||
|
||||
await tick();
|
||||
const inputElement = document.getElementById('chat-textarea');
|
||||
inputElement?.focus();
|
||||
|
||||
// Restart the inactivity timeout
|
||||
timeoutId = setTimeout(() => {
|
||||
console.log('Speech recognition turned off due to inactivity.');
|
||||
speechRecognition.stop();
|
||||
}, inactivityTimeout);
|
||||
};
|
||||
|
||||
// Event triggered when recognition is ended
|
||||
speechRecognition.onend = function () {
|
||||
// Restart recognition after it ends
|
||||
console.log('recognition ended');
|
||||
isRecording = false;
|
||||
if (prompt !== '' && $settings?.speechAutoSend === true) {
|
||||
submitPrompt(prompt, user);
|
||||
}
|
||||
};
|
||||
|
||||
// Event triggered when an error occurs
|
||||
speechRecognition.onerror = function (event) {
|
||||
console.log(event);
|
||||
toast.error(`Speech recognition error: ${event.error}`);
|
||||
isRecording = false;
|
||||
};
|
||||
} else {
|
||||
toast.error('SpeechRecognition API is not supported in this browser.');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -102,25 +234,77 @@
|
|||
error: ''
|
||||
};
|
||||
|
||||
files = [...files, doc];
|
||||
const res = await uploadDocToVectorDB(localStorage.token, '', file);
|
||||
try {
|
||||
files = [...files, doc];
|
||||
|
||||
if (res) {
|
||||
doc.upload_status = true;
|
||||
doc.collection_name = res.collection_name;
|
||||
files = files;
|
||||
if (['audio/mpeg', 'audio/wav'].includes(file['type'])) {
|
||||
const res = await transcribeAudio(localStorage.token, file).catch((error) => {
|
||||
toast.error(error);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res) {
|
||||
console.log(res);
|
||||
const blob = new Blob([res.text], { type: 'text/plain' });
|
||||
file = blobToFile(blob, `${file.name}.txt`);
|
||||
}
|
||||
}
|
||||
|
||||
const res = await uploadDocToVectorDB(localStorage.token, '', file);
|
||||
|
||||
if (res) {
|
||||
doc.upload_status = true;
|
||||
doc.collection_name = res.collection_name;
|
||||
files = files;
|
||||
}
|
||||
} catch (e) {
|
||||
// Remove the failed doc from the files array
|
||||
files = files.filter((f) => f.name !== file.name);
|
||||
toast.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
const uploadWeb = async (url) => {
|
||||
console.log(url);
|
||||
|
||||
const doc = {
|
||||
type: 'doc',
|
||||
name: url,
|
||||
collection_name: '',
|
||||
upload_status: false,
|
||||
url: url,
|
||||
error: ''
|
||||
};
|
||||
|
||||
try {
|
||||
files = [...files, doc];
|
||||
const res = await uploadWebToVectorDB(localStorage.token, '', url);
|
||||
|
||||
if (res) {
|
||||
doc.upload_status = true;
|
||||
doc.collection_name = res.collection_name;
|
||||
files = files;
|
||||
}
|
||||
} catch (e) {
|
||||
// Remove the failed doc from the files array
|
||||
files = files.filter((f) => f.name !== url);
|
||||
toast.error(e);
|
||||
}
|
||||
};
|
||||
|
||||
onMount(() => {
|
||||
const dropZone = document.querySelector('body');
|
||||
|
||||
dropZone?.addEventListener('dragover', (e) => {
|
||||
const onDragOver = (e) => {
|
||||
e.preventDefault();
|
||||
dragged = true;
|
||||
});
|
||||
};
|
||||
|
||||
dropZone.addEventListener('drop', async (e) => {
|
||||
const onDragLeave = () => {
|
||||
dragged = false;
|
||||
};
|
||||
|
||||
const onDrop = async (e) => {
|
||||
e.preventDefault();
|
||||
console.log(e);
|
||||
|
||||
|
@ -141,19 +325,19 @@
|
|||
|
||||
if (inputFiles && inputFiles.length > 0) {
|
||||
const file = inputFiles[0];
|
||||
console.log(file, file.name.split('.').at(-1));
|
||||
if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
|
||||
reader.readAsDataURL(file);
|
||||
} else if (
|
||||
[
|
||||
'application/pdf',
|
||||
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
||||
'text/plain',
|
||||
'text/csv'
|
||||
].includes(file['type'])
|
||||
SUPPORTED_FILE_TYPE.includes(file['type']) ||
|
||||
SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
|
||||
) {
|
||||
uploadDoc(file);
|
||||
} else {
|
||||
toast.error(`Unsupported File Type '${file['type']}'.`);
|
||||
toast.error(
|
||||
`Unknown File Type '${file['type']}', but accepting and treating as plain text`
|
||||
);
|
||||
uploadDoc(file);
|
||||
}
|
||||
} else {
|
||||
toast.error(`File not found.`);
|
||||
|
@ -161,11 +345,17 @@
|
|||
}
|
||||
|
||||
dragged = false;
|
||||
});
|
||||
};
|
||||
|
||||
dropZone?.addEventListener('dragleave', () => {
|
||||
dragged = false;
|
||||
});
|
||||
dropZone?.addEventListener('dragover', onDragOver);
|
||||
dropZone?.addEventListener('drop', onDrop);
|
||||
dropZone?.addEventListener('dragleave', onDragLeave);
|
||||
|
||||
return () => {
|
||||
dropZone?.removeEventListener('dragover', onDragOver);
|
||||
dropZone?.removeEventListener('drop', onDrop);
|
||||
dropZone?.removeEventListener('dragleave', onDragLeave);
|
||||
};
|
||||
});
|
||||
</script>
|
||||
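The refactor in this script block names the drag-and-drop handlers so they can be detached again, returning a cleanup function from onMount. A compact sketch of that pattern under the same assumptions (a local dragged flag, body as the drop zone):

// Sketch: named handlers registered in onMount and removed in the returned cleanup.
import { onMount } from 'svelte';

let dragged = false;

onMount(() => {
	const dropZone = document.querySelector('body');

	const onDragOver = (e: DragEvent) => {
		e.preventDefault();
		dragged = true;
	};
	const onDragLeave = () => {
		dragged = false;
	};

	dropZone?.addEventListener('dragover', onDragOver);
	dropZone?.addEventListener('dragleave', onDragLeave);

	// Svelte calls this when the component is destroyed, so the listeners do not leak.
	return () => {
		dropZone?.removeEventListener('dragover', onDragOver);
		dropZone?.removeEventListener('dragleave', onDragLeave);
	};
});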
|
||||
|
@ -179,29 +369,24 @@
|
|||
<div class="absolute rounded-xl w-full h-full backdrop-blur bg-gray-800/40 flex justify-center">
|
||||
<div class="m-auto pt-64 flex flex-col justify-center">
|
||||
<div class="max-w-md">
|
||||
<div class=" text-center text-6xl mb-3">🗂️</div>
|
||||
<div class="text-center dark:text-white text-2xl font-semibold z-50">Add Files</div>
|
||||
|
||||
<div class=" mt-2 text-center text-sm dark:text-gray-200 w-full">
|
||||
Drop any files/images here to add to the conversation
|
||||
</div>
|
||||
<AddFilesPlaceholder />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="fixed bottom-0 w-full">
|
||||
<div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
|
||||
<div class="w-full">
|
||||
<div class="px-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
|
||||
<div class="flex flex-col max-w-3xl w-full">
|
||||
<div>
|
||||
<div class="relative">
|
||||
{#if autoScroll === false && messages.length > 0}
|
||||
<div class=" flex justify-center mb-4">
|
||||
<div class=" absolute -top-12 left-0 right-0 flex justify-center">
|
||||
<button
|
||||
class=" bg-white border border-gray-100 dark:border-none dark:bg-white/20 p-1.5 rounded-full"
|
||||
on:click={() => {
|
||||
autoScroll = true;
|
||||
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
|
||||
scrollToBottom();
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
|
@ -221,18 +406,48 @@
|
|||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="w-full">
|
||||
<div class="w-full relative">
|
||||
{#if prompt.charAt(0) === '/'}
|
||||
<Prompts bind:this={promptsElement} bind:prompt />
|
||||
{:else if messages.length == 0 && suggestionPrompts.length !== 0}
|
||||
{:else if prompt.charAt(0) === '#'}
|
||||
<Documents
|
||||
bind:this={documentsElement}
|
||||
bind:prompt
|
||||
on:url={(e) => {
|
||||
console.log(e);
|
||||
uploadWeb(e.detail);
|
||||
}}
|
||||
on:select={(e) => {
|
||||
console.log(e);
|
||||
files = [
|
||||
...files,
|
||||
{
|
||||
type: e?.detail?.type ?? 'doc',
|
||||
...e.detail,
|
||||
upload_status: true
|
||||
}
|
||||
];
|
||||
}}
|
||||
/>
|
||||
{:else if prompt.charAt(0) === '@'}
|
||||
<Models
|
||||
bind:this={modelsElement}
|
||||
bind:prompt
|
||||
bind:user
|
||||
bind:chatInputPlaceholder
|
||||
{messages}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if messages.length == 0 && suggestionPrompts.length !== 0}
|
||||
<Suggestions {suggestionPrompts} {submitPrompt} />
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="bg-white dark:bg-gray-800">
|
||||
<div class="max-w-3xl px-2.5 -mb-0.5 mx-auto inset-x-0">
|
||||
<div class="bg-gradient-to-t from-white dark:from-gray-800 from-40% pb-2">
|
||||
<div class="bg-white dark:bg-gray-900">
|
||||
<div class="max-w-3xl px-2.5 mx-auto inset-x-0">
|
||||
<div class=" pb-2">
|
||||
<input
|
||||
bind:this={filesInputElement}
|
||||
bind:files={inputFiles}
|
||||
|
@ -257,18 +472,17 @@
|
|||
if (['image/gif', 'image/jpeg', 'image/png'].includes(file['type'])) {
|
||||
reader.readAsDataURL(file);
|
||||
} else if (
|
||||
[
|
||||
'application/pdf',
|
||||
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
||||
'text/plain',
|
||||
'text/csv'
|
||||
].includes(file['type'])
|
||||
SUPPORTED_FILE_TYPE.includes(file['type']) ||
|
||||
SUPPORTED_FILE_EXTENSIONS.includes(file.name.split('.').at(-1))
|
||||
) {
|
||||
uploadDoc(file);
|
||||
filesInputElement.value = '';
|
||||
} else {
|
||||
toast.error(`Unsupported File Type '${file['type']}'.`);
|
||||
inputFiles = null;
|
||||
toast.error(
|
||||
`Unknown File Type '${file['type']}', but accepting and treating as plain text`
|
||||
);
|
||||
uploadDoc(file);
|
||||
filesInputElement.value = '';
|
||||
}
|
||||
} else {
|
||||
toast.error(`File not found.`);
|
||||
|
@ -276,9 +490,9 @@
|
|||
}}
|
||||
/>
|
||||
<form
|
||||
class=" flex flex-col relative w-full rounded-xl border dark:border-gray-600 bg-white dark:bg-gray-800 dark:text-gray-100"
|
||||
class=" flex flex-col relative w-full rounded-xl border dark:border-gray-700 bg-white dark:bg-gray-900 dark:text-gray-100"
|
||||
on:submit|preventDefault={() => {
|
||||
submitPrompt(prompt);
|
||||
submitPrompt(prompt, user);
|
||||
}}
|
||||
>
|
||||
{#if files.length > 0}
|
||||
|
@ -361,6 +575,34 @@
|
|||
<div class=" text-gray-500 text-sm">Document</div>
|
||||
</div>
|
||||
</div>
|
||||
{:else if file.type === 'collection'}
|
||||
<div
|
||||
class="h-16 w-[15rem] flex items-center space-x-3 px-2.5 dark:bg-gray-600 rounded-xl border border-gray-200 dark:border-none"
|
||||
>
|
||||
<div class="p-2.5 bg-red-400 text-white rounded-lg">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
class="w-6 h-6"
|
||||
>
|
||||
<path
|
||||
d="M7.5 3.375c0-1.036.84-1.875 1.875-1.875h.375a3.75 3.75 0 0 1 3.75 3.75v1.875C13.5 8.161 14.34 9 15.375 9h1.875A3.75 3.75 0 0 1 21 12.75v3.375C21 17.16 20.16 18 19.125 18h-9.75A1.875 1.875 0 0 1 7.5 16.125V3.375Z"
|
||||
/>
|
||||
<path
|
||||
d="M15 5.25a5.23 5.23 0 0 0-1.279-3.434 9.768 9.768 0 0 1 6.963 6.963A5.23 5.23 0 0 0 17.25 7.5h-1.875A.375.375 0 0 1 15 7.125V5.25ZM4.875 6H6v10.125A3.375 3.375 0 0 0 9.375 19.5H16.5v1.125c0 1.035-.84 1.875-1.875 1.875h-9.75A1.875 1.875 0 0 1 3 20.625V7.875C3 6.839 3.84 6 4.875 6Z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col justify-center -space-y-0.5">
|
||||
<div class=" dark:text-gray-100 text-sm font-medium line-clamp-1">
|
||||
{file?.title ?? `#${file.name}`}
|
||||
</div>
|
||||
|
||||
<div class=" text-gray-500 text-sm">Collection</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class=" absolute -top-1 -right-1">
|
||||
|
@ -417,20 +659,38 @@
|
|||
|
||||
<textarea
|
||||
id="chat-textarea"
|
||||
class=" dark:bg-gray-800 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
|
||||
class=" dark:bg-gray-900 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
|
||||
? ''
|
||||
: ' pl-4'} rounded-xl resize-none h-[48px]"
|
||||
placeholder={speechRecognitionListening ? 'Listening...' : 'Send a message'}
|
||||
placeholder={chatInputPlaceholder !== ''
|
||||
? chatInputPlaceholder
|
||||
: isRecording
|
||||
? 'Listening...'
|
||||
: 'Send a message'}
|
||||
bind:value={prompt}
|
||||
on:keypress={(e) => {
|
||||
if (e.keyCode == 13 && !e.shiftKey) {
|
||||
e.preventDefault();
|
||||
}
|
||||
if (prompt !== '' && e.keyCode == 13 && !e.shiftKey) {
|
||||
submitPrompt(prompt);
|
||||
submitPrompt(prompt, user);
|
||||
}
|
||||
}}
|
||||
on:keydown={async (e) => {
|
||||
const isCtrlPressed = e.ctrlKey || e.metaKey; // metaKey is for Cmd key on Mac
|
||||
|
||||
// Check if Ctrl + R is pressed
|
||||
if (prompt === '' && isCtrlPressed && e.key.toLowerCase() === 'r') {
|
||||
e.preventDefault();
|
||||
console.log('regenerate');
|
||||
|
||||
const regenerateButton = [
|
||||
...document.getElementsByClassName('regenerate-response-button')
|
||||
]?.at(-1);
|
||||
|
||||
regenerateButton?.click();
|
||||
}
|
||||
|
||||
if (prompt === '' && e.key == 'ArrowUp') {
|
||||
e.preventDefault();
|
||||
|
||||
|
@ -448,8 +708,10 @@
|
|||
editButton?.click();
|
||||
}
|
||||
|
||||
if (prompt.charAt(0) === '/' && e.key === 'ArrowUp') {
|
||||
promptsElement.selectUp();
|
||||
if (['/', '#', '@'].includes(prompt.charAt(0)) && e.key === 'ArrowUp') {
|
||||
e.preventDefault();
|
||||
|
||||
(promptsElement || documentsElement || modelsElement).selectUp();
|
||||
|
||||
const commandOptionButton = [
|
||||
...document.getElementsByClassName('selected-command-option-button')
|
||||
|
@ -457,8 +719,10 @@
|
|||
commandOptionButton.scrollIntoView({ block: 'center' });
|
||||
}
|
||||
|
||||
if (prompt.charAt(0) === '/' && e.key === 'ArrowDown') {
|
||||
promptsElement.selectDown();
|
||||
if (['/', '#', '@'].includes(prompt.charAt(0)) && e.key === 'ArrowDown') {
|
||||
e.preventDefault();
|
||||
|
||||
(promptsElement || documentsElement || modelsElement).selectDown();
|
||||
|
||||
const commandOptionButton = [
|
||||
...document.getElementsByClassName('selected-command-option-button')
|
||||
|
@ -466,7 +730,7 @@
|
|||
commandOptionButton.scrollIntoView({ block: 'center' });
|
||||
}
|
||||
|
||||
if (prompt.charAt(0) === '/' && e.key === 'Enter') {
|
||||
if (['/', '#', '@'].includes(prompt.charAt(0)) && e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
|
||||
const commandOptionButton = [
|
||||
|
@ -476,7 +740,7 @@
|
|||
commandOptionButton?.click();
|
||||
}
|
||||
|
||||
if (prompt.charAt(0) === '/' && e.key === 'Tab') {
|
||||
if (['/', '#', '@'].includes(prompt.charAt(0)) && e.key === 'Tab') {
|
||||
e.preventDefault();
|
||||
|
||||
const commandOptionButton = [
|
||||
|
@ -507,6 +771,11 @@
|
|||
on:input={(e) => {
|
||||
e.target.style.height = '';
|
||||
e.target.style.height = Math.min(e.target.scrollHeight, 200) + 'px';
|
||||
user = null;
|
||||
}}
|
||||
on:focus={(e) => {
|
||||
e.target.style.height = '';
|
||||
e.target.style.height = Math.min(e.target.scrollHeight, 200) + 'px';
|
||||
}}
|
||||
on:paste={(e) => {
|
||||
const clipboardData = e.clipboardData || window.clipboardData;
|
||||
|
@ -538,13 +807,14 @@
|
|||
{#if messages.length == 0 || messages.at(-1).done == true}
|
||||
{#if speechRecognitionEnabled}
|
||||
<button
|
||||
id="voice-input-button"
|
||||
class=" text-gray-600 dark:text-gray-300 transition rounded-lg p-1.5 mr-0.5 self-center"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
speechRecognitionHandler();
|
||||
}}
|
||||
>
|
||||
{#if speechRecognitionListening}
|
||||
{#if isRecording}
|
||||
<svg
|
||||
class=" w-5 h-5 translate-y-[0.5px]"
|
||||
fill="currentColor"
|
||||
|
@ -599,19 +869,19 @@
|
|||
<button
|
||||
class="{prompt !== ''
|
||||
? 'bg-black text-white hover:bg-gray-900 dark:bg-white dark:text-black dark:hover:bg-gray-100 '
|
||||
: 'text-white bg-gray-100 dark:text-gray-800 dark:bg-gray-600 disabled'} transition rounded-lg p-1 mr-0.5 w-7 h-7 self-center"
|
||||
: 'text-white bg-gray-100 dark:text-gray-900 dark:bg-gray-800 disabled'} transition rounded-lg p-1 mr-0.5 w-7 h-7 self-center"
|
||||
type="submit"
|
||||
disabled={prompt === ''}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-5 h-5"
|
||||
class="w-4.5 h-4.5 mx-auto"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M10 17a.75.75 0 01-.75-.75V5.612L5.29 9.77a.75.75 0 01-1.08-1.04l5.25-5.5a.75.75 0 011.08 0l5.25 5.5a.75.75 0 11-1.08 1.04l-3.96-4.158V16.25A.75.75 0 0110 17z"
|
||||
d="M8 14a.75.75 0 0 1-.75-.75V4.56L4.03 7.78a.75.75 0 0 1-1.06-1.06l4.5-4.5a.75.75 0 0 1 1.06 0l4.5 4.5a.75.75 0 0 1-1.06 1.06L8.75 4.56v8.69A.75.75 0 0 1 8 14Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
|
|
159 src/lib/components/chat/MessageInput/Documents.svelte Normal file
|
@ -0,0 +1,159 @@
|
|||
<script lang="ts">
|
||||
import { createEventDispatcher } from 'svelte';
|
||||
|
||||
import { documents } from '$lib/stores';
|
||||
import { removeFirstHashWord, isValidHttpUrl } from '$lib/utils';
|
||||
import { tick } from 'svelte';
|
||||
import toast from 'svelte-french-toast';
|
||||
|
||||
export let prompt = '';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
let selectedIdx = 0;
|
||||
|
||||
let filteredItems = [];
|
||||
let filteredDocs = [];
|
||||
|
||||
let collections = [];
|
||||
|
||||
$: collections = [
|
||||
...($documents.length > 0
|
||||
? [
|
||||
{
|
||||
name: 'All Documents',
|
||||
type: 'collection',
|
||||
title: 'All Documents',
|
||||
collection_names: $documents.map((doc) => doc.collection_name)
|
||||
}
|
||||
]
|
||||
: []),
|
||||
...$documents
|
||||
.reduce((a, e, i, arr) => {
|
||||
return [...new Set([...a, ...(e?.content?.tags ?? []).map((tag) => tag.name)])];
|
||||
}, [])
|
||||
.map((tag) => ({
|
||||
name: tag,
|
||||
type: 'collection',
|
||||
collection_names: $documents
|
||||
.filter((doc) => (doc?.content?.tags ?? []).map((tag) => tag.name).includes(tag))
|
||||
.map((doc) => doc.collection_name)
|
||||
}))
|
||||
];
|
||||
|
||||
$: filteredCollections = collections
|
||||
.filter((collection) => collection.name.includes(prompt.split(' ')?.at(0)?.substring(1) ?? ''))
|
||||
.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
$: filteredDocs = $documents
|
||||
.filter((doc) => doc.name.includes(prompt.split(' ')?.at(0)?.substring(1) ?? ''))
|
||||
.sort((a, b) => a.title.localeCompare(b.title));
|
||||
|
||||
$: filteredItems = [...filteredCollections, ...filteredDocs];
|
||||
|
||||
$: if (prompt) {
|
||||
selectedIdx = 0;
|
||||
|
||||
console.log(filteredCollections);
|
||||
}
|
||||
|
||||
export const selectUp = () => {
|
||||
selectedIdx = Math.max(0, selectedIdx - 1);
|
||||
};
|
||||
|
||||
export const selectDown = () => {
|
||||
selectedIdx = Math.min(selectedIdx + 1, filteredItems.length - 1);
|
||||
};
|
||||
|
||||
const confirmSelect = async (doc) => {
|
||||
dispatch('select', doc);
|
||||
|
||||
prompt = removeFirstHashWord(prompt);
|
||||
const chatInputElement = document.getElementById('chat-textarea');
|
||||
|
||||
await tick();
|
||||
chatInputElement?.focus();
|
||||
await tick();
|
||||
};
|
||||
|
||||
const confirmSelectWeb = async (url) => {
|
||||
dispatch('url', url);
|
||||
|
||||
prompt = removeFirstHashWord(prompt);
|
||||
const chatInputElement = document.getElementById('chat-textarea');
|
||||
|
||||
await tick();
|
||||
chatInputElement?.focus();
|
||||
await tick();
|
||||
};
|
||||
</script>
|
||||
|
||||
{#if filteredItems.length > 0 || prompt.split(' ')?.at(0)?.substring(1).startsWith('http')}
|
||||
<div class="md:px-2 mb-3 text-left w-full absolute bottom-0 left-0 right-0">
|
||||
<div class="flex w-full rounded-lg border border-gray-100 dark:border-gray-700">
|
||||
<div class=" bg-gray-100 dark:bg-gray-700 w-10 rounded-l-lg text-center">
|
||||
<div class=" text-lg font-semibold mt-2">#</div>
|
||||
</div>
|
||||
|
||||
<div class="max-h-60 flex flex-col w-full rounded-r-lg">
|
||||
<div class=" overflow-y-auto bg-white p-2 rounded-tr-lg space-y-0.5">
|
||||
{#each filteredItems as doc, docIdx}
|
||||
<button
|
||||
class=" px-3 py-1.5 rounded-lg w-full text-left {docIdx === selectedIdx
|
||||
? ' bg-gray-100 selected-command-option-button'
|
||||
: ''}"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
console.log(doc);
|
||||
|
||||
confirmSelect(doc);
|
||||
}}
|
||||
on:mousemove={() => {
|
||||
selectedIdx = docIdx;
|
||||
}}
|
||||
on:focus={() => {}}
|
||||
>
|
||||
{#if doc.type === 'collection'}
|
||||
<div class=" font-medium text-black line-clamp-1">
|
||||
{doc?.title ?? `#${doc.name}`}
|
||||
</div>
|
||||
|
||||
<div class=" text-xs text-gray-600 line-clamp-1">Collection</div>
|
||||
{:else}
|
||||
<div class=" font-medium text-black line-clamp-1">
|
||||
#{doc.name} ({doc.filename})
|
||||
</div>
|
||||
|
||||
<div class=" text-xs text-gray-600 line-clamp-1">
|
||||
{doc.title}
|
||||
</div>
|
||||
{/if}
|
||||
</button>
|
||||
{/each}
|
||||
|
||||
{#if prompt.split(' ')?.at(0)?.substring(1).startsWith('http')}
|
||||
<button
|
||||
class="px-3 py-1.5 rounded-lg w-full text-left bg-gray-100 selected-command-option-button"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
const url = prompt.split(' ')?.at(0)?.substring(1);
|
||||
if (isValidHttpUrl(url)) {
|
||||
confirmSelectWeb(url);
|
||||
} else {
|
||||
toast.error(
|
||||
'Oops! Looks like the URL is invalid. Please double-check and try again.'
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<div class=" font-medium text-black line-clamp-1">
|
||||
{prompt.split(' ')?.at(0)?.substring(1)}
|
||||
</div>
|
||||
|
||||
<div class=" text-xs text-gray-600 line-clamp-1">Web</div>
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
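Documents.svelte above builds the '#' menu from an "All Documents" entry plus one pseudo-collection per tag found on the stored documents. A sketch of that derivation, with an assumed Doc shape standing in for the entries of the $documents store:

// Sketch: derive tag-based pseudo-collections, each pointing at the matching collection_names.
interface Doc {
	name: string;
	collection_name: string;
	content?: { tags?: { name: string }[] };
}

const collectionsFromTags = (docs: Doc[]) => {
	const tags = [...new Set(docs.flatMap((d) => (d.content?.tags ?? []).map((t) => t.name)))];
	return tags.map((tag) => ({
		name: tag,
		type: 'collection' as const,
		collection_names: docs
			.filter((d) => (d.content?.tags ?? []).some((t) => t.name === tag))
			.map((d) => d.collection_name)
	}));
};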
158 src/lib/components/chat/MessageInput/Models.svelte Normal file
|
@ -0,0 +1,158 @@
|
|||
<script lang="ts">
|
||||
import { generatePrompt } from '$lib/apis/ollama';
|
||||
import { models } from '$lib/stores';
|
||||
import { splitStream } from '$lib/utils';
|
||||
import { tick } from 'svelte';
|
||||
import toast from 'svelte-french-toast';
|
||||
|
||||
export let prompt = '';
|
||||
export let user = null;
|
||||
|
||||
export let chatInputPlaceholder = '';
|
||||
export let messages = [];
|
||||
|
||||
let selectedIdx = 0;
|
||||
let filteredModels = [];
|
||||
|
||||
$: filteredModels = $models
|
||||
.filter(
|
||||
(p) =>
|
||||
p.name !== 'hr' &&
|
||||
!p.external &&
|
||||
p.name.includes(prompt.split(' ')?.at(0)?.substring(1) ?? '')
|
||||
)
|
||||
.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
$: if (prompt) {
|
||||
selectedIdx = 0;
|
||||
}
|
||||
|
||||
export const selectUp = () => {
|
||||
selectedIdx = Math.max(0, selectedIdx - 1);
|
||||
};
|
||||
|
||||
export const selectDown = () => {
|
||||
selectedIdx = Math.min(selectedIdx + 1, filteredModels.length - 1);
|
||||
};
|
||||
|
||||
const confirmSelect = async (model) => {
|
||||
// dispatch('select', model);
|
||||
prompt = '';
|
||||
user = JSON.parse(JSON.stringify(model.name));
|
||||
await tick();
|
||||
|
||||
chatInputPlaceholder = `'${model.name}' is thinking...`;
|
||||
|
||||
const chatInputElement = document.getElementById('chat-textarea');
|
||||
|
||||
await tick();
|
||||
chatInputElement?.focus();
|
||||
await tick();
|
||||
|
||||
const convoText = messages.reduce((a, message, i, arr) => {
|
||||
return `${a}### ${message.role.toUpperCase()}\n${message.content}\n\n`;
|
||||
}, '');
|
||||
|
||||
const res = await generatePrompt(localStorage.token, model.name, convoText);
|
||||
|
||||
if (res && res.ok) {
|
||||
const reader = res.body
|
||||
.pipeThrough(new TextDecoderStream())
|
||||
.pipeThrough(splitStream('\n'))
|
||||
.getReader();
|
||||
|
||||
while (true) {
|
||||
const { value, done } = await reader.read();
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
|
||||
try {
|
||||
let lines = value.split('\n');
|
||||
|
||||
for (const line of lines) {
|
||||
if (line !== '') {
|
||||
console.log(line);
|
||||
let data = JSON.parse(line);
|
||||
|
||||
if ('detail' in data) {
|
||||
throw data;
|
||||
}
|
||||
|
||||
if (data.done == false) {
|
||||
if (prompt == '' && data.response == '\n') {
|
||||
continue;
|
||||
} else {
|
||||
prompt += data.response;
|
||||
console.log(data.response);
|
||||
chatInputElement.scrollTop = chatInputElement.scrollHeight;
|
||||
await tick();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
if ('detail' in error) {
|
||||
toast.error(error.detail);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (res !== null) {
|
||||
const error = await res.json();
|
||||
console.log(error);
|
||||
if ('detail' in error) {
|
||||
toast.error(error.detail);
|
||||
} else {
|
||||
toast.error(error.error);
|
||||
}
|
||||
} else {
|
||||
toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
|
||||
}
|
||||
}
|
||||
|
||||
chatInputPlaceholder = '';
|
||||
|
||||
console.log(user);
|
||||
};
|
||||
</script>
|
||||
|
||||
{#if filteredModels.length > 0}
|
||||
<div class="md:px-2 mb-3 text-left w-full absolute bottom-0 left-0 right-0">
|
||||
<div class="flex w-full rounded-lg border border-gray-100 dark:border-gray-700">
|
||||
<div class=" bg-gray-100 dark:bg-gray-700 w-10 rounded-l-lg text-center">
|
||||
<div class=" text-lg font-semibold mt-2">@</div>
|
||||
</div>
|
||||
|
||||
<div class="max-h-60 flex flex-col w-full rounded-r-lg">
|
||||
<div class=" overflow-y-auto bg-white p-2 rounded-tr-lg space-y-0.5">
|
||||
{#each filteredModels as model, modelIdx}
|
||||
<button
|
||||
class=" px-3 py-1.5 rounded-lg w-full text-left {modelIdx === selectedIdx
|
||||
? ' bg-gray-100 selected-command-option-button'
|
||||
: ''}"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
confirmSelect(model);
|
||||
}}
|
||||
on:mousemove={() => {
|
||||
selectedIdx = modelIdx;
|
||||
}}
|
||||
on:focus={() => {}}
|
||||
>
|
||||
<div class=" font-medium text-black line-clamp-1">
|
||||
{model.name}
|
||||
</div>
|
||||
|
||||
<!-- <div class=" text-xs text-gray-600 line-clamp-1">
|
||||
{doc.title}
|
||||
</div> -->
|
||||
</button>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
|
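Models.svelte above streams the '@' model's completion line by line, appending each response chunk into the prompt box. A sketch of consuming that newline-delimited JSON stream; the component itself uses the splitStream('\n') helper, while this version buffers and splits manually:

// Sketch: read an NDJSON stream where each line carries a `response` chunk and a `done` flag.
async function consumeGenerateStream(res: Response, onToken: (t: string) => void) {
	const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader();
	let buffer = '';

	while (true) {
		const { value, done } = await reader.read();
		if (done) break;
		buffer += value ?? '';

		const lines = buffer.split('\n');
		buffer = lines.pop() ?? ''; // keep any partial trailing line for the next chunk

		for (const line of lines) {
			if (line === '') continue;
			const data = JSON.parse(line);
			if ('detail' in data) throw data; // backend error payload
			if (!data.done) onToken(data.response);
		}
	}
}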
@ -2,6 +2,7 @@
|
|||
import { prompts } from '$lib/stores';
|
||||
import { findWordIndices } from '$lib/utils';
|
||||
import { tick } from 'svelte';
|
||||
import toast from 'svelte-french-toast';
|
||||
|
||||
export let prompt = '';
|
||||
let selectedCommandIdx = 0;
|
||||
|
@ -24,7 +25,18 @@
|
|||
};
|
||||
|
||||
const confirmCommand = async (command) => {
|
||||
prompt = command.content;
|
||||
let text = command.content;
|
||||
|
||||
if (command.content.includes('{{CLIPBOARD}}')) {
|
||||
const clipboardText = await navigator.clipboard.readText().catch((err) => {
|
||||
toast.error('Failed to read clipboard contents');
|
||||
return '{{CLIPBOARD}}';
|
||||
});
|
||||
|
||||
text = command.content.replaceAll('{{CLIPBOARD}}', clipboardText);
|
||||
}
|
||||
|
||||
prompt = text;
|
||||
|
||||
const chatInputElement = document.getElementById('chat-textarea');
|
||||
|
||||
|
@ -47,7 +59,7 @@
|
|||
</script>
|
||||
|
||||
{#if filteredPromptCommands.length > 0}
|
||||
<div class="md:px-2 mb-3 text-left w-full">
|
||||
<div class="md:px-2 mb-3 text-left w-full absolute bottom-0 left-0 right-0">
|
||||
<div class="flex w-full rounded-lg border border-gray-100 dark:border-gray-700">
|
||||
<div class=" bg-gray-100 dark:bg-gray-700 w-10 rounded-l-lg text-center">
|
||||
<div class=" text-lg font-semibold mt-2">/</div>
|
||||
|
|
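The Prompts change above expands a {{CLIPBOARD}} placeholder at selection time. A short sketch of that step; reading the clipboard needs a user gesture and permission, so the placeholder is left in place on failure:

// Sketch: replace {{CLIPBOARD}} with the current clipboard text, falling back to the literal placeholder.
const expandClipboardPlaceholder = async (content: string): Promise<string> => {
	if (!content.includes('{{CLIPBOARD}}')) {
		return content;
	}
	const clipboardText = await navigator.clipboard.readText().catch(() => '{{CLIPBOARD}}');
	return content.replaceAll('{{CLIPBOARD}}', clipboardText);
};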
|
@ -1,21 +1,30 @@
|
|||
<script lang="ts">
|
||||
export let submitPrompt: Function;
|
||||
export let suggestionPrompts = [];
|
||||
|
||||
let prompts = [];
|
||||
|
||||
$: prompts =
|
||||
suggestionPrompts.length <= 4
|
||||
? suggestionPrompts
|
||||
: suggestionPrompts.sort(() => Math.random() - 0.5).slice(0, 4);
|
||||
</script>
|
||||
|
||||
<div class=" flex flex-wrap-reverse mb-3 md:p-1 text-left w-full">
|
||||
{#each suggestionPrompts as prompt, promptIdx}
|
||||
<div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
|
||||
{#each prompts as prompt, promptIdx}
|
||||
<div
|
||||
class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px] px-2"
|
||||
>
|
||||
<button
|
||||
class=" flex-1 flex justify-between w-full h-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-800 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-600 rounded-lg transition group"
|
||||
class=" flex-1 flex justify-between w-full h-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-900 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-800 rounded-lg transition group"
|
||||
on:click={() => {
|
||||
submitPrompt(prompt.content);
|
||||
}}
|
||||
>
|
||||
<div class="flex flex-col text-left self-center">
|
||||
{#if prompt.title}
|
||||
{#if prompt.title && prompt.title[0] !== ''}
|
||||
<div class="text-sm font-medium dark:text-gray-300">{prompt.title[0]}</div>
|
||||
<div class="text-sm text-gray-500">{prompt.title[1]}</div>
|
||||
<div class="text-sm text-gray-500 line-clamp-1">{prompt.title[1]}</div>
|
||||
{:else}
|
||||
<div class=" self-center text-sm font-medium dark:text-gray-300 line-clamp-2">
|
||||
{prompt.content}
|
||||
|
@ -24,17 +33,17 @@
|
|||
</div>
|
||||
|
||||
<div
|
||||
class="self-center p-1 rounded-lg text-white group-hover:bg-gray-100 group-hover:text-gray-800 dark:group-hover:bg-gray-800 dark:group-hover:text-gray-300 dark:text-gray-800 transition"
|
||||
class="self-center p-1 rounded-lg text-white group-hover:bg-gray-100 group-hover:text-gray-800 dark:group-hover:bg-gray-800 dark:group-hover:text-gray-100 dark:text-gray-900 transition"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M10 17a.75.75 0 01-.75-.75V5.612L5.29 9.77a.75.75 0 01-1.08-1.04l5.25-5.5a.75.75 0 011.08 0l5.25 5.5a.75.75 0 11-1.08 1.04l-3.96-4.158V16.25A.75.75 0 0110 17z"
|
||||
d="M8 14a.75.75 0 0 1-.75-.75V4.56L4.03 7.78a.75.75 0 0 1-1.06-1.06l4.5-4.5a.75.75 0 0 1 1.06 0l4.5 4.5a.75.75 0 0 1-1.06 1.06L8.75 4.56v8.69A.75.75 0 0 1 8 14Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
|
|
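The suggestion picker above samples four prompts with sort(() => Math.random() - 0.5), which mutates the array and yields a biased shuffle. A small alternative sketch (not what the diff does) using a partial Fisher–Yates shuffle on a copy:

// Sketch: uniformly sample up to `count` prompts without mutating the original array.
const samplePrompts = <T>(items: T[], count = 4): T[] => {
	const copy = [...items];
	for (let i = copy.length - 1; i > copy.length - 1 - count && i > 0; i--) {
		const j = Math.floor(Math.random() * (i + 1));
		[copy[i], copy[j]] = [copy[j], copy[i]];
	}
	return copy.slice(-count);
};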
|
@ -11,9 +11,11 @@
|
|||
import ResponseMessage from './Messages/ResponseMessage.svelte';
|
||||
import Placeholder from './Messages/Placeholder.svelte';
|
||||
import Spinner from '../common/Spinner.svelte';
|
||||
import { imageGenerations } from '$lib/apis/images';
|
||||
|
||||
export let chatId = '';
|
||||
export let sendPrompt: Function;
|
||||
export let continueGeneration: Function;
|
||||
export let regenerateResponse: Function;
|
||||
|
||||
export let processing = '';
|
||||
|
@ -28,10 +30,15 @@
|
|||
$: if (autoScroll && bottomPadding) {
|
||||
(async () => {
|
||||
await tick();
|
||||
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
|
||||
scrollToBottom();
|
||||
})();
|
||||
}
|
||||
|
||||
const scrollToBottom = () => {
|
||||
const element = document.getElementById('messages-container');
|
||||
element.scrollTop = element.scrollHeight;
|
||||
};
|
||||
|
||||
const copyToClipboard = (text) => {
|
||||
if (!navigator.clipboard) {
|
||||
var textArea = document.createElement('textarea');
|
||||
|
@ -159,10 +166,11 @@
|
|||
|
||||
await tick();
|
||||
|
||||
autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
|
||||
const element = document.getElementById('messages-container');
|
||||
autoScroll = element.scrollHeight - element.scrollTop <= element.clientHeight + 50;
|
||||
|
||||
setTimeout(() => {
|
||||
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
|
||||
scrollToBottom();
|
||||
}, 100);
|
||||
};
|
||||
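The scroll handling above switches from window-based measurements to the #messages-container element, keeping auto-scroll on only while the user is near the bottom. A short sketch of the same check and scroll:

// Sketch: auto-scroll stays enabled while the container is within ~50px of its bottom.
const isNearBottom = (el: HTMLElement, threshold = 50): boolean =>
	el.scrollHeight - el.scrollTop <= el.clientHeight + threshold;

const scrollToBottom = (el: HTMLElement) => {
	el.scrollTop = el.scrollHeight;
};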
|
||||
|
@ -207,100 +215,151 @@
|
|||
|
||||
await tick();
|
||||
|
||||
autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
|
||||
const element = document.getElementById('messages-container');
|
||||
autoScroll = element.scrollHeight - element.scrollTop <= element.clientHeight + 50;
|
||||
|
||||
setTimeout(() => {
|
||||
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
|
||||
scrollToBottom();
|
||||
}, 100);
|
||||
};
|
||||
|
||||
// TODO: change delete behaviour
|
||||
// const deleteMessageAndDescendants = async (messageId: string) => {
|
||||
// if (history.messages[messageId]) {
|
||||
// history.messages[messageId].deleted = true;
|
||||
|
||||
// for (const childId of history.messages[messageId].childrenIds) {
|
||||
// await deleteMessageAndDescendants(childId);
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
// const triggerDeleteMessageRecursive = async (messageId: string) => {
|
||||
// await deleteMessageAndDescendants(messageId);
|
||||
// await updateChatById(localStorage.token, chatId, { history });
|
||||
// await chats.set(await getChatList(localStorage.token));
|
||||
// };
|
||||
|
||||
const messageDeleteHandler = async (messageId) => {
|
||||
if (history.messages[messageId]) {
|
||||
history.messages[messageId].deleted = true;
|
||||
|
||||
for (const childId of history.messages[messageId].childrenIds) {
|
||||
history.messages[childId].deleted = true;
|
||||
}
|
||||
}
|
||||
await updateChatById(localStorage.token, chatId, { history });
|
||||
};
|
||||
</script>
|
||||
|
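The messageDeleteHandler above soft-deletes a message: it flags the message and its direct children so the {#if !message.deleted} guard hides them, then persists the history. A sketch under the same assumptions, with the history typing simplified and the existing updateChatById helper reused:

// Sketch: flag a message and its children as deleted, then persist the updated history.
const softDeleteMessage = async (token: string, chatId: string, history: any, messageId: string) => {
	const message = history.messages[messageId];
	if (!message) {
		return;
	}

	message.deleted = true;
	for (const childId of message.childrenIds ?? []) {
		if (history.messages[childId]) {
			history.messages[childId].deleted = true;
		}
	}

	await updateChatById(token, chatId, { history });
};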
||||
{#if messages.length == 0}
|
||||
<Placeholder models={selectedModels} modelfiles={selectedModelfiles} />
|
||||
{:else}
|
||||
{#key chatId}
|
||||
{#each messages as message, messageIdx}
|
||||
<div class=" w-full">
|
||||
<div class="flex flex-col justify-between px-5 mb-3 max-w-3xl mx-auto rounded-lg group">
|
||||
{#if message.role === 'user'}
|
||||
<UserMessage
|
||||
user={$user}
|
||||
{message}
|
||||
siblings={message.parentId !== null
|
||||
? history.messages[message.parentId]?.childrenIds ?? []
|
||||
: Object.values(history.messages)
|
||||
.filter((message) => message.parentId === null)
|
||||
.map((message) => message.id) ?? []}
|
||||
{confirmEditMessage}
|
||||
{showPreviousMessage}
|
||||
{showNextMessage}
|
||||
{copyToClipboard}
|
||||
/>
|
||||
<div class=" pb-10">
|
||||
{#key chatId}
|
||||
{#each messages as message, messageIdx}
|
||||
{#if !message.deleted}
|
||||
<div class=" w-full">
|
||||
<div
|
||||
class="flex flex-col justify-between px-5 mb-3 {$settings?.fullScreenMode ?? null
|
||||
? 'max-w-full'
|
||||
: 'max-w-3xl'} mx-auto rounded-lg group"
|
||||
>
|
||||
{#if message.role === 'user'}
|
||||
<UserMessage
|
||||
on:delete={() => messageDeleteHandler(message.id)}
|
||||
user={$user}
|
||||
{message}
|
||||
isFirstMessage={messageIdx === 0}
|
||||
siblings={message.parentId !== null
|
||||
? history.messages[message.parentId]?.childrenIds ?? []
|
||||
: Object.values(history.messages)
|
||||
.filter((message) => message.parentId === null)
|
||||
.map((message) => message.id) ?? []}
|
||||
{confirmEditMessage}
|
||||
{showPreviousMessage}
|
||||
{showNextMessage}
|
||||
{copyToClipboard}
|
||||
/>
|
||||
|
||||
{#if messages.length - 1 === messageIdx && processing !== ''}
|
||||
<div class="flex my-2.5 ml-12 items-center w-fit space-x-2.5">
|
||||
<div class=" dark:text-blue-100">
|
||||
<svg
|
||||
class=" w-4 h-4 translate-y-[0.5px]"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
><style>
|
||||
.spinner_qM83 {
|
||||
animation: spinner_8HQG 1.05s infinite;
|
||||
}
|
||||
.spinner_oXPr {
|
||||
animation-delay: 0.1s;
|
||||
}
|
||||
.spinner_ZTLf {
|
||||
animation-delay: 0.2s;
|
||||
}
|
||||
@keyframes spinner_8HQG {
|
||||
0%,
|
||||
57.14% {
|
||||
animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
|
||||
transform: translate(0);
|
||||
}
|
||||
28.57% {
|
||||
animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
|
||||
transform: translateY(-6px);
|
||||
}
|
||||
100% {
|
||||
transform: translate(0);
|
||||
}
|
||||
}
|
||||
</style><circle class="spinner_qM83" cx="4" cy="12" r="2.5" /><circle
|
||||
class="spinner_qM83 spinner_oXPr"
|
||||
cx="12"
|
||||
cy="12"
|
||||
r="2.5"
|
||||
/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="2.5" /></svg
|
||||
>
|
||||
</div>
|
||||
<div class=" text-sm font-medium">
|
||||
{processing}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{:else}
|
||||
<ResponseMessage
|
||||
{message}
|
||||
modelfiles={selectedModelfiles}
|
||||
siblings={history.messages[message.parentId]?.childrenIds ?? []}
|
||||
isLastMessage={messageIdx + 1 === messages.length}
|
||||
{confirmEditResponseMessage}
|
||||
{showPreviousMessage}
|
||||
{showNextMessage}
|
||||
{rateMessage}
|
||||
{copyToClipboard}
|
||||
{regenerateResponse}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
{#if messages.length - 1 === messageIdx && processing !== ''}
|
||||
<div class="flex my-2.5 ml-12 items-center w-fit space-x-2.5">
|
||||
<div class=" dark:text-blue-100">
|
||||
<svg
|
||||
class=" w-4 h-4 translate-y-[0.5px]"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
><style>
|
||||
.spinner_qM83 {
|
||||
animation: spinner_8HQG 1.05s infinite;
|
||||
}
|
||||
.spinner_oXPr {
|
||||
animation-delay: 0.1s;
|
||||
}
|
||||
.spinner_ZTLf {
|
||||
animation-delay: 0.2s;
|
||||
}
|
||||
@keyframes spinner_8HQG {
|
||||
0%,
|
||||
57.14% {
|
||||
animation-timing-function: cubic-bezier(0.33, 0.66, 0.66, 1);
|
||||
transform: translate(0);
|
||||
}
|
||||
28.57% {
|
||||
animation-timing-function: cubic-bezier(0.33, 0, 0.66, 0.33);
|
||||
transform: translateY(-6px);
|
||||
}
|
||||
100% {
|
||||
transform: translate(0);
|
||||
}
|
||||
}
|
||||
</style><circle class="spinner_qM83" cx="4" cy="12" r="2.5" /><circle
|
||||
class="spinner_qM83 spinner_oXPr"
|
||||
cx="12"
|
||||
cy="12"
|
||||
r="2.5"
|
||||
/><circle class="spinner_qM83 spinner_ZTLf" cx="20" cy="12" r="2.5" /></svg
|
||||
>
|
||||
</div>
|
||||
<div class=" text-sm font-medium">
|
||||
{processing}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{:else}
|
||||
<ResponseMessage
|
||||
{message}
|
||||
modelfiles={selectedModelfiles}
|
||||
siblings={history.messages[message.parentId]?.childrenIds ?? []}
|
||||
isLastMessage={messageIdx + 1 === messages.length}
|
||||
{confirmEditResponseMessage}
|
||||
{showPreviousMessage}
|
||||
{showNextMessage}
|
||||
{rateMessage}
|
||||
{copyToClipboard}
|
||||
{continueGeneration}
|
||||
{regenerateResponse}
|
||||
on:save={async (e) => {
|
||||
console.log('save', e);
|
||||
|
||||
{#if bottomPadding}
|
||||
<div class=" mb-10" />
|
||||
{/if}
|
||||
{/key}
|
||||
const message = e.detail;
|
||||
history.messages[message.id] = message;
|
||||
await updateChatById(localStorage.token, chatId, {
|
||||
messages: messages,
|
||||
history: history
|
||||
});
|
||||
}}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
|
||||
{#if bottomPadding}
|
||||
<div class=" mb-10" />
|
||||
{/if}
|
||||
{/key}
|
||||
</div>
|
||||
{/if}
|
||||
|
|
Some files were not shown because too many files have changed in this diff.