Merge pull request #413 from ollama-webui/main

dev
This commit is contained in:
Timothy Jaeryang Baek 2024-01-07 02:25:52 -08:00 committed by GitHub
commit 0ddb2b320b
107 changed files with 5981 additions and 1976 deletions

1
.github/FUNDING.yml vendored Normal file

@ -0,0 +1 @@
github: tjbck


@ -4,7 +4,6 @@ about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
# Bug Report
@ -31,6 +30,7 @@ assignees: ''
## Reproduction Details
**Confirmation:**
- [ ] I have read and followed all the instructions provided in the README.md.
- [ ] I have reviewed the troubleshooting.md document.
- [ ] I have included the browser console logs.


@ -4,7 +4,6 @@ about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**

27
.github/workflows/format-backend.yaml vendored Normal file

@ -0,0 +1,27 @@
name: Python CI
on:
push:
branches: ['main']
pull_request:
jobs:
build:
name: 'Format Backend'
env:
PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest
strategy:
matrix:
node-version:
- latest
steps:
- uses: actions/checkout@v4
- name: Use Python
uses: actions/setup-python@v4
- name: Use Bun
uses: oven-sh/setup-bun@v1
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install yapf
- name: Format backend
run: bun run format:backend


@ -0,0 +1,22 @@
name: Bun CI
on:
push:
branches: ['main']
pull_request:
jobs:
build:
name: 'Format & Build Frontend'
env:
PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Use Bun
uses: oven-sh/setup-bun@v1
- run: bun --version
- name: Install frontend dependencies
run: bun install
- name: Format frontend
run: bun run format
- name: Build frontend
run: bun run build

27
.github/workflows/lint-backend.disabled vendored Normal file

@ -0,0 +1,27 @@
name: Python CI
on:
push:
branches: ['main']
pull_request:
jobs:
build:
name: 'Lint Backend'
env:
PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest
strategy:
matrix:
node-version:
- latest
steps:
- uses: actions/checkout@v4
- name: Use Python
uses: actions/setup-python@v4
- name: Use Bun
uses: oven-sh/setup-bun@v1
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install pylint
- name: Lint backend
run: bun run lint:backend


@ -0,0 +1,21 @@
name: Bun CI
on:
push:
branches: ['main']
pull_request:
jobs:
build:
name: 'Lint Frontend'
env:
PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Use Bun
uses: oven-sh/setup-bun@v1
- run: bun --version
- name: Install frontend dependencies
run: bun install --frozen-lockfile
- run: bun run lint:frontend
- run: bun run lint:types
if: success() || failure()


@ -1,27 +0,0 @@
name: Node.js CI
on:
push:
branches: ['main']
pull_request:
jobs:
build:
name: 'Fmt, Lint, & Build'
env:
PUBLIC_API_BASE_URL: ''
runs-on: ubuntu-latest
strategy:
matrix:
node-version:
- latest
steps:
- uses: actions/checkout@v3
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- run: node --version
- run: npm clean-install
- run: npm run fmt
#- run: npm run lint
#- run: npm run lint:types
- run: npm run build

290
.gitignore vendored

@ -8,3 +8,293 @@ node_modules
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*


@ -11,3 +11,6 @@ node_modules
pnpm-lock.yaml
package-lock.json
yarn.lock
# Ignore kubernetes files
kubernetes


@ -2,12 +2,6 @@
FROM node:alpine as build
ARG OLLAMA_API_BASE_URL='/ollama/api'
RUN echo $OLLAMA_API_BASE_URL
ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
RUN echo $PUBLIC_API_BASE_URL
WORKDIR /app
COPY package.json package-lock.json ./
@ -18,11 +12,13 @@ RUN npm run build
FROM python:3.11-slim-buster as base
ARG OLLAMA_API_BASE_URL='/ollama/api'
ENV ENV=prod
ENV OLLAMA_API_BASE_URL $OLLAMA_API_BASE_URL
ENV WEBUI_AUTH ""
ENV OLLAMA_API_BASE_URL "/ollama/api"
ENV OPENAI_API_BASE_URL ""
ENV OPENAI_API_KEY ""
ENV WEBUI_JWT_SECRET_KEY "SECRET_KEY"
WORKDIR /app

35
INSTALLATION.md Normal file

@ -0,0 +1,35 @@
### Installing Both Ollama and Ollama Web UI Using Kustomize
For a CPU-only pod
```bash
kubectl apply -f ./kubernetes/manifest/base
```
For a GPU-enabled pod
```bash
kubectl apply -k ./kubernetes/manifest
```
### Installing Both Ollama and Ollama Web UI Using Helm
Package the Helm chart first
```bash
helm package ./kubernetes/helm/
```
For a CPU-only pod
```bash
helm install ollama-webui ./ollama-webui-*.tgz
```
For a GPU-enabled pod
```bash
helm install ollama-webui ./ollama-webui-*.tgz --set ollama.resources.limits.nvidia.com/gpu="1"
```
Check the `kubernetes/helm/values.yaml` file to see which parameters are available for customization.
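If you want to adjust one of those parameters, a minimal sketch follows (assuming the chart layout above, with the packaged chart named `ollama-webui-*.tgz`; the GPU key reuses the earlier example, and any other key you pass must exist in `values.yaml`):
```bash
# List the parameters the chart exposes
helm show values ./kubernetes/helm/

# Override a parameter at install time (reusing the GPU limit key shown above)
helm install ollama-webui ./ollama-webui-*.tgz --set ollama.resources.limits.nvidia.com/gpu="1"
```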

113
README.md

@ -27,12 +27,16 @@ Also check our sibling project, [OllamaHub](https://ollamahub.com/), where you c
- ⚡ **Swift Responsiveness**: Enjoy fast and responsive performance.
- 🚀 **Effortless Setup**: Install seamlessly using Docker for a hassle-free experience.
- 🚀 **Effortless Setup**: Install seamlessly using Docker or Kubernetes (kubectl, kustomize or helm) for a hassle-free experience.
- 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
- ✒️🔢 **Full Markdown and LaTeX Support**: Elevate your LLM experience with comprehensive Markdown and LaTeX capabilities for enriched interaction.
- 📜 **Prompt Preset Support**: Instantly access preset prompts using the '/' command in the chat input. Load predefined conversation starters effortlessly and expedite your interactions. Effortlessly import prompts through [OllamaHub](https://ollamahub.com/) integration.
- 👍👎 **RLHF Annotation**: Empower your messages by rating them with thumbs up and thumbs down, facilitating the creation of datasets for Reinforcement Learning from Human Feedback (RLHF). Utilize your messages to train or fine-tune models, all while ensuring the confidentiality of locally saved data.
- 📥🗑️ **Download/Delete Models**: Easily download or remove models directly from the web UI.
- ⬆️ **GGUF File Model Creation**: Effortlessly create Ollama models by uploading GGUF files directly from the web UI. Streamlined process with options to upload from your machine or download GGUF files from Hugging Face.
@ -79,32 +83,6 @@ Don't forget to explore our sibling project, [OllamaHub](https://ollamahub.com/)
- **Privacy and Data Security:** We prioritize your privacy and data security above all. Please be reassured that all data entered into the Ollama Web UI is stored locally on your device. Our system is designed to be privacy-first, ensuring that no external requests are made, and your data does not leave your local environment. We are committed to maintaining the highest standards of data privacy and security, ensuring that your information remains confidential and under your control.
### Installing Both Ollama and Ollama Web UI Using Docker Compose
If you don't have Ollama installed yet, you can use the provided Docker Compose file for a hassle-free installation. Simply run the following command:
```bash
docker compose up -d --build
```
This command will install both Ollama and Ollama Web UI on your system.
#### Enable GPU
Use the additional Docker Compose file designed to enable GPU support by running the following command:
```bash
docker compose -f docker-compose.yml -f docker-compose.gpu.yml up -d --build
```
#### Expose Ollama API outside the container stack
Deploy the service with an additional Docker Compose file designed for API exposure:
```bash
docker compose -f docker-compose.yml -f docker-compose.api.yml up -d --build
```
### Installing Ollama Web UI Only
#### Prerequisites
@ -149,6 +127,69 @@ docker build -t ollama-webui .
docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
```
### Installing Both Ollama and Ollama Web UI
#### Using Docker Compose
If you don't have Ollama installed yet, you can use the provided Docker Compose file for a hassle-free installation. Simply run the following command:
```bash
docker compose up -d --build
```
This command will install both Ollama and Ollama Web UI on your system.
##### Enable GPU
Use the additional Docker Compose file designed to enable GPU support by running the following command:
```bash
docker compose -f docker-compose.yaml -f docker-compose.gpu.yaml up -d --build
```
##### Expose Ollama API outside the container stack
Deploy the service with an additional Docker Compose file designed for API exposure:
```bash
docker compose -f docker-compose.yaml -f docker-compose.api.yaml up -d --build
```
#### Using Provided `run-compose.sh` Script (Linux)
Also available on Windows under any Docker-enabled WSL2 Linux distro (you have to enable it from Docker Desktop).
Simply run the following command to grant execute permission to the script:
```bash
chmod +x run-compose.sh
```
##### For a CPU-only container
```bash
./run-compose.sh
```
##### Enable GPU
For a GPU-enabled container (this requires a GPU driver exposed to Docker; it mostly works with NVIDIA, so see the official install guide: [nvidia-container-toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html))
Warning! A GPU-enabled installation has only been tested on Linux with an NVIDIA GPU; full functionality is not guaranteed under Windows, macOS, or with a different GPU.
```bash
./run-compose.sh --enable-gpu
```
Note that both of the above commands use the latest production Docker image from the repository; to build the latest local version instead, append the `--build` parameter, for example:
```bash
./run-compose.sh --enable-gpu --build
```
#### Using Alternative Methods (Kustomize or Helm)
See [INSTALLATION.md](/INSTALLATION.md) for information on how to install and/or join our [Ollama Web UI Discord community](https://discord.gg/5rJgQTnV4s).
## How to Install Without Docker
While we strongly recommend using our convenient Docker container installation for optimal support, we understand that some situations may require a non-Docker setup, especially for development purposes. Please note that non-Docker installations are not officially supported, and you might need to troubleshoot on your own.
@ -157,9 +198,15 @@ While we strongly recommend using our convenient Docker container installation f
The Ollama Web UI consists of two primary components: the frontend and the backend (which serves as a reverse proxy, handles static frontend files, and provides additional features). Both need to be running concurrently for the development environment.
**Warning: Backend Dependency for Proper Functionality**
> [!IMPORTANT]
> The backend is required for proper functionality
### TL;DR 🚀
### Requirements 📦
- 🐰 [Bun](https://bun.sh) >= 1.0.21 or 🐢 [Node.js](https://nodejs.org/en) >= 20.10
- 🐍 [Python](https://python.org) >= 3.11
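A quick way to confirm your local toolchain meets these requirements (only one of Bun or Node.js is needed; exact binary names may differ on your system):
```bash
bun --version      # or: node --version
python3 --version  # should report 3.11 or newer
```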
### Build and Install 🛠️
Run the following commands to install:
@ -170,13 +217,17 @@ cd ollama-webui/
# Copying required .env file
cp -RPp example.env .env
# Building Frontend
# Building Frontend Using Node
npm i
npm run build
# or Building Frontend Using Bun
# bun install
# bun run build
# Serving Frontend with the Backend
cd ./backend
pip install -r requirements.txt
pip install -r requirements.txt -U
sh start.sh
```


@ -4,7 +4,7 @@
The Ollama WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues.
- **How it Works**: When you make a request (like `/ollama/api/tags`) from the Ollama WebUI, it doesn't go directly to the Ollama API. Instead, it first reaches the Ollama WebUI backend. The backend then forwards this request to the Ollama API via the route you define in the `OLLAMA_API_BASE_URL` environment variable. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
- **How it Works**: The Ollama WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Ollama WebUI backend via `/ollama/api` route. From there, the backend is responsible for forwarding the request to the Ollama API. This forwarding is accomplished by using the route specified in the `OLLAMA_API_BASE_URL` environment variable. Therefore, a request made to `/ollama/api` in the WebUI is effectively the same as making a request to `OLLAMA_API_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
- **Security Benefits**: This design prevents direct exposure of the Ollama API to the frontend, safeguarding against potential CORS (Cross-Origin Resource Sharing) issues and unauthorized access. Requiring authentication to access the Ollama API further enhances this security layer.
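To make the routing described above concrete, here is a minimal request-level sketch. The host and port are placeholders (port 3000 follows the Docker examples in the README), and the proxied route also expects a valid WebUI auth token:
```bash
TOKEN="<your WebUI auth token>"  # placeholder

# A request through the WebUI backend proxy...
curl -H "Authorization: Bearer $TOKEN" http://localhost:3000/ollama/api/tags

# ...is forwarded to OLLAMA_API_BASE_URL/tags, e.g. with OLLAMA_API_BASE_URL=http://localhost:11434/api:
curl http://localhost:11434/api/tags
```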
@ -27,6 +27,6 @@ docker run -d --network=host -v ollama-webui:/app/backend/data -e OLLAMA_API_BAS
1. **Verify Ollama URL Format**:
- When running the Web UI container, ensure the `OLLAMA_API_BASE_URL` is correctly set, including the `/api` suffix. (e.g., `http://192.168.1.1:11434/api` for different host setups).
- In the Ollama WebUI, navigate to "Settings" > "General".
- Confirm that the Ollama Server URL is correctly set to `/ollama/api`, including the `/api` suffix.
- Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]/api` (e.g., `http://localhost:11434/api`), including the `/api` suffix.
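For example, reusing the run command shown earlier in this documentation, a correctly formed URL for a different-host setup looks like this (the address is illustrative):
```bash
docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=http://192.168.1.1:11434/api \
  -v ollama-webui:/app/backend/data --name ollama-webui --restart always ollama-webui
```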
By following these enhanced troubleshooting steps, you should be able to resolve connection issues effectively. For further assistance or queries, feel free to reach out to us on our community Discord.

3
backend/.gitignore vendored

@ -4,4 +4,5 @@ _old
uploads
.ipynb_checkpoints
*.db
_test
_test
Pipfile


@ -1,115 +1,111 @@
from flask import Flask, request, Response, jsonify
from flask_cors import CORS
from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi.concurrency import run_in_threadpool
import requests
import json
from pydantic import BaseModel
from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import extract_token_from_auth_header
from utils.utils import decode_token, get_current_user
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
app = Flask(__name__)
CORS(
app
) # Enable Cross-Origin Resource Sharing (CORS) to allow requests from different domains
app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Define the target server URL
TARGET_SERVER_URL = OLLAMA_API_BASE_URL
app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
@app.route("/", defaults={"path": ""}, methods=["GET", "POST", "PUT", "DELETE"])
@app.route("/<path:path>", methods=["GET", "POST", "PUT", "DELETE"])
def proxy(path):
# Combine the base URL of the target server with the requested path
target_url = f"{TARGET_SERVER_URL}/{path}"
print(target_url)
@app.get("/url")
async def get_ollama_api_url(user=Depends(get_current_user)):
if user and user.role == "admin":
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
# Get data from the original request
data = request.get_data()
class UrlUpdateForm(BaseModel):
url: str
@app.post("/url/update")
async def update_ollama_api_url(
form_data: UrlUpdateForm, user=Depends(get_current_user)
):
if user and user.role == "admin":
app.state.OLLAMA_API_BASE_URL = form_data.url
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
body = await request.body()
headers = dict(request.headers)
# Basic RBAC support
if WEBUI_AUTH:
if "Authorization" in headers:
token = extract_token_from_auth_header(headers["Authorization"])
user = Users.get_user_by_token(token)
if user:
# Only user and admin roles can access
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
# Only admin role can perform actions above
if user.role == "admin":
pass
else:
return (
jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}),
401,
)
else:
pass
else:
return jsonify({"detail": ERROR_MESSAGES.ACCESS_PROHIBITED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
else:
return jsonify({"detail": ERROR_MESSAGES.UNAUTHORIZED}), 401
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
if user.role != "admin":
raise HTTPException(
status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
)
else:
pass
r = None
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
headers.pop("Host", None)
headers.pop("Authorization", None)
headers.pop("Origin", None)
headers.pop("Referer", None)
r = None
def get_request():
nonlocal r
try:
r = requests.request(
method=request.method,
url=target_url,
data=body,
headers=headers,
stream=True,
)
r.raise_for_status()
return StreamingResponse(
r.iter_content(chunk_size=8192),
status_code=r.status_code,
headers=dict(r.headers),
)
except Exception as e:
raise e
try:
# Make a request to the target server
r = requests.request(
method=request.method,
url=target_url,
data=data,
headers=headers,
stream=True, # Enable streaming for server-sent events
)
r.raise_for_status()
# Proxy the target server's response to the client
def generate():
for chunk in r.iter_content(chunk_size=8192):
yield chunk
response = Response(generate(), status=r.status_code)
# Copy headers from the target server's response to the client's response
for key, value in r.headers.items():
response.headers[key] = value
return response
return await run_in_threadpool(get_request)
except Exception as e:
print(e)
error_detail = "Ollama WebUI: Server Connection Error"
if r != None:
print(r.text)
res = r.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
print(res)
if r is not None:
try:
res = r.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
except:
error_detail = f"Ollama: {e}"
return (
jsonify(
{
"detail": error_detail,
"message": str(e),
}
),
400,
raise HTTPException(
status_code=r.status_code if r else 500,
detail=error_detail,
)
if __name__ == "__main__":
app.run(debug=True)


@ -0,0 +1,127 @@
from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
import requests
import json
from pydantic import BaseModel
from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH
import aiohttp
app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL
# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
@app.get("/url")
async def get_ollama_api_url(user=Depends(get_current_user)):
if user and user.role == "admin":
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
class UrlUpdateForm(BaseModel):
url: str
@app.post("/url/update")
async def update_ollama_api_url(
form_data: UrlUpdateForm, user=Depends(get_current_user)
):
if user and user.role == "admin":
app.state.OLLAMA_API_BASE_URL = form_data.url
return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
# async def fetch_sse(method, target_url, body, headers):
# async with aiohttp.ClientSession() as session:
# try:
# async with session.request(
# method, target_url, data=body, headers=headers
# ) as response:
# print(response.status)
# async for line in response.content:
# yield line
# except Exception as e:
# print(e)
# error_detail = "Ollama WebUI: Server Connection Error"
# yield json.dumps({"error": error_detail, "message": str(e)}).encode()
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
print(target_url)
body = await request.body()
headers = dict(request.headers)
if user.role in ["user", "admin"]:
if path in ["pull", "delete", "push", "copy", "create"]:
if user.role != "admin":
raise HTTPException(
status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
)
else:
raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
headers.pop("Host", None)
headers.pop("Authorization", None)
headers.pop("Origin", None)
headers.pop("Referer", None)
session = aiohttp.ClientSession()
response = None
try:
response = await session.request(
request.method, target_url, data=body, headers=headers
)
print(response)
if not response.ok:
data = await response.json()
print(data)
response.raise_for_status()
async def generate():
async for line in response.content:
print(line)
yield line
await session.close()
return StreamingResponse(generate(), response.status)
except Exception as e:
print(e)
error_detail = "Ollama WebUI: Server Connection Error"
if response is not None:
try:
res = await response.json()
if "error" in res:
error_detail = f"Ollama: {res['error']}"
except:
error_detail = f"Ollama: {e}"
await session.close()
raise HTTPException(
status_code=response.status if response else 500,
detail=error_detail,
)

143
backend/apps/openai/main.py Normal file

@ -0,0 +1,143 @@
from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse, JSONResponse
import requests
import json
from pydantic import BaseModel
from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user
from config import OPENAI_API_BASE_URL, OPENAI_API_KEY
app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.state.OPENAI_API_BASE_URL = OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = OPENAI_API_KEY
class UrlUpdateForm(BaseModel):
url: str
class KeyUpdateForm(BaseModel):
key: str
@app.get("/url")
async def get_openai_url(user=Depends(get_current_user)):
if user and user.role == "admin":
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
else:
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.post("/url/update")
async def update_openai_url(form_data: UrlUpdateForm,
user=Depends(get_current_user)):
if user and user.role == "admin":
app.state.OPENAI_API_BASE_URL = form_data.url
return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
else:
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.get("/key")
async def get_openai_key(user=Depends(get_current_user)):
if user and user.role == "admin":
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
else:
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.post("/key/update")
async def update_openai_key(form_data: KeyUpdateForm,
user=Depends(get_current_user)):
if user and user.role == "admin":
app.state.OPENAI_API_KEY = form_data.key
return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
else:
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
target_url = f"{app.state.OPENAI_API_BASE_URL}/{path}"
print(target_url, app.state.OPENAI_API_KEY)
if user.role not in ["user", "admin"]:
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
if app.state.OPENAI_API_KEY == "":
raise HTTPException(status_code=401,
detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
body = await request.body()
# headers = dict(request.headers)
# print(headers)
headers = {}
headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
headers["Content-Type"] = "application/json"
try:
r = requests.request(
method=request.method,
url=target_url,
data=body,
headers=headers,
stream=True,
)
r.raise_for_status()
# Check if response is SSE
if "text/event-stream" in r.headers.get("Content-Type", ""):
return StreamingResponse(
r.iter_content(chunk_size=8192),
status_code=r.status_code,
headers=dict(r.headers),
)
else:
# For non-SSE, read the response and return it
# response_data = (
# r.json()
# if r.headers.get("Content-Type", "")
# == "application/json"
# else r.text
# )
response_data = r.json()
print(type(response_data))
if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
response_data["data"] = list(
filter(lambda model: "gpt" in model["id"],
response_data["data"]))
return response_data
except Exception as e:
print(e)
error_detail = "Ollama WebUI: Server Connection Error"
if r is not None:
try:
res = r.json()
if "error" in res:
error_detail = f"External: {res['error']}"
except:
error_detail = f"External: {e}"
raise HTTPException(status_code=r.status_code, detail=error_detail)


@ -1,13 +1,16 @@
from fastapi import FastAPI, Request, Depends, HTTPException
from fastapi import FastAPI, Depends
from fastapi.routing import APIRoute
from fastapi.middleware.cors import CORSMiddleware
from apps.web.routers import auths, users, chats, modelfiles, utils
from apps.web.routers import auths, users, chats, modelfiles, prompts, configs, utils
from config import WEBUI_VERSION, WEBUI_AUTH
app = FastAPI()
origins = ["*"]
app.state.ENABLE_SIGNUP = True
app.state.DEFAULT_MODELS = None
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
@ -16,16 +19,23 @@ app.add_middleware(
allow_headers=["*"],
)
app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(chats.router, prefix="/chats", tags=["chats"])
app.include_router(modelfiles.router, prefix="/modelfiles", tags=["modelfiles"])
app.include_router(modelfiles.router,
prefix="/modelfiles",
tags=["modelfiles"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
app.include_router(configs.router, prefix="/configs", tags=["configs"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])
@app.get("/")
async def get_status():
return {"status": True, "version": WEBUI_VERSION, "auth": WEBUI_AUTH}
return {
"status": True,
"version": WEBUI_VERSION,
"auth": WEBUI_AUTH,
"default_models": app.state.DEFAULT_MODELS,
}


@ -4,7 +4,6 @@ import time
import uuid
from peewee import *
from apps.web.models.users import UserModel, Users
from utils.utils import (
verify_password,
@ -123,6 +122,15 @@ class AuthsTable:
except:
return False
def update_email_by_id(self, id: str, email: str) -> bool:
try:
query = Auth.update(email=email).where(Auth.id == id)
result = query.execute()
return True if result == 1 else False
except:
return False
def delete_auth_by_id(self, id: str) -> bool:
try:
# Delete User


@ -3,14 +3,12 @@ from typing import List, Union, Optional
from peewee import *
from playhouse.shortcuts import model_to_dict
import json
import uuid
import time
from apps.web.internal.db import DB
####################
# Chat DB Schema
####################
@ -62,23 +60,23 @@ class ChatTitleIdResponse(BaseModel):
class ChatTable:
def __init__(self, db):
self.db = db
db.create_tables([Chat])
def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]:
def insert_new_chat(self, user_id: str,
form_data: ChatForm) -> Optional[ChatModel]:
id = str(uuid.uuid4())
chat = ChatModel(
**{
"id": id,
"user_id": user_id,
"title": form_data.chat["title"]
if "title" in form_data.chat
else "New Chat",
"title": form_data.chat["title"] if "title" in
form_data.chat else "New Chat",
"chat": json.dumps(form_data.chat),
"timestamp": int(time.time()),
}
)
})
result = Chat.create(**chat.model_dump())
return chat if result else None
@ -111,27 +109,25 @@ class ChatTable:
except:
return None
def get_chat_lists_by_user_id(
self, user_id: str, skip: int = 0, limit: int = 50
) -> List[ChatModel]:
def get_chat_lists_by_user_id(self,
user_id: str,
skip: int = 0,
limit: int = 50) -> List[ChatModel]:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.user_id == user_id)
.order_by(Chat.timestamp.desc())
ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
Chat.user_id == user_id).order_by(Chat.timestamp.desc())
# .limit(limit)
# .offset(skip)
]
def get_all_chats_by_user_id(self, user_id: str) -> List[ChatModel]:
return [
ChatModel(**model_to_dict(chat))
for chat in Chat.select()
.where(Chat.user_id == user_id)
.order_by(Chat.timestamp.desc())
ChatModel(**model_to_dict(chat)) for chat in Chat.select().where(
Chat.user_id == user_id).order_by(Chat.timestamp.desc())
]
def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]:
def get_chat_by_id_and_user_id(self, id: str,
user_id: str) -> Optional[ChatModel]:
try:
chat = Chat.get(Chat.id == id, Chat.user_id == user_id)
return ChatModel(**model_to_dict(chat))
@ -146,7 +142,8 @@ class ChatTable:
def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool:
try:
query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id))
query = Chat.delete().where((Chat.id == id)
& (Chat.user_id == user_id))
query.execute() # Remove the rows, return number of rows removed.
return True


@ -12,7 +12,7 @@ from apps.web.internal.db import DB
import json
####################
# User DB Schema
# Modelfile DB Schema
####################
@ -58,13 +58,14 @@ class ModelfileResponse(BaseModel):
class ModelfilesTable:
def __init__(self, db):
self.db = db
self.db.create_tables([Modelfile])
def insert_new_modelfile(
self, user_id: str, form_data: ModelfileForm
) -> Optional[ModelfileModel]:
self, user_id: str,
form_data: ModelfileForm) -> Optional[ModelfileModel]:
if "tagName" in form_data.modelfile:
modelfile = ModelfileModel(
**{
@ -72,8 +73,7 @@ class ModelfilesTable:
"tag_name": form_data.modelfile["tagName"],
"modelfile": json.dumps(form_data.modelfile),
"timestamp": int(time.time()),
}
)
})
try:
result = Modelfile.create(**modelfile.model_dump())
@ -87,28 +87,29 @@ class ModelfilesTable:
else:
return None
def get_modelfile_by_tag_name(self, tag_name: str) -> Optional[ModelfileModel]:
def get_modelfile_by_tag_name(self,
tag_name: str) -> Optional[ModelfileModel]:
try:
modelfile = Modelfile.get(Modelfile.tag_name == tag_name)
return ModelfileModel(**model_to_dict(modelfile))
except:
return None
def get_modelfiles(self, skip: int = 0, limit: int = 50) -> List[ModelfileResponse]:
def get_modelfiles(self,
skip: int = 0,
limit: int = 50) -> List[ModelfileResponse]:
return [
ModelfileResponse(
**{
**model_to_dict(modelfile),
"modelfile": json.loads(modelfile.modelfile),
}
)
for modelfile in Modelfile.select()
"modelfile":
json.loads(modelfile.modelfile),
}) for modelfile in Modelfile.select()
# .limit(limit).offset(skip)
]
def update_modelfile_by_tag_name(
self, tag_name: str, modelfile: dict
) -> Optional[ModelfileModel]:
self, tag_name: str, modelfile: dict) -> Optional[ModelfileModel]:
try:
query = Modelfile.update(
modelfile=json.dumps(modelfile),


@ -0,0 +1,115 @@
from pydantic import BaseModel
from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
import time
from utils.utils import decode_token
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
import json
####################
# Prompts DB Schema
####################
class Prompt(Model):
command = CharField(unique=True)
user_id = CharField()
title = CharField()
content = TextField()
timestamp = DateField()
class Meta:
database = DB
class PromptModel(BaseModel):
command: str
user_id: str
title: str
content: str
timestamp: int # timestamp in epoch
####################
# Forms
####################
class PromptForm(BaseModel):
command: str
title: str
content: str
class PromptsTable:
def __init__(self, db):
self.db = db
self.db.create_tables([Prompt])
def insert_new_prompt(self, user_id: str,
form_data: PromptForm) -> Optional[PromptModel]:
prompt = PromptModel(
**{
"user_id": user_id,
"command": form_data.command,
"title": form_data.title,
"content": form_data.content,
"timestamp": int(time.time()),
})
try:
result = Prompt.create(**prompt.model_dump())
if result:
return prompt
else:
return None
except:
return None
def get_prompt_by_command(self, command: str) -> Optional[PromptModel]:
try:
prompt = Prompt.get(Prompt.command == command)
return PromptModel(**model_to_dict(prompt))
except:
return None
def get_prompts(self) -> List[PromptModel]:
return [
PromptModel(**model_to_dict(prompt)) for prompt in Prompt.select()
# .limit(limit).offset(skip)
]
def update_prompt_by_command(
self, command: str,
form_data: PromptForm) -> Optional[PromptModel]:
try:
query = Prompt.update(
title=form_data.title,
content=form_data.content,
timestamp=int(time.time()),
).where(Prompt.command == command)
query.execute()
prompt = Prompt.get(Prompt.command == command)
return PromptModel(**model_to_dict(prompt))
except:
return None
def delete_prompt_by_command(self, command: str) -> bool:
try:
query = Prompt.delete().where((Prompt.command == command))
query.execute() # Remove the rows, return number of rows removed.
return True
except:
return False
Prompts = PromptsTable(DB)


@ -3,14 +3,11 @@ from peewee import *
from playhouse.shortcuts import model_to_dict
from typing import List, Union, Optional
import time
from utils.utils import decode_token
from utils.misc import get_gravatar_url
from apps.web.internal.db import DB
from apps.web.models.chats import Chats
####################
# User DB Schema
####################
@ -47,6 +44,13 @@ class UserRoleUpdateForm(BaseModel):
role: str
class UserUpdateForm(BaseModel):
name: str
email: str
profile_image_url: str
password: Optional[str] = None
class UsersTable:
def __init__(self, db):
self.db = db
@ -85,14 +89,6 @@ class UsersTable:
except:
return None
def get_user_by_token(self, token: str) -> Optional[UserModel]:
data = decode_token(token)
if data != None and "email" in data:
return self.get_user_by_email(data["email"])
else:
return None
def get_users(self, skip: int = 0, limit: int = 50) -> List[UserModel]:
return [
UserModel(**model_to_dict(user))
@ -112,6 +108,16 @@ class UsersTable:
except:
return None
def update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]:
try:
query = User.update(**updated).where(User.id == id)
query.execute()
user = User.get(User.id == id)
return UserModel(**model_to_dict(user))
except:
return None
def delete_user_by_id(self, id: str) -> bool:
try:
# Delete User Chats


@ -1,4 +1,4 @@
from fastapi import Response
from fastapi import Response, Request
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union
@ -18,16 +18,10 @@ from apps.web.models.auths import (
)
from apps.web.models.users import Users
from utils.utils import (
get_password_hash,
bearer_scheme,
create_token,
)
from utils.misc import get_gravatar_url
from utils.utils import get_password_hash, get_current_user, create_token
from utils.misc import get_gravatar_url, validate_email_format
from constants import ERROR_MESSAGES
router = APIRouter()
############################
@ -36,22 +30,14 @@ router = APIRouter()
@router.get("/", response_model=UserResponse)
async def get_session_user(cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
return {
"id": user.id,
"email": user.email,
"name": user.name,
"role": user.role,
"profile_image_url": user.profile_image_url,
}
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def get_session_user(user=Depends(get_current_user)):
return {
"id": user.id,
"email": user.email,
"name": user.name,
"role": user.role,
"profile_image_url": user.profile_image_url,
}
############################
@ -60,10 +46,8 @@ async def get_session_user(cred=Depends(bearer_scheme)):
@router.post("/update/password", response_model=bool)
async def update_password(form_data: UpdatePasswordForm, cred=Depends(bearer_scheme)):
token = cred.credentials
session_user = Users.get_user_by_token(token)
async def update_password(form_data: UpdatePasswordForm,
session_user=Depends(get_current_user)):
if session_user:
user = Auths.authenticate_user(session_user.email, form_data.password)
@ -106,31 +90,67 @@ async def signin(form_data: SigninForm):
@router.post("/signup", response_model=SigninResponse)
async def signup(form_data: SignupForm):
if not Users.get_user_by_email(form_data.email.lower()):
try:
role = "admin" if Users.get_num_users() == 0 else "pending"
hashed = get_password_hash(form_data.password)
user = Auths.insert_new_auth(
form_data.email.lower(), hashed, form_data.name, role
)
async def signup(request: Request, form_data: SignupForm):
if request.app.state.ENABLE_SIGNUP:
if validate_email_format(form_data.email.lower()):
if not Users.get_user_by_email(form_data.email.lower()):
try:
role = "admin" if Users.get_num_users() == 0 else "pending"
hashed = get_password_hash(form_data.password)
user = Auths.insert_new_auth(form_data.email.lower(),
hashed, form_data.name, role)
if user:
token = create_token(data={"email": user.email})
# response.set_cookie(key='token', value=token, httponly=True)
if user:
token = create_token(data={"email": user.email})
# response.set_cookie(key='token', value=token, httponly=True)
return {
"token": token,
"token_type": "Bearer",
"id": user.id,
"email": user.email,
"name": user.name,
"role": user.role,
"profile_image_url": user.profile_image_url,
}
return {
"token": token,
"token_type": "Bearer",
"id": user.id,
"email": user.email,
"name": user.name,
"role": user.role,
"profile_image_url": user.profile_image_url,
}
else:
raise HTTPException(
500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
except Exception as err:
raise HTTPException(500,
detail=ERROR_MESSAGES.DEFAULT(err))
else:
raise HTTPException(500, detail=ERROR_MESSAGES.CREATE_USER_ERROR)
except Exception as err:
raise HTTPException(500, detail=ERROR_MESSAGES.DEFAULT(err))
raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
else:
raise HTTPException(400,
detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT)
else:
raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)
raise HTTPException(400, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
############################
# ToggleSignUp
############################
@router.get("/signup/enabled", response_model=bool)
async def get_sign_up_status(request: Request, user=Depends(get_current_user)):
if user.role == "admin":
return request.app.state.ENABLE_SIGNUP
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
@router.get("/signup/enabled/toggle", response_model=bool)
async def toggle_sign_up(request: Request, user=Depends(get_current_user)):
if user.role == "admin":
request.app.state.ENABLE_SIGNUP = not request.app.state.ENABLE_SIGNUP
return request.app.state.ENABLE_SIGNUP
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)


@ -1,8 +1,7 @@
from fastapi import Response
from fastapi import Depends, FastAPI, HTTPException, status
from fastapi import Depends, Request, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union, Optional
from utils.utils import get_current_user
from fastapi import APIRouter
from pydantic import BaseModel
import json
@ -18,8 +17,7 @@ from apps.web.models.chats import (
)
from utils.utils import (
bearer_scheme,
)
bearer_scheme, )
from constants import ERROR_MESSAGES
router = APIRouter()
@ -30,17 +28,9 @@ router = APIRouter()
@router.get("/", response_model=List[ChatTitleIdResponse])
async def get_user_chats(skip: int = 0, limit: int = 50, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def get_user_chats(
user=Depends(get_current_user), skip: int = 0, limit: int = 50):
return Chats.get_chat_lists_by_user_id(user.id, skip, limit)
############################
@ -49,20 +39,12 @@ async def get_user_chats(skip: int = 0, limit: int = 50, cred=Depends(bearer_sch
@router.get("/all", response_model=List[ChatResponse])
async def get_all_user_chats(cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
return [
ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
for chat in Chats.get_all_chats_by_user_id(user.id)
]
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def get_all_user_chats(user=Depends(get_current_user)):
return [
ChatResponse(**{
**chat.model_dump(), "chat": json.loads(chat.chat)
}) for chat in Chats.get_all_chats_by_user_id(user.id)
]
############################
@ -71,18 +53,9 @@ async def get_all_user_chats(cred=Depends(bearer_scheme)):
@router.post("/new", response_model=Optional[ChatResponse])
async def create_new_chat(form_data: ChatForm, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
chat = Chats.insert_new_chat(user.id, form_data)
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def create_new_chat(form_data: ChatForm, user=Depends(get_current_user)):
chat = Chats.insert_new_chat(user.id, form_data)
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
############################
@ -91,25 +64,16 @@ async def create_new_chat(form_data: ChatForm, cred=Depends(bearer_scheme)):
@router.get("/{id}", response_model=Optional[ChatResponse])
async def get_chat_by_id(id: str, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
async def get_chat_by_id(id: str, user=Depends(get_current_user)):
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
if user:
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
if chat:
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND,
)
if chat:
return ChatResponse(**{
**chat.model_dump(), "chat": json.loads(chat.chat)
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND)
############################
@ -118,26 +82,21 @@ async def get_chat_by_id(id: str, cred=Depends(bearer_scheme)):
@router.post("/{id}", response_model=Optional[ChatResponse])
async def update_chat_by_id(id: str, form_data: ChatForm, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
async def update_chat_by_id(id: str,
form_data: ChatForm,
user=Depends(get_current_user)):
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
if chat:
updated_chat = {**json.loads(chat.chat), **form_data.chat}
if user:
chat = Chats.get_chat_by_id_and_user_id(id, user.id)
if chat:
updated_chat = {**json.loads(chat.chat), **form_data.chat}
chat = Chats.update_chat_by_id(id, updated_chat)
return ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
chat = Chats.update_chat_by_id(id, updated_chat)
return ChatResponse(**{
**chat.model_dump(), "chat": json.loads(chat.chat)
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
@ -147,18 +106,9 @@ async def update_chat_by_id(id: str, form_data: ChatForm, cred=Depends(bearer_sc
@router.delete("/{id}", response_model=bool)
async def delete_chat_by_id(id: str, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
result = Chats.delete_chat_by_id_and_user_id(id, user.id)
return result
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def delete_chat_by_id(id: str, user=Depends(get_current_user)):
result = Chats.delete_chat_by_id_and_user_id(id, user.id)
return result
############################
@ -167,15 +117,6 @@ async def delete_chat_by_id(id: str, cred=Depends(bearer_scheme)):
@router.delete("/", response_model=bool)
async def delete_all_user_chats(cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
result = Chats.delete_chats_by_user_id(user.id)
return result
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def delete_all_user_chats(user=Depends(get_current_user)):
result = Chats.delete_chats_by_user_id(user.id)
return result


@ -0,0 +1,40 @@
from fastapi import Response, Request
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union
from fastapi import APIRouter
from pydantic import BaseModel
import time
import uuid
from apps.web.models.users import Users
from utils.utils import get_password_hash, get_current_user, create_token
from utils.misc import get_gravatar_url, validate_email_format
from constants import ERROR_MESSAGES
router = APIRouter()
class SetDefaultModelsForm(BaseModel):
models: str
############################
# SetDefaultModels
############################
@router.post("/default/models", response_model=str)
async def set_global_default_models(request: Request,
form_data: SetDefaultModelsForm,
user=Depends(get_current_user)):
if user.role == "admin":
request.app.state.DEFAULT_MODELS = form_data.models
return request.app.state.DEFAULT_MODELS
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)


@ -1,4 +1,3 @@
from fastapi import Response
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union, Optional
@ -6,8 +5,6 @@ from typing import List, Union, Optional
from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.users import Users
from apps.web.models.modelfiles import (
Modelfiles,
ModelfileForm,
@ -16,9 +13,7 @@ from apps.web.models.modelfiles import (
ModelfileResponse,
)
from utils.utils import (
bearer_scheme,
)
from utils.utils import get_current_user
from constants import ERROR_MESSAGES
router = APIRouter()
@ -29,17 +24,10 @@ router = APIRouter()
@router.get("/", response_model=List[ModelfileResponse])
async def get_modelfiles(skip: int = 0, limit: int = 50, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
return Modelfiles.get_modelfiles(skip, limit)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
async def get_modelfiles(skip: int = 0,
limit: int = 50,
user=Depends(get_current_user)):
return Modelfiles.get_modelfiles(skip, limit)
############################
@ -48,36 +36,27 @@ async def get_modelfiles(skip: int = 0, limit: int = 50, cred=Depends(bearer_sch
@router.post("/create", response_model=Optional[ModelfileResponse])
async def create_new_modelfile(form_data: ModelfileForm, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
async def create_new_modelfile(form_data: ModelfileForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
if user:
# Admin Only
if user.role == "admin":
modelfile = Modelfiles.insert_new_modelfile(user.id, form_data)
modelfile = Modelfiles.insert_new_modelfile(user.id, form_data)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
detail=ERROR_MESSAGES.DEFAULT(),
)
@ -87,31 +66,21 @@ async def create_new_modelfile(form_data: ModelfileForm, cred=Depends(bearer_sch
@router.post("/", response_model=Optional[ModelfileResponse])
async def get_modelfile_by_tag_name(
form_data: ModelfileTagNameForm, cred=Depends(bearer_scheme)
):
token = cred.credentials
user = Users.get_user_by_token(token)
async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
user=Depends(get_current_user)):
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if user:
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND,
)
if modelfile:
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
detail=ERROR_MESSAGES.NOT_FOUND,
)
@ -121,45 +90,33 @@ async def get_modelfile_by_tag_name(
@router.post("/update", response_model=Optional[ModelfileResponse])
async def update_modelfile_by_tag_name(
form_data: ModelfileUpdateForm, cred=Depends(bearer_scheme)
):
token = cred.credentials
user = Users.get_user_by_token(token)
async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
updated_modelfile = {
**json.loads(modelfile.modelfile),
**form_data.modelfile,
}
if user:
if user.role == "admin":
modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name)
if modelfile:
updated_modelfile = {
**json.loads(modelfile.modelfile),
**form_data.modelfile,
}
modelfile = Modelfiles.update_modelfile_by_tag_name(
form_data.tag_name, updated_modelfile)
modelfile = Modelfiles.update_modelfile_by_tag_name(
form_data.tag_name, updated_modelfile
)
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile": json.loads(modelfile.modelfile),
}
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
return ModelfileResponse(
**{
**modelfile.model_dump(),
"modelfile":
json.loads(modelfile.modelfile),
})
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
@ -169,23 +126,13 @@ async def update_modelfile_by_tag_name(
@router.delete("/delete", response_model=bool)
async def delete_modelfile_by_tag_name(
form_data: ModelfileTagNameForm, cred=Depends(bearer_scheme)
):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
if user.role == "admin":
result = Modelfiles.delete_modelfile_by_tag_name(form_data.tag_name)
return result
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
else:
async def delete_modelfile_by_tag_name(form_data: ModelfileTagNameForm,
user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
result = Modelfiles.delete_modelfile_by_tag_name(form_data.tag_name)
return result


@ -0,0 +1,114 @@
from fastapi import Depends, FastAPI, HTTPException, status
from datetime import datetime, timedelta
from typing import List, Union, Optional
from fastapi import APIRouter
from pydantic import BaseModel
import json
from apps.web.models.prompts import Prompts, PromptForm, PromptModel
from utils.utils import get_current_user
from constants import ERROR_MESSAGES
router = APIRouter()
############################
# GetPrompts
############################
@router.get("/", response_model=List[PromptModel])
async def get_prompts(user=Depends(get_current_user)):
return Prompts.get_prompts()
############################
# CreateNewPrompt
############################
@router.post("/create", response_model=Optional[PromptModel])
async def create_new_prompt(form_data: PromptForm, user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
prompt = Prompts.get_prompt_by_command(form_data.command)
if prompt == None:
prompt = Prompts.insert_new_prompt(user.id, form_data)
if prompt:
return prompt
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.DEFAULT(),
)
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.COMMAND_TAKEN,
)
############################
# GetPromptByCommand
############################
@router.get("/command/{command}", response_model=Optional[PromptModel])
async def get_prompt_by_command(command: str, user=Depends(get_current_user)):
prompt = Prompts.get_prompt_by_command(f"/{command}")
if prompt:
return prompt
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.NOT_FOUND,
)
############################
# UpdatePromptByCommand
############################
@router.post("/command/{command}/update", response_model=Optional[PromptModel])
async def update_prompt_by_command(
command: str, form_data: PromptForm, user=Depends(get_current_user)
):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
prompt = Prompts.update_prompt_by_command(f"/{command}", form_data)
if prompt:
return prompt
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
############################
# DeletePromptByCommand
############################
@router.delete("/command/{command}/delete", response_model=bool)
async def delete_prompt_by_command(command: str, user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
result = Prompts.delete_prompt_by_command(f"/{command}")
return result
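
The new prompts router above resolves the caller with Depends(get_current_user) and gates every mutating endpoint behind an admin check. A minimal smoke-test sketch follows; the router's import path, the "/prompts" mount prefix, and the PromptForm fields other than "command" are assumptions for illustration and are not shown in this diff.

    # Hedged sketch, not the project's own test suite. Assumed: the import path,
    # the "/prompts" prefix, and the exact PromptForm field names.
    from fastapi import FastAPI
    from fastapi.testclient import TestClient
    from apps.web.routers.prompts import router as prompts_router  # assumed path

    app = FastAPI()
    app.include_router(prompts_router, prefix="/prompts")
    client = TestClient(app)

    def create_prompt(admin_token: str):
        # get_current_user reads the bearer token, so every request must carry one
        return client.post(
            "/prompts/create",
            headers={"Authorization": f"Bearer {admin_token}"},
            json={"command": "/summarize", "title": "Summarize", "content": "Summarize the following text:"},
        )

A non-admin token hits the ACCESS_PROHIBITED branch above and receives a 401; reusing an existing command string returns the new COMMAND_TAKEN error with a 400.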


@ -8,15 +8,10 @@ from pydantic import BaseModel
import time
import uuid
from apps.web.models.users import UserModel, UserRoleUpdateForm, Users
from apps.web.models.users import UserModel, UserUpdateForm, UserRoleUpdateForm, Users
from apps.web.models.auths import Auths
from utils.utils import (
get_password_hash,
bearer_scheme,
create_token,
)
from utils.utils import get_current_user, get_password_hash
from constants import ERROR_MESSAGES
router = APIRouter()
@ -27,23 +22,13 @@ router = APIRouter()
@router.get("/", response_model=List[UserModel])
async def get_users(skip: int = 0, limit: int = 50, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
if user:
if user.role == "admin":
return Users.get_users(skip, limit)
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
else:
async def get_users(skip: int = 0, limit: int = 50, user=Depends(get_current_user)):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
return Users.get_users(skip, limit)
############################
@ -52,28 +37,77 @@ async def get_users(skip: int = 0, limit: int = 50, cred=Depends(bearer_scheme))
@router.post("/update/role", response_model=Optional[UserModel])
async def update_user_role(form_data: UserRoleUpdateForm, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
async def update_user_role(
form_data: UserRoleUpdateForm, user=Depends(get_current_user)
):
if user.role != "admin":
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
if user:
if user.role == "admin":
if user.id != form_data.id:
return Users.update_user_role_by_id(form_data.id, form_data.role)
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
)
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
if user.id != form_data.id:
return Users.update_user_role_by_id(form_data.id, form_data.role)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
)
############################
# UpdateUserById
############################
@router.post("/{user_id}/update", response_model=Optional[UserModel])
async def update_user_by_id(
user_id: str, form_data: UserUpdateForm, session_user=Depends(get_current_user)
):
if session_user.role != "admin":
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
user = Users.get_user_by_id(user_id)
if user:
if form_data.email.lower() != user.email:
email_user = Users.get_user_by_email(form_data.email.lower())
if email_user:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.EMAIL_TAKEN,
)
if form_data.password:
hashed = get_password_hash(form_data.password)
print(hashed)
Auths.update_user_password_by_id(user_id, hashed)
Auths.update_email_by_id(user_id, form_data.email.lower())
updated_user = Users.update_user_by_id(
user_id,
{
"name": form_data.name,
"email": form_data.email.lower(),
"profile_image_url": form_data.profile_image_url,
},
)
if updated_user:
return updated_user
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.DEFAULT(),
)
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ERROR_MESSAGES.USER_NOT_FOUND,
)
@ -83,34 +117,25 @@ async def update_user_role(form_data: UserRoleUpdateForm, cred=Depends(bearer_sc
@router.delete("/{user_id}", response_model=bool)
async def delete_user_by_id(user_id: str, cred=Depends(bearer_scheme)):
token = cred.credentials
user = Users.get_user_by_token(token)
async def delete_user_by_id(user_id: str, user=Depends(get_current_user)):
if user.role == "admin":
if user.id != user_id:
result = Auths.delete_auth_by_id(user_id)
if user:
if user.role == "admin":
if user.id != user_id:
result = Auths.delete_auth_by_id(user_id)
if result:
return True
else:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DELETE_USER_ERROR,
)
if result:
return True
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DELETE_USER_ERROR,
)
else:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
)
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
status_code=status.HTTP_403_FORBIDDEN,
detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
)
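
The refactored users router now takes the session user from Depends(get_current_user), and the new /{user_id}/update handler only re-hashes the password when one is supplied. A client-side sketch is below; the "/users" prefix under the "/api/v1" mount and the exact UserUpdateForm field set are inferred from the handler body rather than declared in this diff, so treat them as assumptions.

    # Hedged sketch of calling the new update endpoint with the requests library.
    # Field names mirror what the handler reads; the URL layout is assumed.
    import requests

    def update_user(base_url: str, admin_token: str, user_id: str) -> requests.Response:
        return requests.post(
            f"{base_url}/api/v1/users/{user_id}/update",
            headers={"Authorization": f"Bearer {admin_token}"},
            json={
                "name": "Jane Doe",
                "email": "jane@example.com",
                "profile_image_url": "/user.png",
                "password": None,  # falsy, so the handler skips the password update
            },
            timeout=30,
        )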


@ -9,12 +9,10 @@ import os
import aiohttp
import json
from utils.misc import calculate_sha256
from config import OLLAMA_API_BASE_URL
router = APIRouter()
@ -42,7 +40,10 @@ def parse_huggingface_url(hf_url):
return None
async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
async def download_file_stream(url,
file_path,
file_name,
chunk_size=1024 * 1024):
done = False
if os.path.exists(file_path):
@ -56,7 +57,8 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
async with aiohttp.ClientSession(timeout=timeout) as session:
async with session.get(url, headers=headers) as response:
total_size = int(response.headers.get("content-length", 0)) + current_size
total_size = int(response.headers.get("content-length",
0)) + current_size
with open(file_path, "ab+") as file:
async for data in response.content.iter_chunked(chunk_size):
@ -89,9 +91,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
@router.get("/download")
async def download(
url: str,
):
async def download(url: str, ):
# url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
file_name = parse_huggingface_url(url)
@ -161,4 +161,5 @@ async def upload(file: UploadFile = File(...)):
res = {"error": str(e)}
yield f"data: {json.dumps(res)}\n\n"
return StreamingResponse(file_write_stream(), media_type="text/event-stream")
return StreamingResponse(file_write_stream(),
media_type="text/event-stream")
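
The download endpoint above streams its progress as server-sent events, yielding "data: {json}" lines over a text/event-stream response. A hedged client sketch follows; the "/download" path comes from the route decorator, but the prefix this router is mounted under is not shown here and is assumed to be empty.

    # Minimal SSE-style consumer for the streaming download endpoint.
    import json
    import requests

    def follow_download(base_url: str, hf_url: str):
        with requests.get(f"{base_url}/download", params={"url": hf_url}, stream=True, timeout=None) as resp:
            for raw in resp.iter_lines():
                if raw.startswith(b"data: "):
                    event = json.loads(raw[len(b"data: "):])
                    print(event)  # progress updates, or an "error" key as in the diff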


@ -19,19 +19,28 @@ ENV = os.environ.get("ENV", "dev")
# OLLAMA_API_BASE_URL
####################################
OLLAMA_API_BASE_URL = os.environ.get(
"OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)
OLLAMA_API_BASE_URL = os.environ.get("OLLAMA_API_BASE_URL",
"http://localhost:11434/api")
if ENV == "prod":
if OLLAMA_API_BASE_URL == "/ollama/api":
OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"
####################################
# OPENAI_API
####################################
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
if OPENAI_API_BASE_URL == "":
OPENAI_API_BASE_URL = "https://api.openai.com/v1"
####################################
# WEBUI_VERSION
####################################
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.42")
WEBUI_VERSION = os.environ.get("WEBUI_VERSION", "v1.0.0-alpha.50")
####################################
# WEBUI_AUTH (Required for security)


@ -6,6 +6,7 @@ class MESSAGES(str, Enum):
class ERROR_MESSAGES(str, Enum):
def __str__(self) -> str:
return super().__str__()
@ -17,19 +18,20 @@ class ERROR_MESSAGES(str, Enum):
USERNAME_TAKEN = (
"Uh-oh! This username is already registered. Please choose another username."
)
COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
INVALID_TOKEN = (
"Your session has expired or the token is invalid. Please sign in again."
)
INVALID_CRED = "The email or password provided is incorrect. Please check for typos and try logging in again."
INVALID_EMAIL_FORMAT = "The email format you entered is invalid. Please double-check and make sure you're using a valid email address (e.g., yourname@example.com)."
INVALID_PASSWORD = (
"The password provided is incorrect. Please check for typos and try again."
)
UNAUTHORIZED = "401 Unauthorized"
ACCESS_PROHIBITED = "You do not have permission to access this resource. Please contact your administrator for assistance."
ACTION_PROHIBITED = (
"The requested action has been restricted as a security measure."
)
"The requested action has been restricted as a security measure.")
NOT_FOUND = "We could not find what you're looking for :/"
USER_NOT_FOUND = "We could not find what you're looking for :/"
API_KEY_NOT_FOUND = "Oops! It looks like there's a hiccup. The API key is missing. Please make sure to provide a valid API key to access this feature."
MALICIOUS = "Unusual activities detected, please try again in a few minutes."


@ -6,12 +6,15 @@ from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.web.main import app as webui_app
import time
class SPAStaticFiles(StaticFiles):
async def get_response(self, path: str, scope):
try:
return await super().get_response(path, scope)
@ -46,5 +49,9 @@ async def check_url(request: Request, call_next):
app.mount("/api/v1", webui_app)
app.mount("/ollama/api", WSGIMiddleware(ollama_app))
app.mount("/", SPAStaticFiles(directory="../build", html=True), name="spa-static-files")
app.mount("/ollama/api", ollama_app)
app.mount("/openai/api", openai_app)
app.mount("/",
SPAStaticFiles(directory="../build", html=True),
name="spa-static-files")


@ -18,3 +18,5 @@ bcrypt
PyJWT
pyjwt[crypto]
black

4
backend/start.sh Normal file → Executable file

@ -1 +1,3 @@
uvicorn main:app --host 0.0.0.0 --port 8080 --forwarded-allow-ips '*'
#!/usr/bin/env bash
uvicorn main:app --host 0.0.0.0 --port 8080 --forwarded-allow-ips '*'


@ -1,4 +1,5 @@
import hashlib
import re
def get_gravatar_url(email):
@ -21,3 +22,9 @@ def calculate_sha256(file):
for chunk in iter(lambda: file.read(8192), b""):
sha256.update(chunk)
return sha256.hexdigest()
def validate_email_format(email: str) -> bool:
if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
return False
return True
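
The new validate_email_format helper only checks the basic local@domain.tld shape with a regex; it does not verify that the address exists. Two illustrative calls:

    # Shape check only; both results follow directly from the regex above.
    from utils.misc import validate_email_format

    print(validate_email_format("yourname@example.com"))  # True
    print(validate_email_format("missing-at-sign.com"))   # False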


@ -1,14 +1,19 @@
from fastapi.security import HTTPBasicCredentials, HTTPBearer
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from fastapi import HTTPException, status, Depends
from apps.web.models.users import Users
from pydantic import BaseModel
from typing import Union, Optional
from constants import ERROR_MESSAGES
from passlib.context import CryptContext
from datetime import datetime, timedelta
import requests
import jwt
import logging
import config
logging.getLogger("passlib").setLevel(logging.ERROR)
JWT_SECRET_KEY = config.WEBUI_JWT_SECRET_KEY
ALGORITHM = "HS256"
@ -53,16 +58,18 @@ def extract_token_from_auth_header(auth_header: str):
return auth_header[len("Bearer ") :]
def verify_token(request):
try:
bearer = request.headers["authorization"]
if bearer:
token = bearer[len("Bearer ") :]
decoded = jwt.decode(
token, JWT_SECRET_KEY, options={"verify_signature": False}
def get_current_user(auth_token: HTTPAuthorizationCredentials = Depends(HTTPBearer())):
data = decode_token(auth_token.credentials)
if data != None and "email" in data:
user = Users.get_user_by_email(data["email"])
if user is None:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.INVALID_TOKEN,
)
return decoded
else:
return None
except Exception as e:
return None
return user
else:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail=ERROR_MESSAGES.UNAUTHORIZED,
)
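
The get_current_user dependency above centralizes token decoding and user lookup, which is what lets the routers earlier in this diff drop their per-route bearer_scheme handling. A minimal sketch of the resulting pattern (the "/admin/ping" endpoint is made up for illustration; the guard and error constants mirror the diff):

    # Hedged sketch of the Depends(get_current_user) pattern used across this commit.
    from fastapi import APIRouter, Depends, HTTPException, status
    from utils.utils import get_current_user
    from constants import ERROR_MESSAGES

    router = APIRouter()

    @router.get("/admin/ping")
    async def admin_ping(user=Depends(get_current_user)):
        if user.role != "admin":
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
            )
        return {"status": "ok", "user": user.email}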

BIN bun.lockb Executable file (binary file not shown)
BIN demo.gif (binary file not shown; size 6.3 MiB before → 5.9 MiB after)

@ -1,7 +1,7 @@
version: '3.6'
version: '3.8'
services:
ollama:
# Expose Ollama API outside the container stack
ports:
- 11434:11434
- ${OLLAMA_WEBAPI_PORT-11434}:11434

6
docker-compose.data.yaml Normal file

@ -0,0 +1,6 @@
version: '3.8'
services:
ollama:
volumes:
- ${OLLAMA_DATA_DIR-./ollama-data}:/root/.ollama


@ -1,4 +1,4 @@
version: '3.6'
version: '3.8'
services:
ollama:
@ -7,7 +7,7 @@ services:
resources:
reservations:
devices:
- driver: nvidia
count: 1
- driver: ${OLLAMA_GPU_DRIVER-nvidia}
count: ${OLLAMA_GPU_COUNT-1}
capabilities:
- gpu


@ -1,4 +1,4 @@
version: '3.6'
version: '3.8'
services:
ollama:
@ -16,16 +16,16 @@ services:
args:
OLLAMA_API_BASE_URL: '/ollama/api'
dockerfile: Dockerfile
image: ollama-webui:latest
image: ghcr.io/ollama-webui/ollama-webui:main
container_name: ollama-webui
volumes:
- ollama-webui:/app/backend/data
depends_on:
- ollama
ports:
- 3000:8080
- ${OLLAMA_WEBUI_PORT-3000}:8080
environment:
- "OLLAMA_API_BASE_URL=http://ollama:11434/api"
- 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
extra_hosts:
- host.docker.internal:host-gateway
restart: unless-stopped

3
docs/README.md Normal file

@ -0,0 +1,3 @@
# Project workflow
[![](https://mermaid.ink/img/pako:eNq1k01rAjEQhv_KkFNLFe1N9iAUevFSRVl6Cci4Gd1ANtlmsmtF_O_N7iqtHxR76ClhMu87zwyZvcicIpEIpo-KbEavGjceC2lL9EFnukQbIGXygNye5y9TY7DAZTpZLsjXXVYXg3dapRM4hh9mu5A7-3hTfSXtAtJK21Tsj8dPl3USmJZkGVbebWNKD2rNOjAYl6HJHYdkNBwNpb3U9aNZvzFNYE6h8tFiSyZzBUGJG4K1dwVwTSYQrCptlLRvLt5dA5i2la5Ruk51Ux0VKQjuxPVbAwuyiuFlNgHfzJ5DoxtgqQf1813gnZRLZ5lAYcD7WT1lpGtiQKug9C4jZrrp-Fd-1-Y1bdzo4dvnZDLz7lPHyj8sOgfg4x84E7RTuEaZt8yRZqtDfgT_rwG2u3Dv_ERPFOQL1Cqu2F5aAClCTgVJkcSrojVWJkgh7SGmYhXcYmczkQRfUU9UZfQ4baRI1miYDl_QqlPg?type=png)](https://mermaid.live/edit#pako:eNq1k01rAjEQhv_KkFNLFe1N9iAUevFSRVl6Cci4Gd1ANtlmsmtF_O_N7iqtHxR76ClhMu87zwyZvcicIpEIpo-KbEavGjceC2lL9EFnukQbIGXygNye5y9TY7DAZTpZLsjXXVYXg3dapRM4hh9mu5A7-3hTfSXtAtJK21Tsj8dPl3USmJZkGVbebWNKD2rNOjAYl6HJHYdkNBwNpb3U9aNZvzFNYE6h8tFiSyZzBUGJG4K1dwVwTSYQrCptlLRvLt5dA5i2la5Ruk51Ux0VKQjuxPVbAwuyiuFlNgHfzJ5DoxtgqQf1813gnZRLZ5lAYcD7WT1lpGtiQKug9C4jZrrp-Fd-1-Y1bdzo4dvnZDLz7lPHyj8sOgfg4x84E7RTuEaZt8yRZqtDfgT_rwG2u3Dv_ERPFOQL1Cqu2F5aAClCTgVJkcSrojVWJkgh7SGmYhXcYmczkQRfUU9UZfQ4baRI1miYDl_QqlPg)


@ -1,12 +1,6 @@
# If you're serving both the frontend and backend (Recommended)
# Set the public API base URL for seamless communication
PUBLIC_API_BASE_URL='/ollama/api'
# If you're serving only the frontend (Not recommended and not fully supported)
# Comment above and Uncomment below
# You can use the default value or specify a custom path, e.g., '/api'
# PUBLIC_API_BASE_URL='http://{location.hostname}:11434/api'
# Ollama URL for the backend to connect
# The path '/ollama/api' will be redirected to the specified backend URL
OLLAMA_API_BASE_URL='http://localhost:11434/api'
OPENAI_API_BASE_URL=''
OPENAI_API_KEY=''


@ -0,0 +1,5 @@
apiVersion: v2
name: ollama-webui
description: "Ollama Web UI: A User-Friendly Web Interface for Chat Interactions 👋"
version: 1.0.0
icon: https://raw.githubusercontent.com/ollama-webui/ollama-webui/main/static/favicon.png


@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: {{ .Values.namespace }}


@ -0,0 +1,13 @@
apiVersion: v1
kind: Service
metadata:
name: ollama-service
namespace: {{ .Values.namespace }}
spec:
type: {{ .Values.ollama.service.type }}
selector:
app: ollama
ports:
- protocol: TCP
port: {{ .Values.ollama.servicePort }}
targetPort: {{ .Values.ollama.servicePort }}


@ -0,0 +1,55 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: ollama
namespace: {{ .Values.namespace }}
spec:
serviceName: "ollama"
replicas: {{ .Values.ollama.replicaCount }}
selector:
matchLabels:
app: ollama
template:
metadata:
labels:
app: ollama
spec:
containers:
- name: ollama
image: {{ .Values.ollama.image }}
ports:
- containerPort: {{ .Values.ollama.servicePort }}
env:
{{- if .Values.ollama.gpu.enabled }}
- name: PATH
value: /usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
- name: LD_LIBRARY_PATH
value: /usr/local/nvidia/lib:/usr/local/nvidia/lib64
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
{{- end}}
{{- if .Values.ollama.resources }}
resources: {{- toYaml .Values.ollama.resources | nindent 10 }}
{{- end }}
volumeMounts:
- name: ollama-volume
mountPath: /root/.ollama
tty: true
{{- with .Values.ollama.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
tolerations:
{{- if .Values.ollama.gpu.enabled }}
- key: nvidia.com/gpu
operator: Exists
effect: NoSchedule
{{- end }}
volumeClaimTemplates:
- metadata:
name: ollama-volume
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: {{ .Values.ollama.volumeSize }}


@ -0,0 +1,38 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: ollama-webui-deployment
namespace: {{ .Values.namespace }}
spec:
replicas: 1
selector:
matchLabels:
app: ollama-webui
template:
metadata:
labels:
app: ollama-webui
spec:
containers:
- name: ollama-webui
image: {{ .Values.webui.image }}
ports:
- containerPort: 8080
{{- if .Values.webui.resources }}
resources: {{- toYaml .Values.webui.resources | nindent 10 }}
{{- end }}
volumeMounts:
- name: webui-volume
mountPath: /app/backend/data
env:
- name: OLLAMA_API_BASE_URL
value: "http://ollama-service.{{ .Values.namespace }}.svc.cluster.local:{{ .Values.ollama.servicePort }}/api"
tty: true
{{- with .Values.webui.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
volumes:
- name: webui-volume
persistentVolumeClaim:
claimName: ollama-webui-pvc


@ -0,0 +1,23 @@
{{- if .Values.webui.ingress.enabled }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: ollama-webui-ingress
namespace: {{ .Values.namespace }}
{{- if .Values.webui.ingress.annotations }}
annotations:
{{ toYaml .Values.webui.ingress.annotations | trimSuffix "\n" | indent 4 }}
{{- end }}
spec:
rules:
- host: {{ .Values.webui.ingress.host }}
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: ollama-webui-service
port:
number: {{ .Values.webui.servicePort }}
{{- end }}


@ -0,0 +1,12 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
labels:
app: ollama-webui
name: ollama-webui-pvc
namespace: {{ .Values.namespace }}
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: {{ .Values.webui.volumeSize }}


@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: ollama-webui-service
namespace: {{ .Values.namespace }}
spec:
type: {{ .Values.webui.service.type }} # Default: NodePort # Use LoadBalancer if you're on a cloud that supports it
selector:
app: ollama-webui
ports:
- protocol: TCP
port: {{ .Values.webui.servicePort }}
targetPort: {{ .Values.webui.servicePort }}
# If using NodePort, you can optionally specify the nodePort:
# nodePort: 30000


@ -0,0 +1,38 @@
namespace: ollama-namespace
ollama:
replicaCount: 1
image: ollama/ollama:latest
servicePort: 11434
resources:
limits:
cpu: "2000m"
memory: "2Gi"
nvidia.com/gpu: "0"
volumeSize: 1Gi
nodeSelector: {}
tolerations: []
service:
type: ClusterIP
gpu:
enabled: false
webui:
replicaCount: 1
image: ghcr.io/ollama-webui/ollama-webui:main
servicePort: 8080
resources:
limits:
cpu: "500m"
memory: "500Mi"
ingress:
enabled: true
annotations:
# Use appropriate annotations for your Ingress controller, e.g., for NGINX:
# nginx.ingress.kubernetes.io/rewrite-target: /
host: ollama.minikube.local
volumeSize: 1Gi
nodeSelector: {}
tolerations: []
service:
type: NodePort


@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: ollama-namespace


@ -0,0 +1,12 @@
apiVersion: v1
kind: Service
metadata:
name: ollama-service
namespace: ollama-namespace
spec:
selector:
app: ollama
ports:
- protocol: TCP
port: 11434
targetPort: 11434


@ -0,0 +1,37 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: ollama
namespace: ollama-namespace
spec:
serviceName: "ollama"
replicas: 1
selector:
matchLabels:
app: ollama
template:
metadata:
labels:
app: ollama
spec:
containers:
- name: ollama
image: ollama/ollama:latest
ports:
- containerPort: 11434
resources:
limits:
cpu: "2000m"
memory: "2Gi"
volumeMounts:
- name: ollama-volume
mountPath: /root/.ollama
tty: true
volumeClaimTemplates:
- metadata:
name: ollama-volume
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 1Gi


@ -0,0 +1,28 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: ollama-webui-deployment
namespace: ollama-namespace
spec:
replicas: 1
selector:
matchLabels:
app: ollama-webui
template:
metadata:
labels:
app: ollama-webui
spec:
containers:
- name: ollama-webui
image: ghcr.io/ollama-webui/ollama-webui:main
ports:
- containerPort: 8080
resources:
limits:
cpu: "500m"
memory: "500Mi"
env:
- name: OLLAMA_API_BASE_URL
value: "http://ollama-service.ollama-namespace.svc.cluster.local:11434/api"
tty: true


@ -0,0 +1,20 @@
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: ollama-webui-ingress
namespace: ollama-namespace
#annotations:
# Use appropriate annotations for your Ingress controller, e.g., for NGINX:
# nginx.ingress.kubernetes.io/rewrite-target: /
spec:
rules:
- host: ollama.minikube.local
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: ollama-webui-service
port:
number: 8080


@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: ollama-webui-service
namespace: ollama-namespace
spec:
type: NodePort # Use LoadBalancer if you're on a cloud that supports it
selector:
app: ollama-webui
ports:
- protocol: TCP
port: 8080
targetPort: 8080
# If using NodePort, you can optionally specify the nodePort:
# nodePort: 30000


@ -0,0 +1,12 @@
resources:
- base/ollama-namespace.yaml
- base/ollama-service.yaml
- base/ollama-statefulset.yaml
- base/webui-deployment.yaml
- base/webui-service.yaml
- base/webui-ingress.yaml
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
patches:
- path: patches/ollama-statefulset-gpu.yaml


@ -0,0 +1,17 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: ollama
namespace: ollama-namespace
spec:
selector:
matchLabels:
app: ollama
serviceName: "ollama"
template:
spec:
containers:
- name: ollama
resources:
limits:
nvidia.com/gpu: "1"

448
package-lock.json generated

@ -9,6 +9,7 @@
"version": "0.0.1",
"dependencies": {
"@sveltejs/adapter-node": "^1.3.1",
"dayjs": "^1.11.10",
"file-saver": "^2.0.5",
"highlight.js": "^11.9.0",
"idb": "^7.1.1",
@ -22,12 +23,13 @@
"devDependencies": {
"@sveltejs/adapter-auto": "^2.0.0",
"@sveltejs/adapter-static": "^2.0.3",
"@sveltejs/kit": "^1.20.4",
"@sveltejs/kit": "^1.30.0",
"@tailwindcss/typography": "^0.5.10",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@types/bun": "latest",
"@typescript-eslint/eslint-plugin": "^6.17.0",
"@typescript-eslint/parser": "^6.17.0",
"autoprefixer": "^10.4.16",
"eslint": "^8.28.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-svelte": "^2.30.0",
"postcss": "^8.4.31",
@ -429,9 +431,9 @@
}
},
"node_modules/@eslint/eslintrc": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz",
"integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==",
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
"integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
"dev": true,
"dependencies": {
"ajv": "^6.12.4",
@ -452,9 +454,9 @@
}
},
"node_modules/@eslint/js": {
"version": "8.51.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz",
"integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==",
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz",
"integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==",
"dev": true,
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
@ -469,12 +471,12 @@
}
},
"node_modules/@humanwhocodes/config-array": {
"version": "0.11.11",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz",
"integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==",
"version": "0.11.13",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz",
"integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==",
"dev": true,
"dependencies": {
"@humanwhocodes/object-schema": "^1.2.1",
"@humanwhocodes/object-schema": "^2.0.1",
"debug": "^4.1.1",
"minimatch": "^3.0.5"
},
@ -496,9 +498,9 @@
}
},
"node_modules/@humanwhocodes/object-schema": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz",
"integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==",
"dev": true
},
"node_modules/@jridgewell/gen-mapping": {
@ -783,12 +785,12 @@
}
},
"node_modules/@sveltejs/kit": {
"version": "1.26.0",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-1.26.0.tgz",
"integrity": "sha512-CV/AlTziC05yrz7UjVqEd0pH6+2dnrbmcnHGr2d3jXtmOgzNnlDkXtX8g3BfJ6nntsPD+0jtS2PzhvRHblRz4A==",
"version": "1.30.3",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-1.30.3.tgz",
"integrity": "sha512-0DzVXfU4h+tChFvoc8C61IqErCyskD4ydSIDjpKS2lYlEzIYrtYrY7juSqACFxqcvZAnOEXvSY+zZ8br0+ZMMg==",
"hasInstallScript": true,
"dependencies": {
"@sveltejs/vite-plugin-svelte": "^2.4.1",
"@sveltejs/vite-plugin-svelte": "^2.5.0",
"@types/cookie": "^0.5.1",
"cookie": "^0.5.0",
"devalue": "^4.3.1",
@ -809,14 +811,14 @@
"node": "^16.14 || >=18"
},
"peerDependencies": {
"svelte": "^3.54.0 || ^4.0.0-next.0",
"svelte": "^3.54.0 || ^4.0.0-next.0 || ^5.0.0-next.0",
"vite": "^4.0.0"
}
},
"node_modules/@sveltejs/vite-plugin-svelte": {
"version": "2.4.6",
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-2.4.6.tgz",
"integrity": "sha512-zO79p0+DZnXPnF0ltIigWDx/ux7Ni+HRaFOw720Qeivc1azFUrJxTl0OryXVibYNx1hCboGia1NRV3x8RNv4cA==",
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-2.5.3.tgz",
"integrity": "sha512-erhNtXxE5/6xGZz/M9eXsmI7Pxa6MS7jyTy06zN3Ck++ldrppOnOlJwHHTsMC7DHDQdgUp4NAc4cDNQ9eGdB/w==",
"dependencies": {
"@sveltejs/vite-plugin-svelte-inspector": "^1.0.4",
"debug": "^4.3.4",
@ -830,7 +832,7 @@
"node": "^14.18.0 || >= 16"
},
"peerDependencies": {
"svelte": "^3.54.0 || ^4.0.0",
"svelte": "^3.54.0 || ^4.0.0 || ^5.0.0-next.0",
"vite": "^4.0.0"
}
},
@ -878,6 +880,15 @@
"node": ">=4"
}
},
"node_modules/@types/bun": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@types/bun/-/bun-1.0.0.tgz",
"integrity": "sha512-TPI/aImv/fSo0SWlt29wq0tWRqQOWsC4FOXYeUK0Ni6tAS+FqJZ2p7QCGY4hmHaHQeE2KhKJ6Qn9k3kvFfXD3Q==",
"dev": true,
"dependencies": {
"bun-types": "1.0.18"
}
},
"node_modules/@types/cookie": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.5.2.tgz",
@ -889,9 +900,9 @@
"integrity": "sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA=="
},
"node_modules/@types/json-schema": {
"version": "7.0.13",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz",
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==",
"version": "7.0.15",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
"dev": true
},
"node_modules/@types/pug": {
@ -906,22 +917,22 @@
"integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q=="
},
"node_modules/@types/semver": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.3.tgz",
"integrity": "sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==",
"version": "7.5.6",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz",
"integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==",
"dev": true
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.4.tgz",
"integrity": "sha512-DAbgDXwtX+pDkAHwiGhqP3zWUGpW49B7eqmgpPtg+BKJXwdct79ut9+ifqOFPJGClGKSHXn2PTBatCnldJRUoA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.17.0.tgz",
"integrity": "sha512-Vih/4xLXmY7V490dGwBQJTpIZxH4ZFH6eCVmQ4RFkB+wmaCTDAx4dtgoWwMNGKLkqRY1L6rPqzEbjorRnDo4rQ==",
"dev": true,
"dependencies": {
"@eslint-community/regexpp": "^4.5.1",
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/type-utils": "6.7.4",
"@typescript-eslint/utils": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/type-utils": "6.17.0",
"@typescript-eslint/utils": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4",
"graphemer": "^1.4.0",
"ignore": "^5.2.4",
@ -947,15 +958,15 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.4.tgz",
"integrity": "sha512-I5zVZFY+cw4IMZUeNCU7Sh2PO5O57F7Lr0uyhgCJmhN/BuTlnc55KxPonR4+EM3GBdfiCyGZye6DgMjtubQkmA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.17.0.tgz",
"integrity": "sha512-C4bBaX2orvhK+LlwrY8oWGmSl4WolCfYm513gEccdWZj0CwGadbIADb0FtVEcI+WzUyjyoBj2JRP8g25E6IB8A==",
"dev": true,
"dependencies": {
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/typescript-estree": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4"
},
"engines": {
@ -975,13 +986,13 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.4.tgz",
"integrity": "sha512-SdGqSLUPTXAXi7c3Ob7peAGVnmMoGzZ361VswK2Mqf8UOYcODiYvs8rs5ILqEdfvX1lE7wEZbLyELCW+Yrql1A==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.17.0.tgz",
"integrity": "sha512-RX7a8lwgOi7am0k17NUO0+ZmMOX4PpjLtLRgLmT1d3lBYdWH4ssBUbwdmc5pdRX8rXon8v9x8vaoOSpkHfcXGA==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4"
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0"
},
"engines": {
"node": "^16.0.0 || >=18.0.0"
@ -992,13 +1003,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.4.tgz",
"integrity": "sha512-n+g3zi1QzpcAdHFP9KQF+rEFxMb2KxtnJGID3teA/nxKHOVi3ylKovaqEzGBbVY2pBttU6z85gp0D00ufLzViQ==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.17.0.tgz",
"integrity": "sha512-hDXcWmnbtn4P2B37ka3nil3yi3VCQO2QEB9gBiHJmQp5wmyQWqnjA85+ZcE8c4FqnaB6lBwMrPkgd4aBYz3iNg==",
"dev": true,
"dependencies": {
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/utils": "6.7.4",
"@typescript-eslint/typescript-estree": "6.17.0",
"@typescript-eslint/utils": "6.17.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.0.1"
},
@ -1019,9 +1030,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.4.tgz",
"integrity": "sha512-o9XWK2FLW6eSS/0r/tgjAGsYasLAnOWg7hvZ/dGYSSNjCh+49k5ocPN8OmG5aZcSJ8pclSOyVKP2x03Sj+RrCA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.17.0.tgz",
"integrity": "sha512-qRKs9tvc3a4RBcL/9PXtKSehI/q8wuU9xYJxe97WFxnzH8NWWtcW3ffNS+EWg8uPvIerhjsEZ+rHtDqOCiH57A==",
"dev": true,
"engines": {
"node": "^16.0.0 || >=18.0.0"
@ -1032,16 +1043,17 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.4.tgz",
"integrity": "sha512-ty8b5qHKatlNYd9vmpHooQz3Vki3gG+3PchmtsA4TgrZBKWHNjWfkQid7K7xQogBqqc7/BhGazxMD5vr6Ha+iQ==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.17.0.tgz",
"integrity": "sha512-gVQe+SLdNPfjlJn5VNGhlOhrXz4cajwFd5kAgWtZ9dCZf4XJf8xmgCTLIqec7aha3JwgLI2CK6GY1043FRxZwg==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
"minimatch": "9.0.3",
"semver": "^7.5.4",
"ts-api-utils": "^1.0.1"
},
@ -1058,18 +1070,42 @@
}
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"dependencies": {
"balanced-match": "^1.0.0"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
"integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
"dev": true,
"dependencies": {
"brace-expansion": "^2.0.1"
},
"engines": {
"node": ">=16 || 14 >=14.17"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/@typescript-eslint/utils": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.4.tgz",
"integrity": "sha512-PRQAs+HUn85Qdk+khAxsVV+oULy3VkbH3hQ8hxLRJXWBEd7iI+GbQxH5SEUSH7kbEoTp6oT1bOwyga24ELALTA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.17.0.tgz",
"integrity": "sha512-LofsSPjN/ITNkzV47hxas2JCsNCEnGhVvocfyOcLzT9c/tSZE7SfhS/iWtzP1lKNOEfLhRTZz6xqI8N2RzweSQ==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
"@types/json-schema": "^7.0.12",
"@types/semver": "^7.5.0",
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/typescript-estree": "6.17.0",
"semver": "^7.5.4"
},
"engines": {
@ -1084,12 +1120,12 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.4.tgz",
"integrity": "sha512-pOW37DUhlTZbvph50x5zZCkFn3xzwkGtNoJHzIM3svpiSkJzwOYr/kVBaXmf+RAQiUDs1AHEZVNPg6UJCJpwRA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.17.0.tgz",
"integrity": "sha512-H6VwB/k3IuIeQOyYczyyKN8wH6ed8EwliaYHLxOIhyF0dYEIsN8+Bk3GE19qafeMKyZJJHP8+O1HiFhFLUNKSg==",
"dev": true,
"dependencies": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/types": "6.17.0",
"eslint-visitor-keys": "^3.4.1"
},
"engines": {
@ -1100,6 +1136,12 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@ungap/structured-clone": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
"integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
"dev": true
},
"node_modules/acorn": {
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
@ -1341,6 +1383,12 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/bun-types": {
"version": "1.0.18",
"resolved": "https://registry.npmjs.org/bun-types/-/bun-types-1.0.18.tgz",
"integrity": "sha512-1XZ7AxOF8oO8FZtw1xj006JAKxEjulK3dUhsktZVN95vXBlsf4NIjQxfistVdpt24v3H2I9BwHp+UU+gXSSpAw==",
"dev": true
},
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@ -1530,6 +1578,11 @@
"node": ">=4"
}
},
"node_modules/dayjs": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz",
"integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ=="
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
@ -1688,18 +1741,19 @@
}
},
"node_modules/eslint": {
"version": "8.51.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz",
"integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==",
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz",
"integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.2",
"@eslint/js": "8.51.0",
"@humanwhocodes/config-array": "^0.11.11",
"@eslint/eslintrc": "^2.1.4",
"@eslint/js": "8.56.0",
"@humanwhocodes/config-array": "^0.11.13",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
"cross-spawn": "^7.0.2",
@ -2071,9 +2125,9 @@
}
},
"node_modules/globals": {
"version": "13.23.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz",
"integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==",
"version": "13.24.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
"integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
"dev": true,
"dependencies": {
"type-fest": "^0.20.2"
@ -3025,9 +3079,9 @@
}
},
"node_modules/punycode": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
"integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
"dev": true,
"engines": {
"node": ">=6"
@ -3886,11 +3940,11 @@
}
},
"node_modules/vitefu": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.4.tgz",
"integrity": "sha512-fanAXjSaf9xXtOOeno8wZXIhgia+CZury481LsDaV++lSvcU2R9Ch2bPh3PYFyoHW+w9LqAeYRISVQjUIew14g==",
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz",
"integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==",
"peerDependencies": {
"vite": "^3.0.0 || ^4.0.0"
"vite": "^3.0.0 || ^4.0.0 || ^5.0.0"
},
"peerDependenciesMeta": {
"vite": {
@ -4116,9 +4170,9 @@
"dev": true
},
"@eslint/eslintrc": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.2.tgz",
"integrity": "sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==",
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
"integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
"dev": true,
"requires": {
"ajv": "^6.12.4",
@ -4133,9 +4187,9 @@
}
},
"@eslint/js": {
"version": "8.51.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.51.0.tgz",
"integrity": "sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==",
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz",
"integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==",
"dev": true
},
"@fastify/busboy": {
@ -4144,12 +4198,12 @@
"integrity": "sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ=="
},
"@humanwhocodes/config-array": {
"version": "0.11.11",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.11.tgz",
"integrity": "sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==",
"version": "0.11.13",
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz",
"integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==",
"dev": true,
"requires": {
"@humanwhocodes/object-schema": "^1.2.1",
"@humanwhocodes/object-schema": "^2.0.1",
"debug": "^4.1.1",
"minimatch": "^3.0.5"
}
@ -4161,9 +4215,9 @@
"dev": true
},
"@humanwhocodes/object-schema": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
"integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz",
"integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==",
"dev": true
},
"@jridgewell/gen-mapping": {
@ -4366,11 +4420,11 @@
"requires": {}
},
"@sveltejs/kit": {
"version": "1.26.0",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-1.26.0.tgz",
"integrity": "sha512-CV/AlTziC05yrz7UjVqEd0pH6+2dnrbmcnHGr2d3jXtmOgzNnlDkXtX8g3BfJ6nntsPD+0jtS2PzhvRHblRz4A==",
"version": "1.30.3",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-1.30.3.tgz",
"integrity": "sha512-0DzVXfU4h+tChFvoc8C61IqErCyskD4ydSIDjpKS2lYlEzIYrtYrY7juSqACFxqcvZAnOEXvSY+zZ8br0+ZMMg==",
"requires": {
"@sveltejs/vite-plugin-svelte": "^2.4.1",
"@sveltejs/vite-plugin-svelte": "^2.5.0",
"@types/cookie": "^0.5.1",
"cookie": "^0.5.0",
"devalue": "^4.3.1",
@ -4386,9 +4440,9 @@
}
},
"@sveltejs/vite-plugin-svelte": {
"version": "2.4.6",
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-2.4.6.tgz",
"integrity": "sha512-zO79p0+DZnXPnF0ltIigWDx/ux7Ni+HRaFOw720Qeivc1azFUrJxTl0OryXVibYNx1hCboGia1NRV3x8RNv4cA==",
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-2.5.3.tgz",
"integrity": "sha512-erhNtXxE5/6xGZz/M9eXsmI7Pxa6MS7jyTy06zN3Ck++ldrppOnOlJwHHTsMC7DHDQdgUp4NAc4cDNQ9eGdB/w==",
"requires": {
"@sveltejs/vite-plugin-svelte-inspector": "^1.0.4",
"debug": "^4.3.4",
@ -4431,6 +4485,15 @@
}
}
},
"@types/bun": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@types/bun/-/bun-1.0.0.tgz",
"integrity": "sha512-TPI/aImv/fSo0SWlt29wq0tWRqQOWsC4FOXYeUK0Ni6tAS+FqJZ2p7QCGY4hmHaHQeE2KhKJ6Qn9k3kvFfXD3Q==",
"dev": true,
"requires": {
"bun-types": "1.0.18"
}
},
"@types/cookie": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.5.2.tgz",
@ -4442,9 +4505,9 @@
"integrity": "sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA=="
},
"@types/json-schema": {
"version": "7.0.13",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.13.tgz",
"integrity": "sha512-RbSSoHliUbnXj3ny0CNFOoxrIDV6SUGyStHsvDqosw6CkdPV8TtWGlfecuK4ToyMEAql6pzNxgCFKanovUzlgQ==",
"version": "7.0.15",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
"integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==",
"dev": true
},
"@types/pug": {
@ -4459,22 +4522,22 @@
"integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q=="
},
"@types/semver": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.3.tgz",
"integrity": "sha512-OxepLK9EuNEIPxWNME+C6WwbRAOOI2o2BaQEGzz5Lu2e4Z5eDnEo+/aVEDMIXywoJitJ7xWd641wrGLZdtwRyw==",
"version": "7.5.6",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz",
"integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==",
"dev": true
},
"@typescript-eslint/eslint-plugin": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.4.tgz",
"integrity": "sha512-DAbgDXwtX+pDkAHwiGhqP3zWUGpW49B7eqmgpPtg+BKJXwdct79ut9+ifqOFPJGClGKSHXn2PTBatCnldJRUoA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.17.0.tgz",
"integrity": "sha512-Vih/4xLXmY7V490dGwBQJTpIZxH4ZFH6eCVmQ4RFkB+wmaCTDAx4dtgoWwMNGKLkqRY1L6rPqzEbjorRnDo4rQ==",
"dev": true,
"requires": {
"@eslint-community/regexpp": "^4.5.1",
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/type-utils": "6.7.4",
"@typescript-eslint/utils": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/type-utils": "6.17.0",
"@typescript-eslint/utils": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4",
"graphemer": "^1.4.0",
"ignore": "^5.2.4",
@ -4484,86 +4547,113 @@
}
},
"@typescript-eslint/parser": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.7.4.tgz",
"integrity": "sha512-I5zVZFY+cw4IMZUeNCU7Sh2PO5O57F7Lr0uyhgCJmhN/BuTlnc55KxPonR4+EM3GBdfiCyGZye6DgMjtubQkmA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.17.0.tgz",
"integrity": "sha512-C4bBaX2orvhK+LlwrY8oWGmSl4WolCfYm513gEccdWZj0CwGadbIADb0FtVEcI+WzUyjyoBj2JRP8g25E6IB8A==",
"dev": true,
"requires": {
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/typescript-estree": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4"
}
},
"@typescript-eslint/scope-manager": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.7.4.tgz",
"integrity": "sha512-SdGqSLUPTXAXi7c3Ob7peAGVnmMoGzZ361VswK2Mqf8UOYcODiYvs8rs5ILqEdfvX1lE7wEZbLyELCW+Yrql1A==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.17.0.tgz",
"integrity": "sha512-RX7a8lwgOi7am0k17NUO0+ZmMOX4PpjLtLRgLmT1d3lBYdWH4ssBUbwdmc5pdRX8rXon8v9x8vaoOSpkHfcXGA==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4"
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0"
}
},
"@typescript-eslint/type-utils": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.7.4.tgz",
"integrity": "sha512-n+g3zi1QzpcAdHFP9KQF+rEFxMb2KxtnJGID3teA/nxKHOVi3ylKovaqEzGBbVY2pBttU6z85gp0D00ufLzViQ==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.17.0.tgz",
"integrity": "sha512-hDXcWmnbtn4P2B37ka3nil3yi3VCQO2QEB9gBiHJmQp5wmyQWqnjA85+ZcE8c4FqnaB6lBwMrPkgd4aBYz3iNg==",
"dev": true,
"requires": {
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/utils": "6.7.4",
"@typescript-eslint/typescript-estree": "6.17.0",
"@typescript-eslint/utils": "6.17.0",
"debug": "^4.3.4",
"ts-api-utils": "^1.0.1"
}
},
"@typescript-eslint/types": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.7.4.tgz",
"integrity": "sha512-o9XWK2FLW6eSS/0r/tgjAGsYasLAnOWg7hvZ/dGYSSNjCh+49k5ocPN8OmG5aZcSJ8pclSOyVKP2x03Sj+RrCA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.17.0.tgz",
"integrity": "sha512-qRKs9tvc3a4RBcL/9PXtKSehI/q8wuU9xYJxe97WFxnzH8NWWtcW3ffNS+EWg8uPvIerhjsEZ+rHtDqOCiH57A==",
"dev": true
},
"@typescript-eslint/typescript-estree": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.4.tgz",
"integrity": "sha512-ty8b5qHKatlNYd9vmpHooQz3Vki3gG+3PchmtsA4TgrZBKWHNjWfkQid7K7xQogBqqc7/BhGazxMD5vr6Ha+iQ==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.17.0.tgz",
"integrity": "sha512-gVQe+SLdNPfjlJn5VNGhlOhrXz4cajwFd5kAgWtZ9dCZf4XJf8xmgCTLIqec7aha3JwgLI2CK6GY1043FRxZwg==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/visitor-keys": "6.7.4",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/visitor-keys": "6.17.0",
"debug": "^4.3.4",
"globby": "^11.1.0",
"is-glob": "^4.0.3",
"minimatch": "9.0.3",
"semver": "^7.5.4",
"ts-api-utils": "^1.0.1"
},
"dependencies": {
"brace-expansion": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
"integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
"dev": true,
"requires": {
"balanced-match": "^1.0.0"
}
},
"minimatch": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
"integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
"dev": true,
"requires": {
"brace-expansion": "^2.0.1"
}
}
}
},
"@typescript-eslint/utils": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.7.4.tgz",
"integrity": "sha512-PRQAs+HUn85Qdk+khAxsVV+oULy3VkbH3hQ8hxLRJXWBEd7iI+GbQxH5SEUSH7kbEoTp6oT1bOwyga24ELALTA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.17.0.tgz",
"integrity": "sha512-LofsSPjN/ITNkzV47hxas2JCsNCEnGhVvocfyOcLzT9c/tSZE7SfhS/iWtzP1lKNOEfLhRTZz6xqI8N2RzweSQ==",
"dev": true,
"requires": {
"@eslint-community/eslint-utils": "^4.4.0",
"@types/json-schema": "^7.0.12",
"@types/semver": "^7.5.0",
"@typescript-eslint/scope-manager": "6.7.4",
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/typescript-estree": "6.7.4",
"@typescript-eslint/scope-manager": "6.17.0",
"@typescript-eslint/types": "6.17.0",
"@typescript-eslint/typescript-estree": "6.17.0",
"semver": "^7.5.4"
}
},
"@typescript-eslint/visitor-keys": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.4.tgz",
"integrity": "sha512-pOW37DUhlTZbvph50x5zZCkFn3xzwkGtNoJHzIM3svpiSkJzwOYr/kVBaXmf+RAQiUDs1AHEZVNPg6UJCJpwRA==",
"version": "6.17.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.17.0.tgz",
"integrity": "sha512-H6VwB/k3IuIeQOyYczyyKN8wH6ed8EwliaYHLxOIhyF0dYEIsN8+Bk3GE19qafeMKyZJJHP8+O1HiFhFLUNKSg==",
"dev": true,
"requires": {
"@typescript-eslint/types": "6.7.4",
"@typescript-eslint/types": "6.17.0",
"eslint-visitor-keys": "^3.4.1"
}
},
"@ungap/structured-clone": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
"integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
"dev": true
},
"acorn": {
"version": "8.10.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz",
@ -4720,6 +4810,12 @@
"resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.3.0.tgz",
"integrity": "sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw=="
},
"bun-types": {
"version": "1.0.18",
"resolved": "https://registry.npmjs.org/bun-types/-/bun-types-1.0.18.tgz",
"integrity": "sha512-1XZ7AxOF8oO8FZtw1xj006JAKxEjulK3dUhsktZVN95vXBlsf4NIjQxfistVdpt24v3H2I9BwHp+UU+gXSSpAw==",
"dev": true
},
"callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@ -4850,6 +4946,11 @@
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
"dev": true
},
"dayjs": {
"version": "1.11.10",
"resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.10.tgz",
"integrity": "sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ=="
},
"debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
@ -4969,18 +5070,19 @@
"dev": true
},
"eslint": {
"version": "8.51.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.51.0.tgz",
"integrity": "sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==",
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz",
"integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==",
"dev": true,
"requires": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.2",
"@eslint/js": "8.51.0",
"@humanwhocodes/config-array": "^0.11.11",
"@eslint/eslintrc": "^2.1.4",
"@eslint/js": "8.56.0",
"@humanwhocodes/config-array": "^0.11.13",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
"cross-spawn": "^7.0.2",
@ -5251,9 +5353,9 @@
}
},
"globals": {
"version": "13.23.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz",
"integrity": "sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==",
"version": "13.24.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
"integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
"dev": true,
"requires": {
"type-fest": "^0.20.2"
@ -5905,9 +6007,9 @@
"requires": {}
},
"punycode": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
"integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
"integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
"dev": true
},
"queue-microtask": {
@ -6430,9 +6532,9 @@
}
},
"vitefu": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.4.tgz",
"integrity": "sha512-fanAXjSaf9xXtOOeno8wZXIhgia+CZury481LsDaV++lSvcU2R9Ch2bPh3PYFyoHW+w9LqAeYRISVQjUIew14g==",
"version": "0.2.5",
"resolved": "https://registry.npmjs.org/vitefu/-/vitefu-0.2.5.tgz",
"integrity": "sha512-SgHtMLoqaeeGnd2evZ849ZbACbnwQCIwRH57t18FxcXoZop0uQu0uzlIhJBlF/eWVzuce0sHeqPcDo+evVcg8Q==",
"requires": {}
},
"which": {


@ -8,22 +8,23 @@
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"lint": "npm run eslint",
"lint": "npm run lint:frontend ; npm run lint:types ; npm run lint:backend",
"lint:frontend": "eslint . --fix",
"lint:types": "npm run check",
"fmt": "npm run prettier:svelte && npm run prettier",
"eslint": "npx -p eslint@8 -- eslint .",
"prettier:svelte": "npx -p prettier@2 -- prettier --plugin-search-dir . --write .",
"prettier": "npx -p prettier@2 -- prettier --write '**/*.{js,css,md,html,json}'"
"lint:backend": "pylint backend/",
"format": "prettier --plugin-search-dir --write '**/*.{js,ts,svelte,css,md,html,json}'",
"format:backend": "yapf --recursive backend -p -i"
},
"devDependencies": {
"@sveltejs/adapter-auto": "^2.0.0",
"@sveltejs/adapter-static": "^2.0.3",
"@sveltejs/kit": "^1.20.4",
"@sveltejs/kit": "^1.30.0",
"@tailwindcss/typography": "^0.5.10",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@types/bun": "latest",
"@typescript-eslint/eslint-plugin": "^6.17.0",
"@typescript-eslint/parser": "^6.17.0",
"autoprefixer": "^10.4.16",
"eslint": "^8.28.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-svelte": "^2.30.0",
"postcss": "^8.4.31",
@ -39,6 +40,7 @@
"type": "module",
"dependencies": {
"@sveltejs/adapter-node": "^1.3.1",
"dayjs": "^1.11.10",
"file-saver": "^2.0.5",
"highlight.js": "^11.9.0",
"idb": "^7.1.1",

run-compose.sh Executable file

@ -0,0 +1,237 @@
#!/bin/bash
# Define color and formatting codes
BOLD='\033[1m'
GREEN='\033[1;32m'
WHITE='\033[1;37m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Unicode character for tick mark
TICK='\u2713'
# Detect GPU driver
get_gpu_driver() {
# Detect NVIDIA GPUs
if lspci | grep -i nvidia >/dev/null; then
echo "nvidia"
return
fi
# Detect AMD GPUs (including GCN architecture check for amdgpu vs radeon)
if lspci | grep -i amd >/dev/null; then
# List of known GCN and later architecture cards
# This is a simplified list, and in a real-world scenario, you'd want a more comprehensive one
local gcn_and_later=("Radeon HD 7000" "Radeon HD 8000" "Radeon R5" "Radeon R7" "Radeon R9" "Radeon RX")
# Get GPU information
local gpu_info=$(lspci | grep -i 'vga.*amd')
for model in "${gcn_and_later[@]}"; do
if echo "$gpu_info" | grep -iq "$model"; then
echo "amdgpu"
return
fi
done
# Default to radeon if no GCN or later architecture is detected
echo "radeon"
return
fi
# Detect Intel GPUs
if lspci | grep -i intel >/dev/null; then
echo "i915"
return
fi
# If no known GPU is detected
echo "Unknown or unsupported GPU driver"
exit 1
}
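# Example: on a machine with an NVIDIA card this prints "nvidia"; the value is exported
# below as OLLAMA_GPU_DRIVER when --enable-gpu is passed.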
# Function for rolling animation
show_loading() {
local spin='-\|/'
local i=0
printf " "
while kill -0 $1 2>/dev/null; do
i=$(( (i+1) %4 ))
printf "\b${spin:$i:1}"
sleep .1
done
# Replace the spinner with a tick
printf "\b${GREEN}${TICK}${NC}"
}
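# Example (hypothetical command): long_task & show_loading $!
# The spinner runs until that PID exits and is then replaced with a green tick.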
# Usage information
usage() {
echo "Usage: $0 [OPTIONS]"
echo "Options:"
echo " --enable-gpu[count=COUNT] Enable GPU support with the specified count."
echo " --enable-api[port=PORT] Enable API and expose it on the specified port."
echo " --webui[port=PORT] Set the port for the web user interface."
echo " --data[folder=PATH] Bind mount for ollama data folder (by default will create the 'ollama' volume)."
echo " --build Build the docker image before running the compose project."
echo " --drop Drop the compose project."
echo " -q, --quiet Run script in headless mode."
echo " -h, --help Show this help message."
echo ""
echo "Examples:"
echo " $0 --drop"
echo " $0 --enable-gpu[count=1]"
echo " $0 --enable-api[port=11435]"
echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000]"
echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data]"
echo " $0 --enable-gpu[count=1] --enable-api[port=12345] --webui[port=3000] --data[folder=./ollama-data] --build"
echo ""
echo "This script configures and runs a docker-compose setup with optional GPU support, API exposure, and web UI configuration."
echo "About the gpu to use, the script automatically detects it using the "lspci" command."
echo "In this case the gpu detected is: $(get_gpu_driver)"
}
# Default values
gpu_count=1
api_port=11435
webui_port=3000
headless=false
build_image=false
kill_compose=false
# Function to extract value from the parameter
extract_value() {
echo "$1" | sed -E 's/.*\[.*=(.*)\].*/\1/; t; s/.*//'
}
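# e.g. extract_value "--enable-gpu[count=2]" prints "2"; if no [key=value] suffix is present,
# the fallback substitution prints an empty string so callers can apply their defaults.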
# Parse arguments
while [[ $# -gt 0 ]]; do
key="$1"
case $key in
--enable-gpu*)
enable_gpu=true
value=$(extract_value "$key")
gpu_count=${value:-1}
;;
--enable-api*)
enable_api=true
value=$(extract_value "$key")
api_port=${value:-11435}
;;
--webui*)
value=$(extract_value "$key")
webui_port=${value:-3000}
;;
--data*)
value=$(extract_value "$key")
data_dir=${value:-"./ollama-data"}
;;
--drop)
kill_compose=true
;;
--build)
build_image=true
;;
-q|--quiet)
headless=true
;;
-h|--help)
usage
exit
;;
*)
# Unknown option
echo "Unknown option: $key"
usage
exit 1
;;
esac
shift # past argument or value
done
if [[ $kill_compose == true ]]; then
docker compose down --remove-orphans
echo -e "${GREEN}${BOLD}Compose project dropped successfully.${NC}"
exit
else
DEFAULT_COMPOSE_COMMAND="docker compose -f docker-compose.yaml"
if [[ $enable_gpu == true ]]; then
# Validate and process command-line arguments
if [[ -n $gpu_count ]]; then
if ! [[ $gpu_count =~ ^[0-9]+$ ]]; then
echo "Invalid GPU count: $gpu_count"
exit 1
fi
echo "Enabling GPU with $gpu_count GPUs"
# Add your GPU allocation logic here
export OLLAMA_GPU_DRIVER=$(get_gpu_driver)
export OLLAMA_GPU_COUNT=$gpu_count # Set OLLAMA_GPU_COUNT environment variable
fi
DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.gpu.yaml"
fi
if [[ $enable_api == true ]]; then
DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.api.yaml"
if [[ -n $api_port ]]; then
export OLLAMA_WEBAPI_PORT=$api_port # Set OLLAMA_WEBAPI_PORT environment variable
fi
fi
if [[ -n $data_dir ]]; then
DEFAULT_COMPOSE_COMMAND+=" -f docker-compose.data.yaml"
export OLLAMA_DATA_DIR=$data_dir # Set OLLAMA_DATA_DIR environment variable
fi
DEFAULT_COMPOSE_COMMAND+=" up -d"
DEFAULT_COMPOSE_COMMAND+=" --remove-orphans"
DEFAULT_COMPOSE_COMMAND+=" --force-recreate"
if [[ $build_image == true ]]; then
DEFAULT_COMPOSE_COMMAND+=" --build"
fi
fi
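# Example of the assembled command for "--enable-gpu[count=1] --enable-api[port=12345] --build":
#   docker compose -f docker-compose.yaml -f docker-compose.gpu.yaml -f docker-compose.api.yaml up -d --remove-orphans --force-recreate --build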
# Recap of environment variables
echo
echo -e "${WHITE}${BOLD}Current Setup:${NC}"
echo -e " ${GREEN}${BOLD}GPU Driver:${NC} ${OLLAMA_GPU_DRIVER:-Not Enabled}"
echo -e " ${GREEN}${BOLD}GPU Count:${NC} ${OLLAMA_GPU_COUNT:-Not Enabled}"
echo -e " ${GREEN}${BOLD}WebAPI Port:${NC} ${OLLAMA_WEBAPI_PORT:-Not Enabled}"
echo -e " ${GREEN}${BOLD}Data Folder:${NC} ${data_dir:-Using ollama volume}"
echo -e " ${GREEN}${BOLD}WebUI Port:${NC} $webui_port"
echo
if [[ $headless == true ]]; then
echo -ne "${WHITE}${BOLD}Running in headless mode... ${NC}"
choice="y"
else
# Ask for user acceptance
echo -ne "${WHITE}${BOLD}Do you want to proceed with current setup? (Y/n): ${NC}"
read -n1 -s choice
fi
echo
if [[ $choice == "" || $choice == "y" || $choice == "Y" ]]; then
# Execute the command with the current user
eval "$DEFAULT_COMPOSE_COMMAND" &
# Capture the background process PID
PID=$!
# Display the loading animation
#show_loading $PID
# Wait for the command to finish
wait $PID
compose_exit=$?
echo
# Check the compose command's exit status (captured before echo resets $?)
if [ $compose_exit -eq 0 ]; then
echo -e "${GREEN}${BOLD}Compose project started successfully.${NC}"
else
echo -e "${RED}${BOLD}There was an error starting the compose project.${NC}"
fi
else
echo "Aborted."
fi
echo

run-ollama-docker.sh Normal file

@ -0,0 +1,7 @@
docker rm -f ollama || true
docker pull ollama/ollama
# CPU Only
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
# GPU Support
# docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
docker image prune -f
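# Note: model data lives in the named 'ollama' volume, so re-running this script recreates the container without re-downloading models.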


@ -16,7 +16,7 @@ html {
code {
/* white-space-collapse: preserve !important; */
white-space: pre;
overflow-x: auto;
width: auto;
}


@ -12,6 +12,10 @@
(!('theme' in localStorage) && window.matchMedia('(prefers-color-scheme: light)').matches)
) {
document.documentElement.classList.add('light');
} else if (localStorage.theme) {
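// localStorage.theme may hold several space-separated classes (e.g. 'rose-pine dark'), so each one is applied.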
localStorage.theme.split(' ').forEach((e) => {
document.documentElement.classList.add(e);
});
} else {
document.documentElement.classList.add('dark');
}


@ -119,3 +119,57 @@ export const updateUserPassword = async (token: string, password: string, newPas
return res;
};
export const getSignUpEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/enabled`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
export const toggleSignUpEnabledStatus = async (token: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/auths/signup/enabled/toggle`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};


@ -0,0 +1,31 @@
import { WEBUI_API_BASE_URL } from '$lib/constants';
export const setDefaultModels = async (token: string, models: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/configs/default/models`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
models: models
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};
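// Example (hypothetical model names): setDefaultModels(localStorage.token, 'llama2:latest,mistral:latest');
// ModelSelector calls this with selectedModels.join(',') when the signed-in user is an admin.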


@ -1,12 +1,76 @@
import { OLLAMA_API_BASE_URL } from '$lib/constants';
export const getOllamaVersion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaAPIUrl = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/version`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
url: url
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OLLAMA_API_BASE_URL;
};
export const getOllamaVersion = async (token: string = '') => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/version`, {
method: 'GET',
headers: {
Accept: 'application/json',
@ -35,13 +99,10 @@ export const getOllamaVersion = async (
return res?.version ?? '';
};
export const getOllamaModels = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = ''
) => {
export const getOllamaModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${base_url}/tags`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/tags`, {
method: 'GET',
headers: {
Accept: 'application/json',
@ -67,18 +128,15 @@ export const getOllamaModels = async (
throw error;
}
return res?.models ?? [];
return (res?.models ?? []).sort((a, b) => {
return a.name.localeCompare(b.name);
});
};
export const generateTitle = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
model: string,
prompt: string
) => {
export const generateTitle = async (token: string = '', model: string, prompt: string) => {
let error = null;
const res = await fetch(`${base_url}/generate`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/generate`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@ -86,7 +144,7 @@ export const generateTitle = async (
},
body: JSON.stringify({
model: model,
prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${prompt}`,
prompt: `Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title': ${prompt}`,
stream: false
})
})
@ -109,14 +167,10 @@ export const generateTitle = async (
return res?.response ?? 'New Chat';
};
export const generateChatCompletion = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string = '',
body: object
) => {
export const generateChatCompletion = async (token: string = '', body: object) => {
let error = null;
const res = await fetch(`${base_url}/chat`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/chat`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@ -135,15 +189,10 @@ export const generateChatCompletion = async (
return res;
};
export const createModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string,
content: string
) => {
export const createModel = async (token: string, tagName: string, content: string) => {
let error = null;
const res = await fetch(`${base_url}/create`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
@ -165,14 +214,10 @@ export const createModel = async (
return res;
};
export const deleteModel = async (
base_url: string = OLLAMA_API_BASE_URL,
token: string,
tagName: string
) => {
export const deleteModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${base_url}/delete`, {
const res = await fetch(`${OLLAMA_API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
@ -202,3 +247,27 @@ export const deleteModel = async (
return res;
};
export const pullModel = async (token: string, tagName: string) => {
let error = null;
const res = await fetch(`${OLLAMA_API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
name: tagName
})
}).catch((err) => {
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};
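// Note: pullModel resolves with the raw streaming Response rather than parsed JSON; callers read
// the NDJSON progress lines from res.body (see pullModelHandler in SettingsModal.svelte).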


@ -1,4 +1,176 @@
export const getOpenAIModels = async (
import { OPENAI_API_BASE_URL } from '$lib/constants';
export const getOpenAIUrl = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/url`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_BASE_URL;
};
export const updateOpenAIUrl = async (token: string = '', url: string) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/url/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
url: url
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_BASE_URL;
};
export const getOpenAIKey = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/key`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_KEY;
};
export const updateOpenAIKey = async (token: string = '', key: string) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/key/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
},
body: JSON.stringify({
key: key
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
if ('detail' in err) {
error = err.detail;
} else {
error = 'Server connection failed';
}
return null;
});
if (error) {
throw error;
}
return res.OPENAI_API_KEY;
};
export const getOpenAIModels = async (token: string = '') => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/models`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...(token && { authorization: `Bearer ${token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
return [];
});
if (error) {
throw error;
}
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
? models
.map((model) => ({ name: model.id, external: true }))
.sort((a, b) => {
return a.name.localeCompare(b.name);
})
: models;
};
export const getOpenAIModelsDirect = async (
base_url: string = 'https://api.openai.com/v1',
api_key: string = ''
) => {
@ -25,9 +197,35 @@ export const getOpenAIModels = async (
throw error;
}
let models = Array.isArray(res) ? res : res?.data ?? null;
const models = Array.isArray(res) ? res : res?.data ?? null;
return models
.map((model) => ({ name: model.id, external: true }))
.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true));
.filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
.sort((a, b) => {
return a.name.localeCompare(b.name);
});
};
export const generateOpenAIChatCompletion = async (token: string = '', body: object) => {
let error = null;
const res = await fetch(`${OPENAI_API_BASE_URL}/chat/completions`, {
method: 'POST',
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body)
}).catch((err) => {
console.log(err);
error = err;
return null;
});
if (error) {
throw error;
}
return res;
};


@ -0,0 +1,178 @@
import { WEBUI_API_BASE_URL } from '$lib/constants';
export const createNewPrompt = async (
token: string,
command: string,
title: string,
content: string
) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/create`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
command: `/${command}`,
title: title,
content: content
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
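// Example (hypothetical values): createNewPrompt(token, 'summarize', 'Summarize', 'Summarize the following: ')
// registers the slash command '/summarize'.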
export const getPrompts = async (token: string = '') => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const getPromptByCommand = async (token: string, command: string) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const updatePromptByCommand = async (
token: string,
command: string,
title: string,
content: string
) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}/update`, {
method: 'POST',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
},
body: JSON.stringify({
command: `/${command}`,
title: title,
content: content
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};
export const deletePromptByCommand = async (token: string, command: string) => {
let error = null;
command = command.charAt(0) === '/' ? command.slice(1) : command;
const res = await fetch(`${WEBUI_API_BASE_URL}/prompts/command/${command}/delete`, {
method: 'DELETE',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
authorization: `Bearer ${token}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.then((json) => {
return json;
})
.catch((err) => {
error = err.detail;
console.log(err);
return null;
});
if (error) {
throw error;
}
return res;
};


@ -84,3 +84,43 @@ export const deleteUserById = async (token: string, userId: string) => {
return res;
};
type UserUpdateForm = {
profile_image_url: string;
email: string;
name: string;
password: string;
};
export const updateUserById = async (token: string, userId: string, user: UserUpdateForm) => {
let error = null;
const res = await fetch(`${WEBUI_API_BASE_URL}/users/${userId}/update`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`
},
body: JSON.stringify({
profile_image_url: user.profile_image_url,
email: user.email,
name: user.name,
password: user.password !== '' ? user.password : undefined
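// JSON.stringify drops undefined values, so the password is only sent when a new one was entered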
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((err) => {
console.log(err);
error = err.detail;
return null;
});
if (error) {
throw error;
}
return res;
};


@ -0,0 +1,172 @@
<script lang="ts">
import toast from 'svelte-french-toast';
import dayjs from 'dayjs';
import { createEventDispatcher } from 'svelte';
import { onMount } from 'svelte';
import { updateUserById } from '$lib/apis/users';
import Modal from '../common/Modal.svelte';
const dispatch = createEventDispatcher();
export let show = false;
export let selectedUser;
export let sessionUser;
let _user = {
profile_image_url: '',
name: '',
email: '',
password: ''
};
const submitHandler = async () => {
const res = await updateUserById(localStorage.token, selectedUser.id, _user).catch((error) => {
toast.error(error);
});
if (res) {
dispatch('save');
show = false;
}
};
onMount(() => {
if (selectedUser) {
_user = selectedUser;
_user.password = '';
}
});
</script>
<Modal size="sm" bind:show>
<div>
<div class=" flex justify-between dark:text-gray-300 px-5 py-4">
<div class=" text-lg font-medium self-center">Edit User</div>
<button
class="self-center"
on:click={() => {
show = false;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-5 h-5"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
<hr class=" dark:border-gray-800" />
<div class="flex flex-col md:flex-row w-full p-5 md:space-x-4 dark:text-gray-200">
<div class=" flex flex-col w-full sm:flex-row sm:justify-center sm:space-x-6">
<form
class="flex flex-col w-full"
on:submit|preventDefault={() => {
submitHandler();
}}
>
<div class=" flex items-center rounded-md py-2 px-4 w-full">
<div class=" self-center mr-5">
<img
src={selectedUser.profile_image_url}
class=" max-w-[55px] object-cover rounded-full"
alt="User profile"
/>
</div>
<div>
<div class=" self-center capitalize font-semibold">{selectedUser.name}</div>
<div class="text-xs text-gray-500">
Created at {dayjs(selectedUser.timestamp * 1000).format('MMMM DD, YYYY')}
</div>
</div>
</div>
<hr class=" dark:border-gray-800 my-3 w-full" />
<div class=" flex flex-col space-y-1.5">
<div class="flex flex-col w-full">
<div class=" mb-1 text-xs text-gray-500">Email</div>
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 disabled:text-gray-500 dark:disabled:text-gray-500 outline-none"
type="email"
bind:value={_user.email}
autocomplete="off"
required
disabled={_user.id == sessionUser.id}
/>
</div>
</div>
<div class="flex flex-col w-full">
<div class=" mb-1 text-xs text-gray-500">Name</div>
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
type="text"
bind:value={_user.name}
autocomplete="off"
required
/>
</div>
</div>
<div class="flex flex-col w-full">
<div class=" mb-1 text-xs text-gray-500">New Password</div>
<div class="flex-1">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
type="password"
bind:value={_user.password}
autocomplete="new-password"
/>
</div>
</div>
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
<button
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
type="submit"
>
Save
</button>
</div>
</form>
</div>
</div>
</div>
</Modal>
<style>
input::-webkit-outer-spin-button,
input::-webkit-inner-spin-button {
/* display: none; <- Crashes Chrome on hover */
-webkit-appearance: none;
margin: 0; /* <-- some margin apparently remains even though the spin button is hidden */
}
.tabs::-webkit-scrollbar {
display: none; /* for Chrome, Safari and Opera */
}
.tabs {
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}
input[type='number'] {
-moz-appearance: textfield; /* Firefox */
}
</style>


@ -1,8 +1,11 @@
<script lang="ts">
import { settings } from '$lib/stores';
import toast from 'svelte-french-toast';
import { onMount, tick } from 'svelte';
import { settings } from '$lib/stores';
import { findWordIndices } from '$lib/utils';
import Prompts from './MessageInput/PromptCommands.svelte';
import Suggestions from './MessageInput/Suggestions.svelte';
import { onMount } from 'svelte';
export let submitPrompt: Function;
export let stopResponse: Function;
@ -11,6 +14,8 @@
export let autoScroll = true;
let filesInputElement;
let promptsElement;
let inputFiles;
let dragged = false;
@ -154,36 +159,42 @@
<div class="fixed bottom-0 w-full">
<div class="px-2.5 pt-2.5 -mb-0.5 mx-auto inset-x-0 bg-transparent flex justify-center">
{#if messages.length == 0 && suggestionPrompts.length !== 0}
<div class="max-w-3xl w-full">
<Suggestions {suggestionPrompts} {submitPrompt} />
<div class="flex flex-col max-w-3xl w-full">
<div>
{#if autoScroll === false && messages.length > 0}
<div class=" flex justify-center mb-4">
<button
class=" bg-white border border-gray-100 dark:border-none dark:bg-white/20 p-1.5 rounded-full"
on:click={() => {
autoScroll = true;
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-5 h-5"
>
<path
fill-rule="evenodd"
d="M10 3a.75.75 0 01.75.75v10.638l3.96-4.158a.75.75 0 111.08 1.04l-5.25 5.5a.75.75 0 01-1.08 0l-5.25-5.5a.75.75 0 111.08-1.04l3.96 4.158V3.75A.75.75 0 0110 3z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
{/if}
</div>
{/if}
{#if autoScroll === false && messages.length > 0}
<div class=" flex justify-center mb-4">
<button
class=" bg-white border border-gray-100 dark:border-none dark:bg-white/20 p-1.5 rounded-full"
on:click={() => {
autoScroll = true;
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-5 h-5"
>
<path
fill-rule="evenodd"
d="M10 3a.75.75 0 01.75.75v10.638l3.96-4.158a.75.75 0 111.08 1.04l-5.25 5.5a.75.75 0 01-1.08 0l-5.25-5.5a.75.75 0 111.08-1.04l3.96 4.158V3.75A.75.75 0 0110 3z"
clip-rule="evenodd"
/>
</svg>
</button>
<div class="w-full">
{#if prompt.charAt(0) === '/'}
<Prompts bind:this={promptsElement} bind:prompt />
{:else if messages.length == 0 && suggestionPrompts.length !== 0}
<Suggestions {suggestionPrompts} {submitPrompt} />
{/if}
</div>
{/if}
</div>
</div>
<div class="bg-white dark:bg-gray-800">
<div class="max-w-3xl px-2.5 -mb-0.5 mx-auto inset-x-0">
@ -287,7 +298,7 @@
id="chat-textarea"
class=" dark:bg-gray-800 dark:text-gray-100 outline-none w-full py-3 px-2 {fileUploadEnabled
? ''
: ' pl-4'} rounded-xl resize-none"
: ' pl-4'} rounded-xl resize-none h-[48px]"
placeholder={speechRecognitionListening ? 'Listening...' : 'Send a message'}
bind:value={prompt}
on:keypress={(e) => {
@ -298,6 +309,79 @@
submitPrompt(prompt);
}
}}
on:keydown={async (e) => {
if (prompt === '' && e.key == 'ArrowUp') {
e.preventDefault();
const userMessageElement = [
...document.getElementsByClassName('user-message')
]?.at(-1);
const editButton = [
...document.getElementsByClassName('edit-user-message-button')
]?.at(-1);
console.log(userMessageElement);
userMessageElement.scrollIntoView({ block: 'center' });
editButton?.click();
}
if (prompt.charAt(0) === '/' && e.key === 'ArrowUp') {
promptsElement.selectUp();
const commandOptionButton = [
...document.getElementsByClassName('selected-command-option-button')
]?.at(-1);
commandOptionButton.scrollIntoView({ block: 'center' });
}
if (prompt.charAt(0) === '/' && e.key === 'ArrowDown') {
promptsElement.selectDown();
const commandOptionButton = [
...document.getElementsByClassName('selected-command-option-button')
]?.at(-1);
commandOptionButton.scrollIntoView({ block: 'center' });
}
if (prompt.charAt(0) === '/' && e.key === 'Enter') {
e.preventDefault();
const commandOptionButton = [
...document.getElementsByClassName('selected-command-option-button')
]?.at(-1);
commandOptionButton?.click();
}
if (prompt.charAt(0) === '/' && e.key === 'Tab') {
e.preventDefault();
const commandOptionButton = [
...document.getElementsByClassName('selected-command-option-button')
]?.at(-1);
commandOptionButton?.click();
} else if (e.key === 'Tab') {
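// Plain Tab: select the first template slot reported by findWordIndices so it can be
// typed over in place (see the tip shown in PromptCommands.svelte).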
const words = findWordIndices(prompt);
if (words.length > 0) {
const word = words.at(0);
const fullPrompt = prompt;
prompt = prompt.substring(0, word?.endIndex + 1);
await tick();
e.target.scrollTop = e.target.scrollHeight;
prompt = fullPrompt;
await tick();
e.preventDefault();
e.target.setSelectionRange(word?.startIndex, word.endIndex + 1);
}
}
}}
rows="1"
on:input={(e) => {
e.target.style.height = '';


@ -0,0 +1,111 @@
<script lang="ts">
import { prompts } from '$lib/stores';
import { findWordIndices } from '$lib/utils';
import { tick } from 'svelte';
export let prompt = '';
let selectedCommandIdx = 0;
let filteredPromptCommands = [];
$: filteredPromptCommands = $prompts
.filter((p) => p.command.includes(prompt))
.sort((a, b) => a.title.localeCompare(b.title));
$: if (prompt) {
selectedCommandIdx = 0;
}
export const selectUp = () => {
selectedCommandIdx = Math.max(0, selectedCommandIdx - 1);
};
export const selectDown = () => {
selectedCommandIdx = Math.min(selectedCommandIdx + 1, filteredPromptCommands.length - 1);
};
const confirmCommand = async (command) => {
prompt = command.content;
const chatInputElement = document.getElementById('chat-textarea');
await tick();
chatInputElement.style.height = '';
chatInputElement.style.height = Math.min(chatInputElement.scrollHeight, 200) + 'px';
chatInputElement?.focus();
await tick();
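// Pre-select the first placeholder reported by findWordIndices so the user can type over it right away.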
const words = findWordIndices(prompt);
if (words.length > 0) {
const word = words.at(0);
chatInputElement.setSelectionRange(word?.startIndex, word.endIndex + 1);
}
};
</script>
{#if filteredPromptCommands.length > 0}
<div class="md:px-2 mb-3 text-left w-full">
<div class="flex w-full rounded-lg border border-gray-100 dark:border-gray-700">
<div class=" bg-gray-100 dark:bg-gray-700 w-10 rounded-l-lg text-center">
<div class=" text-lg font-semibold mt-2">/</div>
</div>
<div class="max-h-60 flex flex-col w-full rounded-r-lg">
<div class=" overflow-y-auto bg-white p-2 rounded-tr-lg space-y-0.5">
{#each filteredPromptCommands as command, commandIdx}
<button
class=" px-3 py-1.5 rounded-lg w-full text-left {commandIdx === selectedCommandIdx
? ' bg-gray-100 selected-command-option-button'
: ''}"
type="button"
on:click={() => {
confirmCommand(command);
}}
on:mousemove={() => {
selectedCommandIdx = commandIdx;
}}
on:focus={() => {}}
>
<div class=" font-medium text-black">
{command.command}
</div>
<div class=" text-xs text-gray-600">
{command.title}
</div>
</button>
{/each}
</div>
<div
class=" px-2 pb-1 text-xs text-gray-600 bg-white rounded-br-lg flex items-center space-x-1"
>
<div>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-3 h-3"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="m11.25 11.25.041-.02a.75.75 0 0 1 1.063.852l-.708 2.836a.75.75 0 0 0 1.063.853l.041-.021M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Zm-9-3.75h.008v.008H12V8.25Z"
/>
</svg>
</div>
<div class="line-clamp-1">
Tip: Update multiple variable slots consecutively by pressing the tab key in the chat
input after each replacement.
</div>
</div>
</div>
</div>
</div>
{/if}


@ -7,7 +7,7 @@
{#each suggestionPrompts as prompt, promptIdx}
<div class="{promptIdx > 1 ? 'hidden sm:inline-flex' : ''} basis-full sm:basis-1/2 p-[5px]">
<button
class=" flex-1 flex justify-between w-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-800 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-600 rounded-lg transition group"
class=" flex-1 flex justify-between w-full h-full px-4 py-2.5 bg-white hover:bg-gray-50 dark:bg-gray-800 dark:hover:bg-gray-700 outline outline-1 outline-gray-200 dark:outline-gray-600 rounded-lg transition group"
on:click={() => {
submitPrompt(prompt.content);
}}
@ -17,7 +17,9 @@
<div class="text-sm font-medium dark:text-gray-300">{prompt.title[0]}</div>
<div class="text-sm text-gray-500">{prompt.title[1]}</div>
{:else}
<div class=" self-center text-sm font-medium dark:text-gray-300">{prompt.content}</div>
<div class=" self-center text-sm font-medium dark:text-gray-300 line-clamp-2">
{prompt.content}
</div>
{/if}
</div>


@ -1,7 +1,7 @@
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { chats, config, db, modelfiles, settings, user } from '$lib/stores';
import { chats, config, modelfiles, settings, user } from '$lib/stores';
import { tick } from 'svelte';
import toast from 'svelte-french-toast';
@ -215,42 +215,44 @@
{#if messages.length == 0}
<Placeholder models={selectedModels} modelfiles={selectedModelfiles} />
{:else}
{#each messages as message, messageIdx}
<div class=" w-full">
<div class="flex justify-between px-5 mb-3 max-w-3xl mx-auto rounded-lg group">
{#if message.role === 'user'}
<UserMessage
user={$user}
{message}
siblings={message.parentId !== null
? history.messages[message.parentId]?.childrenIds ?? []
: Object.values(history.messages)
.filter((message) => message.parentId === null)
.map((message) => message.id) ?? []}
{confirmEditMessage}
{showPreviousMessage}
{showNextMessage}
{copyToClipboard}
/>
{:else}
<ResponseMessage
{message}
modelfiles={selectedModelfiles}
siblings={history.messages[message.parentId]?.childrenIds ?? []}
isLastMessage={messageIdx + 1 === messages.length}
{confirmEditResponseMessage}
{showPreviousMessage}
{showNextMessage}
{rateMessage}
{copyToClipboard}
{regenerateResponse}
/>
{/if}
{#key chatId}
{#each messages as message, messageIdx}
<div class=" w-full">
<div class="flex justify-between px-5 mb-3 max-w-3xl mx-auto rounded-lg group">
{#if message.role === 'user'}
<UserMessage
user={$user}
{message}
siblings={message.parentId !== null
? history.messages[message.parentId]?.childrenIds ?? []
: Object.values(history.messages)
.filter((message) => message.parentId === null)
.map((message) => message.id) ?? []}
{confirmEditMessage}
{showPreviousMessage}
{showNextMessage}
{copyToClipboard}
/>
{:else}
<ResponseMessage
{message}
modelfiles={selectedModelfiles}
siblings={history.messages[message.parentId]?.childrenIds ?? []}
isLastMessage={messageIdx + 1 === messages.length}
{confirmEditResponseMessage}
{showPreviousMessage}
{showNextMessage}
{rateMessage}
{copyToClipboard}
{regenerateResponse}
/>
{/if}
</div>
</div>
</div>
{/each}
{/each}
{#if bottomPadding}
<div class=" mb-10" />
{/if}
{#if bottomPadding}
<div class=" mb-10" />
{/if}
{/key}
{/if}


@ -27,7 +27,7 @@
>
{#if model in modelfiles}
<img
src={modelfiles[model]?.imageUrl}
src={modelfiles[model]?.imageUrl ?? '/ollama-dark.png'}
alt="modelfile"
class=" w-20 mb-2 rounded-full {models.length > 1
? ' border-[5px] border-white dark:border-gray-800'


@ -88,6 +88,7 @@
let code = block.querySelector('code');
code.style.borderTopRightRadius = 0;
code.style.borderTopLeftRadius = 0;
code.style.whiteSpace = 'pre';
let topBarDiv = document.createElement('div');
topBarDiv.style.backgroundColor = '#202123';
@ -284,7 +285,7 @@
</div>
</div>
{:else}
{@html marked(message.content.replace('\\\\', '\\\\\\'))}
{@html marked(message.content.replaceAll('\\', '\\\\'))}
{/if}
{#if message.done}


@ -24,6 +24,8 @@
editElement.style.height = '';
editElement.style.height = `${editElement.scrollHeight}px`;
editElement?.focus();
};
const editMessageConfirmHandler = async () => {
@ -43,7 +45,9 @@
<ProfileImage src={user?.profile_image_url ?? '/user.png'} />
<div class="w-full overflow-hidden">
<Name>You</Name>
<div class="user-message">
<Name>You</Name>
</div>
<div
class="prose chat-{message.role} w-full max-w-full dark:prose-invert prose-headings:my-0 prose-p:my-0 prose-p:-mb-4 prose-pre:my-0 prose-table:my-0 prose-blockquote:my-0 prose-img:my-0 prose-ul:-my-4 prose-ol:-my-4 prose-li:-my-3 prose-ul:-mb-6 prose-ol:-mb-6 prose-li:-mb-4 whitespace-pre-line"
@ -145,7 +149,7 @@
{/if}
<button
class="invisible group-hover:visible p-1 rounded dark:hover:bg-gray-800 transition"
class="invisible group-hover:visible p-1 rounded dark:hover:bg-gray-800 transition edit-user-message-button"
on:click={() => {
editMessageHandler();
}}


@ -1,11 +1,13 @@
<script lang="ts">
import { models, showSettings, settings } from '$lib/stores';
import { setDefaultModels } from '$lib/apis/configs';
import { models, showSettings, settings, user } from '$lib/stores';
import { onMount, tick } from 'svelte';
import toast from 'svelte-french-toast';
export let selectedModels = [''];
export let disabled = false;
const saveDefaultModel = () => {
const saveDefaultModel = async () => {
const hasEmptyModel = selectedModels.filter((it) => it === '');
if (hasEmptyModel.length) {
toast.error('Choose a model before saving...');
@ -13,8 +15,19 @@
}
settings.set({ ...$settings, models: selectedModels });
localStorage.setItem('settings', JSON.stringify($settings));
if ($user.role === 'admin') {
console.log('setting default models globally');
await setDefaultModels(localStorage.token, selectedModels.join(','));
}
toast.success('Default model updated');
};
$: if (selectedModels.length > 0 && $models.length > 0) {
selectedModels = selectedModels.map((model) =>
$models.map((m) => m.name).includes(model) ? model : ''
);
}
</script>
<div class="flex flex-col my-2">


@ -12,7 +12,8 @@
top_k: '',
top_p: '',
tfs_z: '',
num_ctx: ''
num_ctx: '',
num_predict: ''
};
</script>
@ -507,4 +508,49 @@
</div>
{/if}
</div>
<div class=" py-0.5 w-full justify-between">
<div class="flex w-full justify-between">
<div class=" self-center text-xs font-medium">Max Tokens</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
options.num_predict = options.num_predict === '' ? 128 : '';
}}
>
{#if options.num_predict === ''}
<span class="ml-2 self-center"> Default </span>
{:else}
<span class="ml-2 self-center"> Custom </span>
{/if}
</button>
</div>
{#if options.num_predict !== ''}
<div class="flex mt-0.5 space-x-2">
<div class=" flex-1">
<input
id="steps-range"
type="range"
min="-2"
max="16000"
step="1"
bind:value={options.num_predict}
class="w-full h-2 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
/>
</div>
<div class="">
<input
bind:value={options.num_predict}
type="number"
class=" bg-transparent text-center w-14"
min="-2"
max="16000"
step="1"
/>
</div>
</div>
{/if}
</div>
</div>


@ -7,19 +7,30 @@
import { config, models, settings, user, chats } from '$lib/stores';
import { splitStream, getGravatarURL } from '$lib/utils';
import { getOllamaVersion } from '$lib/apis/ollama';
import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
import {
WEB_UI_VERSION,
OLLAMA_API_BASE_URL,
WEBUI_API_BASE_URL,
WEBUI_BASE_URL
} from '$lib/constants';
getOllamaVersion,
getOllamaModels,
getOllamaAPIUrl,
updateOllamaAPIUrl,
pullModel,
createModel,
deleteModel
} from '$lib/apis/ollama';
import { createNewChat, deleteAllChats, getAllChats, getChatList } from '$lib/apis/chats';
import { WEB_UI_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import Advanced from './Settings/Advanced.svelte';
import Modal from '../common/Modal.svelte';
import { updateUserPassword } from '$lib/apis/auths';
import { goto } from '$app/navigation';
import Page from '../../../routes/(app)/+page.svelte';
import {
getOpenAIKey,
getOpenAIModels,
getOpenAIUrl,
updateOpenAIKey,
updateOpenAIUrl
} from '$lib/apis/openai';
export let show = false;
@ -33,7 +44,8 @@
let selectedTab = 'general';
// General
let API_BASE_URL = OLLAMA_API_BASE_URL;
let API_BASE_URL = '';
let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light'];
let theme = 'dark';
let notificationEnabled = false;
let system = '';
@ -53,7 +65,8 @@
top_p: '',
stop: '',
tfs_z: '',
num_ctx: ''
num_ctx: '',
num_predict: ''
};
// Models
@ -72,17 +85,21 @@
let deleteModelTag = '';
// External
let OPENAI_API_KEY = '';
let OPENAI_API_BASE_URL = '';
// Addons
let titleAutoGenerate = true;
let speechAutoSend = false;
let responseAutoCopy = false;
let gravatarEmail = '';
let OPENAI_API_KEY = '';
let OPENAI_API_BASE_URL = '';
let titleAutoGenerateModel = '';
// Chats
let saveChatHistory = true;
let importFiles;
let showDeleteConfirm = false;
@ -134,22 +151,23 @@
// About
let ollamaVersion = '';
const checkOllamaConnection = async () => {
if (API_BASE_URL === '') {
API_BASE_URL = OLLAMA_API_BASE_URL;
}
const _models = await getModels(API_BASE_URL, 'ollama');
const updateOllamaAPIUrlHandler = async () => {
API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);
const _models = await getModels('ollama');
if (_models.length > 0) {
toast.success('Server connection verified');
await models.set(_models);
saveSettings({
API_BASE_URL: API_BASE_URL
});
}
};
const updateOpenAIHandler = async () => {
OPENAI_API_BASE_URL = await updateOpenAIUrl(localStorage.token, OPENAI_API_BASE_URL);
OPENAI_API_KEY = await updateOpenAIKey(localStorage.token, OPENAI_API_KEY);
await models.set(await getModels());
};
const toggleTheme = async () => {
if (theme === 'dark') {
theme = 'light';
@ -218,73 +236,72 @@
}
};
const toggleAuthHeader = async () => {
authEnabled = !authEnabled;
const toggleSaveChatHistory = async () => {
saveChatHistory = !saveChatHistory;
console.log(saveChatHistory);
if (saveChatHistory === false) {
await goto('/');
}
saveSettings({ saveChatHistory: saveChatHistory });
};
const pullModelHandler = async () => {
modelTransferring = true;
const res = await fetch(`${API_BASE_URL}/pull`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: modelTag
})
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
const res = await pullModel(localStorage.token, modelTag);
while (true) {
const { value, done } = await reader.read();
if (done) break;
if (res) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
try {
let lines = value.split('\n');
while (true) {
const { value, done } = await reader.read();
if (done) break;
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
try {
let lines = value.split('\n');
if (data.error) {
throw data.error;
}
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
if (data.detail) {
throw data.detail;
}
if (data.status) {
if (!data.digest) {
toast.success(data.status);
if (data.error) {
throw data.error;
}
if (data.status === 'success') {
const notification = new Notification(`Ollama`, {
body: `Model '${modelTag}' has been successfully downloaded.`,
icon: '/favicon.png'
});
}
} else {
digest = data.digest;
if (data.completed) {
pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
if (data.detail) {
throw data.detail;
}
if (data.status) {
if (!data.digest) {
toast.success(data.status);
if (data.status === 'success') {
const notification = new Notification(`Ollama`, {
body: `Model '${modelTag}' has been successfully downloaded.`,
icon: '/favicon.png'
});
}
} else {
pullProgress = 100;
digest = data.digest;
if (data.completed) {
pullProgress = Math.round((data.completed / data.total) * 1000) / 10;
} else {
pullProgress = 100;
}
}
}
}
}
} catch (error) {
console.log(error);
toast.error(error);
}
} catch (error) {
console.log(error);
toast.error(error);
}
}
@ -405,21 +422,11 @@
}
if (uploaded) {
const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/create`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: `${name}:latest`,
modelfile: `FROM @${modelFileDigest}\n${modelFileContent}`
})
}).catch((err) => {
console.log(err);
return null;
});
const res = await createModel(
localStorage.token,
`${name}:latest`,
`FROM @${modelFileDigest}\n${modelFileContent}`
);
if (res && res.ok) {
const reader = res.body
@ -485,124 +492,35 @@
};
const deleteModelHandler = async () => {
const res = await fetch(`${API_BASE_URL}/delete`, {
method: 'DELETE',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
name: deleteModelTag
})
const res = await deleteModel(localStorage.token, deleteModelTag).catch((error) => {
toast.error(error);
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done) break;
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '' && line !== 'null') {
console.log(line);
let data = JSON.parse(line);
console.log(data);
if (data.error) {
throw data.error;
}
if (data.detail) {
throw data.detail;
}
if (data.status) {
}
} else {
toast.success(`Deleted ${deleteModelTag}`);
}
}
} catch (error) {
console.log(error);
toast.error(error);
}
if (res) {
toast.success(`Deleted ${deleteModelTag}`);
}
deleteModelTag = '';
models.set(await getModels());
};
const getModels = async (url = '', type = 'all') => {
let models = [];
const res = await fetch(`${url ? url : $settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/tags`, {
method: 'GET',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((error) => {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
} else {
toast.error('Server connection failed');
}
return null;
});
console.log(res);
models.push(...(res?.models ?? []));
const getModels = async (type = 'all') => {
const models = [];
models.push(
...(await getOllamaModels(localStorage.token).catch((error) => {
toast.error(error);
return [];
}))
);
// If OpenAI API Key exists
if (type === 'all' && $settings.OPENAI_API_KEY) {
const API_BASE_URL = $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
if (type === 'all' && OPENAI_API_KEY) {
const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
console.log(error);
return null;
});
// Validate OPENAI_API_KEY
const openaiModelRes = await fetch(`${API_BASE_URL}/models`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
}
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((error) => {
console.log(error);
toast.error(`OpenAI: ${error?.error?.message ?? 'Network Problem'}`);
return null;
});
const openAIModels = Array.isArray(openaiModelRes)
? openaiModelRes
: openaiModelRes?.data ?? null;
models.push(
...(openAIModels
? [
{ name: 'hr' },
...openAIModels
.map((model) => ({ name: model.id, external: true }))
.filter((model) =>
API_BASE_URL.includes('openai') ? model.name.includes('gpt') : true
)
]
: [])
);
models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
}
return models;
@ -634,15 +552,20 @@
};
onMount(async () => {
console.log('settings', $user.role === 'admin');
if ($user.role === 'admin') {
API_BASE_URL = await getOllamaAPIUrl(localStorage.token);
OPENAI_API_BASE_URL = await getOpenAIUrl(localStorage.token);
OPENAI_API_KEY = await getOpenAIKey(localStorage.token);
}
let settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
console.log(settings);
theme = localStorage.theme ?? 'dark';
notificationEnabled = settings.notificationEnabled ?? false;
API_BASE_URL = settings.API_BASE_URL ?? OLLAMA_API_BASE_URL;
system = settings.system ?? '';
requestFormat = settings.requestFormat ?? '';
options.seed = settings.seed ?? 0;
@ -657,10 +580,10 @@
titleAutoGenerate = settings.titleAutoGenerate ?? true;
speechAutoSend = settings.speechAutoSend ?? false;
responseAutoCopy = settings.responseAutoCopy ?? false;
titleAutoGenerateModel = settings.titleAutoGenerateModel ?? '';
gravatarEmail = settings.gravatarEmail ?? '';
OPENAI_API_KEY = settings.OPENAI_API_KEY ?? '';
OPENAI_API_BASE_URL = settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1';
saveChatHistory = settings.saveChatHistory ?? true;
authEnabled = settings.authHeader !== undefined ? true : false;
if (authEnabled) {
@ -668,10 +591,7 @@
authContent = settings.authHeader.split(' ')[1];
}
ollamaVersion = await getOllamaVersion(
API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
return '';
});
});
@ -755,55 +675,57 @@
<div class=" self-center">Advanced</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'models'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'models';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M10 1c3.866 0 7 1.79 7 4s-3.134 4-7 4-7-1.79-7-4 3.134-4 7-4zm5.694 8.13c.464-.264.91-.583 1.306-.952V10c0 2.21-3.134 4-7 4s-7-1.79-7-4V8.178c.396.37.842.688 1.306.953C5.838 10.006 7.854 10.5 10 10.5s4.162-.494 5.694-1.37zM3 13.179V15c0 2.21 3.134 4 7 4s7-1.79 7-4v-1.822c-.396.37-.842.688-1.306.953-1.532.875-3.548 1.369-5.694 1.369s-4.162-.494-5.694-1.37A7.009 7.009 0 013 13.179z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center">Models</div>
</button>
{#if $user?.role === 'admin'}
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'models'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'models';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M10 1c3.866 0 7 1.79 7 4s-3.134 4-7 4-7-1.79-7-4 3.134-4 7-4zm5.694 8.13c.464-.264.91-.583 1.306-.952V10c0 2.21-3.134 4-7 4s-7-1.79-7-4V8.178c.396.37.842.688 1.306.953C5.838 10.006 7.854 10.5 10 10.5s4.162-.494 5.694-1.37zM3 13.179V15c0 2.21 3.134 4 7 4s7-1.79 7-4v-1.822c-.396.37-.842.688-1.306.953-1.532.875-3.548 1.369-5.694 1.369s-4.162-.494-5.694-1.37A7.009 7.009 0 013 13.179z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class=" self-center">Models</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'external'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'external';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
/>
</svg>
</div>
<div class=" self-center">External</div>
</button>
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
'external'
? 'bg-gray-200 dark:bg-gray-700'
: ' hover:bg-gray-300 dark:hover:bg-gray-800'}"
on:click={() => {
selectedTab = 'external';
}}
>
<div class=" self-center mr-2">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M1 9.5A3.5 3.5 0 0 0 4.5 13H12a3 3 0 0 0 .917-5.857 2.503 2.503 0 0 0-3.198-3.019 3.5 3.5 0 0 0-6.628 2.171A3.5 3.5 0 0 0 1 9.5Z"
/>
</svg>
</div>
<div class=" self-center">External</div>
</button>
{/if}
<button
class="px-2.5 py-2.5 min-w-fit rounded-lg flex-1 md:flex-none flex text-right transition {selectedTab ===
@@ -944,41 +866,72 @@
<div class=" py-0.5 flex w-full justify-between">
<div class=" self-center text-xs font-medium">Theme</div>
<button
<!-- <button
class="p-1 px-3 text-xs flex rounded transition"
on:click={() => {
toggleTheme();
}}
>
{#if theme === 'dark'}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M7.455 2.004a.75.75 0 01.26.77 7 7 0 009.958 7.967.75.75 0 011.067.853A8.5 8.5 0 116.647 1.921a.75.75 0 01.808.083z"
clip-rule="evenodd"
/>
</svg>
</button> -->
<span class="ml-2 self-center"> Dark </span>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4 self-center"
>
<path
d="M10 2a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 2zM10 15a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 15zM10 7a3 3 0 100 6 3 3 0 000-6zM15.657 5.404a.75.75 0 10-1.06-1.06l-1.061 1.06a.75.75 0 001.06 1.06l1.06-1.06zM6.464 14.596a.75.75 0 10-1.06-1.06l-1.06 1.06a.75.75 0 001.06 1.06l1.06-1.06zM18 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 0118 10zM5 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 015 10zM14.596 15.657a.75.75 0 001.06-1.06l-1.06-1.061a.75.75 0 10-1.06 1.06l1.06 1.06zM5.404 6.464a.75.75 0 001.06-1.06l-1.06-1.06a.75.75 0 10-1.061 1.06l1.06 1.06z"
/>
</svg>
<span class="ml-2 self-center"> Light </span>
{/if}
</button>
<div class="flex items-center relative">
<div class=" absolute right-16">
{#if theme === 'dark'}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M7.455 2.004a.75.75 0 01.26.77 7 7 0 009.958 7.967.75.75 0 011.067.853A8.5 8.5 0 116.647 1.921a.75.75 0 01.808.083z"
clip-rule="evenodd"
/>
</svg>
{:else if theme === 'light'}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4 self-center"
>
<path
d="M10 2a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 2zM10 15a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 15zM10 7a3 3 0 100 6 3 3 0 000-6zM15.657 5.404a.75.75 0 10-1.06-1.06l-1.061 1.06a.75.75 0 001.06 1.06l1.06-1.06zM6.464 14.596a.75.75 0 10-1.06-1.06l-1.06 1.06a.75.75 0 001.06 1.06l1.06-1.06zM18 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 0118 10zM5 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 015 10zM14.596 15.657a.75.75 0 001.06-1.06l-1.06-1.061a.75.75 0 10-1.06 1.06l1.06 1.06zM5.404 6.464a.75.75 0 001.06-1.06l-1.06-1.06a.75.75 0 10-1.061 1.06l1.06 1.06z"
/>
</svg>
{/if}
</div>
<select
class="w-fit pr-8 rounded py-2 px-2 text-xs bg-transparent outline-none text-right"
bind:value={theme}
placeholder="Select a theme"
on:change={(e) => {
localStorage.theme = theme;
themes
.filter((e) => e !== theme)
.forEach((e) => {
e.split(' ').forEach((e) => {
document.documentElement.classList.remove(e);
});
});
theme.split(' ').forEach((e) => {
document.documentElement.classList.add(e);
});
console.log(theme);
}}
>
<option value="dark">Dark</option>
<option value="light">Light</option>
<option value="rose-pine dark">Rosé Pine</option>
<option value="rose-pine-dawn light">Rosé Pine Dawn</option>
</select>
</div>
</div>
<div>
@@ -1002,51 +955,51 @@
</div>
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter URL (e.g. http://localhost:8080/ollama/api)"
bind:value={API_BASE_URL}
/>
</div>
<button
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
on:click={() => {
checkOllamaConnection();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
{#if $user.role === 'admin'}
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Ollama API URL</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<input
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
placeholder="Enter URL (e.g. http://localhost:11434/api)"
bind:value={API_BASE_URL}
/>
</svg>
</button>
</div>
</div>
<button
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
on:click={() => {
updateOllamaAPIUrlHandler();
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
The field above should be set to <span
class=" text-gray-500 dark:text-gray-300 font-medium">'/ollama/api'</span
>;
<a
class=" text-gray-500 dark:text-gray-300 font-medium"
href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
target="_blank"
>
Click here for help.
</a>
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
Trouble accessing Ollama?
<a
class=" text-gray-300 font-medium"
href="https://github.com/ollama-webui/ollama-webui#troubleshooting"
target="_blank"
>
Click here for help.
</a>
</div>
</div>
</div>
{/if}
<hr class=" dark:border-gray-700" />
@@ -1064,7 +1017,6 @@
class=" px-4 py-2 bg-emerald-600 hover:bg-emerald-700 text-gray-100 transition rounded"
on:click={() => {
saveSettings({
API_BASE_URL: API_BASE_URL === '' ? OLLAMA_API_BASE_URL : API_BASE_URL,
system: system !== '' ? system : undefined
});
show = false;
@@ -1132,7 +1084,8 @@
top_k: options.top_k !== '' ? options.top_k : undefined,
top_p: options.top_p !== '' ? options.top_p : undefined,
tfs_z: options.tfs_z !== '' ? options.tfs_z : undefined,
num_ctx: options.num_ctx !== '' ? options.num_ctx : undefined
num_ctx: options.num_ctx !== '' ? options.num_ctx : undefined,
num_predict: options.num_predict !== '' ? options.num_predict : undefined
}
});
show = false;
@@ -1219,7 +1172,7 @@
<div class=" mb-2 text-xs">Pull Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, pullProgress ?? 0)}%"
>
{pullProgress ?? 0}%
@@ -1454,10 +1407,12 @@
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={() => {
saveSettings({
OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
});
updateOpenAIHandler();
// saveSettings({
// OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
// OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
// });
show = false;
}}
>
@@ -1514,10 +1469,6 @@
<form
class="flex flex-col h-full justify-between space-y-3 text-sm"
on:submit|preventDefault={() => {
saveSettings({
gravatarEmail: gravatarEmail !== '' ? gravatarEmail : undefined,
gravatarUrl: gravatarEmail !== '' ? getGravatarURL(gravatarEmail) : undefined
});
show = false;
}}
>
@@ -1527,7 +1478,7 @@
<div>
<div class=" py-0.5 flex w-full justify-between">
<div class=" self-center text-xs font-medium">Title Auto Generation</div>
<div class=" self-center text-xs font-medium">Title Auto-Generation</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
@@ -1589,6 +1540,54 @@
</div>
<hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">Set Title Auto-Generation Model</div>
<div class="flex w-full">
<div class="flex-1 mr-2">
<select
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
bind:value={titleAutoGenerateModel}
placeholder="Select a model"
>
<option value="" selected>Default</option>
{#each $models.filter((m) => m.size != null) as model}
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
>{model.name +
' (' +
(model.size / 1024 ** 3).toFixed(1) +
' GB)'}</option
>
{/each}
</select>
</div>
<button
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
on:click={() => {
saveSettings({
titleAutoGenerateModel:
titleAutoGenerateModel !== '' ? titleAutoGenerateModel : undefined
});
}}
type="button"
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-3.5 h-3.5"
>
<path
fill-rule="evenodd"
d="M13.836 2.477a.75.75 0 0 1 .75.75v3.182a.75.75 0 0 1-.75.75h-3.182a.75.75 0 0 1 0-1.5h1.37l-.84-.841a4.5 4.5 0 0 0-7.08.932.75.75 0 0 1-1.3-.75 6 6 0 0 1 9.44-1.242l.842.84V3.227a.75.75 0 0 1 .75-.75Zm-.911 7.5A.75.75 0 0 1 13.199 11a6 6 0 0 1-9.44 1.241l-.84-.84v1.371a.75.75 0 0 1-1.5 0V9.591a.75.75 0 0 1 .75-.75H5.35a.75.75 0 0 1 0 1.5H3.98l.841.841a4.5 4.5 0 0 0 7.08-.932.75.75 0 0 1 1.025-.273Z"
clip-rule="evenodd"
/>
</svg>
</button>
</div>
</div>
<!-- <hr class=" dark:border-gray-700" />
<div>
<div class=" mb-2.5 text-sm font-medium">
Gravatar Email <span class=" text-gray-400 text-sm">(optional)</span>
@@ -1611,7 +1610,7 @@
target="_blank">Gravatar.</a
>
</div>
</div>
</div> -->
</div>
<div class="flex justify-end pt-3 text-sm font-medium">
@@ -1626,6 +1625,64 @@
{:else if selectedTab === 'chats'}
<div class="flex flex-col h-full justify-between space-y-3 text-sm">
<div class=" space-y-2">
<div
class="flex flex-col justify-between rounded-md items-center py-2 px-3.5 w-full transition"
>
<div class="flex w-full justify-between">
<div class=" self-center text-sm font-medium">Chat History</div>
<button
class="p-1 px-3 text-xs flex rounded transition"
type="button"
on:click={() => {
toggleSaveChatHistory();
}}
>
{#if saveChatHistory === true}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path d="M8 9.5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3Z" />
<path
fill-rule="evenodd"
d="M1.38 8.28a.87.87 0 0 1 0-.566 7.003 7.003 0 0 1 13.238.006.87.87 0 0 1 0 .566A7.003 7.003 0 0 1 1.379 8.28ZM11 8a3 3 0 1 1-6 0 3 3 0 0 1 6 0Z"
clip-rule="evenodd"
/>
</svg>
<span class="ml-2 self-center"> On </span>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M3.28 2.22a.75.75 0 0 0-1.06 1.06l10.5 10.5a.75.75 0 1 0 1.06-1.06l-1.322-1.323a7.012 7.012 0 0 0 2.16-3.11.87.87 0 0 0 0-.567A7.003 7.003 0 0 0 4.82 3.76l-1.54-1.54Zm3.196 3.195 1.135 1.136A1.502 1.502 0 0 1 9.45 8.389l1.136 1.135a3 3 0 0 0-4.109-4.109Z"
clip-rule="evenodd"
/>
<path
d="m7.812 10.994 1.816 1.816A7.003 7.003 0 0 1 1.38 8.28a.87.87 0 0 1 0-.566 6.985 6.985 0 0 1 1.113-2.039l2.513 2.513a3 3 0 0 0 2.806 2.806Z"
/>
</svg>
<span class="ml-2 self-center">Off</span>
{/if}
</button>
</div>
<div class="text-xs text-left w-full font-medium mt-0.5">
This setting does not sync across browsers or devices.
</div>
</div>
<hr class=" dark:border-gray-700" />
<div class="flex flex-col">
<input
id="chat-import-input"

View file

@@ -3,8 +3,18 @@
import { fade, blur } from 'svelte/transition';
export let show = true;
export let size = 'md';
let mounted = false;
const sizeToWidth = (size) => {
if (size === 'sm') {
return 'w-[30rem]';
} else {
return 'w-[40rem]';
}
};
onMount(() => {
mounted = true;
});
@@ -28,7 +38,9 @@
}}
>
<div
class="m-auto rounded-xl max-w-full w-[40rem] mx-2 bg-gray-50 dark:bg-gray-900 shadow-3xl"
class="m-auto rounded-xl max-w-full {sizeToWidth(
size
)} mx-2 bg-gray-50 dark:bg-gray-900 shadow-3xl"
transition:fade={{ delay: 100, duration: 200 }}
on:click={(e) => {
e.stopPropagation();

View file

@@ -1,7 +1,10 @@
<script lang="ts">
import { getChatById } from '$lib/apis/chats';
import { chatId, db, modelfiles } from '$lib/stores';
import toast from 'svelte-french-toast';
import fileSaver from 'file-saver';
const { saveAs } = fileSaver;
import { getChatById } from '$lib/apis/chats';
import { chatId, modelfiles } from '$lib/stores';
export let initNewChat: Function;
export let title: string = 'Ollama Web UI';
@@ -33,6 +36,21 @@
false
);
};
const downloadChat = async () => {
const chat = (await getChatById(localStorage.token, $chatId)).chat;
console.log('download', chat);
const chatText = chat.messages.reduce((a, message, i, arr) => {
return `${a}### ${message.role.toUpperCase()}\n${message.content}\n\n`;
}, '');
let blob = new Blob([chatText], {
type: 'text/plain'
});
saveAs(blob, `chat-${chat.title}.txt`);
};
</script>
<nav
@@ -69,7 +87,30 @@
</div>
{#if shareEnabled}
<div class="pl-2">
<div class="pl-2 flex space-x-1.5">
<button
class=" cursor-pointer p-2 flex dark:hover:bg-gray-700 rounded-lg transition border dark:border-gray-600"
on:click={async () => {
downloadChat();
}}
>
<div class=" m-auto self-center">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M8.75 2.75a.75.75 0 0 0-1.5 0v5.69L5.03 6.22a.75.75 0 0 0-1.06 1.06l3.5 3.5a.75.75 0 0 0 1.06 0l3.5-3.5a.75.75 0 0 0-1.06-1.06L8.75 8.44V2.75Z"
/>
<path
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
</div>
</button>
<button
class=" cursor-pointer p-2 flex dark:hover:bg-gray-700 rounded-lg transition border dark:border-gray-600"
on:click={async () => {
@@ -79,15 +120,15 @@
<div class=" m-auto self-center">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M9.25 13.25a.75.75 0 001.5 0V4.636l2.955 3.129a.75.75 0 001.09-1.03l-4.25-4.5a.75.75 0 00-1.09 0l-4.25 4.5a.75.75 0 101.09 1.03L9.25 4.636v8.614z"
d="M7.25 10.25a.75.75 0 0 0 1.5 0V4.56l2.22 2.22a.75.75 0 1 0 1.06-1.06l-3.5-3.5a.75.75 0 0 0-1.06 0l-3.5 3.5a.75.75 0 0 0 1.06 1.06l2.22-2.22v5.69Z"
/>
<path
d="M3.5 12.75a.75.75 0 00-1.5 0v2.5A2.75 2.75 0 004.75 18h10.5A2.75 2.75 0 0018 15.25v-2.5a.75.75 0 00-1.5 0v2.5c0 .69-.56 1.25-1.25 1.25H4.75c-.69 0-1.25-.56-1.25-1.25v-2.5z"
d="M3.5 9.75a.75.75 0 0 0-1.5 0v1.5A2.75 2.75 0 0 0 4.75 14h6.5A2.75 2.75 0 0 0 14 11.25v-1.5a.75.75 0 0 0-1.5 0v1.5c0 .69-.56 1.25-1.25 1.25h-6.5c-.69 0-1.25-.56-1.25-1.25v-1.5Z"
/>
</svg>
</div>
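
The downloadChat handler above flattens the current chat into a plain-text transcript and hands it to file-saver. A small sketch of just the reduce step, with an illustrative message shape:

// Sketch: produce the '### ROLE' transcript that downloadChat saves as chat-<title>.txt.
type ChatMessage = { role: string; content: string };

const chatToText = (messages: ChatMessage[]): string =>
	messages.reduce((a, message) => `${a}### ${message.role.toUpperCase()}\n${message.content}\n\n`, '');

chatToText([
	{ role: 'user', content: 'Hello!' },
	{ role: 'assistant', content: 'Hi there.' }
]);
// => '### USER\nHello!\n\n### ASSISTANT\nHi there.\n\n'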

View file

@@ -6,7 +6,7 @@
import { goto, invalidateAll } from '$app/navigation';
import { page } from '$app/stores';
import { user, chats, showSettings, chatId } from '$lib/stores';
import { user, chats, settings, showSettings, chatId } from '$lib/stores';
import { onMount } from 'svelte';
import { deleteChatById, getChatList, updateChatById } from '$lib/apis/chats';
@@ -49,6 +49,12 @@
await deleteChatById(localStorage.token, id);
await chats.set(await getChatList(localStorage.token));
};
const saveSettings = async (updated) => {
await settings.set({ ...$settings, ...updated });
localStorage.setItem('settings', JSON.stringify($settings));
location.href = '/';
};
</script>
<div
@@ -100,7 +106,7 @@
</div>
{#if $user?.role === 'admin'}
<div class="px-2.5 flex justify-center my-1">
<div class="px-2.5 flex justify-center mt-1">
<button
class="flex-grow flex space-x-3 rounded-md px-3 py-2 hover:bg-gray-900 transition"
on:click={async () => {
@@ -129,255 +135,326 @@
</div>
</button>
</div>
<div class="px-2.5 flex justify-center mb-1">
<button
class="flex-grow flex space-x-3 rounded-md px-3 py-2 hover:bg-gray-900 transition"
on:click={async () => {
goto('/prompts');
}}
>
<div class="self-center">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M11.013 2.513a1.75 1.75 0 0 1 2.475 2.474L6.226 12.25a2.751 2.751 0 0 1-.892.596l-2.047.848a.75.75 0 0 1-.98-.98l.848-2.047a2.75 2.75 0 0 1 .596-.892l7.262-7.261Z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class="flex self-center">
<div class=" self-center font-medium text-sm">Prompts</div>
</div>
</button>
</div>
{/if}
<div class="px-2.5 mt-1 mb-2 flex justify-center space-x-2">
<div class="flex w-full">
<div class="self-center pl-3 py-2 rounded-l bg-gray-950">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M9 3.5a5.5 5.5 0 100 11 5.5 5.5 0 000-11zM2 9a7 7 0 1112.452 4.391l3.328 3.329a.75.75 0 11-1.06 1.06l-3.329-3.328A7 7 0 012 9z"
clip-rule="evenodd"
/>
</svg>
</div>
<div class="relative flex flex-col flex-1 overflow-y-auto">
{#if !($settings.saveChatHistory ?? true)}
<div class="absolute z-40 w-full h-full bg-black/90 flex justify-center">
<div class=" text-left px-5 py-2">
<div class=" font-medium">Chat History is off for this browser.</div>
<div class="text-xs mt-2">
When history is turned off, new chats on this browser won't appear in your history on
any of your devices. <span class=" font-semibold"
>This setting does not sync across browsers or devices.</span
>
</div>
<input
class="w-full rounded-r py-1.5 pl-2.5 pr-4 text-sm text-gray-300 bg-gray-950 outline-none"
placeholder="Search"
bind:value={search}
/>
<!-- <div class="self-center pr-3 py-2 bg-gray-900">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z"
/>
</svg>
</div> -->
</div>
</div>
<div class="pl-2.5 my-2 flex-1 flex flex-col space-y-1 overflow-y-auto">
{#each $chats.filter((chat) => {
if (search === '') {
return true;
} else {
let title = chat.title.toLowerCase();
if (title.includes(search)) {
return true;
} else {
return false;
}
}
}) as chat, i}
<div class=" w-full pr-2 relative">
<button
class=" w-full flex justify-between rounded-md px-3 py-2 hover:bg-gray-900 {chat.id ===
$chatId
? 'bg-gray-900'
: ''} transition whitespace-nowrap text-ellipsis"
on:click={() => {
// goto(`/c/${chat.id}`);
if (chat.id !== chatTitleEditId) {
chatTitleEditId = null;
chatTitle = '';
}
if (chat.id !== $chatId) {
loadChat(chat.id);
}
}}
>
<div class=" flex self-center flex-1">
<div class=" self-center mr-3">
<div class="mt-3">
<button
class="flex justify-center items-center space-x-1.5 px-3 py-2.5 rounded-lg text-xs bg-gray-200 hover:bg-gray-300 transition text-gray-800 font-medium w-full"
type="button"
on:click={() => {
saveSettings({
saveChatHistory: true
});
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
viewBox="0 0 16 16"
fill="currentColor"
class="w-3 h-3"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M2.25 12.76c0 1.6 1.123 2.994 2.707 3.227 1.087.16 2.185.283 3.293.369V21l4.076-4.076a1.526 1.526 0 011.037-.443 48.282 48.282 0 005.68-.494c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z"
fill-rule="evenodd"
d="M8 1a.75.75 0 0 1 .75.75v6.5a.75.75 0 0 1-1.5 0v-6.5A.75.75 0 0 1 8 1ZM4.11 3.05a.75.75 0 0 1 0 1.06 5.5 5.5 0 1 0 7.78 0 .75.75 0 0 1 1.06-1.06 7 7 0 1 1-9.9 0 .75.75 0 0 1 1.06 0Z"
clip-rule="evenodd"
/>
</svg>
<div>Enable Chat History</div>
</button>
</div>
</div>
</div>
{/if}
<div class="px-2.5 mt-1 mb-2 flex justify-center space-x-2">
<div class="flex w-full" id="chat-search">
<div class="self-center pl-3 py-2 rounded-l bg-gray-950">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M9 3.5a5.5 5.5 0 100 11 5.5 5.5 0 000-11zM2 9a7 7 0 1112.452 4.391l3.328 3.329a.75.75 0 11-1.06 1.06l-3.329-3.328A7 7 0 012 9z"
clip-rule="evenodd"
/>
</svg>
</div>
<input
class="w-full rounded-r py-1.5 pl-2.5 pr-4 text-sm text-gray-300 bg-gray-950 outline-none"
placeholder="Search"
bind:value={search}
/>
<!-- <div class="self-center pr-3 py-2 bg-gray-900">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M12 3c2.755 0 5.455.232 8.083.678.533.09.917.556.917 1.096v1.044a2.25 2.25 0 01-.659 1.591l-5.432 5.432a2.25 2.25 0 00-.659 1.591v2.927a2.25 2.25 0 01-1.244 2.013L9.75 21v-6.568a2.25 2.25 0 00-.659-1.591L3.659 7.409A2.25 2.25 0 013 5.818V4.774c0-.54.384-1.006.917-1.096A48.32 48.32 0 0112 3z"
/>
</svg>
</div> -->
</div>
</div>
<div class="pl-2.5 my-2 flex-1 flex flex-col space-y-1 overflow-y-auto">
{#each $chats.filter((chat) => {
if (search === '') {
return true;
} else {
let title = chat.title.toLowerCase();
if (title.includes(search)) {
return true;
} else {
return false;
}
}
}) as chat, i}
<div class=" w-full pr-2 relative">
<button
class=" w-full flex justify-between rounded-md px-3 py-2 hover:bg-gray-900 {chat.id ===
$chatId
? 'bg-gray-900'
: ''} transition whitespace-nowrap text-ellipsis"
on:click={() => {
// goto(`/c/${chat.id}`);
if (chat.id !== chatTitleEditId) {
chatTitleEditId = null;
chatTitle = '';
}
if (chat.id !== $chatId) {
loadChat(chat.id);
}
}}
>
<div class=" flex self-center flex-1">
<div class=" self-center mr-3">
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M2.25 12.76c0 1.6 1.123 2.994 2.707 3.227 1.087.16 2.185.283 3.293.369V21l4.076-4.076a1.526 1.526 0 011.037-.443 48.282 48.282 0 005.68-.494c1.584-.233 2.707-1.626 2.707-3.228V6.741c0-1.602-1.123-2.995-2.707-3.228A48.394 48.394 0 0012 3c-2.392 0-4.744.175-7.043.513C3.373 3.746 2.25 5.14 2.25 6.741v6.018z"
/>
</svg>
</div>
<div
class=" text-left self-center overflow-hidden {chat.id === $chatId
? 'w-[120px]'
: 'w-[180px]'} "
>
{#if chatTitleEditId === chat.id}
<input bind:value={chatTitle} class=" bg-transparent w-full" />
{:else}
{chat.title}
{/if}
</div>
</div>
<div
class=" text-left self-center overflow-hidden {chat.id === $chatId
? 'w-[120px]'
: 'w-[180px]'} "
>
</button>
{#if chat.id === $chatId}
<div class=" absolute right-[22px] top-[10px]">
{#if chatTitleEditId === chat.id}
<input bind:value={chatTitle} class=" bg-transparent w-full" />
<div class="flex self-center space-x-1.5">
<button
class=" self-center hover:text-white transition"
on:click={() => {
editChatTitle(chat.id, chatTitle);
chatTitleEditId = null;
chatTitle = '';
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatTitleEditId = null;
chatTitle = '';
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{:else if chatDeleteId === chat.id}
<div class="flex self-center space-x-1.5">
<button
class=" self-center hover:text-white transition"
on:click={() => {
deleteChat(chat.id);
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatDeleteId = null;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{:else}
{chat.title}
<div class="flex self-center space-x-1.5">
<button
id="delete-chat-button"
class=" hidden"
on:click={() => {
deleteChat(chat.id);
}}
/>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatTitle = chat.title;
chatTitleEditId = chat.id;
// editChatTitle(chat.id, 'a');
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatDeleteId = chat.id;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0"
/>
</svg>
</button>
</div>
{/if}
</div>
</div>
</button>
{#if chat.id === $chatId}
<div class=" absolute right-[22px] top-[10px]">
{#if chatTitleEditId === chat.id}
<div class="flex self-center space-x-1.5">
<button
class=" self-center hover:text-white transition"
on:click={() => {
editChatTitle(chat.id, chatTitle);
chatTitleEditId = null;
chatTitle = '';
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatTitleEditId = null;
chatTitle = '';
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{:else if chatDeleteId === chat.id}
<div class="flex self-center space-x-1.5">
<button
class=" self-center hover:text-white transition"
on:click={() => {
deleteChat(chat.id);
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M16.704 4.153a.75.75 0 01.143 1.052l-8 10.5a.75.75 0 01-1.127.075l-4.5-4.5a.75.75 0 011.06-1.06l3.894 3.893 7.48-9.817a.75.75 0 011.05-.143z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatDeleteId = null;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 20 20"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
/>
</svg>
</button>
</div>
{:else}
<div class="flex self-center space-x-1.5">
<button
id="delete-chat-button"
class=" hidden"
on:click={() => {
deleteChat(chat.id);
}}
/>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatTitle = chat.title;
chatTitleEditId = chat.id;
// editChatTitle(chat.id, 'a');
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M16.862 4.487l1.687-1.688a1.875 1.875 0 112.652 2.652L6.832 19.82a4.5 4.5 0 01-1.897 1.13l-2.685.8.8-2.685a4.5 4.5 0 011.13-1.897L16.863 4.487zm0 0L19.5 7.125"
/>
</svg>
</button>
<button
class=" self-center hover:text-white transition"
on:click={() => {
chatDeleteId = chat.id;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 24 24"
stroke-width="1.5"
stroke="currentColor"
class="w-4 h-4"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M14.74 9l-.346 9m-4.788 0L9.26 9m9.968-3.21c.342.052.682.107 1.022.166m-1.022-.165L18.16 19.673a2.25 2.25 0 01-2.244 2.077H8.084a2.25 2.25 0 01-2.244-2.077L4.772 5.79m14.456 0a48.108 48.108 0 00-3.478-.397m-12 .562c.34-.059.68-.114 1.022-.165m0 0a48.11 48.11 0 013.478-.397m7.5 0v-.916c0-1.18-.91-2.164-2.09-2.201a51.964 51.964 0 00-3.32 0c-1.18.037-2.09 1.022-2.09 2.201v.916m7.5 0a48.667 48.667 0 00-7.5 0"
/>
</svg>
</button>
</div>
{/if}
</div>
{/if}
</div>
{/each}
{/if}
</div>
{/each}
</div>
</div>
<div class="px-2.5">

View file

@@ -1,16 +1,10 @@
import { dev, browser } from '$app/environment';
import { PUBLIC_API_BASE_URL } from '$env/static/public';
export const OLLAMA_API_BASE_URL = dev
? `http://${location.hostname}:8080/ollama/api`
: PUBLIC_API_BASE_URL === ''
? browser
? `http://${location.hostname}:11434/api`
: `http://localhost:11434/api`
: PUBLIC_API_BASE_URL;
import { dev } from '$app/environment';
export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;
export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
export const WEB_UI_VERSION = 'v1.0.0-alpha-static';
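
With these constants every request is routed through the web UI backend, which proxies Ollama and OpenAI. A short sketch of how the values resolve; the hostname is illustrative, and the /tags path is only an example of composing a URL on top of OLLAMA_API_BASE_URL, not a documented guarantee:

// Sketch of how the constants above resolve.
// dev build (backend assumed on port 8080 of the same host):
//   WEBUI_BASE_URL      -> 'http://localhost:8080'
//   WEBUI_API_BASE_URL  -> 'http://localhost:8080/api/v1'
//   OLLAMA_API_BASE_URL -> 'http://localhost:8080/ollama/api'
//   OPENAI_API_BASE_URL -> 'http://localhost:8080/openai/api'
// production build (WEBUI_BASE_URL is ''): the same paths relative to wherever the UI is served,
//   i.e. '/api/v1', '/ollama/api', '/openai/api'.
const modelsUrl = `${OLLAMA_API_BASE_URL}/tags`; // hypothetical composition, e.g. '/ollama/api/tags' in production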

View file

@@ -5,10 +5,14 @@ export const config = writable(undefined);
export const user = writable(undefined);
// Frontend
export const db = writable(undefined);
export const theme = writable('dark');
export const chatId = writable('');
export const chats = writable([]);
export const models = writable([]);
export const modelfiles = writable([]);
export const prompts = writable([]);
export const settings = writable({});
export const showSettings = writable(false);
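
The new settings and prompts stores centralise state that used to be read ad hoc from localStorage. Outside Svelte markup a writable store is read with get() and updated with set(); a generic sketch of the save pattern used by the components (the helper name is taken from the Sidebar code above, the rest is illustrative):

import { get, writable } from 'svelte/store';

// Same shape as the settings store exported above.
const settings = writable<Record<string, unknown>>({});

// Merge an update into the store and persist it, mirroring saveSettings in the components.
const saveSettings = (updated: Record<string, unknown>) => {
	settings.set({ ...get(settings), ...updated });
	localStorage.setItem('settings', JSON.stringify(get(settings)));
};

saveSettings({ notificationEnabled: true });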

View file

@@ -21,7 +21,7 @@ export const splitStream = (splitOn) => {
};
export const convertMessagesToHistory = (messages) => {
let history = {
const history = {
messages: {},
currentId: null
};
@@ -111,3 +111,19 @@ export const checkVersion = (required, current) => {
caseFirst: 'upper'
}) < 0;
};
export const findWordIndices = (text) => {
const regex = /\[([^\]]+)\]/g;
const matches = [];
let match;
while ((match = regex.exec(text)) !== null) {
matches.push({
word: match[1],
startIndex: match.index,
endIndex: regex.lastIndex - 1
});
}
return matches;
};
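
findWordIndices scans a string for [bracketed] tokens (presumably the placeholders used by the prompt templates introduced in this PR) and reports each token together with its position. A quick usage sketch:

import { findWordIndices } from '$lib/utils';

// '[name]' spans indices 6 to 11, '[friend]' spans 19 to 26.
findWordIndices('Hello [name], meet [friend]');
// => [
//      { word: 'name',   startIndex: 6,  endIndex: 11 },
//      { word: 'friend', startIndex: 19, endIndex: 26 }
//    ]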

View file

@@ -9,11 +9,12 @@
import { getOllamaModels, getOllamaVersion } from '$lib/apis/ollama';
import { getModelfiles } from '$lib/apis/modelfiles';
import { getPrompts } from '$lib/apis/prompts';
import { getOpenAIModels } from '$lib/apis/openai';
import { user, showSettings, settings, models, modelfiles } from '$lib/stores';
import { OLLAMA_API_BASE_URL, REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import { user, showSettings, settings, models, modelfiles, prompts } from '$lib/stores';
import { REQUIRED_OLLAMA_VERSION, WEBUI_API_BASE_URL } from '$lib/constants';
import SettingsModal from '$lib/components/chat/SettingsModal.svelte';
import Sidebar from '$lib/components/layout/Sidebar.svelte';
@@ -31,36 +32,28 @@
const getModels = async () => {
let models = [];
models.push(
...(await getOllamaModels(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
...(await getOllamaModels(localStorage.token).catch((error) => {
toast.error(error);
return [];
}))
);
// If OpenAI API Key exists
if ($settings.OPENAI_API_KEY) {
const openAIModels = await getOpenAIModels(
$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
$settings.OPENAI_API_KEY
).catch((error) => {
console.log(error);
toast.error(error);
return null;
});
models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
}
// $settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1',
// $settings.OPENAI_API_KEY
const openAIModels = await getOpenAIModels(localStorage.token).catch((error) => {
console.log(error);
return null;
});
models.push(...(openAIModels ? [{ name: 'hr' }, ...openAIModels] : []));
return models;
};
const setOllamaVersion = async (version: string = '') => {
if (version === '') {
version = await getOllamaVersion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token
).catch((error) => {
version = await getOllamaVersion(localStorage.token).catch((error) => {
return '';
});
}
@@ -101,6 +94,9 @@
console.log();
await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
await modelfiles.set(await getModelfiles(localStorage.token));
await prompts.set(await getPrompts(localStorage.token));
console.log($modelfiles);
modelfiles.subscribe(async () => {
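
getModels now always merges the OpenAI model list into the Ollama list, separated by a { name: 'hr' } placeholder that the model selector renders as a divider; keys and base URLs are resolved on the backend from the session token instead of per-browser settings. Roughly the shape of the combined array (names and sizes are illustrative; the external flag is what the chat pages use to route a model to the OpenAI endpoint and is assumed to be set by getOpenAIModels):

// Illustrative result of getModels() after this change.
const models = [
	{ name: 'llama2:latest', size: 3825819519 }, // local Ollama model
	{ name: 'hr' },                              // divider entry
	{ name: 'gpt-3.5-turbo', external: true }    // OpenAI model, served via the /openai/api proxy
];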

View file

@@ -6,8 +6,7 @@
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { models, modelfiles, user, settings, chats, chatId, config } from '$lib/stores';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { copyToClipboard, splitStream } from '$lib/utils';
@@ -17,6 +16,7 @@
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { createNewChat, getChatList, updateChatById } from '$lib/apis/chats';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
let stopResponseFlag = false;
let autoScroll = true;
@@ -90,9 +90,18 @@
messages: {},
currentId: null
};
selectedModels = $page.url.searchParams.get('models')
? $page.url.searchParams.get('models')?.split(',')
: $settings.models ?? [''];
console.log($config);
if ($page.url.searchParams.get('models')) {
selectedModels = $page.url.searchParams.get('models')?.split(',');
} else if ($settings?.models) {
selectedModels = $settings?.models;
} else if ($config?.default_models) {
selectedModels = $config?.default_models.split(',');
} else {
selectedModels = [''];
}
let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
settings.set({
@@ -109,10 +118,14 @@
await Promise.all(
selectedModels.map(async (model) => {
console.log(model);
if ($models.filter((m) => m.name === model)[0].external) {
const modelTag = $models.filter((m) => m.name === model).at(0);
if (modelTag?.external) {
await sendPromptOpenAI(model, prompt, parentId, _chatId);
} else {
} else if (modelTag) {
await sendPromptOllama(model, prompt, parentId, _chatId);
} else {
toast.error(`Model ${model} not found`);
}
})
);
@@ -150,36 +163,32 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
{
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
}
);
const res = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
});
if (res && res.ok) {
const reader = res.body
@@ -271,11 +280,13 @@
}
if ($chatId == _chatId) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
}
} else {
if (res !== null) {
@@ -313,185 +324,173 @@
};
const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
if ($settings.OPENAI_API_KEY) {
if (models) {
let responseMessageId = uuidv4();
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
window.scrollTo({ top: document.body.scrollHeight });
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(
`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
{
method: 'POST',
headers: {
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
}
).catch((err) => {
console.log(err);
return null;
});
const res = await generateOpenAIChatCompletion(localStorage.token, {
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
seed: $settings?.options?.seed ?? undefined,
stop: $settings?.options?.stop ?? undefined,
temperature: $settings?.options?.temperature ?? undefined,
top_p: $settings?.options?.top_p ?? undefined,
num_ctx: $settings?.options?.num_ctx ?? undefined,
frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
max_tokens: $settings?.options?.num_predict ?? undefined
});
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
}
if ($chatId == _chatId) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
stopResponseFlag = false;
await tick();
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await setChatTitle(_chatId, userPrompt);
if ($chatId == _chatId) {
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await setChatTitle(_chatId, userPrompt);
}
};
@@ -532,20 +531,24 @@
// Create new chat if only one message in messages
if (messages.length == 1) {
chat = await createNewChat(localStorage.token, {
id: $chatId,
title: 'New Chat',
models: selectedModels,
system: $settings.system ?? undefined,
options: {
...($settings.options ?? {})
},
messages: messages,
history: history,
timestamp: Date.now()
});
await chats.set(await getChatList(localStorage.token));
await chatId.set(chat.id);
if ($settings.saveChatHistory ?? true) {
chat = await createNewChat(localStorage.token, {
id: $chatId,
title: 'New Chat',
models: selectedModels,
system: $settings.system ?? undefined,
options: {
...($settings.options ?? {})
},
messages: messages,
history: history,
timestamp: Date.now()
});
await chats.set(await getChatList(localStorage.token));
await chatId.set(chat.id);
} else {
await chatId.set('local');
}
await tick();
}
@@ -579,9 +582,8 @@
const generateChatTitle = async (_chatId, userPrompt) => {
if ($settings.titleAutoGenerate ?? true) {
const title = await generateTitle(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
selectedModels[0],
$settings?.titleAutoGenerateModel ?? selectedModels[0],
userPrompt
);
@@ -598,8 +600,10 @@
title = _title;
}
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
}
};
</script>
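
Both chat pages consume streamed responses line by line; for OpenAI-compatible endpoints the stream is standard SSE, one data: chunk per line, terminated by data: [DONE]. A condensed sketch of that read loop, using splitStream from $lib/utils; the res argument stands for the streaming fetch Response returned by generateOpenAIChatCompletion, and onDelta is a hypothetical callback for appending tokens:

import { splitStream } from '$lib/utils';

// Sketch of the OpenAI-style streaming loop used above: decode, split on newlines, parse deltas.
const readCompletionStream = async (res: Response, onDelta: (text: string) => void) => {
	const reader = res
		.body!.pipeThrough(new TextDecoderStream())
		.pipeThrough(splitStream('\n'))
		.getReader();

	while (true) {
		const { value, done } = await reader.read();
		if (done || !value) break;

		for (const line of value.split('\n')) {
			if (line === '') continue;
			if (line === 'data: [DONE]') return;                // end-of-stream sentinel
			const data = JSON.parse(line.replace(/^data: /, ''));
			onDelta(data.choices[0].delta.content ?? '');       // token delta appended to the message
		}
	}
};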

View file

@@ -7,10 +7,17 @@
import toast from 'svelte-french-toast';
import { updateUserRole, getUsers, deleteUserById } from '$lib/apis/users';
import { getSignUpEnabledStatus, toggleSignUpEnabledStatus } from '$lib/apis/auths';
import EditUserModal from '$lib/components/admin/EditUserModal.svelte';
let loaded = false;
let users = [];
let selectedUser = null;
let signUpEnabled = true;
let showEditUserModal = false;
const updateRoleHandler = async (id, role) => {
const res = await updateUserRole(localStorage.token, id, role).catch((error) => {
toast.error(error);
@@ -22,6 +29,17 @@
}
};
const editUserPasswordHandler = async (id, password) => {
const res = await deleteUserById(localStorage.token, id).catch((error) => {
toast.error(error);
return null;
});
if (res) {
users = await getUsers(localStorage.token);
toast.success('Successfully updated');
}
};
const deleteUserHandler = async (id) => {
const res = await deleteUserById(localStorage.token, id).catch((error) => {
toast.error(error);
@@ -32,16 +50,33 @@
}
};
const toggleSignUpEnabled = async () => {
signUpEnabled = await toggleSignUpEnabledStatus(localStorage.token);
};
onMount(async () => {
if ($user?.role !== 'admin') {
await goto('/');
} else {
users = await getUsers(localStorage.token);
signUpEnabled = await getSignUpEnabledStatus(localStorage.token);
}
loaded = true;
});
</script>
{#key selectedUser}
<EditUserModal
bind:show={showEditUserModal}
{selectedUser}
sessionUser={$user}
on:save={async () => {
users = await getUsers(localStorage.token);
}}
/>
{/key}
<div
class=" bg-white dark:bg-gray-800 dark:text-gray-100 min-h-screen w-full flex justify-center font-mona"
>
@@ -49,7 +84,52 @@
<div class="w-full max-w-3xl px-10 md:px-16 min-h-screen flex flex-col">
<div class="py-10 w-full">
<div class=" flex flex-col justify-center">
<div class=" text-2xl font-semibold">Users ({users.length})</div>
<div class=" flex justify-between items-center">
<div class=" text-2xl font-semibold">Users ({users.length})</div>
<div>
<button
class="flex items-center space-x-1 border border-gray-200 dark:border-gray-600 px-3 py-1 rounded-lg"
type="button"
on:click={() => {
toggleSignUpEnabled();
}}
>
{#if signUpEnabled}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
d="M11.5 1A3.5 3.5 0 0 0 8 4.5V7H2.5A1.5 1.5 0 0 0 1 8.5v5A1.5 1.5 0 0 0 2.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 9.5 7V4.5a2 2 0 1 1 4 0v1.75a.75.75 0 0 0 1.5 0V4.5A3.5 3.5 0 0 0 11.5 1Z"
/>
</svg>
<div class=" text-xs">
New Sign Up <span class=" font-semibold">Enabled</span>
</div>
{:else}
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M8 1a3.5 3.5 0 0 0-3.5 3.5V7A1.5 1.5 0 0 0 3 8.5v5A1.5 1.5 0 0 0 4.5 15h7a1.5 1.5 0 0 0 1.5-1.5v-5A1.5 1.5 0 0 0 11.5 7V4.5A3.5 3.5 0 0 0 8 1Zm2 6V4.5a2 2 0 1 0-4 0V7h4Z"
clip-rule="evenodd"
/>
</svg>
<div class=" text-xs">
New Sign Up <span class=" font-semibold">Disabled</span>
</div>
{/if}
</button>
</div>
</div>
<div class=" text-gray-500 text-xs font-medium mt-1">
Click on the user role cell in the table to change a user's role.
</div>
@@ -100,7 +180,28 @@
}}>{user.role}</button
>
</td>
<td class="px-6 py-4 text-center flex justify-center">
<td class="px-6 py-4 space-x-1 text-center flex justify-center">
<button
class="self-center w-fit text-sm p-1.5 border dark:border-gray-600 rounded-xl flex"
on:click={async () => {
showEditUserModal = !showEditUserModal;
selectedUser = user;
}}
>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-4 h-4"
>
<path
fill-rule="evenodd"
d="M11.013 2.513a1.75 1.75 0 0 1 2.475 2.474L6.226 12.25a2.751 2.751 0 0 1-.892.596l-2.047.848a.75.75 0 0 1-.98-.98l.848-2.047a2.75 2.75 0 0 1 .596-.892l7.262-7.261Z"
clip-rule="evenodd"
/>
</svg>
</button>
<button
class="self-center w-fit text-sm p-1.5 border dark:border-gray-600 rounded-xl flex"
on:click={async () => {

View file

@@ -6,10 +6,11 @@
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { models, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { models, modelfiles, user, settings, chats, chatId } from '$lib/stores';
import { generateChatCompletion, generateTitle } from '$lib/apis/ollama';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { copyToClipboard, splitStream } from '$lib/utils';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
@@ -136,17 +137,20 @@
await Promise.all(
selectedModels.map(async (model) => {
console.log(model);
if ($models.filter((m) => m.name === model)[0].external) {
const modelTag = $models.filter((m) => m.name === model).at(0);
if (modelTag?.external) {
await sendPromptOpenAI(model, prompt, parentId, _chatId);
} else {
} else if (modelTag) {
await sendPromptOllama(model, prompt, parentId, _chatId);
} else {
toast.error(`Model ${model} not found`);
}
})
);
await chats.set(await getChatList(localStorage.token));
};
const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
// Create response message
let responseMessageId = uuidv4();
@@ -177,36 +181,32 @@
// Scroll down
window.scrollTo({ top: document.body.scrollHeight });
const res = await generateChatCompletion(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
{
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
}
);
const res = await generateChatCompletion(localStorage.token, {
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
content: message.content,
...(message.files && {
images: message.files
.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1))
})
})),
options: {
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
});
if (res && res.ok) {
const reader = res.body
@@ -340,185 +340,171 @@
};
const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
if ($settings.OPENAI_API_KEY) {
if (models) {
let responseMessageId = uuidv4();
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
window.scrollTo({ top: document.body.scrollHeight });
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(
`${$settings.OPENAI_API_BASE_URL ?? 'https://api.openai.com/v1'}/chat/completions`,
{
method: 'POST',
headers: {
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
}
).catch((err) => {
console.log(err);
return null;
});
const res = await generateOpenAIChatCompletion(localStorage.token, {
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({
role: message.role,
...(message.files
? {
content: [
{
type: 'text',
text: message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: { content: message.content })
})),
seed: $settings?.options?.seed ?? undefined,
stop: $settings?.options?.stop ?? undefined,
temperature: $settings?.options?.temperature ?? undefined,
top_p: $settings?.options?.top_p ?? undefined,
num_ctx: $settings?.options?.num_ctx ?? undefined,
frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
max_tokens: $settings?.options?.num_predict ?? undefined
});
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
}
if ($chatId == _chatId) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
stopResponseFlag = false;
await tick();
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: '/favicon.png'
});
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await setChatTitle(_chatId, userPrompt);
}
}
if ($chatId == _chatId) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
if ('message' in error.error) {
toast.error(error.error.message);
responseMessage.content = error.error.message;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
}
} else {
toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
}
responseMessage.error = true;
responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await setChatTitle(_chatId, userPrompt);
}
};
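The streaming loop above depends on splitStream('\n') from $lib/utils to turn the decoded response into one SSE `data:` line per read. Its implementation is not shown in this diff; a minimal compatible newline splitter might look like the following sketch (an assumption, not the project's actual helper):

const splitStream = (splitOn) => {
	let buffer = '';
	return new TransformStream({
		transform(chunk, controller) {
			buffer += chunk;
			const parts = buffer.split(splitOn);
			// Hold back the last, possibly incomplete, fragment until more data arrives.
			buffer = parts.pop();
			parts.forEach((part) => controller.enqueue(part));
		},
		flush(controller) {
			if (buffer.length > 0) controller.enqueue(buffer);
		}
	});
};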
@@ -605,12 +591,7 @@
const generateChatTitle = async (_chatId, userPrompt) => {
if ($settings.titleAutoGenerate ?? true) {
const title = await generateTitle(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
selectedModels[0],
userPrompt
);
const title = await generateTitle(localStorage.token, selectedModels[0], userPrompt);
if (title) {
await setChatTitle(_chatId, title);
@@ -628,6 +609,12 @@
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
};
onMount(async () => {
if (!($settings.saveChatHistory ?? true)) {
await goto('/');
}
});
</script>
<svelte:window


@@ -6,7 +6,6 @@
import { onMount } from 'svelte';
import { modelfiles, settings, user } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { createModel, deleteModel } from '$lib/apis/ollama';
import {
createNewModelfile,
@@ -20,11 +19,7 @@
const deleteModelHandler = async (tagName) => {
let success = null;
success = await deleteModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName
);
success = await deleteModel(localStorage.token, tagName);
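// deleteModel now receives only the auth token; the Ollama base URL that was
// previously passed in is presumably resolved inside the $lib/apis/ollama helpers.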
if (success) {
toast.success(`Deleted ${tagName}`);
@@ -44,7 +39,7 @@
const url = 'https://ollamahub.com';
const tab = await window.open(`${url}/create`, '_blank');
const tab = await window.open(`${url}/modelfiles/create`, '_blank');
window.addEventListener(
'message',
(event) => {
@@ -254,6 +249,30 @@
</svg>
</div>
</button>
<button
class="self-center w-fit text-sm px-3 py-1 border dark:border-gray-600 rounded-xl flex"
on:click={async () => {
saveModelfiles($modelfiles);
}}
>
<div class=" self-center mr-2 font-medium">Export Modelfiles</div>
<div class=" self-center">
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 16 16"
fill="currentColor"
class="w-3.5 h-3.5"
>
<path
fill-rule="evenodd"
d="M4 2a1.5 1.5 0 0 0-1.5 1.5v9A1.5 1.5 0 0 0 4 14h8a1.5 1.5 0 0 0 1.5-1.5V6.621a1.5 1.5 0 0 0-.44-1.06L9.94 2.439A1.5 1.5 0 0 0 8.878 2H4Zm4 3.5a.75.75 0 0 1 .75.75v2.69l.72-.72a.75.75 0 1 1 1.06 1.06l-2 2a.75.75 0 0 1-1.06 0l-2-2a.75.75 0 0 1 1.06-1.06l.72.72V6.25A.75.75 0 0 1 8 5.5Z"
clip-rule="evenodd"
/>
</svg>
</div>
</button>
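<!--
	saveModelfiles($modelfiles) is not defined in the hunks shown here. A typical
	exporter would serialize the store and trigger a JSON download, roughly like
	this sketch (an assumption, not the project's actual helper):

	const saveModelfiles = (modelfiles) => {
		const blob = new Blob([JSON.stringify(modelfiles)], { type: 'application/json' });
		const url = URL.createObjectURL(blob);
		const anchor = document.createElement('a');
		anchor.href = url;
		anchor.download = 'modelfiles.json';
		anchor.click();
		URL.revokeObjectURL(url);
	};
-->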
</div>
{#if localModelfiles.length > 0}


@@ -2,8 +2,7 @@
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-french-toast';
import { goto } from '$app/navigation';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { settings, db, user, config, modelfiles, models } from '$lib/stores';
import { settings, user, config, modelfiles, models } from '$lib/stores';
import Advanced from '$lib/components/chat/Settings/Advanced.svelte';
import { splitStream } from '$lib/utils';
@@ -51,7 +50,8 @@
top_k: '',
top_p: '',
tfs_z: '',
num_ctx: ''
num_ctx: '',
num_predict: ''
};
let modelfileCreator = null;
@@ -73,6 +73,7 @@ ${options.top_k !== '' ? `PARAMETER top_k ${options.top_k}` : ''}
${options.top_p !== '' ? `PARAMETER top_p ${options.top_p}` : ''}
${options.tfs_z !== '' ? `PARAMETER tfs_z ${options.tfs_z}` : ''}
${options.num_ctx !== '' ? `PARAMETER num_ctx ${options.num_ctx}` : ''}
${options.num_predict !== '' ? `PARAMETER num_predict ${options.num_predict}` : ''}
SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
}
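The trailing .replace(/^\s*\n/gm, '') removes the blank lines left behind by parameters that were not filled in, so only populated PARAMETER lines reach the final Modelfile content. With only top_k and num_predict set and an example system prompt, the template above would emit roughly the following (illustrative values, not taken from this diff):

PARAMETER top_k 40
PARAMETER num_predict 128
SYSTEM """You are a helpful assistant."""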
@@ -130,12 +131,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
Object.keys(categories).filter((category) => categories[category]).length > 0 &&
!$models.includes(tagName)
) {
const res = await createModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName,
content
);
const res = await createModel(localStorage.token, tagName, content);
if (res) {
const reader = res.body
@@ -639,7 +635,7 @@ SYSTEM """${system}"""`.replace(/^\s*\n/gm, '');
<div class=" text-sm font-semibold mb-2">Pull Progress</div>
<div class="w-full rounded-full dark:bg-gray-800">
<div
class="dark:bg-gray-600 text-xs font-medium text-blue-100 text-center p-0.5 leading-none rounded-full"
class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
style="width: {Math.max(15, pullProgress ?? 0)}%"
>
{pullProgress ?? 0}%
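<!-- The Math.max(15, …) floor presumably keeps the filled portion of the bar wide
	enough for this percentage label to stay readable at low progress values. -->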


@@ -6,9 +6,7 @@
import { onMount } from 'svelte';
import { page } from '$app/stores';
import { settings, db, user, config, modelfiles } from '$lib/stores';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { settings, user, config, modelfiles } from '$lib/stores';
import { splitStream } from '$lib/utils';
import { createModel } from '$lib/apis/ollama';
@@ -104,12 +102,7 @@
content !== '' &&
Object.keys(categories).filter((category) => categories[category]).length > 0
) {
const res = await createModel(
$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL,
localStorage.token,
tagName,
content
);
const res = await createModel(localStorage.token, tagName, content);
if (res) {
const reader = res.body

Some files were not shown because too many files have changed in this diff.