forked from open-webui/open-webui
commit 92e21acb4c
39 changed files with 1962 additions and 1075 deletions
@@ -1,6 +1,6 @@
# Ollama URL for the backend to connect
# The path '/ollama/api' will be redirected to the specified backend URL
OLLAMA_API_BASE_URL='http://localhost:11434/api'
# The path '/ollama' will be redirected to the specified backend URL
OLLAMA_BASE_URL='http://localhost:11434'

OPENAI_API_BASE_URL=''
OPENAI_API_KEY=''
.github/ISSUE_TEMPLATE/bug_report.md (2 changes)
@@ -32,7 +32,7 @@ assignees: ''
**Confirmation:**

- [ ] I have read and followed all the instructions provided in the README.md.
- [ ] I have reviewed the troubleshooting.md document.
- [ ] I am on the latest version of both Open WebUI and Ollama.
- [ ] I have included the browser console logs.
- [ ] I have included the Docker container logs.
.github/workflows/build-release.yml (12 changes)
@@ -26,17 +26,27 @@ jobs:
          VERSION=$(jq -r '.version' package.json)
          echo "::set-output name=version::$VERSION"

      - name: Extract latest CHANGELOG entry
        id: changelog
        run: |
          CHANGELOG_CONTENT=$(awk '/^## \[/{n++} n==1' CHANGELOG.md)
          echo "CHANGELOG_CONTENT<<EOF"
          echo "$CHANGELOG_CONTENT"
          echo "EOF"
          echo "::set-output name=content::${CHANGELOG_CONTENT}"

      - name: Create GitHub release
        uses: actions/github-script@v5
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const changelog = `${{ steps.changelog.outputs.content }}`;
            const release = await github.rest.repos.createRelease({
              owner: context.repo.owner,
              repo: context.repo.repo,
              tag_name: `v${{ steps.get_version.outputs.version }}`,
              name: `v${{ steps.get_version.outputs.version }}`,
              body: 'Automatically created new release',
              body: changelog,
            })
            console.log(`Created release ${release.data.html_url}`)
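The awk filter above keeps everything from the first `## [` heading up to (but not including) the second one, i.e. the latest CHANGELOG entry. Purely as an illustration (not part of the workflow), the same selection could be written in Python:

```python
# Illustrative sketch only: mirrors awk '/^## \[/{n++} n==1' from the step above.
import re

def latest_changelog_entry(path="CHANGELOG.md"):
    entry, headings_seen = [], 0
    with open(path, encoding="utf-8") as f:
        for line in f:
            if re.match(r"^## \[", line):
                headings_seen += 1
            if headings_seen == 1:  # lines belonging to the newest release section
                entry.append(line)
    return "".join(entry)
```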
CHANGELOG.md (20 changes)
@@ -5,6 +5,26 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.109] - 2024-03-06

### Added

- **🔄 Multiple Ollama Servers Support**: Enjoy enhanced scalability and performance with support for multiple Ollama servers in a single WebUI. Load balancing features are now available, providing improved efficiency (#788, #278).
- **🔧 Support for Claude 3 and Gemini**: Responding to user requests, we've expanded our toolset to include Claude 3 and Gemini, offering a wider range of functionalities within our platform (#1064).
- **🔍 OCR Functionality for PDF Loader**: We've augmented our PDF loader with Optical Character Recognition (OCR) capabilities. Now, extract text from scanned documents and images within PDFs, broadening the scope of content processing (#1050).

### Fixed

- **🛠️ RAG Collection**: Implemented a dynamic mechanism to recreate RAG collections, ensuring users have up-to-date and accurate data (#1031).
- **📝 User Agent Headers**: Fixed issue of RAG web requests being sent with empty user_agent headers, reducing rejections from certain websites. Realistic headers are now utilized for these requests (#1024).
- **⏹️ Playground Cancel Functionality**: Introducing a new "Cancel" option for stopping Ollama generation in the Playground, enhancing user control and usability (#1006).
- **🔤 Typographical Error in 'ASSISTANT' Field**: Corrected a typographical error in the 'ASSISTANT' field within the GGUF model upload template for accuracy and consistency (#1061).

### Changed

- **🔄 Refactored Message Deletion Logic**: Streamlined message deletion process for improved efficiency and user experience, simplifying interactions within the platform (#1004).
- **⚠️ Deprecation of `OLLAMA_API_BASE_URL`**: Deprecated `OLLAMA_API_BASE_URL` environment variable; recommend using `OLLAMA_BASE_URL` instead. Refer to our documentation for further details.

## [0.1.108] - 2024-03-02

### Added
@@ -20,7 +20,7 @@ FROM python:3.11-slim-bookworm as base
ENV ENV=prod
ENV PORT ""

ENV OLLAMA_API_BASE_URL "/ollama/api"
ENV OLLAMA_BASE_URL "/ollama"

ENV OPENAI_API_BASE_URL ""
ENV OPENAI_API_KEY ""
@@ -95,10 +95,10 @@ Don't forget to explore our sibling project, [Open WebUI Community](https://open

- **If Ollama is on a Different Server**, use this command:

- To connect to Ollama on another server, change the `OLLAMA_API_BASE_URL` to the server's URL:
- To connect to Ollama on another server, change the `OLLAMA_BASE_URL` to the server's URL:

  ```bash
  docker run -d -p 3000:8080 -e OLLAMA_API_BASE_URL=https://example.com/api -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
  docker run -d -p 3000:8080 -e OLLAMA_BASE_URL=https://example.com -v open-webui:/app/backend/data --name open-webui --restart always ghcr.io/open-webui/open-webui:main
  ```

- After installation, you can access Open WebUI at [http://localhost:3000](http://localhost:3000). Enjoy! 😄
@@ -110,7 +110,7 @@ If you're experiencing connection issues, it’s often due to the WebUI docker c
**Example Docker Command**:

```bash
docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name open-webui --restart always ghcr.io/open-webui/open-webui:main
docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=http://127.0.0.1:11434 --name open-webui --restart always ghcr.io/open-webui/open-webui:main
```

### Other Installation Methods
@@ -4,7 +4,7 @@

The Open WebUI system is designed to streamline interactions between the client (your browser) and the Ollama API. At the heart of this design is a backend reverse proxy, enhancing security and resolving CORS issues.

- **How it Works**: The Open WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Open WebUI backend via `/ollama/api` route. From there, the backend is responsible for forwarding the request to the Ollama API. This forwarding is accomplished by using the route specified in the `OLLAMA_API_BASE_URL` environment variable. Therefore, a request made to `/ollama/api` in the WebUI is effectively the same as making a request to `OLLAMA_API_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_API_BASE_URL/tags` in the backend.
- **How it Works**: The Open WebUI is designed to interact with the Ollama API through a specific route. When a request is made from the WebUI to Ollama, it is not directly sent to the Ollama API. Initially, the request is sent to the Open WebUI backend via the `/ollama` route. From there, the backend is responsible for forwarding the request to the Ollama API. This forwarding is accomplished by using the route specified in the `OLLAMA_BASE_URL` environment variable. Therefore, a request made to `/ollama` in the WebUI is effectively the same as making a request to `OLLAMA_BASE_URL` in the backend. For instance, a request to `/ollama/api/tags` in the WebUI is equivalent to `OLLAMA_BASE_URL/api/tags` in the backend.

- **Security Benefits**: This design prevents direct exposure of the Ollama API to the frontend, safeguarding against potential CORS (Cross-Origin Resource Sharing) issues and unauthorized access. Requiring authentication to access the Ollama API further enhances this security layer.
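To make the mapping described above concrete, here is a minimal sketch (illustrative only, not the project's actual proxy code) of how a `/ollama/...` path is rewritten against `OLLAMA_BASE_URL`; the function name and example URL are assumptions:

```python
# Minimal sketch of the path rewrite described above (illustrative only).
OLLAMA_BASE_URL = "http://localhost:11434"  # assumed example value

def backend_target(webui_path: str) -> str:
    # "/ollama/api/tags" -> "http://localhost:11434/api/tags"
    assert webui_path.startswith("/ollama")
    return OLLAMA_BASE_URL + webui_path[len("/ollama"):]

print(backend_target("/ollama/api/tags"))
```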
@@ -15,7 +15,7 @@ If you're experiencing connection issues, it’s often due to the WebUI docker c
**Example Docker Command**:

```bash
docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434/api --name open-webui --restart always ghcr.io/open-webui/open-webui:main
docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=http://127.0.0.1:11434 --name open-webui --restart always ghcr.io/open-webui/open-webui:main
```

### General Connection Errors
@@ -25,8 +25,8 @@ docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_API_BASE_
**Troubleshooting Steps**:

1. **Verify Ollama URL Format**:
   - When running the Web UI container, ensure the `OLLAMA_API_BASE_URL` is correctly set, including the `/api` suffix (e.g., `http://192.168.1.1:11434/api` for different host setups).
   - When running the Web UI container, ensure the `OLLAMA_BASE_URL` is correctly set (e.g., `http://192.168.1.1:11434` for different host setups).
   - In the Open WebUI, navigate to "Settings" > "General".
   - Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]/api` (e.g., `http://localhost:11434/api`), including the `/api` suffix.
   - Confirm that the Ollama Server URL is correctly set to `[OLLAMA URL]` (e.g., `http://localhost:11434`).

By following these enhanced troubleshooting steps, connection issues should be effectively resolved. For further assistance or queries, feel free to reach out to us on our community Discord.
@@ -3,15 +3,22 @@ from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi.concurrency import run_in_threadpool

from pydantic import BaseModel, ConfigDict

import random
import requests
import json
import uuid
from pydantic import BaseModel
import aiohttp
import asyncio

from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user, get_admin_user
from config import OLLAMA_BASE_URL, WEBUI_AUTH
from config import OLLAMA_BASE_URLS

from typing import Optional, List, Union


app = FastAPI()
app.add_middleware(
@@ -22,27 +29,44 @@ app.add_middleware(
    allow_headers=["*"],
)

app.state.OLLAMA_BASE_URL = OLLAMA_BASE_URL

# TARGET_SERVER_URL = OLLAMA_API_BASE_URL
app.state.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
app.state.MODELS = {}


REQUEST_POOL = []


@app.get("/url")
async def get_ollama_api_url(user=Depends(get_admin_user)):
    return {"OLLAMA_BASE_URL": app.state.OLLAMA_BASE_URL}
# TODO: Implement a more intelligent load balancing mechanism for distributing requests among multiple backend instances.
# Current implementation uses a simple round-robin approach (random.choice). Consider incorporating algorithms like weighted round-robin,
# least connections, or least response time for better resource utilization and performance optimization.


@app.middleware("http")
async def check_url(request: Request, call_next):
    if len(app.state.MODELS) == 0:
        await get_all_models()
    else:
        pass

    response = await call_next(request)
    return response


@app.get("/urls")
async def get_ollama_api_urls(user=Depends(get_admin_user)):
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


class UrlUpdateForm(BaseModel):
    url: str
    urls: List[str]


@app.post("/url/update")
@app.post("/urls/update")
async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
    app.state.OLLAMA_BASE_URL = form_data.url
    return {"OLLAMA_BASE_URL": app.state.OLLAMA_BASE_URL}
    app.state.OLLAMA_BASE_URLS = form_data.urls

    print(app.state.OLLAMA_BASE_URLS)
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


@app.get("/cancel/{request_id}")
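The TODO above notes that requests are currently spread across `OLLAMA_BASE_URLS` with a simple `random.choice` and suggests smarter strategies such as least connections. As a hedged illustration of one such alternative (not code from this commit; the counter and function names are assumptions), a least-connections selector might look like this:

```python
# Illustrative only: a least-connections URL selector as suggested by the TODO above.
import random
from collections import defaultdict

active_requests = defaultdict(int)  # url -> number of in-flight requests

def pick_url(urls):
    # Choose randomly among the URLs that currently carry the fewest requests.
    lowest = min(active_requests[u] for u in urls)
    return random.choice([u for u in urls if active_requests[u] == lowest])
```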
@@ -55,9 +79,806 @@ async def cancel_ollama_request(request_id: str, user=Depends(get_current_user))
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


async def fetch_url(url):
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                return await response.json()
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")
        return None


def merge_models_lists(model_lists):
    merged_models = {}

    for idx, model_list in enumerate(model_lists):
        for model in model_list:
            digest = model["digest"]
            if digest not in merged_models:
                model["urls"] = [idx]
                merged_models[digest] = model
            else:
                merged_models[digest]["urls"].append(idx)

    return list(merged_models.values())


# user=Depends(get_current_user)


async def get_all_models():
    print("get_all_models")
    tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
    responses = await asyncio.gather(*tasks)
    responses = list(filter(lambda x: x is not None, responses))

    models = {
        "models": merge_models_lists(
            map(lambda response: response["models"], responses)
        )
    }
    app.state.MODELS = {model["model"]: model for model in models["models"]}

    return models
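For example, if two backends report a model with the same digest, `merge_models_lists` keeps a single entry whose `urls` field lists the index of every server that hosts it. A small illustrative input and its result (fabricated model entries, shown only to clarify the merge):

```python
# Illustrative input/output for merge_models_lists above (made-up digests).
lists = [
    [{"model": "llama2:latest", "digest": "abc"}],
    [{"model": "llama2:latest", "digest": "abc"},
     {"model": "mistral:latest", "digest": "def"}],
]
# merge_models_lists(lists) would return:
# [{"model": "llama2:latest", "digest": "abc", "urls": [0, 1]},
#  {"model": "mistral:latest", "digest": "def", "urls": [1]}]
```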
@app.get("/api/tags")
@app.get("/api/tags/{url_idx}")
async def get_ollama_tags(
    url_idx: Optional[int] = None, user=Depends(get_current_user)
):

    if url_idx == None:
        return await get_all_models()
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]
        try:
            r = requests.request(method="GET", url=f"{url}/api/tags")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            print(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )


@app.get("/api/version")
@app.get("/api/version/{url_idx}")
async def get_ollama_versions(url_idx: Optional[int] = None):

    if url_idx == None:

        # returns lowest version
        tasks = [fetch_url(f"{url}/api/version") for url in app.state.OLLAMA_BASE_URLS]
        responses = await asyncio.gather(*tasks)
        responses = list(filter(lambda x: x is not None, responses))

        lowest_version = min(
            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
        )

        return {"version": lowest_version["version"]}
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]
        try:
            r = requests.request(method="GET", url=f"{url}/api/version")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            print(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )


class ModelNameForm(BaseModel):
    name: str


@app.post("/api/pull")
@app.post("/api/pull/{url_idx}")
async def pull_model(
    form_data: ModelNameForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/pull",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class PushModelForm(BaseModel):
    name: str
    insecure: Optional[bool] = None
    stream: Optional[bool] = None


@app.delete("/api/push")
@app.delete("/api/push/{url_idx}")
async def push_model(
    form_data: PushModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx == None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/push",
                data=form_data.model_dump_json(exclude_none=True),
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
class CreateModelForm(BaseModel):
    name: str
    modelfile: Optional[str] = None
    stream: Optional[bool] = None
    path: Optional[str] = None


@app.post("/api/create")
@app.post("/api/create/{url_idx}")
async def create_model(
    form_data: CreateModelForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    print(form_data)
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/create",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            print(r)

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class CopyModelForm(BaseModel):
    source: str
    destination: str


@app.post("/api/copy")
@app.post("/api/copy/{url_idx}")
async def copy_model(
    form_data: CopyModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx == None:
        if form_data.source in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.source]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.source),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/copy",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        print(r.text)

        return True
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.delete("/api/delete")
@app.delete("/api/delete/{url_idx}")
async def delete_model(
    form_data: ModelNameForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx == None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    try:
        r = requests.request(
            method="DELETE",
            url=f"{url}/api/delete",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        print(r.text)

        return True
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.post("/api/show")
async def show_model_info(form_data: ModelNameForm, user=Depends(get_current_user)):
    if form_data.name not in app.state.MODELS:
        raise HTTPException(
            status_code=400,
            detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.name),
        )

    url_idx = random.choice(app.state.MODELS[form_data.name]["urls"])
    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/show",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
class GenerateEmbeddingsForm(BaseModel):
    model: str
    prompt: str
    options: Optional[dict] = None
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/embeddings")
@app.post("/api/embeddings/{url_idx}")
async def generate_embeddings(
    form_data: GenerateEmbeddingsForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):
    if url_idx == None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/embeddings",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class GenerateCompletionForm(BaseModel):
    model: str
    prompt: str
    images: Optional[List[str]] = None
    format: Optional[str] = None
    options: Optional[dict] = None
    system: Optional[str] = None
    template: Optional[str] = None
    context: Optional[str] = None
    stream: Optional[bool] = True
    raw: Optional[bool] = None
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/generate")
@app.post("/api/generate/{url_idx}")
async def generate_completion(
    form_data: GenerateCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx == None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail="error_detail",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/generate",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
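When `stream` is enabled, the handler above first yields a JSON line carrying the request's `id`, which a client can pass to the `/cancel/{request_id}` endpoint to stop generation. A rough client-side sketch of that flow, for illustration only (the base URL is an assumption and authentication headers are omitted):

```python
# Illustrative client sketch for the streaming id + /cancel/{request_id} flow above.
import json
import requests

BASE = "http://localhost:8080/ollama"  # assumed WebUI backend mount

with requests.post(f"{BASE}/api/generate",
                   json={"model": "llama2", "prompt": "hello", "stream": True},
                   stream=True) as r:
    first = json.loads(next(r.iter_lines()))  # e.g. {"id": "<request_id>", "done": False}
    request_id = first["id"]
    # ... later, to stop generation mid-stream:
    requests.get(f"{BASE}/cancel/{request_id}")
```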
class ChatMessage(BaseModel):
    role: str
    content: str
    images: Optional[List[str]] = None


class GenerateChatCompletionForm(BaseModel):
    model: str
    messages: List[ChatMessage]
    format: Optional[str] = None
    options: Optional[dict] = None
    template: Optional[str] = None
    stream: Optional[bool] = True
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/chat")
@app.post("/api/chat/{url_idx}")
async def generate_chat_completion(
    form_data: GenerateChatCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx == None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    print(form_data.model_dump_json(exclude_none=True))

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/chat",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


# TODO: we should update this part once Ollama supports other types
class OpenAIChatMessage(BaseModel):
    role: str
    content: str

    model_config = ConfigDict(extra="allow")


class OpenAIChatCompletionForm(BaseModel):
    model: str
    messages: List[OpenAIChatMessage]

    model_config = ConfigDict(extra="allow")


@app.post("/v1/chat/completions")
@app.post("/v1/chat/completions/{url_idx}")
async def generate_openai_chat_completion(
    form_data: OpenAIChatCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):

    if url_idx == None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=ERROR_MESSAGES.MODEL_NOT_FOUND(form_data.model),
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]
    print(url)

    r = None

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps(
                            {"request_id": request_id, "done": False}
                        ) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/v1/chat/completions",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
    target_url = f"{app.state.OLLAMA_BASE_URL}/{path}"
async def deprecated_proxy(path: str, request: Request, user=Depends(get_current_user)):
    url = app.state.OLLAMA_BASE_URLS[0]
    target_url = f"{url}/{path}"

    body = await request.body()
    headers = dict(request.headers)
@@ -1,127 +0,0 @@
from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse

import requests
import json
from pydantic import BaseModel

from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user
from config import OLLAMA_API_BASE_URL, WEBUI_AUTH

import aiohttp

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.state.OLLAMA_API_BASE_URL = OLLAMA_API_BASE_URL

# TARGET_SERVER_URL = OLLAMA_API_BASE_URL


@app.get("/url")
async def get_ollama_api_url(user=Depends(get_current_user)):
    if user and user.role == "admin":
        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


class UrlUpdateForm(BaseModel):
    url: str


@app.post("/url/update")
async def update_ollama_api_url(
    form_data: UrlUpdateForm, user=Depends(get_current_user)
):
    if user and user.role == "admin":
        app.state.OLLAMA_API_BASE_URL = form_data.url
        return {"OLLAMA_API_BASE_URL": app.state.OLLAMA_API_BASE_URL}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


# async def fetch_sse(method, target_url, body, headers):
#     async with aiohttp.ClientSession() as session:
#         try:
#             async with session.request(
#                 method, target_url, data=body, headers=headers
#             ) as response:
#                 print(response.status)
#                 async for line in response.content:
#                     yield line
#         except Exception as e:
#             print(e)
#             error_detail = "Open WebUI: Server Connection Error"
#             yield json.dumps({"error": error_detail, "message": str(e)}).encode()


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
    target_url = f"{app.state.OLLAMA_API_BASE_URL}/{path}"
    print(target_url)

    body = await request.body()
    headers = dict(request.headers)

    if user.role in ["user", "admin"]:
        if path in ["pull", "delete", "push", "copy", "create"]:
            if user.role != "admin":
                raise HTTPException(
                    status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
                )
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)

    headers.pop("Host", None)
    headers.pop("Authorization", None)
    headers.pop("Origin", None)
    headers.pop("Referer", None)

    session = aiohttp.ClientSession()
    response = None
    try:
        response = await session.request(
            request.method, target_url, data=body, headers=headers
        )

        print(response)
        if not response.ok:
            data = await response.json()
            print(data)
            response.raise_for_status()

        async def generate():
            async for line in response.content:
                print(line)
                yield line
            await session.close()

        return StreamingResponse(generate(), response.status)

    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"

        if response is not None:
            try:
                res = await response.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except:
                error_detail = f"Ollama: {e}"

        await session.close()
        raise HTTPException(
            status_code=response.status if response else 500,
            detail=error_detail,
        )
@@ -108,7 +108,7 @@ class StoreWebForm(CollectionNameForm):
    url: str


def store_data_in_vector_db(data, collection_name) -> bool:
def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool:
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE, chunk_overlap=app.state.CHUNK_OVERLAP
    )
@@ -118,6 +118,12 @@ def store_data_in_vector_db(data, collection_name) -> bool:
    metadatas = [doc.metadata for doc in docs]

    try:
        if overwrite:
            for collection in CHROMA_CLIENT.list_collections():
                if collection_name == collection.name:
                    print(f"deleting existing collection {collection_name}")
                    CHROMA_CLIENT.delete_collection(name=collection_name)

        collection = CHROMA_CLIENT.create_collection(
            name=collection_name,
            embedding_function=app.state.sentence_transformer_ef,
@@ -355,7 +361,7 @@ def store_web(form_data: StoreWebForm, user=Depends(get_current_user)):
    if collection_name == "":
        collection_name = calculate_sha256_string(form_data.url)[:63]

    store_data_in_vector_db(data, collection_name)
    store_data_in_vector_db(data, collection_name, overwrite=True)
    return {
        "status": True,
        "collection_name": collection_name,
@@ -419,7 +425,7 @@ def get_loader(filename: str, file_content_type: str, file_path: str):
    ]

    if file_ext == "pdf":
        loader = PyPDFLoader(file_path)
        loader = PyPDFLoader(file_path, extract_images=True)
    elif file_ext == "csv":
        loader = CSVLoader(file_path)
    elif file_ext == "rst":
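The switch to `PyPDFLoader(file_path, extract_images=True)` above is what enables OCR on images embedded in PDFs, typically backed by the `rapidocr-onnxruntime` dependency added to requirements.txt in this commit. A minimal standalone sketch of the same call, with an assumed file path and import location, would be:

```python
# Minimal sketch of OCR-enabled PDF loading as used above (file path is an assumption).
from langchain_community.document_loaders import PyPDFLoader

loader = PyPDFLoader("scanned_document.pdf", extract_images=True)
docs = loader.load()  # text found in embedded images is OCR'd and included
print(docs[0].page_content[:200])
```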
@@ -14,7 +14,7 @@ import json
from utils.utils import get_admin_user
from utils.misc import calculate_sha256, get_gravatar_url

from config import OLLAMA_API_BASE_URL, DATA_DIR, UPLOAD_DIR
from config import OLLAMA_BASE_URLS, DATA_DIR, UPLOAD_DIR
from constants import ERROR_MESSAGES

@@ -75,7 +75,7 @@ async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024
        hashed = calculate_sha256(file)
        file.seek(0)

        url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
        url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
        response = requests.post(url, data=file)

        if response.ok:
@@ -147,7 +147,7 @@ def upload(file: UploadFile = File(...)):
        hashed = calculate_sha256(f)
        f.seek(0)

        url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
        url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
        response = requests.post(url, data=f)

        if response.ok:
@@ -200,27 +200,32 @@ if not os.path.exists(LITELLM_CONFIG_PATH):

####################################
# OLLAMA_API_BASE_URL
# OLLAMA_BASE_URL
####################################

OLLAMA_API_BASE_URL = os.environ.get(
    "OLLAMA_API_BASE_URL", "http://localhost:11434/api"
)

if ENV == "prod":
    if OLLAMA_API_BASE_URL == "/ollama/api":
        OLLAMA_API_BASE_URL = "http://host.docker.internal:11434/api"


OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "")

if OLLAMA_BASE_URL == "":
    if ENV == "prod":
        if OLLAMA_BASE_URL == "/ollama":
            OLLAMA_BASE_URL = "http://host.docker.internal:11434"

if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "":
    OLLAMA_BASE_URL = (
        OLLAMA_API_BASE_URL[:-4]
        if OLLAMA_API_BASE_URL.endswith("/api")
        else OLLAMA_API_BASE_URL
    )

OLLAMA_BASE_URLS = os.environ.get("OLLAMA_BASE_URLS", "")
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS if OLLAMA_BASE_URLS != "" else OLLAMA_BASE_URL

OLLAMA_BASE_URLS = [url.strip() for url in OLLAMA_BASE_URLS.split(",")]


####################################
# OPENAI_API
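In other words, a comma-separated `OLLAMA_BASE_URLS` value is split into a list of servers, falling back to the single `OLLAMA_BASE_URL` when unset. With illustrative values (not defaults from this commit):

```python
# Illustrative: how the comma-separated env var above becomes a list of servers.
OLLAMA_BASE_URLS = "http://localhost:11434,http://192.168.1.10:11434"
urls = [url.strip() for url in OLLAMA_BASE_URLS.split(",")]
# ['http://localhost:11434', 'http://192.168.1.10:11434']
```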
@@ -48,3 +48,5 @@ class ERROR_MESSAGES(str, Enum):
        lambda err="": f"Invalid format. Please use the correct format{err if err else ''}"
    )
    RATE_LIMIT_EXCEEDED = "API rate limit exceeded"

    MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found"
@@ -104,7 +104,7 @@ async def auth_middleware(request: Request, call_next):
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)

app.mount("/ollama/api", ollama_app)
app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)
@@ -125,6 +125,14 @@ async def get_app_config():
    }


@app.get("/api/version")
async def get_app_config():

    return {
        "version": VERSION,
    }


@app.get("/api/changelog")
async def get_app_changelog():
    return CHANGELOG
@@ -22,6 +22,7 @@ google-generativeai

langchain
langchain-community
fake_useragent
chromadb
sentence_transformers
pypdf
@@ -33,6 +34,7 @@ pandas
openpyxl
pyxlsb
xlrd
rapidocr-onnxruntime

faster-whisper
@@ -14,7 +14,7 @@ services:
    build:
      context: .
      args:
        OLLAMA_API_BASE_URL: '/ollama/api'
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
@@ -25,7 +25,7 @@ services:
    ports:
      - ${OPEN_WEBUI_PORT-3000}:8080
    environment:
      - 'OLLAMA_API_BASE_URL=http://ollama:11434/api'
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
    extra_hosts:
      - host.docker.internal:host-gateway
|
|||
- name: data
|
||||
mountPath: /app/backend/data
|
||||
env:
|
||||
- name: OLLAMA_API_BASE_URL
|
||||
- name: OLLAMA_BASE_URL
|
||||
value: {{ include "ollama.url" . | quote }}
|
||||
tty: true
|
||||
{{- with .Values.webui.nodeSelector }}
|
||||
|
|
|
@ -26,8 +26,8 @@ spec:
|
|||
cpu: "1000m"
|
||||
memory: "1Gi"
|
||||
env:
|
||||
- name: OLLAMA_API_BASE_URL
|
||||
value: "http://ollama-service.open-webui.svc.cluster.local:11434/api"
|
||||
- name: OLLAMA_BASE_URL
|
||||
value: "http://ollama-service.open-webui.svc.cluster.local:11434"
|
||||
tty: true
|
||||
volumeMounts:
|
||||
- name: webui-volume
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "open-webui",
|
||||
"version": "0.1.108",
|
||||
"version": "0.1.109",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "vite dev --host",
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import { OLLAMA_API_BASE_URL } from '$lib/constants';
|
||||
|
||||
export const getOllamaAPIUrl = async (token: string = '') => {
|
||||
export const getOllamaUrls = async (token: string = '') => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/url`, {
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/urls`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
|
@ -29,13 +29,13 @@ export const getOllamaAPIUrl = async (token: string = '') => {
|
|||
throw error;
|
||||
}
|
||||
|
||||
return res.OLLAMA_BASE_URL;
|
||||
return res.OLLAMA_BASE_URLS;
|
||||
};
|
||||
|
||||
export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
|
||||
export const updateOllamaUrls = async (token: string = '', urls: string[]) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/url/update`, {
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/urls/update`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
|
@ -43,7 +43,7 @@ export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
|
|||
...(token && { authorization: `Bearer ${token}` })
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: url
|
||||
urls: urls
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
|
@ -64,7 +64,7 @@ export const updateOllamaAPIUrl = async (token: string = '', url: string) => {
|
|||
throw error;
|
||||
}
|
||||
|
||||
return res.OLLAMA_BASE_URL;
|
||||
return res.OLLAMA_BASE_URLS;
|
||||
};
|
||||
|
||||
export const getOllamaVersion = async (token: string = '') => {
|
||||
|
@ -151,7 +151,8 @@ export const generateTitle = async (
|
|||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
@ -189,7 +190,8 @@ export const generatePrompt = async (token: string = '', model: string, conversa
|
|||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
@ -223,7 +225,8 @@ export const generateTextCompletion = async (token: string = '', model: string,
|
|||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
@ -251,7 +254,8 @@ export const generateChatCompletion = async (token: string = '', body: object) =
|
|||
signal: controller.signal,
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify(body)
|
||||
|
@ -294,7 +298,8 @@ export const createModel = async (token: string, tagName: string, content: strin
|
|||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/create`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
@ -313,19 +318,23 @@ export const createModel = async (token: string, tagName: string, content: strin
|
|||
return res;
|
||||
};
|
||||
|
||||
export const deleteModel = async (token: string, tagName: string) => {
|
||||
export const deleteModel = async (token: string, tagName: string, urlIdx: string | null = null) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/delete`, {
|
||||
const res = await fetch(
|
||||
`${OLLAMA_API_BASE_URL}/api/delete${urlIdx !== null ? `/${urlIdx}` : ''}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: tagName
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
|
@ -336,7 +345,12 @@ export const deleteModel = async (token: string, tagName: string) => {
|
|||
})
|
||||
.catch((err) => {
|
||||
console.log(err);
|
||||
error = err.error;
|
||||
error = err;
|
||||
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
|
||||
|
@ -347,13 +361,14 @@ export const deleteModel = async (token: string, tagName: string) => {
|
|||
return res;
|
||||
};
|
||||
|
||||
export const pullModel = async (token: string, tagName: string) => {
|
||||
export const pullModel = async (token: string, tagName: string, urlIdx: string | null = null) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull`, {
|
||||
const res = await fetch(`${OLLAMA_API_BASE_URL}/api/pull${urlIdx !== null ? `/${urlIdx}` : ''}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
|
|
@@ -19,7 +19,7 @@

	export let suggestionPrompts = [];
	export let autoScroll = true;

	let chatTextAreaElement: HTMLTextAreaElement;
	let filesInputElement;

	let promptsElement;
@@ -43,11 +43,9 @@
	let speechRecognition;

	$: if (prompt) {
		const chatInput = document.getElementById('chat-textarea');

		if (chatInput) {
			chatInput.style.height = '';
			chatInput.style.height = Math.min(chatInput.scrollHeight, 200) + 'px';
		if (chatTextAreaElement) {
			chatTextAreaElement.style.height = '';
			chatTextAreaElement.style.height = Math.min(chatTextAreaElement.scrollHeight, 200) + 'px';
		}
	}

@@ -86,9 +84,7 @@
			if (res) {
				prompt = res.text;
				await tick();

				const inputElement = document.getElementById('chat-textarea');
				inputElement?.focus();
				chatTextAreaElement?.focus();

				if (prompt !== '' && $settings?.speechAutoSend === true) {
					submitPrompt(prompt, user);
@@ -191,8 +187,7 @@
					prompt = `${prompt}${transcript}`;

					await tick();
					const inputElement = document.getElementById('chat-textarea');
					inputElement?.focus();
					chatTextAreaElement?.focus();

					// Restart the inactivity timeout
					timeoutId = setTimeout(() => {
@@ -294,8 +289,7 @@
	};

	onMount(() => {
		const chatInput = document.getElementById('chat-textarea');
		window.setTimeout(() => chatInput?.focus(), 0);
		window.setTimeout(() => chatTextAreaElement?.focus(), 0);

		const dropZone = document.querySelector('body');

@@ -663,6 +657,7 @@

			<textarea
				id="chat-textarea"
				bind:this={chatTextAreaElement}
				class=" dark:bg-gray-900 dark:text-gray-100 outline-none w-full py-3 px-3 {fileUploadEnabled
					? ''
					: ' pl-4'} rounded-xl resize-none h-[48px]"
|
|||
}, 100);
|
||||
};
|
||||
|
||||
// TODO: change delete behaviour
|
||||
// const deleteMessageAndDescendants = async (messageId: string) => {
|
||||
// if (history.messages[messageId]) {
|
||||
// history.messages[messageId].deleted = true;
|
||||
|
||||
// for (const childId of history.messages[messageId].childrenIds) {
|
||||
// await deleteMessageAndDescendants(childId);
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
|
||||
// const triggerDeleteMessageRecursive = async (messageId: string) => {
|
||||
// await deleteMessageAndDescendants(messageId);
|
||||
// await updateChatById(localStorage.token, chatId, { history });
|
||||
// await chats.set(await getChatList(localStorage.token));
|
||||
// };
|
||||
|
||||
const messageDeleteHandler = async (messageId) => {
|
||||
if (history.messages[messageId]) {
|
||||
history.messages[messageId].deleted = true;
|
||||
|
||||
for (const childId of history.messages[messageId].childrenIds) {
|
||||
history.messages[childId].deleted = true;
|
||||
const messageToDelete = history.messages[messageId];
|
||||
const messageParentId = messageToDelete.parentId;
|
||||
const messageChildrenIds = messageToDelete.childrenIds ?? [];
|
||||
const hasSibling = messageChildrenIds.some(
|
||||
(childId) => history.messages[childId]?.childrenIds?.length > 0
|
||||
);
|
||||
messageChildrenIds.forEach((childId) => {
|
||||
const child = history.messages[childId];
|
||||
if (child && child.childrenIds) {
|
||||
if (child.childrenIds.length === 0 && !hasSibling) {
|
||||
// if last prompt/response pair
|
||||
history.messages[messageParentId].childrenIds = [];
|
||||
history.currentId = messageParentId;
|
||||
} else {
|
||||
child.childrenIds.forEach((grandChildId) => {
|
||||
if (history.messages[grandChildId]) {
|
||||
history.messages[grandChildId].parentId = messageParentId;
|
||||
history.messages[messageParentId].childrenIds.push(grandChildId);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
await updateChatById(localStorage.token, chatId, { history });
|
||||
// remove response
|
||||
history.messages[messageParentId].childrenIds = history.messages[
|
||||
messageParentId
|
||||
].childrenIds.filter((id) => id !== childId);
|
||||
});
|
||||
// remove prompt
|
||||
history.messages[messageParentId].childrenIds = history.messages[
|
||||
messageParentId
|
||||
].childrenIds.filter((id) => id !== messageId);
|
||||
await updateChatById(localStorage.token, chatId, {
|
||||
messages: messages,
|
||||
history: history
|
||||
});
|
||||
};

	// const messageDeleteHandler = async (messageId) => {
	// 	const message = history.messages[messageId];
	// 	const parentId = message.parentId;
	// 	const childrenIds = message.childrenIds ?? [];
	// 	const grandchildrenIds = [];

	// 	// Iterate through childrenIds to find grandchildrenIds
	// 	for (const childId of childrenIds) {
	// 		const childMessage = history.messages[childId];
	// 		const grandChildrenIds = childMessage.childrenIds ?? [];

	// 		for (const grandchildId of grandchildrenIds) {
	// 			const childMessage = history.messages[grandchildId];
	// 			childMessage.parentId = parentId;
	// 		}
	// 		grandchildrenIds.push(...grandChildrenIds);
	// 	}

	// 	history.messages[parentId].childrenIds.push(...grandchildrenIds);
	// 	history.messages[parentId].childrenIds = history.messages[parentId].childrenIds.filter(
	// 		(id) => id !== messageId
	// 	);

	// 	// Select latest message
	// 	let currentMessageId = grandchildrenIds.at(-1);
	// 	if (currentMessageId) {
	// 		let messageChildrenIds = history.messages[currentMessageId].childrenIds;
	// 		while (messageChildrenIds.length !== 0) {
	// 			currentMessageId = messageChildrenIds.at(-1);
	// 			messageChildrenIds = history.messages[currentMessageId].childrenIds;
	// 		}
	// 		history.currentId = currentMessageId;
	// 	}

	// 	await updateChatById(localStorage.token, chatId, { messages, history });
	// };
</script>
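For orientation, a minimal, self-contained sketch of the re-linking idea behind the new delete handler above: removing a prompt re-parents its responses' children onto the prompt's own parent and unlinks the removed pair. The types and the `deleteMessagePair` helper here are illustrative, not the component's actual state or API.

```ts
// Simplified stand-in for the component's history structure.
type MessageNode = {
	id: string;
	parentId: string | null;
	childrenIds: string[];
};

type History = {
	messages: Record<string, MessageNode>;
	currentId: string | null;
};

const deleteMessagePair = (history: History, messageId: string) => {
	const message = history.messages[messageId];
	if (!message || message.parentId === null) return;

	const parent = history.messages[message.parentId];

	for (const childId of message.childrenIds) {
		const child = history.messages[childId];
		// Re-parent every grandchild onto the deleted message's parent.
		for (const grandChildId of child?.childrenIds ?? []) {
			history.messages[grandChildId].parentId = parent.id;
			parent.childrenIds.push(grandChildId);
		}
		// Drop the response node itself.
		delete history.messages[childId];
	}

	// Drop the prompt node from its parent's children and from the map.
	parent.childrenIds = parent.childrenIds.filter((id) => id !== messageId);
	delete history.messages[messageId];

	// Fall back to the parent if the current branch was removed.
	if (history.currentId === messageId || !history.messages[history.currentId ?? '']) {
		history.currentId = parent.id;
	}
};
```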
|
||||
|
||||
{#if messages.length == 0}
|
||||
|
@ -258,7 +305,6 @@
|
|||
<div class=" pb-10">
|
||||
{#key chatId}
|
||||
{#each messages as message, messageIdx}
|
||||
{#if !message.deleted}
|
||||
<div class=" w-full">
|
||||
<div
|
||||
class="flex flex-col justify-between px-5 mb-3 {$settings?.fullScreenMode ?? null
|
||||
|
@ -308,7 +354,6 @@
|
|||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
|
||||
{#if bottomPadding}
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
import CodeBlock from './CodeBlock.svelte';
|
||||
import Image from '$lib/components/common/Image.svelte';
|
||||
import { WEBUI_BASE_URL } from '$lib/constants';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
|
||||
export let modelfiles = [];
|
||||
export let message;
|
||||
|
@ -40,7 +41,7 @@
|
|||
|
||||
let edit = false;
|
||||
let editedContent = '';
|
||||
|
||||
let editTextAreaElement: HTMLTextAreaElement;
|
||||
let tooltipInstance = null;
|
||||
|
||||
let sentencesAudio = {};
|
||||
|
@ -247,10 +248,9 @@
|
|||
editedContent = message.content;
|
||||
|
||||
await tick();
|
||||
const editElement = document.getElementById(`message-edit-${message.id}`);
|
||||
|
||||
editElement.style.height = '';
|
||||
editElement.style.height = `${editElement.scrollHeight}px`;
|
||||
editTextAreaElement.style.height = '';
|
||||
editTextAreaElement.style.height = `${editTextAreaElement.scrollHeight}px`;
|
||||
};
|
||||
|
||||
const editMessageConfirmHandler = async () => {
|
||||
|
@ -341,9 +341,11 @@
			<div class=" w-full">
				<textarea
					id="message-edit-{message.id}"
					bind:this={editTextAreaElement}
					class=" bg-transparent outline-none w-full resize-none"
					bind:value={editedContent}
					on:input={(e) => {
						e.target.style.height = '';
						e.target.style.height = `${e.target.scrollHeight}px`;
					}}
				/>
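The inline `on:input` handler above is the usual auto-grow trick for a textarea: reset the height, then set it to the element's scroll height. Factored out as a reusable helper (names here are illustrative, not part of the component):

```ts
// Generic auto-grow helper, equivalent to the inline handler in the markup above.
const autoGrow = (el: HTMLTextAreaElement) => {
	el.style.height = ''; // reset first so the box can also shrink
	el.style.height = `${el.scrollHeight}px`; // then grow to fit the content
};

// Usage sketch: run once after the element is bound and again on every input.
// autoGrow(editTextAreaElement);
// editTextAreaElement.addEventListener('input', () => autoGrow(editTextAreaElement));
```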
|
||||
|
@ -462,6 +464,7 @@
|
|||
</div>
|
||||
{/if}
|
||||
|
||||
<Tooltip content="Edit" placement="bottom">
|
||||
<button
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
@ -485,7 +488,9 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Copy" placement="bottom">
|
||||
<button
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
@ -509,7 +514,9 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Good Response" placement="bottom">
|
||||
<button
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
@ -534,6 +541,9 @@
|
|||
/></svg
|
||||
>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Bad Response" placement="bottom">
|
||||
<button
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
@ -558,7 +568,9 @@
|
|||
/></svg
|
||||
>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Read Aloud" placement="bottom">
|
||||
<button
|
||||
id="speak-button-{message.id}"
|
||||
class="{isLastMessage
|
||||
|
@ -598,7 +610,12 @@
|
|||
cx="12"
|
||||
cy="12"
|
||||
r="3"
|
||||
/><circle class="spinner_S1WN spinner_JApP" cx="20" cy="12" r="3" /></svg
|
||||
/><circle
|
||||
class="spinner_S1WN spinner_JApP"
|
||||
cx="20"
|
||||
cy="12"
|
||||
r="3"
|
||||
/></svg
|
||||
>
|
||||
{:else if speaking}
|
||||
<svg
|
||||
|
@ -632,8 +649,10 @@
|
|||
</svg>
|
||||
{/if}
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
{#if $config.images}
|
||||
<Tooltip content="Generate Image" placement="bottom">
|
||||
<button
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
@ -696,9 +715,11 @@
|
|||
</svg>
|
||||
{/if}
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if message.info}
|
||||
<Tooltip content="Generation Info" placement="bottom">
|
||||
<button
|
||||
class=" {isLastMessage
|
||||
? 'visible'
|
||||
|
@ -723,9 +744,11 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if isLastMessage}
|
||||
<Tooltip content="Continue Response" placement="bottom">
|
||||
<button
|
||||
type="button"
|
||||
class="{isLastMessage
|
||||
|
@ -755,7 +778,9 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Regenerate" placement="bottom">
|
||||
<button
|
||||
type="button"
|
||||
class="{isLastMessage
|
||||
|
@ -778,6 +803,7 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
import Name from './Name.svelte';
|
||||
import ProfileImage from './ProfileImage.svelte';
|
||||
import { modelfiles, settings } from '$lib/stores';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
|
@ -20,18 +21,17 @@
|
|||
|
||||
let edit = false;
|
||||
let editedContent = '';
|
||||
|
||||
let messageEditTextAreaElement: HTMLTextAreaElement;
|
||||
const editMessageHandler = async () => {
|
||||
edit = true;
|
||||
editedContent = message.content;
|
||||
|
||||
await tick();
|
||||
const editElement = document.getElementById(`message-edit-${message.id}`);
|
||||
|
||||
editElement.style.height = '';
|
||||
editElement.style.height = `${editElement.scrollHeight}px`;
|
||||
messageEditTextAreaElement.style.height = '';
|
||||
messageEditTextAreaElement.style.height = `${messageEditTextAreaElement.scrollHeight}px`;
|
||||
|
||||
editElement?.focus();
|
||||
messageEditTextAreaElement?.focus();
|
||||
};
|
||||
|
||||
const editMessageConfirmHandler = async () => {
|
||||
|
@ -165,9 +165,11 @@
|
|||
<div class=" w-full">
|
||||
<textarea
|
||||
id="message-edit-{message.id}"
|
||||
bind:this={messageEditTextAreaElement}
|
||||
class=" bg-transparent outline-none w-full resize-none"
|
||||
bind:value={editedContent}
|
||||
on:input={(e) => {
|
||||
e.target.style.height = '';
|
||||
e.target.style.height = `${e.target.scrollHeight}px`;
|
||||
}}
|
||||
/>
|
||||
|
@ -245,6 +247,7 @@
|
|||
</div>
|
||||
{/if}
|
||||
|
||||
<Tooltip content="Edit" placement="bottom">
|
||||
<button
|
||||
class="invisible group-hover:visible p-1 rounded dark:hover:text-white hover:text-black transition edit-user-message-button"
|
||||
on:click={() => {
|
||||
|
@ -266,7 +269,9 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<Tooltip content="Copy" placement="bottom">
|
||||
<button
|
||||
class="invisible group-hover:visible p-1 rounded dark:hover:text-white hover:text-black transition"
|
||||
on:click={() => {
|
||||
|
@ -288,8 +293,10 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
{#if !isFirstMessage}
|
||||
<Tooltip content="Delete" placement="bottom">
|
||||
<button
|
||||
class="invisible group-hover:visible p-1 rounded dark:hover:text-white hover:text-black transition"
|
||||
on:click={() => {
|
||||
|
@ -311,6 +318,7 @@
|
|||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
let name = '';
|
||||
let showJWTToken = false;
|
||||
let JWTTokenCopied = false;
|
||||
let profileImageInputElement: HTMLInputElement;
|
||||
|
||||
const submitHandler = async () => {
|
||||
const updatedUser = await updateUserProfile(localStorage.token, name, profileImageUrl).catch(
|
||||
|
@ -40,11 +41,12 @@
|
|||
<div class=" space-y-3 pr-1.5 overflow-y-scroll max-h-80">
|
||||
<input
|
||||
id="profile-image-input"
|
||||
bind:this={profileImageInputElement}
|
||||
type="file"
|
||||
hidden
|
||||
accept="image/*"
|
||||
on:change={(e) => {
|
||||
const files = e?.target?.files ?? [];
|
||||
const files = profileImageInputElement.files ?? [];
|
||||
let reader = new FileReader();
|
||||
reader.onload = (event) => {
|
||||
let originalImageUrl = `${event.target.result}`;
|
||||
|
@ -86,7 +88,7 @@
|
|||
// Display the compressed image
|
||||
profileImageUrl = compressedSrc;
|
||||
|
||||
e.target.files = null;
|
||||
profileImageInputElement.files = null;
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -107,9 +109,7 @@
|
|||
<button
|
||||
class="relative rounded-full dark:bg-gray-700"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
document.getElementById('profile-image-input')?.click();
|
||||
}}
|
||||
on:click={profileImageInputElement.click}
|
||||
>
|
||||
<img
|
||||
src={profileImageUrl !== '' ? profileImageUrl : '/user.png'}
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
let saveChatHistory = true;
|
||||
let importFiles;
|
||||
let showDeleteConfirm = false;
|
||||
let chatImportInputElement: HTMLInputElement;
|
||||
|
||||
$: if (importFiles) {
|
||||
console.log(importFiles);
|
||||
|
@ -159,12 +160,17 @@
|
|||
<hr class=" dark:border-gray-700" />
|
||||
|
||||
<div class="flex flex-col">
|
||||
<input id="chat-import-input" bind:files={importFiles} type="file" accept=".json" hidden />
|
||||
<input
|
||||
id="chat-import-input"
|
||||
bind:this={chatImportInputElement}
|
||||
bind:files={importFiles}
|
||||
type="file"
|
||||
accept=".json"
|
||||
hidden
|
||||
/>
|
||||
<button
|
||||
class=" flex rounded-md py-2 px-3.5 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
|
||||
on:click={() => {
|
||||
document.getElementById('chat-import-input').click();
|
||||
}}
|
||||
on:click={chatImportInputElement.click}
|
||||
>
|
||||
<div class=" self-center mr-3">
|
||||
<svg
|
||||
|
|
|
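The chat-import change above follows the same pattern used across these settings panels: a hidden `<input type="file">` is bound to an element reference and triggered from a visible button. A general sketch of that pattern (not the component's exact code); wrapping the call in a closure keeps `click()` invoked on the input itself, and the selected JSON export can then be read with the standard Blob API:

```ts
// Hidden-file-input pattern with a bound element reference (illustrative names).
let chatImportInputElement: HTMLInputElement | undefined;

const openImportPicker = () => {
	chatImportInputElement?.click();
};

const readImportedChats = async (): Promise<unknown> => {
	const file = chatImportInputElement?.files?.[0];
	if (!file) throw new Error('No file selected');
	return JSON.parse(await file.text()); // chat exports are JSON files
};
```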
@ -3,14 +3,15 @@
|
|||
import { createEventDispatcher, onMount } from 'svelte';
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
import { getOllamaAPIUrl, getOllamaVersion, updateOllamaAPIUrl } from '$lib/apis/ollama';
|
||||
import { getOllamaUrls, getOllamaVersion, updateOllamaUrls } from '$lib/apis/ollama';
|
||||
import { getOpenAIKey, getOpenAIUrl, updateOpenAIKey, updateOpenAIUrl } from '$lib/apis/openai';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
export let getModels: Function;
|
||||
|
||||
// External
|
||||
let API_BASE_URL = '';
|
||||
let OLLAMA_BASE_URL = '';
|
||||
let OLLAMA_BASE_URLS = [''];
|
||||
|
||||
let OPENAI_API_KEY = '';
|
||||
let OPENAI_API_BASE_URL = '';
|
||||
|
@ -25,8 +26,8 @@
		await models.set(await getModels());
	};

	const updateOllamaAPIUrlHandler = async () => {
		API_BASE_URL = await updateOllamaAPIUrl(localStorage.token, API_BASE_URL);
	const updateOllamaUrlsHandler = async () => {
		OLLAMA_BASE_URLS = await updateOllamaUrls(localStorage.token, OLLAMA_BASE_URLS);

		const ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
			toast.error(error);
@ -41,7 +42,7 @@

	onMount(async () => {
		if ($user.role === 'admin') {
			API_BASE_URL = await getOllamaAPIUrl(localStorage.token);
			OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
			OPENAI_API_BASE_URL = await getOpenAIUrl(localStorage.token);
			OPENAI_API_KEY = await getOpenAIKey(localStorage.token);
		}
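The single `API_BASE_URL` string becomes an `OLLAMA_BASE_URLS` array that is loaded with `getOllamaUrls` and saved with `updateOllamaUrls` (both imported from `$lib/apis/ollama` in this diff). Illustrative helpers for maintaining that list before it is saved; the trimming and de-duplication rules below are assumptions, only the save call itself comes from the diff:

```ts
// Add an empty row for a new URL field.
const addUrlField = (urls: string[]): string[] => [...urls, ''];

// Remove the row at the given index.
const removeUrlField = (urls: string[], idx: number): string[] =>
	urls.filter((_, urlIdx) => urlIdx !== idx);

// Assumed clean-up before calling updateOllamaUrls(token, urls).
const sanitizeUrls = (urls: string[]): string[] => {
	const cleaned = urls
		.map((url) => url.trim().replace(/\/+$/, '')) // drop trailing slashes
		.filter((url) => url !== '');
	return [...new Set(cleaned)]; // de-duplicate while preserving order
};

// sanitizeUrls(['http://localhost:11434/', '', 'http://localhost:11434'])
//   -> ['http://localhost:11434']
```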
|
||||
|
@ -53,11 +54,6 @@
|
|||
on:submit|preventDefault={() => {
|
||||
updateOpenAIHandler();
|
||||
dispatch('save');
|
||||
|
||||
// saveSettings({
|
||||
// OPENAI_API_KEY: OPENAI_API_KEY !== '' ? OPENAI_API_KEY : undefined,
|
||||
// OPENAI_API_BASE_URL: OPENAI_API_BASE_URL !== '' ? OPENAI_API_BASE_URL : undefined
|
||||
// });
|
||||
}}
|
||||
>
|
||||
<div class=" pr-1.5 overflow-y-scroll max-h-[20.5rem] space-y-3">
|
||||
|
@ -115,18 +111,64 @@
|
|||
|
||||
<div>
|
||||
<div class=" mb-2.5 text-sm font-medium">Ollama Base URL</div>
|
||||
<div class="flex w-full">
|
||||
<div class="flex-1 mr-2">
|
||||
<div class="flex w-full gap-1.5">
|
||||
<div class="flex-1 flex flex-col gap-2">
|
||||
{#each OLLAMA_BASE_URLS as url, idx}
|
||||
<div class="flex gap-1.5">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-800 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter URL (e.g. http://localhost:11434)"
|
||||
bind:value={API_BASE_URL}
|
||||
bind:value={url}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="self-center flex items-center">
|
||||
{#if idx === 0}
|
||||
<button
|
||||
class="px-3 bg-gray-200 hover:bg-gray-300 dark:bg-gray-600 dark:hover:bg-gray-700 rounded transition"
|
||||
class="px-1"
|
||||
on:click={() => {
|
||||
updateOllamaAPIUrlHandler();
|
||||
OLLAMA_BASE_URLS = [...OLLAMA_BASE_URLS, ''];
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
{:else}
|
||||
<button
|
||||
class="px-1"
|
||||
on:click={() => {
|
||||
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url, urlIdx) => idx !== urlIdx);
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
|
||||
</svg>
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
|
||||
<div class="">
|
||||
<button
|
||||
class="p-2.5 bg-gray-200 hover:bg-gray-300 dark:bg-gray-850 dark:hover:bg-gray-800 rounded-lg transition"
|
||||
on:click={() => {
|
||||
updateOllamaUrlsHandler();
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
|
@ -144,6 +186,7 @@
|
|||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
|
||||
Trouble accessing Ollama?
|
||||
|
|
|
@ -2,7 +2,13 @@
|
|||
import queue from 'async/queue';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
import { createModel, deleteModel, getOllamaVersion, pullModel } from '$lib/apis/ollama';
|
||||
import {
|
||||
createModel,
|
||||
deleteModel,
|
||||
getOllamaUrls,
|
||||
getOllamaVersion,
|
||||
pullModel
|
||||
} from '$lib/apis/ollama';
|
||||
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
|
||||
import { WEBUI_NAME, models, user } from '$lib/stores';
|
||||
import { splitStream } from '$lib/utils';
|
||||
|
@ -13,7 +19,7 @@
|
|||
|
||||
let showLiteLLM = false;
|
||||
let showLiteLLMParams = false;
|
||||
|
||||
let modelUploadInputElement: HTMLInputElement;
|
||||
let liteLLMModelInfo = [];
|
||||
|
||||
let liteLLMModel = '';
|
||||
|
@ -27,6 +33,9 @@
	$: liteLLMModelName = liteLLMModel;

	// Models

	let OLLAMA_URLS = [];
	let selectedOllamaUrlIdx: string | null = null;
	let showExperimentalOllama = false;
	let ollamaVersion = '';
	const MAX_PARALLEL_DOWNLOADS = 3;
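`MAX_PARALLEL_DOWNLOADS` works together with the `queue` import from `async/queue` at the top of this file: pulls are queued and at most three run concurrently. A minimal sketch of that wiring; `pullOne()` and the model names are placeholders, not the component's real handler:

```ts
import queue from 'async/queue';

const MAX_PARALLEL_DOWNLOADS = 3;

// Placeholder for the real per-model download handler.
const pullOne = async (modelName: string) => {
	console.log(`pulling ${modelName}…`);
};

// Worker runs for each queued task; concurrency is capped at 3.
const downloadQueue = queue(async (task: { modelName: string }) => {
	await pullOne(task.modelName);
}, MAX_PARALLEL_DOWNLOADS);

// Any number of pulls can be queued; only three run at the same time.
for (const modelName of ['mistral:7b', 'llama2:13b', 'codellama:7b', 'phi']) {
	downloadQueue.push({ modelName });
}
```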
|
||||
|
@ -45,7 +54,7 @@
|
|||
let modelUploadMode = 'file';
|
||||
let modelInputFile = '';
|
||||
let modelFileUrl = '';
|
||||
let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop "</s>"\nPARAMETER stop "USER:"\nPARAMETER stop "ASSSISTANT:"`;
|
||||
let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop "</s>"\nPARAMETER stop "USER:"\nPARAMETER stop "ASSISTANT:"`;
|
||||
let modelFileDigest = '';
|
||||
let uploadProgress = null;
|
||||
|
||||
|
@ -236,9 +245,11 @@
|
|||
};
|
||||
|
||||
const deleteModelHandler = async () => {
|
||||
const res = await deleteModel(localStorage.token, deleteModelTag).catch((error) => {
|
||||
const res = await deleteModel(localStorage.token, deleteModelTag, selectedOllamaUrlIdx).catch(
|
||||
(error) => {
|
||||
toast.error(error);
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
if (res) {
|
||||
toast.success(`Deleted ${deleteModelTag}`);
|
||||
|
@ -249,10 +260,12 @@
|
|||
};
|
||||
|
||||
const pullModelHandlerProcessor = async (opts: { modelName: string; callback: Function }) => {
|
||||
const res = await pullModel(localStorage.token, opts.modelName).catch((error) => {
|
||||
const res = await pullModel(localStorage.token, opts.modelName, selectedOllamaUrlIdx).catch(
|
||||
(error) => {
|
||||
opts.callback({ success: false, error, modelName: opts.modelName });
|
||||
return null;
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
if (res) {
|
||||
const reader = res.body
|
||||
|
@ -358,6 +371,15 @@
	};

	onMount(async () => {
		OLLAMA_URLS = await getOllamaUrls(localStorage.token).catch((error) => {
			toast.error(error);
			return [];
		});

		if (OLLAMA_URLS.length > 1) {
			selectedOllamaUrlIdx = 0;
		}

		ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => false);
		liteLLMModelInfo = await getLiteLLMModelInfo(localStorage.token);
	});
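With several configured servers, `selectedOllamaUrlIdx` is passed as an extra argument to `pullModel` and `deleteModel` (visible in the handlers above), and the delete dropdown only lists models served by the chosen instance. A sketch of that filter; the model shape (a `urls` array of server indices plus a `size`) is read off the template further down, everything else is illustrative:

```ts
// Assumed shape of an Ollama-backed entry in the models store.
type OllamaModel = {
	name: string;
	size?: number;
	urls?: number[]; // indices into OLLAMA_URLS reporting this model
};

// With no instance selected every local model is listed; with an index selected
// only the models reported by that server remain.
const modelsForInstance = (models: OllamaModel[], selectedIdx: number | null): OllamaModel[] =>
	models.filter(
		(m) => m.size != null && (selectedIdx === null || (m.urls ?? []).includes(selectedIdx))
	);
```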
|
||||
|
@ -367,20 +389,35 @@
|
|||
<div class=" space-y-3 pr-1.5 overflow-y-scroll h-[23rem]">
|
||||
{#if ollamaVersion}
|
||||
<div class="space-y-2 pr-1.5">
|
||||
<div>
|
||||
<div class=" mb-2 text-sm font-medium">Manage Ollama Models</div>
|
||||
<div class="text-sm font-medium">Manage Ollama Models</div>
|
||||
|
||||
{#if OLLAMA_URLS.length > 1}
|
||||
<div class="flex-1 pb-1">
|
||||
<select
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
bind:value={selectedOllamaUrlIdx}
|
||||
placeholder="Select an Ollama instance"
|
||||
>
|
||||
{#each OLLAMA_URLS as url, idx}
|
||||
<option value={idx} class="bg-gray-100 dark:bg-gray-700">{url}</option>
|
||||
{/each}
|
||||
</select>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="space-y-2">
|
||||
<div>
|
||||
<div class=" mb-2 text-sm font-medium">Pull a model from Ollama.com</div>
|
||||
<div class="flex w-full">
|
||||
<div class="flex-1 mr-2">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter model tag (e.g. mistral:7b)"
|
||||
bind:value={modelTag}
|
||||
/>
|
||||
</div>
|
||||
<button
|
||||
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
|
||||
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
|
||||
on:click={() => {
|
||||
pullModelHandler();
|
||||
}}
|
||||
|
@ -463,14 +500,14 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1 mr-2">
|
||||
<select
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
bind:value={deleteModelTag}
|
||||
placeholder="Select a model"
|
||||
>
|
||||
{#if !deleteModelTag}
|
||||
<option value="" disabled selected>Select a model</option>
|
||||
{/if}
|
||||
{#each $models.filter((m) => m.size != null) as model}
|
||||
{#each $models.filter((m) => m.size != null && (selectedOllamaUrlIdx === null ? true : (m?.urls ?? []).includes(selectedOllamaUrlIdx))) as model}
|
||||
<option value={model.name} class="bg-gray-100 dark:bg-gray-700"
|
||||
>{model.name + ' (' + (model.size / 1024 ** 3).toFixed(1) + ' GB)'}</option
|
||||
>
|
||||
|
@ -478,7 +515,7 @@
|
|||
</select>
|
||||
</div>
|
||||
<button
|
||||
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
|
||||
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
|
||||
on:click={() => {
|
||||
deleteModelHandler();
|
||||
}}
|
||||
|
@ -499,7 +536,7 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class="pt-1">
|
||||
<div class="flex justify-between items-center text-xs">
|
||||
<div class=" text-sm font-medium">Experimental</div>
|
||||
<button
|
||||
|
@ -507,7 +544,7 @@
|
|||
type="button"
|
||||
on:click={() => {
|
||||
showExperimentalOllama = !showExperimentalOllama;
|
||||
}}>{showExperimentalOllama ? 'Show' : 'Hide'}</button
|
||||
}}>{showExperimentalOllama ? 'Hide' : 'Show'}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -546,6 +583,7 @@
|
|||
<div class="flex-1 {modelInputFile && modelInputFile.length > 0 ? 'mr-2' : ''}">
|
||||
<input
|
||||
id="model-upload-input"
|
||||
bind:this={modelUploadInputElement}
|
||||
type="file"
|
||||
bind:files={modelInputFile}
|
||||
on:change={() => {
|
||||
|
@ -558,10 +596,8 @@
|
|||
|
||||
<button
|
||||
type="button"
|
||||
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850"
|
||||
on:click={() => {
|
||||
document.getElementById('model-upload-input').click();
|
||||
}}
|
||||
class="w-full rounded-lg text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850"
|
||||
on:click={modelUploadInputElement.click}
|
||||
>
|
||||
{#if modelInputFile && modelInputFile.length > 0}
|
||||
{modelInputFile[0].name}
|
||||
|
@ -573,7 +609,7 @@
|
|||
{:else}
|
||||
<div class="flex-1 {modelFileUrl !== '' ? 'mr-2' : ''}">
|
||||
<input
|
||||
class="w-full rounded text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850 outline-none {modelFileUrl !==
|
||||
class="w-full rounded-lg text-left py-2 px-4 dark:text-gray-300 dark:bg-gray-850 outline-none {modelFileUrl !==
|
||||
''
|
||||
? 'mr-2'
|
||||
: ''}"
|
||||
|
@ -677,6 +713,7 @@
|
|||
</form>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<hr class=" dark:border-gray-700 my-2" />
|
||||
{/if}
|
||||
|
||||
|
@ -693,7 +730,7 @@
|
|||
type="button"
|
||||
on:click={() => {
|
||||
showLiteLLMParams = !showLiteLLMParams;
|
||||
}}>{showLiteLLMParams ? 'Advanced' : 'Default'}</button
|
||||
}}>{showLiteLLMParams ? 'Hide Additional Params' : 'Show Additional Params'}</button
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -702,7 +739,7 @@
|
|||
<div class="flex w-full mb-1.5">
|
||||
<div class="flex-1 mr-2">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter LiteLLM Model (litellm_params.model)"
|
||||
bind:value={liteLLMModel}
|
||||
autocomplete="off"
|
||||
|
@ -710,7 +747,7 @@
|
|||
</div>
|
||||
|
||||
<button
|
||||
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
|
||||
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
|
||||
on:click={() => {
|
||||
addLiteLLMModelHandler();
|
||||
}}
|
||||
|
@ -734,7 +771,7 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter Model Name (model_name)"
|
||||
bind:value={liteLLMModelName}
|
||||
autocomplete="off"
|
||||
|
@ -748,7 +785,7 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter LiteLLM API Base URL (litellm_params.api_base)"
|
||||
bind:value={liteLLMAPIBase}
|
||||
autocomplete="off"
|
||||
|
@ -762,7 +799,7 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter LiteLLM API Key (litellm_params.api_key)"
|
||||
bind:value={liteLLMAPIKey}
|
||||
autocomplete="off"
|
||||
|
@ -776,7 +813,7 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1">
|
||||
<input
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
placeholder="Enter LiteLLM API RPM (litellm_params.rpm)"
|
||||
bind:value={liteLLMRPM}
|
||||
autocomplete="off"
|
||||
|
@ -803,7 +840,7 @@
|
|||
<div class="flex w-full">
|
||||
<div class="flex-1 mr-2">
|
||||
<select
|
||||
class="w-full rounded py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm dark:text-gray-300 dark:bg-gray-850 outline-none"
|
||||
bind:value={deleteLiteLLMModelId}
|
||||
placeholder="Select a model"
|
||||
>
|
||||
|
@ -818,7 +855,7 @@
|
|||
</select>
|
||||
</div>
|
||||
<button
|
||||
class="px-3 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded transition"
|
||||
class="px-2.5 bg-gray-100 hover:bg-gray-200 text-gray-800 dark:bg-gray-850 dark:hover:bg-gray-800 dark:text-gray-100 rounded-lg transition"
|
||||
on:click={() => {
|
||||
deleteLiteLLMModelHandler();
|
||||
}}
|
||||
|
|
|
@ -29,6 +29,6 @@
	});
</script>

<div bind:this={tooltipElement}>
<div bind:this={tooltipElement} aria-label={content}>
	<slot />
</div>
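The only change here is adding `aria-label={content}` to the wrapper, so assistive technology announces the same text the visual tooltip shows. A hypothetical initializer, assuming the component is backed by tippy.js (the bound `tooltipElement` suggests this, but it is not confirmed by the diff):

```ts
import tippy from 'tippy.js';

// Attach both the visual tooltip and a matching aria-label to the wrapper element.
const initTooltip = (
	el: HTMLElement,
	content: string,
	placement: 'top' | 'bottom' | 'left' | 'right' = 'top'
) => {
	el.setAttribute('aria-label', content);
	return tippy(el, { content, placement });
};
```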
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
export let show = false;
|
||||
export let selectedDoc;
|
||||
|
||||
let uploadDocInputElement: HTMLInputElement;
|
||||
let inputFiles;
|
||||
let tags = [];
|
||||
|
||||
|
@ -69,7 +69,7 @@
|
|||
}
|
||||
|
||||
inputFiles = null;
|
||||
document.getElementById('upload-doc-input').value = '';
|
||||
uploadDocInputElement.value = '';
|
||||
} else {
|
||||
toast.error(`File not found.`);
|
||||
}
|
||||
|
@ -126,14 +126,19 @@
|
|||
}}
|
||||
>
|
||||
<div class="mb-3 w-full">
|
||||
<input id="upload-doc-input" hidden bind:files={inputFiles} type="file" multiple />
|
||||
<input
|
||||
id="upload-doc-input"
|
||||
bind:this={uploadDocInputElement}
|
||||
hidden
|
||||
bind:files={inputFiles}
|
||||
type="file"
|
||||
multiple
|
||||
/>
|
||||
|
||||
<button
|
||||
class="w-full text-sm font-medium py-3 bg-gray-850 hover:bg-gray-800 text-center rounded-xl"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
document.getElementById('upload-doc-input')?.click();
|
||||
}}
|
||||
on:click={uploadDocInputElement.click}
|
||||
>
|
||||
{#if inputFiles}
|
||||
{inputFiles.length > 0 ? `${inputFiles.length}` : ''} document(s) selected.
|
||||
|
|
|
@ -2,12 +2,11 @@
|
|||
import { onMount } from 'svelte';
|
||||
|
||||
export let messages = [];
|
||||
|
||||
let textAreaElement: HTMLTextAreaElement;
|
||||
onMount(() => {
|
||||
messages.forEach((message, idx) => {
|
||||
let textareaElement = document.getElementById(`${message.role}-${idx}-textarea`);
|
||||
textareaElement.style.height = '';
|
||||
textareaElement.style.height = textareaElement.scrollHeight + 'px';
|
||||
textAreaElement.style.height = '';
|
||||
textAreaElement.style.height = textAreaElement.scrollHeight + 'px';
|
||||
});
|
||||
});
|
||||
</script>
|
||||
|
@ -27,16 +26,17 @@
|
|||
<div class="flex-1">
|
||||
<textarea
|
||||
id="{message.role}-{idx}-textarea"
|
||||
bind:this={textAreaElement}
|
||||
class="w-full bg-transparent outline-none rounded-lg p-2 text-sm resize-none overflow-hidden"
|
||||
placeholder="Enter {message.role === 'user' ? 'a user' : 'an assistant'} message here"
|
||||
rows="1"
|
||||
on:input={(e) => {
|
||||
e.target.style.height = '';
|
||||
e.target.style.height = e.target.scrollHeight + 'px';
|
||||
textAreaElement.style.height = '';
|
||||
textAreaElement.style.height = textAreaElement.scrollHeight + 'px';
|
||||
}}
|
||||
on:focus={(e) => {
|
||||
e.target.style.height = '';
|
||||
e.target.style.height = e.target.scrollHeight + 'px';
|
||||
textAreaElement.style.height = '';
|
||||
textAreaElement.style.height = textAreaElement.scrollHeight + 'px';
|
||||
|
||||
// e.target.style.height = Math.min(e.target.scrollHeight, 200) + 'px';
|
||||
}}
|
||||
|
|
|
@ -7,7 +7,7 @@ export const WEBUI_BASE_URL = dev ? `http://${location.hostname}:8080` : ``;
export const WEBUI_API_BASE_URL = `${WEBUI_BASE_URL}/api/v1`;

export const LITELLM_API_BASE_URL = `${WEBUI_BASE_URL}/litellm/api`;
export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama/api`;
export const OLLAMA_API_BASE_URL = `${WEBUI_BASE_URL}/ollama`;
export const OPENAI_API_BASE_URL = `${WEBUI_BASE_URL}/openai/api`;
export const AUDIO_API_BASE_URL = `${WEBUI_BASE_URL}/audio/api/v1`;
export const IMAGES_API_BASE_URL = `${WEBUI_BASE_URL}/images/api/v1`;
@ -90,8 +90,3 @@ export const SUPPORTED_FILE_EXTENSIONS = [
// This feature, akin to $env/static/private, exclusively incorporates environment variables
// that are prefixed with config.kit.env.publicPrefix (usually set to PUBLIC_).
// Consequently, these variables can be securely exposed to client-side code.

// Example of the .env configuration:
// OLLAMA_API_BASE_URL="http://localhost:11434/api"
// # Public
// PUBLIC_API_BASE_URL=$OLLAMA_API_BASE_URL
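Dropping the `/api` suffix from `OLLAMA_API_BASE_URL` means callers now append the full Ollama route themselves. A small sketch of how that base is consumed; the `/v1` path is taken from the playground code in this same diff, while `/api/version` is only an assumed example of a native Ollama route reached through the proxy:

```ts
import { OLLAMA_API_BASE_URL } from '$lib/constants';

// OpenAI-compatible endpoint used by the playground (grounded in this diff).
const openaiCompatibleBase = `${OLLAMA_API_BASE_URL}/v1`;
// Assumed example of a native Ollama route behind the same proxy prefix.
const versionEndpoint = `${OLLAMA_API_BASE_URL}/api/version`;

console.log(openaiCompatibleBase, versionEndpoint);
```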
|
||||
|
|
|
@ -34,10 +34,11 @@
|
|||
import Sidebar from '$lib/components/layout/Sidebar.svelte';
|
||||
import ShortcutsModal from '$lib/components/chat/ShortcutsModal.svelte';
|
||||
import ChangelogModal from '$lib/components/ChangelogModal.svelte';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
|
||||
let ollamaVersion = '';
|
||||
let loaded = false;
|
||||
|
||||
let showShortcutsButtonElement: HTMLButtonElement;
|
||||
let DB = null;
|
||||
let localDBChats = [];
|
||||
|
||||
|
@ -184,7 +185,7 @@
		if (isCtrlPressed && event.key === '/') {
			event.preventDefault();
			console.log('showShortcuts');
			document.getElementById('show-shortcuts-button')?.click();
			showShortcutsButtonElement.click();
		}
	});
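The Ctrl+/ shortcut now clicks the bound button element instead of looking it up by id. A sketch of the wiring; whether the original also treats Cmd as Ctrl is an assumption here:

```ts
let showShortcutsButtonElement: HTMLButtonElement | undefined;

document.addEventListener('keydown', (event: KeyboardEvent) => {
	const isCtrlPressed = event.ctrlKey || event.metaKey; // metaKey added as a macOS assumption
	if (isCtrlPressed && event.key === '/') {
		event.preventDefault();
		showShortcutsButtonElement?.click(); // opens the shortcuts modal
	}
});
```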
|
||||
|
||||
|
@ -201,8 +202,10 @@
|
|||
|
||||
{#if loaded}
|
||||
<div class=" hidden lg:flex fixed bottom-0 right-0 px-3 py-3 z-10">
|
||||
<Tooltip content="help" placement="left">
|
||||
<button
|
||||
id="show-shortcuts-button"
|
||||
bind:this={showShortcutsButtonElement}
|
||||
class="text-gray-600 dark:text-gray-300 bg-gray-300/20 w-6 h-6 flex items-center justify-center text-xs rounded-full"
|
||||
on:click={() => {
|
||||
showShortcuts = !showShortcuts;
|
||||
|
@ -210,6 +213,7 @@
|
|||
>
|
||||
?
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<ShortcutsModal bind:show={showShortcuts} />
|
||||
|
|
|
@ -42,7 +42,7 @@
|
|||
let stopResponseFlag = false;
|
||||
let autoScroll = true;
|
||||
let processing = '';
|
||||
|
||||
let messagesContainerElement: HTMLDivElement;
|
||||
let currentRequestId = null;
|
||||
|
||||
let selectedModels = [''];
|
||||
|
@ -140,8 +140,7 @@
	};

	const scrollToBottom = () => {
		const element = document.getElementById('messages-container');
		element.scrollTop = element.scrollHeight;
		messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
	};

	//////////////////////////
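Together with the `on:scroll` change further down, the chat view keeps auto-scrolling only while the user is already near the bottom. A condensed sketch of both pieces (grounded in the handlers shown in this diff; variable declarations are repeated here only to keep the snippet self-contained):

```ts
let messagesContainerElement: HTMLDivElement;
let autoScroll = true;

// Jump to the end of the message list.
const scrollToBottom = () => {
	messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
};

// Keep auto-scrolling only while the user is within ~50px of the bottom,
// so scrolling up to read is not overridden by new tokens.
const handleScroll = () => {
	autoScroll =
		messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
		messagesContainerElement.clientHeight + 50;
};
```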
|
||||
|
@ -340,7 +339,7 @@
|
|||
content: $settings.system
|
||||
}
|
||||
: undefined,
|
||||
...messages.filter((message) => !message.deleted)
|
||||
...messages
|
||||
]
|
||||
.filter((message) => message)
|
||||
.map((message, idx, arr) => ({
|
||||
|
@ -548,7 +547,7 @@
|
|||
content: $settings.system
|
||||
}
|
||||
: undefined,
|
||||
...messages.filter((message) => !message.deleted)
|
||||
...messages
|
||||
]
|
||||
.filter((message) => message)
|
||||
.map((message, idx, arr) => ({
|
||||
|
@ -821,8 +820,11 @@
|
|||
<div
|
||||
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
|
||||
id="messages-container"
|
||||
bind:this={messagesContainerElement}
|
||||
on:scroll={(e) => {
|
||||
autoScroll = e.target.scrollHeight - e.target.scrollTop <= e.target.clientHeight + 50;
|
||||
autoScroll =
|
||||
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
|
||||
messagesContainerElement.clientHeight + 50;
|
||||
}}
|
||||
>
|
||||
<div
|
||||
|
@ -830,10 +832,7 @@
|
|||
? 'max-w-full'
|
||||
: 'max-w-2xl md:px-0'} mx-auto w-full px-4"
|
||||
>
|
||||
<ModelSelector
|
||||
bind:selectedModels
|
||||
disabled={messages.length > 0 && !selectedModels.includes('')}
|
||||
/>
|
||||
<ModelSelector bind:selectedModels />
|
||||
</div>
|
||||
|
||||
<div class=" h-full w-full flex flex-col py-8">
|
||||
|
|
|
@ -45,7 +45,7 @@
|
|||
let stopResponseFlag = false;
|
||||
let autoScroll = true;
|
||||
let processing = '';
|
||||
|
||||
let messagesContainerElement: HTMLDivElement;
|
||||
let currentRequestId = null;
|
||||
|
||||
// let chatId = $page.params.id;
|
||||
|
@ -160,8 +160,7 @@
|
|||
};
|
||||
|
||||
const scrollToBottom = () => {
|
||||
const element = document.getElementById('messages-container');
|
||||
element.scrollTop = element.scrollHeight;
|
||||
messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
|
||||
};
|
||||
|
||||
//////////////////////////
|
||||
|
@ -353,7 +352,7 @@
|
|||
content: $settings.system
|
||||
}
|
||||
: undefined,
|
||||
...messages.filter((message) => !message.deleted)
|
||||
...messages
|
||||
]
|
||||
.filter((message) => message)
|
||||
.map((message, idx, arr) => ({
|
||||
|
@ -561,7 +560,7 @@
|
|||
content: $settings.system
|
||||
}
|
||||
: undefined,
|
||||
...messages.filter((message) => !message.deleted)
|
||||
...messages
|
||||
]
|
||||
.filter((message) => message)
|
||||
.map((message, idx, arr) => ({
|
||||
|
@ -852,8 +851,11 @@
|
|||
<div
|
||||
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
|
||||
id="messages-container"
|
||||
bind:this={messagesContainerElement}
|
||||
on:scroll={(e) => {
|
||||
autoScroll = e.target.scrollHeight - e.target.scrollTop <= e.target.clientHeight + 50;
|
||||
autoScroll =
|
||||
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
|
||||
messagesContainerElement.clientHeight + 50;
|
||||
}}
|
||||
>
|
||||
<div
|
||||
|
@ -861,10 +863,7 @@
|
|||
? 'max-w-full'
|
||||
: 'max-w-2xl md:px-0'} mx-auto w-full px-4"
|
||||
>
|
||||
<ModelSelector
|
||||
bind:selectedModels
|
||||
disabled={messages.length > 0 && !selectedModels.includes('')}
|
||||
/>
|
||||
<ModelSelector bind:selectedModels />
|
||||
</div>
|
||||
|
||||
<div class=" h-full w-full flex flex-col py-8">
|
||||
|
|
|
@ -21,7 +21,7 @@
|
|||
|
||||
let inputFiles = '';
|
||||
let query = '';
|
||||
|
||||
let documentsImportInputElement: HTMLInputElement;
|
||||
let tags = [];
|
||||
|
||||
let showSettingsModal = false;
|
||||
|
@ -524,6 +524,7 @@
|
|||
<div class="flex space-x-2">
|
||||
<input
|
||||
id="documents-import-input"
|
||||
bind:this={documentsImportInputElement}
|
||||
bind:files={importFiles}
|
||||
type="file"
|
||||
accept=".json"
|
||||
|
@ -558,9 +559,7 @@
|
|||
|
||||
<button
|
||||
class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
|
||||
on:click={async () => {
|
||||
document.getElementById('documents-import-input')?.click();
|
||||
}}
|
||||
on:click={documentsImportInputElement.click}
|
||||
>
|
||||
<div class=" self-center mr-2 font-medium">Import Documents Mapping</div>
|
||||
|
||||
|
|
|
@ -16,11 +16,14 @@
|
|||
|
||||
let localModelfiles = [];
|
||||
let importFiles;
|
||||
|
||||
let modelfilesImportInputElement: HTMLInputElement;
|
||||
const deleteModelHandler = async (tagName) => {
|
||||
let success = null;
|
||||
|
||||
success = await deleteModel(localStorage.token, tagName);
|
||||
success = await deleteModel(localStorage.token, tagName).catch((err) => {
|
||||
toast.error(err);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (success) {
|
||||
toast.success(`Deleted ${tagName}`);
|
||||
|
@ -235,6 +238,7 @@
|
|||
<div class="flex space-x-1">
|
||||
<input
|
||||
id="modelfiles-import-input"
|
||||
bind:this={modelfilesImportInputElement}
|
||||
bind:files={importFiles}
|
||||
type="file"
|
||||
accept=".json"
|
||||
|
@ -262,9 +266,7 @@
|
|||
|
||||
<button
|
||||
class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
|
||||
on:click={async () => {
|
||||
document.getElementById('modelfiles-import-input')?.click();
|
||||
}}
|
||||
on:click={modelfilesImportInputElement.click}
|
||||
>
|
||||
<div class=" self-center mr-2 font-medium">Import Modelfiles</div>
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
<script>
|
||||
<script lang="ts">
|
||||
import { goto } from '$app/navigation';
|
||||
|
||||
import { onMount, tick } from 'svelte';
|
||||
|
@ -21,15 +21,17 @@
|
|||
|
||||
let mode = 'chat';
|
||||
let loaded = false;
|
||||
|
||||
let text = '';
|
||||
|
||||
let selectedModelId = '';
|
||||
|
||||
let loading = false;
|
||||
let currentRequestId;
|
||||
let currentRequestId = null;
|
||||
let stopResponseFlag = false;
|
||||
|
||||
let messagesContainerElement: HTMLDivElement;
|
||||
let textCompletionAreaElement: HTMLTextAreaElement;
|
||||
|
||||
let system = '';
|
||||
let messages = [
|
||||
{
|
||||
|
@ -39,13 +41,7 @@
|
|||
];
|
||||
|
||||
const scrollToBottom = () => {
|
||||
let element;
|
||||
|
||||
if (mode === 'chat') {
|
||||
element = document.getElementById('messages-container');
|
||||
} else {
|
||||
element = document.getElementById('text-completion-textarea');
|
||||
}
|
||||
const element = mode === 'chat' ? messagesContainerElement : textCompletionAreaElement;
|
||||
|
||||
if (element) {
|
||||
element.scrollTop = element?.scrollHeight;
|
||||
|
@ -96,6 +92,10 @@
|
|||
while (true) {
|
||||
const { value, done } = await reader.read();
|
||||
if (done || stopResponseFlag) {
|
||||
if (stopResponseFlag) {
|
||||
await cancelChatCompletion(localStorage.token, currentRequestId);
|
||||
}
|
||||
|
||||
currentRequestId = null;
|
||||
break;
|
||||
}
|
||||
|
@ -112,10 +112,14 @@
					let data = JSON.parse(line.replace(/^data: /, ''));
					console.log(data);

					if ('request_id' in data) {
						currentRequestId = data.request_id;
					} else {
						text += data.choices[0].delta.content ?? '';
					}
				}
			}
		}
	} catch (error) {
		console.log(error);
	}
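The playground's stream handling boils down to parsing newline-separated `data: {...}` records: the first record carries a `request_id` that is kept so the request can be cancelled, later records carry completion deltas. A condensed sketch of that loop body; the `startsWith` guard and the terminator skip are added assumptions, not lines from the diff:

```ts
let currentRequestId: string | null = null;
let text = '';

const handleStreamedChunk = (value: string) => {
	for (const line of value.split('\n')) {
		// Skip blank lines and a possible terminator record such as `data: [DONE]`.
		if (!line.startsWith('data: ') || line.includes('[DONE]')) continue;

		const data = JSON.parse(line.replace(/^data: /, ''));
		if ('request_id' in data) {
			currentRequestId = data.request_id; // remembered for cancelChatCompletion()
		} else {
			text += data.choices[0].delta.content ?? ''; // append the streamed delta
		}
	}
};
```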
|
||||
|
@ -150,16 +154,6 @@
|
|||
: `${OLLAMA_API_BASE_URL}/v1`
|
||||
);
|
||||
|
||||
// const [res, controller] = await generateChatCompletion(localStorage.token, {
|
||||
// model: selectedModelId,
|
||||
// messages: [
|
||||
// {
|
||||
// role: 'assistant',
|
||||
// content: text
|
||||
// }
|
||||
// ]
|
||||
// });
|
||||
|
||||
let responseMessage;
|
||||
if (messages.at(-1)?.role === 'assistant') {
|
||||
responseMessage = messages.at(-1);
|
||||
|
@ -184,6 +178,11 @@
|
|||
while (true) {
|
||||
const { value, done } = await reader.read();
|
||||
if (done || stopResponseFlag) {
|
||||
if (stopResponseFlag) {
|
||||
await cancelChatCompletion(localStorage.token, currentRequestId);
|
||||
}
|
||||
|
||||
currentRequestId = null;
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -200,6 +199,9 @@
|
|||
let data = JSON.parse(line.replace(/^data: /, ''));
|
||||
console.log(data);
|
||||
|
||||
if ('request_id' in data) {
|
||||
currentRequestId = data.request_id;
|
||||
} else {
|
||||
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
|
||||
continue;
|
||||
} else {
|
||||
|
@ -215,54 +217,13 @@
|
|||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
|
||||
scrollToBottom();
|
||||
}
|
||||
|
||||
// while (true) {
|
||||
// const { value, done } = await reader.read();
|
||||
// if (done || stopResponseFlag) {
|
||||
// if (stopResponseFlag) {
|
||||
// await cancelChatCompletion(localStorage.token, currentRequestId);
|
||||
// }
|
||||
|
||||
// currentRequestId = null;
|
||||
// break;
|
||||
// }
|
||||
|
||||
// try {
|
||||
// let lines = value.split('\n');
|
||||
|
||||
// for (const line of lines) {
|
||||
// if (line !== '') {
|
||||
// console.log(line);
|
||||
// let data = JSON.parse(line);
|
||||
|
||||
// if ('detail' in data) {
|
||||
// throw data;
|
||||
// }
|
||||
|
||||
// if ('id' in data) {
|
||||
// console.log(data);
|
||||
// currentRequestId = data.id;
|
||||
// } else {
|
||||
// if (data.done == false) {
|
||||
// text += data.message.content;
|
||||
// } else {
|
||||
// console.log('done');
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// } catch (error) {
|
||||
// console.log(error);
|
||||
// }
|
||||
|
||||
// scrollToBottom();
|
||||
// }
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -417,12 +378,14 @@
|
|||
<div
|
||||
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0"
|
||||
id="messages-container"
|
||||
bind:this={messagesContainerElement}
|
||||
>
|
||||
<div class=" h-full w-full flex flex-col">
|
||||
<div class="flex-1 p-1">
|
||||
{#if mode === 'complete'}
|
||||
<textarea
|
||||
id="text-completion-textarea"
|
||||
bind:this={textCompletionAreaElement}
|
||||
class="w-full h-full p-3 bg-transparent outline outline-1 outline-gray-200 dark:outline-gray-800 resize-none rounded-lg text-sm"
|
||||
bind:value={text}
|
||||
placeholder="You're a helpful assistant."
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
let importFiles = '';
|
||||
let query = '';
|
||||
|
||||
let promptsImportInputElement: HTMLInputElement;
|
||||
const sharePrompt = async (prompt) => {
|
||||
toast.success('Redirecting you to OpenWebUI Community');
|
||||
|
||||
|
@ -208,6 +208,7 @@
|
|||
<div class="flex space-x-2">
|
||||
<input
|
||||
id="prompts-import-input"
|
||||
bind:this={promptsImportInputElement}
|
||||
bind:files={importFiles}
|
||||
type="file"
|
||||
accept=".json"
|
||||
|
@ -241,9 +242,7 @@
|
|||
|
||||
<button
|
||||
class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
|
||||
on:click={async () => {
|
||||
document.getElementById('prompts-import-input')?.click();
|
||||
}}
|
||||
on:click={promptsImportInputElement.click}
|
||||
>
|
||||
<div class=" self-center mr-2 font-medium">Import Prompts</div>
|
||||
|
||||
|
@ -266,7 +265,7 @@
|
|||
<button
|
||||
class="flex text-xs items-center space-x-1 px-3 py-1.5 rounded-xl bg-gray-50 hover:bg-gray-100 dark:bg-gray-800 dark:hover:bg-gray-700 dark:text-gray-200 transition"
|
||||
on:click={async () => {
|
||||
// document.getElementById('modelfiles-import-input')?.click();
|
||||
// promptsImportInputElement.click();
|
||||
let blob = new Blob([JSON.stringify($prompts)], {
|
||||
type: 'application/json'
|
||||
});
|
||||
|
|