forked from open-webui/open-webui

commit 8651bec915 (parent a41b195f46)
commit message: pwned :)

2 changed files with 17 additions and 2 deletions
Changed file 1 (the litellm sub-app; it is imported below as apps.litellm.main):

@@ -43,20 +43,29 @@ app.add_middleware(
 
 
 async def run_background_process(command):
+    # Start the process
     process = await asyncio.create_subprocess_exec(
         *command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE
     )
-    return process
+    # Read output asynchronously
+    async for line in process.stdout:
+        print(line.decode().strip())  # Print stdout line by line
+
+    await process.wait()  # Wait for the subprocess to finish
 
 
 async def start_litellm_background():
+    print("start_litellm_background")
     # Command to run in the background
     command = "litellm --telemetry False --config ./data/litellm/config.yaml"
+
     await run_background_process(command)
 
 
 @app.on_event("startup")
 async def startup_event():
+
+    print("startup_event")
     # TODO: Check config.yaml file and create one
     asyncio.create_task(start_litellm_background())
 
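Taken on its own, the reworked run_background_process is a small asyncio subprocess helper: instead of returning the process handle, it now spawns the command, streams its stdout line by line, and waits for the process to exit. A minimal standalone sketch of the same pattern, with a harmless "echo hello" standing in for the litellm command so it can be run anywhere:

import asyncio
import subprocess


async def run_background_process(command):
    # Start the process; the command is split naively on whitespace, as in the diff
    process = await asyncio.create_subprocess_exec(
        *command.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    # Read stdout asynchronously, line by line, until the stream closes
    async for line in process.stdout:
        print(line.decode().strip())

    await process.wait()  # Wait for the subprocess to finish


if __name__ == "__main__":
    # "echo hello" is a stand-in for the litellm invocation shown above
    asyncio.run(run_background_process("echo hello"))

Because the coroutine now runs until the subprocess exits, scheduling it with asyncio.create_task rather than awaiting it is what keeps server startup from hanging on the long-running litellm process.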
Changed file 2 (the main app module that mounts the sub-apps):

@@ -20,12 +20,13 @@ from starlette.middleware.base import BaseHTTPMiddleware
 from apps.ollama.main import app as ollama_app
 from apps.openai.main import app as openai_app
 
-from apps.litellm.main import app as litellm_app
+from apps.litellm.main import app as litellm_app, start_litellm_background
 from apps.audio.main import app as audio_app
 from apps.images.main import app as images_app
 from apps.rag.main import app as rag_app
 from apps.web.main import app as webui_app
 
+import asyncio
 from pydantic import BaseModel
 from typing import List
 
@@ -168,6 +169,11 @@ async def check_url(request: Request, call_next):
     return response
 
 
+@app.on_event("startup")
+async def on_startup():
+    asyncio.create_task(start_litellm_background())
+
+
 app.mount("/api/v1", webui_app)
 app.mount("/litellm/api", litellm_app)
 
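The parent app registers its own startup handler rather than relying on the litellm sub-app's: startup events defined on a mounted sub-application are generally not fired by the parent's lifespan, so the parent module imports start_litellm_background and schedules it itself. A minimal sketch of that wiring, assuming FastAPI and using a placeholder coroutine in place of the real start_litellm_background:

import asyncio

from fastapi import FastAPI

app = FastAPI()
litellm_app = FastAPI()  # placeholder for the imported sub-app


async def start_litellm_background():
    # Placeholder body; the real coroutine launches the litellm proxy subprocess
    print("start_litellm_background")


@app.on_event("startup")
async def on_startup():
    # Schedule the background coroutine without blocking application startup
    asyncio.create_task(start_litellm_background())


app.mount("/litellm/api", litellm_app)

Served with an ASGI server such as uvicorn (module name assumed), the print fires once the parent app's startup completes, independently of the mounted sub-app.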