forked from open-webui/open-webui

feat: add LITELLM_PROXY_PORT to configure internal proxy port

parent 86bc0c8c73
commit 0ea9e19d79

2 changed files with 17 additions and 7 deletions
@@ -23,7 +23,12 @@ log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["LITELLM"])
 
 
-from config import MODEL_FILTER_ENABLED, MODEL_FILTER_LIST, DATA_DIR
+from config import (
+    MODEL_FILTER_ENABLED,
+    MODEL_FILTER_LIST,
+    DATA_DIR,
+    LITELLM_PROXY_PORT,
+)
 
 from litellm.utils import get_llm_provider
 
@@ -90,9 +95,7 @@ async def run_background_process(command):
 async def start_litellm_background():
     log.info("start_litellm_background")
     # Command to run in the background
-    command = (
-        "litellm --port 14365 --telemetry False --config ./data/litellm/config.yaml"
-    )
+    command = f"litellm --port {LITELLM_PROXY_PORT} --telemetry False --config ./data/litellm/config.yaml"
 
     await run_background_process(command)
 
@@ -109,7 +112,6 @@ async def shutdown_litellm_background():
 
 @app.on_event("startup")
 async def startup_event():
-
     log.info("startup_event")
     # TODO: Check config.yaml file and create one
     asyncio.create_task(start_litellm_background())
@@ -186,7 +188,7 @@ async def get_models(user=Depends(get_current_user)):
     while not background_process:
         await asyncio.sleep(0.1)
 
-    url = "http://localhost:14365/v1"
+    url = f"http://localhost:{LITELLM_PROXY_PORT}/v1"
     r = None
     try:
         r = requests.request(method="GET", url=f"{url}/models")
@@ -289,7 +291,7 @@ async def delete_model_from_config(
 async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
     body = await request.body()
 
-    url = "http://localhost:14365"
+    url = f"http://localhost:{LITELLM_PROXY_PORT}"
 
     target_url = f"{url}/{path}"
 
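For context, a minimal standalone sketch (not part of the diff) of how the new setting threads through this file: the spawned litellm process and the internal proxy URLs are built from the same LITELLM_PROXY_PORT value, so a single environment variable keeps them in sync. The names mirror the diff; the script itself is illustrative.

```python
# Illustrative sketch: the litellm launch command and the proxy base URL
# are both derived from LITELLM_PROXY_PORT, so one environment variable
# controls every reference to the internal port.
import os

LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))

command = f"litellm --port {LITELLM_PROXY_PORT} --telemetry False --config ./data/litellm/config.yaml"
base_url = f"http://localhost:{LITELLM_PROXY_PORT}"

print(command)                   # e.g. litellm --port 14365 --telemetry False ...
print(f"{base_url}/v1/models")   # e.g. http://localhost:14365/v1/models
```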
@@ -499,3 +499,11 @@ IMAGES_OPENAI_API_KEY = os.getenv("IMAGES_OPENAI_API_KEY", OPENAI_API_KEY)
 
 AUDIO_OPENAI_API_BASE_URL = os.getenv("AUDIO_OPENAI_API_BASE_URL", OPENAI_API_BASE_URL)
 AUDIO_OPENAI_API_KEY = os.getenv("AUDIO_OPENAI_API_KEY", OPENAI_API_KEY)
+
+####################################
+# LiteLLM
+####################################
+
+LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))
+if LITELLM_PROXY_PORT < 0 or LITELLM_PROXY_PORT > 65535:
+    raise ValueError("Invalid port number for LITELLM_PROXY_PORT")
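The config change reads the port from the environment, defaulting to 14365, and rejects out-of-range values at startup. A rough usage sketch, assuming the variable is set before the backend imports its config; the port value below is purely illustrative.

```python
# Illustrative: overriding the internal proxy port via the environment.
import os

os.environ["LITELLM_PROXY_PORT"] = "8365"  # hypothetical custom port

LITELLM_PROXY_PORT = int(os.getenv("LITELLM_PROXY_PORT", "14365"))
if LITELLM_PROXY_PORT < 0 or LITELLM_PROXY_PORT > 65535:
    # Same guard as the new config code: fail fast on an invalid port.
    raise ValueError("Invalid port number for LITELLM_PROXY_PORT")

print(LITELLM_PROXY_PORT)  # 8365
```

A value such as "70000" or "-1" would trip the guard and raise the ValueError before the proxy is ever launched.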
Jun Siang Cheah