forked from open-webui/open-webui

commit f1e0487c00

15 changed files with 919 additions and 83 deletions
				
			
		
							
								
								
									
.github/ISSUE_TEMPLATE/bug_report.md  (38 changes, vendored, new file)
									
								
@@ -0,0 +1,38 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
 - OS: [e.g. iOS]
 - Browser [e.g. chrome, safari]
 - Version [e.g. 22]

**Smartphone (please complete the following information):**
 - Device: [e.g. iPhone6]
 - OS: [e.g. iOS8.1]
 - Browser [e.g. stock browser, safari]
 - Version [e.g. 22]

**Additional context**
Add any other context about the problem here.
							
								
								
									
.github/ISSUE_TEMPLATE/feature_request.md  (20 changes, vendored, new file)
									
								
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
							
								
								
									
.github/workflows/node.js.yaml  (2 changes, vendored)
									
									
								
@@ -6,6 +6,8 @@ on:
jobs:
  build:
    name: 'Fmt, Lint, & Build'
    env:
      PUBLIC_API_BASE_URL: ''
    runs-on: ubuntu-latest
    strategy:
      matrix:
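
The new `env` entry gives the CI build an explicit (empty) `PUBLIC_API_BASE_URL`, so the static build falls back to the browser-side default defined in `$lib/constants`. A minimal local equivalent, sketched with the README's illustrative URL:

```sh
# build with the empty default; the browser resolves http://{hostname}:11434/api at runtime
PUBLIC_API_BASE_URL='' npm run build

# or bake a fixed API base URL into the static build instead
PUBLIC_API_BASE_URL='https://example.com/api' npm run build
```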
							
								
								
									
Caddyfile.localhost  (64 changes, new file)
									
								
@@ -0,0 +1,64 @@
# Run with
#    caddy run --envfile ./example.env --config ./Caddyfile.localhost
#
# This is configured for
#    - Automatic HTTPS (even for localhost)
#    - Reverse Proxying to Ollama API Base URL (http://localhost:11434/api)
#    - CORS
#    - HTTP Basic Auth API Tokens (uncomment basicauth section)


# CORS Preflight (OPTIONS) + Request (GET, POST, PATCH, PUT, DELETE)
(cors-api) {
	@match-cors-api-preflight method OPTIONS
	handle @match-cors-api-preflight {
		header {
			Access-Control-Allow-Origin "{http.request.header.origin}"
			Access-Control-Allow-Methods "GET, POST, PUT, PATCH, DELETE, OPTIONS"
			Access-Control-Allow-Headers "Origin, Accept, Authorization, Content-Type, X-Requested-With"
			Access-Control-Allow-Credentials "true"
			Access-Control-Max-Age "3600"
			defer
		}
		respond "" 204
	}

	@match-cors-api-request {
		not {
			header Origin "{http.request.scheme}://{http.request.host}"
		}
		header Origin "{http.request.header.origin}"
	}
	handle @match-cors-api-request {
		header {
			Access-Control-Allow-Origin "{http.request.header.origin}"
			Access-Control-Allow-Methods "GET, POST, PUT, PATCH, DELETE, OPTIONS"
			Access-Control-Allow-Headers "Origin, Accept, Authorization, Content-Type, X-Requested-With"
			Access-Control-Allow-Credentials "true"
			Access-Control-Max-Age "3600"
			defer
		}
	}
}

# replace localhost with example.com or whatever
localhost {
	## HTTP Basic Auth
	## (uncomment to enable)
	# basicauth {
	# 	# see .example.env for how to generate tokens
	# 	{env.OLLAMA_API_ID} {env.OLLAMA_API_TOKEN_DIGEST}
	# }

	handle /api/* {
		# Comment to disable CORS
		import cors-api

		reverse_proxy localhost:11434
	}

	# Same-Origin Static Web Server
	file_server {
		root ./build/
	}
}
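
Once Caddy is running with this file, the `cors-api` snippet can be sanity-checked with a preflight request. A rough sketch (the origin value is illustrative, and `-k` is only needed while the local Caddy CA is untrusted):

```sh
# send a CORS preflight to the proxied Ollama API and inspect the response headers
curl -ik -X OPTIONS \
  -H 'Origin: http://localhost:5173' \
  -H 'Access-Control-Request-Method: GET' \
  https://localhost/api/tags
# expect a 204 response with Access-Control-Allow-Origin echoing the Origin header
```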
							
								
								
									
Dockerfile  (11 changes)
									
										
									
									
									
								
@@ -1,15 +1,20 @@
# syntax=docker/dockerfile:1

FROM node:latest

WORKDIR /app

ARG OLLAMA_API_BASE_URL=''
RUN echo $OLLAMA_API_BASE_URL

ENV ENV prod

ENV PUBLIC_API_BASE_URL $OLLAMA_API_BASE_URL
RUN echo $PUBLIC_API_BASE_URL

COPY package.json package-lock.json ./
RUN npm ci


COPY . .
RUN npm run build

CMD [ "node", "./build/index.js"]
CMD [ "npm", "run", "start"]
							
								
								
									
README.md  (68 changes)
									
										
									
									
									
								
@@ -7,15 +7,25 @@ ChatGPT-Style Web Interface for Ollama 🦙
## Features ⭐

- 🖥️ **Intuitive Interface**: Our chat interface takes inspiration from ChatGPT, ensuring a user-friendly experience.

- 📱 **Responsive Design**: Enjoy a seamless experience on both desktop and mobile devices.

- ⚡ **Swift Responsiveness**: Enjoy fast and responsive performance.

- 🚀 **Effortless Setup**: Install seamlessly using Docker for a hassle-free experience.

- 🤖 **Multiple Model Support**: Seamlessly switch between different chat models for diverse interactions.

- 📜 **Chat History**: Effortlessly access and manage your conversation history.

- 📤📥 **Import/Export Chat History**: Seamlessly move your chat data in and out of the platform.

- ⚙️ **Fine-Tuned Control with Advanced Parameters**: Gain a deeper level of control by adjusting parameters such as temperature and defining your system prompts to tailor the conversation to your specific preferences and needs.

- 💻 **Code Syntax Highlighting**: Enjoy enhanced code readability with our syntax highlighting feature.
- 🔗 **External Ollama Server Connection**: Link to the model when Ollama is hosted on a different server via the environment variable -e OLLAMA_ENDPOINT="http://[insert your Ollama address]".

- 🔗 **External Ollama Server Connection**: You can seamlessly connect to an external Ollama server hosted on a different address by setting the environment variable during the Docker build process. Execute the following command to include the Ollama API base URL in the Docker image: `docker build --build-arg OLLAMA_API_BASE_URL='http://localhost:11434/api' -t ollama-webui .`.

- 🌟 **Continuous Updates**: We are committed to improving Ollama Web UI with regular updates and new features.

## How to Install 🚀
@@ -38,22 +48,67 @@ OLLAMA_HOST=0.0.0.0 OLLAMA_ORIGINS=* ollama serve

### Using Docker 🐳

If Ollama is hosted on your local machine, run the following command:

```bash
docker build -t ollama-webui .
docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
docker build --build-arg OLLAMA_API_BASE_URL='' -t ollama-webui .
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
```

Your Ollama Web UI should now be hosted at [http://localhost:3000](http://localhost:3000). Enjoy! 😄

#### Connecting to Ollama on a Different Server

If Ollama is hosted on a server other than your local machine, you can connect to it using the following environment variable:
If Ollama is hosted on a server other than your local machine, change `OLLAMA_API_BASE_URL` to match:

```bash
docker build -t ollama-webui .
docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway -e OLLAMA_ENDPOINT="http://[insert your ollama url]" --name ollama-webui --restart always ollama-webui
docker build --build-arg OLLAMA_API_BASE_URL='https://example.com/api' -t ollama-webui .
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
```

## How to Build for Static Deployment

1. Install `node`

   ```sh
   # Mac, Linux
   curl https://webi.sh/node@lts | sh
   source ~/.config/envman/PATH.env
   ```

   ```pwsh
   # Windows
   curl.exe https://webi.ms/node@lts | powershell
   ```

2. Clone & Enter the project
   ```sh
   git clone https://github.com/ollama-webui/ollama-webui.git
   pushd ./ollama-webui/
   ```
3. Create and edit `.env`
   ```sh
   cp -RPp example.env .env
   ```
4. Run in dev mode, or build the site for deployment
   - Test in Dev mode:
     ```sh
     npm run dev
     ```
   - Build for Deploy: \
     (`PUBLIC_API_BASE_URL` will overwrite the value in `.env`)
     ```sh
     PUBLIC_API_BASE_URL='https://example.com/api' npm run build
     ```
5. Test the build with `caddy` (or the server of your choice)

   ```sh
   curl https://webi.sh/caddy | sh

   PUBLIC_API_BASE_URL='https://localhost/api' npm run build
   caddy run --envfile .env --config ./Caddyfile.localhost
   ```

## What's Next? 🚀

### To-Do List 📝
@@ -76,6 +131,7 @@ A big shoutout to our amazing contributors who have helped make this project pos

- [Ollama Team](https://github.com/jmorganca/ollama)
- [Timothy J. Baek](https://github.com/tjbck)
- [AJ ONeal](https://github.com/coolaj86)

## License 📜
							
								
								
									
example.env  (8 changes, new file)
									
								
@@ -0,0 +1,8 @@
# must be defined, but defaults to 'http://{location.hostname}:11434/api'
# can also use path, such as '/api'
PUBLIC_API_BASE_URL=''

OLLAMA_API_ID='my-api-token'
OLLAMA_API_TOKEN='xxxxxxxxxxxxxxxx'
# generated by passing the token to `caddy hash-password`
OLLAMA_API_TOKEN_DIGEST='$2a$14$iyyuawykR92xTHNR9lWzfu.uCct/9/xUPX3zBqLqrjAu0usNRPbyi'
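
As the comment notes, the digest is produced by Caddy's password hasher; a minimal sketch, using the placeholder token from this file rather than a real secret:

```sh
# bcrypt-hash the API token for the (optional) basicauth block in Caddyfile.localhost
caddy hash-password --plaintext 'xxxxxxxxxxxxxxxx'
# paste the printed hash into OLLAMA_API_TOKEN_DIGEST in .env
```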
							
								
								
									
package-lock.json  (668 changes, generated)

File diff suppressed because it is too large.
											
										
									
								
package.json

@@ -3,6 +3,7 @@
	"version": "0.0.1",
	"private": true,
	"scripts": {
		"start": "http-server ./build",
		"dev": "vite dev --host",
		"build": "vite build",
		"preview": "vite preview",
@@ -17,6 +18,7 @@
	},
	"devDependencies": {
		"@sveltejs/adapter-auto": "^2.0.0",
		"@sveltejs/adapter-static": "^2.0.3",
		"@sveltejs/kit": "^1.20.4",
		"@typescript-eslint/eslint-plugin": "^6.0.0",
		"@typescript-eslint/parser": "^6.0.0",
@@ -39,6 +41,7 @@
		"@sveltejs/adapter-node": "^1.3.1",
		"file-saver": "^2.0.5",
		"highlight.js": "^11.9.0",
		"http-server": "^14.1.1",
		"idb": "^7.1.1",
		"marked": "^9.1.0",
		"svelte-french-toast": "^1.2.0",
							
								
								
									
run.sh  (2 changes)
									
										
									
									
									
								
@@ -1,5 +1,5 @@
docker stop ollama-webui || true
docker rm ollama-webui || true
docker build -t ollama-webui .
docker run -d -p 3000:3000 --add-host=host.docker.internal:host-gateway --name ollama-webui --restart always ollama-webui
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
docker image prune -f
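
Because `run.sh` builds without the `OLLAMA_API_BASE_URL` build argument, the image keeps the empty default and the browser falls back to `http://{hostname}:11434/api`. If the script is pointed at a remote Ollama instead, the build line presumably needs the same argument the README documents; a sketch with an illustrative URL:

```sh
docker build --build-arg OLLAMA_API_BASE_URL='https://example.com/api' -t ollama-webui .
docker run -d -p 3000:8080 --name ollama-webui --restart always ollama-webui
```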
src/lib/constants.ts

@@ -1,7 +1,19 @@
import { browser, dev } from '$app/environment';
import { browser } from '$app/environment';
import { PUBLIC_API_BASE_URL } from '$env/static/public';

export const ENDPOINT = browser
	? `http://${location.hostname}:11434`
	: dev
	? 'http://127.0.0.1:11434'
	: 'http://host.docker.internal:11434';
export const API_BASE_URL =
	PUBLIC_API_BASE_URL === ''
		? browser
			? `http://${location.hostname}:11434/api`
			: `http://localhost:11434/api`
		: PUBLIC_API_BASE_URL;

// Source: https://kit.svelte.dev/docs/modules#$env-static-public
// This feature, akin to $env/static/private, exclusively incorporates environment variables
// that are prefixed with config.kit.env.publicPrefix (usually set to PUBLIC_).
// Consequently, these variables can be securely exposed to client-side code.

// Example of the .env configuration:
// OLLAMA_API_BASE_URL="http://localhost:11434/api"
// # Public
// PUBLIC_API_BASE_URL=$OLLAMA_API_BASE_URL
							
								
								
									
src/routes/+layout.js  (16 changes, new file)
									
								
@@ -0,0 +1,16 @@
// if you want to generate a static html file
// for your page.
// Documentation: https://kit.svelte.dev/docs/page-options#prerender
export const prerender = true;

// if you want to Generate a SPA
// you have to set ssr to false.
// This is not the case (so set as true or comment the line)
// Documentation: https://kit.svelte.dev/docs/page-options#ssr
// export const ssr = false;

// How to manage the trailing slashes in the URLs
// the URL for about page witll be /about with 'ignore' (default)
// the URL for about page witll be /about/ with 'always'
// https://kit.svelte.dev/docs/page-options#trailingslash
export const trailingSlash = 'ignore';
src/routes/+page.server.ts  (deleted)

@@ -1,30 +0,0 @@
import { ENDPOINT } from '$lib/constants';
import type { PageServerLoad } from './$types';

export const load: PageServerLoad = async ({ url }) => {
	const OLLAMA_ENDPOINT = process.env.OLLAMA_ENDPOINT;
	console.log(OLLAMA_ENDPOINT);
	const models = await fetch(
		`${OLLAMA_ENDPOINT != undefined ? OLLAMA_ENDPOINT : ENDPOINT}/api/tags`,
		{
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json'
			}
		}
	)
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((error) => {
			console.log(error);
			return null;
		});

	return {
		models: models?.models ?? [],
		OLLAMA_ENDPOINT: process.env.OLLAMA_ENDPOINT
	};
};
src/routes/+page.svelte

@@ -7,20 +7,15 @@
	const { saveAs } = fileSaver;
	import hljs from 'highlight.js';
	import 'highlight.js/styles/dark.min.css';

	import type { PageData } from './$types';
	import { ENDPOINT as SERVER_ENDPOINT } from '$lib/constants';
	import { API_BASE_URL } from '$lib/constants';
	import { onMount, tick } from 'svelte';
	import { page } from '$app/stores';
	const suggestions = $page.url.searchParams.get('suggestions');

	import Navbar from '$lib/components/layout/Navbar.svelte';
	import SettingsModal from '$lib/components/chat/SettingsModal.svelte';

	export let data: PageData;
	$: ({ models, OLLAMA_ENDPOINT } = data);
	let suggestions = ''; // $page.url.searchParams.get('suggestions');

	let ENDPOINT;
	let models = [];
	let textareaElement;
	let showSettings = false;
	let db;
@@ -36,10 +31,25 @@
	let messages = [];

	onMount(async () => {
		ENDPOINT = OLLAMA_ENDPOINT ? OLLAMA_ENDPOINT : SERVER_ENDPOINT;
		console.log(OLLAMA_ENDPOINT);
		console.log(SERVER_ENDPOINT);
		console.log(ENDPOINT);
		console.log(API_BASE_URL);
		const res = await fetch(`${API_BASE_URL}/tags`, {
			method: 'GET',
			headers: {
				Accept: 'application/json',
				'Content-Type': 'application/json'
			}
		})
			.then(async (res) => {
				if (!res.ok) throw await res.json();
				return res.json();
			})
			.catch((error) => {
				console.log(error);
				return { models: [] };
			});

		const data = res;
		models = data.models;

		let settings = localStorage.getItem('settings');
		if (settings) {
@@ -267,7 +277,7 @@
			messages = [...messages, responseMessage];
			window.scrollTo({ top: document.body.scrollHeight });

			const res = await fetch(`${ENDPOINT}/api/generate`, {
			const res = await fetch(`${API_BASE_URL}/generate`, {
				method: 'POST',
				headers: {
					'Content-Type': 'text/event-stream'
@@ -363,7 +373,7 @@
			messages = [...messages, responseMessage];
			window.scrollTo({ top: document.body.scrollHeight });

			const res = await fetch(`${ENDPOINT}/api/generate`, {
			const res = await fetch(`${API_BASE_URL}/generate`, {
				method: 'POST',
				headers: {
					'Content-Type': 'text/event-stream'
@@ -443,7 +453,7 @@
	const generateTitle = async (user_prompt) => {
		console.log('generateTitle');

		const res = await fetch(`${ENDPOINT}/api/generate`, {
		const res = await fetch(`${API_BASE_URL}/generate`, {
			method: 'POST',
			headers: {
				'Content-Type': 'text/event-stream'
svelte.config.js

@@ -1,4 +1,4 @@
import adapter from '@sveltejs/adapter-node';
import adapter from '@sveltejs/adapter-static';
import { vitePreprocess } from '@sveltejs/kit/vite';

/** @type {import('@sveltejs/kit').Config} */
@@ -11,7 +11,11 @@ const config = {
		// adapter-auto only supports some environments, see https://kit.svelte.dev/docs/adapter-auto for a list.
		// If your environment is not supported or you settled on a specific environment, switch out the adapter.
		// See https://kit.svelte.dev/docs/adapters for more information about adapters.
		adapter: adapter()
		adapter: adapter({
			pages: 'build',
			assets: 'build',
			fallback: null
		})
	}
};
Timothy Jaeryang Baek