from fastapi import (
    FastAPI,
    Depends,
    HTTPException,
    status,
    UploadFile,
    File,
    Form,
)
from fastapi.middleware.cors import CORSMiddleware

import os
import shutil

from pathlib import Path
from typing import List, Optional

from sentence_transformers import SentenceTransformer
from chromadb.utils import embedding_functions

from langchain_community.document_loaders import (
    WebBaseLoader,
    TextLoader,
    PyPDFLoader,
    CSVLoader,
    Docx2txtLoader,
    UnstructuredEPubLoader,
    UnstructuredWordDocumentLoader,
    UnstructuredMarkdownLoader,
    UnstructuredXMLLoader,
    UnstructuredRSTLoader,
    UnstructuredExcelLoader,
)
from langchain.text_splitter import RecursiveCharacterTextSplitter

from pydantic import BaseModel

import mimetypes
import uuid
import json

from apps.web.models.documents import (
    Documents,
    DocumentForm,
    DocumentResponse,
)

from apps.rag.utils import query_doc, query_collection

from utils.misc import (
    calculate_sha256,
    calculate_sha256_string,
    sanitize_filename,
    extract_folders_after_data_docs,
)
from utils.utils import get_current_user, get_admin_user
from config import (
    UPLOAD_DIR,
    DOCS_DIR,
    RAG_EMBEDDING_MODEL,
    RAG_EMBEDDING_MODEL_DEVICE_TYPE,
    CHROMA_CLIENT,
    CHUNK_SIZE,
    CHUNK_OVERLAP,
    RAG_TEMPLATE,
)

from constants import ERROR_MESSAGES

# if RAG_EMBEDDING_MODEL:
#     sentence_transformer_ef = SentenceTransformer(
#         model_name_or_path=RAG_EMBEDDING_MODEL,
#         cache_folder=RAG_EMBEDDING_MODEL_DIR,
#         device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
#     )


app = FastAPI()

app.state.PDF_EXTRACT_IMAGES = False
app.state.CHUNK_SIZE = CHUNK_SIZE
app.state.CHUNK_OVERLAP = CHUNK_OVERLAP
app.state.RAG_TEMPLATE = RAG_TEMPLATE
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
app.state.TOP_K = 4

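# The SentenceTransformer embedding function below is handed to Chroma when
# collections are created (store_docs_in_vector_db) and to the query helpers
# (query_doc / query_collection); /embedding/model/update rebuilds it whenever
# the embedding model changes.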
app.state.sentence_transformer_ef = (
    embedding_functions.SentenceTransformerEmbeddingFunction(
        model_name=app.state.RAG_EMBEDDING_MODEL,
        device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
    )
)


origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class CollectionNameForm(BaseModel):
    collection_name: Optional[str] = "test"


class StoreWebForm(CollectionNameForm):
    url: str


@app.get("/")
async def get_status():
    return {
        "status": True,
        "chunk_size": app.state.CHUNK_SIZE,
        "chunk_overlap": app.state.CHUNK_OVERLAP,
        "template": app.state.RAG_TEMPLATE,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
    }


@app.get("/embedding/model")
async def get_embedding_model(user=Depends(get_admin_user)):
    return {
        "status": True,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
    }


class EmbeddingModelUpdateForm(BaseModel):
    embedding_model: str


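# Illustrative call (a sketch only: it assumes this sub-app is mounted under
# /rag on localhost:8080, that $TOKEN holds a valid admin bearer token, and
# uses an example model name):
#
#   curl -X POST http://localhost:8080/rag/embedding/model/update \
#        -H "Authorization: Bearer $TOKEN" \
#        -H "Content-Type: application/json" \
#        -d '{"embedding_model": "all-MiniLM-L6-v2"}'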
@app.post("/embedding/model/update")
async def update_embedding_model(
    form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
):
    app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model
    app.state.sentence_transformer_ef = (
        embedding_functions.SentenceTransformerEmbeddingFunction(
            model_name=app.state.RAG_EMBEDDING_MODEL,
            device=RAG_EMBEDDING_MODEL_DEVICE_TYPE,
        )
    )

    return {
        "status": True,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
    }


@app.get("/config")
async def get_rag_config(user=Depends(get_admin_user)):
    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": {
            "chunk_size": app.state.CHUNK_SIZE,
            "chunk_overlap": app.state.CHUNK_OVERLAP,
        },
    }


class ChunkParamUpdateForm(BaseModel):
    chunk_size: int
    chunk_overlap: int


class ConfigUpdateForm(BaseModel):
    pdf_extract_images: bool
    chunk: ChunkParamUpdateForm


@app.post("/config/update")
async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)):
    app.state.PDF_EXTRACT_IMAGES = form_data.pdf_extract_images
    app.state.CHUNK_SIZE = form_data.chunk.chunk_size
    app.state.CHUNK_OVERLAP = form_data.chunk.chunk_overlap

    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": {
            "chunk_size": app.state.CHUNK_SIZE,
            "chunk_overlap": app.state.CHUNK_OVERLAP,
        },
    }


@app.get("/template")
async def get_rag_template(user=Depends(get_current_user)):
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
    }


@app.get("/query/settings")
async def get_query_settings(user=Depends(get_admin_user)):
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
        "k": app.state.TOP_K,
    }


class QuerySettingsForm(BaseModel):
    k: Optional[int] = None
    template: Optional[str] = None


@app.post("/query/settings/update")
async def update_query_settings(
    form_data: QuerySettingsForm, user=Depends(get_admin_user)
):
    app.state.RAG_TEMPLATE = form_data.template if form_data.template else RAG_TEMPLATE
    app.state.TOP_K = form_data.k if form_data.k else 4
    return {"status": True, "template": app.state.RAG_TEMPLATE}


class QueryDocForm(BaseModel):
    collection_name: str
    query: str
    k: Optional[int] = None


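# Queries a single Chroma collection with the configured embedding function,
# falling back to app.state.TOP_K when the request does not specify k; any
# failure is surfaced as a 400 with the default error message.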
@app.post("/query/doc")
def query_doc_handler(
    form_data: QueryDocForm,
    user=Depends(get_current_user),
):
    try:
        return query_doc(
            collection_name=form_data.collection_name,
            query=form_data.query,
            k=form_data.k if form_data.k else app.state.TOP_K,
            embedding_function=app.state.sentence_transformer_ef,
        )
    except Exception as e:
        print(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


class QueryCollectionsForm(BaseModel):
    collection_names: List[str]
    query: str
    k: Optional[int] = None


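# Queries several collections in one request; unlike the single-collection
# handler above, exceptions are not caught here.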
@app.post("/query/collection")
def query_collection_handler(
    form_data: QueryCollectionsForm,
    user=Depends(get_current_user),
):
    return query_collection(
        collection_names=form_data.collection_names,
        query=form_data.query,
        k=form_data.k if form_data.k else app.state.TOP_K,
        embedding_function=app.state.sentence_transformer_ef,
    )


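# Fetches a web page with WebBaseLoader and stores its chunks in the vector DB.
# When the collection name is empty, one is derived from the SHA-256 of the URL,
# truncated to 63 characters to stay within Chroma's collection-name length limit.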
@app.post("/web")
def store_web(form_data: StoreWebForm, user=Depends(get_current_user)):
    # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
    try:
        loader = WebBaseLoader(form_data.url)
        data = loader.load()

        collection_name = form_data.collection_name
        if collection_name == "":
            collection_name = calculate_sha256_string(form_data.url)[:63]

        store_data_in_vector_db(data, collection_name, overwrite=True)
        return {
            "status": True,
            "collection_name": collection_name,
            "filename": form_data.url,
        }
    except Exception as e:
        print(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )


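# store_data_in_vector_db and store_text_in_vector_db split their input with the
# app-level chunk settings and delegate persistence to store_docs_in_vector_db.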
def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool:
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )
    docs = text_splitter.split_documents(data)
    return store_docs_in_vector_db(docs, collection_name, overwrite)


def store_text_in_vector_db(
    text, metadata, collection_name, overwrite: bool = False
) -> bool:
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )
    docs = text_splitter.create_documents([text], metadatas=[metadata])
    return store_docs_in_vector_db(docs, collection_name, overwrite)


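# Writes the chunked documents into a Chroma collection. With overwrite=True an
# existing collection of the same name is deleted first; a UniqueConstraintError
# (collection already exists) is treated as success.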
def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> bool:
    texts = [doc.page_content for doc in docs]
    metadatas = [doc.metadata for doc in docs]

    try:
        if overwrite:
            for collection in CHROMA_CLIENT.list_collections():
                if collection_name == collection.name:
                    print(f"deleting existing collection {collection_name}")
                    CHROMA_CLIENT.delete_collection(name=collection_name)

        collection = CHROMA_CLIENT.create_collection(
            name=collection_name,
            embedding_function=app.state.sentence_transformer_ef,
        )

        collection.add(
            documents=texts, metadatas=metadatas, ids=[str(uuid.uuid1()) for _ in texts]
        )
        return True
    except Exception as e:
        print(e)
        if e.__class__.__name__ == "UniqueConstraintError":
            return True

        return False


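# Maps a file extension / MIME type to a LangChain document loader. Source-code
# and plain-text extensions use TextLoader; anything unrecognized also falls
# back to TextLoader, with known_type=False so callers can flag it.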
def get_loader(filename: str, file_content_type: str, file_path: str):
    file_ext = filename.split(".")[-1].lower()
    known_type = True

    known_source_ext = [
        "go",
        "py",
        "java",
        "sh",
        "bat",
        "ps1",
        "cmd",
        "js",
        "ts",
        "css",
        "cpp",
        "hpp",
        "h",
        "c",
        "cs",
        "sql",
        "log",
        "ini",
        "pl",
        "pm",
        "r",
        "dart",
        "dockerfile",
        "env",
        "php",
        "hs",
        "hsc",
        "lua",
        "nginxconf",
        "conf",
        "m",
        "mm",
        "plsql",
        "perl",
        "rb",
        "rs",
        "db2",
        "scala",
        "bash",
        "swift",
        "vue",
        "svelte",
    ]

    if file_ext == "pdf":
        loader = PyPDFLoader(file_path, extract_images=app.state.PDF_EXTRACT_IMAGES)
    elif file_ext == "csv":
        loader = CSVLoader(file_path)
    elif file_ext == "rst":
        loader = UnstructuredRSTLoader(file_path, mode="elements")
    elif file_ext == "xml":
        loader = UnstructuredXMLLoader(file_path)
    elif file_ext == "md":
        loader = UnstructuredMarkdownLoader(file_path)
    elif file_content_type == "application/epub+zip":
        loader = UnstructuredEPubLoader(file_path)
    elif (
        file_content_type
        == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        or file_ext in ["doc", "docx"]
    ):
        loader = Docx2txtLoader(file_path)
    elif file_content_type in [
        "application/vnd.ms-excel",
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    ] or file_ext in ["xls", "xlsx"]:
        loader = UnstructuredExcelLoader(file_path)
    elif file_ext in known_source_ext or (
        file_content_type and file_content_type.find("text/") >= 0
    ):
        loader = TextLoader(file_path, autodetect_encoding=True)
    else:
        loader = TextLoader(file_path, autodetect_encoding=True)
        known_type = False

    return loader, known_type


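# Accepts a multipart file upload, writes it to UPLOAD_DIR, derives a collection
# name from the file's SHA-256 when none is given, then loads, chunks and stores
# it. An illustrative call (a sketch only: it assumes the sub-app is mounted
# under /rag on localhost:8080, $TOKEN is a valid bearer token, and manual.pdf
# is an example file):
#
#   curl -X POST http://localhost:8080/rag/doc \
#        -H "Authorization: Bearer $TOKEN" \
#        -F "file=@manual.pdf"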
@app.post("/doc")
def store_doc(
    collection_name: Optional[str] = Form(None),
    file: UploadFile = File(...),
    user=Depends(get_current_user),
):
    # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"

    print(file.content_type)
    try:
        filename = file.filename
        file_path = f"{UPLOAD_DIR}/{filename}"
        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)

        f = open(file_path, "rb")
        if collection_name is None:
            collection_name = calculate_sha256(f)[:63]
        f.close()

        loader, known_type = get_loader(file.filename, file.content_type, file_path)
        data = loader.load()
        result = store_data_in_vector_db(data, collection_name)

        if result:
            return {
                "status": True,
                "collection_name": collection_name,
                "filename": filename,
                "known_type": known_type,
            }
        else:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=ERROR_MESSAGES.DEFAULT(),
            )
    except Exception as e:
        print(e)
        if "No pandoc was found" in str(e):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
            )
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT(e),
            )


class TextRAGForm(BaseModel):
    name: str
    content: str
    collection_name: Optional[str] = None


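# Stores raw text (rather than an uploaded file) in the vector DB, tagging the
# chunks with the submitting user's id and the supplied name.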
@app.post("/text")
def store_text(
    form_data: TextRAGForm,
    user=Depends(get_current_user),
):
    collection_name = form_data.collection_name
    if collection_name is None:
        collection_name = calculate_sha256_string(form_data.content)

    result = store_text_in_vector_db(
        form_data.content,
        metadata={"name": form_data.name, "created_by": user.id},
        collection_name=collection_name,
    )

    if result:
        return {"status": True, "collection_name": collection_name}
    else:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(),
        )


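# Walks DOCS_DIR, indexes every non-hidden file into a collection named after
# the first 63 characters of its SHA-256 hash, and registers a Documents record
# whose tags are derived from the folder structure under the docs directory.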
@app.get("/scan")
def scan_docs_dir(user=Depends(get_admin_user)):
    for path in Path(DOCS_DIR).rglob("./**/*"):
        try:
            if path.is_file() and not path.name.startswith("."):
                tags = extract_folders_after_data_docs(path)
                filename = path.name
                file_content_type = mimetypes.guess_type(path)

                f = open(path, "rb")
                collection_name = calculate_sha256(f)[:63]
                f.close()

                loader, known_type = get_loader(
                    filename, file_content_type[0], str(path)
                )
                data = loader.load()

                result = store_data_in_vector_db(data, collection_name)

                if result:
                    sanitized_filename = sanitize_filename(filename)
                    doc = Documents.get_doc_by_name(sanitized_filename)

                    if doc is None:
                        doc = Documents.insert_new_doc(
                            user.id,
                            DocumentForm(
                                **{
                                    "name": sanitized_filename,
                                    "title": filename,
                                    "collection_name": collection_name,
                                    "filename": filename,
                                    "content": (
                                        json.dumps(
                                            {
                                                "tags": list(
                                                    map(
                                                        lambda name: {"name": name},
                                                        tags,
                                                    )
                                                )
                                            }
                                        )
                                        if len(tags)
                                        else "{}"
                                    ),
                                }
                            ),
                        )

        except Exception as e:
            print(e)

    return True


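# Destructive admin endpoints: /reset/db wipes the Chroma database only, while
# /reset also deletes everything under UPLOAD_DIR.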
@app.get("/reset/db")
def reset_vector_db(user=Depends(get_admin_user)):
    CHROMA_CLIENT.reset()


@app.get("/reset")
def reset(user=Depends(get_admin_user)) -> bool:
    folder = f"{UPLOAD_DIR}"
    for filename in os.listdir(folder):
        file_path = os.path.join(folder, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as e:
            print("Failed to delete %s. Reason: %s" % (file_path, e))

    try:
        CHROMA_CLIENT.reset()
    except Exception as e:
        print(e)

    return True