"""XBO product assistant: interactive CLI for PDF-backed product Q&A."""
import asyncio
|
|
import sys
|
|
from app.services.dependencies import get_file_service, get_faiss_service
|
|
from app.config import get_config
|
|
from langchain_openai import ChatOpenAI
|
|
from langchain.schema import Document
|
|
from langchain.prompts import PromptTemplate
|
|
|
|
|
|
async def main():
    """
    Entry point for the XBO product assistant.

    Loads configuration and services, then either builds a fresh FAISS
    index from the product PDFs or loads a previously persisted one.
    Afterwards it runs an interactive REPL: each user question is
    answered by GPT-4o using MMR-retrieved document context.

    Exits with status 1 if no PDFs are found or index creation/loading
    fails.
    """
    config = get_config(env_name="development")
    file_service = get_file_service(config=config)

    openai_api_key = config.OPENAI_API_KEY
    create_faiss_index = config.CREATE_FAISS_INDEX
    print("Create FAISS Index: ", create_faiss_index)

    # NOTE(review): fixed misspelled greeting ("Wilkommen" -> "Willkommen").
    print("Willkommen zum XBO Kaufberater!")
    faiss_service = get_faiss_service(openai_api_key)

    try:
        if create_faiss_index:
            print("[INFO] Creating a new FAISS index...")
            pdfs = file_service.load_pdfs()
            if not pdfs:
                print("[ERROR] No PDFs found.")
                sys.exit(1)

            # Wrap each PDF's extracted text in a Document, tagging the
            # source path so answers can be traced back to a file.
            all_documents = []
            for pdf in pdfs:
                print(f"Processing PDF: {pdf}")
                text = file_service.extract_text_from_pdf(pdf)
                all_documents.append(
                    Document(page_content=text, metadata={"source": pdf})
                )

            vectorstore = faiss_service.create_faiss_index(all_documents)
        else:
            vectorstore = faiss_service.load_faiss_index()
    except Exception as e:
        # Index setup is fatal: nothing to answer from without a store.
        print(f"[ERROR] {e}")
        sys.exit(1)

    llm = ChatOpenAI(model="gpt-4o", openai_api_key=openai_api_key)
    # MMR retrieval trades pure similarity for diversity among the k hits.
    retriever = vectorstore.as_retriever(search_kwargs={"k": 21}, search_type="mmr")

    # The prompt template is constant across queries; build it once
    # instead of reconstructing it on every loop iteration.
    prompt = PromptTemplate(
        template="""
        Du bist ein Assistent, der Fragen zu Produktinformationen beantwortet.

        Kontext:
        {context}

        Frage:
        {question}

        Antwort:
        """,
        input_variables=["context", "question"],
    )

    while True:
        user_input = input("\nWas möchten Sie wissen? (type 'exit' to quit): ").strip()
        if user_input.lower() == "exit":
            print("Auf Wiedersehen!")
            break

        try:
            print("[INFO] Retrieving relevant documents...")
            docs = retriever.invoke(user_input)

            if not docs:
                print("\n[ANSWER]: Keine passenden Informationen gefunden.")
                continue

            context = "\n\n".join(doc.page_content for doc in docs)
            response = llm.invoke(prompt.format(context=context, question=user_input))

            print("\n[ANSWER]:")
            print(response.content)
        except Exception as e:
            # Per-query failures should not kill the REPL; report and
            # prompt again.
            print(f"[ERROR] Failed to process query: {e}")
|
|
|
|
|
|
# Script entry point: drive the async assistant with a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
|