|
import os
import getpass  # NOTE(review): appears unused in this chunk — confirm before removing
import sys
from pathlib import Path
from operator import itemgetter  # NOTE(review): appears unused in this chunk — confirm before removing

from dotenv import load_dotenv

from lets_talk.config import (CREATE_VECTOR_DB,VECTOR_STORAGE_PATH)

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file
# before any configuration-dependent code below runs.
load_dotenv()
|
|
|
# Startup guard for the vector store: either (re)build it when configured to,
# or verify an existing store is present and abort the app when it is not.
if not CREATE_VECTOR_DB:
    # Verification path: fail fast if the persisted store is missing.
    print("=== check vector db ===")
    store_path = Path(VECTOR_STORAGE_PATH)
    if not store_path.exists():
        print(f"Vector store not found at {VECTOR_STORAGE_PATH}. Please create it first.")
        sys.exit(1)
else:
    # Creation path: import lazily so the pipeline module is only loaded
    # when a rebuild is actually requested.
    import pipeline

    print("=== create vector db ===")
    pipeline.create_vector_database()
    print("========================")
|
|
|
# Chainlit UI framework and the project's agent utilities are imported only
# after the vector-store check above, so a missing store fails fast.
import chainlit as cl

from lets_talk.agent import build_agent,parse_output

# Single shared agent instance, built once at module load and handed to each
# chat session in setup_chain().
tdg_agent = build_agent()
|
|
|
|
|
@cl.on_chat_start
async def setup_chain():
    """Initialise a new chat session.

    Ensures an OpenAI API key is available (prompting the user when the
    environment does not already provide one), greets the user, and stores
    the shared module-level agent in the session for the message handler.
    """
    if not os.environ.get("OPENAI_API_KEY"):
        # No key in the environment: ask the user for one and persist it so
        # later OpenAI calls in this process can find it.
        prompt = cl.AskUserMessage(
            content="Please enter your OpenAI API Key:",
            timeout=60,
            raise_on_timeout=True,
        )
        reply = await prompt.send()
        os.environ["OPENAI_API_KEY"] = reply.content

    greeting = cl.Message(
        content="Let's talk about [TheDataGuy](https://thedataguy.pro)'s blog posts, how can I help you?",
        author="System",
    )
    await greeting.send()

    # Make the shared agent available to this session's on_message handler.
    cl.user_session.set("agent", tdg_agent)
|
|
|
|
|
|
|
|
|
|
|
@cl.on_message
async def on_message(message: cl.Message):
    """Respond to a user message.

    Runs the session's agent on the incoming question inside a visible UI
    step, parses the agent's raw output, and streams the answer back.

    Args:
        message: The incoming chat message from the user.
    """
    agent_executor = cl.user_session.get("agent")

    # Empty message that the final answer will be streamed into.
    response = cl.Message(content="")

    # Execute the agent inside a step so the UI shows agent activity.
    with cl.Step(name="Agent"):
        result = await agent_executor.ainvoke(
            {"question": message.content},
        )

    # Extract the displayable answer from the agent's result and deliver it.
    answer = parse_output(result)
    await response.stream_token(answer)
    await response.send()
|
|
|
|