Rohit108 committed on
Commit
6d35820
·
verified ·
1 Parent(s): 391a292

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +152 -89
app.py CHANGED
@@ -1,89 +1,152 @@
1
- import sqlite3
2
- import pandas as pd
3
- import openai
4
- import os
5
- import streamlit as st
6
- import datetime
7
-
8
# Configure the OpenAI API key from the environment.
# SECURITY: never hard-code a secret in source. The previous revision both
# committed a live key AND passed the key itself as the *name* of the
# environment variable to os.getenv(), so the getenv() call always returned
# None and the literal fallback key was used. Read the conventional
# OPENAI_API_KEY variable instead; the leaked key must be revoked.
openai.api_key = os.getenv("OPENAI_API_KEY")
10
-
11
# Seed records: (owner, contact, monthly price, for_sale flag, city,
# layout, furnishing level).
house_data = [
    ("Alice", "1234567890", 15000, True, "Delhi", "2BHK", "Furnished"),
    ("Bob", "9876543210", 45000, False, "Mumbai", "3BHK", "Semi Furnished"),
    ("Charlie", "5556667777", 30000, True, "Pune", "4BHK", "Non Furnished"),
    ("David", "4445556666", 25000, True, "Jaipur", "1BHK", "Furnished"),
    ("Eve", "3332221111", 40000, False, "Ahmedabad", "3BHK", "Semi Furnished"),
]

# Column names matching the tuple layout above.
_columns = [
    "owner_name",
    "contact",
    "price",
    "for_sale",
    "location",
    "house_type",
    "house_details",
]

# Build the DataFrame and persist it as CSV (consumed again further down
# when the SQLite table is populated).
df = pd.DataFrame(house_data, columns=_columns)
df.to_csv("houses.csv", index=False)
23
-
24
# Connect to the SQLite database (the file is created on first use).
conn = sqlite3.connect("houses.db")
cursor = conn.cursor()

# Load the records from CSV and (re)create the `houses` table from them.
# NOTE: `if_exists="replace"` drops any existing table and recreates it
# using the DataFrame's own schema. The hand-written
# `CREATE TABLE IF NOT EXISTS houses (id INTEGER PRIMARY KEY ...)` that
# preceded this in the earlier revision was therefore dead code — its
# schema (including the `id` column) was discarded immediately by the
# replace — so it has been removed to stop it from misleading readers.
df = pd.read_csv("houses.csv")
df.to_sql("houses", conn, if_exists="replace", index=False)
conn.commit()
46
-
47
# Function to retrieve all rows from the SQLite database.
def retrieve_data():
    """Return every row of the `houses` table as a pandas DataFrame.

    Column names are taken from the live `cursor.description` instead of a
    hard-coded list, so the function stays correct if the table schema
    changes. (The previous revision's comment wrongly claimed an `id`
    column was included — `to_sql(if_exists="replace")` recreates the
    table without one — while hard-coding the other seven names.)
    """
    cursor.execute("SELECT * FROM houses")
    rows = cursor.fetchall()
    # cursor.description is a 7-tuple per column; element 0 is the name.
    columns = [desc[0] for desc in cursor.description]
    return pd.DataFrame(rows, columns=columns)
53
-
54
# Chatbot: grounds the LLM on the current database contents.
def chatbot(query):
    """Answer *query* using the house records as in-prompt context.

    The whole table is serialized into the prompt, so this only scales to
    small datasets; the model is asked to answer strictly from that data.
    """
    records = retrieve_data().to_string(index=False)
    prompt = (
        f"Given the following real estate records:\n{records}\n\n"
        f"Answer the user's query based on the provided data.\n\n"
        f"User Query: {query}\nResponse:"
    )

    messages = [
        {"role": "system", "content": "You are an AI assistant that uses the provided database records to answer user queries."},
        {"role": "user", "content": prompt},
    ]
    completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages)
    return completion["choices"][0]["message"]["content"]
68
-
69
# --- Streamlit page ---
st.title("Real Estate Chatbot")

# Show the current inventory straight from the database.
st.write("### Available Houses")
st.dataframe(retrieve_data())

query = st.text_input("Ask about available houses:")
if st.button("Ask Chatbot"):
    if not query:
        # Guard clause: nothing to ask.
        st.warning("Please enter a query.")
    else:
        answer = chatbot(query)
        # Timestamp the response so reruns are distinguishable.
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        st.write("### User Query:")
        st.write(query)
        st.write(f"### Chatbot Response (Generated on {timestamp}):")
        st.write(answer)

# Streamlit re-executes the script per interaction, so the connection is
# reopened above on every run; close it at the end of this run.
conn.close()
89
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, Sequence, TypedDict
2
+ import functools
3
+ import operator
4
+ from bertopic import BERTopic
5
+ from langchain.agents import AgentExecutor, create_openai_tools_agent
6
+ from langchain_core.messages import BaseMessage, HumanMessage
7
+ from langchain_openai import ChatOpenAI
8
+ from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
9
+ from langchain_core.output_parsers.openai_functions import JsonOutputFunctionsParser
10
+ from langgraph.graph import END, StateGraph
11
+ from langchain_community.tools.tavily_search import TavilySearchResults
12
+ from langchain_experimental.tools import PythonREPLTool
13
+
14
# Shared tools handed to the worker agents.
tavily_tool = TavilySearchResults(max_results=5)  # web search, top-5 results
python_repl_tool = PythonREPLTool()  # executes model-generated Python code

# Load the pre-trained BERTopic model from the local "topic_model" path;
# used by the Viewer agent to classify review-style messages.
# NOTE(review): loading happens at import time — confirm the artifact is
# shipped alongside this file.
topic_model = BERTopic.load("topic_model")
20
+
21
# Factory for tool-using worker agents.
def create_agent(llm: ChatOpenAI, tools: list, system_prompt: str):
    """Build an AgentExecutor for an OpenAI-tools agent.

    The prompt carries the worker's system instructions, the running
    conversation ("messages"), and the scratchpad slot the tools agent
    requires ("agent_scratchpad").
    """
    chat_prompt = ChatPromptTemplate.from_messages([
        ("system", system_prompt),
        MessagesPlaceholder(variable_name="messages"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ])
    worker = create_openai_tools_agent(llm, tools, chat_prompt)
    return AgentExecutor(agent=worker, tools=tools)
33
+
34
# Adapter that turns an AgentExecutor into a LangGraph node.
def agent_node(state, agent, name):
    """Run *agent* on *state* and wrap its output as a message from *name*."""
    output = agent.invoke(state)["output"]
    return {"messages": [HumanMessage(content=output, name=name)]}
38
+
39
# Viewer agent: classifies the latest message with the BERTopic model.
def viewer_agent(state):
    """Label the most recent message's topic and reply as "Viewer".

    Runs BERTopic.transform on the last message's content and looks up the
    predicted topic id in topic_labels_.
    """
    review = state["messages"][-1].content
    topic_id = topic_model.transform([review])[0][0]
    # transform can yield an id with no entry in topic_labels_ (e.g. the
    # -1 outlier topic); the previous .get(index) then returned None and
    # HumanMessage(content=None) was constructed. Fall back to readable text.
    label = topic_model.topic_labels_.get(topic_id, "No matching topic found.")
    return {"messages": [HumanMessage(content=label, name="Viewer")]}
45
+
46
# Shared state object passed between graph nodes.
class AgentState(TypedDict):
    # Conversation history; Annotated with operator.add so LangGraph
    # appends each node's new messages instead of overwriting the list.
    messages: Annotated[Sequence[BaseMessage], operator.add]
    # Name of the next worker to run (or "FINISH"), set by the supervisor.
    next: str
50
+
51
# LLM used by the supervisor (and, below, by the worker agents).
llm = ChatOpenAI(model="gpt-4-1106-preview")

# System prompt instructing the supervisor how to route between workers.
system_prompt = (
    "You are a supervisor tasked with managing a conversation between the following workers: Researcher, Coder, Viewer. "
    "Given the following user request, respond with the worker to act next. Each worker will perform a task and respond with their results and status. "
    "When finished, respond with FINISH. If the request seems like a product review or sentiment analysis, route it to the Viewer."
)

# Routing choices the supervisor may emit; "FINISH" terminates the graph.
options = ["FINISH", "Researcher", "Coder", "Viewer"]
63
+
64
# OpenAI function-calling schema that forces the supervisor's reply into a
# structured routing decision: {"next": <one of `options`>}.
function_def = {
    "name": "route",
    "description": "Select the next role.",
    "parameters": {
        "title": "routeSchema",
        "type": "object",
        "properties": {
            "next": {
                "title": "Next",
                # Constrain the value to the declared routing options.
                "anyOf": [
                    {"enum": options},
                ],
            }
        },
        "required": ["next"],
    },
}
82
+
83
# Supervisor prompt: system instructions, the running conversation, then a
# closing system nudge asking who should act next.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", system_prompt),
        MessagesPlaceholder(variable_name="messages"),
        (
            "system",
            "Given the conversation above, who should act next? Or should we FINISH? Select one of: {options}",
        ),
    ]
).partial(options=str(options), members="Researcher, Coder, Viewer")
# NOTE(review): only {options} appears in the template; the `members`
# partial variable looks unused — confirm before removing it.

# Supervisor chain: prompt -> LLM constrained to call the `route` function
# -> parser that yields the {"next": ...} dict merged into graph state.
supervisor_chain = (
    prompt
    | llm.bind_functions(functions=[function_def], function_call="route")
    | JsonOutputFunctionsParser()
)
101
+
102
# Worker agents and their graph-node adapters.
research_agent = create_agent(llm, [tavily_tool], "You are a web researcher.")
research_node = functools.partial(agent_node, agent=research_agent, name="Researcher")

code_agent = create_agent(
    llm,
    [python_repl_tool],
    "You may generate safe python code to analyze data and generate charts using matplotlib.",
)
code_node = functools.partial(agent_node, agent=code_agent, name="Coder")

# viewer_agent already has the node signature (takes only `state`), so it
# is used directly; the previous `functools.partial(viewer_agent)` bound no
# arguments and was a pointless wrapper.
viewer_node = viewer_agent
114
+
115
# Assemble the state graph: one supervisor node plus three workers.
members = ["Researcher", "Coder", "Viewer"]

workflow = StateGraph(AgentState)
workflow.add_node("supervisor", supervisor_chain)
workflow.add_node("Researcher", research_node)
workflow.add_node("Coder", code_node)
workflow.add_node("Viewer", viewer_node)

# Every worker reports back to the supervisor when it finishes.
for worker in members:
    workflow.add_edge(worker, "supervisor")

# The supervisor's {"next": ...} decision picks the next node; "FINISH"
# maps to the END sentinel and terminates the run.
routing = {name: name for name in members}
routing["FINISH"] = END
workflow.add_conditional_edges("supervisor", lambda state: state["next"], routing)

# Execution always starts with the supervisor.
workflow.set_entry_point("supervisor")
134
+
135
# Compile the graph into a runnable.
graph = workflow.compile()

# Smoke test: stream the graph on a sample request and print each step.
# NOTE: this executes at import time, exactly as in the original script.
initial_state = {
    "messages": [
        HumanMessage(content="write a report of gopal who worked in 3 k technologies")
    ]
}
for step in graph.stream(initial_state):
    if "__end__" not in step:
        print(step)
        print("----")
149
+
150
+
151
+
152
+