Commit: 98b8ff7
Parent(s): 035c095
appointment receptionist usecases

Files changed:
- .vscode/launch.json +16 -0
- app.py +67 -0
- configfile.ini +6 -0
- configfile.py +20 -0
- requirements.txt +7 -0
- src/LLMS/groqllm.py +21 -0
- src/LLMS/hfllm.py +0 -0
- src/langgraphagent/caller_agent.py +92 -0
- src/streamlitui/loadui.py +55 -0
- src/tools/langgraphtool.py +48 -0
.vscode/launch.json
ADDED
@@ -0,0 +1,16 @@
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "debug streamlit",
            "type": "debugpy",
            "request": "launch",
            "program": "./.venv/Lib/site-packages/streamlit", // /home/xx/tmp/venv/bin/streamlit",
            "args": [
                "run",
                "app.py"
            ],
            "justMyCode": false
        }
    ]
}
app.py
ADDED
@@ -0,0 +1,67 @@
import streamlit as st

from configfile import Config
from src.streamlitui.loadui import LoadStreamlitUI
from src.LLMS.groqllm import GroqLLM

from src.langgraphagent.caller_agent import Caller_Agent
from langchain_core.messages import HumanMessage
from src.tools.langgraphtool import APPOINTMENTS


def submit_message(model):
    # caller agent
    obj_caller_agent = Caller_Agent(model)
    return obj_caller_agent.receive_message_from_caller(st.session_state["message"])


# MAIN Function START

if __name__ == "__main__":
    # config
    obj_config = Config()
    # load ui
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # is_add_message_to_history = st.session_state["chat_with_history"]

    if user_input['selected_usecase'] == "Appointment Receptionist":
        if st.chat_input("Type message here", key="message"):
            # Configure LLM
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()
            CONVERSATION = submit_message(model)

            col1, col2 = st.columns(2)
            with col1:
                for message in CONVERSATION:
                    if type(message) == HumanMessage:
                        with st.chat_message("user"):
                            st.write(message.content)
                    else:
                        with st.chat_message("assistant"):
                            st.write(message.content)

            with col2:
                st.header("Appointments")
                st.write(APPOINTMENTS)
configfile.ini
ADDED
@@ -0,0 +1,6 @@
[DEFAULT]
PAGE_TITLE = Langgraph IN ACTION
LLM_OPTIONS = Groq
USECASE_OPTIONS = Appointment Receptionist
GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
configfile.py
ADDED
@@ -0,0 +1,20 @@
from configparser import ConfigParser


class Config:
    def __init__(self, config_file="configfile.ini"):
        self.config = ConfigParser()
        self.config.read(config_file)

    def get_llm_options(self):
        return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")

    def get_usecase_options(self):
        return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")

    def get_groq_model_options(self):
        return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")

    def get_page_title(self):
        return self.config["DEFAULT"].get("PAGE_TITLE")
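For reference, a minimal sketch (not part of the commit) of how this Config class is consumed, assuming it is run from the repository root so configfile.ini is found:

from configfile import Config

cfg = Config()                       # reads configfile.ini from the working directory
print(cfg.get_page_title())          # "Langgraph IN ACTION"
print(cfg.get_groq_model_options())  # comma-separated options split into a list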
requirements.txt
ADDED
@@ -0,0 +1,7 @@
streamlit==1.41.1
groq==0.13.1
langgraph
langchain_community
langchain_openai
langchain_core
langchain_groq
src/LLMS/groqllm.py
ADDED
@@ -0,0 +1,21 @@
import os

import streamlit as st
from langchain_groq import ChatGroq


class GroqLLM:
    def __init__(self, user_controls_input):
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        try:
            groq_api_key = self.user_controls_input['GROQ_API_KEY']
            selected_groq_model = self.user_controls_input['selected_groq_model']
            # Use .get() so a missing environment variable does not raise KeyError
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", "") == '':
                st.error("Please enter the Groq API key")
            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)

        except Exception as e:
            raise ValueError(f"Error occurred with exception: {e}")
        return llm
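A rough standalone sketch of how GroqLLM is expected to be called; the dictionary keys match what loadui.py collects, and the key/model values below are placeholders, not real credentials:

from src.LLMS.groqllm import GroqLLM

user_controls = {
    "GROQ_API_KEY": "gsk_...",                # placeholder API key
    "selected_groq_model": "llama3-8b-8192",  # one of the options in configfile.ini
}
model = GroqLLM(user_controls_input=user_controls).get_llm_model()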
src/LLMS/hfllm.py
ADDED
File without changes
src/langgraphagent/caller_agent.py
ADDED
@@ -0,0 +1,92 @@
from langchain_core.prompts import ChatPromptTemplate
from langgraph.graph import StateGraph, END, MessagesState
import datetime
from src.tools.langgraphtool import book_appointment, get_next_available_appointment, cancel_appointment
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import ToolNode
from langchain_core.messages import HumanMessage
from src.LLMS.groqllm import GroqLLM

CONVERSATION = []


class Caller_Agent:
    def __init__(self, model):
        self.llm = model

    # Nodes
    def call_caller_model(self, state: MessagesState):
        state["current_time"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        response = self.caller_model.invoke(state)
        return {"messages": [response]}

    # Edges
    def should_continue_caller(self, state: MessagesState):
        messages = state["messages"]
        last_message = messages[-1]
        if not last_message.tool_calls:
            return "end"
        else:
            return "continue"

    def call_tool(self):
        caller_tools = [book_appointment, get_next_available_appointment, cancel_appointment]
        tool_node = ToolNode(caller_tools)

        caller_pa_prompt = """You are a personal assistant, and need to help the user to book or cancel appointments, you should check the available appointments before booking anything. Be extremely polite, so much so that it is almost rude.
        Current time: {current_time}
        """

        caller_chat_template = ChatPromptTemplate.from_messages([
            ("system", caller_pa_prompt),
            ("placeholder", "{messages}"),
        ])

        self.caller_model = caller_chat_template | self.llm.bind_tools(caller_tools)

        # Graph
        caller_workflow = StateGraph(MessagesState)

        # Add Nodes
        caller_workflow.add_node("agent", self.call_caller_model)
        caller_workflow.add_node("action", tool_node)

        # Add Edges
        caller_workflow.add_conditional_edges(
            "agent",
            self.should_continue_caller,
            {
                "continue": "action",
                "end": END,
            },
        )
        caller_workflow.add_edge("action", "agent")

        # Set Entry Point and build the graph
        caller_workflow.set_entry_point("agent")

        self.caller_app = caller_workflow.compile()

    # Invoke model
    def receive_message_from_caller(self, message):
        CONVERSATION.append(HumanMessage(content=message, type="human"))
        state = {
            "messages": CONVERSATION,
        }
        print(state)
        self.call_tool()
        new_state = self.caller_app.invoke(state)
        CONVERSATION.extend(new_state["messages"][len(CONVERSATION):])
        return CONVERSATION
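A minimal sketch of driving the agent outside Streamlit (assumes a valid Groq key; the values below are placeholders):

from src.LLMS.groqllm import GroqLLM
from src.langgraphagent.caller_agent import Caller_Agent

user_controls = {"GROQ_API_KEY": "gsk_...", "selected_groq_model": "llama3-8b-8192"}  # placeholders
model = GroqLLM(user_controls_input=user_controls).get_llm_model()

agent = Caller_Agent(model)
conversation = agent.receive_message_from_caller("When is the next available appointment?")
for msg in conversation:
    print(type(msg).__name__, ":", msg.content)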
src/streamlitui/loadui.py
ADDED
@@ -0,0 +1,55 @@
import streamlit as st
from configfile import Config  # Import the Config class
import os


class LoadStreamlitUI:
    def __init__(self):
        self.config = Config()  # Create a Config instance
        self.user_controls = {}

    def load_streamlit_ui(self):
        st.set_page_config(page_title="🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())

        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key", type="password")

            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            # Agent description about role
            self.user_controls['agent_descriptions'] = st.text_input("Enter the description about an agent", placeholder='e.g. You are a helpful assistant that always responds in a polite, upbeat and positive manner.')
            st.session_state["chat_with_history"] = st.sidebar.toggle("Chat With History")
            self.user_controls['num_history_responses'] = 0
            if st.session_state["chat_with_history"]:
                self.user_controls['num_history_responses'] = st.number_input("Enter number of history messages to include from the last chat", placeholder="e.g. 1", step=1)

        if self.user_controls['selected_usecase'] == "Appointment Receptionist":
            col1, col2 = st.columns(2)

            with col1:
                st.subheader("Appointment Manager")

            with col2:
                st.subheader("Appointments")

        return self.user_controls
src/tools/langgraphtool.py
ADDED
@@ -0,0 +1,48 @@
from langchain_core.tools import tool
import datetime


APPOINTMENTS = []

@tool
def get_next_available_appointment():
    """Returns the next available appointment"""
    current_time = datetime.datetime.now()
    return f"One appointment available at {current_time + datetime.timedelta(minutes=(30 - current_time.minute % 30))}"

@tool
def book_appointment(appointment_year: int, appointment_month: int, appointment_day: int, appointment_hour: int, appointment_minute: int, appointment_name: str):
    """Book an appointment at the given time, you must know the exact time to book

    Args:
        appointment_year: The year of the appointment
        appointment_month: The month of the appointment
        appointment_day: The day of the appointment
        appointment_hour: The hour of the appointment
        appointment_minute: The minute of the appointment
        appointment_name: The name of the person booking the appointment
    """
    time = datetime.datetime(appointment_year, appointment_month, appointment_day, appointment_hour, appointment_minute)
    for appointment in APPOINTMENTS:
        # Appointments are stored as dicts, so index by key rather than attribute access
        if appointment["time"] >= time and appointment["time"] < time + datetime.timedelta(minutes=30):
            return f"Appointment at {time} is already booked"
    APPOINTMENTS.append({"time": time, "name": appointment_name})
    return f"Appointment booked for {time}"

@tool
def cancel_appointment(appointment_year: int, appointment_month: int, appointment_day: int, appointment_hour: int, appointment_minute: int):
    """Cancel the appointment at the given time

    Args:
        appointment_year: The year of the appointment
        appointment_month: The month of the appointment
        appointment_day: The day of the appointment
        appointment_hour: The hour of the appointment
        appointment_minute: The minute of the appointment
    """
    time = datetime.datetime(appointment_year, appointment_month, appointment_day, appointment_hour, appointment_minute)
    for appointment in APPOINTMENTS:
        if appointment["time"] == time:
            APPOINTMENTS.remove(appointment)
            return f"Appointment at {time} cancelled"
    return f"No appointment found at {time}"
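Since these are @tool-decorated functions (LangChain BaseTool instances), they can be exercised directly with .invoke() for a quick check outside the agent; the date and name below are illustrative values only:

from src.tools.langgraphtool import (
    APPOINTMENTS,
    book_appointment,
    cancel_appointment,
    get_next_available_appointment,
)

print(get_next_available_appointment.invoke({}))
print(book_appointment.invoke({
    "appointment_year": 2025, "appointment_month": 1, "appointment_day": 15,
    "appointment_hour": 10, "appointment_minute": 0, "appointment_name": "Alice",
}))
print(APPOINTMENTS)  # now contains the booked slot as a {"time": ..., "name": ...} dict
print(cancel_appointment.invoke({
    "appointment_year": 2025, "appointment_month": 1, "appointment_day": 15,
    "appointment_hour": 10, "appointment_minute": 0,
}))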