import streamlit as st
from configfile import Config
from src.graph.graph_builder import GraphBuilder
from src.streamlitui.loadui import LoadStreamlitUI
from src.LLMS.groqllm import GroqLLM
from src.langgraphagent.caller_agent import Caller_Agent
from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
from src.tools.langgraphtool import APPOINTMENTS

def submit_message(model):
    # Forward the latest user message to the caller agent and return its result
    obj_caller_agent = Caller_Agent(model)
    return obj_caller_agent.receive_message_from_caller(st.session_state["message"])

# MAIN Function START
if __name__ == "__main__":
    # config
    obj_config = Config()

    # load ui
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()
    graph_display = ''
    # is_add_message_to_history = st.session_state["chat_with_history"]

    if user_input['selected_usecase'] == "Appointment Receptionist":
        if st.chat_input("Type message here", key="message"):
            # Configure LLM
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()
            CONVERSATION, APPOINTMENTS, graph_display = submit_message(model)

            col1, col2 = st.columns(2)
            with col1:
                for message in CONVERSATION:
                    if isinstance(message, HumanMessage):
                        with st.chat_message("user"):
                            st.write(message.content)
                    else:
                        with st.chat_message("assistant"):
                            st.write(message.content)
            with col2:
                st.header("Appointments")
                st.write(APPOINTMENTS)

    elif user_input['selected_usecase'] == "Customer Support":
        # Imports only needed for the customer support use case
        from src.csbot.customer_support_chatbot import Customer_Support_Bot
        from langchain_core.messages import AIMessage, HumanMessage
        from src.tools.customer_support_tools import customers_database, data_protection_checks

        st.subheader('Flower Shop Chatbot 🌸')

        if 'message_history' not in st.session_state:
            st.session_state.message_history = [AIMessage(content="Hiya, I'm the flower shop chatbot. How can I help?")]

        main_col, right_col = st.columns([2, 1])

        # 1. Buttons for chat - Clear Button
        with st.sidebar:
            if st.button('Clear Chat'):
                st.session_state.message_history = []

        # 2. Chat history and input
        with main_col:
            user_message = st.chat_input("Type here...")
            if user_message:
                st.session_state.message_history.append(HumanMessage(content=user_message))
                obj_llm_config = GroqLLM(user_controls_input=user_input)
                llm = obj_llm_config.get_llm_model()
                obj_cs_bot = Customer_Support_Bot(llm=llm)
                app = obj_cs_bot.chat_bot()
                response = app.invoke({
                    'messages': st.session_state.message_history
                })
                st.session_state.message_history = response['messages']

            for i in range(1, len(st.session_state.message_history) + 1):
                this_message = st.session_state.message_history[-i]
                if isinstance(this_message, AIMessage):
                    message_box = st.chat_message('assistant')
                else:
                    message_box = st.chat_message('user')
                message_box.markdown(this_message.content)

        # 3. State variables
        with right_col:
            st.title('customers database')
            st.write(customers_database)
            st.title('data protection checks')
            st.write(data_protection_checks)

    else:
        # Basic Examples - chatbot and chatbot with tool
        # Text input for user message
        user_message = st.chat_input("Enter your message:")
        if user_message:
            # Configure LLM
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            # Initialize and set up the graph based on use case
            usecase = user_input['selected_usecase']
            graph_builder = GraphBuilder(model)
            graph_display = graph = graph_builder.setup_graph(usecase)

            # Prepare state and invoke the graph
            initial_state = {"messages": [user_message]}
            entry_points = {"Basic Chatbot": "chatbot", "Chatbot with Tool": "chatbot_with_tool"}

            if usecase == "Basic Chatbot":
                for event in graph.stream({'messages': [("user", user_message)]}):
                    print(event.values())
                    for value in event.values():
                        print(value['messages'])
                        with st.chat_message("user"):
                            st.write(user_message)
                        with st.chat_message("assistant"):
                            st.write(value["messages"].content)
            else:
                res = graph.invoke(initial_state)
                for message in res['messages']:
                    if isinstance(message, HumanMessage):
                        with st.chat_message("user"):
                            st.write(message.content)
                    elif isinstance(message, ToolMessage):
                        with st.chat_message("ai"):
                            st.write("Tool Call Start")
                            st.write(message.content)
                            st.write("Tool Call End")
                    elif isinstance(message, AIMessage) and message.content:
                        with st.chat_message("assistant"):
                            st.write(message.content)

    # display graph
    if graph_display:
        st.write('state graph workflow')
        st.image(graph_display.get_graph(xray=True).draw_mermaid_png())