import streamlit as st
import json
from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.LLMS.geminillm import GeminiLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder
from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
# MAIN Function START
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.
    This function initializes the UI, handles user input, configures the LLM model,
    sets up the graph based on the selected use case, and displays the output while
    implementing exception handling for robustness.
    """
    # Load UI
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message
    if st.session_state.IsFetchButtonClicked:
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")
    if user_message:
        try:
            # Configure LLM
            model = None  # ensure `model` is defined even if no supported provider is selected
            if user_input.get('selected_llm') == 'Groq':
                obj_llm_config = GroqLLM(user_controls_input=user_input)
                model = obj_llm_config.get_groq_llm_model()
            elif user_input.get('selected_llm') == 'Gemini':
                obj_llm_config = GeminiLLM(user_controls_input=user_input)
                model = obj_llm_config.get_gemini_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return
            # Initialize and set up the graph based on the selected use case
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return

            ### Graph Builder
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            raise ValueError(f"Error occurred with exception: {e}")