import os

import streamlit as st
from dotenv import load_dotenv
import google.generativeai as gen_ai

# Load environment variables
load_dotenv()

# Configure Streamlit page settings
st.set_page_config(
    page_title="Smart Waste Management System",
    page_icon="♻️",
    layout="centered",
)

# Retrieve the Google API key from the environment
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Check if the API key is loaded
if not GOOGLE_API_KEY:
    st.error("🚨 API key not found! Please set the GOOGLE_API_KEY in your .env file.")
    st.stop()
# Configure the Generative AI client and load the model
try:
    gen_ai.configure(api_key=GOOGLE_API_KEY)
    model = gen_ai.GenerativeModel("gemini-1.5-pro")  # Gemini 1.5 Pro
except Exception as e:
    st.error(f"❌ Error initializing the Gemini model: {e}")
    st.stop()
# Initialize the chat session if not already present in session state
if "chat_session" not in st.session_state:
    try:
        st.session_state.chat_session = model.start_chat(history=[])
    except Exception as e:
        st.error(f"❌ Error initializing chat session: {e}")
        st.stop()
# Display the app's title
st.title("♻️ Smart Waste Management System")

# Introduction and instructions
st.markdown(
    """
Welcome to the **Smart Waste Management System**! This tool helps **citizens, municipal workers,
recycling companies, and biogas plants** collaborate efficiently for **better waste management**.

### **🌟 Key Features**
- **Citizens:** Report waste collection issues and track garbage pickup.
- **Municipal Workers:** Manage schedules and coordinate garbage segregation.
- **Recycling Companies:** View and respond to requests for plastic waste.
- **Biogas Plants:** Manage bio-waste for energy production.
"""
)
# User role selection
user_role = st.selectbox(
    "🔹 Select Your Role:",
    ["Citizen", "Municipal Worker", "Recycling Company", "Biogas Plant"],
)

# Chat input
user_prompt = st.chat_input(f"💬 [{user_role}] Enter your query or task...")
if user_prompt:
    # Display the user's message
    st.chat_message("user").markdown(f"**{user_role}:** {user_prompt}")

    # Generate a role-specific prompt
    role_specific_prompt = (
        f"You are assisting a {user_role} in a smart waste management system. "
        f"The user says: {user_prompt}"
    )

    # Send the prompt to Gemini and get the response
    try:
        gemini_response = st.session_state.chat_session.send_message(role_specific_prompt)

        # Display the model's response
        with st.chat_message("assistant"):
            st.markdown(gemini_response.text)
    except Exception as e:
        st.error(f"❌ Error processing your message: {e}")
# Sidebar Information
st.sidebar.title("📌 About")
st.sidebar.markdown(
    """
The **Smart Waste Management System** aims to:
- 🏡 **Improve waste collection efficiency** for citizens.
- 🚛 **Help municipal workers** manage schedules.
- 🔄 **Assist recycling companies** in waste processing.
- ⚡ **Support biogas plants** in energy production.

💡 **Need Help?** Use the chat to ask questions!
"""
)
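
# ---------------------------------------------------------------------------
# Usage sketch (assumptions: the script is saved as app.py; the package names
# below are the standard PyPI distributions for the imports above).
#
# .env (placed next to the script, as referenced by load_dotenv above):
#     GOOGLE_API_KEY=your-google-api-key
#
# Install the dependencies and start the app:
#     pip install streamlit python-dotenv google-generativeai
#     streamlit run app.py
# ---------------------------------------------------------------------------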