import streamlit as st
import pandas as pd
import plotly.express as px
from datetime import datetime, timedelta
from simple_salesforce import Salesforce
from transformers import pipeline
from utils import fetch_salesforce_data, detect_anomalies, generate_pdf_report
import os
import logging
# Configure logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
# Streamlit app configuration
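# st.set_page_config must be the first Streamlit command executed in the
# script, which is why it runs before anything else below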
try:
    st.set_page_config(page_title="LabOps Dashboard", layout="wide")
    logger.info("Streamlit page configuration set successfully.")
except Exception as e:
    logger.error(f"Failed to set Streamlit page configuration: {e}")
    raise

# Cache Salesforce connection
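# st.cache_resource keeps one live connection per server process, so script
# reruns reuse the same Salesforce session instead of re-authenticating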
@st.cache_resource
def init_salesforce():
    logger.info("Initializing Salesforce connection...")
    try:
        # Environment variables take precedence; Streamlit secrets are the fallback
        sf = Salesforce(
            username=os.getenv("SF_USERNAME", st.secrets.get("sf_username")),
            password=os.getenv("SF_PASSWORD", st.secrets.get("sf_password")),
            security_token=os.getenv("SF_SECURITY_TOKEN", st.secrets.get("sf_security_token"))
        )
        logger.info("Salesforce connection initialized successfully.")
        return sf
    except Exception as e:
        logger.error(f"Failed to initialize Salesforce: {e}")
        st.error(f"Cannot connect to Salesforce: {e}")
        return None

# Cache Hugging Face model
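# prajjwal1/bert-tiny is a very small BERT checkpoint, picked so the model
# loads within the memory limits of a free Hugging Face Space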
@st.cache_resource
def init_anomaly_detector():
    logger.info("Initializing anomaly detector...")
    try:
        # Use lighter model for Hugging Face Spaces
        detector = pipeline(
            "text-classification",
            model="prajjwal1/bert-tiny",
            tokenizer="prajjwal1/bert-tiny",
            clean_up_tokenization_spaces=True
        )
        logger.info("Anomaly detector initialized successfully.")
        return detector
    except Exception as e:
        logger.error(f"Failed to initialize anomaly detector: {e}")
        st.error(f"Cannot initialize anomaly detector: {e}")
        return None

# Initialize connections
sf = init_salesforce()
anomaly_detector = init_anomaly_detector()
# Cache data fetching
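# ttl=10 expires cached query results after 10 seconds, keeping the dashboard
# near real time without hitting Salesforce on every widget interaction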
@st.cache_data(ttl=10)
def get_filtered_data(lab_site, equipment_type, date_start, date_end):
    logger.info(f"Fetching data for lab: {lab_site}, equipment: {equipment_type}, date range: {date_start} to {date_end}")
    try:
        # Log_Timestamp__c holds datetimes, so SOQL needs full ISO-8601
        # datetime literals (bare dates fail against DateTime fields)
        query = f"""
            SELECT Equipment__c, Log_Timestamp__c, Status__c, Usage_Count__c, Lab__c, Equipment_Type__c
            FROM SmartLog__c
            WHERE Log_Timestamp__c >= {date_start.strftime('%Y-%m-%dT00:00:00Z')}
            AND Log_Timestamp__c <= {date_end.strftime('%Y-%m-%dT23:59:59Z')}
        """
        # Interpolating filter values is safe here: both come from fixed
        # selectbox option lists, never free-form user input
        if lab_site != "All":
            query += f" AND Lab__c = '{lab_site}'"
        if equipment_type != "All":
            query += f" AND Equipment_Type__c = '{equipment_type}'"
        query += " LIMIT 100"
        data = fetch_salesforce_data(sf, query)
        logger.info(f"Fetched {len(data)} records from Salesforce.")
        return data
    except Exception as e:
        logger.error(f"Failed to fetch data: {e}")
        return []

def main():
    logger.info("Starting main application...")
    if sf is None or anomaly_detector is None:
        st.error("Application cannot start due to initialization failures. Check logs for details.")
        logger.error("Application initialization failed: Salesforce or anomaly detector not available.")
        return
    st.title("Multi-Device LabOps Dashboard")

    # Filters
    col1, col2 = st.columns(2)
    with col1:
        lab_site = st.selectbox("Select Lab Site", ["All", "Lab1", "Lab2", "Lab3"])
    with col2:
        equipment_type = st.selectbox("Equipment Type", ["All", "Cell Analyzer", "Weight Log", "UV Verification"])
    date_range = st.date_input("Date Range", [datetime.now() - timedelta(days=7), datetime.now()])
    if len(date_range) != 2:
        st.warning("Please select a valid date range.")
        logger.warning("Invalid date range selected.")
        return
    date_start, date_end = date_range
    # Fetch and process data
    with st.spinner("Fetching data..."):
        data = get_filtered_data(lab_site, equipment_type, date_start, date_end)
    if not data:
        st.warning("No data available for the selected filters.")
        logger.warning("No data returned for the selected filters.")
        return
    df = pd.DataFrame(data)
    df["Log_Timestamp__c"] = pd.to_datetime(df["Log_Timestamp__c"])
    # Score each log entry; detect_anomalies (utils) feeds the combined
    # status/usage text through the cached HF pipeline
    df["Anomaly"] = df.apply(
        lambda row: detect_anomalies(f"{row['Status__c']} Usage:{row['Usage_Count__c']}", anomaly_detector),
        axis=1
    )
    # Pagination
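    # Ceiling division below: a partial final page still counts as a page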
    page_size = 10
    total_pages = max(1, len(df) // page_size + (1 if len(df) % page_size else 0))
    page = st.number_input("Page", min_value=1, max_value=total_pages, value=1, step=1)
    start_idx = (page - 1) * page_size
    end_idx = start_idx + page_size
    paginated_df = df.iloc[start_idx:end_idx]
    # Device Cards
    st.subheader("Device Status")
    for _, row in paginated_df.iterrows():
        anomaly = "⚠️ Anomaly" if row["Anomaly"] == "POSITIVE" else "✅ Normal"
        # Build the card as one logical string so Markdown doesn't treat the
        # indented continuation lines as a code block
        st.markdown(
            f"**{row['Equipment__c']}** | Lab: {row['Lab__c']} | Health: {row['Status__c']} | "
            f"Usage: {row['Usage_Count__c']} | Last Log: {row['Log_Timestamp__c'].strftime('%Y-%m-%d %H:%M:%S')} | {anomaly}"
        )
    # Usage Chart
    st.subheader("Usage Trends")
    fig = px.line(
        df,
        x="Log_Timestamp__c",
        y="Usage_Count__c",
        color="Equipment__c",
        title="Daily Usage Trends",
        labels={"Log_Timestamp__c": "Timestamp", "Usage_Count__c": "Usage Count"}
    )
    fig.update_layout(xaxis_title="Timestamp", yaxis_title="Usage Count")
    st.plotly_chart(fig, use_container_width=True)
    # Downtime Chart
    st.subheader("Downtime Patterns")
    downtime_df = df[df["Status__c"] == "Down"]
    if not downtime_df.empty:
        fig_downtime = px.histogram(
            downtime_df,
            x="Log_Timestamp__c",
            color="Equipment__c",
            title="Downtime Patterns",
            labels={"Log_Timestamp__c": "Timestamp"}
        )
        fig_downtime.update_layout(xaxis_title="Timestamp", yaxis_title="Downtime Count")
        st.plotly_chart(fig_downtime, use_container_width=True)
    else:
        st.info("No downtime events found for the selected filters.")
    # AMC Reminders
    st.subheader("AMC Reminders")
    amc_query = "SELECT Equipment__c, AMC_Expiry_Date__c FROM Equipment__c WHERE AMC_Expiry_Date__c <= NEXT_N_DAYS:14"
    amc_data = fetch_salesforce_data(sf, amc_query, retries=3)
    if amc_data:
        for record in amc_data:
            st.write(f"Equipment {record['Equipment__c']} - AMC Expiry: {record['AMC_Expiry_Date__c']}")
    else:
        st.info("No AMC expiries within the next 14 days.")
    # Export PDF
    if st.button("Export PDF Report"):
        with st.spinner("Generating PDF..."):
            try:
                pdf_file = generate_pdf_report(df, lab_site, equipment_type, [date_start, date_end])
                with open(pdf_file, "rb") as f:
                    st.download_button("Download PDF", f, file_name="LabOps_Report.pdf", mime="application/pdf")
                logger.info("PDF report generated successfully.")
            except Exception as e:
                st.error(f"Failed to generate PDF: {e}")
                logger.error(f"Failed to generate PDF: {e}")

if __name__ == "__main__":
    try:
        logger.info("Application starting...")
        main()
    except Exception as e:
        logger.error(f"Application failed to start: {e}")
        raise