Update app.py
app.py
CHANGED
@@ -4,26 +4,54 @@ import plotly.express as px
from datetime import datetime, timedelta
from simple_salesforce import Salesforce
from transformers import pipeline
-from reportlab.lib.pagesizes import letter
-from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph
-from reportlab.lib import colors
-from reportlab.lib.styles import getSampleStyleSheet
from utils import fetch_salesforce_data, detect_anomalies, generate_pdf_report

# Streamlit app configuration
st.set_page_config(page_title="LabOps Dashboard", layout="wide")

-# Salesforce
+# Cache Salesforce connection
+@st.cache_resource
+def init_salesforce():
+    try:
+        return Salesforce(
+            username=st.secrets["sf_username"],
+            password=st.secrets["sf_password"],
+            security_token=st.secrets["sf_security_token"]
+        )
+    except Exception as e:
+        st.error(f"Failed to connect to Salesforce: {e}")
+        return None

+# Cache Hugging Face model
+@st.cache_resource
+def init_anomaly_detector():
+    return pipeline("text-classification", model="distilbert-base-uncased", tokenizer="distilbert-base-uncased")
+
+# Initialize connections
+sf = init_salesforce()
+anomaly_detector = init_anomaly_detector()
+
+# Cache data fetching
+@st.cache_data(ttl=10)  # Cache for 10 seconds to meet refresh requirement
+def get_filtered_data(lab_site, equipment_type, date_start, date_end):
+    query = f"""
+        SELECT Equipment__c, Log_Timestamp__c, Status__c, Usage_Count__c, Lab__c, Equipment_Type__c
+        FROM SmartLog__c
+        WHERE Log_Timestamp__c >= {date_start.strftime('%Y-%m-%d')}
+        AND Log_Timestamp__c <= {date_end.strftime('%Y-%m-%d')}
+    """
+    if lab_site != "All":
+        query += f" AND Lab__c = '{lab_site}'"
+    if equipment_type != "All":
+        query += f" AND Equipment_Type__c = '{equipment_type}'"
+    query += " LIMIT 1000"  # Mitigate data overload
+    return fetch_salesforce_data(sf, query)

def main():
+    if sf is None:
+        st.error("Cannot proceed without Salesforce connection.")
+        return
+
    st.title("Multi-Device LabOps Dashboard")

    # Filters
@@ -33,63 +61,89 @@ def main():
    with col2:
        equipment_type = st.selectbox("Equipment Type", ["All", "Cell Analyzer", "Weight Log", "UV Verification"])
    with col3:
+        default_start = datetime.now() - timedelta(days=7)
+        default_end = datetime.now()
+        date_range = st.date_input("Date Range", [default_start, default_end])

-    if equipment_type != "All":
-        query += f" AND Equipment_Type__c = '{equipment_type}'"
-    data = fetch_salesforce_data(sf, query)
-    df = pd.DataFrame(data)
-    if df.empty:
+    # Validate date range
+    if len(date_range) != 2:
+        st.warning("Please select a valid date range.")
+        return
+    date_start, date_end = date_range
+
+    # Fetch and process data
+    data = get_filtered_data(lab_site, equipment_type, date_start, date_end)
+    if not data:
        st.warning("No data available for the selected filters.")
        return

-    df["
+    df = pd.DataFrame(data)
+    df["Log_Timestamp__c"] = pd.to_datetime(df["Log_Timestamp__c"])
+    df["Anomaly"] = df["Status__c"].apply(lambda x: detect_anomalies(str(x), anomaly_detector))
+
+    # Pagination
+    page_size = 10
+    page = st.number_input("Page", min_value=1, value=1, step=1)
+    start_idx = (page - 1) * page_size
+    end_idx = start_idx + page_size
+    paginated_df = df[start_idx:end_idx]
+
    # Device Cards
    st.subheader("Device Status")
-    for
-        latest_log = device_data.iloc[-1]
-        anomaly = "⚠️ Anomaly" if latest_log["Anomaly"] == "POSITIVE" else "✅ Normal"
+    for _, row in paginated_df.iterrows():
+        anomaly = "⚠️ Anomaly" if row["Anomaly"] == "POSITIVE" else "✅ Normal"
        st.markdown(f"""
-        **{
+        **{row['Equipment__c']}** | Lab: {row['Lab__c']} | Health: {row['Status__c']} |
+        Usage: {row['Usage_Count__c']} | Last Log: {row['Log_Timestamp__c'].strftime('%Y-%m-%d %H:%M:%S')} | {anomaly}
        """)

    # Usage Chart
    st.subheader("Usage Trends")
-    fig = px.line(
+    fig = px.line(
+        df,
+        x="Log_Timestamp__c",
+        y="Usage_Count__c",
+        color="Equipment__c",
+        title="Daily Usage Trends",
+        labels={"Log_Timestamp__c": "Timestamp", "Usage_Count__c": "Usage Count"}
+    )
+    fig.update_layout(xaxis_title="Timestamp", yaxis_title="Usage Count")
    st.plotly_chart(fig, use_container_width=True)
+
    # Downtime Chart
+    st.subheader("Downtime Patterns")
    downtime_df = df[df["Status__c"] == "Down"]
    if not downtime_df.empty:
-        fig_downtime = px.histogram(
+        fig_downtime = px.histogram(
+            downtime_df,
+            x="Log_Timestamp__c",
+            color="Equipment__c",
+            title="Downtime Patterns",
+            labels={"Log_Timestamp__c": "Timestamp"}
+        )
+        fig_downtime.update_layout(xaxis_title="Timestamp", yaxis_title="Downtime Count")
        st.plotly_chart(fig_downtime, use_container_width=True)
+    else:
+        st.info("No downtime events found for the selected filters.")
+
    # AMC Reminders
    st.subheader("AMC Reminders")
    amc_query = "SELECT Equipment__c, AMC_Expiry_Date__c FROM Equipment__c WHERE AMC_Expiry_Date__c <= NEXT_N_DAYS:14"
-    amc_data = fetch_salesforce_data(sf, amc_query)
+    amc_data = fetch_salesforce_data(sf, amc_query, retries=3)
+    if amc_data:
+        for record in amc_data:
+            st.write(f"Equipment {record['Equipment__c']} - AMC Expiry: {record['AMC_Expiry_Date__c']}")
+    else:
+        st.info("No AMC expiries within the next 14 days.")
+
    # Export PDF
    if st.button("Export PDF Report"):
+        try:
+            pdf_file = generate_pdf_report(df, lab_site, equipment_type, [date_start, date_end])
+            with open(pdf_file, "rb") as f:
+                st.download_button("Download PDF", f, file_name="LabOps_Report.pdf", mime="application/pdf")
+        except Exception as e:
+            st.error(f"Failed to generate PDF: {e}")

if __name__ == "__main__":
    main()
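For reference, `app.py` imports `fetch_salesforce_data`, `detect_anomalies`, and `generate_pdf_report` from `utils`, which is not part of this diff. The sketch below is a hypothetical reconstruction inferred only from the call sites above (the `retries` keyword, the anomaly label check, and the reportlab imports this commit removes from `app.py`); the repository's actual `utils.py` may differ.

```python
# utils.py (hypothetical sketch; signatures inferred from app.py's call sites)
import time

from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph


def fetch_salesforce_data(sf, query, retries=1):
    """Run a SOQL query, retrying on failure; return the list of records."""
    for attempt in range(retries):
        try:
            return sf.query_all(query)["records"]
        except Exception:
            if attempt == retries - 1:
                return []
            time.sleep(1)  # brief pause before retrying


def detect_anomalies(text, detector):
    """Classify a status string with the Hugging Face pipeline; return its label."""
    return detector(text)[0]["label"]


def generate_pdf_report(df, lab_site, equipment_type, date_range, path="LabOps_Report.pdf"):
    """Write the filtered data to a simple tabular PDF and return the file path."""
    styles = getSampleStyleSheet()
    elements = [
        Paragraph(f"LabOps Report - {lab_site} / {equipment_type}", styles["Title"]),
        Paragraph(f"Period: {date_range[0]} to {date_range[1]}", styles["Normal"]),
        Table([list(df.columns)] + df.astype(str).values.tolist(),
              style=TableStyle([
                  ("BACKGROUND", (0, 0), (-1, 0), colors.lightgrey),
                  ("GRID", (0, 0), (-1, -1), 0.5, colors.grey),
              ])),
    ]
    SimpleDocTemplate(path, pagesize=letter).build(elements)
    return path
```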