MaheshP98 committed on
Commit 684911e · verified · 1 Parent(s): 034be3b

Update app.py

Files changed (1):
  1. app.py +102 -48
app.py CHANGED
@@ -4,26 +4,54 @@ import plotly.express as px
 from datetime import datetime, timedelta
 from simple_salesforce import Salesforce
 from transformers import pipeline
-from reportlab.lib.pagesizes import letter
-from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph
-from reportlab.lib import colors
-from reportlab.lib.styles import getSampleStyleSheet
 from utils import fetch_salesforce_data, detect_anomalies, generate_pdf_report

 # Streamlit app configuration
 st.set_page_config(page_title="LabOps Dashboard", layout="wide")

-# Salesforce authentication (replace with your credentials)
-sf = Salesforce(
-    username=st.secrets["sf_username"],
-    password=st.secrets["sf_password"],
-    security_token=st.secrets["sf_security_token"]
-)
+# Cache Salesforce connection
+@st.cache_resource
+def init_salesforce():
+    try:
+        return Salesforce(
+            username=st.secrets["sf_username"],
+            password=st.secrets["sf_password"],
+            security_token=st.secrets["sf_security_token"]
+        )
+    except Exception as e:
+        st.error(f"Failed to connect to Salesforce: {e}")
+        return None

-# Initialize Hugging Face anomaly detection pipeline
-anomaly_detector = pipeline("text-classification", model="bert-base-uncased", tokenizer="bert-base-uncased")
+# Cache Hugging Face model
+@st.cache_resource
+def init_anomaly_detector():
+    return pipeline("text-classification", model="distilbert-base-uncased", tokenizer="distilbert-base-uncased")
+
+# Initialize connections
+sf = init_salesforce()
+anomaly_detector = init_anomaly_detector()
+
+# Cache data fetching
+@st.cache_data(ttl=10)  # Cache for 10 seconds to meet refresh requirement
+def get_filtered_data(lab_site, equipment_type, date_start, date_end):
+    query = f"""
+    SELECT Equipment__c, Log_Timestamp__c, Status__c, Usage_Count__c, Lab__c, Equipment_Type__c
+    FROM SmartLog__c
+    WHERE Log_Timestamp__c >= {date_start.strftime('%Y-%m-%d')}
+    AND Log_Timestamp__c <= {date_end.strftime('%Y-%m-%d')}
+    """
+    if lab_site != "All":
+        query += f" AND Lab__c = '{lab_site}'"
+    if equipment_type != "All":
+        query += f" AND Equipment_Type__c = '{equipment_type}'"
+    query += " LIMIT 1000"  # Mitigate data overload
+    return fetch_salesforce_data(sf, query)

 def main():
+    if sf is None:
+        st.error("Cannot proceed without Salesforce connection.")
+        return
+
     st.title("Multi-Device LabOps Dashboard")

     # Filters
@@ -33,63 +61,89 @@ def main():
     with col2:
         equipment_type = st.selectbox("Equipment Type", ["All", "Cell Analyzer", "Weight Log", "UV Verification"])
     with col3:
-        date_range = st.date_input("Date Range", [datetime.now() - timedelta(days=7), datetime.now()])
+        default_start = datetime.now() - timedelta(days=7)
+        default_end = datetime.now()
+        date_range = st.date_input("Date Range", [default_start, default_end])

-    # Fetch data from Salesforce
-    query = f"""
-    SELECT Equipment__c, Log_Timestamp__c, Status__c, Usage_Count__c
-    FROM SmartLog__c
-    WHERE Log_Timestamp__c >= {date_range[0].strftime('%Y-%m-%d')}
-    AND Log_Timestamp__c <= {date_range[1].strftime('%Y-%m-%d')}
-    """
-    if lab_site != "All":
-        query += f" AND Lab__c = '{lab_site}'"
-    if equipment_type != "All":
-        query += f" AND Equipment_Type__c = '{equipment_type}'"
-
-    data = fetch_salesforce_data(sf, query)
-    df = pd.DataFrame(data)
-
-    if df.empty:
+    # Validate date range
+    if len(date_range) != 2:
+        st.warning("Please select a valid date range.")
+        return
+    date_start, date_end = date_range
+
+    # Fetch and process data
+    data = get_filtered_data(lab_site, equipment_type, date_start, date_end)
+    if not data:
         st.warning("No data available for the selected filters.")
         return

-    # Detect anomalies using Hugging Face
-    df["Anomaly"] = df["Status__c"].apply(lambda x: detect_anomalies(x, anomaly_detector))
-
+    df = pd.DataFrame(data)
+    df["Log_Timestamp__c"] = pd.to_datetime(df["Log_Timestamp__c"])
+    df["Anomaly"] = df["Status__c"].apply(lambda x: detect_anomalies(str(x), anomaly_detector))
+
+    # Pagination
+    page_size = 10
+    page = st.number_input("Page", min_value=1, value=1, step=1)
+    start_idx = (page - 1) * page_size
+    end_idx = start_idx + page_size
+    paginated_df = df[start_idx:end_idx]
+
     # Device Cards
     st.subheader("Device Status")
-    for equipment in df["Equipment__c"].unique():
-        device_data = df[df["Equipment__c"] == equipment]
-        latest_log = device_data.iloc[-1]
-        anomaly = "⚠️ Anomaly" if latest_log["Anomaly"] == "POSITIVE" else "✅ Normal"
+    for _, row in paginated_df.iterrows():
+        anomaly = "⚠️ Anomaly" if row["Anomaly"] == "POSITIVE" else "✅ Normal"
         st.markdown(f"""
-        **{equipment}** | Health: {latest_log["Status__c"]} | Usage: {latest_log["Usage_Count__c"]} | Last Log: {latest_log["Log_Timestamp__c"]} | {anomaly}
+        **{row['Equipment__c']}** | Lab: {row['Lab__c']} | Health: {row['Status__c']} |
+        Usage: {row['Usage_Count__c']} | Last Log: {row['Log_Timestamp__c'].strftime('%Y-%m-%d %H:%M:%S')} | {anomaly}
         """)

     # Usage Chart
     st.subheader("Usage Trends")
-    fig = px.line(df, x="Log_Timestamp__c", y="Usage_Count__c", color="Equipment__c", title="Daily Usage Trends")
+    fig = px.line(
+        df,
+        x="Log_Timestamp__c",
+        y="Usage_Count__c",
+        color="Equipment__c",
+        title="Daily Usage Trends",
+        labels={"Log_Timestamp__c": "Timestamp", "Usage_Count__c": "Usage Count"}
+    )
+    fig.update_layout(xaxis_title="Timestamp", yaxis_title="Usage Count")
     st.plotly_chart(fig, use_container_width=True)
-
+
     # Downtime Chart
+    st.subheader("Downtime Patterns")
     downtime_df = df[df["Status__c"] == "Down"]
     if not downtime_df.empty:
-        fig_downtime = px.histogram(downtime_df, x="Log_Timestamp__c", color="Equipment__c", title="Downtime Patterns")
+        fig_downtime = px.histogram(
+            downtime_df,
+            x="Log_Timestamp__c",
+            color="Equipment__c",
+            title="Downtime Patterns",
+            labels={"Log_Timestamp__c": "Timestamp"}
+        )
+        fig_downtime.update_layout(xaxis_title="Timestamp", yaxis_title="Downtime Count")
         st.plotly_chart(fig_downtime, use_container_width=True)
-
+    else:
+        st.info("No downtime events found for the selected filters.")
+
     # AMC Reminders
     st.subheader("AMC Reminders")
     amc_query = "SELECT Equipment__c, AMC_Expiry_Date__c FROM Equipment__c WHERE AMC_Expiry_Date__c <= NEXT_N_DAYS:14"
-    amc_data = fetch_salesforce_data(sf, amc_query)
-    for record in amc_data:
-        st.write(f"Equipment {record['Equipment__c']} - AMC Expiry: {record['AMC_Expiry_Date__c']}")
-
+    amc_data = fetch_salesforce_data(sf, amc_query, retries=3)
+    if amc_data:
+        for record in amc_data:
+            st.write(f"Equipment {record['Equipment__c']} - AMC Expiry: {record['AMC_Expiry_Date__c']}")
+    else:
+        st.info("No AMC expiries within the next 14 days.")
+
     # Export PDF
     if st.button("Export PDF Report"):
-        pdf_file = generate_pdf_report(df, lab_site, equipment_type, date_range)
-        with open(pdf_file, "rb") as f:
-            st.download_button("Download PDF", f, file_name="LabOps_Report.pdf")
+        try:
+            pdf_file = generate_pdf_report(df, lab_site, equipment_type, [date_start, date_end])
+            with open(pdf_file, "rb") as f:
+                st.download_button("Download PDF", f, file_name="LabOps_Report.pdf", mime="application/pdf")
+        except Exception as e:
+            st.error(f"Failed to generate PDF: {e}")

 if __name__ == "__main__":
     main()
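
Note: app.py depends on three helpers imported from utils — fetch_salesforce_data, detect_anomalies, and generate_pdf_report — whose implementations are not part of this diff. The sketch below is a minimal, hypothetical version of those helpers, inferred only from how they are called above (the retries keyword, a classifier label such as "POSITIVE", and a returned PDF file path) and from the reportlab imports removed from app.py in this commit; it is not the repository's actual utils.py.

```python
# Hypothetical sketch of utils.py, inferred from the call sites in app.py above.
# The real module is not shown in this commit; names and behaviour are assumptions.
import time

from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import Paragraph, SimpleDocTemplate, Table


def fetch_salesforce_data(sf, query, retries=1):
    """Run a SOQL query with simple retries; return a list of record dicts."""
    for attempt in range(retries):
        try:
            result = sf.query_all(query)
            # Strip the Salesforce 'attributes' metadata from each record
            return [
                {k: v for k, v in rec.items() if k != "attributes"}
                for rec in result["records"]
            ]
        except Exception:
            if attempt == retries - 1:
                return []
            time.sleep(1)  # brief back-off before the next attempt
    return []


def detect_anomalies(text, detector):
    """Classify a status string; app.py compares the returned label to 'POSITIVE'."""
    return detector(text)[0]["label"]


def generate_pdf_report(df, lab_site, equipment_type, date_range, path="LabOps_Report.pdf"):
    """Write a simple tabular PDF report and return its file path."""
    styles = getSampleStyleSheet()
    title = Paragraph(
        f"LabOps Report - {lab_site} / {equipment_type} "
        f"({date_range[0]:%Y-%m-%d} to {date_range[1]:%Y-%m-%d})",
        styles["Title"],
    )
    table = Table([list(df.columns)] + df.astype(str).values.tolist())
    SimpleDocTemplate(path, pagesize=letter).build([title, table])
    return path
```

Only the signatures and return shapes matter to app.py (a list of record dicts, a label string, and a file path), so any implementation honouring that contract would work with this commit.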