Update app.py
app.py
CHANGED
@@ -25,7 +25,12 @@ def init_salesforce():
 # Cache Hugging Face model
 @st.cache_resource
 def init_anomaly_detector():
-    return pipeline(
+    return pipeline(
+        "text-classification",
+        model="distilbert-base-uncased",
+        tokenizer="distilbert-base-uncased",
+        clean_up_tokenization_spaces=True
+    )
 
 # Initialize connections
 sf = init_salesforce()
@@ -79,11 +84,15 @@ def main():
 
     df = pd.DataFrame(data)
     df["Log_Timestamp__c"] = pd.to_datetime(df["Log_Timestamp__c"])
-    df["Anomaly"] = df
+    df["Anomaly"] = df.apply(
+        lambda row: detect_anomalies(f"{row['Status__c']} Usage:{row['Usage_Count__c']}", anomaly_detector),
+        axis=1
+    )
 
     # Pagination
     page_size = 10
-
+    total_pages = max(1, len(df) // page_size + (1 if len(df) % page_size else 0))
+    page = st.number_input("Page", min_value=1, max_value=total_pages, value=1, step=1)
     start_idx = (page - 1) * page_size
     end_idx = start_idx + page_size
     paginated_df = df[start_idx:end_idx]
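
The new Anomaly column relies on a detect_anomalies(text, detector) helper and an anomaly_detector handle that are defined elsewhere in app.py and not visible in this diff. A minimal sketch of what such a helper could look like, assuming it simply feeds the formatted status string through the cached text-classification pipeline and returns the top label with its score:

# Hypothetical stand-in for detect_anomalies(); the diff shows only the
# call site, so this body is an assumption, not the app's actual code.
from transformers import pipeline

def detect_anomalies(text, detector):
    # A transformers text-classification pipeline returns a list of dicts,
    # e.g. [{"label": "LABEL_1", "score": 0.87}]; surface the top label + score.
    result = detector(text)[0]
    return f"{result['label']} ({result['score']:.2f})"

# Example usage with the same model the commit caches in init_anomaly_detector()
detector = pipeline("text-classification", model="distilbert-base-uncased")
print(detect_anomalies("Error Usage:42", detector))

In the app itself, detector would be the value returned by the cached init_anomaly_detector() above rather than a fresh pipeline. Note that distilbert-base-uncased has no fine-tuned classification head, so the labels produced by this sketch are placeholders (LABEL_0/LABEL_1) rather than meaningful anomaly flags.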