Update services/state_manager.py
services/state_manager.py  (+62 -214)  CHANGED

Previous version (removed lines are marked with "-"):
@@ -1,117 +1,69 @@
-# state_manager.py
 """
-Manages the application state
-
-
 """
 import pandas as pd
 import logging
 import os
-from datetime import timezone # Python's datetime, not to be confused with pandas'
-import gradio as gr

-# Assuming Bubble_API_Calls contains
 from apis.Bubble_API_Calls import (
     fetch_linkedin_token_from_bubble,
-    fetch_linkedin_posts_data_from_bubble #
 )
 # Assuming config.py contains all necessary constants
 from config import (
-    DEFAULT_INITIAL_FETCH_COUNT, DEFAULT_POSTS_UPDATE_FETCH_COUNT,
-    BUBBLE_POST_DATE_COLUMN_NAME, BUBBLE_POSTS_TABLE_NAME,
-    BUBBLE_POST_STATS_TABLE_NAME,
-    BUBBLE_MENTIONS_TABLE_NAME, BUBBLE_MENTIONS_DATE_COLUMN_NAME,
-    BUBBLE_FOLLOWER_STATS_TABLE_NAME, FOLLOWER_STATS_TYPE_COLUMN, FOLLOWER_STATS_CATEGORY_COLUMN,
     LINKEDIN_CLIENT_ID_ENV_VAR,
-
-
-
 )
-
 from .report_data_handler import fetch_latest_agentic_analysis

 def check_token_status(token_state):
     """Checks the status of the LinkedIn token."""
     return "✅ Token available" if token_state and token_state.get("token") else "❌ Token not available"

-def
     """
-
-
-
-    Args:
-        operations_log_df (pd.DataFrame): DataFrame containing operations log data.
-            Expected columns defined in config:
-            BUBBLE_OPERATIONS_LOG_DATE_COLUMN,
-            BUBBLE_OPERATIONS_LOG_SUBJECT_COLUMN,
-            BUBBLE_OPERATIONS_LOG_ORG_URN_COLUMN.
-        subject (str): The subject of the sync operation (e.g., "post", "mention").
-        org_urn (str): The organization URN.
-
-    Returns:
-        pd.Timestamp: The last sync attempt date (UTC), or pd.NaT if no relevant log entry is found.
-    """
-    if operations_log_df.empty or not org_urn:
-        return pd.NaT
-
-    # Ensure required columns exist
-    required_cols = [BUBBLE_OPERATIONS_LOG_DATE_COLUMN, BUBBLE_OPERATIONS_LOG_SUBJECT_COLUMN, BUBBLE_OPERATIONS_LOG_ORG_URN_COLUMN]
-    if not all(col in operations_log_df.columns for col in required_cols):
-        logging.warning(f"Operations log DF is missing one or more required columns: {required_cols}")
-        return pd.NaT
-
-    try:
-        # Filter for the specific subject and organization URN
-        # Ensure data types are consistent for comparison, especially org_urn
-        filtered_df = operations_log_df[
-            (operations_log_df[BUBBLE_OPERATIONS_LOG_SUBJECT_COLUMN].astype(str) == str(subject)) &
-            (operations_log_df[BUBBLE_OPERATIONS_LOG_ORG_URN_COLUMN].astype(str) == str(org_urn))
-        ]
-
-        if filtered_df.empty:
-            return pd.NaT

-
-
-
-        dates = pd.to_datetime(filtered_df[BUBBLE_OPERATIONS_LOG_DATE_COLUMN], errors='coerce', utc=True)
-        return dates.dropna().max()
-    except Exception as e:
-        logging.error(f"Error processing operations log for last sync attempt date: {e}", exc_info=True)
-        return pd.NaT
-
-
-def process_and_store_bubble_token(url_user_token, org_urn, token_state):
-    """
-    Processes user token, fetches LinkedIn token, fetches existing Bubble data (posts, mentions, follower stats, operations log),
-    and determines if a sync is needed for each data type based on the operations log.
-    Updates token state and UI for the sync button.
     """
-    logging.info(f"

-
     new_state.update({
         "token": new_state.get("token"), # Preserve existing token if any
         "client_id": new_state.get("client_id"),
         "org_urn": org_urn,
-        "bubble_posts_df":
-        "
-        "bubble_mentions_df":
-        "
-        "
-        "
-        "bubble_operations_log_df": new_state.get("bubble_operations_log_df", pd.DataFrame()), # NEW
         "url_user_token_temp_storage": url_user_token
     })

-
-
     client_id = os.environ.get(LINKEDIN_CLIENT_ID_ENV_VAR)
     new_state["client_id"] = client_id if client_id else "ENV VAR MISSING"
-    if not client_id:

     if url_user_token and "not found" not in url_user_token and "Could not access" not in url_user_token:
-        logging.info(
         try:
             parsed_linkedin_token = fetch_linkedin_token_from_bubble(url_user_token)
             if isinstance(parsed_linkedin_token, dict) and "access_token" in parsed_linkedin_token:
@@ -125,155 +77,51 @@ def process_and_store_bubble_token(url_user_token, org_urn, token_state):
             logging.error(f"❌ Exception while fetching LinkedIn token from Bubble: {e}", exc_info=True)
     else:
         new_state["token"] = None
-        logging.info("No valid URL user token provided for LinkedIn token fetch

-
-    if
         data_tables_to_fetch = {
             "bubble_posts_df": BUBBLE_POSTS_TABLE_NAME,
             "bubble_mentions_df": BUBBLE_MENTIONS_TABLE_NAME,
             "bubble_follower_stats_df": BUBBLE_FOLLOWER_STATS_TABLE_NAME,
-            "bubble_operations_log_df": BUBBLE_OPERATIONS_LOG_TABLE_NAME,
-            "bubble_post_stats_df": BUBBLE_POST_STATS_TABLE_NAME
         }
         for state_key, table_name in data_tables_to_fetch.items():
-            logging.info(f"Attempting to fetch {table_name} from Bubble for org_urn: {
             try:
-                fetched_df, error_message = fetch_linkedin_posts_data_from_bubble(
                 new_state[state_key] = pd.DataFrame() if error_message or fetched_df is None else fetched_df
-                if error_message:
-
-
-
-
             except Exception as e:
-                logging.error(f"❌
                 new_state[state_key] = pd.DataFrame()

-
-
-
-
-
-
-
             new_state["bubble_agentic_analysis_data"] = all_analysis_data
-
-            quarter_data = all_analysis_data[all_analysis_data['report_type'] == 'Quarter'].copy()
-
-            if not quarter_data.empty:
-                latest_quarter_analysis = quarter_data.sort_values(by='Created Date', ascending=False).iloc[0]
-                should_run_due_to_no_data = pd.isna(latest_quarter_analysis).any()
-            else:
-                should_run_due_to_no_data = True # No quarter data found
-        else:
-            should_run_due_to_no_data = True # No data at all
-
-        if should_run_due_to_no_data:
-            logging.info(f"No existing agentic analysis data found for org {current_org_urn}. Triggering run.")
-            new_state["agentic_pipeline_should_run_now"] = True
         else:
-
-
-    else:
-        logging.warning("Org URN not available in state. Cannot fetch data from Bubble.")
-        for key in ["bubble_posts_df", "bubble_mentions_df", "bubble_follower_stats_df", "bubble_operations_log_df"]:
-            new_state[key] = pd.DataFrame()

-    # --- Determine sync needs based on Operations Log ---
-    ops_log_df = new_state.get("bubble_operations_log_df", pd.DataFrame())
-    now_utc = pd.Timestamp.now(tz='UTC')
-
-    # 1. Posts Sync Logic
-    last_post_sync_attempt = get_last_sync_attempt_date(ops_log_df, LOG_SUBJECT_POSTS, current_org_urn)
-    if pd.isna(last_post_sync_attempt):
-        logging.info(f"ℹ️ No previous '{LOG_SUBJECT_POSTS}' sync attempt logged. Setting to fetch initial {DEFAULT_INITIAL_FETCH_COUNT} posts.")
-        new_state['fetch_count_for_api'] = DEFAULT_INITIAL_FETCH_COUNT
     else:
-
-
-
-            # For simplicity, using DEFAULT_POSTS_UPDATE_FETCH_COUNT
-            new_state['fetch_count_for_api'] = DEFAULT_POSTS_UPDATE_FETCH_COUNT
-            logging.info(f"Posts sync attempt is {days_since_last_attempt} days old. Setting fetch count to {new_state['fetch_count_for_api']}.")
-        else:
-            new_state['fetch_count_for_api'] = 0
-            logging.info(f"Posts sync attempt was recent ({days_since_last_attempt} days ago). No new posts fetch scheduled based on log.")
-
-    # 2. Mentions Sync Logic
-    last_mention_sync_attempt = get_last_sync_attempt_date(ops_log_df, LOG_SUBJECT_MENTIONS, current_org_urn)
-    if pd.isna(last_mention_sync_attempt):
-        new_state['mentions_should_sync_now'] = True
-        logging.info(f"Mentions sync needed: No previous '{LOG_SUBJECT_MENTIONS}' sync attempt logged.")
-    else:
-        days_since_last_attempt_mentions = (now_utc.normalize() - last_mention_sync_attempt.normalize()).days
-        if days_since_last_attempt_mentions >= 7:
-            new_state['mentions_should_sync_now'] = True
-            logging.info(f"Mentions sync needed: Last attempt was {days_since_last_attempt_mentions} days ago.")
-        else:
-            new_state['mentions_should_sync_now'] = False
-            logging.info(f"Mentions sync attempt was recent ({days_since_last_attempt_mentions} days ago). Sync not scheduled.")
-
-    # 3. Follower Stats Sync Logic
-    last_fs_sync_attempt = get_last_sync_attempt_date(ops_log_df, LOG_SUBJECT_FOLLOWER_STATS, current_org_urn)
-    fs_df_current = new_state.get("bubble_follower_stats_df", pd.DataFrame())
-
-    demographics_missing = False
-    if fs_df_current.empty:
-        demographics_missing = True # If entire table is empty, demographics are missing
-        logging.info("Follower stats: Main table is empty, considering demographics missing.")
-    elif FOLLOWER_STATS_TYPE_COLUMN not in fs_df_current.columns:
-        demographics_missing = True # If type column is missing, cannot check demographics
-        logging.info(f"Follower stats: Column '{FOLLOWER_STATS_TYPE_COLUMN}' is missing, considering demographics missing.")
-    else:
-        # Check if any rows exist that are NOT 'follower_gains_monthly'
-        if fs_df_current[fs_df_current[FOLLOWER_STATS_TYPE_COLUMN] != 'follower_gains_monthly'].empty:
-            demographics_missing = True
-            logging.info("Follower stats: Demographic data (non-monthly types) is missing.")
-
-    time_based_need_fs = False
-    if pd.isna(last_fs_sync_attempt):
-        time_based_need_fs = True
-        logging.info(f"Follower stats sync needed: No previous '{LOG_SUBJECT_FOLLOWER_STATS}' sync attempt logged.")
-    else:
-        start_of_current_month = now_utc.normalize().replace(day=1)
-        # Ensure last_fs_sync_attempt is timezone-aware (should be by get_last_sync_attempt_date)
-        if last_fs_sync_attempt.tzinfo is None: # Should not happen if get_last_sync_attempt_date works
-            last_fs_sync_attempt = last_fs_sync_attempt.tz_localize('UTC')
-
-        if last_fs_sync_attempt < start_of_current_month:
-            time_based_need_fs = True
-            logging.info(f"Follower stats sync needed: Last attempt {last_fs_sync_attempt.date()} is before current month start {start_of_current_month.date()}.")
-
-    if time_based_need_fs or demographics_missing:
-        new_state['fs_should_sync_now'] = True
-        if demographics_missing and not time_based_need_fs:
-            logging.info("Follower stats sync triggered: Demographic data missing, even if last sync attempt is recent.")
-        elif time_based_need_fs:
-            logging.info("Follower stats sync triggered by schedule.")
-    else:
-        new_state['fs_should_sync_now'] = False
-        logging.info("Follower stats sync not currently required by schedule or data presence.")
-
-    # Update Sync Button based on determined needs
-    sync_actions = []
-    if new_state.get('fetch_count_for_api', 0) > 0:
-        sync_actions.append(f"Posts ({new_state['fetch_count_for_api']})")
-    if new_state.get('mentions_should_sync_now', False):
-        sync_actions.append("Mentions")
-    if new_state.get('fs_should_sync_now', False):
-        sync_actions.append("Follower Stats")
-
-    if new_state["token"] and sync_actions:
-        button_label = f"🔄 Sync LinkedIn Data ({', '.join(sync_actions)})"
-        button_update = gr.update(value=button_label, visible=True, interactive=True)
-    elif new_state["token"]:
-        button_label = "✅ Data Up-to-Date (based on sync log)"
-        button_update = gr.update(value=button_label, visible=True, interactive=False)
-    else: # No token
-        button_update = gr.update(visible=False, interactive=False, value="🔄 Sync LinkedIn Data")
-

     token_status_message = check_token_status(new_state)
-    logging.info(f"
-

New version (added lines are marked with "+"):
+# services/state_manager.py
 """
+Manages the application state by loading all necessary, pre-processed data
+from Bubble.io. This includes the LinkedIn token, organizational data (posts,
+mentions, stats), and the results of any pre-computed AI analysis.
 """
 import pandas as pd
 import logging
 import os

+# Assuming Bubble_API_Calls contains the necessary fetch functions
 from apis.Bubble_API_Calls import (
     fetch_linkedin_token_from_bubble,
+    fetch_linkedin_posts_data_from_bubble # Generic fetch function
 )
 # Assuming config.py contains all necessary constants
 from config import (
     LINKEDIN_CLIENT_ID_ENV_VAR,
+    BUBBLE_POSTS_TABLE_NAME,
+    BUBBLE_POST_STATS_TABLE_NAME,
+    BUBBLE_MENTIONS_TABLE_NAME,
+    BUBBLE_FOLLOWER_STATS_TABLE_NAME,
+    BUBBLE_OPERATIONS_LOG_TABLE_NAME # Kept for potential display/logging purposes
 )
+# The report_data_handler is responsible for fetching the already-computed AI analysis
 from .report_data_handler import fetch_latest_agentic_analysis

 def check_token_status(token_state):
     """Checks the status of the LinkedIn token."""
     return "✅ Token available" if token_state and token_state.get("token") else "❌ Token not available"

+def load_data_from_bubble(url_user_token, org_urn, current_state):
     """
+    Fetches the LinkedIn token and all relevant pre-processed data (posts, mentions,
+    follower stats, and agentic analysis results) from Bubble.io.

+    This function assumes data is populated in Bubble by an external process. It
+    only retrieves the data for display and does not trigger any syncing or
+    data processing.
     """
+    logging.info(f"Loading all data from Bubble for Org URN: '{org_urn}'")

+    # Initialize a new state, preserving the core structure from the old state
+    new_state = current_state.copy() if current_state else {}
     new_state.update({
         "token": new_state.get("token"), # Preserve existing token if any
         "client_id": new_state.get("client_id"),
         "org_urn": org_urn,
+        "bubble_posts_df": pd.DataFrame(),
+        "bubble_post_stats_df": pd.DataFrame(),
+        "bubble_mentions_df": pd.DataFrame(),
+        "bubble_follower_stats_df": pd.DataFrame(),
+        "bubble_operations_log_df": pd.DataFrame(),
+        "bubble_agentic_analysis_data": pd.DataFrame(),
         "url_user_token_temp_storage": url_user_token
     })

+    # 1. Get Client ID from environment
     client_id = os.environ.get(LINKEDIN_CLIENT_ID_ENV_VAR)
     new_state["client_id"] = client_id if client_id else "ENV VAR MISSING"
+    if not client_id:
+        logging.error(f"CRITICAL ERROR: '{LINKEDIN_CLIENT_ID_ENV_VAR}' environment variable not set.")

+    # 2. Fetch LinkedIn Access Token from Bubble
     if url_user_token and "not found" not in url_user_token and "Could not access" not in url_user_token:
+        logging.info("Attempting to fetch LinkedIn token from Bubble.")
         try:
             parsed_linkedin_token = fetch_linkedin_token_from_bubble(url_user_token)
             if isinstance(parsed_linkedin_token, dict) and "access_token" in parsed_linkedin_token:
@@ -125,155 +77,51 @@ def process_and_store_bubble_token(url_user_token, org_urn, token_state):
             logging.error(f"❌ Exception while fetching LinkedIn token from Bubble: {e}", exc_info=True)
     else:
         new_state["token"] = None
+        logging.info("No valid URL user token provided for LinkedIn token fetch.")

+    # 3. Fetch all data tables from Bubble if an Org URN is present
+    if org_urn:
         data_tables_to_fetch = {
             "bubble_posts_df": BUBBLE_POSTS_TABLE_NAME,
+            "bubble_post_stats_df": BUBBLE_POST_STATS_TABLE_NAME,
             "bubble_mentions_df": BUBBLE_MENTIONS_TABLE_NAME,
             "bubble_follower_stats_df": BUBBLE_FOLLOWER_STATS_TABLE_NAME,
+            "bubble_operations_log_df": BUBBLE_OPERATIONS_LOG_TABLE_NAME,
         }
         for state_key, table_name in data_tables_to_fetch.items():
+            logging.info(f"Attempting to fetch '{table_name}' from Bubble for org_urn: {org_urn}")
             try:
+                fetched_df, error_message = fetch_linkedin_posts_data_from_bubble(org_urn, table_name, "organization_urn", "equals")
                 new_state[state_key] = pd.DataFrame() if error_message or fetched_df is None else fetched_df
+                if error_message:
+                    logging.warning(f"Error fetching '{table_name}' from Bubble: {error_message}.")
+                else:
+                    logging.info(f"✅ Successfully fetched {len(new_state[state_key])} records for '{table_name}'.")
             except Exception as e:
+                logging.error(f"❌ Exception while fetching '{table_name}' from Bubble: {e}.", exc_info=True)
                 new_state[state_key] = pd.DataFrame()

+        # 4. Fetch the pre-computed Agentic Analysis data
+        logging.info(f"Attempting to fetch agentic analysis data from Bubble for org_urn: {org_urn}")
+        all_analysis_data, error = fetch_latest_agentic_analysis(org_urn)
+        if error:
+            logging.warning(f"Error fetching agentic analysis data: {error}")
+            new_state["bubble_agentic_analysis_data"] = pd.DataFrame()
+        elif all_analysis_data is not None and not all_analysis_data.empty:
             new_state["bubble_agentic_analysis_data"] = all_analysis_data
+            logging.info(f"✅ Successfully fetched {len(all_analysis_data)} records for agentic analysis.")
         else:
+            new_state["bubble_agentic_analysis_data"] = pd.DataFrame()
+            logging.info("No agentic analysis data found in Bubble for this org.")

     else:
+        logging.warning("Org URN not available in state. Cannot fetch any data from Bubble.")
+        for key in ["bubble_posts_df", "bubble_post_stats_df", "bubble_mentions_df", "bubble_follower_stats_df", "bubble_operations_log_df", "bubble_agentic_analysis_data"]:
+            new_state[key] = pd.DataFrame()

     token_status_message = check_token_status(new_state)
+    logging.info(f"Data loading from Bubble complete. Status: {token_status_message}.")
+
+    # This function now only returns the status message and the updated state.
+    # The sync button logic has been removed.
+    return token_status_message, new_state
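
For context, a minimal sketch of how the new load_data_from_bubble function might be wired into a Gradio load event. The component names (demo, app_state, token_status_md, url_token_input, org_urn_input) and the on_load wrapper are illustrative assumptions, not part of this commit; it only assumes the (status_message, new_state) return signature shown in the diff above.

# Illustrative sketch only - the component names and wiring below are assumptions.
import gradio as gr
from services.state_manager import load_data_from_bubble  # assumes 'services' is an importable package

with gr.Blocks() as demo:
    app_state = gr.State({})                       # will hold the dict returned as new_state
    token_status_md = gr.Markdown("Checking token status...")
    url_token_input = gr.Textbox(visible=False)    # user token passed in from the URL (assumed)
    org_urn_input = gr.Textbox(visible=False)      # organization URN passed in from the URL (assumed)

    def on_load(url_user_token, org_urn, current_state):
        # load_data_from_bubble returns (token_status_message, new_state)
        status_message, new_state = load_data_from_bubble(url_user_token, org_urn, current_state)
        return status_message, new_state

    demo.load(
        fn=on_load,
        inputs=[url_token_input, org_urn_input, app_state],
        outputs=[token_status_md, app_state],
    )

Since the rewritten function no longer returns a gr.update for a sync button, the UI only needs to consume the returned status message and the refreshed state dictionary.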