MaheshP98 committed
Commit 7052c88 · verified · 1 Parent(s): bfc7a88

Update app.py

Files changed (1)
  1. app.py +36 -12
app.py CHANGED
@@ -223,15 +223,37 @@ h1 {
 
 def validate_csv(df):
     """
-    Validate that the CSV has the required columns.
+    Validate that the CSV has the required columns, handling both original and renamed columns.
     Returns True if valid, False otherwise with an error message.
     """
     # Strip whitespace from column names
     df.columns = df.columns.str.strip()
-    required_columns = ['device_id', 'usage_hours', 'amc_date', 'status']
-    missing_columns = [col for col in required_columns if col not in df.columns]
-    if missing_columns:
-        return False, f"Missing required columns: {', '.join(missing_columns)}. Found columns: {', '.join(df.columns)}"
+
+    # Define expected original and renamed columns
+    original_columns = ['device_id', 'usage_hours', 'amc_date', 'status']
+    renamed_columns = ['equipment', 'usage_count', 'amc_expiry', 'status']
+
+    # Check for original columns
+    missing_original = [col for col in original_columns if col not in df.columns]
+    # Check for renamed columns
+    missing_renamed = [col for col in renamed_columns if col not in df.columns]
+
+    # If original columns are present, proceed as is
+    if not missing_original:
+        logging.info("Found original columns in CSV. Proceeding with validation.")
+    # If renamed columns are present, map them back to original for validation
+    elif not missing_renamed:
+        logging.info("Found renamed columns in CSV. Mapping back to original names for validation.")
+        df.rename(columns={
+            'equipment': 'device_id',
+            'usage_count': 'usage_hours',
+            'amc_expiry': 'amc_date'
+        }, inplace=True)
+    else:
+        # If neither set is fully present, report missing columns
+        found_columns = ', '.join(df.columns)
+        return False, f"Missing required columns. Expected either {', '.join(original_columns)} or {', '.join(renamed_columns)}. Found columns: {found_columns}"
+
     # Validate data types
     try:
         df['usage_hours'] = pd.to_numeric(df['usage_hours'], errors='raise')
@@ -242,6 +264,14 @@ def validate_csv(df):
         df['downtime'] = pd.to_numeric(df['downtime'], errors='raise')
     except Exception as e:
         return False, f"Invalid data types: {str(e)}"
+
+    # Rename columns to internal names after validation
+    df.rename(columns={
+        'device_id': 'equipment',
+        'usage_hours': 'usage_count',
+        'amc_date': 'amc_expiry'
+    }, inplace=True)
+
     return True, ""
 
 def generate_device_cards(df, anomaly_df):
@@ -377,13 +407,7 @@ def process_files(uploaded_files):
         df = pd.read_csv(file.name, delimiter=',', skipinitialspace=True)
         # Log the columns for debugging
         logging.info(f"Columns in {file.name}: {', '.join(df.columns)}")
-        # Rename columns to match expected names
-        df = df.rename(columns={
-            'device_id': 'equipment',
-            'usage_hours': 'usage_count',
-            'amc_date': 'amc_expiry'
-        })
-        # Validate CSV structure
+        # Validate CSV structure (renaming happens inside validate_csv now)
         is_valid, error_msg = validate_csv(df)
         if not is_valid:
             logging.error(f"Failed to load {file.name}: {error_msg}")
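
For a quick sanity check of the new behaviour, here is a minimal standalone sketch of the dual-header acceptance introduced in this commit. The helper name normalize_columns and the sample row are illustrative only and not part of app.py; the real validate_csv additionally validates data types (usage_hours, downtime) and then renames the columns to the internal equipment/usage_count/amc_expiry names.

import pandas as pd

ORIGINAL_COLUMNS = ['device_id', 'usage_hours', 'amc_date', 'status']
RENAMED_COLUMNS = ['equipment', 'usage_count', 'amc_expiry', 'status']

def normalize_columns(df):
    # Accept either header set; map the renamed headers back to the original names.
    df.columns = df.columns.str.strip()
    if all(col in df.columns for col in ORIGINAL_COLUMNS):
        return df, True
    if all(col in df.columns for col in RENAMED_COLUMNS):
        return df.rename(columns={'equipment': 'device_id',
                                  'usage_count': 'usage_hours',
                                  'amc_expiry': 'amc_date'}), True
    # Neither header set is fully present
    return df, False

# Both header variants should now pass the column check.
df_old = pd.DataFrame([['DEV-001', 120, '2025-01-01', 'active']], columns=ORIGINAL_COLUMNS)
df_new = pd.DataFrame([['DEV-001', 120, '2025-01-01', 'active']], columns=RENAMED_COLUMNS)
print(normalize_columns(df_old)[1])  # True
print(normalize_columns(df_new)[1])  # True

The practical effect of the change: CSVs exported with either header convention pass the same validation path, and the rename to the internal column names now happens in exactly one place (inside validate_csv) instead of in process_files.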