import gradio as gr
import pandas as pd
from utils.load_data import load_logs
from utils.visualize import plot_usage
from utils.report import generate_pdf
from models.anomaly import detect_anomalies
from utils.amc import upcoming_amc_devices
import logging
import os
# Configure logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)
def process_files(uploaded_files):
"""Process uploaded CSV files and generate dashboard outputs."""
logger.info(f"Received uploaded files: {uploaded_files}")
# Handle Gradio's File component output: may be a tuple/list of lists
if not uploaded_files:
logger.warning("No files uploaded.")
return "Please upload at least one valid CSV file.", None, None, None, None
# Flatten the structure: uploaded_files might be (['path'],) or ['path']
if isinstance(uploaded_files, (tuple, list)) and len(uploaded_files) > 0:
if isinstance(uploaded_files[0], list):
valid_files = uploaded_files[0] # Extract the inner list
else:
valid_files = uploaded_files
else:
valid_files = []
# Filter out None values and ensure we have valid files
valid_files = [f for f in valid_files if f is not None]
if not valid_files:
logger.warning("No valid files after filtering.")
return "Please upload at least one valid CSV file.", None, None, None, None
logger.info(f"Processing {len(valid_files)} valid files: {valid_files}")
try:
# Load data
df = load_logs(valid_files)
logger.info(f"Loaded {len(df)} log records from uploaded files.")
# Log table
log_table = df.head().to_dict(orient="records")
# Usage chart
logger.info("Generating usage plot...")
fig = plot_usage(df)
logger.info("Usage plot generated successfully.")
# Anomalies
anomaly_table = "Anomaly detection failed."
try:
anomalies = detect_anomalies(df)
anomaly_table = anomalies.to_dict(orient="records") if not anomalies.empty else "No anomalies detected."
except Exception as e:
logger.error(f"Anomaly detection failed: {e}")
# AMC expiries
amc_table = None
try:
if "amc_expiry" in df.columns:
logger.info("Processing AMC expiries...")
amc_df = upcoming_amc_devices(df)
amc_table = amc_df.to_dict(orient="records") if not amc_df.empty else "No upcoming AMC expiries."
else:
amc_table = "Column `amc_expiry` not found in uploaded data."
logger.warning("Missing `amc_expiry` column in data.")
except Exception as e:
logger.error(f"AMC processing failed: {e}")
amc_table = f"Error processing AMC expiries: {e}"
return log_table, fig, anomaly_table, amc_table, df
except Exception as e:
logger.error(f"Error processing files: {e}")
return f"Error: {e}", None, None, None, None
def generate_pdf_report(df):
    """Build the PDF report for *df*.

    Returns:
        ``(pdf_path, status_message)`` — ``pdf_path`` is ``None`` when no
        data is available or generation fails.
    """
    # Same singleton as the module-level logger (identical name).
    log = logging.getLogger(__name__)
    if df is None:
        log.warning("No data available for PDF generation.")
        return None, "Please upload CSV files first."
    log.info("Generating PDF report...")
    try:
        return generate_pdf(df), "PDF generated successfully."
    except Exception as e:
        log.error(f"Failed to generate PDF: {e}")
        return None, f"Error generating PDF: {e}"
# --- Dashboard layout -------------------------------------------------------
# NOTE(review): the stray "π " prefixes in the Markdown headers look like
# mis-decoded emoji from a copy/paste — confirm the intended glyphs before
# changing these runtime strings.
with gr.Blocks(title="Multi-Device LabOps Dashboard") as demo:
    gr.Markdown("# π Multi-Device LabOps Dashboard")
    # Upload row: accepts multiple CSV device logs at once.
    with gr.Row():
        file_input = gr.File(file_count="multiple", file_types=[".csv"], label="Upload Device Logs (CSV)")
    with gr.Row():
        submit_btn = gr.Button("Process Files")
    # First results row: raw log preview and the daily usage plot.
    with gr.Row():
        with gr.Column():
            gr.Markdown("## π Uploaded Logs")
            log_output = gr.Dataframe()
        with gr.Column():
            gr.Markdown("## π Daily Usage Chart")
            chart_output = gr.Plot()
    # Second results row: anomaly detections and upcoming AMC expiries.
    with gr.Row():
        with gr.Column():
            gr.Markdown("## π¨ Detected Anomalies")
            anomaly_output = gr.Dataframe()
        with gr.Column():
            gr.Markdown("## π Upcoming AMC Devices")
            amc_output = gr.Dataframe()
    # PDF export controls: trigger button, download slot, status text.
    with gr.Row():
        pdf_btn = gr.Button("π Generate PDF Report")
        pdf_output = gr.File(label="Download PDF Report")
        pdf_message = gr.Textbox(label="PDF Generation Status")
    # State to store the loaded dataframe between the two button callbacks.
    df_state = gr.State()
    # Connect inputs to outputs: process_files fills the dashboard and the
    # shared state; generate_pdf_report consumes that state on demand.
    submit_btn.click(
        fn=process_files,
        inputs=[file_input],
        outputs=[log_output, chart_output, anomaly_output, amc_output, df_state]
    )
    pdf_btn.click(
        fn=generate_pdf_report,
        inputs=[df_state],
        outputs=[pdf_output, pdf_message]
    )
if __name__ == "__main__":
    try:
        logger.info("Application starting...")
        # Bind to all interfaces so the app is reachable from outside a
        # container; 7860 is Gradio's conventional port.
        demo.launch(server_name="0.0.0.0", server_port=7860)
    except Exception:
        # Log with traceback, then re-raise so the process exits non-zero.
        logger.exception("Application failed to start")
        raise