File size: 4,484 Bytes
444ee18
 
 
 
 
 
f978017
444ee18
bffed60
 
 
 
 
444ee18
2b0cb4e
 
 
 
 
bffed60
8b11118
2b0cb4e
bffed60
2b0cb4e
444ee18
bffed60
 
 
 
 
 
 
8b11118
 
 
5529e78
bffed60
a915b02
5529e78
bffed60
444ee18
 
5529e78
bffed60
444ee18
 
bffed60
2b0cb4e
bffed60
 
 
 
444ee18
bffed60
 
444ee18
2b0cb4e
 
444ee18
2b0cb4e
444ee18
bffed60
 
 
 
 
2b0cb4e
5529e78
bffed60
 
 
 
 
 
 
 
 
374abdb
5529e78
2b0cb4e
5529e78
2b0cb4e
bffed60
444ee18
ed024cc
374abdb
2b0cb4e
 
 
 
 
bffed60
444ee18
bffed60
2b0cb4e
 
 
bffed60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f978017
374abdb
bffed60
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
import gradio as gr
import pandas as pd
import numpy as np
import pickle
import json
import tensorflow as tf
from tensorflow.keras.models import model_from_json
import os
import logging

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Initialize model components (populated in place by load_model() below).
model = None          # Keras model rebuilt from model_architecture.json + final_model.h5 weights
scaler = None         # unpickled from scaler.pkl; must expose .transform (likely sklearn — confirm)
metadata = {}         # parsed contents of metadata.json
feature_names = []    # metadata['feature_names']; drives both input validation and the Gradio UI
model_loaded = False  # True only after every artifact loaded successfully

def load_model():
    """Load the Keras model, scaler, and metadata from the working directory.

    Populates the module-level globals (model, scaler, metadata,
    feature_names, model_loaded). Never raises: any failure is logged with
    its traceback and leaves model_loaded False, so the app can still start
    and surface the problem at prediction time.

    Returns:
        bool: True if every artifact loaded successfully, else False.
        (Backward-compatible — the original returned None and the startup
        caller ignores the return value.)
    """
    global model, scaler, metadata, feature_names, model_loaded

    required_files = (
        'model_architecture.json',
        'final_model.h5',
        'scaler.pkl',
        'metadata.json',
    )
    try:
        # Fail fast, reporting ALL missing artifacts at once rather than
        # only the first one encountered.
        missing = [f for f in required_files if not os.path.exists(f)]
        if missing:
            raise FileNotFoundError(f"Missing required file(s): {', '.join(missing)}")

        logger.info("Loading model architecture...")
        with open('model_architecture.json', 'r') as json_file:
            model = model_from_json(json_file.read())

        logger.info("Loading model weights...")
        model.load_weights('final_model.h5')

        logger.info("Loading scaler...")
        # NOTE(review): pickle.load executes arbitrary code from the file —
        # only safe because scaler.pkl is a trusted, locally produced artifact.
        with open('scaler.pkl', 'rb') as f:
            scaler = pickle.load(f)

        logger.info("Loading metadata...")
        with open('metadata.json', 'r') as f:
            metadata = json.load(f)
        feature_names = metadata.get('feature_names', [])

        model_loaded = True
        logger.info("βœ… Model loaded successfully!")
        logger.info(f"Features: {feature_names}")

    except Exception:
        # logger.exception records the full traceback, not just str(e),
        # which is what you need to debug a failed deployment.
        logger.exception("❌ Model loading failed")
        model_loaded = False

    return model_loaded

# Load model at startup (import time) so feature_names is populated before
# the Gradio interface below builds one input component per feature.
load_model()

def predict(*args):
    """Predict student eligibility from one numeric value per feature.

    Args:
        *args: feature values in the same order as ``feature_names``
            (Gradio passes one value per input component).

    Returns:
        tuple[str, str, str]: (prediction label, probability, confidence)
        matching the three output textboxes. On any failure, returns
        ("Error", <message>, "") instead of raising, so the UI shows the
        problem rather than crashing the event handler.
    """
    try:
        if not model_loaded:
            raise RuntimeError("Model failed to load. Check server logs for details.")

        if len(args) != len(feature_names):
            raise ValueError(f"Expected {len(feature_names)} features, got {len(args)}")

        # Coerce each input to float, reporting which feature was bad.
        input_data = {}
        for name, val in zip(feature_names, args):
            try:
                input_data[name] = float(val)
            # float(None) raises TypeError (e.g. an empty gr.Number), so
            # catch it alongside ValueError.
            except (TypeError, ValueError):
                raise ValueError(f"Invalid value for {name}: {val}")

        # Column order must match what the scaler was fitted on.
        input_df = pd.DataFrame([input_data], columns=feature_names)
        scaled_input = scaler.transform(input_df)

        # Model emits a single sigmoid probability per row — presumably
        # P(eligible); TODO confirm against training code.
        probability = float(model.predict(scaled_input, verbose=0)[0][0])
        prediction = "Eligible" if probability > 0.5 else "Not Eligible"
        # Distance from the 0.5 decision boundary rescaled to [0, 1].
        confidence = abs(probability - 0.5) * 2

        # BUG FIX: the click/Examples wiring binds THREE output components,
        # so this function must return three values; the original returned
        # a single dict, which Gradio rejects with a value-count error.
        return prediction, f"{probability:.4f}", f"{confidence:.4f}"

    except Exception as e:
        logger.error(f"Prediction error: {str(e)}")
        return "Error", str(e), ""

# Create Gradio interface: numeric inputs on the left (one per feature),
# the three prediction textboxes on the right.
with gr.Blocks(title="Student Eligibility Predictor") as demo:
    gr.Markdown("# πŸŽ“ Student Eligibility Predictor")
    gr.Markdown("Predict student eligibility based on academic performance metrics")
    
    with gr.Row():
        with gr.Column():
            # One gr.Number per feature found in metadata.json.
            # NOTE(review): if load_model() failed, feature_names is empty and
            # the UI renders with no inputs at all — confirm that is intended.
            input_components = [gr.Number(label=name) for name in feature_names]
            predict_btn = gr.Button("Predict", variant="primary")
        with gr.Column():
            # These three outputs must match the shape of predict()'s return.
            prediction_output = gr.Textbox(label="Prediction")
            probability_output = gr.Textbox(label="Probability")
            confidence_output = gr.Textbox(label="Confidence")
    
    # Add examples if features exist
    if len(feature_names) > 0:
        # Build exactly one sample row, padded with zeros so its length
        # always equals len(feature_names).
        examples = []
        if len(feature_names) >= 3:
            examples.append([75, 80, 85] + [0]*(len(feature_names)-3))
        elif len(feature_names) == 2:
            examples.append([75, 80])
        else:
            examples.append([75])
        
        gr.Examples(
            examples=examples,
            inputs=input_components,
            outputs=[prediction_output, probability_output, confidence_output],
            fn=predict,
            cache_examples=False  # avoids running the model at build time
        )
    
    predict_btn.click(
        fn=predict,
        inputs=input_components,
        outputs=[prediction_output, probability_output, confidence_output]
    )

if __name__ == "__main__":
    # Bind to 0.0.0.0 so the app is reachable from outside the container
    # (standard for Hugging Face Spaces / Docker); 7860 is Gradio's default port.
    demo.launch(server_name="0.0.0.0", server_port=7860)