Bibek Mukherjee
committed on
Upload 77 files
This view is limited to 50 files because it contains too many changes. See raw diff
- .env +3 -0
- .gitattributes +38 -35
- .gitignore +55 -0
- Dockerfile +14 -0
- README.md +74 -12
- data/HR-Employee-Attrition.csv +0 -0
- data/diabetes.csv +769 -0
- data/indian_liver_patient.csv +584 -0
- data/loan_approval_dataset.csv +0 -0
- docker-compose.yml +13 -0
- loan_applications.db +3 -0
- loan_approval_dataset.csv +0 -0
- models/__init__.py +4 -0
- models/__pycache__/__init__.cpython-311.pyc +3 -0
- models/attrition_model.joblib +3 -0
- models/attrition_model.pkl +3 -0
- models/attrition_preprocessor.joblib +3 -0
- models/attrition_preprocessor.pkl +3 -0
- models/diabetes_feature_names.pkl +3 -0
- models/diabetes_model.pkl +3 -0
- models/diabetes_model_metrics.pkl +3 -0
- models/liver_disease_model.pkl +3 -0
- models/loan_explainer.joblib +3 -0
- models/loan_feature_names.joblib +3 -0
- models/loan_label_encoders.joblib +3 -0
- models/loan_model.joblib +3 -0
- models/loan_model.pkl +3 -0
- models/loan_scaler.joblib +3 -0
- requirements.txt +0 -0
- roadmap.txt +77 -0
- src/__init__.py +4 -0
- src/__pycache__/__init__.cpython-311.pyc +3 -0
- src/__pycache__/train_loan_model.cpython-311.pyc +3 -0
- src/api/__init__.py +4 -0
- src/api/__pycache__/__init__.cpython-311.pyc +3 -0
- src/api/__pycache__/attrition_model.cpython-311.pyc +3 -0
- src/api/__pycache__/diabetes_model.cpython-311.pyc +3 -0
- src/api/__pycache__/liver_disease_model.cpython-311.pyc +3 -0
- src/api/__pycache__/liver_model.cpython-311.pyc +3 -0
- src/api/__pycache__/loan_model.cpython-311.pyc +3 -0
- src/api/__pycache__/main.cpython-311.pyc +3 -0
- src/api/attrition_model.py +243 -0
- src/api/diabetes_model.py +320 -0
- src/api/liver_disease_model.py +266 -0
- src/api/liver_model.py +146 -0
- src/api/loan_applications.db +3 -0
- src/api/loan_model.py +288 -0
- src/api/main.py +344 -0
- src/data/__pycache__/database.cpython-311.pyc +3 -0
- src/data/database.py +51 -0
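Several of the files above are serialized model artifacts under models/ (for example loan_model.joblib, loan_scaler.joblib, loan_label_encoders.joblib, loan_feature_names.joblib). As a rough illustration only, the sketch below loads them with joblib; what each file actually contains (estimator, scaler, encoder dict, feature list) is an assumption based on the file names, not something this diff confirms.

```python
# Hypothetical sketch: loading the serialized loan-model artifacts with joblib.
# The exact contents of each file (fitted estimator, scaler, dict of label
# encoders, list of feature names) are assumptions based on the file names.
import joblib

model = joblib.load("models/loan_model.joblib")              # fitted estimator (assumed)
scaler = joblib.load("models/loan_scaler.joblib")            # fitted preprocessing scaler (assumed)
encoders = joblib.load("models/loan_label_encoders.joblib")  # per-column encoders (assumed)
feature_names = joblib.load("models/loan_feature_names.joblib")

print(type(model).__name__, len(feature_names))
```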
.env
ADDED
@@ -0,0 +1,3 @@
+API_URL=http://localhost:8000
+# Change this to your deployed API URL in production
+# API_URL=https://your-api-url.onrender.com
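.env only defines API_URL, which the Streamlit frontend presumably reads to locate the FastAPI backend. A minimal sketch of resolving that value follows; the use of python-dotenv is an assumption not confirmed by this commit, and plain os.environ works as well if the variable is exported by the shell or the container runtime.

```python
# Hypothetical sketch: resolving API_URL the way the frontend is likely to.
# python-dotenv is an assumption; it simply loads .env into the environment.
import os

try:
    from dotenv import load_dotenv  # optional dependency (assumption)
    load_dotenv()
except ImportError:
    pass

API_URL = os.getenv("API_URL", "http://localhost:8000")
print(f"Calling backend at {API_URL}")
```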
.gitattributes
CHANGED
@@ -1,35 +1,38 @@
-*.7z filter=lfs diff=lfs merge=lfs -text
-*.arrow filter=lfs diff=lfs merge=lfs -text
-*.bin filter=lfs diff=lfs merge=lfs -text
-*.bz2 filter=lfs diff=lfs merge=lfs -text
-*.ckpt filter=lfs diff=lfs merge=lfs -text
-*.ftz filter=lfs diff=lfs merge=lfs -text
-*.gz filter=lfs diff=lfs merge=lfs -text
-*.h5 filter=lfs diff=lfs merge=lfs -text
-*.joblib filter=lfs diff=lfs merge=lfs -text
-*.lfs.* filter=lfs diff=lfs merge=lfs -text
-*.mlmodel filter=lfs diff=lfs merge=lfs -text
-*.model filter=lfs diff=lfs merge=lfs -text
-*.msgpack filter=lfs diff=lfs merge=lfs -text
-*.npy filter=lfs diff=lfs merge=lfs -text
-*.npz filter=lfs diff=lfs merge=lfs -text
-*.onnx filter=lfs diff=lfs merge=lfs -text
-*.ot filter=lfs diff=lfs merge=lfs -text
-*.parquet filter=lfs diff=lfs merge=lfs -text
-*.pb filter=lfs diff=lfs merge=lfs -text
-*.pickle filter=lfs diff=lfs merge=lfs -text
-*.pkl filter=lfs diff=lfs merge=lfs -text
-*.pt filter=lfs diff=lfs merge=lfs -text
-*.pth filter=lfs diff=lfs merge=lfs -text
-*.rar filter=lfs diff=lfs merge=lfs -text
-*.safetensors filter=lfs diff=lfs merge=lfs -text
-saved_model/**/* filter=lfs diff=lfs merge=lfs -text
-*.tar.* filter=lfs diff=lfs merge=lfs -text
-*.tar filter=lfs diff=lfs merge=lfs -text
-*.tflite filter=lfs diff=lfs merge=lfs -text
-*.tgz filter=lfs diff=lfs merge=lfs -text
-*.wasm filter=lfs diff=lfs merge=lfs -text
-*.xz filter=lfs diff=lfs merge=lfs -text
-*.zip filter=lfs diff=lfs merge=lfs -text
-*.zst filter=lfs diff=lfs merge=lfs -text
-*tfevents* filter=lfs diff=lfs merge=lfs -text
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
+*.db filter=lfs diff=lfs merge=lfs -text
+*.exe filter=lfs diff=lfs merge=lfs -text
+*.pyc filter=lfs diff=lfs merge=lfs -text
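The three added rules route *.db, *.exe, and *.pyc files through Git LFS, matching the binary artifacts this commit adds (loan_applications.db and the compiled __pycache__ files). Rules of this form are typically generated with `git lfs track`, e.g. `git lfs track "*.db"`, which appends the corresponding filter line to .gitattributes.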
.gitignore
ADDED
@@ -0,0 +1,55 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual Environment
+venv/
+ENV/
+
+# IDE
+.idea/
+.vscode/
+*.swp
+*.swo
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Logs
+*.log
+
+# Local development
+.env
+.env.local
+
+# Database
+*.db
+
+# Model files
+models/
+*.joblib
+*.pkl
+*.h5
+*.model
+
+# Git LFS
+.gitattributes
Dockerfile
ADDED
@@ -0,0 +1,14 @@
+FROM python:3.10-slim
+
+WORKDIR /code
+
+COPY ./requirements.txt /code/requirements.txt
+COPY ./src /code/src
+COPY ./models /code/models
+COPY ./data /code/data
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+EXPOSE 7860
+
+CMD ["streamlit", "run", "src/frontend/app.py", "--server.address", "0.0.0.0", "--server.port", "7860"]
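The Dockerfile copies the source, models, and data into /code, installs the requirements, and serves the Streamlit frontend on port 7860. Assuming the image has been built and started with something like `docker build -t ai-dashboard .` followed by `docker run -p 7860:7860 ai-dashboard` (the image tag here is hypothetical), the sketch below is one way to confirm the app answers on the exposed port.

```python
# Hypothetical smoke check: verify the containerised Streamlit app responds
# on the port exposed by the Dockerfile. Assumes the container is already
# running and publishing port 7860 on localhost.
from urllib.request import urlopen

with urlopen("http://localhost:7860", timeout=10) as resp:
    # Streamlit serves its single-page app at the root URL.
    print("HTTP", resp.status)
    assert resp.status == 200
```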
README.md
CHANGED
@@ -1,12 +1,74 @@
----
-title:
-emoji:
-colorFrom:
-colorTo:
-sdk: streamlit
-sdk_version: 1.
-app_file: app.py
-pinned: false
----
-
-
+---
+title: AI Prediction Dashboard
+emoji: 🎯
+colorFrom: blue
+colorTo: indigo
+sdk: streamlit
+sdk_version: 1.32.0
+app_file: src/frontend/app.py
+pinned: false
+---
+
+# AI Prediction Dashboard
+
+A comprehensive AI-powered prediction dashboard that provides insights into:
+- Loan Approval Predictions
+- Employee Attrition Analysis
+- Healthcare Risk Assessment (Diabetes and Liver Disease)
+
+## Features
+- Interactive prediction interfaces
+- Detailed explanations and visualizations
+- Real-time risk assessment
+- Personalized recommendations
+
+## How to Use
+1. Select a prediction model from the dashboard
+2. Input the required information
+3. Get instant predictions with detailed analysis
+4. View personalized recommendations
+
+## Technical Details
+- Built with Streamlit
+- Powered by machine learning models
+- Real-time API integration
+- Interactive visualizations using Plotly
+
+## Technologies
+
+- Python
+- Streamlit
+- FastAPI
+- Scikit-learn
+- Pandas
+- NumPy
+- Plotly
+
+## Setup
+
+1. Install dependencies:
+```bash
+pip install -r requirements.txt
+```
+2. Run the api:
+```bash
+cd src/api
+python -m uvicorn main:app --reload
+```
+
+3. Run the application:
+```bash
+cd src/frontend
+streamlit run app.py
+```
+
+## Models
+
+The system includes pre-trained models for:
+- Loan approval prediction
+- Employee attrition prediction
+- Healthcare predictions (diabetes and liver disease)
+
+## Contributors
+
+- Team Syntax Squad
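The setup steps above start the FastAPI backend with uvicorn on port 8000 and the Streamlit app separately. One quick way to confirm the backend is up, without assuming any project-specific route, is FastAPI's auto-generated OpenAPI schema at /openapi.json (the interactive docs live at /docs); the sketch below relies only on those FastAPI defaults.

```python
# Hypothetical check that the backend started by `python -m uvicorn main:app --reload`
# is reachable. /openapi.json and /docs are FastAPI defaults; no project-specific
# endpoint names are assumed here.
import json
from urllib.request import urlopen

with urlopen("http://localhost:8000/openapi.json", timeout=10) as resp:
    schema = json.load(resp)

print("API title:", schema.get("info", {}).get("title"))
print("Available paths:", sorted(schema.get("paths", {})))
```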
data/HR-Employee-Attrition.csv
ADDED
The diff for this file is too large to render.
See raw diff
data/diabetes.csv
ADDED
@@ -0,0 +1,769 @@
+Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome
+6,148,72,35,0,33.6,0.627,50,1
+1,85,66,29,0,26.6,0.351,31,0
+8,183,64,0,0,23.3,0.672,32,1
(765 more data rows; see raw diff)
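data/diabetes.csv follows the schema in its header row, with Outcome as the binary label. The sketch below shows one way the training code (for example src/api/diabetes_model.py) might load and split it; the pandas/scikit-learn usage and the split parameters are illustrative assumptions based on the dependencies listed in the README, not code taken from the module.

```python
# Hypothetical sketch: loading data/diabetes.csv with the header shown in this
# commit. Column names come from the file; the train/test split and the choice
# of Outcome as the target are illustrative assumptions.
import pandas as pd
from sklearn.model_selection import train_test_split

df = pd.read_csv("data/diabetes.csv")
X = df.drop(columns=["Outcome"])
y = df["Outcome"]

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, stratify=y, random_state=42
)
print(X_train.shape, X_test.shape)
```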
data/indian_liver_patient.csv
ADDED
@@ -0,0 +1,584 @@
+Age,Gender,Total_Bilirubin,Direct_Bilirubin,Alkaline_Phosphotase,Alamine_Aminotransferase,Aspartate_Aminotransferase,Total_Protiens,Albumin,Albumin_and_Globulin_Ratio,Dataset
+65,Female,0.7,0.1,187,16,18,6.8,3.3,0.9,1
+62,Male,10.9,5.5,699,64,100,7.5,3.2,0.74,1
+62,Male,7.3,4.1,490,60,68,7,3.3,0.89,1
(580 more data rows; see raw diff)
|
279 |
+
60,Male,2,0.8,190,45,40,6,2.8,0.8,1
|
280 |
+
45,Male,2.2,0.8,209,25,20,8,4,1,1
|
281 |
+
48,Female,1,1.4,144,18,14,8.3,4.2,1,1
|
282 |
+
58,Male,0.8,0.2,123,56,48,6,3,1,1
|
283 |
+
50,Male,0.7,0.2,192,18,15,7.4,4.2,1.3,2
|
284 |
+
50,Male,0.7,0.2,188,12,14,7,3.4,0.9,1
|
285 |
+
18,Male,1.3,0.7,316,10,21,6,2.1,0.5,2
|
286 |
+
18,Male,0.9,0.3,300,30,48,8,4,1,1
|
287 |
+
13,Male,1.5,0.5,575,29,24,7.9,3.9,0.9,1
|
288 |
+
34,Female,0.8,0.2,192,15,12,8.6,4.7,1.2,1
|
289 |
+
43,Male,1.3,0.6,155,15,20,8,4,1,2
|
290 |
+
50,Female,1,0.5,239,16,39,7.5,3.7,0.9,1
|
291 |
+
57,Male,4.5,2.3,315,120,105,7,4,1.3,1
|
292 |
+
45,Female,1,0.3,250,48,44,8.6,4.3,1,1
|
293 |
+
60,Male,0.7,0.2,174,32,14,7.8,4.2,1.1,2
|
294 |
+
45,Male,0.6,0.2,245,22,24,7.1,3.4,0.9,1
|
295 |
+
23,Male,1.1,0.5,191,37,41,7.7,4.3,1.2,2
|
296 |
+
22,Male,2.4,1,340,25,21,8.3,4.5,1.1,1
|
297 |
+
22,Male,0.6,0.2,202,78,41,8,3.9,0.9,1
|
298 |
+
74,Female,0.9,0.3,234,16,19,7.9,4,1,1
|
299 |
+
25,Female,0.9,0.3,159,24,25,6.9,4.4,1.7,2
|
300 |
+
31,Female,1.1,0.3,190,26,15,7.9,3.8,0.9,1
|
301 |
+
24,Female,0.9,0.2,195,40,35,7.4,4.1,1.2,2
|
302 |
+
58,Male,0.8,0.2,180,32,25,8.2,4.4,1.1,2
|
303 |
+
51,Female,0.9,0.2,280,21,30,6.7,3.2,0.8,1
|
304 |
+
50,Female,1.7,0.6,430,28,32,6.8,3.5,1,1
|
305 |
+
50,Male,0.7,0.2,206,18,17,8.4,4.2,1,2
|
306 |
+
55,Female,0.8,0.2,155,21,17,6.9,3.8,1.4,1
|
307 |
+
54,Female,1.4,0.7,195,36,16,7.9,3.7,0.9,2
|
308 |
+
48,Male,1.6,1,588,74,113,7.3,2.4,0.4,1
|
309 |
+
30,Male,0.8,0.2,174,21,47,4.6,2.3,1,1
|
310 |
+
45,Female,0.8,0.2,165,22,18,8.2,4.1,1,1
|
311 |
+
48,Female,1.1,0.7,527,178,250,8,4.2,1.1,1
|
312 |
+
51,Male,0.8,0.2,175,48,22,8.1,4.6,1.3,1
|
313 |
+
54,Female,23.2,12.6,574,43,47,7.2,3.5,0.9,1
|
314 |
+
27,Male,1.3,0.6,106,25,54,8.5,4.8,,2
|
315 |
+
30,Female,0.8,0.2,158,25,22,7.9,4.5,1.3,2
|
316 |
+
26,Male,2,0.9,195,24,65,7.8,4.3,1.2,1
|
317 |
+
22,Male,0.9,0.3,179,18,21,6.7,3.7,1.2,2
|
318 |
+
44,Male,0.9,0.2,182,29,82,7.1,3.7,1,2
|
319 |
+
35,Male,0.7,0.2,198,42,30,6.8,3.4,1,1
|
320 |
+
38,Male,3.7,2.2,216,179,232,7.8,4.5,1.3,1
|
321 |
+
14,Male,0.9,0.3,310,21,16,8.1,4.2,1,2
|
322 |
+
30,Female,0.7,0.2,63,31,27,5.8,3.4,1.4,1
|
323 |
+
30,Female,0.8,0.2,198,30,58,5.2,2.8,1.1,1
|
324 |
+
36,Male,1.7,0.5,205,36,34,7.1,3.9,1.2,1
|
325 |
+
12,Male,0.8,0.2,302,47,67,6.7,3.5,1.1,2
|
326 |
+
60,Male,2.6,1.2,171,42,37,5.4,2.7,1,1
|
327 |
+
42,Male,0.8,0.2,158,27,23,6.7,3.1,0.8,2
|
328 |
+
36,Female,1.2,0.4,358,160,90,8.3,4.4,1.1,2
|
329 |
+
24,Male,3.3,1.6,174,11,33,7.6,3.9,1,2
|
330 |
+
43,Male,0.8,0.2,192,29,20,6,2.9,0.9,2
|
331 |
+
21,Male,0.7,0.2,211,14,23,7.3,4.1,1.2,2
|
332 |
+
26,Male,2,0.9,157,54,68,6.1,2.7,0.8,1
|
333 |
+
26,Male,1.7,0.6,210,62,56,5.4,2.2,0.6,1
|
334 |
+
26,Male,7.1,3.3,258,80,113,6.2,2.9,0.8,1
|
335 |
+
36,Female,0.7,0.2,152,21,25,5.9,3.1,1.1,2
|
336 |
+
13,Female,0.7,0.2,350,17,24,7.4,4,1.1,1
|
337 |
+
13,Female,0.7,0.1,182,24,19,8.9,4.9,1.2,1
|
338 |
+
75,Male,6.7,3.6,458,198,143,6.2,3.2,1,1
|
339 |
+
75,Male,2.5,1.2,375,85,68,6.4,2.9,0.8,1
|
340 |
+
75,Male,1.8,0.8,405,79,50,6.1,2.9,0.9,1
|
341 |
+
75,Male,1.4,0.4,215,50,30,5.9,2.6,0.7,1
|
342 |
+
75,Male,0.9,0.2,206,44,33,6.2,2.9,0.8,1
|
343 |
+
36,Female,0.8,0.2,650,70,138,6.6,3.1,0.8,1
|
344 |
+
35,Male,0.8,0.2,198,36,32,7,4,1.3,2
|
345 |
+
70,Male,3.1,1.6,198,40,28,5.6,2,0.5,1
|
346 |
+
37,Male,0.8,0.2,195,60,40,8.2,5,1.5,2
|
347 |
+
60,Male,2.9,1.3,230,32,44,5.6,2,0.5,1
|
348 |
+
46,Male,0.6,0.2,115,14,11,6.9,3.4,0.9,1
|
349 |
+
38,Male,0.7,0.2,216,349,105,7,3.5,1,1
|
350 |
+
70,Male,1.3,0.4,358,19,14,6.1,2.8,0.8,1
|
351 |
+
49,Female,0.8,0.2,158,19,15,6.6,3.6,1.2,2
|
352 |
+
37,Male,1.8,0.8,145,62,58,5.7,2.9,1,1
|
353 |
+
37,Male,1.3,0.4,195,41,38,5.3,2.1,0.6,1
|
354 |
+
26,Female,0.7,0.2,144,36,33,8.2,4.3,1.1,1
|
355 |
+
48,Female,1.4,0.8,621,110,176,7.2,3.9,1.1,1
|
356 |
+
48,Female,0.8,0.2,150,25,23,7.5,3.9,1,1
|
357 |
+
19,Male,1.4,0.8,178,13,26,8,4.6,1.3,2
|
358 |
+
33,Male,0.7,0.2,256,21,30,8.5,3.9,0.8,1
|
359 |
+
33,Male,2.1,0.7,205,50,38,6.8,3,0.7,1
|
360 |
+
37,Male,0.7,0.2,176,28,34,5.6,2.6,0.8,1
|
361 |
+
69,Female,0.8,0.2,146,42,70,8.4,4.9,1.4,2
|
362 |
+
24,Male,0.7,0.2,218,47,26,6.6,3.3,1,1
|
363 |
+
65,Female,0.7,0.2,182,23,28,6.8,2.9,0.7,2
|
364 |
+
55,Male,1.1,0.3,215,21,15,6.2,2.9,0.8,2
|
365 |
+
42,Female,0.9,0.2,165,26,29,8.5,4.4,1,2
|
366 |
+
21,Male,0.8,0.2,183,33,57,6.8,3.5,1,2
|
367 |
+
40,Male,0.7,0.2,176,28,43,5.3,2.4,0.8,2
|
368 |
+
16,Male,0.7,0.2,418,28,35,7.2,4.1,1.3,2
|
369 |
+
60,Male,2.2,1,271,45,52,6.1,2.9,0.9,2
|
370 |
+
42,Female,0.8,0.2,182,22,20,7.2,3.9,1.1,1
|
371 |
+
58,Female,0.8,0.2,130,24,25,7,4,1.3,1
|
372 |
+
54,Female,22.6,11.4,558,30,37,7.8,3.4,0.8,1
|
373 |
+
33,Male,0.8,0.2,135,30,29,7.2,4.4,1.5,2
|
374 |
+
48,Male,0.7,0.2,326,29,17,8.7,5.5,1.7,1
|
375 |
+
25,Female,0.7,0.1,140,32,25,7.6,4.3,1.3,2
|
376 |
+
56,Female,0.7,0.1,145,26,23,7,4,1.3,2
|
377 |
+
47,Male,3.5,1.6,206,32,31,6.8,3.4,1,1
|
378 |
+
33,Male,0.7,0.1,168,35,33,7,3.7,1.1,1
|
379 |
+
20,Female,0.6,0.2,202,12,13,6.1,3,0.9,2
|
380 |
+
50,Female,0.7,0.1,192,20,41,7.3,3.3,0.8,1
|
381 |
+
72,Male,0.7,0.2,185,16,22,7.3,3.7,1,2
|
382 |
+
50,Male,1.7,0.8,331,36,53,7.3,3.4,0.9,1
|
383 |
+
39,Male,0.6,0.2,188,28,43,8.1,3.3,0.6,1
|
384 |
+
58,Female,0.7,0.1,172,27,22,6.7,3.2,0.9,1
|
385 |
+
60,Female,1.4,0.7,159,10,12,4.9,2.5,1,2
|
386 |
+
34,Male,3.7,2.1,490,115,91,6.5,2.8,0.7,1
|
387 |
+
50,Male,0.8,0.2,152,29,30,7.4,4.1,1.3,1
|
388 |
+
38,Male,2.7,1.4,105,25,21,7.5,4.2,1.2,2
|
389 |
+
51,Male,0.8,0.2,160,34,20,6.9,3.7,1.1,1
|
390 |
+
46,Male,0.8,0.2,160,31,40,7.3,3.8,1.1,1
|
391 |
+
72,Male,0.6,0.1,102,31,35,6.3,3.2,1,1
|
392 |
+
72,Male,0.8,0.2,148,23,35,6,3,1,1
|
393 |
+
75,Male,0.9,0.2,162,25,20,6.9,3.7,1.1,1
|
394 |
+
41,Male,7.5,4.3,149,94,92,6.3,3.1,0.9,1
|
395 |
+
41,Male,2.7,1.3,580,142,68,8,4,1,1
|
396 |
+
48,Female,1,0.3,310,37,56,5.9,2.5,0.7,1
|
397 |
+
45,Male,0.8,0.2,140,24,20,6.3,3.2,1,2
|
398 |
+
74,Male,1,0.3,175,30,32,6.4,3.4,1.1,1
|
399 |
+
78,Male,1,0.3,152,28,70,6.3,3.1,0.9,1
|
400 |
+
38,Male,0.8,0.2,208,25,50,7.1,3.7,1,1
|
401 |
+
27,Male,1,0.2,205,137,145,6,3,1,1
|
402 |
+
66,Female,0.7,0.2,162,24,20,6.4,3.2,1,2
|
403 |
+
50,Male,7.3,3.7,92,44,236,6.8,1.6,0.3,1
|
404 |
+
42,Female,0.5,0.1,162,155,108,8.1,4,0.9,1
|
405 |
+
65,Male,0.7,0.2,199,19,22,6.3,3.6,1.3,2
|
406 |
+
22,Male,0.8,0.2,198,20,26,6.8,3.9,1.3,1
|
407 |
+
31,Female,0.8,0.2,215,15,21,7.6,4,1.1,1
|
408 |
+
45,Male,0.7,0.2,180,18,58,6.7,3.7,1.2,2
|
409 |
+
12,Male,1,0.2,719,157,108,7.2,3.7,1,1
|
410 |
+
48,Male,2.4,1.1,554,141,73,7.5,3.6,0.9,1
|
411 |
+
48,Male,5,2.6,555,284,190,6.5,3.3,1,1
|
412 |
+
18,Male,1.4,0.6,215,440,850,5,1.9,0.6,1
|
413 |
+
23,Female,2.3,0.8,509,28,44,6.9,2.9,0.7,2
|
414 |
+
65,Male,4.9,2.7,190,33,71,7.1,2.9,0.7,1
|
415 |
+
48,Male,0.7,0.2,208,15,30,4.6,2.1,0.8,2
|
416 |
+
65,Male,1.4,0.6,260,28,24,5.2,2.2,0.7,2
|
417 |
+
70,Male,1.3,0.3,690,93,40,3.6,2.7,0.7,1
|
418 |
+
70,Male,0.6,0.1,862,76,180,6.3,2.7,0.75,1
|
419 |
+
11,Male,0.7,0.1,592,26,29,7.1,4.2,1.4,2
|
420 |
+
50,Male,4.2,2.3,450,69,50,7,3,0.7,1
|
421 |
+
55,Female,8.2,3.9,1350,52,65,6.7,2.9,0.7,1
|
422 |
+
55,Female,10.9,5.1,1350,48,57,6.4,2.3,0.5,1
|
423 |
+
26,Male,1,0.3,163,48,71,7.1,3.7,1,2
|
424 |
+
41,Male,1.2,0.5,246,34,42,6.9,3.4,0.97,1
|
425 |
+
53,Male,1.6,0.9,178,44,59,6.5,3.9,1.5,2
|
426 |
+
32,Female,0.7,0.1,240,12,15,7,3,0.7,1
|
427 |
+
58,Male,0.4,0.1,100,59,126,4.3,2.5,1.4,1
|
428 |
+
45,Male,1.3,0.6,166,49,42,5.6,2.5,0.8,2
|
429 |
+
65,Male,0.9,0.2,170,33,66,7,3,0.75,1
|
430 |
+
52,Female,0.6,0.1,194,10,12,6.9,3.3,0.9,2
|
431 |
+
73,Male,1.9,0.7,1750,102,141,5.5,2,0.5,1
|
432 |
+
53,Female,0.7,0.1,182,20,33,4.8,1.9,0.6,1
|
433 |
+
47,Female,0.8,0.2,236,10,13,6.7,2.9,0.76,2
|
434 |
+
29,Male,0.7,0.2,165,55,87,7.5,4.6,1.58,1
|
435 |
+
41,Female,0.9,0.2,201,31,24,7.6,3.8,1,2
|
436 |
+
30,Female,0.7,0.2,194,32,36,7.5,3.6,0.92,2
|
437 |
+
17,Female,0.5,0.1,206,28,21,7.1,4.5,1.7,2
|
438 |
+
23,Male,1,0.3,212,41,80,6.2,3.1,1,1
|
439 |
+
35,Male,1.6,0.7,157,15,44,5.2,2.5,0.9,1
|
440 |
+
65,Male,0.8,0.2,162,30,90,3.8,1.4,0.5,1
|
441 |
+
42,Female,0.8,0.2,168,25,18,6.2,3.1,1,1
|
442 |
+
49,Female,0.8,0.2,198,23,20,7,4.3,1.5,1
|
443 |
+
42,Female,2.3,1.1,292,29,39,4.1,1.8,0.7,1
|
444 |
+
42,Female,7.4,3.6,298,52,102,4.6,1.9,0.7,1
|
445 |
+
42,Female,0.7,0.2,152,35,81,6.2,3.2,1.06,1
|
446 |
+
61,Male,0.8,0.2,163,18,19,6.3,2.8,0.8,2
|
447 |
+
17,Male,0.9,0.2,279,40,46,7.3,4,1.2,2
|
448 |
+
54,Male,0.8,0.2,181,35,20,5.5,2.7,0.96,1
|
449 |
+
45,Female,23.3,12.8,1550,425,511,7.7,3.5,0.8,1
|
450 |
+
48,Female,0.8,0.2,142,26,25,6,2.6,0.7,1
|
451 |
+
48,Female,0.9,0.2,173,26,27,6.2,3.1,1,1
|
452 |
+
65,Male,7.9,4.3,282,50,72,6,3,1,1
|
453 |
+
35,Male,0.8,0.2,279,20,25,7.2,3.2,0.8,1
|
454 |
+
58,Male,0.9,0.2,1100,25,36,7.1,3.5,0.9,1
|
455 |
+
46,Male,0.7,0.2,224,40,23,7.1,3,0.7,1
|
456 |
+
28,Male,0.6,0.2,159,15,16,7,3.5,1,2
|
457 |
+
21,Female,0.6,0.1,186,25,22,6.8,3.4,1,1
|
458 |
+
32,Male,0.7,0.2,189,22,43,7.4,3.1,0.7,2
|
459 |
+
61,Male,0.8,0.2,192,28,35,6.9,3.4,0.9,2
|
460 |
+
26,Male,6.8,3.2,140,37,19,3.6,0.9,0.3,1
|
461 |
+
65,Male,1.1,0.5,686,16,46,5.7,1.5,0.35,1
|
462 |
+
22,Female,2.2,1,215,159,51,5.5,2.5,0.8,1
|
463 |
+
28,Female,0.8,0.2,309,55,23,6.8,4.1,1.51,1
|
464 |
+
38,Male,0.7,0.2,110,22,18,6.4,2.5,0.64,1
|
465 |
+
25,Male,0.8,0.1,130,23,42,8,4,1,1
|
466 |
+
45,Female,0.7,0.2,164,21,53,4.5,1.4,0.45,2
|
467 |
+
45,Female,0.6,0.1,270,23,42,5.1,2,0.5,2
|
468 |
+
28,Female,0.6,0.1,137,22,16,4.9,1.9,0.6,2
|
469 |
+
28,Female,1,0.3,90,18,108,6.8,3.1,0.8,2
|
470 |
+
66,Male,1,0.3,190,30,54,5.3,2.1,0.6,1
|
471 |
+
66,Male,0.8,0.2,165,22,32,4.4,2,0.8,1
|
472 |
+
66,Male,1.1,0.5,167,13,56,7.1,4.1,1.36,1
|
473 |
+
49,Female,0.6,0.1,185,17,26,6.6,2.9,0.7,2
|
474 |
+
42,Male,0.7,0.2,197,64,33,5.8,2.4,0.7,2
|
475 |
+
42,Male,1,0.3,154,38,21,6.8,3.9,1.3,2
|
476 |
+
35,Male,2,1.1,226,33,135,6,2.7,0.8,2
|
477 |
+
38,Male,2.2,1,310,119,42,7.9,4.1,1,2
|
478 |
+
38,Male,0.9,0.3,310,15,25,5.5,2.7,1,1
|
479 |
+
55,Male,0.6,0.2,220,24,32,5.1,2.4,0.88,1
|
480 |
+
33,Male,7.1,3.7,196,622,497,6.9,3.6,1.09,1
|
481 |
+
33,Male,3.4,1.6,186,779,844,7.3,3.2,0.7,1
|
482 |
+
7,Male,0.5,0.1,352,28,51,7.9,4.2,1.1,2
|
483 |
+
45,Male,2.3,1.3,282,132,368,7.3,4,1.2,1
|
484 |
+
45,Male,1.1,0.4,92,91,188,7.2,3.8,1.11,1
|
485 |
+
30,Male,0.8,0.2,182,46,57,7.8,4.3,1.2,2
|
486 |
+
62,Male,5,2.1,103,18,40,5,2.1,1.72,1
|
487 |
+
22,Female,6.7,3.2,850,154,248,6.2,2.8,0.8,1
|
488 |
+
42,Female,0.8,0.2,195,18,15,6.7,3,0.8,1
|
489 |
+
32,Male,0.7,0.2,276,102,190,6,2.9,0.93,1
|
490 |
+
60,Male,0.7,0.2,171,31,26,7,3.5,1,2
|
491 |
+
65,Male,0.8,0.1,146,17,29,5.9,3.2,1.18,2
|
492 |
+
53,Female,0.8,0.2,193,96,57,6.7,3.6,1.16,1
|
493 |
+
27,Male,1,0.3,180,56,111,6.8,3.9,1.85,2
|
494 |
+
35,Female,1,0.3,805,133,103,7.9,3.3,0.7,1
|
495 |
+
65,Male,0.7,0.2,265,30,28,5.2,1.8,0.52,2
|
496 |
+
25,Male,0.7,0.2,185,196,401,6.5,3.9,1.5,1
|
497 |
+
32,Male,0.7,0.2,165,31,29,6.1,3,0.96,2
|
498 |
+
24,Male,1,0.2,189,52,31,8,4.8,1.5,1
|
499 |
+
67,Male,2.2,1.1,198,42,39,7.2,3,0.7,1
|
500 |
+
68,Male,1.8,0.5,151,18,22,6.5,4,1.6,1
|
501 |
+
55,Male,3.6,1.6,349,40,70,7.2,2.9,0.6,1
|
502 |
+
70,Male,2.7,1.2,365,62,55,6,2.4,0.6,1
|
503 |
+
36,Male,2.8,1.5,305,28,76,5.9,2.5,0.7,1
|
504 |
+
42,Male,0.8,0.2,127,29,30,4.9,2.7,1.2,1
|
505 |
+
53,Male,19.8,10.4,238,39,221,8.1,2.5,0.4,1
|
506 |
+
32,Male,30.5,17.1,218,39,79,5.5,2.7,0.9,1
|
507 |
+
32,Male,32.6,14.1,219,95,235,5.8,3.1,1.1,1
|
508 |
+
56,Male,17.7,8.8,239,43,185,5.6,2.4,0.7,1
|
509 |
+
50,Male,0.9,0.3,194,190,73,7.5,3.9,1,1
|
510 |
+
46,Male,18.4,8.5,450,119,230,7.5,3.3,0.7,1
|
511 |
+
46,Male,20,10,254,140,540,5.4,3,1.2,1
|
512 |
+
37,Female,0.8,0.2,205,31,36,9.2,4.6,1,2
|
513 |
+
45,Male,2.2,1.6,320,37,48,6.8,3.4,1,1
|
514 |
+
56,Male,1,0.3,195,22,28,5.8,2.6,0.8,2
|
515 |
+
69,Male,0.9,0.2,215,32,24,6.9,3,0.7,1
|
516 |
+
49,Male,1,0.3,230,48,58,8.4,4.2,1,1
|
517 |
+
49,Male,3.9,2.1,189,65,181,6.9,3,0.7,1
|
518 |
+
60,Male,0.9,0.3,168,16,24,6.7,3,0.8,1
|
519 |
+
28,Male,0.9,0.2,215,50,28,8,4,1,1
|
520 |
+
45,Male,2.9,1.4,210,74,68,7.2,3.6,1,1
|
521 |
+
35,Male,26.3,12.1,108,168,630,9.2,2,0.3,1
|
522 |
+
62,Male,1.8,0.9,224,69,155,8.6,4,0.8,1
|
523 |
+
55,Male,4.4,2.9,230,14,25,7.1,2.1,0.4,1
|
524 |
+
46,Female,0.8,0.2,185,24,15,7.9,3.7,0.8,1
|
525 |
+
50,Male,0.6,0.2,137,15,16,4.8,2.6,1.1,1
|
526 |
+
29,Male,0.8,0.2,156,12,15,6.8,3.7,1.1,2
|
527 |
+
53,Female,0.9,0.2,210,35,32,8,3.9,0.9,2
|
528 |
+
46,Male,9.4,5.2,268,21,63,6.4,2.8,0.8,1
|
529 |
+
40,Male,3.5,1.6,298,68,200,7.1,3.4,0.9,1
|
530 |
+
45,Male,1.7,0.8,315,12,38,6.3,2.1,0.5,1
|
531 |
+
55,Male,3.3,1.5,214,54,152,5.1,1.8,0.5,1
|
532 |
+
22,Female,1.1,0.3,138,14,21,7,3.8,1.1,2
|
533 |
+
40,Male,30.8,18.3,285,110,186,7.9,2.7,0.5,1
|
534 |
+
62,Male,0.7,0.2,162,12,17,8.2,3.2,0.6,2
|
535 |
+
46,Female,1.4,0.4,298,509,623,3.6,1,0.3,1
|
536 |
+
39,Male,1.6,0.8,230,88,74,8,4,1,2
|
537 |
+
60,Male,19.6,9.5,466,46,52,6.1,2,0.4,1
|
538 |
+
46,Male,15.8,7.2,227,67,220,6.9,2.6,0.6,1
|
539 |
+
10,Female,0.8,0.1,395,25,75,7.6,3.6,0.9,1
|
540 |
+
52,Male,1.8,0.8,97,85,78,6.4,2.7,0.7,1
|
541 |
+
65,Female,0.7,0.2,406,24,45,7.2,3.5,0.9,2
|
542 |
+
42,Male,0.8,0.2,114,21,23,7,3,0.7,2
|
543 |
+
42,Male,0.8,0.2,198,29,19,6.6,3,0.8,2
|
544 |
+
62,Male,0.7,0.2,173,46,47,7.3,4.1,1.2,2
|
545 |
+
40,Male,1.2,0.6,204,23,27,7.6,4,1.1,1
|
546 |
+
54,Female,5.5,3.2,350,67,42,7,3.2,0.8,1
|
547 |
+
45,Female,0.7,0.2,153,41,42,4.5,2.2,0.9,2
|
548 |
+
45,Male,20.2,11.7,188,47,32,5.4,2.3,0.7,1
|
549 |
+
50,Female,27.7,10.8,380,39,348,7.1,2.3,0.4,1
|
550 |
+
42,Male,11.1,6.1,214,60,186,6.9,2.8,2.8,1
|
551 |
+
40,Female,2.1,1,768,74,141,7.8,4.9,1.6,1
|
552 |
+
46,Male,3.3,1.5,172,25,41,5.6,2.4,0.7,1
|
553 |
+
29,Male,1.2,0.4,160,20,22,6.2,3,0.9,2
|
554 |
+
45,Male,0.6,0.1,196,29,30,5.8,2.9,1,1
|
555 |
+
46,Male,10.2,4.2,232,58,140,7,2.7,0.6,1
|
556 |
+
73,Male,1.8,0.9,220,20,43,6.5,3,0.8,1
|
557 |
+
55,Male,0.8,0.2,290,139,87,7,3,0.7,1
|
558 |
+
51,Male,0.7,0.1,180,25,27,6.1,3.1,1,1
|
559 |
+
51,Male,2.9,1.2,189,80,125,6.2,3.1,1,1
|
560 |
+
51,Male,4,2.5,275,382,330,7.5,4,1.1,1
|
561 |
+
26,Male,42.8,19.7,390,75,138,7.5,2.6,0.5,1
|
562 |
+
66,Male,15.2,7.7,356,321,562,6.5,2.2,0.4,1
|
563 |
+
66,Male,16.6,7.6,315,233,384,6.9,2,0.4,1
|
564 |
+
66,Male,17.3,8.5,388,173,367,7.8,2.6,0.5,1
|
565 |
+
64,Male,1.4,0.5,298,31,83,7.2,2.6,0.5,1
|
566 |
+
38,Female,0.6,0.1,165,22,34,5.9,2.9,0.9,2
|
567 |
+
43,Male,22.5,11.8,143,22,143,6.6,2.1,0.46,1
|
568 |
+
50,Female,1,0.3,191,22,31,7.8,4,1,2
|
569 |
+
52,Male,2.7,1.4,251,20,40,6,1.7,0.39,1
|
570 |
+
20,Female,16.7,8.4,200,91,101,6.9,3.5,1.02,1
|
571 |
+
16,Male,7.7,4.1,268,213,168,7.1,4,1.2,1
|
572 |
+
16,Male,2.6,1.2,236,131,90,5.4,2.6,0.9,1
|
573 |
+
90,Male,1.1,0.3,215,46,134,6.9,3,0.7,1
|
574 |
+
32,Male,15.6,9.5,134,54,125,5.6,4,2.5,1
|
575 |
+
32,Male,3.7,1.6,612,50,88,6.2,1.9,0.4,1
|
576 |
+
32,Male,12.1,6,515,48,92,6.6,2.4,0.5,1
|
577 |
+
32,Male,25,13.7,560,41,88,7.9,2.5,2.5,1
|
578 |
+
32,Male,15,8.2,289,58,80,5.3,2.2,0.7,1
|
579 |
+
32,Male,12.7,8.4,190,28,47,5.4,2.6,0.9,1
|
580 |
+
60,Male,0.5,0.1,500,20,34,5.9,1.6,0.37,2
|
581 |
+
40,Male,0.6,0.1,98,35,31,6,3.2,1.1,1
|
582 |
+
52,Male,0.8,0.2,245,48,49,6.4,3.2,1,1
|
583 |
+
31,Male,1.3,0.5,184,29,32,6.8,3.4,1,1
|
584 |
+
38,Male,1,0.3,216,21,24,7.3,4.4,1.5,2
|
data/loan_approval_dataset.csv
ADDED
The diff for this file is too large to render.
See raw diff
|
|
docker-compose.yml
ADDED
@@ -0,0 +1,13 @@
1 |
+
version: '3'
|
2 |
+
|
3 |
+
services:
|
4 |
+
xai-assist:
|
5 |
+
build: .
|
6 |
+
ports:
|
7 |
+
- "8000:8000" # API
|
8 |
+
- "8501:8501" # Streamlit
|
9 |
+
volumes:
|
10 |
+
- ./data:/app/data
|
11 |
+
environment:
|
12 |
+
- DATABASE_URL=sqlite:///./data/xai_assist.db
|
13 |
+
restart: unless-stopped
|
loan_applications.db
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d1b1872ab6ad008019d6b5e082cfcfaa7551c3e5aa3c954f63f73156d04e932e
|
3 |
+
size 131
|
loan_approval_dataset.csv
ADDED
The diff for this file is too large to render.
See raw diff
|
|
models/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
"""
|
2 |
+
Models package for the Synthack project.
|
3 |
+
Contains machine learning models for loan approval, employee attrition, and healthcare predictions.
|
4 |
+
"""
|
models/__pycache__/__init__.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3028f8bf21cd9f81dd3561f5354f370668acae256504105bd7b477caed9a338d
|
3 |
+
size 128
|
models/attrition_model.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:78c82401b88dd086cad9e96a9c5c32c1e77855b988fe1424dde056426be3db49
|
3 |
+
size 132
|
models/attrition_model.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c65aa4de6a42133d1f0a8cf8c73055aff7ca90636234ddfe3db819a539e2177d
|
3 |
+
size 2027435
|
models/attrition_preprocessor.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a208dcef9af98776fddb8083fc4c32a32b2ca276ded54123e5ae442c096af17d
|
3 |
+
size 129
|
models/attrition_preprocessor.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c1dee19546fd87af1891390e02832b7855fd884cc566df639947ef2323691447
|
3 |
+
size 2133
|
models/diabetes_feature_names.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:65160bdb42445c1c367b6bddb2b72a5dc78499ab3d794aebcd040c88f1fa1aea
|
3 |
+
size 121
|
models/diabetes_model.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8a8c08794a186a3d216cf2335ac9aef4e18a2edf6e1181eba0dd5f9b429d663d
|
3 |
+
size 1488522
|
models/diabetes_model_metrics.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e4cf0235e3ce46ab3641d5ea6bf2f049c88744d60b06f32b33ba872c172781be
|
3 |
+
size 231
|
models/liver_disease_model.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f6e6cd4e8400f0744eecd2d1c7fce1562affb578b6852bf1de008c68bc043fb3
|
3 |
+
size 328124
|
models/loan_explainer.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:66e2ece4c1d77dd57373342256b0dc228507c1019037e1814041304217f4d87a
|
3 |
+
size 4365509
|
models/loan_feature_names.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:b97d6eec892839605e58dbe09cbf07bef2e7ea92bff26d362592d88531fd57a2
|
3 |
+
size 212
|
models/loan_label_encoders.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:33233716af7b97094b625ff4b26d083d45a31bd06724659ebe6399533f2ea192
|
3 |
+
size 851
|
models/loan_model.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:25fd317c244ebcdaf47c6089b3a9b05f1ccf5137404e35bf0ec356ca31c4f485
|
3 |
+
size 2165785
|
models/loan_model.pkl
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:46a765a3d7ce99cdb9272aff9fc7713111a9824d5e0a3124ac1449830e53ff0b
|
3 |
+
size 132
|
models/loan_scaler.joblib
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f25fe285c0c75d8fb7bda6063f774fd926150440b63644807f6552e080df5717
|
3 |
+
size 1311
|
requirements.txt
ADDED
Binary file (934 Bytes). View file
|
|
roadmap.txt
ADDED
@@ -0,0 +1,77 @@
1 |
+
🚀 Project Title: XAI-Assist – Explainable AI for Critical Decision Support
|
2 |
+
🎯 Problem Statement
|
3 |
+
In high-stakes fields like Healthcare, Finance, and Legal Tech, AI-driven decisions are often opaque black boxes that are hard to trust. Professionals (doctors, loan officers, lawyers) need a transparent AI system that provides clear, human-readable explanations for its decisions.
|
4 |
+
|
5 |
+
✅ Objective
|
6 |
+
Develop an Explainable AI decision support system that:
|
7 |
+
|
8 |
+
Makes predictions (diagnosis, loan approval, legal outcomes).
|
9 |
+
|
10 |
+
Explains why it made that decision using visual + textual insights.
|
11 |
+
|
12 |
+
Allows experts to tweak or simulate decisions based on feature changes.
|
13 |
+
|
14 |
+
💡 Project Scope & Use Cases
|
15 |
+
Pick one of these (or build a general framework):
|
16 |
+
|
17 |
+
Domain Use Case Example Prediction
|
18 |
+
🏥 Healthcare Disease Risk Prediction "Will this patient develop diabetes in 5 years?"
|
19 |
+
💰 Finance Loan Approval System "Should this applicant get a loan?"
|
20 |
+
⚖️ Legal Tech Case Outcome Prediction "Will the court rule in favor of the defendant?"
|
21 |
+
🔍 Core Features
|
22 |
+
🔹 1. Model Transparency & Explainability
|
23 |
+
Use SHAP, LIME, or RuleFit to explain AI predictions.
|
24 |
+
|
25 |
+
Generate visual feature importance charts (SHAP force plots, waterfall plots).
|
26 |
+
|
27 |
+
Provide natural language explanations like:
|
28 |
+
"Loan denied due to low income ($20k), high debt-to-income ratio (40%), and low credit score (580)."
|
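As a minimal sketch of this SHAP-based explanation flow — assuming a scikit-learn tree ensemble and invented feature names (income, debt_to_income, credit_score) rather than the project's real loan schema or saved artifacts — the ranked textual explanation can be produced roughly like this:

import numpy as np
import pandas as pd
import shap  # pip install shap
from sklearn.ensemble import RandomForestClassifier

# Synthetic stand-in data; the real project trains on its own loan dataset.
rng = np.random.default_rng(42)
X = pd.DataFrame({
    "income": rng.normal(45_000, 15_000, 500),
    "debt_to_income": rng.uniform(0.05, 0.60, 500),
    "credit_score": rng.integers(450, 820, 500),
})
y = ((X["income"] > 35_000) & (X["credit_score"] > 620)).astype(int)  # toy target

model = RandomForestClassifier(n_estimators=100, random_state=42).fit(X, y)

applicant = X.iloc[[0]]                      # one applicant to explain
explainer = shap.TreeExplainer(model)
sv = explainer.shap_values(applicant)
sv = sv[1] if isinstance(sv, list) else sv[..., 1]   # contributions toward the positive class

ranked = sorted(zip(X.columns, np.ravel(sv)), key=lambda kv: abs(kv[1]), reverse=True)
print("Decision driven mainly by: " + ", ".join(f"{name} ({val:+.3f})" for name, val in ranked[:3]))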
29 |
+
|
30 |
+
🔹 2. Interactive "What-If" Analysis
|
31 |
+
Allow users to change feature values and see how decisions change.
|
32 |
+
|
33 |
+
Example: "If the income was $30k instead of $20k, the loan would have been approved."
|
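A what-if probe like the example above amounts to re-scoring a copy of the case with one feature changed. A hedged sketch, assuming any fitted classifier exposing predict_proba and a single-row pandas DataFrame; the names loan_model, applicant, and the income figures are placeholders, not the project's actual objects:

import pandas as pd

def what_if(model, case: pd.DataFrame, feature: str, new_value) -> tuple[float, float]:
    """Return P(positive class) before and after changing one feature of a single-row case."""
    baseline = float(model.predict_proba(case)[0][1])
    modified = case.copy()
    modified[feature] = new_value
    counterfactual = float(model.predict_proba(modified)[0][1])
    return baseline, counterfactual

# Illustrative call only -- loan_model and applicant are assumed to exist:
# before, after = what_if(loan_model, applicant, "income", 30_000)
# print(f"P(approve): {before:.2f} -> {after:.2f}")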
34 |
+
|
35 |
+
🔹 3. Comparative Decision Insights
|
36 |
+
Compare two similar cases with different outcomes and highlight why they diverged.
|
37 |
+
|
38 |
+
Example (Loan Application):
|
39 |
+
|
40 |
+
Applicant A (Denied): Income = $20k, Credit Score = 580
|
41 |
+
|
42 |
+
Applicant B (Approved): Income = $50k, Credit Score = 720
|
43 |
+
|
44 |
+
Key Insight: Income and credit score had the biggest impact.
|
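The comparison described above can be sketched as explaining both cases with the same explainer and ranking features by the gap between their contributions. A minimal illustration assuming a SHAP explainer and two single-row DataFrames; all names here (compare_cases, case_a, case_b) are invented for this example:

import numpy as np

def compare_cases(explainer, case_a, case_b, feature_names):
    """Rank features by how differently they pushed the two predictions (illustrative helper)."""
    def class1_contributions(case):
        sv = explainer.shap_values(case)
        sv = sv[1] if isinstance(sv, list) else sv[..., 1]
        return np.ravel(sv)

    gap = class1_contributions(case_a) - class1_contributions(case_b)
    return sorted(zip(feature_names, gap), key=lambda kv: abs(kv[1]), reverse=True)

# Example (names assumed): compare_cases(explainer, applicant_denied, applicant_approved, X.columns)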
45 |
+
|
46 |
+
🔹 4. Trust Score & Human Override System
|
47 |
+
Show a Trust Score (how confident the AI is in its decision).
|
48 |
+
|
49 |
+
Allow human experts to override AI decisions and provide a reason.
|
50 |
+
|
51 |
+
Store overrides for model auditing and bias detection.
|
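One simple reading of the Trust Score above is the model's confidence in its own predicted class, with human overrides appended to a small audit table. A minimal sketch under that assumption; the SQLite schema and function names are invented for illustration and are not the project's actual database layer:

import sqlite3
from datetime import datetime, timezone

def trust_score(model, case) -> float:
    """Confidence of the predicted class (max class probability) for a single-row input."""
    return float(max(model.predict_proba(case)[0]))

def record_override(db_path: str, case_id: str, ai_decision: str,
                    human_decision: str, reason: str) -> None:
    """Append a human override so it can be audited later (illustrative schema only)."""
    conn = sqlite3.connect(db_path)
    try:
        conn.execute(
            """CREATE TABLE IF NOT EXISTS overrides (
                   case_id TEXT, ai_decision TEXT, human_decision TEXT,
                   reason TEXT, created_at TEXT)"""
        )
        conn.execute(
            "INSERT INTO overrides VALUES (?, ?, ?, ?, ?)",
            (case_id, ai_decision, human_decision, reason,
             datetime.now(timezone.utc).isoformat()),
        )
        conn.commit()
    finally:
        conn.close()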
52 |
+
|
53 |
+
⚙️ Tech Stack
|
54 |
+
Component Tech
|
55 |
+
💻 Frontend Streamlit / ReactJS for UI
|
56 |
+
🧠 AI Model Random Forest, XGBoost, or Neural Networks
|
57 |
+
🔎 Explainability SHAP, LIME, ELI5, Fairlearn
|
58 |
+
📊 Visualization Matplotlib, Plotly, SHAP force plots
|
59 |
+
📦 Database PostgreSQL / Firebase (for saving decisions & overrides)
|
60 |
+
🎯 Why This Can Win the Hackathon
|
61 |
+
✅ Highly relevant & ethical – Explainability is a hot topic in AI.
|
62 |
+
✅ Real-world impact – Can be applied in multiple critical sectors.
|
63 |
+
✅ Great UI & Visuals – Judges love interactive dashboards & visual explanations.
|
64 |
+
✅ Customizable & expandable – Can work in healthcare, finance, or law.
|
65 |
+
|
66 |
+
🎁 Bonus Features (If Time Allows)
|
67 |
+
🚀 Bias Detection: Show if certain groups (e.g., women, minorities) are unfairly impacted.
|
68 |
+
🚀 Explainable Chatbot: An AI chatbot that explains decisions interactively.
|
69 |
+
🚀 PDF Report Generator: Generate a summary report of decisions and explanations.
|
70 |
+
|
71 |
+
💬 Next Steps
|
72 |
+
Do you want help with:
|
73 |
+
✅ Setting up a GitHub repo with boilerplate code?
|
74 |
+
✅ Designing an interactive UI mockup?
|
75 |
+
✅ Choosing a specific use-case (health, finance, law)?
|
76 |
+
|
77 |
+
I can help you with any of these! 🚀
|
src/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
"""
|
2 |
+
Source package for the Synthack project.
|
3 |
+
Contains the main application code, API endpoints, and data processing modules.
|
4 |
+
"""
|
src/__pycache__/__init__.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:5c11450c47361b831d139a0162d6b68caaafbacb26ad819ffc79c9a564dd844e
|
3 |
+
size 300
|
src/__pycache__/train_loan_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:a5939f4163b5da9d4165cc018fe555dd6fbf25dd42262268769ade54dbcfabb3
|
3 |
+
size 129
|
src/api/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
"""
|
2 |
+
API package for the Synthack project.
|
3 |
+
Contains FastAPI endpoints and related functionality.
|
4 |
+
"""
|
src/api/__pycache__/__init__.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:f65b85b16a9a372464514f6d3efc3828be42294fbfce9658efb9f1f583470630
|
3 |
+
size 275
|
src/api/__pycache__/attrition_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:82c4a519896b94be2e4abc0d310c7fbf348dff38f4191a6bf3aa9c98b451d808
|
3 |
+
size 13534
|
src/api/__pycache__/diabetes_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e2c36b880152829f58f063d3cdba307ac99739f1a9b0a63ef02c84207c783962
|
3 |
+
size 18594
|
src/api/__pycache__/liver_disease_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:c281001772fce91608b22ad3fa3518967aed38bf8b77a9700da9e2396b090721
|
3 |
+
size 14655
|
src/api/__pycache__/liver_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:929d042b52cdedea1d446e03c7a404f14b10fff9b52834d53b24c3ab488b25bf
|
3 |
+
size 8146
|
src/api/__pycache__/loan_model.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3a87e34e79c7e08bb8f43a2e361bd626d338171afa617c17c3576bcef413c380
|
3 |
+
size 15217
|
src/api/__pycache__/main.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:8777d13738753e6004ddc456fae2ff9dcf2de253244ba3e99983f7e337d2fb35
|
3 |
+
size 16941
|
src/api/attrition_model.py
ADDED
@@ -0,0 +1,243 @@
1 |
+
import pandas as pd
|
2 |
+
import numpy as np
|
3 |
+
from sklearn.model_selection import train_test_split
|
4 |
+
from sklearn.preprocessing import StandardScaler, OneHotEncoder
|
5 |
+
from sklearn.compose import ColumnTransformer
|
6 |
+
from sklearn.pipeline import Pipeline
|
7 |
+
from sklearn.ensemble import RandomForestClassifier
|
8 |
+
import pickle
|
9 |
+
import os
|
10 |
+
import sys
|
11 |
+
from typing import List
|
12 |
+
|
13 |
+
# Add the project root to Python path
|
14 |
+
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
|
15 |
+
sys.path.append(project_root)
|
16 |
+
|
17 |
+
class AttritionModel:
|
18 |
+
def __init__(self):
|
19 |
+
self.model = None
|
20 |
+
self.preprocessor = None
|
21 |
+
self.model_path = os.path.join(project_root, "models", "attrition_model.pkl")
|
22 |
+
self.preprocessor_path = os.path.join(project_root, "models", "attrition_preprocessor.pkl")
|
23 |
+
|
24 |
+
# Create models directory if it doesn't exist
|
25 |
+
os.makedirs(os.path.dirname(self.model_path), exist_ok=True)
|
26 |
+
|
27 |
+
# Define the features we'll use
|
28 |
+
self.numeric_features = [
|
29 |
+
'Age', 'DistanceFromHome', 'EnvironmentSatisfaction',
|
30 |
+
'JobLevel', 'JobSatisfaction', 'MonthlyIncome',
|
31 |
+
'TotalWorkingYears', 'WorkLifeBalance', 'YearsAtCompany'
|
32 |
+
]
|
33 |
+
self.categorical_features = ['OverTime']
|
34 |
+
|
35 |
+
# Try to load existing model and preprocessor
|
36 |
+
try:
|
37 |
+
with open(self.model_path, 'rb') as f:
|
38 |
+
self.model = pickle.load(f)
|
39 |
+
with open(self.preprocessor_path, 'rb') as f:
|
40 |
+
self.preprocessor = pickle.load(f)
|
41 |
+
except Exception:
|
42 |
+
print("No existing model found. Please train the model first.")
|
43 |
+
|
44 |
+
def preprocess_data(self, X):
|
45 |
+
"""Preprocess the input data"""
|
46 |
+
# Create preprocessing steps for numeric and categorical data
|
47 |
+
numeric_transformer = StandardScaler()
|
48 |
+
categorical_transformer = OneHotEncoder(drop='first', sparse=False)
|
49 |
+
|
50 |
+
# Combine preprocessing steps
|
51 |
+
self.preprocessor = ColumnTransformer(
|
52 |
+
transformers=[
|
53 |
+
('num', numeric_transformer, self.numeric_features),
|
54 |
+
('cat', categorical_transformer, self.categorical_features)
|
55 |
+
],
|
56 |
+
remainder='drop' # Drop any columns not specified in features
|
57 |
+
)
|
58 |
+
|
59 |
+
return self.preprocessor.fit_transform(X)
|
60 |
+
|
61 |
+
def train(self, X, y):
|
62 |
+
"""Train the model with the given data"""
|
63 |
+
# Preprocess the data
|
64 |
+
X_processed = self.preprocess_data(X)
|
65 |
+
|
66 |
+
# Create and train the model
|
67 |
+
self.model = RandomForestClassifier(
|
68 |
+
n_estimators=100,
|
69 |
+
max_depth=10,
|
70 |
+
random_state=42
|
71 |
+
)
|
72 |
+
self.model.fit(X_processed, y)
|
73 |
+
|
74 |
+
# Save the model and preprocessor
|
75 |
+
with open(self.model_path, 'wb') as f:
|
76 |
+
pickle.dump(self.model, f)
|
77 |
+
with open(self.preprocessor_path, 'wb') as f:
|
78 |
+
pickle.dump(self.preprocessor, f)
|
79 |
+
|
80 |
+
def predict(self, features):
|
81 |
+
"""Make a prediction using the trained model."""
|
82 |
+
try:
|
83 |
+
if self.model is None:
|
84 |
+
raise ValueError("Model not loaded. Please ensure model file exists and is valid.")
|
85 |
+
|
86 |
+
print(f"Input features: {features}")
|
87 |
+
|
88 |
+
# Convert string inputs to appropriate types
|
89 |
+
processed_features = {}
|
90 |
+
for key, value in features.items():
|
91 |
+
if key == 'OverTime':
|
92 |
+
# Convert 'Yes'/'No' to 1/0
|
93 |
+
if isinstance(value, str):
|
94 |
+
processed_features[key] = 1 if value.lower() in ['yes', 'true', '1'] else 0
|
95 |
+
else:
|
96 |
+
processed_features[key] = 1 if value else 0
|
97 |
+
else:
|
98 |
+
# Convert other values to appropriate numeric types
|
99 |
+
try:
|
100 |
+
processed_features[key] = float(value)
|
101 |
+
except (ValueError, TypeError):
|
102 |
+
# Handle conversion errors
|
103 |
+
raise ValueError(f"Invalid value for feature {key}: {value}. Expected numeric value.")
|
104 |
+
|
105 |
+
print(f"Processed features: {processed_features}")
|
106 |
+
|
107 |
+
# Create DataFrame with processed values
|
108 |
+
X = pd.DataFrame([processed_features])
|
109 |
+
|
110 |
+
# Ensure all required columns are present
|
111 |
+
required_columns = self.numeric_features + self.categorical_features
|
112 |
+
|
113 |
+
for col in required_columns:
|
114 |
+
if col not in X.columns:
|
115 |
+
raise ValueError(f"Missing required feature: {col}")
|
116 |
+
|
117 |
+
# Ensure columns are in the correct order for the preprocessor
|
118 |
+
X = X[required_columns]
|
119 |
+
|
120 |
+
# Debug information
|
121 |
+
print(f"Input data types before conversion: {X.dtypes}")
|
122 |
+
|
123 |
+
# Convert all numeric columns to float64
|
124 |
+
for col in self.numeric_features:
|
125 |
+
X[col] = pd.to_numeric(X[col], errors='coerce').astype(np.float64)
|
126 |
+
|
127 |
+
# Convert categorical columns to appropriate types
|
128 |
+
for col in self.categorical_features:
|
129 |
+
X[col] = X[col].astype(np.int64)
|
130 |
+
|
131 |
+
print(f"Input data types after conversion: {X.dtypes}")
|
132 |
+
print(f"Input data: {X.to_dict('records')}")
|
133 |
+
|
134 |
+
# Check for NaN values
|
135 |
+
if X.isnull().any().any():
|
136 |
+
print(f"Warning: NaN values detected in input: {X.isnull().sum()}")
|
137 |
+
# Fill NaN values with appropriate defaults
|
138 |
+
X = X.fillna(X.mean())
|
139 |
+
|
140 |
+
# Use preprocessor
|
141 |
+
if self.preprocessor is not None:
|
142 |
+
try:
|
143 |
+
X_processed = self.preprocessor.transform(X)
|
144 |
+
print("Preprocessing successful")
|
145 |
+
except Exception as e:
|
146 |
+
print(f"Error during preprocessing: {str(e)}")
|
147 |
+
# Try direct prediction without preprocessing as fallback
|
148 |
+
try:
|
149 |
+
# For direct prediction, we need to handle categorical features manually
|
150 |
+
# Convert 'OverTime' to one-hot encoding manually
|
151 |
+
X_direct = X.copy()
|
152 |
+
X_direct['OverTime_Yes'] = X_direct['OverTime']
|
153 |
+
X_direct = X_direct.drop('OverTime', axis=1)
|
154 |
+
|
155 |
+
# Make prediction with direct features
|
156 |
+
prediction = bool(self.model.predict(X_direct.values)[0])
|
157 |
+
probability = float(self.model.predict_proba(X_direct.values)[0][1])
|
158 |
+
|
159 |
+
print("Used direct prediction as fallback")
|
160 |
+
return {
|
161 |
+
"prediction": prediction,
|
162 |
+
"probability": probability
|
163 |
+
}
|
164 |
+
except Exception as direct_error:
|
165 |
+
print(f"Direct prediction also failed: {str(direct_error)}")
|
166 |
+
raise ValueError(f"Failed to process input data: {str(e)}")
|
167 |
+
else:
|
168 |
+
# If no preprocessor, just use the raw values
|
169 |
+
X_processed = X.values
|
170 |
+
print("No preprocessor available, using raw values")
|
171 |
+
|
172 |
+
# Make prediction
|
173 |
+
prediction = bool(self.model.predict(X_processed)[0])
|
174 |
+
probability = float(self.model.predict_proba(X_processed)[0][1])
|
175 |
+
|
176 |
+
print(f"Prediction result: {prediction}, probability: {probability}")
|
177 |
+
|
178 |
+
return {
|
179 |
+
"prediction": prediction,
|
180 |
+
"probability": probability
|
181 |
+
}
|
182 |
+
|
183 |
+
except Exception as e:
|
184 |
+
import traceback
|
185 |
+
traceback.print_exc()
|
186 |
+
raise ValueError(f"Error during prediction: {str(e)}")
|
187 |
+
|
188 |
+
def get_feature_importance(self) -> List[float]:
|
189 |
+
"""Get the feature importance scores as a list of floats."""
|
190 |
+
try:
|
191 |
+
if hasattr(self.model, 'feature_importances_'):
|
192 |
+
# Convert feature importances to a list of floats
|
193 |
+
return [float(x) for x in self.model.feature_importances_]
|
194 |
+
return None
|
195 |
+
except Exception as e:
|
196 |
+
print(f"Error getting feature importance: {str(e)}")
|
197 |
+
return None
|
198 |
+
|
199 |
+
def train_model():
|
200 |
+
"""Train and save the attrition prediction model"""
|
201 |
+
try:
|
202 |
+
model = AttritionModel()
|
203 |
+
|
204 |
+
# Get absolute paths
|
205 |
+
current_dir = os.path.dirname(os.path.abspath(__file__))
|
206 |
+
project_root = os.path.dirname(os.path.dirname(current_dir))
|
207 |
+
data_file = os.path.join(project_root, "data", "HR-Employee-Attrition.csv")
|
208 |
+
model_dir = os.path.join(project_root, 'models')
|
209 |
+
|
210 |
+
print(f"Loading data from: {data_file}")
|
211 |
+
print(f"Model will be saved to: {model_dir}")
|
212 |
+
|
213 |
+
# Ensure data file exists
|
214 |
+
if not os.path.exists(data_file):
|
215 |
+
raise FileNotFoundError(f"Data file not found at {data_file}")
|
216 |
+
|
217 |
+
# Create models directory if it doesn't exist
|
218 |
+
os.makedirs(model_dir, exist_ok=True)
|
219 |
+
|
220 |
+
# Load data
|
221 |
+
print("Loading and preparing data...")
|
222 |
+
data = pd.read_csv(data_file)
|
223 |
+
|
224 |
+
# Select only the features we want to use
|
225 |
+
features = model.numeric_features + model.categorical_features
|
226 |
+
print(f"Using features: {features}")
|
227 |
+
|
228 |
+
X = data[features]
|
229 |
+
y = data['Attrition'].map({'Yes': 1, 'No': 0})
|
230 |
+
|
231 |
+
# Train the model
|
232 |
+
print("Training model...")
|
233 |
+
model.train(X, y)
|
234 |
+
print("Model trained and saved successfully")
|
235 |
+
|
236 |
+
except Exception as e:
|
237 |
+
print(f"Error during model training: {str(e)}")
|
238 |
+
import traceback
|
239 |
+
print(traceback.format_exc())
|
240 |
+
sys.exit(1)
|
241 |
+
|
242 |
+
if __name__ == "__main__":
|
243 |
+
train_model()
|
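Once train_model() above has written the pickle files, the class is consumed roughly as follows. This is a hedged usage sketch: the feature values are invented, and it assumes the script runs from the project root so that src.api.attrition_model is importable.

# Hypothetical caller for AttritionModel; feature values are made up for illustration.
from src.api.attrition_model import AttritionModel

model = AttritionModel()  # loads models/attrition_model.pkl and the preprocessor if present

sample_employee = {
    "Age": 34,
    "DistanceFromHome": 12,
    "EnvironmentSatisfaction": 2,
    "JobLevel": 2,
    "JobSatisfaction": 3,
    "MonthlyIncome": 4200,
    "TotalWorkingYears": 8,
    "WorkLifeBalance": 2,
    "YearsAtCompany": 3,
    "OverTime": "Yes",
}

result = model.predict(sample_employee)
print(f"Attrition predicted: {result['prediction']} (p={result['probability']:.2f})")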
src/api/diabetes_model.py
ADDED
@@ -0,0 +1,320 @@
1 |
+
import pandas as pd
|
2 |
+
import numpy as np
|
3 |
+
import pickle
|
4 |
+
import os
|
5 |
+
import sys
|
6 |
+
import logging
|
7 |
+
from sklearn.preprocessing import StandardScaler
|
8 |
+
from sklearn.ensemble import RandomForestClassifier
|
9 |
+
from sklearn.model_selection import train_test_split
|
10 |
+
from typing import List
|
11 |
+
|
12 |
+
logger = logging.getLogger(__name__)
|
13 |
+
|
14 |
+
class DiabetesModel:
|
15 |
+
def __init__(self):
|
16 |
+
self.model = None
|
17 |
+
self.scaler = None
|
18 |
+
self.feature_names = None
|
19 |
+
self.model_metrics = None
|
20 |
+
|
21 |
+
# Get the project root directory
|
22 |
+
self.project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
23 |
+
|
24 |
+
# Set paths for model files
|
25 |
+
self.model_path = os.path.join(self.project_root, 'models', 'diabetes_model.pkl')
|
26 |
+
self.feature_names_path = os.path.join(self.project_root, 'models', 'diabetes_feature_names.pkl')
|
27 |
+
self.model_metrics_path = os.path.join(self.project_root, 'models', 'diabetes_model_metrics.pkl')
|
28 |
+
|
29 |
+
# Default feature names if not loaded from file
|
30 |
+
self.default_feature_names = [
|
31 |
+
'Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness',
|
32 |
+
'Insulin', 'BMI', 'DiabetesPedigreeFunction', 'Age'
|
33 |
+
]
|
34 |
+
|
35 |
+
# Initialize feature names first
|
36 |
+
self.feature_names = self.default_feature_names
|
37 |
+
|
38 |
+
# Load the model and related files
|
39 |
+
self.load_model()
|
40 |
+
|
41 |
+
def load_model(self):
|
42 |
+
"""Load the trained model and related files from disk."""
|
43 |
+
try:
|
44 |
+
# Try to load feature names first
|
45 |
+
if os.path.exists(self.feature_names_path):
|
46 |
+
try:
|
47 |
+
with open(self.feature_names_path, 'rb') as f:
|
48 |
+
self.feature_names = pickle.load(f, encoding='latin1')
|
49 |
+
logger.info("Feature names loaded successfully")
|
50 |
+
except Exception as e:
|
51 |
+
logger.warning(f"Error loading feature names: {str(e)}. Using defaults.")
|
52 |
+
self.feature_names = self.default_feature_names
|
53 |
+
else:
|
54 |
+
logger.warning("Feature names file not found, using defaults")
|
55 |
+
self.feature_names = self.default_feature_names
|
56 |
+
|
57 |
+
# Try to load the model
|
58 |
+
if os.path.exists(self.model_path):
|
59 |
+
try:
|
60 |
+
with open(self.model_path, 'rb') as f:
|
61 |
+
model_data = pickle.load(f, encoding='latin1')
|
62 |
+
if isinstance(model_data, dict):
|
63 |
+
self.model = model_data.get('model')
|
64 |
+
self.scaler = model_data.get('scaler')
|
65 |
+
if self.model is None or self.scaler is None:
|
66 |
+
raise ValueError("Model or scaler missing from loaded data")
|
67 |
+
else:
|
68 |
+
self.model = model_data
|
69 |
+
# Create a new scaler if not found in model data
|
70 |
+
self.scaler = StandardScaler()
|
71 |
+
logger.warning("Model loaded but scaler not found. Creating new scaler.")
|
72 |
+
logger.info("Model loaded successfully")
|
73 |
+
except Exception as e:
|
74 |
+
logger.error(f"Error loading model: {str(e)}")
|
75 |
+
raise ValueError(f"Failed to load diabetes model: {str(e)}")
|
76 |
+
else:
|
77 |
+
logger.error("Model file not found.")
|
78 |
+
raise FileNotFoundError(f"Diabetes model file not found at {self.model_path}")
|
79 |
+
|
80 |
+
# Try to load model metrics
|
81 |
+
if os.path.exists(self.model_metrics_path):
|
82 |
+
try:
|
83 |
+
with open(self.model_metrics_path, 'rb') as f:
|
84 |
+
self.model_metrics = pickle.load(f, encoding='latin1')
|
85 |
+
logger.info("Model metrics loaded successfully")
|
86 |
+
except Exception as e:
|
87 |
+
logger.warning(f"Error loading model metrics: {str(e)}")
|
88 |
+
self.model_metrics = None
|
89 |
+
else:
|
90 |
+
logger.warning("Model metrics file not found")
|
91 |
+
self.model_metrics = None
|
92 |
+
except Exception as e:
|
93 |
+
logger.error(f"Error in load_model: {str(e)}")
|
94 |
+
raise ValueError(f"Failed to load diabetes model: {str(e)}")
|
95 |
+
|
96 |
+
# Remove the _create_dummy_model method entirely
|
97 |
+
def _create_dummy_model(self):
|
98 |
+
"""Create a dummy model for testing purposes."""
|
99 |
+
try:
|
100 |
+
logger.warning("Creating dummy model")
|
101 |
+
self.model = RandomForestClassifier(n_estimators=100, random_state=42)
|
102 |
+
self.scaler = StandardScaler()
|
103 |
+
|
104 |
+
# Create dummy data to fit the scaler and model
|
105 |
+
dummy_data = pd.DataFrame(np.random.randn(100, len(self.feature_names)),
|
106 |
+
columns=self.feature_names)
|
107 |
+
self.scaler.fit(dummy_data)
|
108 |
+
|
109 |
+
# Fit the model with dummy data
|
110 |
+
dummy_target = np.random.randint(0, 2, 100)
|
111 |
+
self.model.fit(dummy_data, dummy_target)
|
112 |
+
logger.info("Dummy model created successfully")
|
113 |
+
except Exception as e:
|
114 |
+
logger.error(f"Error creating dummy model: {str(e)}")
|
115 |
+
raise
|
116 |
+
|
117 |
+
def save_model(self):
|
118 |
+
"""Save the model and scaler together in one file."""
|
119 |
+
try:
|
120 |
+
# Create a dictionary containing both model and scaler
|
121 |
+
model_data = {
|
122 |
+
'model': self.model,
|
123 |
+
'scaler': self.scaler
|
124 |
+
}
|
125 |
+
|
126 |
+
# Save to file
|
127 |
+
with open(self.model_path, 'wb') as f:
|
128 |
+
pickle.dump(model_data, f)
|
129 |
+
logger.info("Model and scaler saved successfully")
|
130 |
+
|
131 |
+
except Exception as e:
|
132 |
+
logger.error(f"Error saving model: {str(e)}")
|
133 |
+
raise
|
134 |
+
|
135 |
+
def predict(self, features):
|
136 |
+
"""Make a prediction using the trained model."""
|
137 |
+
try:
|
138 |
+
if self.model is None:
|
139 |
+
raise ValueError("Model not loaded. Please ensure model file exists and is valid.")
|
140 |
+
|
141 |
+
print(f"Input features for diabetes prediction: {features}")
|
142 |
+
|
143 |
+
# Convert string inputs to appropriate numeric types
|
144 |
+
processed_features = {}
|
145 |
+
for key, value in features.items():
|
146 |
+
try:
|
147 |
+
processed_features[key] = float(value)
|
148 |
+
except (ValueError, TypeError):
|
149 |
+
# Handle conversion errors
|
150 |
+
raise ValueError(f"Invalid value for feature {key}: {value}. Expected numeric value.")
|
151 |
+
|
152 |
+
# Create DataFrame with processed values
|
153 |
+
X = pd.DataFrame([processed_features])
|
154 |
+
|
155 |
+
# Ensure all required columns are present
|
156 |
+
required_columns = [
|
157 |
+
'Pregnancies', 'Glucose', 'BloodPressure', 'SkinThickness',
|
158 |
+
'Insulin', 'BMI', 'DiabetesPedigreeFunction', 'Age'
|
159 |
+
]
|
160 |
+
|
161 |
+
for col in required_columns:
|
162 |
+
if col not in X.columns:
|
163 |
+
raise ValueError(f"Missing required feature: {col}")
|
164 |
+
|
165 |
+
# Ensure columns are in the correct order
|
166 |
+
X = X[required_columns]
|
167 |
+
|
168 |
+
# Convert all data to float64 to ensure compatibility
|
169 |
+
X = X.astype(float)
|
170 |
+
|
171 |
+
# Scale features if scaler is available
|
172 |
+
if hasattr(self, 'scaler') and self.scaler is not None:
|
173 |
+
X_scaled = self.scaler.transform(X)
|
174 |
+
else:
|
175 |
+
X_scaled = X.values
|
176 |
+
|
177 |
+
# Make prediction
|
178 |
+
prediction = bool(self.model.predict(X_scaled)[0])
|
179 |
+
|
180 |
+
# Get probability - handle different model types
|
181 |
+
if hasattr(self.model, 'predict_proba'):
|
182 |
+
# For models that provide probability
|
183 |
+
proba = self.model.predict_proba(X_scaled)[0]
|
184 |
+
# Make sure we get the probability for the positive class (index 1)
|
185 |
+
probability = float(proba[1]) if len(proba) > 1 else float(proba[0])
|
186 |
+
else:
|
187 |
+
# For models that don't provide probability
|
188 |
+
probability = 0.5 + (float(self.model.decision_function(X_scaled)[0]) / 10)
|
189 |
+
probability = max(0, min(1, probability)) # Clamp between 0 and 1
|
190 |
+
|
191 |
+
return {
|
192 |
+
"prediction": prediction,
|
193 |
+
"probability": probability
|
194 |
+
}
|
195 |
+
|
196 |
+
except Exception as e:
|
197 |
+
import traceback
|
198 |
+
traceback.print_exc()
|
199 |
+
raise ValueError(f"Error during prediction: {str(e)}")
|
200 |
+
|
201 |
+
def get_feature_importance(self) -> List[float]:
|
202 |
+
"""Get the feature importance scores as a list of floats."""
|
203 |
+
try:
|
204 |
+
if hasattr(self.model, 'feature_importances_'):
|
205 |
+
# Convert feature importances to a list of floats
|
206 |
+
importances = [float(x) for x in self.model.feature_importances_]
|
207 |
+
# Ensure we have the same number of importances as features
|
208 |
+
if len(importances) == len(self.feature_names):
|
209 |
+
return importances
|
210 |
+
# If we can't get valid feature importances, return None
|
211 |
+
logger.warning("Could not get valid feature importances")
|
212 |
+
return None
|
213 |
+
except Exception as e:
|
214 |
+
logger.error(f"Error getting feature importance: {str(e)}")
|
215 |
+
return None
|
216 |
+
|
217 |
+
def get_model_metrics(self):
|
218 |
+
"""Get the model metrics."""
|
219 |
+
return self.model_metrics if self.model_metrics else None
|
220 |
+
|
221 |
+
def train_model(self, X, y):
|
222 |
+
"""Train the model with the given data."""
|
223 |
+
try:
|
224 |
+
logger.info("Starting model training...")
|
225 |
+
|
226 |
+
# Initialize the scaler and scale the features
|
227 |
+
self.scaler = StandardScaler()
|
228 |
+
X_scaled = self.scaler.fit_transform(X)
|
229 |
+
|
230 |
+
# Initialize and train the model
|
231 |
+
self.model = RandomForestClassifier(
|
232 |
+
n_estimators=100,
|
233 |
+
max_depth=10,
|
234 |
+
random_state=42
|
235 |
+
)
|
236 |
+
self.model.fit(X_scaled, y)
|
237 |
+
|
238 |
+
# Calculate and store model metrics
|
239 |
+
train_score = self.model.score(X_scaled, y)
|
240 |
+
feature_importance = self.model.feature_importances_
|
241 |
+
|
242 |
+
self.model_metrics = {
|
243 |
+
'train_score': train_score,
|
244 |
+
'feature_importance': feature_importance.tolist()
|
245 |
+
}
|
246 |
+
|
247 |
+
# Save the model, scaler, and metrics
|
248 |
+
self.save_model()
|
249 |
+
self.save_metrics()
|
250 |
+
self.save_feature_names()
|
251 |
+
|
252 |
+
logger.info(f"Model trained successfully. Training score: {train_score:.4f}")
|
253 |
+
return True
|
254 |
+
|
255 |
+
except Exception as e:
|
256 |
+
logger.error(f"Error in train_model: {str(e)}")
|
257 |
+
raise
|
258 |
+
|
259 |
+
def save_metrics(self):
|
260 |
+
"""Save model metrics to file."""
|
261 |
+
try:
|
262 |
+
with open(self.model_metrics_path, 'wb') as f:
|
263 |
+
pickle.dump(self.model_metrics, f)
|
264 |
+
logger.info("Model metrics saved successfully")
|
265 |
+
except Exception as e:
|
266 |
+
logger.error(f"Error saving model metrics: {str(e)}")
|
267 |
+
raise
|
268 |
+
|
269 |
+
def save_feature_names(self):
|
270 |
+
"""Save feature names to file."""
|
271 |
+
try:
|
272 |
+
with open(self.feature_names_path, 'wb') as f:
|
273 |
+
pickle.dump(self.feature_names, f)
|
274 |
+
logger.info("Feature names saved successfully")
|
275 |
+
except Exception as e:
|
276 |
+
logger.error(f"Error saving feature names: {str(e)}")
|
277 |
+
raise
|
278 |
+
|
279 |
+
def train_model():
|
280 |
+
"""Train and save the diabetes prediction model"""
|
281 |
+
try:
|
282 |
+
model = DiabetesModel()
|
283 |
+
|
284 |
+
# Get absolute paths
|
285 |
+
current_dir = os.path.dirname(os.path.abspath(__file__))
|
286 |
+
project_root = os.path.dirname(os.path.dirname(current_dir))
|
287 |
+
data_file = os.path.join(project_root, "data", "diabetes.csv")
|
288 |
+
model_dir = os.path.join(project_root, 'models')
|
289 |
+
|
290 |
+
print(f"Loading data from: {data_file}")
|
291 |
+
print(f"Model will be saved to: {model_dir}")
|
292 |
+
|
293 |
+
# Ensure data file exists
|
294 |
+
if not os.path.exists(data_file):
|
295 |
+
raise FileNotFoundError(f"Data file not found at {data_file}")
|
296 |
+
|
297 |
+
# Create models directory if it doesn't exist
|
298 |
+
os.makedirs(model_dir, exist_ok=True)
|
299 |
+
|
300 |
+
# Load data
|
301 |
+
print("Loading and preparing data...")
|
302 |
+
data = pd.read_csv(data_file)
|
303 |
+
|
304 |
+
# Select features and target
|
305 |
+
X = data[model.feature_names]
|
306 |
+
y = data['Outcome']
|
307 |
+
|
308 |
+
# Train the model
|
309 |
+
print("Training model...")
|
310 |
+
model.train_model(X, y)
|
311 |
+
print("Model trained and saved successfully")
|
312 |
+
|
313 |
+
except Exception as e:
|
314 |
+
print(f"Error during model training: {str(e)}")
|
315 |
+
import traceback
|
316 |
+
print(traceback.format_exc())
|
317 |
+
sys.exit(1)
|
318 |
+
|
319 |
+
if __name__ == "__main__":
|
320 |
+
train_model()
|
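Analogously to the attrition model, a caller would exercise DiabetesModel along these lines once models/diabetes_model.pkl exists (the constructor raises if it is missing). The patient values below are illustrative only, and the import path assumes the project root is on sys.path.

# Hypothetical caller for DiabetesModel; values are illustrative, not clinical guidance.
from src.api.diabetes_model import DiabetesModel

model = DiabetesModel()

patient = {
    "Pregnancies": 2,
    "Glucose": 148,
    "BloodPressure": 72,
    "SkinThickness": 35,
    "Insulin": 0,
    "BMI": 33.6,
    "DiabetesPedigreeFunction": 0.63,
    "Age": 50,
}

result = model.predict(patient)
print(f"Diabetes predicted: {result['prediction']} (p={result['probability']:.2f})")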
src/api/liver_disease_model.py
ADDED
@@ -0,0 +1,266 @@
import pandas as pd
import numpy as np
import pickle
import os
import sys
import logging
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from typing import List, Dict, Any

logger = logging.getLogger(__name__)

class LiverDiseaseModel:
    def __init__(self):
        self.model = None
        self.scaler = None
        self.feature_names = None

        # Get the project root directory
        self.project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

        # Set paths for model files
        self.model_path = os.path.join(self.project_root, 'models', 'liver_disease_model.pkl')

        # Default feature names
        self.default_feature_names = [
            'Age', 'Gender', 'Total_Bilirubin', 'Direct_Bilirubin',
            'Alkaline_Phosphotase', 'Alamine_Aminotransferase',
            'Aspartate_Aminotransferase', 'Total_Protiens',
            'Albumin', 'Albumin_and_Globulin_Ratio'
        ]

        # Initialize feature names
        self.feature_names = self.default_feature_names

        # Create models directory if it doesn't exist
        os.makedirs(os.path.dirname(self.model_path), exist_ok=True)

        # Load the trained model (raises if the file is missing or invalid)
        self.load_model()

    def load_model(self):
        """Load the trained model from disk."""
        try:
            if os.path.exists(self.model_path):
                with open(self.model_path, 'rb') as f:
                    try:
                        model_data = pickle.load(f, encoding='latin1')
                        if isinstance(model_data, dict):
                            self.model = model_data.get('model')
                            self.scaler = model_data.get('scaler')
                            if self.model is None or self.scaler is None:
                                raise ValueError("Model or scaler missing from loaded data")
                        else:
                            self.model = model_data
                            # Create a new scaler if not found in model data
                            self.scaler = StandardScaler()
                        logger.info("Liver disease model loaded successfully")
                    except Exception as inner_e:
                        logger.error(f"Error during pickle load: {str(inner_e)}")
                        raise ValueError(f"Failed to load liver disease model: {str(inner_e)}")
            else:
                raise FileNotFoundError(f"Liver disease model file not found at {self.model_path}")
        except Exception as e:
            logger.error(f"Error loading liver disease model: {str(e)}")
            raise ValueError(f"Failed to load liver disease model: {str(e)}")

    # Kept for test/debug use; load_model() never falls back to it.
    def _create_dummy_model(self):
        """Create a dummy model for testing purposes."""
        try:
            logger.warning("Creating dummy liver disease model")
            self.model = RandomForestClassifier(n_estimators=100, random_state=42)
            self.scaler = StandardScaler()

            # Create dummy data to fit the scaler and model
            dummy_data = pd.DataFrame(np.random.randn(100, len(self.feature_names)),
                                      columns=self.feature_names)
            self.scaler.fit(dummy_data)

            # Fit the model with dummy data
            dummy_target = np.random.randint(0, 2, 100)
            self.model.fit(dummy_data, dummy_target)

            # Save the dummy model
            self.save_model()

            logger.info("Dummy liver disease model created and saved successfully")
        except Exception as e:
            logger.error(f"Error creating dummy liver disease model: {str(e)}")
            raise

    def save_model(self):
        """Save the model and scaler together in one file."""
        try:
            # Create a dictionary containing both model and scaler
            model_data = {
                'model': self.model,
                'scaler': self.scaler
            }

            # Save to file
            with open(self.model_path, 'wb') as f:
                pickle.dump(model_data, f)
            logger.info("Liver disease model and scaler saved successfully")

        except Exception as e:
            logger.error(f"Error saving liver disease model: {str(e)}")
            raise

    def predict(self, features: Dict[str, Any]) -> Dict[str, Any]:
        """Make a prediction using the trained model."""
        try:
            if self.model is None:
                raise ValueError(f"Model not loaded. Please ensure model file exists at {self.model_path} and is valid.")

            print(f"Input features for liver disease prediction: {features}")

            # Convert string inputs to appropriate numeric types
            processed_features = {}
            for key, value in features.items():
                if key == 'Gender':
                    # Convert 'Male'/'Female' to 1/0
                    if isinstance(value, str):
                        processed_features[key] = 1 if value.lower() in ['male', 'm', '1'] else 0
                    else:
                        processed_features[key] = 1 if value else 0
                else:
                    # Convert other values to appropriate numeric types
                    try:
                        processed_features[key] = float(value)
                    except (ValueError, TypeError):
                        # Handle conversion errors
                        raise ValueError(f"Invalid value for feature {key}: {value}. Expected numeric value.")

            # Create DataFrame with processed values
            X = pd.DataFrame([processed_features])

            # Ensure all required columns are present
            for col in self.feature_names:
                if col not in X.columns:
                    raise ValueError(f"Missing required feature: {col}")

            # Ensure columns are in the correct order
            X = X[self.feature_names]

            # Convert all data to float64 to ensure compatibility
            X = X.astype(float)

            # Scale features
            X_scaled = self.scaler.transform(X)

            # Make prediction
            prediction = bool(self.model.predict(X_scaled)[0])

            # Get probability
            if hasattr(self.model, 'predict_proba'):
                proba = self.model.predict_proba(X_scaled)[0]
                probability = float(proba[1]) if len(proba) > 1 else float(proba[0])
            else:
                probability = 0.5 + (float(self.model.decision_function(X_scaled)[0]) / 10)
                probability = max(0, min(1, probability))  # Clamp between 0 and 1

            return {
                "prediction": prediction,
                "probability": probability
            }

        except Exception as e:
            import traceback
            traceback.print_exc()
            raise ValueError(f"Error during prediction: {str(e)}")

    def train_model(self, X, y):
        """Train the model with the given data."""
        try:
            logger.info("Starting liver disease model training...")

            # Initialize the scaler and scale the features
            self.scaler = StandardScaler()
            X_scaled = self.scaler.fit_transform(X)

            # Initialize and train the model
            self.model = RandomForestClassifier(
                n_estimators=100,
                max_depth=10,
                random_state=42
            )
            self.model.fit(X_scaled, y)

            # Save the model and scaler
            self.save_model()

            logger.info("Liver disease model trained successfully")
            return True

        except Exception as e:
            logger.error(f"Error in train_model: {str(e)}")
            raise

    def get_feature_importance(self):
        """Return feature importance values from the model."""
        try:
            if self.model is None:
                logger.warning("Model not loaded, cannot get feature importance")
                return None

            # For RandomForestClassifier, we can get feature importance directly
            if hasattr(self.model, 'feature_importances_'):
                # Return the feature importances as a list
                return self.model.feature_importances_.tolist()
            else:
                # Create dummy feature importance if not available
                logger.warning("Feature importance not available in model, returning dummy values")
                return [0.15, 0.05, 0.12, 0.08, 0.18, 0.14, 0.10, 0.08, 0.06, 0.04]
        except Exception as e:
            logger.error(f"Error getting feature importance: {str(e)}")
            # Return dummy values as fallback
            return [0.15, 0.05, 0.12, 0.08, 0.18, 0.14, 0.10, 0.08, 0.06, 0.04]

def train_model():
    """Train and save the liver disease prediction model"""
    try:
        model = LiverDiseaseModel()

        # Get absolute paths
        current_dir = os.path.dirname(os.path.abspath(__file__))
        project_root = os.path.dirname(os.path.dirname(current_dir))
        data_file = os.path.join(project_root, "data", "indian_liver_patient.csv")

        print(f"Loading data from: {data_file}")
        print(f"Model will be saved to: {model.model_path}")

        # Ensure data file exists
        if not os.path.exists(data_file):
            raise FileNotFoundError(f"Data file not found at {data_file}")

        # Load data
        print("Loading and preparing data...")
        data = pd.read_csv(data_file)

        # Preprocess data
        data['Gender'] = data['Gender'].map({'Male': 1, 'Female': 0})

        # Handle missing values
        data = data.fillna(data.median())

        # Select features and target
        X = data[model.feature_names]
        # 'Dataset' is the target column; it uses 1 = liver disease, 2 = no disease,
        # so map it to 1/0 before training.
        y = (data['Dataset'] == 1).astype(int)

        # Train the model
        print("Training model...")
        model.train_model(X, y)
        print("Model trained and saved successfully")

    except Exception as e:
        print(f"Error during model training: {str(e)}")
        import traceback
        print(traceback.format_exc())
        sys.exit(1)

if __name__ == "__main__":
    train_model()
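A minimal usage sketch for the LiverDiseaseModel class above. It assumes models/liver_disease_model.pkl has already been trained and saved; the feature values are purely illustrative.

# Sketch: calling the liver disease model directly (illustrative values).
from src.api.liver_disease_model import LiverDiseaseModel

model = LiverDiseaseModel()  # loads models/liver_disease_model.pkl or raises
sample = {
    'Age': 45, 'Gender': 'Male', 'Total_Bilirubin': 1.2, 'Direct_Bilirubin': 0.4,
    'Alkaline_Phosphotase': 210, 'Alamine_Aminotransferase': 35,
    'Aspartate_Aminotransferase': 40, 'Total_Protiens': 6.8,
    'Albumin': 3.4, 'Albumin_and_Globulin_Ratio': 1.0,
}
result = model.predict(sample)
print(result["prediction"], result["probability"])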
src/api/liver_model.py
ADDED
@@ -0,0 +1,146 @@
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier
import joblib
import os
import sys
import pickle

# Add the project root to Python path
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
sys.path.append(project_root)

class LiverDiseaseModel:
    def __init__(self):
        self.model = None
        self.scaler = None
        self.model_path = os.path.join(project_root, "models", "liver_model.joblib")
        self.scaler_path = os.path.join(project_root, "models", "liver_scaler.joblib")
        self.pkl_model_path = os.path.join(project_root, "models", "liver_disease_model.pkl")

        # Create models directory if it doesn't exist
        os.makedirs(os.path.dirname(self.model_path), exist_ok=True)

        print(f"Looking for model at: {self.pkl_model_path}")

        # Try to load model and scaler in this order:
        # 1. First try the .pkl file
        # 2. Then try the .joblib files
        if os.path.exists(self.pkl_model_path):
            try:
                print(f"Loading model from {self.pkl_model_path}")
                with open(self.pkl_model_path, 'rb') as f:
                    model_data = pickle.load(f)

                # Check if the loaded data is a dictionary containing both model and scaler
                if isinstance(model_data, dict):
                    self.model = model_data.get('model')
                    self.scaler = model_data.get('scaler')
                    print("Successfully loaded model and scaler from .pkl file")
                else:
                    # If it's just the model
                    self.model = model_data
                    print("Loaded model from .pkl file, but no scaler found")

                    # Try to load scaler separately if it exists
                    if os.path.exists(self.scaler_path):
                        self.scaler = joblib.load(self.scaler_path)
                        print("Loaded scaler from .joblib file")
                    else:
                        # Create a default scaler if none exists
                        print("No scaler found, creating a default StandardScaler")
                        self.scaler = StandardScaler()
            except Exception as e:
                print(f"Error loading model from .pkl file: {str(e)}")
                import traceback
                print(traceback.format_exc())
        else:
            print(f"Model file not found at: {self.pkl_model_path}")

    def train(self, X, y):
        """Train the model on the provided data"""
        # Split the data
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

        # Initialize and fit the scaler
        self.scaler = StandardScaler()
        X_train_scaled = self.scaler.fit_transform(X_train)

        # Initialize and train the model
        self.model = RandomForestClassifier(
            n_estimators=100,
            max_depth=10,
            random_state=42
        )
        self.model.fit(X_train_scaled, y_train)

        # Save the model and scaler in both formats
        joblib.dump(self.model, self.model_path)
        joblib.dump(self.scaler, self.scaler_path)

        # Also save as .pkl for compatibility
        with open(self.pkl_model_path, 'wb') as f:
            pickle.dump({'model': self.model, 'scaler': self.scaler}, f)

        # Evaluate on test set
        X_test_scaled = self.scaler.transform(X_test)
        test_score = self.model.score(X_test_scaled, y_test)
        return test_score

    def predict(self, features):
        """Make a prediction for the given features"""
        if self.model is None:
            raise ValueError(f"Model not loaded. Please ensure model file exists at {self.pkl_model_path} and is valid.")

        if self.scaler is None:
            print("Warning: No scaler found. Using raw features without scaling.")

        # Convert features to DataFrame
        feature_names = ['Age', 'Gender', 'Total_Bilirubin', 'Direct_Bilirubin',
                         'Alkaline_Phosphotase', 'Alamine_Aminotransferase',
                         'Aspartate_Aminotransferase', 'Total_Protiens',
                         'Albumin', 'Albumin_and_Globulin_Ratio']

        # Create a DataFrame with the features in the correct order
        df = pd.DataFrame([features], columns=feature_names)

        # Scale the features if scaler is available
        if self.scaler is not None:
            try:
                X_scaled = self.scaler.transform(df)
            except Exception as e:
                print(f"Error scaling features: {str(e)}. Using raw features.")
                X_scaled = df.values
        else:
            X_scaled = df.values

        # Make prediction
        try:
            prediction = bool(self.model.predict(X_scaled)[0])
            probability = float(self.model.predict_proba(X_scaled)[0][1])
        except Exception as e:
            print(f"Error making prediction: {str(e)}")
            import traceback
            print(traceback.format_exc())
            raise ValueError(f"Error making prediction: {str(e)}")

        return {
            "prediction": prediction,
            "probability": probability
        }

    def get_feature_importance(self):
        """Return feature importance if available"""
        if self.model is None:
            return None

        try:
            # Get feature importance from the model
            if hasattr(self.model, 'feature_importances_'):
                return self.model.feature_importances_.tolist()
            return None
        except Exception:
            return None
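Both liver model variants above exchange the same pickle payload: a dict with 'model' and 'scaler' keys written to models/liver_disease_model.pkl. A quick inspection sketch, assuming that file has already been produced by one of the train methods:

# Sketch: inspect the shared pickle payload.
import pickle

with open("models/liver_disease_model.pkl", "rb") as f:
    payload = pickle.load(f)

if isinstance(payload, dict):
    print(type(payload["model"]).__name__, type(payload["scaler"]).__name__)
else:
    print("Bare estimator without a scaler:", type(payload).__name__)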
src/api/loan_applications.db
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:67e6f8e8f669475cbf002fc9007f1f4f2560b2803cc63c43e2391b39ebd5a2cc
size 102400
src/api/loan_model.py
ADDED
@@ -0,0 +1,288 @@
import os
import joblib
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import StandardScaler, LabelEncoder
import shap
import logging
from typing import Dict, Any, List, Optional, Tuple

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

class LoanApprovalModel:
    """Loan approval model for predicting loan application outcomes."""

    def __init__(self, model_dir: str = "models", load_model: bool = True):
        """Initialize the loan approval model.

        Args:
            model_dir (str): Directory containing the trained model components
            load_model (bool): Whether to load existing model components
        """
        self.model_dir = model_dir
        self.model = None
        self.scaler = StandardScaler()
        self.feature_names = None
        self.explainer = None

        # Initialize label encoders for categorical columns
        self.categorical_columns = ['education', 'self_employed']
        self.label_encoders = {}
        for col in self.categorical_columns:
            self.label_encoders[col] = LabelEncoder()

        # Load model components if requested
        if load_model:
            self.load_components()

    def load_components(self):
        """Load the trained model and preprocessing components."""
        try:
            logger.info("Loading model components...")

            # Load model
            model_path = os.path.join(self.model_dir, 'loan_model.joblib')
            if not os.path.exists(model_path):
                raise FileNotFoundError(f"Model file not found at {model_path}")
            self.model = joblib.load(model_path)

            # Load scaler
            scaler_path = os.path.join(self.model_dir, 'loan_scaler.joblib')
            if not os.path.exists(scaler_path):
                raise FileNotFoundError(f"Scaler file not found at {scaler_path}")
            self.scaler = joblib.load(scaler_path)

            # Load label encoders
            encoders_path = os.path.join(self.model_dir, 'loan_label_encoders.joblib')
            if not os.path.exists(encoders_path):
                raise FileNotFoundError(f"Label encoders file not found at {encoders_path}")
            self.label_encoders = joblib.load(encoders_path)

            # Load feature names
            features_path = os.path.join(self.model_dir, 'loan_feature_names.joblib')
            if not os.path.exists(features_path):
                raise FileNotFoundError(f"Feature names file not found at {features_path}")
            self.feature_names = joblib.load(features_path)

            # Try to load explainer if available
            explainer_path = os.path.join(self.model_dir, 'loan_explainer.joblib')
            if os.path.exists(explainer_path):
                self.explainer = joblib.load(explainer_path)

            logger.info("Model components loaded successfully")

        except Exception as e:
            logger.error(f"Error loading model components: {str(e)}")
            raise

    def save(self, output_dir: str = "models") -> None:
        """Save model components to disk.

        Args:
            output_dir (str): Directory to save model components
        """
        try:
            os.makedirs(output_dir, exist_ok=True)

            # Save model
            model_path = os.path.join(output_dir, "loan_model.joblib")
            joblib.dump(self.model, model_path)

            # Save scaler
            scaler_path = os.path.join(output_dir, "loan_scaler.joblib")
            joblib.dump(self.scaler, scaler_path)

            # Save label encoders
            encoders_path = os.path.join(output_dir, "loan_label_encoders.joblib")
            joblib.dump(self.label_encoders, encoders_path)

            # Save feature names
            features_path = os.path.join(output_dir, "loan_feature_names.joblib")
            joblib.dump(self.feature_names, features_path)

            # Save explainer if available
            if self.explainer is not None:
                explainer_path = os.path.join(output_dir, "loan_explainer.joblib")
                joblib.dump(self.explainer, explainer_path)

            logger.info(f"Model components saved to {output_dir}")

        except Exception as e:
            logger.error(f"Error saving model components: {str(e)}")
            raise

    def train(self, X: pd.DataFrame, y: pd.Series) -> None:
        """Train the loan approval model.

        Args:
            X (pd.DataFrame): Training features
            y (pd.Series): Target values
        """
        try:
            # Store feature names
            self.feature_names = list(X.columns)

            # Preprocess features
            X_processed = self._preprocess_features(X, is_training=True)

            # Initialize and train model
            logger.info("Training RandomForestClassifier...")
            self.model = RandomForestClassifier(
                n_estimators=200,
                max_depth=10,
                min_samples_split=5,
                min_samples_leaf=2,
                random_state=42
            )

            # Fit the model
            self.model.fit(X_processed, y)

            # Initialize SHAP explainer
            logger.info("Initializing SHAP explainer...")
            self.explainer = shap.TreeExplainer(self.model)

            logger.info("Model trained successfully")

        except Exception as e:
            logger.error(f"Error training model: {str(e)}")
            raise

    def predict(self, features: Dict[str, Any]) -> Tuple[str, float, Dict[str, float]]:
        """Make a prediction for loan approval.

        Args:
            features (Dict[str, Any]): Input features for prediction

        Returns:
            Tuple[str, float, Dict[str, float]]: Prediction result, probability, and feature importance
        """
        try:
            # Validate required features
            required_features = [
                'no_of_dependents', 'education', 'self_employed', 'income_annum',
                'loan_amount', 'loan_term', 'cibil_score', 'residential_assets_value',
                'commercial_assets_value', 'luxury_assets_value', 'bank_asset_value'
            ]

            missing_features = [f for f in required_features if f not in features]
            if missing_features:
                raise ValueError(f"Missing required features: {missing_features}")

            # Calculate derived features
            features = features.copy()  # Create a copy to avoid modifying the input
            features['debt_to_income'] = features['loan_amount'] / features['income_annum']
            features['total_assets'] = (
                features['residential_assets_value'] +
                features['commercial_assets_value'] +
                features['luxury_assets_value'] +
                features['bank_asset_value']
            )
            features['asset_to_loan'] = features['total_assets'] / features['loan_amount']

            # Create DataFrame with all required features
            X = pd.DataFrame([features])

            # Ensure all required features are present
            required_features = self.feature_names
            missing_features = set(required_features) - set(X.columns)
            if missing_features:
                raise ValueError(f"Missing required features after preprocessing: {missing_features}")

            # Reorder columns to match training data
            X = X[required_features]

            # Encode categorical features first
            for feature in ['education', 'self_employed']:
                try:
                    X[feature] = self.label_encoders[feature].transform(X[feature].astype(str))
                except Exception as e:
                    raise ValueError(f"Error encoding {feature}: {str(e)}. Valid values are: {self.label_encoders[feature].classes_}")

            # Scale numerical features
            numerical_features = [f for f in X.columns if f not in ['education', 'self_employed']]
            X[numerical_features] = self.scaler.transform(X[numerical_features])

            # Make prediction
            prediction = self.model.predict(X)[0]
            probability = self.model.predict_proba(X)[0][1]  # Probability of approval

            # Calculate feature importance
            feature_importance = dict(zip(self.feature_names, self.model.feature_importances_))

            # Map prediction to string
            result = "Approved" if prediction == 1 else "Rejected"

            return result, probability, feature_importance

        except Exception as e:
            logger.error(f"Error making prediction: {str(e)}")
            logger.exception("Detailed traceback:")
            raise

    def _preprocess_features(self, X: pd.DataFrame, is_training: bool = False) -> pd.DataFrame:
        """Preprocess features for model training or prediction.

        Args:
            X (pd.DataFrame): Input features
            is_training (bool): Whether preprocessing is for training

        Returns:
            pd.DataFrame: Preprocessed features
        """
        try:
            # Create copy to avoid modifying original data
            df = X.copy()

            # Encode categorical variables
            for col in self.categorical_columns:
                if col in df.columns:
                    if is_training:
                        df[col] = self.label_encoders[col].fit_transform(df[col])
                    else:
                        df[col] = self.label_encoders[col].transform(df[col])

            # Scale numerical features
            numerical_features = [f for f in df.columns if f not in self.categorical_columns]
            if is_training:
                df[numerical_features] = self.scaler.fit_transform(df[numerical_features])
            else:
                df[numerical_features] = self.scaler.transform(df[numerical_features])

            return df

        except Exception as e:
            logger.error(f"Error preprocessing features: {str(e)}")
            raise

    def get_feature_importance(self):
        """Return feature importance values from the model."""
        try:
            if self.model is None:
                print("Model not loaded, cannot get feature importance")
                return None

            # For tree-based models like RandomForest, we can get feature importance directly
            if hasattr(self.model, 'feature_importances_'):
                # Return the feature importances as a list
                return self.model.feature_importances_.tolist()
            elif hasattr(self.model, 'coef_'):
                # For linear models, use coefficients as importance
                return np.abs(self.model.coef_[0]).tolist()
            else:
                # Create dummy feature importance if not available
                print("Feature importance not available in model, returning dummy values")
                # Create dummy values for each feature
                feature_count = len(self.feature_names) if hasattr(self, 'feature_names') else 10
                return [0.1] * feature_count
        except Exception as e:
            print(f"Error getting feature importance: {str(e)}")
            # Return dummy values as fallback
            feature_count = len(self.feature_names) if hasattr(self, 'feature_names') else 10
            return [0.1] * feature_count
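A minimal usage sketch for LoanApprovalModel, assuming the joblib artifacts already exist under models/. Note that predict() returns a (label, probability, importance) tuple rather than a dict, which matters for the API wiring in src/api/main.py below; the feature values here are illustrative.

# Sketch: direct use of the loan approval model (illustrative values).
from src.api.loan_model import LoanApprovalModel

model = LoanApprovalModel(model_dir="models")
label, probability, importance = model.predict({
    "no_of_dependents": 1, "education": "Graduate", "self_employed": "No",
    "income_annum": 9600000, "loan_amount": 2000000, "loan_term": 10,
    "cibil_score": 760, "residential_assets_value": 5000000,
    "commercial_assets_value": 0, "luxury_assets_value": 1000000,
    "bank_asset_value": 800000,
})
print(label, round(float(probability), 3))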
src/api/main.py
ADDED
@@ -0,0 +1,344 @@
import sys
import os

# Add the project root directory to the path
project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(project_root)

print(f"Project root: {project_root}")
print(f"Python path: {sys.path}")

# Import models
from src.api.loan_model import LoanApprovalModel
from src.api.attrition_model import AttritionModel
from src.api.diabetes_model import DiabetesModel
from src.api.liver_model import LiverDiseaseModel
from fastapi import FastAPI, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List, Optional, Dict, Union, Any
from src.data.database import Application, Prediction, get_db, SessionLocal
import json
from datetime import datetime
import logging

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(
    title="Multi-Model Prediction API",
    description="API for predicting loan approval, employee attrition, diabetes risk, and liver disease",
    version="1.0.0"
)

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allows all origins
    allow_credentials=True,
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],  # Allows all headers
)

# Set up model paths
model_dir = os.path.join(project_root, "models")
print(f"Model directory: {model_dir}")

# Load the trained loan model
loan_model = LoanApprovalModel(model_dir=model_dir)

# Prefer the dedicated liver disease model implementation when available
try:
    from src.api.liver_disease_model import LiverDiseaseModel
except ImportError:
    try:
        from liver_disease_model import LiverDiseaseModel
    except ImportError:
        print("Warning: Could not import LiverDiseaseModel")

# Load models
try:
    attrition_model = AttritionModel()
    diabetes_model = DiabetesModel()
    liver_model = LiverDiseaseModel()
    logger.info("All models loaded successfully")
except Exception as e:
    logger.error(f"Error loading models: {str(e)}")
    # Continue execution even if model loading fails
    # Models will be initialized as needed

# Define response model for predictions
class PredictionResponse(BaseModel):
    prediction: bool
    probability: float
    feature_importance: Optional[List[float]] = None

# Define request model for attrition prediction
class AttritionPredictionRequest(BaseModel):
    Age: int
    DistanceFromHome: int
    EnvironmentSatisfaction: int
    JobLevel: int
    JobSatisfaction: int
    MonthlyIncome: int
    OverTime: str
    TotalWorkingYears: int
    WorkLifeBalance: int
    YearsAtCompany: int

class LoanFeatures(BaseModel):
    no_of_dependents: int
    education: str
    self_employed: str
    income_annum: float
    loan_amount: float
    loan_term: int
    cibil_score: int
    residential_assets_value: float
    commercial_assets_value: float
    luxury_assets_value: float
    bank_asset_value: float

@app.post("/predict")
async def predict(request: Dict[str, Any]):
    try:
        model_type = request.get("model_type", "").lower()
        features = request.get("features", {})

        if model_type == "liver":
            result = liver_model.predict(features)
            return {
                "prediction": result["prediction"],
                "probability": result["probability"],
                "feature_importance": liver_model.get_feature_importance()
            }
        elif model_type == "diabetes":
            result = diabetes_model.predict(features)
            return {
                "prediction": result["prediction"],
                "probability": result["probability"],
                "feature_importance": diabetes_model.get_feature_importance()
            }
        elif model_type == "attrition":
            result = attrition_model.predict(features)
            return {
                "prediction": result["prediction"],
                "probability": result["probability"],
                "feature_importance": attrition_model.get_feature_importance()
            }
        elif model_type == "loan":
            # LoanApprovalModel.predict returns a (label, probability, importance) tuple
            label, probability, _ = loan_model.predict(features)
            return {
                "prediction": label,
                "probability": float(probability),
                "feature_importance": loan_model.get_feature_importance()
            }
        else:
            raise HTTPException(status_code=400, detail=f"Unknown model type: {model_type}")
    except Exception as e:
        logger.error(f"Error in prediction: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/predict/attrition", response_model=PredictionResponse)
async def predict_attrition(request: AttritionPredictionRequest):
    try:
        # Convert request to dictionary
        features = request.dict()

        # Make prediction
        result = attrition_model.predict(features)

        return result
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

# Liver disease prediction endpoint
@app.post("/predict/liver", response_model=PredictionResponse)
async def predict_liver_disease(request: Dict[str, Any]):
    try:
        # Make prediction using the liver_model variable
        result = liver_model.predict(request)

        # Get feature importance if available
        feature_importance = None
        if hasattr(liver_model, 'get_feature_importance') and callable(getattr(liver_model, 'get_feature_importance')):
            feature_importance = liver_model.get_feature_importance()

        # Return prediction result
        return {
            "prediction": result["prediction"],
            "probability": result["probability"],
            "feature_importance": feature_importance
        }
    except Exception as e:
        logger.error(f"Error in liver disease prediction: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

# Loan prediction endpoint (raw dict payload)
@app.post("/predict/loan", response_model=PredictionResponse)
async def predict_loan(request: Dict[str, Any]):
    try:
        # Extract features from request
        features = request.get("features", request)  # Handle both formats

        # LoanApprovalModel.predict returns a (label, probability, importance) tuple
        label, probability, _ = loan_model.predict(features)

        # Get feature importance
        feature_importance = None
        if hasattr(loan_model, 'get_feature_importance') and callable(getattr(loan_model, 'get_feature_importance')):
            try:
                feature_importance = loan_model.get_feature_importance()
            except Exception as e:
                logger.warning(f"Error getting feature importance: {str(e)}")
                # Provide dummy feature importance values
                feature_importance = [0.2, 0.15, 0.15, 0.1, 0.1, 0.08, 0.07, 0.05, 0.05, 0.03, 0.02]
        else:
            # If method doesn't exist, provide dummy values
            logger.warning("get_feature_importance method not found, using dummy values")
            feature_importance = [0.2, 0.15, 0.15, 0.1, 0.1, 0.08, 0.07, 0.05, 0.05, 0.03, 0.02]

        # Return prediction result
        return {
            "prediction": label == "Approved",  # PredictionResponse expects a bool
            "probability": float(probability),
            "feature_importance": feature_importance
        }
    except Exception as e:
        logger.error(f"Error in loan prediction: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/predict/loan_approval")
async def predict_loan_approval(features: LoanFeatures):
    """Predict loan approval based on applicant features."""
    try:
        # Convert Pydantic model to dict
        features_dict = features.dict()

        # Log input features for debugging
        logger.info(f"Input features: {features_dict}")

        # Calculate derived metrics
        monthly_income = features_dict['income_annum'] / 12
        total_assets = (
            features_dict['residential_assets_value'] +
            features_dict['commercial_assets_value'] +
            features_dict['luxury_assets_value'] +
            features_dict['bank_asset_value']
        )

        # Calculate monthly EMI (Equated Monthly Installment)
        annual_interest_rate = 0.10  # 10% annual interest rate
        monthly_rate = annual_interest_rate / 12
        loan_term_months = features_dict['loan_term'] * 12
        monthly_payment = (features_dict['loan_amount'] * monthly_rate * (1 + monthly_rate)**loan_term_months) / ((1 + monthly_rate)**loan_term_months - 1)

        # Calculate key ratios
        debt_to_income = monthly_payment / monthly_income
        asset_to_loan = total_assets / features_dict['loan_amount']

        # Make prediction
        result, probability, feature_importance = loan_model.predict(features_dict)

        # Generate personalized explanation
        explanation = []

        # Credit Score Analysis
        if features_dict['cibil_score'] >= 750:
            explanation.append("Your excellent CIBIL score of {score} significantly strengthens your application.".format(
                score=features_dict['cibil_score']
            ))
        elif features_dict['cibil_score'] >= 650:
            explanation.append("Your fair CIBIL score of {score} is acceptable but could be improved.".format(
                score=features_dict['cibil_score']
            ))
        else:
            explanation.append("Your CIBIL score of {score} is below the preferred threshold.".format(
                score=features_dict['cibil_score']
            ))

        # Income and EMI Analysis
        if debt_to_income <= 0.3:
            explanation.append("Your monthly loan payment (₹{emi:,.2f}) represents {ratio:.1%} of your monthly income (₹{income:,.2f}), which is very manageable.".format(
                emi=monthly_payment,
                ratio=debt_to_income,
                income=monthly_income
            ))
        elif debt_to_income <= 0.5:
            explanation.append("Your monthly loan payment (₹{emi:,.2f}) represents {ratio:.1%} of your monthly income (₹{income:,.2f}), which is moderate but acceptable.".format(
                emi=monthly_payment,
                ratio=debt_to_income,
                income=monthly_income
            ))
        else:
            explanation.append("Your monthly loan payment (₹{emi:,.2f}) represents {ratio:.1%} of your monthly income (₹{income:,.2f}), which is relatively high.".format(
                emi=monthly_payment,
                ratio=debt_to_income,
                income=monthly_income
            ))

        # Asset Coverage Analysis
        if asset_to_loan >= 2:
            explanation.append("Your total assets (₹{assets:,.2f}) provide excellent coverage at {ratio:.1f}x the loan amount.".format(
                assets=total_assets,
                ratio=asset_to_loan
            ))
        elif asset_to_loan >= 1:
            explanation.append("Your total assets (₹{assets:,.2f}) adequately cover the loan amount at {ratio:.1f}x.".format(
                assets=total_assets,
                ratio=asset_to_loan
            ))
        else:
            explanation.append("Your total assets (₹{assets:,.2f}) provide limited coverage at {ratio:.1f}x the loan amount.".format(
                assets=total_assets,
                ratio=asset_to_loan
            ))

        # Employment Status
        if features_dict['self_employed'] == "Yes":
            explanation.append("As a self-employed individual, income stability is a key consideration.")
        else:
            explanation.append("Your salaried employment status provides income stability.")

        # Education
        if features_dict['education'] == "Graduate":
            explanation.append("Your graduate education is viewed favorably.")

        # Dependents
        if features_dict['no_of_dependents'] > 2:
            explanation.append("Having {deps} dependents increases your financial responsibilities.".format(
                deps=features_dict['no_of_dependents']
            ))

        # Format response
        response = {
            "prediction": result,
            "probability": float(probability),
            "feature_importance": {k: float(v) for k, v in feature_importance.items()},
            "explanation": explanation,
            "financial_metrics": {
                "monthly_income": float(monthly_income),
                "monthly_payment": float(monthly_payment),
                "debt_to_income": float(debt_to_income),
                "asset_to_loan": float(asset_to_loan),
                "total_assets": float(total_assets)
            }
        }

        logger.info(f"Prediction response: {response}")
        return response

    except Exception as e:
        logger.error(f"Error making prediction: {str(e)}")
        logger.exception("Detailed traceback:")
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/health")
async def health_check():
    return {"status": "healthy"}

if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="127.0.0.1", port=8000)
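A hedged client-side sketch for exercising the service above once it is running locally on 127.0.0.1:8000 (the uvicorn defaults in this file); the payload values are illustrative. For this sample (₹2,000,000 at the hard-coded 10% rate over 120 months), the EMI formula in /predict/loan_approval works out to roughly ₹26,400 per month.

# Sketch: call the running API with the requests library (illustrative payload).
import requests

payload = {
    "no_of_dependents": 1, "education": "Graduate", "self_employed": "No",
    "income_annum": 9600000, "loan_amount": 2000000, "loan_term": 10,
    "cibil_score": 760, "residential_assets_value": 5000000,
    "commercial_assets_value": 0, "luxury_assets_value": 1000000,
    "bank_asset_value": 800000,
}

print(requests.get("http://127.0.0.1:8000/health").json())
resp = requests.post("http://127.0.0.1:8000/predict/loan_approval", json=payload).json()
print(resp["prediction"], resp["financial_metrics"]["monthly_payment"])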
src/data/__pycache__/database.cpython-311.pyc
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:20df374293ab4146b7e0186261be6ec747e61372241de02268ffa89fee91578b
size 2993
src/data/database.py
ADDED
@@ -0,0 +1,51 @@
from sqlalchemy import create_engine, Column, Integer, Float, String, Boolean, ForeignKey, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship

# Create SQLite database
SQLALCHEMY_DATABASE_URL = "sqlite:///./loan_applications.db"
engine = create_engine(SQLALCHEMY_DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()

# Define database models
class Application(Base):
    __tablename__ = "applications"

    id = Column(Integer, primary_key=True, index=True)
    no_of_dependents = Column(Integer)
    education = Column(String)
    self_employed = Column(String)
    income_annum = Column(Float)
    loan_amount = Column(Float)
    loan_term = Column(Integer)
    cibil_score = Column(Integer)
    residential_assets_value = Column(Float)
    commercial_assets_value = Column(Float)
    luxury_assets_value = Column(Float)
    bank_asset_value = Column(Float)

    predictions = relationship("Prediction", back_populates="application")

class Prediction(Base):
    __tablename__ = "predictions"

    id = Column(Integer, primary_key=True, index=True)
    application_id = Column(Integer, ForeignKey("applications.id"))
    prediction = Column(Boolean)
    probability = Column(Float)
    explanation = Column(Text)
    feature_importance = Column(Text)  # Stored as JSON string

    application = relationship("Application", back_populates="predictions")

# Create tables
Base.metadata.create_all(bind=engine)

# Dependency to get database session
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
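src/api/main.py imports these ORM models and the session factory, but the endpoints shown in this upload do not yet write to them. A minimal persistence sketch using the objects defined above (all field values are illustrative):

# Sketch: store one application and its prediction (illustrative values).
import json
from src.data.database import Application, Prediction, SessionLocal

db = SessionLocal()
try:
    app_row = Application(
        no_of_dependents=1, education="Graduate", self_employed="No",
        income_annum=9600000.0, loan_amount=2000000.0, loan_term=10,
        cibil_score=760, residential_assets_value=5000000.0,
        commercial_assets_value=0.0, luxury_assets_value=1000000.0,
        bank_asset_value=800000.0,
    )
    db.add(app_row)
    db.flush()  # populate app_row.id before referencing it

    db.add(Prediction(
        application_id=app_row.id, prediction=True, probability=0.91,
        explanation="Illustrative record", feature_importance=json.dumps({}),
    ))
    db.commit()
finally:
    db.close()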