coldlike commited on
Commit
faf90bc
·
0 Parent(s):

Initial commit

Browse files
.gitignore ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __pycache__
2
+ *.pyc
3
+ *.pth
4
+ *.pt
5
+ *.ipynb_checkpoints
6
+ *.DS_Store
7
+ *.env
8
+ *.log
9
+ *.tmp
10
+ *.csv
11
+ *.xlsx
12
+ *.tar.gz
13
+ *.zip
14
+ *.sublime-project
15
+ *.sublime-workspace
16
+ .vscode/
17
+ .idea/
18
+ .pytest_cache/
19
+ results/
20
+ temp_*
LICENSE.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Kudakwashe M
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # 🧬 Malaria Cell Classifier with Grad-CAM & Streamlit UI
2
+
3
+ A deep learning-based malaria detection system using ResNet50 and Grad-CAM explainability.
4
+
5
+ ## 🚀 Features
6
+
7
+ - ✅ Binary classification of blood smear images (`Infected` / `Uninfected`)
8
+ - 🔍 Grad-CAM visualizations to highlight infected regions
9
+ - 🌐 Interactive Streamlit web interface
10
+ - 📦 Easy-to-deploy structure
11
+
12
+ ## 🛠️ Built With
13
+
14
+ - [PyTorch](https://pytorch.org/)
15
+ - [Streamlit](https://streamlit.io/)
16
+ - [Grad-CAM](https://arxiv.org/abs/1610.02391)
17
+ - [ResNet50](https://pytorch.org/vision/stable/models.html)
18
+
19
+ ## 📦 Dataset
20
+
21
+ Uses the [Malaria Cell Images Dataset](https://www.kaggle.com/iarunava/cell-images-for-detecting-malaria)
22
+
23
+ ## 📁 Folder Structure
24
+
25
+ Place raw images in:
26
+ data/cell_images/
27
+ ├── Parasitized/
28
+ └── Uninfected/
29
+
30
+
31
+ ## 📷 Example Output
32
+
33
+ ![Example Grad-CAM Output](image.png)
34
+
35
+ ## 🧪 Usage
36
+
37
+ ### Train the Model
38
+ ```bash
39
+ python notebooks/train.py
app/app.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ import torch
4
+ from PIL import Image
5
+ import numpy as np
6
+ import warnings
7
+ import torch.nn.functional as F
8
+
9
+ # Avoid OMP error from PyTorch/OpenCV
10
+ os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
11
+
12
+ # Suppress UserWarning noise (e.g. from Matplotlib/Streamlit)
13
+ warnings.filterwarnings("ignore", category=UserWarning)
14
+
15
+ # Import custom modules
16
+ from models.resnet_model import MalariaResNet50
17
+ from gradcam import visualize_gradcam
18
+
19
+
20
+ # -----------------------------
21
+ # Streamlit Page Setup
22
+ # -----------------------------
23
+ st.set_page_config(page_title="🧬 Malaria Cell Classifier", layout="wide")
24
+ st.title("🧬 Malaria Cell Classifier with Grad-CAM")
25
+ st.write("Upload a blood smear image and the model will classify it as infected or uninfected, and highlight key regions using Grad-CAM.")
26
+
27
+
28
+ # -----------------------------
29
+ # Load Model
30
+ # -----------------------------
31
@st.cache_resource
def load_model():
    """Load the trained malaria classifier once and cache it across reruns.

    Returns:
        MalariaResNet50: model with trained weights loaded on CPU, in eval mode.
    """
    # Ensure model class doesn't wrap backbone
    classifier = MalariaResNet50(num_classes=2)
    state = torch.load("models/malaria_model.pth", map_location='cpu')
    classifier.load_state_dict(state)
    classifier.eval()
    return classifier
38
+
39
+ model = load_model()
40
+
41
+
42
+ # -----------------------------
43
+ # Upload Image
44
+ # -----------------------------
45
+ uploaded_file = st.file_uploader("Choose an image...", type=["jpg", "png", "jpeg"])
46
+
47
+ if uploaded_file is not None:
48
+ # Save uploaded image temporarily
49
+ temp_image_path = f"temp_{uploaded_file.name}"
50
+ with open(temp_image_path, "wb") as f:
51
+ f.write(uploaded_file.getbuffer())
52
+
53
+ # Display original image (resize if needed)
54
+ image = Image.open(uploaded_file).convert("RGB")
55
+ max_size = (400, 400) # Max width and height
56
+ image.thumbnail(max_size)
57
+ st.image(image, caption="Uploaded Image", use_container_width=False)
58
+
59
+ # Predict button
60
+ if st.button("Predict"):
61
+ with st.spinner("Classifying..."):
62
+ # Run prediction
63
+ pred_label, confidence = model.predict(temp_image_path, device='cpu', show_image=False)
64
+ st.success(f"✅ Prediction: **{pred_label}** | Confidence: **{confidence:.2%}**")
65
+
66
+ # Show Grad-CAM
67
+ st.subheader("🔍 Grad-CAM Visualization")
68
+ with st.expander("ℹ️ What is Grad-CAM?"):
69
+ st.markdown("""
70
+ **Grad-CAM (Gradient-weighted Class Activation Mapping)** is an interpretability method that shows which parts of an image are most important for a CNN's prediction.
71
+
72
+ How it works:
73
+ 1. Gradients flow from the output neuron back to the last convolutional layer.
74
+ 2. These gradients are global average pooled to get importance weights.
75
+ 3. A weighted combination creates a coarse heatmap.
76
+ 4. Final heatmap is overlaid on the original image.
77
+
78
+ 🔬 In this app:
79
+ - Helps understand *why* the model thinks a blood smear cell is infected
80
+ - Makes predictions more transparent and reliable
81
+ """)
82
+ visualize_gradcam(model, temp_image_path)
data/cell_images/Parasitized/C100P61ThinF_IMG_20150918_144104_cell_162.png ADDED
data/cell_images/Uninfected/C126P87ThinF_IMG_20151004_105342_cell_30.png ADDED
data_prep/data_prep.py ADDED
@@ -0,0 +1,295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # # Visualize data
5
+
6
+ # In[1]:
7
+
8
+
9
+ import os
10
+ import numpy as np
11
+ import matplotlib.pyplot as plt
12
+ import cv2
13
+ from pathlib import Path
14
+ from collections import defaultdict
15
+
16
+
17
+ # In[2]:
18
+
19
+
20
+ data_dir = "malaria_data/cell_images"
21
+ parasitized_dir = os.path.join(data_dir, 'Parasitized')
22
+ uninfected_dir = os.path.join(data_dir, 'Uninfected')
23
+
24
+ parasitized_files = list(Path(parasitized_dir).glob('*.png'))
25
+ uninfected_files = list(Path(uninfected_dir).glob('*.png'))
26
+
27
+ print(f"Parasitized Images: {len(parasitized_files)}")
28
+ print(f"Uninfected Images: {len(uninfected_files)}")
29
+
30
+
31
+ # In[3]:
32
+
33
+
34
+ labels = ['Parasitized', 'Uninfected']
35
+ counts = [len(parasitized_files), len(uninfected_files)]
36
+
37
+ plt.figure(figsize=(6, 4))
38
+ plt.bar(labels, counts, color=['#ff7f0e', '#1f77b4'])
39
+ plt.title("Class Distribution")
40
+ plt.ylabel("Number of Images")
41
+ plt.show()
42
+
43
+
44
+ # In[4]:
45
+
46
+
47
def plot_samples(image_files, title, num_samples=5):
    """Display the first few images of *image_files* in a single row.

    Args:
        image_files: sequence of image paths (str or Path).
        title: figure-level title shown above the row.
        num_samples: maximum number of images to display.
    """
    # Bound by the list length so small directories don't raise IndexError.
    count = min(num_samples, len(image_files))
    plt.figure(figsize=(15, 3))
    for i in range(count):
        img = cv2.imread(str(image_files[i]))
        # OpenCV loads BGR; convert so matplotlib shows true colors.
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        plt.subplot(1, count, i + 1)
        plt.imshow(img)
        plt.axis("off")
    plt.suptitle(title)
    plt.show()
57
+
58
+ plot_samples(parasitized_files, "Parasitized Cells")
59
+ plot_samples(uninfected_files, "Uninfected Cells")
60
+
61
+
62
+ # In[5]:
63
+
64
+
65
def get_image_sizes(file_list):
    """Return (height, width) for each readable image in *file_list*.

    Files OpenCV cannot decode (``imread`` returns None) are skipped instead
    of crashing with ``AttributeError: 'NoneType' object has no attribute 'shape'``.
    """
    sizes = []
    for path in file_list:
        img = cv2.imread(str(path))
        if img is None:
            # Corrupt or unsupported file — skip rather than raise.
            continue
        sizes.append(img.shape[:2])  # (height, width)
    return sizes
71
+
72
+ parasitized_sizes = get_image_sizes(parasitized_files)
73
+ uninfected_sizes = get_image_sizes(uninfected_files)
74
+
75
+ all_sizes = parasitized_sizes + uninfected_sizes
76
+ unique_sizes = set(all_sizes)
77
+
78
+ print("Unique image sizes found:")
79
+ print(unique_sizes)
80
+
81
+
82
+ # In[6]:
83
+
84
+
85
+ total_images = len(parasitized_files) + len(uninfected_files)
86
+ avg_height = np.mean([size[0] for size in all_sizes])
87
+ avg_width = np.mean([size[1] for size in all_sizes])
88
+
89
+ print(f"\nTotal Images: {total_images}")
90
+ print(f"Average Image Size: {avg_width:.0f}x{avg_height:.0f}")
91
+ print(f"Min/Max Height: {min(s[0] for s in all_sizes)} / {max(s[0] for s in all_sizes)}")
92
+ print(f"Min/Max Width: {min(s[1] for s in all_sizes)} / {max(s[1] for s in all_sizes)}")
93
+
94
+
95
+ # In[7]:
96
+
97
+
98
+ sample_img = cv2.imread(str(parasitized_files[5]))
99
+ print("Image shape:", sample_img.shape)
100
+
101
+
102
+ # # Data preprocessing
103
+
104
+ # In[8]:
105
+
106
+
107
+ import matplotlib.pyplot as plt
108
+ import numpy as np
109
+
110
+ # Assuming you have your image data in a numpy array called 'image_data'
111
+ # For a single image:
112
+ plt.figure(figsize=(10, 6))
113
+ plt.hist(sample_img.ravel(), bins=256, range=(0, 256), color='blue', alpha=0.7)
114
+ plt.title('Pixel Value Distribution')
115
+ plt.xlabel('Pixel Intensity')
116
+ plt.ylabel('Frequency')
117
+ plt.grid(True, linestyle='--', alpha=0.5)
118
+ plt.show()
119
+
120
+
121
+ # # Data Splitting
122
+
123
+ # In[20]:
124
+
125
+
126
+ import os
127
+ import shutil
128
+ from pathlib import Path
129
+ import random
130
+ from sklearn.model_selection import train_test_split
131
+ import numpy as np
132
+ import matplotlib.pyplot as plt
133
+ import cv2
134
+ import torch
135
+ from torchvision import datasets, transforms
136
+ from torch.utils.data import DataLoader
137
+
138
+
139
+ # In[21]:
140
+
141
+
142
+ RAW_DATA_DIR = 'malaria_data/cell_images'
143
+ OUTPUT_DIR = 'malaria_ds/split_dataset'
144
+
145
+ PARASITIZED_DIR = os.path.join(RAW_DATA_DIR, 'Parasitized')
146
+ UNINFECTED_DIR = os.path.join(RAW_DATA_DIR, 'Uninfected')
147
+
148
+ # Output directories
149
+ TRAIN_DIR = os.path.join(OUTPUT_DIR, 'train')
150
+ VAL_DIR = os.path.join(OUTPUT_DIR, 'validation')
151
+ TEST_DIR = os.path.join(OUTPUT_DIR, 'test')
152
+
153
+ # Ensure output directories exist
154
+ os.makedirs(PARASITIZED_DIR, exist_ok=True)
155
+ os.makedirs(UNINFECTED_DIR, exist_ok=True)
156
+
157
+ print("Paths defined.")
158
+
159
+
160
+ # In[22]:
161
+
162
+
163
def split_class_files(class_dir, train_dir, val_dir, test_dir):
    """Copy one class's images into train/validation/test folders (~80/10/10).

    Args:
        class_dir: source directory containing the class's images.
        train_dir, val_dir, test_dir: destination directories (must exist).

    Returns:
        int: total number of files found in *class_dir*.
    """
    all_files = list(Path(class_dir).glob('*.*'))

    # Carve off 10% for test first, then 0.1/0.9 of the remainder for
    # validation — i.e. 10% of the original total — leaving ~80% for training.
    train_files, test_files = train_test_split(all_files, test_size=0.1, random_state=42)
    train_files, val_files = train_test_split(train_files, test_size=0.1 / (1 - 0.1), random_state=42)

    for subset, destination in ((train_files, train_dir),
                                (val_files, val_dir),
                                (test_files, test_dir)):
        for src in subset:
            shutil.copy(src, destination)

    return len(all_files)
176
+
177
def create_split_folders():
    """Create the train/validation/test directory tree for both classes under OUTPUT_DIR."""
    for split_name in ('train', 'validation', 'test'):
        for class_name in ('Parasitized', 'Uninfected'):
            os.makedirs(os.path.join(OUTPUT_DIR, split_name, class_name), exist_ok=True)
182
+
183
+ print("Splitting Parasitized Images:")
184
+ total_parasitized = split_class_files(
185
+ os.path.join(RAW_DATA_DIR, 'Parasitized'),
186
+ os.path.join(OUTPUT_DIR, 'train', 'Parasitized'),
187
+ os.path.join(OUTPUT_DIR, 'validation', 'Parasitized'),
188
+ os.path.join(OUTPUT_DIR, 'test', 'Parasitized')
189
+ )
190
+
191
+ print("\nSplitting Uninfected Images:")
192
+ total_uninfected = split_class_files(
193
+ os.path.join(RAW_DATA_DIR, 'Uninfected'),
194
+ os.path.join(OUTPUT_DIR, 'train', 'Uninfected'),
195
+ os.path.join(OUTPUT_DIR, 'validation', 'Uninfected'),
196
+ os.path.join(OUTPUT_DIR, 'test', 'Uninfected')
197
+ )
198
+
199
+ print(f"\nTotal Parasitized: {total_parasitized}, Uninfected: {total_uninfected}")
200
+ print("Dataset split completed.")
201
+
202
+
203
+ # ## Data Aug and transforms
204
+
205
+ # In[23]:
206
+
207
+
208
+ IMG_SIZE = (128, 128)
209
+ BATCH_SIZE = 32
210
+
211
+ # Custom class_to_idx mapping to fix label order
212
+ class_to_idx = {'Uninfected': 0, 'Parasitized': 1}
213
+ idx_to_class = {v: k for k, v in class_to_idx.items()}
214
+
215
+ # Define transforms
216
+ train_transforms = transforms.Compose([
217
+ transforms.Resize(IMG_SIZE),
218
+ transforms.ToTensor(),
219
+ transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
220
+ transforms.RandomRotation(20),
221
+ transforms.RandomHorizontalFlip(),
222
+ ])
223
+
224
+ val_test_transforms = transforms.Compose([
225
+ transforms.Resize(IMG_SIZE),
226
+ transforms.ToTensor(),
227
+ transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
228
+ ])
229
+
230
+ # Custom Dataset Class to enforce class_to_idx
231
class CustomImageFolder(datasets.ImageFolder):
    """ImageFolder that lets the caller pin the class -> index mapping.

    torchvision assigns indices alphabetically ('Parasitized' = 0), which is
    the opposite of the convention used in this project ('Uninfected' = 0).
    Passing *class_to_idx_override* relabels every sample accordingly.
    """

    def __init__(self, root, transform, class_to_idx_override=None):
        super().__init__(root=root, transform=transform)
        if class_to_idx_override:
            self.class_to_idx = class_to_idx_override
            # Relabel using the override parameter. The original code indexed
            # the module-level `class_to_idx` global here, silently ignoring
            # the parameter passed by the caller.
            self.samples = [
                (path, class_to_idx_override[cls])
                for path, cls_idx in self.samples
                for cls in [self.classes[cls_idx]]
                if cls in class_to_idx_override
            ]
            self.classes = list(class_to_idx_override.keys())
243
+
244
+
245
+
246
+ # In[24]:
247
+
248
+
249
def get_dataloaders():
    """Build the train/validation/test datasets and loaders with the fixed label mapping.

    Returns:
        tuple: (train_loader, val_loader, test_loader,
                train_dataset, val_dataset, test_dataset)
    """
    split_specs = {
        'train': train_transforms,
        'validation': val_test_transforms,
        'test': val_test_transforms,
    }
    split_datasets = {
        name: CustomImageFolder(
            root=os.path.join(OUTPUT_DIR, name),
            transform=tfm,
            class_to_idx_override=class_to_idx,
        )
        for name, tfm in split_specs.items()
    }
    train_dataset = split_datasets['train']
    val_dataset = split_datasets['validation']
    test_dataset = split_datasets['test']

    # Only the training loader shuffles; evaluation order stays deterministic.
    train_loader = DataLoader(train_dataset, batch_size=BATCH_SIZE, shuffle=True)
    val_loader = DataLoader(val_dataset, batch_size=BATCH_SIZE, shuffle=False)
    test_loader = DataLoader(test_dataset, batch_size=BATCH_SIZE, shuffle=False)

    print(f"Train: {len(train_dataset)}, Val: {len(val_dataset)}, Test: {len(test_dataset)}")
    print("Class Mapping:", train_dataset.class_to_idx)

    return train_loader, val_loader, test_loader, train_dataset, val_dataset, test_dataset
264
+
265
+
266
+ # In[26]:
267
+
268
+
269
def show_batch_sample(loader, dataset):
    """Plot up to six images from the loader's first batch with their class names.

    Note: *dataset* is kept for interface compatibility but is not used.
    """
    images, labels = next(iter(loader))
    mean = np.array([0.485, 0.456, 0.406])
    std = np.array([0.229, 0.224, 0.225])

    plt.figure(figsize=(12, 6))
    for idx in range(min(6, BATCH_SIZE)):
        # CHW tensor -> HWC array, then undo the normalization for display.
        arr = images[idx].numpy().transpose((1, 2, 0))
        arr = np.clip(arr * std + mean, 0, 1)
        plt.subplot(2, 3, idx + 1)
        plt.imshow(arr)
        plt.title(idx_to_class[labels[idx].item()])
        plt.axis("off")
    plt.suptitle("Sample Batch from DataLoader")
    plt.show()
281
+
282
+
283
+ # In[32]:
284
+
285
+
286
+ create_split_folders()
287
+ train_loader, val_loader, test_loader, train_dataset, val_dataset, test_dataset = get_dataloaders()
288
+ show_batch_sample(train_loader, train_dataset)
289
+
290
+
291
+ # In[34]:
292
+
293
+
294
+ print(train_dataset)
295
+
grdcam/gradcam.py ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import torch
8
+ import torch.nn as nn
9
+ import torch.nn.functional as F
10
+ import cv2
11
+ import numpy as np
12
+ from torchvision import transforms
13
+ import matplotlib.pyplot as plt
14
+ from PIL import Image
15
+ import streamlit as st
16
+
17
+
18
+ # In[2]:
19
+
20
+
21
def preprocess_image(image_path):
    """Load an image file and prepare it as a normalized batch tensor.

    Args:
        image_path: path to the input image.

    Returns:
        tuple: (1x3x224x224 float tensor ready for the model, original PIL image).
    """
    pipeline = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        # ImageNet statistics, matching the pretrained ResNet backbone.
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])

    pil_image = Image.open(image_path).convert('RGB')
    batch = pipeline(pil_image).unsqueeze(0)
    return batch, pil_image
34
+
35
+
36
+ # In[3]:
37
+
38
+
39
def get_last_conv_layer(model):
    """Return the qualified name of the last Conv2d module registered in *model*.

    Raises:
        ValueError: if the model contains no Conv2d layer.
    """
    last_name = None
    # A forward scan that keeps the most recent match is equivalent to
    # returning the first match of a reversed scan.
    for name, module in model.named_modules():
        if isinstance(module, nn.Conv2d):
            last_name = name
    if last_name is None:
        raise ValueError("No Conv2d layers found in the model.")
    return last_name
48
+
49
+
50
+ # In[4]:
51
+
52
+
53
def apply_gradcam(model, image_tensor, target_class=None):
    """Compute a Grad-CAM heatmap for *image_tensor*.

    Args:
        model: CNN containing at least one Conv2d layer.
        image_tensor: (1, C, H, W) preprocessed input batch.
        target_class: class index to explain; defaults to the argmax prediction.

    Returns:
        np.ndarray: 224x224 heatmap with values in [0, 1].
    """
    device = next(model.parameters()).device
    image_tensor = image_tensor.to(device)

    # Hooks capture the last conv layer's activations and their gradients.
    features = []
    gradients = []

    def forward_hook(module, input, output):
        features.append(output.detach())

    def backward_hook(module, grad_input, grad_output):
        gradients.append(grad_output[0].detach())

    last_conv_layer_name = get_last_conv_layer(model)
    last_conv_layer = dict(model.named_modules())[last_conv_layer_name]
    handle_forward = last_conv_layer.register_forward_hook(forward_hook)
    handle_backward = last_conv_layer.register_full_backward_hook(backward_hook)

    try:
        # Forward pass to get class scores.
        model.eval()
        output = model(image_tensor)
        if target_class is None:
            target_class = output.argmax(dim=1).item()

        # Zero out all gradients before the targeted backward pass.
        model.zero_grad()

        # Backpropagate only the target class's score.
        one_hot = torch.zeros_like(output)
        one_hot[0][target_class] = 1
        output.backward(gradient=one_hot)
    finally:
        # Always detach the hooks, even if forward/backward raises; the
        # original left them registered on failure, corrupting later calls.
        handle_forward.remove()
        handle_backward.remove()

    feature_map = features[-1].squeeze().cpu().numpy()
    gradient = gradients[-1].squeeze().cpu().numpy()

    # Grad-CAM: channel weights = global average pool of gradients, then a
    # weighted sum over channels.
    pooled_gradients = np.mean(gradient, axis=(1, 2), keepdims=True)
    cam = np.sum(feature_map * pooled_gradients, axis=0)

    # Keep only positive influence on the target class.
    cam = np.maximum(cam, 0)

    # Normalize to [0, 1]; guard the all-zero map, which previously produced
    # a NaN heatmap via 0/0.
    cam = cam - np.min(cam)
    peak = np.max(cam)
    if peak > 0:
        cam = cam / peak

    # Resize CAM to the model's input resolution.
    cam = cv2.resize(cam, (224, 224))

    return cam
113
+
114
+
115
+ # In[5]:
116
+
117
+
118
def overlay_heatmap(original_image, heatmap, alpha=0.5):
    """Blend a Grad-CAM heatmap over an RGB image.

    Args:
        original_image (np.ndarray): Original image (H, W, 3), uint8
        heatmap (np.ndarray): Grad-CAM heatmap (H', W'), float between 0 and 1
        alpha (float): Weight for the heatmap

    Returns:
        np.ndarray: Overlayed image
    """
    # Collapse an accidental channel dimension down to a 2-D map.
    if heatmap.ndim == 3:
        heatmap = np.mean(heatmap, axis=2)

    height, width = original_image.shape[:2]
    resized = cv2.resize(heatmap, (width, height))

    # Scale to byte range, colorize with JET, then flip BGR -> RGB.
    colored = cv2.applyColorMap(np.uint8(255 * resized), cv2.COLORMAP_JET)
    colored = cv2.cvtColor(colored, cv2.COLOR_BGR2RGB)

    # Alpha-blend heatmap and original (weights sum to 1, so range stays valid).
    blended = colored * alpha + original_image * (1 - alpha)
    return np.uint8(blended)
149
+
150
def visualize_gradcam(model, image_path):
    """Render the original image and its Grad-CAM overlay side by side in Streamlit.

    Args:
        model: trained classifier with at least one Conv2d layer.
        image_path: path to the image file to explain.
    """
    # Preprocess and keep the PIL original for display.
    image_tensor, original_image = preprocess_image(image_path)
    original_image_np = np.array(original_image)  # PIL -> numpy array

    # Smaller copy purely for the left-hand display panel.
    max_size = (400, 400)  # Max width and height
    original_image_resized = cv2.resize(original_image_np, max_size)

    # Compute the class-activation map.
    cam = apply_gradcam(model, image_tensor)

    # Match the CAM to the original image resolution.
    heatmap_resized = cv2.resize(cam, (original_image_np.shape[1], original_image_np.shape[0]))

    # Scale to [0, 255]; guard the all-zero case, which previously divided
    # by zero and produced a NaN/garbage heatmap.
    peak = np.max(heatmap_resized)
    if peak > 0:
        heatmap_resized = heatmap_resized / peak
    heatmap_resized = np.uint8(255 * heatmap_resized)

    # Colorize (JET) and flip BGR -> RGB for matplotlib.
    heatmap_colored = cv2.applyColorMap(heatmap_resized, cv2.COLORMAP_JET)
    heatmap_colored = cv2.cvtColor(heatmap_colored, cv2.COLOR_BGR2RGB)

    # 40/60 blend of heatmap over the original image.
    superimposed_img = heatmap_colored * 0.4 + original_image_np * 0.6
    superimposed_img = np.clip(superimposed_img, 0, 255).astype(np.uint8)

    # Two-panel figure: original on the left, overlay on the right.
    fig, axes = plt.subplots(1, 2, figsize=(8, 4))
    axes[0].imshow(original_image_resized)
    axes[0].set_title("Original Image")
    axes[0].axis("off")

    axes[1].imshow(superimposed_img)
    axes[1].set_title("Grad-CAM Heatmap")
    axes[1].axis("off")

    plt.tight_layout()
    st.pyplot(fig)
    plt.close(fig)  # free the figure so repeated predictions don't leak memory
192
+
193
+
194
+ # In[6]:
195
+
196
+
197
+ if __name__ == "__main__":
198
+
199
+ from models.resnet_model import MalariaResNet50
200
+ # Load your trained model
201
+ model = MalariaResNet50(num_classes=2)
202
+ model.load_state_dict(torch.load("models/malaria_model.pth"))
203
+ model.eval()
204
+
205
+ # Path to an image
206
+ image_path = "malaria_ds/split_dataset/test/Parasitized/C33P1thinF_IMG_20150619_114756a_cell_181.png"
207
+
208
+ # Visualize Grad-CAM
209
+ visualize_gradcam(model, image_path)
210
+
211
+
212
+ # In[ ]:
213
+
214
+
215
+
216
+
models/__init__.py ADDED
File without changes
models/resnet_model.ipynb ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "72678f69-46b9-4908-b301-85ad5d4a6055",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "import torch\n",
11
+ "import torch.nn as nn\n",
12
+ "from torchvision import models, transforms\n",
13
+ "from PIL import Image\n",
14
+ "import numpy as np\n",
15
+ "import torch.nn.functional as F\n",
16
+ "from torchvision.models import resnet50, ResNet50_Weights"
17
+ ]
18
+ },
19
+ {
20
+ "cell_type": "code",
21
+ "execution_count": 4,
22
+ "id": "88eb14fe-a198-4378-8817-13924bb328e3",
23
+ "metadata": {},
24
+ "outputs": [],
25
+ "source": [
26
+ "class MalariaResNet50(nn.Module):\n",
27
+ " def __init__(self, num_classes=2):\n",
28
+ " super(MalariaResNet50, self).__init__()\n",
29
+ " # Load pretrained ResNet50\n",
30
+ " self.backbone = models.resnet50(weights=ResNet50_Weights.DEFAULT)\n",
31
+ "\n",
32
+ " # Replace final fully connected layer for binary classification\n",
33
+ " num_ftrs = self.backbone.fc.in_features\n",
34
+ " self.backbone.fc = nn.Linear(num_ftrs, num_classes)\n",
35
+ "\n",
36
+ " def forward(self, x):\n",
37
+ " return self.backbone(x)\n",
38
+ "\n",
39
+ " def predict(self, image_path, device='cpu', show_image=False):\n",
40
+ " \"\"\"\n",
41
+ " Predict class of a single image.\n",
42
+ "\n",
43
+ " Args:\n",
44
+ " image_path (str): Path to input image\n",
45
+ " device (torch.device): 'cuda' or 'cpu'\n",
46
+ " show_image (bool): Whether to display the image\n",
47
+ "\n",
48
+ " Returns:\n",
49
+ " pred_label (str): \"Infected\" or \"Uninfected\"\n",
50
+ " confidence (float): Confidence score (softmax output)\n",
51
+ " \"\"\"\n",
52
+ " from torchvision import transforms\n",
53
+ " from PIL import Image\n",
54
+ " import matplotlib.pyplot as plt\n",
55
+ "\n",
56
+ " transform = transforms.Compose([\n",
57
+ " transforms.Resize((224, 224)),\n",
58
+ " transforms.ToTensor(),\n",
59
+ " transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n",
60
+ " ])\n",
61
+ "\n",
62
+ " # Load and preprocess image\n",
63
+ " img = Image.open(image_path).convert('RGB')\n",
64
+ " img_tensor = transform(img).unsqueeze(0).to(device)\n",
65
+ "\n",
66
+ " # Inference\n",
67
+ " self.eval()\n",
68
+ " with torch.no_grad():\n",
69
+ " output = self(img_tensor)\n",
70
+ " probs = F.softmax(output, dim=1)\n",
71
+ " _, preds = torch.max(output, 1)\n",
72
+ "\n",
73
+ " pred_idx = preds.item()\n",
74
+ " confidence = probs[0][pred_idx].item()\n",
75
+ "\n",
76
+ " classes = ['Uninfected', 'Infected']\n",
77
+ " pred_label = classes[pred_idx]\n",
78
+ "\n",
79
+ " if show_image:\n",
80
+ " plt.imshow(img)\n",
81
+ " plt.title(f\"Predicted: {pred_label} ({confidence:.2%})\")\n",
82
+ " plt.axis(\"off\")\n",
83
+ " plt.show()\n",
84
+ "\n",
85
+ " return pred_label, confidence\n",
86
+ "\n",
87
+ " def save(self, path):\n",
88
+ " \"\"\"Save model state dict\"\"\"\n",
89
+ " torch.save(self.state_dict(), path)\n",
90
+ " print(f\"Model saved to {path}\")\n",
91
+ "\n",
92
+ " def load(self, path):\n",
93
+ " \"\"\"Load model state dict from file\"\"\"\n",
94
+ " state_dict = torch.load(path, map_location=torch.device('cpu'))\n",
95
+ " self.load_state_dict(state_dict)\n",
96
+ " print(f\"Model loaded from {path}\")"
97
+ ]
98
+ },
99
+ {
100
+ "cell_type": "code",
101
+ "execution_count": null,
102
+ "id": "70b8f814-f126-4a12-afe8-051b9b9d4c2a",
103
+ "metadata": {},
104
+ "outputs": [],
105
+ "source": []
106
+ }
107
+ ],
108
+ "metadata": {
109
+ "kernelspec": {
110
+ "display_name": "Python 3 (ipykernel)",
111
+ "language": "python",
112
+ "name": "python3"
113
+ },
114
+ "language_info": {
115
+ "codemirror_mode": {
116
+ "name": "ipython",
117
+ "version": 3
118
+ },
119
+ "file_extension": ".py",
120
+ "mimetype": "text/x-python",
121
+ "name": "python",
122
+ "nbconvert_exporter": "python",
123
+ "pygments_lexer": "ipython3",
124
+ "version": "3.10.17"
125
+ }
126
+ },
127
+ "nbformat": 4,
128
+ "nbformat_minor": 5
129
+ }
models/resnet_model.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # coding: utf-8
3
+
4
+ # In[1]:
5
+
6
+
7
+ import torch
8
+ import torch.nn as nn
9
+ from torchvision import models, transforms
10
+ from PIL import Image
11
+ import numpy as np
12
+ import torch.nn.functional as F
13
+ from torchvision.models import resnet50, ResNet50_Weights
14
+
15
+
16
+ # In[4]:
17
+
18
+
19
class MalariaResNet50(nn.Module):
    """ResNet50-based binary classifier for malaria blood-smear images.

    A pretrained torchvision ResNet50 serves as the backbone; its final
    fully connected layer is replaced by a `num_classes`-way head.
    """

    def __init__(self, num_classes=2):
        super(MalariaResNet50, self).__init__()
        # Start from ImageNet weights for transfer learning.
        self.backbone = models.resnet50(weights=ResNet50_Weights.DEFAULT)

        # Swap the 1000-way ImageNet head for our classification head.
        in_features = self.backbone.fc.in_features
        self.backbone.fc = nn.Linear(in_features, num_classes)

    def forward(self, x):
        # Delegate entirely to the (modified) backbone.
        return self.backbone(x)

    def predict(self, image_path, device='cpu', show_image=False):
        """
        Predict class of a single image.

        Args:
            image_path (str): Path to input image
            device (torch.device): 'cuda' or 'cpu'
            show_image (bool): Whether to display the image

        Returns:
            pred_label (str): "Infected" or "Uninfected"
            confidence (float): Confidence score (softmax output)
        """
        from torchvision import transforms
        from PIL import Image
        import matplotlib.pyplot as plt

        preprocess = transforms.Compose([
            transforms.Resize((224, 224)),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])

        # Load and preprocess the image into a 1-image batch.
        img = Image.open(image_path).convert('RGB')
        batch = preprocess(img).unsqueeze(0).to(device)

        # Inference without gradient tracking.
        self.eval()
        with torch.no_grad():
            logits = self(batch)
            probs = F.softmax(logits, dim=1)
            pred_idx = int(logits.argmax(dim=1).item())

        confidence = probs[0][pred_idx].item()
        # Index 0 = Uninfected, 1 = Infected (matches the training label order).
        pred_label = ('Uninfected', 'Infected')[pred_idx]

        if show_image:
            plt.imshow(img)
            plt.title(f"Predicted: {pred_label} ({confidence:.2%})")
            plt.axis("off")
            plt.show()

        return pred_label, confidence

    def save(self, path):
        """Save model state dict"""
        torch.save(self.state_dict(), path)
        print(f"Model saved to {path}")

    def load(self, path):
        """Load model state dict from file"""
        state_dict = torch.load(path, map_location=torch.device('cpu'))
        self.load_state_dict(state_dict)
        print(f"Model loaded from {path}")
90
+
91
+
92
+ # In[ ]:
93
+
94
+
95
+
96
+
notebooks/app.ipynb ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "fa37d83b-2543-4b97-a6dd-f54fc56bfd9a",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "import streamlit as st\n",
11
+ "import torch\n",
12
+ "from PIL import Image\n",
13
+ "import numpy as np\n",
14
+ "import torch.nn.functional as F"
15
+ ]
16
+ },
17
+ {
18
+ "cell_type": "code",
19
+ "execution_count": 2,
20
+ "id": "cfa28009-cc5a-42bf-a5a9-b1cbaeb09fa2",
21
+ "metadata": {},
22
+ "outputs": [],
23
+ "source": [
24
+ "# Import custom modules\n",
25
+ "from models.resnet_model import MalariaResNet50\n",
26
+ "from gradcam import visualize_gradcam"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": 3,
32
+ "id": "4dbb399f-01b7-4143-97d0-0fc4b1a33148",
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "name": "stderr",
37
+ "output_type": "stream",
38
+ "text": [
39
+ "2025-05-28 22:57:39.942 WARNING streamlit.runtime.scriptrunner_utils.script_run_context: Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
40
+ "2025-05-28 22:57:39.948 WARNING streamlit.runtime.scriptrunner_utils.script_run_context: Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
41
+ "2025-05-28 22:57:41.270 \n",
42
+ " \u001b[33m\u001b[1mWarning:\u001b[0m to view this Streamlit app on a browser, run it with the following\n",
43
+ " command:\n",
44
+ "\n",
45
+ " streamlit run C:\\Users\\HP\\miniconda3\\envs\\pytorch_env\\lib\\site-packages\\ipykernel_launcher.py [ARGUMENTS]\n",
46
+ "2025-05-28 22:57:41.272 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
47
+ "2025-05-28 22:57:41.274 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
48
+ "2025-05-28 22:57:41.275 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n"
49
+ ]
50
+ }
51
+ ],
52
+ "source": [
53
+ "st.set_page_config(page_title=\"🧬 Malaria Cell Classifier\", layout=\"wide\")\n",
54
+ "st.title(\"🧬 Malaria Cell Classifier with Grad-CAM\")\n",
55
+ "st.write(\"Upload a blood smear image and the model will classify it as infected or uninfected, and highlight key regions using Grad-CAM.\")"
56
+ ]
57
+ },
58
+ {
59
+ "cell_type": "code",
60
+ "execution_count": 4,
61
+ "id": "cfba0e0a-094e-491f-8d7b-abe0643344dc",
62
+ "metadata": {},
63
+ "outputs": [
64
+ {
65
+ "name": "stderr",
66
+ "output_type": "stream",
67
+ "text": [
68
+ "2025-05-28 22:57:41.317 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
69
+ "2025-05-28 22:57:41.319 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
70
+ "2025-05-28 22:57:41.320 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
71
+ "2025-05-28 22:57:41.833 Thread 'Thread-3': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
72
+ "2025-05-28 22:57:41.840 Thread 'Thread-3': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n"
73
+ ]
74
+ },
75
+ {
76
+ "name": "stdout",
77
+ "output_type": "stream",
78
+ "text": [
79
+ "Downloading: \"https://download.pytorch.org/models/resnet50-11ad3fa6.pth\" to C:\\Users\\HP/.cache\\torch\\hub\\checkpoints\\resnet50-11ad3fa6.pth\n"
80
+ ]
81
+ },
82
+ {
83
+ "name": "stderr",
84
+ "output_type": "stream",
85
+ "text": [
86
+ "100%|█████████████████████████████████████████████████████████████████████████████| 97.8M/97.8M [00:09<00:00, 11.2MB/s]\n",
87
+ "2025-05-28 22:57:53.490 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
88
+ "2025-05-28 22:57:53.497 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n"
89
+ ]
90
+ }
91
+ ],
92
+ "source": [
93
+ "# Load Model\n",
94
+ "@st.cache_resource\n",
95
+ "def load_model():\n",
96
+ " model = MalariaResNet50(num_classes=2)\n",
97
+ " model.load_state_dict(torch.load(\"models/malaria_model.pth\", map_location=torch.device('cpu')))\n",
98
+ " model.eval()\n",
99
+ " return model\n",
100
+ "\n",
101
+ "model = load_model()"
102
+ ]
103
+ },
104
+ {
105
+ "cell_type": "code",
106
+ "execution_count": 5,
107
+ "id": "11f5d404-6c4c-4484-a11a-acede1e5ab7d",
108
+ "metadata": {},
109
+ "outputs": [
110
+ {
111
+ "name": "stderr",
112
+ "output_type": "stream",
113
+ "text": [
114
+ "2025-05-28 22:57:53.586 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
115
+ "2025-05-28 22:57:53.592 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
116
+ "2025-05-28 22:57:53.594 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
117
+ "2025-05-28 22:57:53.600 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n",
118
+ "2025-05-28 22:57:53.606 Thread 'MainThread': missing ScriptRunContext! This warning can be ignored when running in bare mode.\n"
119
+ ]
120
+ }
121
+ ],
122
+ "source": [
123
+ "# Upload Image\n",
124
+ "uploaded_file = st.file_uploader(\"Choose an image...\", type=[\"jpg\", \"png\", \"jpeg\"])\n",
125
+ "\n",
126
+ "if uploaded_file is not None:\n",
127
+ " # Save uploaded image temporarily\n",
128
+ " temp_image_path = f\"temp_{uploaded_file.name}\"\n",
129
+ " with open(temp_image_path, \"wb\") as f:\n",
130
+ " f.write(uploaded_file.getbuffer())\n",
131
+ "\n",
132
+ " # Display original image\n",
133
+ " image = Image.open(uploaded_file).convert(\"RGB\")\n",
134
+ " st.image(image, caption=\"Uploaded Image\", use_column_width=True)\n",
135
+ "\n",
136
+ " # Predict button\n",
137
+ " if st.button(\"Predict\"):\n",
138
+ " with st.spinner(\"Classifying...\"):\n",
139
+ " # Run prediction and show Grad-CAM\n",
140
+ " pred_label, confidence = model.predict(temp_image_path, device='cpu', show_image=False)\n",
141
+ " \n",
142
+ " st.success(f\"✅ Prediction: **{pred_label}** | Confidence: **{confidence:.2%}**\")\n",
143
+ "\n",
144
+ " # Show Grad-CAM\n",
145
+ " st.subheader(\"🔍 Grad-CAM Visualization\")\n",
146
+ " visualize_gradcam(model, temp_image_path)"
147
+ ]
148
+ },
149
+ {
150
+ "cell_type": "code",
151
+ "execution_count": null,
152
+ "id": "8fa9705e-7775-489a-ad09-27e96793dcc3",
153
+ "metadata": {},
154
+ "outputs": [],
155
+ "source": []
156
+ }
157
+ ],
158
+ "metadata": {
159
+ "kernelspec": {
160
+ "display_name": "Python 3 (ipykernel)",
161
+ "language": "python",
162
+ "name": "python3"
163
+ },
164
+ "language_info": {
165
+ "codemirror_mode": {
166
+ "name": "ipython",
167
+ "version": 3
168
+ },
169
+ "file_extension": ".py",
170
+ "mimetype": "text/x-python",
171
+ "name": "python",
172
+ "nbconvert_exporter": "python",
173
+ "pygments_lexer": "ipython3",
174
+ "version": "3.10.17"
175
+ }
176
+ },
177
+ "nbformat": 4,
178
+ "nbformat_minor": 5
179
+ }
notebooks/data_prep.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
notebooks/evaluate.py.ipynb ADDED
@@ -0,0 +1,184 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "id": "f7c4548e-c6b2-48dc-9df1-58a7c06481d8",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "import torch\n",
11
+ "from sklearn.metrics import classification_report, confusion_matrix, accuracy_score\n",
12
+ "import seaborn as sns\n",
13
+ "import matplotlib.pyplot as plt\n"
14
+ ]
15
+ },
16
+ {
17
+ "cell_type": "code",
18
+ "execution_count": 4,
19
+ "id": "9bd4822d-3ccd-416d-a03f-a0eda7b2bfaa",
20
+ "metadata": {
21
+ "scrolled": true
22
+ },
23
+ "outputs": [],
24
+ "source": [
25
+ "# Import custom modules\n",
26
+ "from models.resnet_model import MalariaResNet50\n",
27
+ "from data_prep import get_dataloaders"
28
+ ]
29
+ },
30
+ {
31
+ "cell_type": "code",
32
+ "execution_count": 5,
33
+ "id": "91ac3fe5-c5d7-497d-9a6d-2903f6b97bf7",
34
+ "metadata": {},
35
+ "outputs": [],
36
+ "source": [
37
+ "MODEL_PATH = 'models/malaria_model.pth'"
38
+ ]
39
+ },
40
+ {
41
+ "cell_type": "code",
42
+ "execution_count": 6,
43
+ "id": "1c7e9e80-2986-4979-8450-6821e6e0a3a8",
44
+ "metadata": {},
45
+ "outputs": [
46
+ {
47
+ "name": "stdout",
48
+ "output_type": "stream",
49
+ "text": [
50
+ "\n",
51
+ "Total Classes: 2\n",
52
+ "Train batches: 689, Val batches: 87, Test batches: 87\n",
53
+ "Using device: cuda\n"
54
+ ]
55
+ },
56
+ {
57
+ "name": "stderr",
58
+ "output_type": "stream",
59
+ "text": [
60
+ "/opt/conda/lib/python3.12/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet50_Weights.IMAGENET1K_V1`. You can also use `weights=ResNet50_Weights.DEFAULT` to get the most up-to-date weights.\n",
61
+ " warnings.warn(msg)\n"
62
+ ]
63
+ },
64
+ {
65
+ "name": "stdout",
66
+ "output_type": "stream",
67
+ "text": [
68
+ "Model loaded from models/malaria_model.pth\n",
69
+ "Running inference on test set...\n",
70
+ "\n",
71
+ "Test Accuracy: 0.9699\n",
72
+ "\n",
73
+ "Classification Report:\n",
74
+ " precision recall f1-score support\n",
75
+ "\n",
76
+ " Parasitized 0.97 0.97 0.97 1378\n",
77
+ " Uninfected 0.97 0.97 0.97 1378\n",
78
+ "\n",
79
+ " accuracy 0.97 2756\n",
80
+ " macro avg 0.97 0.97 0.97 2756\n",
81
+ "weighted avg 0.97 0.97 0.97 2756\n",
82
+ "\n"
83
+ ]
84
+ },
85
+ {
86
+ "data": {
87
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgoAAAHWCAYAAAAW1aGcAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjMsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvZiW1igAAAAlwSFlzAAAPYQAAD2EBqD+naQAAU+tJREFUeJzt3XdYFFfbBvB7EFh6EaVFRexgL4liN2JDjS0qkURE1MRAjGILsVcidsxriRpbMInGEjWJimKJiogFReyKkkSKgoCA9Pn+8GOSFdYFXdx19/55zXW5Z87OPLOu8vicc2YEURRFEBEREZVCT90BEBERkeZiokBEREQKMVEgIiIihZgoEBERkUJMFIiIiEghJgpERESkEBMFIiIiUoiJAhERESnERIGIiIgUYqJAVEa3b99G9+7dYWlpCUEQsHfvXpUe//79+xAEAZs3b1bpcd9mnTt3RufOndUdBpFOY6JAb5W7d+/i008/Ra1atWBkZAQLCwu0a9cOK1euxLNnzyr03N7e3oiJicGCBQuwbds2tGrVqkLP9yaNGDECgiDAwsKi1M/x9u3bEAQBgiBgyZIl5T7+w4cPMXv2bERHR6sgWiJ6k/TVHQBRWf32228YPHgwZDIZhg8fjkaNGiEvLw+nTp3C5MmTERsbi++++65Czv3s2TNERERg2rRp8Pf3r5BzODk54dmzZzAwMKiQ4yujr6+P7Oxs7N+/H0OGDJHbFxoaCiMjI+Tk5LzSsR8+fIg5c+agZs2aaNasWZnfd/jw4Vc6HxGpDhMFeivExcXB09MTTk5OCA8Ph4ODg7TPz88Pd+7cwW+//VZh53/06BEAwMrKqsLOIQgCjIyMKuz4yshkMrRr1w4//vhjiURh+/bt6N27N3bt2vVGYsnOzoaJiQkMDQ3fyPmISDEOPdBbITg4GJmZmdi4caNcklCsTp06+PLLL6XXBQUFmDdvHmrXrg2ZTIaaNWvi66+/Rm5urtz7atasiT59+uDUqVN47733YGRkhFq1amHr1q1Sn9mzZ8PJyQkAMHnyZAiCgJo1awJ4XrIv/v1/zZ49G4IgyLWFhYWhffv2sLKygpmZGerXr4+vv/5a2q9ojkJ4eDg6dOgAU1NTWFlZoV+/frh+/Xqp57tz5w5GjBgBKysrWFpawsfHB9nZ2Yo/2BcMGzYMf/zxB9LS0qS2qKgo3L59G8OGDSvRPzU1FZMmTULjxo1hZmYGCwsL9OrVC5cvX5b6HD9+HO+++y4AwMfHRxrCKL7Ozp07o1GjRrhw4QI6duwIExMT6XN5cY6Ct7c3jIyMSlx/jx49YG1tjYcPH5b5WomobJgo0Fth//79qFWrFtq2bVum/qNGjcLMmTPRokULLF++HJ06dUJQUBA8PT1L9L1z5w4+/PBDdOvWDUuXLoW1tTVGjBiB2NhYAMDAgQOxfPlyAMBHH32Ebdu2YcWKFeWKPzY2Fn369EFubi7mzp2LpUuX4oMPPsDp06df+r4jR46gR48eSE5OxuzZsxEQEIAzZ86gXbt2uH//fon+Q4YMwdOnTxEUFIQhQ4Zg8+bNmDNnTpnjHDhwIARBwO7du6W27du3o0GDBmjRokWJ/vfu3cPevXvRp08fLFu2DJMnT0ZMTAw6deok/dB2cXHB3LlzAQBjxozBtm3bsG3bNnTs2FE6TkpKCnr16oVmzZphxYoV6NKlS6nxrVy5ElWrVoW3tzcKCwsBAOvWrcPhw4exatUqODo6lvlaiaiMRCINl56eLgIQ+/XrV6b+0dHRIgBx1KhRcu2TJk0SAYjh4eFSm5OTkwhAPHnypNSWnJwsymQyceLEiVJbXFycCEBcvHix3DG9vb1FJyenEjHMmjVL/O9fr+XLl4sAxEePHimMu/gcmzZtktqaNWsm2traiikpKVLb5cuXRT09PXH48OElzjdy5Ei5Yw4YMEC0sbFReM7/XoepqakoiqL44Ycfil27dhVFURQLCwtF
e3t7cc6cOaV+Bjk5OWJhYWGJ65DJZOLcuXOltqioqBLXVqxTp04iAHHt2rWl7uvUqZNc26FDh0QA4vz588V79+6JZmZmYv/+/ZVeIxG9GlYUSONlZGQAAMzNzcvU//fffwcABAQEyLVPnDgRAErMZXB1dUWHDh2k11WrVkX9+vVx7969V475RcVzG3799VcUFRWV6T0JCQmIjo7GiBEjULlyZam9SZMm6Natm3Sd//XZZ5/Jve7QoQNSUlKkz7Ashg0bhuPHjyMxMRHh4eFITEwsddgBeD6vQU/v+T8jhYWFSElJkYZVLl68WOZzymQy+Pj4lKlv9+7d8emnn2Lu3LkYOHAgjIyMsG7dujKfi4jKh4kCaTwLCwsAwNOnT8vU/8GDB9DT00OdOnXk2u3t7WFlZYUHDx7ItdeoUaPEMaytrfHkyZNXjLikoUOHol27dhg1ahTs7Ozg6emJHTt2vDRpKI6zfv36Jfa5uLjg8ePHyMrKkmt/8Vqsra0BoFzX4uHhAXNzc/z8888IDQ3Fu+++W+KzLFZUVITly5ejbt26kMlkqFKlCqpWrYorV64gPT29zOd85513yjVxccmSJahcuTKio6MREhICW1vbMr+XiMqHiQJpPAsLCzg6OuLq1avlet+LkwkVqVSpUqntoii+8jmKx8+LGRsb4+TJkzhy5Ag++eQTXLlyBUOHDkW3bt1K9H0dr3MtxWQyGQYOHIgtW7Zgz549CqsJALBw4UIEBASgY8eO+OGHH3Do0CGEhYWhYcOGZa6cAM8/n/K4dOkSkpOTAQAxMTHlei8RlQ8TBXor9OnTB3fv3kVERITSvk5OTigqKsLt27fl2pOSkpCWliatYFAFa2truRUCxV6sWgCAnp4eunbtimXLluHatWtYsGABwsPDcezYsVKPXRznzZs3S+y7ceMGqlSpAlNT09e7AAWGDRuGS5cu4enTp6VOAC32yy+/oEuXLti4cSM8PT3RvXt3uLu7l/hMypq0lUVWVhZ8fHzg6uqKMWPGIDg4GFFRUSo7PhHJY6JAb4UpU6bA1NQUo0aNQlJSUon9d+/excqVKwE8L50DKLEyYdmyZQCA3r17qyyu2rVrIz09HVeuXJHaEhISsGfPHrl+qampJd5bfOOhF5dsFnNwcECzZs2wZcsWuR+8V69exeHDh6XrrAhdunTBvHnz8O2338Le3l5hv0qVKpWoVuzcuRP//POPXFtxQlNaUlVeU6dORXx8PLZs2YJly5ahZs2a8Pb2Vvg5EtHr4Q2X6K1Qu3ZtbN++HUOHDoWLi4vcnRnPnDmDnTt3YsSIEQCApk2bwtvbG9999x3S0tLQqVMnnDt3Dlu2bEH//v0VLr17FZ6enpg6dSoGDBiAcePGITs7G2vWrEG9evXkJvPNnTsXJ0+eRO/eveHk5ITk5GSsXr0a1apVQ/v27RUef/HixejVqxfc3Nzg6+uLZ8+eYdWqVbC0tMTs2bNVdh0v0tPTw/Tp05X269OnD+bOnQsfHx+0bdsWMTExCA0NRa1ateT61a5dG1ZWVli7di3Mzc1hamqK1q1bw9nZuVxxhYeHY/Xq1Zg1a5a0XHPTpk3o3LkzZsyYgeDg4HIdj4jKQM2rLojK5datW+Lo0aPFmjVrioaGhqK5ubnYrl07cdWqVWJOTo7ULz8/X5wzZ47o7OwsGhgYiNWrVxcDAwPl+oji8+WRvXv3LnGeF5flKVoeKYqiePjwYbFRo0aioaGhWL9+ffGHH34osTzy6NGjYr9+/URHR0fR0NBQdHR0FD/66CPx1q1bJc7x4hLCI0eOiO3atRONjY1FCwsLsW/fvuK1a9fk+hSf78Xll5s2bRIBiHFxcQo/U1GUXx6piKLlkRMnThQdHBxEY2NjsV27dmJERESpyxp//fVX0dXVVdTX15e7zk6dOokNGzYs9Zz/PU5GRobo5OQktmjRQszPz5frN2HCBFFPT0+MiIh46TUQUfkJoliOWU5ERESkUzhH
gYiIiBRiokBEREQKMVEgIiIihZgoEBERkUJMFIiIiEghJgpERESkEBMFIiIiUkgr78xo3Nxf3SEQVbgnUd+qOwSiCmdUwT+lVPnz4tkl7fw7qZWJAhERUZkILKwrw0+IiIiIFGJFgYiIdJcKH4GurZgoEBGR7uLQg1L8hIiIiEghVhSIiEh3cehBKSYKRESkuzj0oBQ/ISIiIlKIFQUiItJdHHpQiokCERHpLg49KMVPiIiIiBRiRYGIiHQXhx6UYqJARES6i0MPSvETIiIiIoVYUSAiIt3FoQelmCgQEZHu4tCDUvyEiIiISCFWFIiISHdx6EEpJgpERKS7OPSgFD8hIiIiUogVBSIi0l2sKCjFRIGIiHSXHucoKMNUioiIiBRiRYGIiHQXhx6UYqJARES6i8sjlWIqRURERAqxokBERLqLQw9KMVEgIiLdxaEHpZhKERERkUKsKBARke7i0INSTBSIiEh3cehBKaZSREREpBArCkREpLs49KAUEwUiItJdHHpQiqkUERERKcSKAhER6S4OPSjFT4iIiHSXIKhuK4eTJ0+ib9++cHR0hCAI2Lt3r7QvPz8fU6dORePGjWFqagpHR0cMHz4cDx8+lDtGamoqvLy8YGFhASsrK/j6+iIzM1Ouz5UrV9ChQwcYGRmhevXqCA4OLvdHxESBiIjoDcvKykLTpk3xv//9r8S+7OxsXLx4ETNmzMDFixexe/du3Lx5Ex988IFcPy8vL8TGxiIsLAwHDhzAyZMnMWbMGGl/RkYGunfvDicnJ1y4cAGLFy/G7Nmz8d1335UrVkEURfHVLlNzGTf3V3cIRBXuSdS36g6BqMIZVfAAuXEf1f09enbg1X72CIKAPXv2oH///gr7REVF4b333sODBw9Qo0YNXL9+Ha6uroiKikKrVq0AAAcPHoSHhwf+/vtvODo6Ys2aNZg2bRoSExNhaGgIAPjqq6+wd+9e3Lhxo8zxsaJARES6S9BT2Zabm4uMjAy5LTc3VyVhpqenQxAEWFlZAQAiIiJgZWUlJQkA4O7uDj09PURGRkp9OnbsKCUJANCjRw/cvHkTT548KfO5mSgQERGpQFBQECwtLeW2oKCg1z5uTk4Opk6dio8++ggWFhYAgMTERNja2sr109fXR+XKlZGYmCj1sbOzk+tT/Lq4T1lw1QMREekuFd5HITAwEAEBAXJtMpnstY6Zn5+PIUOGQBRFrFmz5rWO9aqYKBARke5S4fJImUz22onBfxUnCQ8ePEB4eLhUTQAAe3t7JCcny/UvKChAamoq7O3tpT5JSUlyfYpfF/cpCw49EBERaZjiJOH27ds4cuQIbGxs5Pa7ubkhLS0NFy5ckNrCw8NRVFSE1q1bS31OnjyJ/Px8qU9YWBjq168Pa2vrMsfCRIGIiHSXmu6jkJmZiejoaERHRwMA4uLiEB0djfj4eOTn5+PDDz/E+fPnERoaisLCQiQmJiIxMRF5eXkAABcXF/Ts2ROjR4/GuXPncPr0afj7+8PT0xOOjo4AgGHDhsHQ0BC+vr6IjY3Fzz//jJUrV5YYHlH6EXF5JNHbicsjSRdU+PLIARtUdqxne0aVue/x48fRpUuXEu3e3t6YPXs2nJ2dS33fsWPH0LlzZwDPb7jk7++P/fv3Q09PD4MGDUJISAjMzMyk/leuXIGfnx+ioqJQpUoVfPHFF5g6dWq5rouJAtFbiokC6QJtTRTeJpzMSEREuotPj1SKiQIREeksgYmCUpzMSERERAqxokBERDqLFQXlmCgQEZHuYp6gFIceiIiISCFWFIiISGdx6EE5JgpERKSzmCgox6EHIiIiUogVBSIi0lmsKCjHRIGIiHQWEwXlOPRARERECrGiQEREuosFBaWYKBARkc7i0INyHHogIiIihdRWUQgICChz32XLllVgJEREpKtYUVBObYnCpUuX5F5fvHgRBQUFqF+/PgDg1q1bqFSpElq2bKmO8IiISAcwUVBObYnCsWPHpN8v
W7YM5ubm2LJlC6ytrQEAT548gY+PDzp06KCuEImIiHSeRsxRWLp0KYKCgqQkAQCsra0xf/58LF26VI2RERGRNhMEQWWbttKIVQ8ZGRl49OhRifZHjx7h6dOnaoiIiIh0gvb+fFcZjagoDBgwAD4+Pti9ezf+/vtv/P3339i1axd8fX0xcOBAdYdHRESkszSiorB27VpMmjQJw4YNQ35+PgBAX18fvr6+WLx4sZqjIyIibaXNQwaqohGJgomJCVavXo3Fixfj7t27AIDatWvD1NRUzZEREZE2Y6KgnEYMPRRLSEhAQkIC6tatC1NTU4iiqO6QiIiIdJpGJAopKSno2rUr6tWrBw8PDyQkJAAAfH19MXHiRDVHR0RE2oqrHpTTiERhwoQJMDAwQHx8PExMTKT2oUOH4uDBg2qMjIiItJqgwk1LacQchcOHD+PQoUOoVq2aXHvdunXx4MEDNUVFREREGpEoZGVlyVUSiqWmpkImk6khIiIi0gXaPGSgKhox9NChQwds3bpVei0IAoqKihAcHIwuXbqoMTIiItJmnKOgnEZUFIKDg9G1a1ecP38eeXl5mDJlCmJjY5GamorTp0+rOzwiIiKdpREVhUaNGuHWrVto3749+vXrh6ysLAwcOBCXLl1C7dq11R0eERFpKVYUlNOIigIAWFpaYtq0aeoOg4iIdIg2/4BXFY2oKNSqVQs+Pj7Izc2Va3/8+DFq1aqlpqiIiIhIIxKF+/fv4/Tp0+jQoQMSExOl9sLCQi6PJCKiisP7KCilEYmCIAg4ePAgqlWrhpYtWyIqKkrdIRERkQ7gHAXlNCJREEURZmZm2L17N4YPH45OnTrhhx9+UHdYREREOk8jJjP+NxMLCgpCw4YNMXr0aHz00UdqjIqIiLSdNlcCVEUjEoUXnxL58ccfo3bt2hgwYICaIiIiIl3AREE5jUgUioqKSrS5ubnh8uXLuHHjhhoiIiIiIkBDEgVF7OzsYGdnp+4wiIhIW7GgoJTaEoUWLVrg6NGjsLa2RvPmzV9a/rl48eIbjIyIiHQFhx6UU1ui0K9fP+nJkP369eMfFhERkQZSW6Iwa9Ys6fezZ89WVxhERKTD+J9U5TTiPgq1atVCSkpKifa0tDTewvkNadeiNn5Z8SnuHV6AZ5e+Rd/OTeT2T/vUA9G7p+PxmaV4eCIYv631x7uNnOT67FzxKW79PhdPzi7HvcMLsHHecDhUtZTr4+7mghNbJiL51BLEhwfhxyWjUMOhcoVfH9Gr2Lj+OzRtWB/BQQukttzcXCycNwcd27ZGm1bNEfDlF0h5/FiNUdLr4A2XlNOIROH+/fsoLCws0Z6bm4u///5bDRHpHlNjGWJu/YPxQT+Xuv/Og2RMWLQTrQYvRFefZXjwMBX7V/ujirWZ1Odk1C18PPV7NB0wF8Mmb0Ct6lWwfbGvtN/J0QY7l4/B8ahbaO35DT74/H+wsTLFT0tHV/j1EZXX1Zgr+GXnT6hXr75c++JFC3Hi+DEsXrYC32/ZhkePkhHwpb+aoiSqeGpd9bBv3z7p94cOHYKl5b//+ywsLMTRo0fh7OysjtB0zuHT13D49DWF+38+eF7u9dSlu+EzoC0a1XXE8XO3AACrQo9J++MTnmDJpjDsWDYa+vp6KCgoQgvX6qikp4fZ/zsg3Ttjxdaj2Ll8jNSHSBNkZ2UhcOpkzJozH+vXrZHanz59ij27duGb4CVo3cYNADB3/kL07+uBK5ej0aRpMzVFTK9KmysBqqLWRKF///4Anv9BeXt7y+0zMDBAzZo1sXTpUjVERi9joF8JvgPbIe1pNmJu/VNqH2sLE3j2aoWzl+OkBODitb9QJBZheL822LbvLMxMZBjW+z2ER95kkkAaZeH8uejYsRPauLWVSxSuxV5FQUE+Wru1ldqca9WGg4MjLkczUXgrMU9QSq2JQvGNlpydnREVFYUqVaqU+xi5ubklHk8tFhVC0KukkhjpX706NMLWb3xgYmSAxMcZ
6PPZt0hJy5LrM39cP3zm2RGmxjJEXonDwHFrpX0PHqagz+f/ww+LRuLbaZ7Q16+Es5fvob//mhdPRaQ2f/z+G65fv4btP/9SYl/K48cwMDCAhYWFXHtlGxs8fvzoTYVI9EZpxByFuLi4V0oSgOfPhrC0tJTbCpIuqDhCAoATUbfQ2jMIXUYsw+Ez1/BD8EhU/c8cBQBYvvUI2nguQu/PvkVhYRE2zPtE2mdnY47VM4YhdH8k2n+8GO6+y5GXX4jtS3xfPBWRWiQmJCD4mwUIWrRYWr5N2k1dkxlPnjyJvn37wtHREYIgYO/evXL7RVHEzJkz4eDgAGNjY7i7u+P27dtyfVJTU+Hl5QULCwtYWVnB19cXmZmZcn2uXLmCDh06wMjICNWrV0dwcHC5PyO1VRRCQkIwZswYGBkZISQk5KV9x40bp3BfYGAgAgIC5NpsO0xVSYwkLzsnD/f+eox7fz3GuZj7iPl1JrwHtMWS7w9LfVLSspCSloU78cm4GZeIO4fmo3UTZ0ReicOnQzsiI/MZpq38Veo/ctoW3Dk0H+81rolzMffVcFVE/7p2LRapKSnwHDxQaissLMSF81H46cdQrPluI/Lz85GRkSFXVUhNSUGVKlXVETK9JnXNUcjKykLTpk0xcuRIDBw4sMT+4OBghISEYMuWLXB2dsaMGTPQo0cPXLt2DUZGRgAALy8vJCQkICwsDPn5+fDx8cGYMWOwfft2AEBGRga6d+8Od3d3rF27FjExMRg5ciSsrKwwZsyYMseqtkRh+fLl8PLygpGREZYvX66wnyAIL00UZDJZicyfww5vhp4gQGag+Cukp/f8L6Dh//cxMTJEUZH8A8AK/3/4qbgvkTq1btMGv+zdL9c2a1ogataqBR/f0bC3d4C+vgHOnY2Ae/ceAID7cfeQkPAQTZs1U0PE9Lbq1asXevXqVeo+URSxYsUKTJ8+Hf369QMAbN26FXZ2dti7dy88PT1x/fp1HDx4EFFRUWjVqhUAYNWqVfDw8MCSJUvg6OiI0NBQ5OXl4fvvv4ehoSEaNmyI6OhoLFu27O1IFOLi4kr9PamHqbEhalf/939ENd+xQZN67+BJRjZS0rIwdVQP/HYiBomP02FjZYZPh3SEo60Vdoc9v732u42c0LKhE85cuou0p9lwrlYVsz7vjbvxjxB55fmf7x9/xuILry4IHNMTOw5egLmJDHP8P8CDhymIvsFlsKR+pqZmqFu3nlybsYkJrCytpPYBgwZhSfA3sLC0hJmZGb5ZOB9NmzXnRMa3lCoLCqXNmSvtP7PKxMXFITExEe7u7lKbpaUlWrdujYiICHh6eiIiIgJWVlZSkgAA7u7u0NPTQ2RkJAYMGICIiAh07NgRhoaGUp8ePXpg0aJFePLkCaytrcsUj0Y+FKqwsBAxMTFwcnIq84XQ62nh6oTDG76UXgdPGgQA2LbvLL5Y8BPq17TDx31bw8bKFKnp2Tgf+wDuI5fj+r1EAEB2Tj76vd8U0z/rDVNjQyQ+TsfhM9exaP33yMsvAPB8jsOIr7dggrc7Ary7ITsnD5FX4vCB32rk5Oa/+YsmegWTp34NPUEPE8ePQ15+Htq2a49p02cpfyNpJFUOPQQFBWHOnDlybbNmzSr33YcTE5//u/riQxHt7OykfYmJibC1tZXbr6+vj8qVK8v1efEWA8XHTExMfLsShfHjx6Nx48bw9fVFYWEhOnbsiIiICJiYmODAgQPo3LmzukPUen9euA3j5opvGuM5acNL3x975yF6fbpK6Xl2HrqAnYc42ZTeHhs3b5N7LZPJ8PWMWfh6BpMDklfanDltmBSrEasefvnlFzRt2hQAsH//fty/fx83btzAhAkTMG3aNDVHR0RE2koQVLfJZDJYWFjIba+SKNjb2wMAkpKS5NqTkpKkffb29khOTpbbX1BQgNTUVLk+pR3jv+coC41IFB4/fiwF/fvvv2Pw4MGoV68eRo4ciZiYGDVHR0RE2koTn/Xg
7OwMe3t7HD16VGrLyMhAZGQk3Nye3xHUzc0NaWlpuHDh3wpteHg4ioqK0Lp1a6nPyZMnkZ//79BuWFgY6tevX65hfY1IFOzs7HDt2jUUFhbi4MGD6NatGwAgOzsblSpxBQMREWmXzMxMREdHIzo6GsDzCYzR0dGIj4+HIAgYP3485s+fj3379iEmJgbDhw+Ho6OjdEdjFxcX9OzZE6NHj8a5c+dw+vRp+Pv7w9PTE46OjgCAYcOGwdDQEL6+voiNjcXPP/+MlStXlhgeUUYj5ij4+PhgyJAhcHBwgCAI0kzPyMhINGjQQM3RERGRtlLXox7Onz+PLl26SK+Lf3h7e3tj8+bNmDJlCrKysjBmzBikpaWhffv2OHjwoHQPBQAIDQ2Fv78/unbtCj09PQwaNEjuvkSWlpY4fPgw/Pz80LJlS1SpUgUzZ84s19JIABDE4qfzqNkvv/yCv/76C4MHD0a1atUAAFu2bIGVlZW0jrSsXjYpj0hbPIn6Vt0hEFU4owr+76zr14eVdyqjawu7q+xYmkQjKgoA8OGHH5Zoe/FBUURERPRmaUyikJWVhRMnTiA+Ph55eXly+152Z0YiIqJXxadMK6cRicKlS5fg4eGB7OxsZGVloXLlynj8+DFMTExga2vLRIGIiEhNNGLVw4QJE9C3b188efIExsbGOHv2LB48eICWLVtiyZIl6g6PiIi0lCYuj9Q0GpEoREdHY+LEidDT00OlSpWQm5srPQ7z66+/Vnd4RESkpVR5wyVtpRGJgoGBAfT0nodia2uL+Ph4AM+Xdvz111/qDI2IiEinacQchebNmyMqKgp169ZFp06dMHPmTDx+/Bjbtm1Do0aN1B0eERFpKW0eMlAVjagoLFy4EA4ODgCABQsWwNraGmPHjsWjR4/w3XffqTk6IiLSVpyjoJzaKwqiKMLW1laqHNja2uLgwYNqjoqIiIgADagoiKKIOnXqcC4CERG9cZzMqJzaEwU9PT3UrVsXKSkp6g6FiIh0DIcelFN7ogAA33zzDSZPnoyrV6+qOxQiIiL6D7XPUQCA4cOHIzs7G02bNoWhoSGMjY3l9qempqopMiIi0mZaXAhQGY1IFFasWKHuEIiISAdp85CBqmhEosCnRBIREWkmjUgU/isnJ6fE0yMtLCzUFA0REWkzFhSU04jJjFlZWfD394etrS1MTU1hbW0ttxEREVUErnpQTiMShSlTpiA8PBxr1qyBTCbDhg0bMGfOHDg6OmLr1q3qDo+IiEhnacTQw/79+7F161Z07twZPj4+6NChA+rUqQMnJyeEhobCy8tL3SESEZEW0uJCgMpoREUhNTUVtWrVAvB8PkLxcsj27dvj5MmT6gyNiIi0GIcelNOIRKFWrVqIi4sDADRo0AA7duwA8LzSYGVlpcbIiIiIdJtGJAo+Pj64fPkyAOCrr77C//73PxgZGWHChAmYPHmymqMjIiJtxWc9KKfWOQpFRUVYvHgx9u3bh7y8PDx8+BCzZs3CjRs3cOHCBdSpUwdNmjRRZ4hERKTFtHnIQFXUmigsWLAAs2fPhru7O4yNjbFy5UokJyfj+++/h5OTkzpDIyIiIqh56GHr1q1YvXo1Dh06hL1792L//v0IDQ1FUVGROsMiIiIdwaEH5dSaKMTHx8PDw0N67e7uDkEQ8PDhQzVGRUREuoKrHpRTa6JQUFAAIyMjuTYDAwPk5+erKSIiIiL6L7XOURBFESNGjIBMJpPacnJy8Nlnn8HU1FRq2717tzrCIyIiLafFhQCVUWuiUNpTIz/++GM1REJERLpIm4cMVEWticKmTZvUeXoiIiJSQiOe9UBERKQOrCgox0SBiIh0FvME5TTiFs5ERESkmVhRICIincWhB+WYKBARkc5inqAchx6IiIhIIVYUiIhIZ3HoQTkmCkREpLOYJyjHoQciIiJSiBUFIiLSWXosKSjFRIGIiHQW8wTlOPRARERECrGiQEREOourHpRjokBERDpLj3mC
Uhx6ICIiIoVYUSAiIp3FoQflmCgQEZHOYp6gHIceiIiISCEmCkREpLMEFf4qj8LCQsyYMQPOzs4wNjZG7dq1MW/ePIiiKPURRREzZ86Eg4MDjI2N4e7ujtu3b8sdJzU1FV5eXrCwsICVlRV8fX2RmZmpks+mGBMFIiLSWXqC6rbyWLRoEdasWYNvv/0W169fx6JFixAcHIxVq1ZJfYKDgxESEoK1a9ciMjISpqam6NGjB3JycqQ+Xl5eiI2NRVhYGA4cOICTJ09izJgxqvp4AACC+N/0RUsYN/dXdwhEFe5J1LfqDoGowhlV8Ey6D76LUtmx9o15t8x9+/TpAzs7O2zcuFFqGzRoEIyNjfHDDz9AFEU4Ojpi4sSJmDRpEgAgPT0ddnZ22Lx5Mzw9PXH9+nW4uroiKioKrVq1AgAcPHgQHh4e+Pvvv+Ho6KiS62JFgYiIdJYgCCrbcnNzkZGRIbfl5uaWet62bdvi6NGjuHXrFgDg8uXLOHXqFHr16gUAiIuLQ2JiItzd3aX3WFpaonXr1oiIiAAAREREwMrKSkoSAMDd3R16enqIjIxU2WfERIGIiHSWIKhuCwoKgqWlpdwWFBRU6nm/+uoreHp6okGDBjAwMEDz5s0xfvx4eHl5AQASExMBAHZ2dnLvs7Ozk/YlJibC1tZWbr++vj4qV64s9VEFLo8kIiJSgcDAQAQEBMi1yWSyUvvu2LEDoaGh2L59Oxo2bIjo6GiMHz8ejo6O8Pb2fhPhlhkTBSIi0lmqfMy0TCZTmBi8aPLkyVJVAQAaN26MBw8eICgoCN7e3rC3twcAJCUlwcHBQXpfUlISmjVrBgCwt7dHcnKy3HELCgqQmpoqvV8VOPRAREQ6S5VDD+WRnZ0NPT35H8GVKlVCUVERAMDZ2Rn29vY4evSotD8jIwORkZFwc3MDALi5uSEtLQ0XLlyQ+oSHh6OoqAitW7d+xU+kJFYUiIiI3rC+fftiwYIFqFGjBho2bIhLly5h2bJlGDlyJIDnkyzHjx+P+fPno27dunB2dsaMGTPg6OiI/v37AwBcXFzQs2dPjB49GmvXrkV+fj78/f3h6empshUPABMFIiLSYep61sOqVaswY8YMfP7550hOToajoyM+/fRTzJw5U+ozZcoUZGVlYcyYMUhLS0P79u1x8OBBGBkZSX1CQ0Ph7++Prl27Qk9PD4MGDUJISIhKY+V9FIjeUryPAumCir6PwuDNF1V2rJ0jWqjsWJqEcxSIiIhIIQ49EBGRzlLlqgdtxUSBiIh0FtME5Tj0QERERAqxokBERDpLXase3iZMFIiISGeV9/HQuohDD0RERKQQKwpERKSzOPSgHBMFIiLSWcwTlOPQAxERESnEigIREeksDj0ox0SBiIh0Flc9KMehByIiIlKIFQUiItJZHHpQ7pUqCn/++Sc+/vhjuLm54Z9//gEAbNu2DadOnVJpcERERBVJUOGmrcqdKOzatQs9evSAsbExLl26hNzcXABAeno6Fi5cqPIAiYiISH3KnSjMnz8fa9euxfr162FgYCC1t2vXDhcvXlRpcERERBVJTxBUtmmrcs9RuHnzJjp27Fii3dLSEmlpaaqIiYiI6I3Q4p/vKlPuioK9vT3u3LlTov3UqVOoVauWSoIiIiIizVDuRGH06NH48ssvERkZCUEQ8PDhQ4SGhmLSpEkYO3ZsRcRIRERUIQRBUNmmrco99PDVV1+hqKgIXbt2RXZ2Njp27AiZTIZJkybhiy++qIgYiYiIKoQW/3xXmXInCoIgYNq0aZg8eTLu3LmDzMxMuLq6wszMrCLiIyIiIjV65RsuGRoawtXVVZWxEBERvVHavFpBVcqdKHTp0uWlYzHh4eGvFRAREdGbwjxBuXInCs2aNZN7nZ+fj+joaFy9ehXe3t6qiouIiIg0QLkTheXLl5faPnv2bGRmZr52QERERG+KNq9WUBVBFEVRFQe6c+cO3nvvPaSmpqricK8lp0DdERBVPOt3
/dUdAlGFe3bp2wo9/hd7rqvsWKsGuKjsWJpEZY+ZjoiIgJGRkaoOR0RERBqg3EMPAwcOlHstiiISEhJw/vx5zJgxQ2WBERERVTQOPShX7kTB0tJS7rWenh7q16+PuXPnonv37ioLjIiIqKLpMU9QqlyJQmFhIXx8fNC4cWNYW1tXVExERESkIco1R6FSpUro3r07nxJJRERaQU9Q3aatyj2ZsVGjRrh3715FxEJERPRG8aFQypU7UZg/fz4mTZqEAwcOICEhARkZGXIbERERaY8yz1GYO3cuJk6cCA8PDwDABx98IJdBiaIIQRBQWFio+iiJiIgqgDYPGahKmROFOXPm4LPPPsOxY8cqMh4iIqI3RotHDFSmzIlC8Q0cO3XqVGHBEBERkWYp1/JIbZ6sQUREuoePmVauXIlCvXr1lCYLmvCsByIiorJQ2XMMtFi5EoU5c+aUuDMjERERaa9yJQqenp6wtbWtqFiIiIjeKI48KFfmRIHzE4iISNtwjoJyZR6eKV71QERERLqjzBWFoqKiioyDiIjojWNBQblyP2aaiIhIW/DOjMpxZQgREREpxIoCERHpLE5mVI6JAhER6SzmCcpx6IGIiIgUYqJAREQ6S09Q3VZe//zzDz7++GPY2NjA2NgYjRs3xvnz56X9oihi5syZcHBwgLGxMdzd3XH79m25Y6SmpsLLywsWFhawsrKCr68vMjMzX/djkcNEgYiIdJagwl/l8eTJE7Rr1w4GBgb4448/cO3aNSxduhTW1tZSn+DgYISEhGDt2rWIjIyEqakpevTogZycHKmPl5cXYmNjERYWhgMHDuDkyZMYM2aMyj4fABBELbyTUk6BuiMgqnjW7/qrOwSiCvfs0rcVevyFR++q7Fhfd61d5r5fffUVTp8+jT///LPU/aIowtHRERMnTsSkSZMAAOnp6bCzs8PmzZvh6emJ69evw9XVFVFRUWjVqhUA4ODBg/Dw8MDff/8NR0fH178osKJAREQ6TJVDD7m5ucjIyJDbcnNzSz3vvn370KpVKwwePBi2trZo3rw51q9fL+2Pi4tDYmIi3N3dpTZLS0u0bt0aERERAICIiAhYWVlJSQIAuLu7Q09PD5GRkar7jFR2JCIioreMKhOFoKAgWFpaym1BQUGlnvfevXtYs2YN6tati0OHDmHs2LEYN24ctmzZAgBITEwEANjZ2cm9z87OTtqXmJhY4kGN+vr6qFy5stRHFbg8koiISAUCAwMREBAg1yaTyUrtW1RUhFatWmHhwoUAgObNm+Pq1atYu3YtvL29KzzW8mBFgYiIdJYgCCrbZDIZLCws5DZFiYKDgwNcXV3l2lxcXBAfHw8AsLe3BwAkJSXJ9UlKSpL22dvbIzk5WW5/QUEBUlNTpT6qwESBiIh0lrqWR7Zr1w43b96Ua7t16xacnJwAAM7OzrC3t8fRo0el/RkZGYiMjISbmxsAwM3NDWlpabhw4YLUJzw8HEVFRWjduvUrfiIlceiBiIjoDZswYQLatm2LhQsXYsiQITh37hy+++47fPfddwCeVzrGjx+P+fPno27dunB2dsaMGTPg6OiI/v37A3hegejZsydGjx6NtWvXIj8/H/7+/vD09FTZigeAiQIREekwdd3C+d1338WePXsQGBiIuXPnwtnZGStWrICXl5fUZ8qUKcjKysKYMWOQlpaG9u3b4+DBgzAyMpL6hIaGwt/fH127doWenh4GDRqEkJAQlcbK+ygQvaV4HwXSBRV9H4UVf8ap7FjjOzir7FiahHMUiIiISCEOPRARkc56lWc06BomCkREpLP4mGnlOPRARERECrGiQEREOkuvnE991EVMFIiISGdx6EE5Dj0QERGRQqwoEBGRzuKqB+WYKBARkc7S49iDUhx6ICIiIoVYUSAiIp3FgoJyTBSIiEhncehBOQ49EBERkUKsKBARkc5iQUE5JgpERKSzWFZXjp8RERERKcSKAhER6SyBYw9KMVEgIiKdxTRB
OQ49EBERkUKsKBARkc7ifRSUY6JAREQ6i2mCchx6ICIiIoVYUSAiIp3FkQflmCgQEZHO4vJI5Tj0QERERAqxokBERDqL/1tWjokCERHpLA49KMdkioiIiBRiRYGIiHQW6wnKqS1RuHLlSpn7NmnSpAIjISIiXcWhB+XUlig0a9YMgiBAFEWlf1CFhYVvKCoiIiL6L7XNUYiLi8O9e/cQFxeHXbt2wdnZGatXr8alS5dw6dIlrF69GrVr18auXbvUFSIREWk5PRVu2kptFQUnJyfp94MHD0ZISAg8PDyktiZNmqB69eqYMWMG+vfvr4YIiYhI23HoQTmNSIJiYmLg7Oxcot3Z2RnXrl1TQ0REREQEaEii4OLigqCgIOTl5UlteXl5CAoKgouLixojIyIibSaocNNWGrE8cu3atejbty+qVasmrXC4cuUKBEHA/v371RwdERFpK448KKcRicJ7772He/fuITQ0FDdu3AAADB06FMOGDYOpqamaoyMiItJdGpEoAICpqSnGjBmj7jCIiEiH6Gn1oIFqaMQcBQDYtm0b2rdvD0dHRzx48AAAsHz5cvz6669qjoyIiLSVIKhu01YakSisWbMGAQEB6NWrF548eSLdYMna2horVqxQb3BEREQ6TCMShVWrVmH9+vWYNm0a9PX/HQ1p1aoVYmJi1BgZERFpM0GFv7SVRsxRiIuLQ/PmzUu0y2QyZGVlqSEiIiLSBdo8ZKAqGlFRcHZ2RnR0dIn2gwcP8j4KREREaqQRFYWAgAD4+fkhJycHoiji3Llz+PHHHxEUFIQNGzaoOzwiItJSXPWgnEYkCqNGjYKxsTGmT5+O7OxsDBs2DI6Ojli5ciU8PT3VHR4REWkpDj0opxGJAgB4eXnBy8sL2dnZyMzMhK2trbpDIiIi0nkaMUfh/fffR1paGgDAxMREShIyMjLw/vvvqzEyIiLSZryPgnIaUVE4fvy43AOhiuXk5ODPP/9UQ0RERKQLtHlZo6qotaJw5coVXLlyBQBw7do16fWVK1dw6dIlbNy4Ee+88446QyQiIqpQ33zzDQRBwPjx46W2nJwc+Pn5wcbGBmZmZhg0aBCSkpLk3hcfH4/evXtLlfjJkyejoKBA5fGptaLQrFkzCIIAQRBKHWIwNjbGqlWr1BAZERHpAj01FxSioqKwbt066cnJxSZMmIDffvsNO3fuhKWlJfz9/TFw4ECcPn0aAFBYWIjevXvD3t4eZ86cQUJCAoYPHw4DAwMsXLhQpTGqNVGIi4uDKIqoVasWzp07h6pVq0r7DA0NYWtri0qVKqkxQiIi0mbqHHrIzMyEl5cX1q9fj/nz50vt6enp2LhxI7Zv3y79J3rTpk1wcXHB2bNn0aZNGxw+fBjXrl3DkSNHYGdnh2bNmmHevHmYOnUqZs+eDUNDQ5XFqdahBycnJ9SsWRNFRUVo1aoVnJycpM3BwYFJAhERvTVyc3ORkZEht+Xm5irs7+fnh969e8Pd3V2u/cKFC8jPz5drb9CgAWrUqIGIiAgAQEREBBo3bgw7OzupT48ePZCRkYHY2FiVXpdGrHoICgrC999/X6L9+++/x6JFi9QQERER6QJVrnoICgqCpaWl3BYUFFTqeX/66SdcvHix1P2JiYkwNDSElZWVXLudnR0SExOlPv9NEor3F+9TJY1IFNatW4cGDRqUaG/YsCHWrl2rhoiIiEgXqPKhUIGBgUhPT5fbAgMDS5zzr7/+wpdffonQ0FAYGRmp4arLRyMShcTERDg4OJRor1q1KhISEtQQERERUfnIZDJYWFjIbTKZrES/CxcuIDk5GS1atIC+vj709fVx4sQJhISEQF9fH3Z2dsjLy5PuL1QsKSkJ9vb2AAB7e/sSqyCKXxf3URWNSBSqV68uzeT8r9OnT8PR0VENERERkS7QE1S3lVXXrl0RExOD6OhoaWvVqhW8vLyk3xsYGODo0aPSe27evIn4+Hi4ubkBANzc3BATE4Pk5GSpT1hY
GCwsLODq6qqyzwfQkBsujR49GuPHj0d+fr40w/Po0aOYMmUKJk6cqOboiIhIW6lj1YO5uTkaNWok12ZqagobGxup3dfXFwEBAahcuTIsLCzwxRdfwM3NDW3atAEAdO/eHa6urvjkk08QHByMxMRETJ8+HX5+fqVWMV6HRiQKkydPRkpKCj7//HPpDo1GRkaYOnVqqeM7pH4b13+HkBVL4fXxcEwJnAYAmDt7JiLPnsGj5GSYmJigabPmGB8wCc61aqs5WqLn2rWojQnD3dHCtQYcqlpiyITvsP/4FWn/tE89MLhHC1Szt0ZefiEuXY/H7G/3I+rqA6nPzhWfomm9d1C1sjmeZGTjWORNTA/5FQmP0gEANRwq4+bvc0ucu9PwJTgXc7/Cr5G0w/Lly6Gnp4dBgwYhNzcXPXr0wOrVq6X9lSpVwoEDBzB27Fi4ubnB1NQU3t7emDu35HfvdQmiKIoqP+oryszMxPXr12FsbIy6deu+claUo/obU9F/XI25gskTx8PM1AzvvtdaShR+2fEznGvVgr2DAzLS07Hmf6tw88YN/H74KJe6VgDrd/3VHcJbp3s7V7g1rYVL1+Px87IxJRKFoT1bIfnJU8T9/RjGMgN88fH7GOjeHI36zcHjJ5kAgC+8uiDyShwSH6fD0dYKQRMGAAC6jFgG4N9EodenIbh+9985VinpWSgoKHqDV6sdnl36tkKPf+r2E5Udq31da5UdS5NoREWhWGJiIlJTU9GxY0fIZDKIoghBm5+08RbKzspC4NTJmDVnPtavWyO378MhQ6Xfv/NONfiPG4/BA/vh4T//oHqNGm86VKISDp++hsOnrync//PB83Kvpy7dDZ8BbdGoriOOn7sFAFgVekzaH5/wBEs2hWHHstHQ19eTSwRS07KQlPJUxVdAqsafMMppxGTGlJQUdO3aFfXq1YOHh4e00sHX15dzFDTMwvlz0bFjJ7Rxa/vSftnZ2fh1z268U62aymfgEr0JBvqV4DuwHdKeZiPm1j+l9rG2MIFnr1Y4ezmuRLXglxWf4sHRIBz9fgJ6d2r8JkImqhAaUVGYMGECDAwMEB8fDxcXF6l96NChCAgIwNKlSxW+Nzc3t8Sdr8RKMpVP5iDgj99/w/Xr17D9518U9vn5x1AsX7oEz55lo6azM9at3wQDFd5KlKii9erQCFu/8YGJkQESH2egz2ffIiUtS67P/HH98JlnR5gayxB5JQ4Dx/17v5esZ7mYunQ3IqLvoqhIRH/3ZtixbDSGBKzHbydi3vTlkBJ6rForpREVhcOHD2PRokWoVq2aXHvdunXx4MEDBe96rrQ7YS1eVPqdsOjVJSYkIPibBQhatPilSZhHnw/w8649+H7LD3ByqonJE8e/9BamRJrmRNQttPYMQpcRy3D4zDX8EDwSVa3N5Pos33oEbTwXofdn36KwsAgb5n0i7UtJy0LID+GIuvoAF67FY0bIPvz4exQmDO/6pi+FykBQ4aatNKKikJWVBRMTkxLtqampSisDgYGBCAgIkGsTK7GaoGrXrsUiNSUFnoMHSm2FhYW4cD4KP/0YiqhLMahUqRLMzc1hbm4OJ6eaaNKkKdq3fQ/hR8LQq3cfNUZPVHbZOXm499dj3PvrMc7F3EfMrzPhPaAtlnx/WOqTkpaFlLQs3IlPxs24RNw5NB+tmzgj8kpcqceMinmA91uXvPss0dtAIxKFDh06YOvWrZg3bx4AQBAEFBUVITg4GF26dHnpe2WyksMMXPWgeq3btMEve/fLtc2aFoiatWrBx3d0qasaRAAQRWnJK9HbSE8QIDNQ/E+l3v/facfwJX2a1H8HiY8zVB4bqYA2lwJURCMSheDgYHTt2hXnz59HXl4epkyZgtjYWKSmppZ6x0Z680xNzVC3bj25NmMTE1hZWqFu3Xr4+6+/cOjg73Br2w7W1pWRlJSI7zd8B5nMCO07dlJT1ETyTI0NUbv6v4+zr/mODZrUewdPMrKRkpaFqaN6
4LcTMUh8nA4bKzN8OqQjHG2tsDvsIgDg3UZOaNnQCWcu3UXa02w4V6uKWZ/3xt34R1I1watva+TnFyD6xt8AgH7vN4V3PzeMnbv9zV8wKaXOx0y/LTQiUWjUqBFu3bqFb7/9Fubm5sjMzMTAgQPh5+dX6jMgSPMYygxx8cJ5/LBtCzLSM2BTxQYtW7bC1tAfYWNjo+7wiAAALVydcHjDl9Lr4EmDAADb9p3FFwt+Qv2advi4b2vYWJkiNT0b52MfwH3kcly/9/xpfNk5+ej3flNM/6w3TI0Nkfg4HYfPXMei9d8jL//fUuZXo3uihkNlFBQU4db9JHzy1ffYcyT6jV4rkaqo7YZLAwcOxObNm2FhYYGtW7di6NChKlupwKEH0gW84RLpgoq+4dK5e+kqO9Z7tSxVdixNorZVDwcOHEBW1vMlRz4+PkhPV90fFhERUVlw1YNyaht6aNCgAQIDA9GlSxeIoogdO3bAwsKi1L7Dhw9/w9ERERERoMahhzNnziAgIAB3795FamoqzM3NS71dsyAISE1NLdexOfRAuoBDD6QLKnroISpOddXsd521c+hBbRWFtm3b4uzZswAAPT093Lp1C7a2tuoKh4iIdBBXPSinEXdmjIuLQ9WqVZV3JCIiojdKI5ZHOjk5IS0tDefOnUNycjKKiuQfrsI5CkREVBH4qAflNCJR2L9/P7y8vJCZmQkLCwu5uQqCIDBRICIiUhONGHqYOHEiRo4ciczMTKSlpeHJkyfSVt6JjERERGXF5ZHKaURF4Z9//sG4ceNKfTAUERFRhdHmn/AqohEVhR49euD8+fPqDoOIiIheoBEVhd69e2Py5Mm4du0aGjduDAMDA7n9H3zwgZoiIyIibcblkcqp7YZL/6Wnp7iwIQgCCgsLy3U83nCJdAFvuES6oKJvuBQd/1Rlx2pWw1xlx9IkGlFReHE5JBEREWkGjUgUiIiI1IEDD8qpLVEICQnBmDFjYGRkhJCQkJf2HTdu3BuKioiIdAozBaXUNkfB2dkZ58+fh42NDZydnRX2EwQB9+7dK9exOUeBdAHnKJAuqOg5Cpf/Ut0chabVOUdBpeLi4kr9PRER0ZvCVQ/KcY4CERHpLD7rQTmNSBQKCwuxefNmHD16tNSHQoWHh6spMiIiIt2mEYnCl19+ic2bN6N3795o1KiR3EOhiIiIKgp/2iinEYnCTz/9hB07dsDDw0PdoRARkS5hpqCURjzrwdDQEHXq1FF3GERERPQCjUgUJk6ciJUrV0ID7iZNREQ6RFDhL22lEUMPp06dwrFjx/DHH3+gYcOGJR4KtXv3bjVFRkRE2oxT4pTTiETBysoKAwYMUHcYRERE9AKNSBQ2bdqk7hCIiEgHsaCgnFoTBWtr61KXQlpaWqJevXqYNGkSunXrpobIiIhIJzBTUEqticKKFStKbU9LS8OFCxfQp08f/PLLL+jbt++bDYyIiIgAqDlR8Pb2fun+Zs2aISgoiIkCERFVCG1eraAqGrE8UpE+ffrgxo0b6g6DiIi0lCCobtNWGp0o5ObmwtDQUN1hEBER6SyNWPWgyMaNG9GsWTN1h0FERFpKiwsBKqPWRCEgIKDU9vT0dFy8eBG3bt3CyZMn33BURESkM5gpKKXWROHSpUultltYWKBbt27YvXs3nJ2d33BUREREVEyticKxY8fUeXoiItJxXPWgnEbPUSAiIqpI2rxaQVU0etUDERERqRcrCkREpLNYUFCOiQIREekuZgpKceiBiIjoDQsKCsK7774Lc3Nz2Nraon///rh586Zcn5ycHPj5+cHGxgZmZmYYNGgQkpKS5PrEx8ejd+/eMDExga2tLSZPnoyCggKVxspEgYiIdJagwl/lceLECfj5+eHs2bMICwtDfn4+unfvjqysLKnPhAkTsH//fuzcuRMnTpzAw4cPMXDgQGl/YWEhevfujby8PJw5cwZbtmzB5s2bMXPmTJV9PgAgiKIoqvSIGiBHtckUkUay
ftdf3SEQVbhnl76t0OPHPc5R2bGcqxi98nsfPXoEW1tbnDhxAh07dkR6ejqqVq2K7du348MPPwQA3LhxAy4uLoiIiECbNm3wxx9/oE+fPnj48CHs7OwAAGvXrsXUqVPx6NEjlT0CgRUFIiIiFcjNzUVGRobclpubW6b3pqenAwAqV64MALhw4QLy8/Ph7u4u9WnQoAFq1KiBiIgIAEBERAQaN24sJQkA0KNHD2RkZCA2NlZVl8VEgYiIdJegwi0oKAiWlpZyW1BQkNIYioqKMH78eLRr1w6NGjUCACQmJsLQ0BBWVlZyfe3s7JCYmCj1+W+SULy/eJ+qcNUDERHpLhWueggMDCzxDCOZTKb0fX5+frh69SpOnTqlumBUiIkCERGRCshksjIlBv/l7++PAwcO4OTJk6hWrZrUbm9vj7y8PKSlpclVFZKSkmBvby/1OXfunNzxildFFPdRBQ49EBGRzlLXqgdRFOHv7489e/YgPDy8xAMQW7ZsCQMDAxw9elRqu3nzJuLj4+Hm5gYAcHNzQ0xMDJKTk6U+YWFhsLCwgKur62t8KvJYUSAiIp2lrmc9+Pn5Yfv27fj1119hbm4uzSmwtLSEsbExLC0t4evri4CAAFSuXBkWFhb44osv4ObmhjZt2gAAunfvDldXV3zyyScIDg5GYmIipk+fDj8/v3JXNl6GyyOJ3lJcHkm6oKKXR8anlm1VQlnUqFz2H86Cggxl06ZNGDFiBIDnN1yaOHEifvzxR+Tm5qJHjx5YvXq13LDCgwcPMHbsWBw/fhympqbw9vbGN998A3191dUBmCgQvaWYKJAuqOhE4S8VJgrVy5EovE049EBERDqLj5lWjpMZiYiISCFWFIiISIexpKAMEwUiItJZHHpQjkMPREREpBArCkREpLNYUFCOiQIREeksDj0ox6EHIiIiUogVBSIi0lnlfUaDLmKiQEREuot5glIceiAiIiKFWFEgIiKdxYKCckwUiIhIZ3HVg3IceiAiIiKFWFEgIiKdxVUPyjFRICIi3cU8QSkOPRAREZFCrCgQEZHOYkFBOSYKRESks7jqQTkOPRAREZFCrCgQEZHO4qoH5ZgoEBGRzuLQg3IceiAiIiKFmCgQERGRQhx6ICIincWhB+VYUSAiIiKFWFEgIiKdxVUPyjFRICIincWhB+U49EBEREQKsaJAREQ6iwUF5ZgoEBGR7mKmoBSHHoiIiEghVhSIiEhncdWDckwUiIhIZ3HVg3IceiAiIiKFWFEgIiKdxYKCckwUiIhIdzFTUIpDD0RERKQQKwpERKSzuOpBOSYKRESks7jqQTkOPRAREZFCgiiKorqDoLdbbm4ugoKCEBgYCJlMpu5wiCoEv+ekq5go0GvLyMiApaUl0tPTYWFhoe5wiCoEv+ekqzj0QERERAoxUSAiIiKFmCgQERGRQkwU6LXJZDLMmjWLE7xIq/F7TrqKkxmJiIhIIVYUiIiISCEmCkRERKQQEwUiIiJSiIkCqUzNmjWxYsWKl/aZPXs2mjVrVqFxdO7cGePHj6/Qc9DbpSzfzRedPn0ajRs3hoGBAfr3718hcb2q+/fvQxAEREdHqzsU0gFMFN5SI0aMgCAIEAQBhoaGqFOnDubOnYuCggK1xRQVFYUxY8ZIrwVBwN69e+X6TJo0CUePHn3DkdHbSlHSt3nzZlhZWZX5OC9+N8siICAAzZo1Q1xcHDZv3lyu95aGP9zpbcWnR77FevbsiU2bNiE3Nxe///47/Pz8YGBggMDAwHIdp7CwEIIgQE/v9fLGqlWrKu1jZmYGMzOz1zoPUXmV5bv5ort37+Kzzz5DtWrVKiAiorcHKwpvMZlMBnt7ezg5OWHs2LFwd3fHvn37sGzZMjRu3BimpqaoXr06Pv/8c2RmZkrvK/7f2L59++Dq6gqZTIb4+HhERUWhW7duqFKlCiwtLdGpUydcvHhRep8oipg9ezZq1KgBmUwGR0dHjBs3Ttr/3/JuzZo1AQAD
BgyAIAjS6xeHHoqrIv/divsCwNWrV9GrVy+YmZnBzs4On3zyCR4/fiztz8rKwvDhw2FmZgYHBwcsXbpUdR8wvRVGjBiB/v37Y8mSJXBwcICNjQ38/PyQn58v9Xlx6EEQBGzYsAEDBgyAiYkJ6tati3379gH493/+KSkpGDlyJARBkCoKyr6PRUVFCA4ORp06dSCTyVCjRg0sWLAAAODs7AwAaN68OQRBQOfOnaX3bdiwAS4uLjAyMkKDBg2wevVquWs8d+4cmjdvDiMjI7Rq1QqXLl1S5UdI9FJMFLSIsbEx8vLyoKenh5CQEMTGxmLLli0IDw/HlClT5PpmZ2dj0aJF2LBhA2JjY2Fra4unT5/C29sbp06dwtmzZ1G3bl14eHjg6dOnAIBdu3Zh+fLlWLduHW7fvo29e/eicePGpcYSFRUFANi0aRMSEhKk1y9KSEiQtjt37qBOnTro2LEjACAtLQ3vv/8+mjdvjvPnz+PgwYNISkrCkCFDpPdPnjwZJ06cwK+//orDhw/j+PHjcskN6YZjx47h7t27OHbsGLZs2YLNmzcrHS6YM2cOhgwZgitXrsDDwwNeXl5ITU1F9erVkZCQAAsLC6xYsQIJCQkYOnRomb6PgYGB+OabbzBjxgxcu3YN27dvh52dHYDnP+wB4MiRI0hISMDu3bsBAKGhoZg5cyYWLFiA69evY+HChZgxYwa2bNkCAMjMzESfPn3g6uqKCxcuYPbs2Zg0aVIFfIpECoj0VvL29hb79esniqIoFhUViWFhYaJMJhMnTZpUou/OnTtFGxsb6fWmTZtEAGJ0dPRLz1FYWCiam5uL+/fvF0VRFJcuXSrWq1dPzMvLK7W/k5OTuHz5cuk1AHHPnj1yfWbNmiU2bdq0xHuLiorEAQMGiC1bthSzs7NFURTFefPmid27d5fr99dff4kAxJs3b4pPnz4VDQ0NxR07dkj7U1JSRGNjY/HLL7986bXR26FTp06l/llu2rRJtLS0FEXx+d8FJycnsaCgQNo/ePBgcejQodLr0r6b06dPl15nZmaKAMQ//vhDarO0tBQ3bdokvVb2fczIyBBlMpm4fv36Uq8lLi5OBCBeunRJrr127dri9u3b5drmzZsnurm5iaIoiuvWrRNtbGzEZ8+eSfvXrFlT6rGIKgLnKLzFDhw4ADMzM+Tn56OoqAjDhg3D7NmzceTIEQQFBeHGjRvIyMhAQUEBcnJykJ2dDRMTEwCAoaEhmjRpIne8pKQkTJ8+HcePH0dycjIKCwuRnZ2N+Ph4AMDgwYOxYsUK1KpVCz179oSHhwf69u0Lff3X/xp9/fXXiIiIwPnz52FsbAwAuHz5Mo4dO1bqnIa7d+/i2bNnyMvLQ+vWraX2ypUro379+q8dD71dGjZsiEqVKkmvHRwcEBMT89L3/Pf7b2pqCgsLCyQnJyvsr+z7mJaWhtzcXHTt2rXMcWdlZeHu3bvw9fXF6NGjpfaCggJYWloCAK5fv44mTZrAyMhI2u/m5lbmcxC9LiYKb7EuXbpgzZo1MDQ0hKOjI/T19XH//n306dMHY8eOxYIFC1C5cmWcOnUKvr6+yMvLkxIFY2NjCIIgdzxvb2+kpKRg5cqVcHJygkwmg5ubG/Ly8gAA1atXx82bN3HkyBGEhYXh888/x+LFi3HixAkYGBi88nX88MMPWL58OY4fP4533nlHas/MzETfvn2xaNGiEu9xcHDAnTt3Xvmc9HawsLBAenp6ifa0tDTpBymAEt8/QRBQVFT00mOX9z3Kvo/37t176fkUHRMA1q9fL5fwApBLfIjUiYnCW8zU1BR16tSRa7tw4QKKioqwdOlSaRXDjh07ynS806dPY/Xq1fDw8AAA/PXXX3ITtYDnCUbfvn3Rt29f+Pn5oUGDBoiJiUGLFi1KHM/AwACFhYUvPWdERARGjRqFdevWoU2bNnL7WrRogV27dqFmzZqlVi1q164NAwMDREZGokaNGgCAJ0+e4NatW+jU
qVOZrpk0W/369XH48OES7RcvXkS9evXeaCzKvo9169aFsbExjh49ilGjRpXYb2hoCAByfyfs7Ozg6OiIe/fuwcvLq9Tzuri4YNu2bcjJyZGqCmfPnlXFJRGVCSczapk6deogPz8fq1atwr1797Bt2zasXbu2TO+tW7cutm3bhuvXryMyMhJeXl7SMADwfLXExo0bcfXqVdy7dw8//PADjI2N4eTkVOrxatasiaNHjyIxMRFPnjwpsT8xMREDBgyAp6cnevTogcTERCQmJuLRo0cAAD8/P6SmpuKjjz5CVFQU7t69i0OHDsHHxweFhYUwMzODr68vJk+ejPDwcFy9ehUjRox47WWepDnGjh2LW7duYdy4cbhy5Qpu3ryJZcuW4ccff8TEiRPfaCzKvo9GRkaYOnUqpkyZgq1bt+Lu3bs4e/YsNm7cCACwtbWFsbGxNAmyuFIyZ84cBAUFISQkBLdu3UJMTAw2bdqEZcuWAQCGDRsGQRAwevRoXLt2Db///juWLFnyRq+ddBv/RdUyTZs2xbJly7Bo0SI0atQIoaGhCAoKKtN7N27ciCdPnqBFixb45JNPMG7cONja2kr7rayssH79erRr1w5NmjTBkSNHsH//ftjY2JR6vKVLlyIsLAzVq1dH8+bNS+y/ceMGkpKSsGXLFjg4OEjbu+++CwBwdHTE6dOnUVhYiO7du6Nx48YYP348rKyspGRg8eLF6NChA/r27Qt3d3e0b98eLVu2LO/HRhqqVq1aOHnyJG7cuAF3d3e0bt0aO3bswM6dO9GzZ883GktZvo8zZszAxIkTMXPmTLi4uGDo0KHSvAd9fX2EhIRg3bp1cHR0RL9+/QAAo0aNwoYNG7Bp0yY0btwYnTp1wubNm6XllGZmZti/fz9iYmLQvHlzTJs2rdThD6KKwsdMExERkUKsKBAREZFCTBSIiIhIISYKREREpBATBSIiIlKIiQIREREpxESBiIiIFGKiQERERAoxUSAiIiKFmCgQvQVGjBiB/v37S687d+6M8ePHv/E4jh8/DkEQkJaW9sbPTUTqwUSB6DWMGDECgiBAEAQYGhqiTp06mDt3LgoKCir0vLt378a8efPK1Jc/3InodfDpkUSvqWfPnti0aRNyc3Px+++/w8/PDwYGBggMDJTrl5eXJz1B8HVVrlxZJcchIlKGFQWi1ySTyWBvbw8nJyeMHTsW7u7u2LdvnzRcsGDBAjg6OqJ+/foAnj++e8iQIbCyskLlypXRr18/3L9/XzpeYWEhAgICYGVlBRsbG0yZMgUvPpLlxaGH3NxcTJ06FdWrV4dMJkOdOnWwceNG3L9/H126dAEAWFtbQxAEjBgxAgBQVFSEoKAgODs7w9jYGE2bNsUvv/wid57ff/8d9erVg7GxMbp06SIXJxHpBiYKRCpmbGyMvLw8AMDRo0dx8+ZNhIWF4cCBA8jPz0ePHj1gbm6OP//8E6dPn4aZmRl69uwpvWfp0qXYvHkzvv/+e5w6dQqpqanYs2fPS885fPhw/PjjjwgJCcH169exbt06mJmZoXr16ti1axcA4ObNm0hISMDKlSsBAEFBQdi6dSvWrl2L2NhYTJgwAR9//DFOnDgB4HlCM3DgQPTt2xfR0dEYNWoUvvrqq4r62IhIU4lE9Mq8vb3Ffv36iaIoikVFRWJYWJgok8nESZMmid7e3qKdnZ2Ym5sr9d+2bZtYv359saioSGrLzc0VjY2NxUOHDomiKIoODg5icHCwtD8/P1+sVq2adB5RFMVOnTqJX375pSiKonjz5k0RgBgWFlZqjMeOHRMBiE+ePJHacnJyRBMTE/HMmTNyfX19fcWPPvpIFEVRDAwMFF1dXeX2T506tcSxiEi7cY4C0Ws6cOAAzMzMkJ+fj6KiIgwbNgyzZ8+Gn58fGjduLDcv4fLly7hz5w7Mzc3ljpGTk4O7d+8iPT0dCQkJaN26tbRPX18frVq1KjH8UCw6OhqVKlVCp06dyhzznTt3
kJ2djW7dusm15+XloXnz5gCA69evy8UBAG5ubmU+BxFpByYKRK+pS5cuWLNmDQwNDeHo6Ah9/X//Wpmamsr1zczMRMuWLREaGlriOFWrVn2l8xsbG5f7PZmZmQCA3377De+8847cPplM9kpxEJF2YqJA9JpMTU1Rp06dMvVt0aIFfv75Z9ja2sLCwqLUPg4ODoiMjETHjh0BAAUFBbhw4QJatGhRav/GjRujqKgIJ06cgLu7e4n9xRWNwsJCqc3V1RUymQzx8fEKKxEuLi7Yt2+fXNvZs2eVXyQRaRVOZiR6g7y8vFClShX069cPf/75J+Li4nD8+HGMGzcOf//9NwDgyy+/xDfffIO9e/fixo0b+Pzzz196D4SaNWvC29sbI0eOxN69e6Vj7tixAwDg5OQEQRBw4MABPHr0CJmZmTA3N8ekSZMwYcIEbNmyBXfv3sXFixexatUqbNmyBQDw2Wef4fbt25g8eTJu3ryJ7du3Y/PmzRX9ERGRhmGiQPQGmZiY4OTJk6hRowYGDhwIFxcX+Pr6IicnR6owTJw4EZ988gm8vb3h5uYGc3NzDBgw4KXHXbNmDT788EN8/vnnaNCgAUaPHo2srCwAwDvvvIM5c+bgq6++gp2dHfz9/QEA8+bNw4wZMxAUFAQXFxf07NkTv/32G5ydnQEANWrUwK5du7B37140bdoUa9euxcKFCyvw0yEiTSSIimZIERERkc5jRYGIiIgUYqJARERECjFRICIiIoWYKBAREZFCTBSIiIhIISYKREREpBATBSIiIlKIiQIREREpxESBiIiIFGKiQERERAoxUSAiIiKF/g+8Cs0U8M2R/AAAAABJRU5ErkJggg==",
88
+ "text/plain": [
89
+ "<Figure size 600x500 with 2 Axes>"
90
+ ]
91
+ },
92
+ "metadata": {},
93
+ "output_type": "display_data"
94
+ }
95
+ ],
96
+ "source": [
97
+ "def evaluate():\n",
98
+ " # Get all loaders and datasets\n",
99
+ " train_loader, val_loader, test_loader, train_dataset, val_dataset, test_dataset = get_dataloaders()\n",
100
+ "\n",
101
+ " # Define device\n",
102
+ " device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
103
+ " print(f\"Using device: {device}\")\n",
104
+ "\n",
105
+ " # Initialize and load model\n",
106
+ " model = MalariaResNet50(num_classes=2)\n",
107
+ " model.load(MODEL_PATH)\n",
108
+ " model = model.to(device)\n",
109
+ " model.eval() # Set to evaluation mode\n",
110
+ "\n",
111
+ " # Get test data\n",
112
+ " y_true = []\n",
113
+ " y_pred = []\n",
114
+ "\n",
115
+ " print(\"Running inference on test set...\")\n",
116
+ " with torch.no_grad():\n",
117
+ " for inputs, labels in test_loader:\n",
118
+ " inputs = inputs.to(device)\n",
119
+ " labels = labels.to(device)\n",
120
+ "\n",
121
+ " outputs = model(inputs)\n",
122
+ " _, preds = torch.max(outputs, 1)\n",
123
+ "\n",
124
+ " y_true.extend(labels.cpu().numpy())\n",
125
+ " y_pred.extend(preds.cpu().numpy())\n",
126
+ "\n",
127
+ " # -----------------------------\n",
128
+ " # Compute Metrics\n",
129
+ " # -----------------------------\n",
130
+ " classes = test_dataset.classes # ['uninfected', 'parasitized']\n",
131
+ "\n",
132
+ " # Accuracy\n",
133
+ " acc = accuracy_score(y_true, y_pred)\n",
134
+ " print(f\"\\nTest Accuracy: {acc:.4f}\")\n",
135
+ "\n",
136
+ " # Classification Report\n",
137
+ " print(\"\\nClassification Report:\")\n",
138
+ " print(classification_report(y_true, y_pred, target_names=classes))\n",
139
+ "\n",
140
+ " # Confusion Matrix\n",
141
+ " cm = confusion_matrix(y_true, y_pred)\n",
142
+ "\n",
143
+ " plt.figure(figsize=(6, 5))\n",
144
+ " sns.heatmap(cm, annot=True, fmt=\"d\", cmap=\"Blues\", xticklabels=classes, yticklabels=classes)\n",
145
+ " plt.xlabel(\"Predicted\")\n",
146
+ " plt.ylabel(\"True\")\n",
147
+ " plt.title(\"Confusion Matrix\")\n",
148
+ " plt.show()\n",
149
+ "\n",
150
+ "if __name__ == '__main__':\n",
151
+ " evaluate()"
152
+ ]
153
+ },
154
+ {
155
+ "cell_type": "code",
156
+ "execution_count": null,
157
+ "id": "3b8ec551-2713-4b2e-b33c-cdc7930f8c54",
158
+ "metadata": {},
159
+ "outputs": [],
160
+ "source": []
161
+ }
162
+ ],
163
+ "metadata": {
164
+ "kernelspec": {
165
+ "display_name": "Python 3 (ipykernel)",
166
+ "language": "python",
167
+ "name": "python3"
168
+ },
169
+ "language_info": {
170
+ "codemirror_mode": {
171
+ "name": "ipython",
172
+ "version": 3
173
+ },
174
+ "file_extension": ".py",
175
+ "mimetype": "text/x-python",
176
+ "name": "python",
177
+ "nbconvert_exporter": "python",
178
+ "pygments_lexer": "ipython3",
179
+ "version": "3.12.10"
180
+ }
181
+ },
182
+ "nbformat": 4,
183
+ "nbformat_minor": 5
184
+ }
notebooks/gradcam.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
notebooks/train.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
requirements.txt ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ absl-py==2.2.2
2
+ aiohappyeyeballs==2.6.1
3
+ aiohttp==3.11.18
4
+ aiosignal==1.3.2
5
+ albucore==0.0.24
6
+ albumentations==2.0.7
7
+ alembic @ file:///home/conda/feedstock_root/build_artifacts/alembic_1743207807124/work
8
+ altair==5.5.0
9
+ annotated-types @ file:///home/conda/feedstock_root/build_artifacts/annotated-types_1733247046149/work
10
+ anyio @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_anyio_1742243108/work
11
+ archspec @ file:///home/conda/feedstock_root/build_artifacts/archspec_1737352602016/work
12
+ argon2-cffi @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi_1733311059102/work
13
+ argon2-cffi-bindings @ file:///home/conda/feedstock_root/build_artifacts/argon2-cffi-bindings_1725356585055/work
14
+ arrow @ file:///home/conda/feedstock_root/build_artifacts/arrow_1733584251875/work
15
+ asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1733250440834/work
16
+ async-lru @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_async-lru_1742153708/work
17
+ async_generator @ file:///home/conda/feedstock_root/build_artifacts/async_generator_1734180388035/work
18
+ attrs @ file:///home/conda/feedstock_root/build_artifacts/attrs_1741918516150/work
19
+ babel @ file:///home/conda/feedstock_root/build_artifacts/babel_1738490167835/work
20
+ beautifulsoup4 @ file:///home/conda/feedstock_root/build_artifacts/beautifulsoup4_1744783198182/work
21
+ bleach @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_bleach_1737382993/work
22
+ blinker @ file:///home/conda/feedstock_root/build_artifacts/blinker_1731096409132/work
23
+ boltons @ file:///home/conda/feedstock_root/build_artifacts/boltons_1733827268945/work
24
+ Brotli @ file:///home/conda/feedstock_root/build_artifacts/brotli-split_1725267488082/work
25
+ cached-property @ file:///home/conda/feedstock_root/build_artifacts/cached_property_1615209429212/work
26
+ cachetools==5.5.2
27
+ certifi @ file:///home/conda/feedstock_root/build_artifacts/certifi_1746569525376/work/certifi
28
+ certipy @ file:///home/conda/feedstock_root/build_artifacts/certipy_1743237988054/work
29
+ cffi @ file:///home/conda/feedstock_root/build_artifacts/cffi_1725560558132/work
30
+ charset-normalizer @ file:///home/conda/feedstock_root/build_artifacts/charset-normalizer_1746214863626/work
31
+ click==8.2.1
32
+ colorama @ file:///home/conda/feedstock_root/build_artifacts/colorama_1733218098505/work
33
+ comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1733502965406/work
34
+ conda @ file:///home/conda/feedstock_root/build_artifacts/conda_1744220005005/work/conda-src
35
+ conda-libmamba-solver @ file:///home/conda/feedstock_root/build_artifacts/conda-libmamba-solver_1745834476052/work/src
36
+ conda-package-handling @ file:///home/conda/feedstock_root/build_artifacts/conda-package-handling_1736345463896/work
37
+ conda_package_streaming @ file:///home/conda/feedstock_root/build_artifacts/conda-package-streaming_1741620732069/work
38
+ constants==0.6.0
39
+ contourpy==1.3.2
40
+ cryptography @ file:///home/conda/feedstock_root/build_artifacts/cryptography-split_1746241906404/work
41
+ cycler==0.12.1
42
+ datasets==3.6.0
43
+ debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1744321241074/work
44
+ decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1740384970518/work
45
+ defusedxml @ file:///home/conda/feedstock_root/build_artifacts/defusedxml_1615232257335/work
46
+ dill==0.3.8
47
+ distro @ file:///home/conda/feedstock_root/build_artifacts/distro_1734729835256/work
48
+ exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1746947292760/work
49
+ executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1745502089858/work
50
+ fastjsonschema @ file:///home/conda/feedstock_root/build_artifacts/python-fastjsonschema_1733235979760/work/dist
51
+ filelock==3.18.0
52
+ fonttools==4.58.0
53
+ fqdn @ file:///home/conda/feedstock_root/build_artifacts/fqdn_1733327382592/work/dist
54
+ frozendict @ file:///home/conda/feedstock_root/build_artifacts/frozendict_1728841327252/work
55
+ frozenlist==1.6.0
56
+ fsspec==2025.3.0
57
+ gitdb==4.0.12
58
+ GitPython==3.1.44
59
+ greenlet @ file:///home/conda/feedstock_root/build_artifacts/greenlet_1746824022659/work
60
+ grpcio==1.71.0
61
+ h11 @ file:///home/conda/feedstock_root/build_artifacts/h11_1745526374115/work
62
+ h2 @ file:///home/conda/feedstock_root/build_artifacts/h2_1738578511449/work
63
+ hpack @ file:///home/conda/feedstock_root/build_artifacts/hpack_1737618293087/work
64
+ httpcore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_httpcore_1745602916/work
65
+ httpx @ file:///home/conda/feedstock_root/build_artifacts/httpx_1733663348460/work
66
+ huggingface-hub==0.31.2
67
+ hyperframe @ file:///home/conda/feedstock_root/build_artifacts/hyperframe_1737618333194/work
68
+ idna @ file:///home/conda/feedstock_root/build_artifacts/idna_1733211830134/work
69
+ imageio==2.37.0
70
+ importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1737420181517/work
71
+ importlib_resources @ file:///home/conda/feedstock_root/build_artifacts/importlib_resources_1736252299705/work
72
+ ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
73
+ ipython @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_ipython_1745672166/work
74
+ ipython_genutils @ file:///home/conda/feedstock_root/build_artifacts/ipython_genutils_1733399582966/work
75
+ ipython_pygments_lexers @ file:///home/conda/feedstock_root/build_artifacts/ipython_pygments_lexers_1737123620466/work
76
+ ipywidgets==8.1.7
77
+ isoduration @ file:///home/conda/feedstock_root/build_artifacts/isoduration_1733493628631/work/dist
78
+ iterative-stratification==0.1.9
79
+ jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1733300866624/work
80
+ Jinja2 @ file:///home/conda/feedstock_root/build_artifacts/jinja2_1741263328855/work
81
+ joblib==1.5.0
82
+ json5 @ file:///home/conda/feedstock_root/build_artifacts/json5_1743722064131/work
83
+ jsonpatch @ file:///home/conda/feedstock_root/build_artifacts/jsonpatch_1733814567314/work
84
+ jsonpointer @ file:///home/conda/feedstock_root/build_artifacts/jsonpointer_1725302935093/work
85
+ jsonschema @ file:///home/conda/feedstock_root/build_artifacts/jsonschema_1733472696581/work
86
+ jsonschema-specifications @ file:///tmp/tmpuvkyqc9y/src
87
+ jupyter-events @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyter_events_1738765986/work
88
+ jupyter-lsp @ file:///home/conda/feedstock_root/build_artifacts/jupyter-lsp-meta_1733492907176/work/jupyter-lsp
89
+ jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1733440914442/work
90
+ jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1727163409502/work
91
+ jupyter_server @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_1734702637701/work
92
+ jupyter_server_terminals @ file:///home/conda/feedstock_root/build_artifacts/jupyter_server_terminals_1733427956852/work
93
+ jupyterhub @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_jupyterhub-base_1744782338/work
94
+ jupyterlab @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_1746536008058/work
95
+ jupyterlab_pygments @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_pygments_1733328101776/work
96
+ jupyterlab_server @ file:///home/conda/feedstock_root/build_artifacts/jupyterlab_server_1733599573484/work
97
+ jupyterlab_widgets==3.0.15
98
+ kiwisolver==1.4.8
99
+ lazy_loader==0.4
100
+ libmambapy @ file:///home/conda/feedstock_root/build_artifacts/mamba-split_1746515836725/work/libmambapy
101
+ Mako @ file:///home/conda/feedstock_root/build_artifacts/mako_1744317760971/work
102
+ Markdown==3.8
103
+ MarkupSafe @ file:///home/conda/feedstock_root/build_artifacts/markupsafe_1733219680183/work
104
+ matplotlib==3.10.3
105
+ matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1733416936468/work
106
+ menuinst @ file:///home/conda/feedstock_root/build_artifacts/menuinst_1731146972698/work
107
+ mistune @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_mistune_1742402716/work
108
+ mpmath==1.3.0
109
+ multidict==6.4.3
110
+ multiprocess==0.70.16
111
+ narwhals==1.41.0
112
+ nbclassic @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbclassic_1746550383/work
113
+ nbclient @ file:///home/conda/feedstock_root/build_artifacts/nbclient_1734628800805/work
114
+ nbconvert @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_nbconvert-core_1738067871/work
115
+ nbformat @ file:///home/conda/feedstock_root/build_artifacts/nbformat_1733402752141/work
116
+ nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1733325553580/work
117
+ networkx==3.4.2
118
+ notebook @ file:///home/conda/feedstock_root/build_artifacts/notebook_1746547590747/work
119
+ notebook_shim @ file:///home/conda/feedstock_root/build_artifacts/notebook-shim_1733408315203/work
120
+ numpy==1.26.4
121
+ nvidia-cublas-cu12==12.6.4.1
122
+ nvidia-cuda-cupti-cu12==12.6.80
123
+ nvidia-cuda-nvrtc-cu12==12.6.77
124
+ nvidia-cuda-runtime-cu12==12.6.77
125
+ nvidia-cudnn-cu12==9.5.1.17
126
+ nvidia-cufft-cu12==11.3.0.4
127
+ nvidia-cufile-cu12==1.11.1.6
128
+ nvidia-curand-cu12==10.3.7.77
129
+ nvidia-cusolver-cu12==11.7.1.2
130
+ nvidia-cusparse-cu12==12.5.4.2
131
+ nvidia-cusparselt-cu12==0.6.3
132
+ nvidia-ml-py==12.575.51
133
+ nvidia-nccl-cu12==2.26.2
134
+ nvidia-nvjitlink-cu12==12.6.85
135
+ nvidia-nvtx-cu12==12.6.77
136
+ oauthlib @ file:///home/conda/feedstock_root/build_artifacts/oauthlib_1733752848439/work
137
+ opencv-python==4.11.0.86
138
+ opencv-python-headless==4.11.0.86
139
+ overrides @ file:///home/conda/feedstock_root/build_artifacts/overrides_1734587627321/work
140
+ packaging==24.2
141
+ pamela @ file:///home/conda/feedstock_root/build_artifacts/pamela_1734511180361/work
142
+ pandas==2.2.3
143
+ pandocfilters @ file:///home/conda/feedstock_root/build_artifacts/pandocfilters_1631603243851/work
144
+ parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1733271261340/work
145
+ pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1733301927746/work
146
+ pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1733327343728/work
147
+ pillow==11.2.1
148
+ pkgutil_resolve_name @ file:///home/conda/feedstock_root/build_artifacts/pkgutil-resolve-name_1733344503739/work
149
+ platformdirs @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_platformdirs_1746710438/work
150
+ pluggy @ file:///home/conda/feedstock_root/build_artifacts/pluggy_1733222765875/work
151
+ prometheus_client @ file:///home/conda/feedstock_root/build_artifacts/prometheus_client_1733327310477/work
152
+ prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1744724089886/work
153
+ propcache==0.3.1
154
+ protobuf==6.31.0
155
+ psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1740663123172/work
156
+ ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1733302279685/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl#sha256=92c32ff62b5fd8cf325bec5ab90d7be3d2a8ca8c8a3813ff487a8d2002630d1f
157
+ pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1733569405015/work
158
+ pyarrow==20.0.0
159
+ pycosat @ file:///home/conda/feedstock_root/build_artifacts/pycosat_1732588402431/work
160
+ pycparser @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pycparser_1733195786/work
161
+ pydantic @ file:///home/conda/feedstock_root/build_artifacts/pydantic_1746631911634/work
162
+ pydantic_core @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_pydantic-core_1746625309/work
163
+ pydeck==0.9.1
164
+ Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1736243443484/work
165
+ PyJWT @ file:///home/conda/feedstock_root/build_artifacts/pyjwt_1732782409051/work
166
+ pynvml==12.0.0
167
+ pyparsing==3.2.3
168
+ PySocks @ file:///home/conda/feedstock_root/build_artifacts/pysocks_1733217236728/work
169
+ python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1733215673016/work
170
+ python-json-logger @ file:///home/conda/feedstock_root/build_artifacts/python-json-logger_1677079630776/work
171
+ pytz @ file:///home/conda/feedstock_root/build_artifacts/pytz_1742920838005/work
172
+ PyYAML @ file:///home/conda/feedstock_root/build_artifacts/pyyaml_1737454647378/work
173
+ pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1743831245863/work
174
+ referencing @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_referencing_1737836872/work
175
+ regex==2024.11.6
176
+ requests @ file:///home/conda/feedstock_root/build_artifacts/requests_1733217035951/work
177
+ rfc3339_validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3339-validator_1733599910982/work
178
+ rfc3986-validator @ file:///home/conda/feedstock_root/build_artifacts/rfc3986-validator_1598024191506/work
179
+ rpds-py @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_rpds-py_1743037659/work
180
+ ruamel.yaml @ file:///home/conda/feedstock_root/build_artifacts/ruamel.yaml_1736248036158/work
181
+ ruamel.yaml.clib @ file:///home/conda/feedstock_root/build_artifacts/ruamel.yaml.clib_1728724466132/work
182
+ safetensors==0.5.3
183
+ scikit-image==0.25.2
184
+ scikit-learn==1.6.1
185
+ scipy==1.15.3
186
+ seaborn==0.13.2
187
+ Send2Trash @ file:///home/conda/feedstock_root/build_artifacts/send2trash_1733322040660/work
188
+ setuptools==80.1.0
189
+ simsimd==6.2.1
190
+ six @ file:///home/conda/feedstock_root/build_artifacts/six_1733380938961/work
191
+ smmap==5.0.2
192
+ sniffio @ file:///home/conda/feedstock_root/build_artifacts/sniffio_1733244044561/work
193
+ soupsieve @ file:///home/conda/feedstock_root/build_artifacts/soupsieve_1746563585861/work
194
+ SQLAlchemy @ file:///home/conda/feedstock_root/build_artifacts/sqlalchemy_1743109724354/work
195
+ stack_data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1733569443808/work
196
+ streamlit==1.45.1
197
+ stringzilla==3.12.5
198
+ sympy==1.14.0
199
+ tenacity==9.1.2
200
+ tensorboard==2.19.0
201
+ tensorboard-data-server==0.7.2
202
+ terminado @ file:///home/conda/feedstock_root/build_artifacts/terminado_1710262609923/work
203
+ threadpoolctl==3.6.0
204
+ tifffile==2025.5.10
205
+ timm==1.0.15
206
+ tinycss2 @ file:///home/conda/feedstock_root/build_artifacts/tinycss2_1729802851396/work
207
+ tokenizers==0.21.1
208
+ toml==0.10.2
209
+ tomli @ file:///home/conda/feedstock_root/build_artifacts/tomli_1733256695513/work
210
+ torch==2.7.0
211
+ torchcam==0.4.0
212
+ torchvision==0.22.0
213
+ torchxrayvision==1.3.4
214
+ tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1732615905931/work
215
+ tqdm @ file:///home/conda/feedstock_root/build_artifacts/tqdm_1735661334605/work
216
+ traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1733367359838/work
217
+ transformers==4.51.3
218
+ triton==3.3.0
219
+ truststore @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_truststore_1739009763/work
220
+ types-python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/types-python-dateutil_1733612335562/work
221
+ typing-inspection @ file:///home/conda/feedstock_root/build_artifacts/typing-inspection_1741438046699/work
222
+ typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/bld/rattler-build_typing_extensions_1744302253/work
223
+ typing_utils @ file:///home/conda/feedstock_root/build_artifacts/typing_utils_1733331286120/work
224
+ tzdata==2025.2
225
+ uri-template @ file:///home/conda/feedstock_root/build_artifacts/uri-template_1733323593477/work/dist
226
+ urllib3 @ file:///home/conda/feedstock_root/build_artifacts/urllib3_1744323578849/work
227
+ watchdog==6.0.0
228
+ wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1733231326287/work
229
+ webcolors @ file:///home/conda/feedstock_root/build_artifacts/webcolors_1733359735138/work
230
+ webencodings @ file:///home/conda/feedstock_root/build_artifacts/webencodings_1733236011802/work
231
+ websocket-client @ file:///home/conda/feedstock_root/build_artifacts/websocket-client_1733157342724/work
232
+ Werkzeug==3.1.3
233
+ wheel==0.45.1
234
+ widgetsnbextension==4.0.14
235
+ xxhash==3.5.0
236
+ yarl==1.20.0
237
+ zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1732827521216/work
238
+ zstandard==0.23.0