Adjusting UI for specific gradio version
gradio_test.py (CHANGED: +226, -643)
```diff
@@ -49,7 +49,7 @@ DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
 CPU_DEVICE = torch.device("cpu")
 torch.set_num_threads(4)
 
-# ADE20K class mappings for floor detection
+# ADE20K class mappings for floor detection - COMPREHENSIVE LIST
 FLOOR_CLASSES = {
     'floor': [3, 4, 13],   # floor, wood floor, rug
     'carpet': [28],        # carpet
```
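For orientation, everything below keys off these ADE20K class IDs: a boolean walking-surface mask can be derived from the mapping with `np.isin`. A minimal sketch (the toy segmentation array and its values are illustrative, not from the app):

```python
import numpy as np

# Same ADE20K class IDs as the FLOOR_CLASSES mapping above.
FLOOR_CLASSES = {
    'floor': [3, 4, 13],   # floor, wood floor, rug
    'carpet': [28],        # carpet
}

# Toy segmentation map: each cell holds an ADE20K class ID (values illustrative).
segmentation = np.array([
    [3,  3,  0,  5],
    [3,  28, 28, 5],
    [4,  28, 15, 5],
    [13, 13, 15, 5],
])

# Flatten the mapping and mark every walking-surface pixel.
floor_ids = [cls for ids in FLOOR_CLASSES.values() for cls in ids]
floor_mask = np.isin(segmentation, floor_ids)
print(int(floor_mask.sum()))  # 9 floor pixels in this toy map
```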
```diff
@@ -170,13 +170,26 @@ class OneFormerManager:
 ########################################
 
 class ImprovedBlackspotDetector:
-    """Enhanced blackspot detector that
+    """Enhanced blackspot detector that ONLY detects on floor surfaces"""
 
     def __init__(self, model_path: str = None):
         self.model_path = model_path
         self.predictor = None
-        #
+        # STRICT floor-related classes in ADE20K - ONLY walking surfaces
         self.floor_classes = [3, 4, 13, 28, 78]  # floor, wood floor, rug, carpet, mat
+        # Explicitly exclude these classes
+        self.excluded_classes = [
+            5,   # ceiling
+            7,   # bed
+            10,  # sofa
+            15,  # table
+            19,  # chair
+            23,  # cabinet
+            30,  # desk
+            33,  # counter
+            34,  # stool
+            36,  # bench
+        ]
 
     def download_model(self) -> str:
         """Download blackspot model from HuggingFace"""
```
```diff
@@ -190,13 +203,13 @@ class ImprovedBlackspotDetector:
             return model_path
         except Exception as e:
             logger.warning(f"Could not download blackspot model from HF: {e}")
-
+
         # Fallback to local path
         local_path = f"./output_floor_blackspot/{BLACKSPOT_MODEL_FILE}"
         if os.path.exists(local_path):
             logger.info(f"Using local blackspot model: {local_path}")
             return local_path
-
+
         return None
 
     def initialize(self, threshold: float = 0.5) -> bool:
```
```diff
@@ -205,11 +218,11 @@ class ImprovedBlackspotDetector:
         # Get model path
         if self.model_path is None:
             self.model_path = self.download_model()
-
+
         if self.model_path is None:
             logger.error("No blackspot model available")
             return False
-
+
         cfg = get_cfg()
         cfg.merge_from_file(
             model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")
```
```diff
@@ -232,9 +245,9 @@ class ImprovedBlackspotDetector:
         blackspot_mask: np.ndarray,
         segmentation: np.ndarray,
         floor_mask: np.ndarray,
-        overlap_threshold: float = 0.
+        overlap_threshold: float = 0.9  # INCREASED threshold for stricter detection
     ) -> bool:
-        """
+        """STRICTLY check if a blackspot is actually on a floor surface"""
         if np.sum(blackspot_mask) == 0:
             return False
 
```
```diff
@@ -249,12 +262,23 @@ class ImprovedBlackspotDetector:
         if len(blackspot_pixels) == 0:
             return False
 
+        # Check if ANY excluded class pixels are in the blackspot
         unique_classes, counts = np.unique(blackspot_pixels, return_counts=True)
+        for excluded_class in self.excluded_classes:
+            if excluded_class in unique_classes:
+                # If more than 5% of pixels belong to excluded class, reject
+                excluded_ratio = counts[unique_classes == excluded_class][0] / len(blackspot_pixels)
+                if excluded_ratio > 0.05:
+                    logger.debug(f"Rejected blackspot with {excluded_ratio:.2%} pixels on excluded class {excluded_class}")
+                    return False
+
+        # Verify floor pixels dominate
         floor_pixel_count = sum(
-            counts[unique_classes == cls]
+            counts[unique_classes == cls][0] if cls in unique_classes else 0
+            for cls in self.floor_classes
         )
         floor_ratio = floor_pixel_count / len(blackspot_pixels)
-        return floor_ratio > 0.
+        return floor_ratio > 0.85  # INCREASED to 85% floor pixels required
 
     def filter_non_floor_blackspots(
         self,
```
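This reworked check is the heart of the commit: a candidate blackspot is rejected if more than 5% of its pixels land on any excluded class, and accepted only when floor classes cover more than 85% of it. A self-contained sketch of that arithmetic (thresholds copied from the diff; toy 1-D arrays stand in for real image masks):

```python
import numpy as np

floor_classes = [3, 4, 13, 28, 78]                          # floor, wood floor, rug, carpet, mat
excluded_classes = [5, 7, 10, 15, 19, 23, 30, 33, 34, 36]   # ceiling, bed, sofa, table, ...

def is_on_floor_surface(blackspot_mask, segmentation):
    """Accept a blackspot only if it sits almost entirely on floor pixels."""
    blackspot_pixels = segmentation[blackspot_mask]
    if len(blackspot_pixels) == 0:
        return False
    unique_classes, counts = np.unique(blackspot_pixels, return_counts=True)
    # Gate 1: reject if >5% of pixels fall on any excluded (non-floor) class.
    for excluded in excluded_classes:
        if excluded in unique_classes:
            excluded_ratio = counts[unique_classes == excluded][0] / len(blackspot_pixels)
            if excluded_ratio > 0.05:
                return False
    # Gate 2: require floor classes to cover >85% of the blackspot.
    floor_count = sum(
        counts[unique_classes == cls][0] if cls in unique_classes else 0
        for cls in floor_classes
    )
    return floor_count / len(blackspot_pixels) > 0.85

# 100-pixel toy blackspot: 90 pixels on floor (3), 10 on a table (15).
segmentation = np.array([3] * 90 + [15] * 10)
blackspot_mask = np.ones(100, dtype=bool)
print(is_on_floor_surface(blackspot_mask, segmentation))  # False: 10% sits on an excluded class
```

Note that the toy spot is 90% floor, so it would pass the floor-ratio gate alone; the excluded-class gate is what rejects it.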
```diff
@@ -262,7 +286,7 @@ class ImprovedBlackspotDetector:
         segmentation: np.ndarray,
         floor_mask: np.ndarray
     ) -> List[np.ndarray]:
-        """
+        """STRICTLY filter out blackspots that are not on floor surfaces"""
         filtered_masks = []
         for mask in blackspot_masks:
             if self.is_on_floor_surface(mask, segmentation, floor_mask):
```
```diff
@@ -277,7 +301,7 @@ class ImprovedBlackspotDetector:
         segmentation: np.ndarray,
         floor_prior: Optional[np.ndarray] = None
     ) -> Dict:
-        """Detect blackspots
+        """Detect blackspots ONLY on floor surfaces with strict filtering"""
         if self.predictor is None:
             raise RuntimeError("Blackspot detector not initialized")
 
```
```diff
@@ -322,6 +346,7 @@ class ImprovedBlackspotDetector:
         for cls in self.floor_classes:
             floor_mask |= (segmentation == cls)
 
+        # STRICT filtering of blackspots
         filtered_blackspot_masks = self.filter_non_floor_blackspots(
             blackspot_masks, segmentation, floor_mask
         )
```
```diff
@@ -356,12 +381,15 @@ class ImprovedBlackspotDetector:
         """Create clear visualization of blackspots on floors only"""
         vis = image.copy()
 
+        # Show floor areas in semi-transparent green
         floor_overlay = vis.copy()
         floor_overlay[floor_mask] = [0, 255, 0]
         vis = cv2.addWeighted(vis, 0.7, floor_overlay, 0.3, 0)
 
+        # Show blackspots in red
         vis[blackspot_mask] = [255, 0, 0]
 
+        # Draw contours around blackspots
         blackspot_contours, _ = cv2.findContours(
             blackspot_mask.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
         )
```
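The comments added here describe a three-step overlay: tint floor pixels green at 30% opacity with `cv2.addWeighted`, paint blackspot pixels solid red, then outline them with `cv2.findContours`/`cv2.drawContours`. A minimal standalone sketch on synthetic data (RGB channel order assumed, as in the diff):

```python
import cv2
import numpy as np

image = np.full((120, 160, 3), 200, dtype=np.uint8)      # synthetic gray "room"
floor_mask = np.zeros((120, 160), dtype=bool)
floor_mask[60:, :] = True                                # bottom half is floor
blackspot_mask = np.zeros((120, 160), dtype=bool)
blackspot_mask[90:110, 40:80] = True                     # one dark patch on the floor

vis = image.copy()
floor_overlay = vis.copy()
floor_overlay[floor_mask] = [0, 255, 0]                  # floor tinted green
vis = cv2.addWeighted(vis, 0.7, floor_overlay, 0.3, 0)   # 30% opacity blend
vis[blackspot_mask] = [255, 0, 0]                        # blackspots in solid red

contours, _ = cv2.findContours(
    blackspot_mask.astype(np.uint8), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
)
cv2.drawContours(vis, contours, -1, (255, 255, 255), 2)  # white outlines
```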
```diff
@@ -536,11 +564,11 @@ class NeuroNestApp:
         return stats
 
 ########################################
-#
+# GRADIO INTERFACE (COMPATIBLE WITH 3.1.7)
 ########################################
 
 def create_gradio_interface():
-    """Create the
+    """Create the Gradio interface compatible with version 3.1.7"""
 
     app = NeuroNestApp()
     oneformer_ok, blackspot_ok = app.initialize()
```
```diff
@@ -557,7 +585,7 @@ def create_gradio_interface():
     ):
         """Wrapper function for Gradio interface"""
         if image_path is None:
-            return None, None, None, "
+            return None, None, None, "Please upload an image"
 
         results = app.analyze_image(
             image_path=image_path,
```
```diff
@@ -568,7 +596,7 @@ def create_gradio_interface():
         )
 
         if "error" in results:
-            return None, None, None, f"
+            return None, None, None, f"Error: {results['error']}"
 
         seg_output = results['segmentation']['visualization'] if results['segmentation'] else None
         blackspot_output = results['blackspot']['visualization'] if results['blackspot'] else None
```
```diff
@@ -595,660 +623,215 @@ def create_gradio_interface():
         return seg_output, blackspot_output, contrast_output, report
 
     def generate_comprehensive_report(results: Dict, contrast_report: str, blackspot_report: str) -> str:
-        """Generate comprehensive analysis report
+        """Generate comprehensive analysis report"""
         report = []
-
-
-        report.append("# 🧠 NeuroNest Environmental Safety Analysis Report")
-        report.append(f"*Generated: {time.strftime('%Y-%m-%d %H:%M:%S')}*")
-        report.append("---\n")
-
-        # Quick Summary Box
-        report.append("## 📊 Executive Summary")
-
-        # Determine overall safety status
-        has_critical = False
-        has_issues = False
-
-        if results['blackspot'] and results['statistics']['blackspot']['coverage_percentage'] > 0:
-            has_issues = True
-            if results['statistics']['blackspot']['coverage_percentage'] > 5:
-                has_critical = True
-
-        if results['contrast'] and results['statistics']['contrast']['critical_issues'] > 0:
-            has_critical = True
-            has_issues = True
-        elif results['contrast'] and results['statistics']['contrast']['low_contrast_pairs'] > 0:
-            has_issues = True
-
-        # Safety Status Box
-        if has_critical:
-            report.append("""
-            <div style='background-color: #fef2f2; border: 2px solid #dc2626; padding: 15px; border-radius: 8px; margin: 10px 0;'>
-            <h3 style='color: #dc2626; margin: 0;'>🚨 CRITICAL SAFETY ISSUES DETECTED</h3>
-            <p style='margin: 5px 0 0 0;'><em>Immediate modifications required for Alzheimer's/dementia care safety</em></p>
-            </div>
-            """)
-        elif has_issues:
-            report.append("""
-            <div style='background-color: #fffbeb; border: 2px solid #f59e0b; padding: 15px; border-radius: 8px; margin: 10px 0;'>
-            <h3 style='color: #f59e0b; margin: 0;'>⚠️ SAFETY CONCERNS IDENTIFIED</h3>
-            <p style='margin: 5px 0 0 0;'><em>Several areas need improvement for optimal cognitive accessibility</em></p>
-            </div>
-            """)
-        else:
-            report.append("""
-            <div style='background-color: #f0fdf4; border: 2px solid #22c55e; padding: 15px; border-radius: 8px; margin: 10px 0;'>
-            <h3 style='color: #22c55e; margin: 0;'>✅ ENVIRONMENT PASSES SAFETY ASSESSMENT</h3>
-            <p style='margin: 5px 0 0 0;'><em>This space meets recommended standards for cognitive accessibility</em></p>
-            </div>
-            """)
-
-        report.append("\n---\n")
+        report.append("# 🧠 NeuroNest Analysis Report\n")
+        report.append(f"*Generated: {time.strftime('%Y-%m-%d %H:%M:%S')}*\n")
 
-        # Object Detection Results
         if results['segmentation']:
             stats = results['statistics'].get('segmentation', {})
-            report.append("## 🎯 Object
-            report.append(f""
-
-            <strong>Total Objects Identified:</strong> {stats.get('num_classes', 'N/A')}<br>
-            <strong>Image Resolution:</strong> {stats.get('image_size', 'N/A')}<br>
-            <em style='color: #6b7280;'>Successfully identified floors, walls, furniture, and other room elements</em>
-            </div>
-            """)
+            report.append("## 🎯 Object Segmentation")
+            report.append(f"- **Classes detected:** {stats.get('num_classes', 'N/A')}")
+            report.append(f"- **Resolution:** {stats.get('image_size', 'N/A')}")
             report.append("")
 
-
-        report.append(
-        if results['blackspot'] and results['statistics']['blackspot']['coverage_percentage'] > 0:
-            report.append("### 🔍 Detection Results:")
-            report.append(f"""
-            <div style='background-color: #1f2937; color: white; padding: 15px; border-radius: 8px; font-family: monospace;'>
-            {blackspot_report}
-            </div>
-            """)
-
-            # Severity assessment with visual indicators
-            coverage = results['statistics']['blackspot']['coverage_percentage']
-            if coverage > 10:
-                severity_color = "#dc2626"
-                severity_text = "SEVERE"
-                severity_desc = "Large dark areas detected - High fall risk"
-                severity_icon = "⛔"
-            elif coverage > 5:
-                severity_color = "#ef4444"
-                severity_text = "HIGH RISK"
-                severity_desc = "Significant dark areas present"
-                severity_icon = "🔴"
-            elif coverage > 2:
-                severity_color = "#f59e0b"
-                severity_text = "MODERATE"
-                severity_desc = "Some dark areas that may cause confusion"
-                severity_icon = "🟡"
-            else:
-                severity_color = "#22c55e"
-                severity_text = "LOW"
-                severity_desc = "Minor dark areas detected"
-                severity_icon = "🟢"
-
-            report.append(f"""
-            <div style='background-color: {severity_color}22; border-left: 4px solid {severity_color}; padding: 10px; margin: 10px 0;'>
-            <strong style='color: {severity_color};'>{severity_icon} {severity_text}:</strong> {severity_desc}
-            </div>
-            """)
-        else:
-            report.append("""
-            <div style='background-color: #f0fdf4; padding: 15px; border-radius: 8px;'>
-            ✅ <strong>No hazardous blackspots detected on floor surfaces</strong>
-            </div>
-            """)
+        report.append("## ⚫ Blackspot Analysis (Floor Surfaces Only)")
+        report.append(blackspot_report)
         report.append("")
 
-
-        report.append(
-        if results['contrast'] and results['statistics']['contrast']['low_contrast_pairs'] > 0:
-            cs = results['statistics']['contrast']
-
-            # Summary table with better styling
-            report.append("### 📈 Analysis Summary:")
-            report.append("""
-            <table style='width: 100%; border-collapse: collapse; margin: 10px 0;'>
-            <thead style='background-color: #f3f4f6;'>
-            <tr>
-            <th style='padding: 10px; text-align: left; border-bottom: 2px solid #e5e7eb;'>Severity Level</th>
-            <th style='padding: 10px; text-align: center; border-bottom: 2px solid #e5e7eb;'>Count</th>
-            <th style='padding: 10px; text-align: left; border-bottom: 2px solid #e5e7eb;'>Impact</th>
-            </tr>
-            </thead>
-            <tbody>
-            """)
-
-            report.append(f"""
-            <tr style='background-color: #fef2f2;'>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'><strong>🔴 Critical</strong></td>
-            <td style='padding: 10px; text-align: center; border-bottom: 1px solid #e5e7eb;'><strong>{cs['critical_issues']}</strong></td>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'>Immediate safety hazard</td>
-            </tr>
-            <tr style='background-color: #fffbeb;'>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'><strong>🟠 High</strong></td>
-            <td style='padding: 10px; text-align: center; border-bottom: 1px solid #e5e7eb;'><strong>{cs['high_priority_issues']}</strong></td>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'>Significant navigation risk</td>
-            </tr>
-            <tr style='background-color: #fefce8;'>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'><strong>🟡 Medium</strong></td>
-            <td style='padding: 10px; text-align: center; border-bottom: 1px solid #e5e7eb;'><strong>{cs['medium_priority_issues']}</strong></td>
-            <td style='padding: 10px; border-bottom: 1px solid #e5e7eb;'>Potential confusion</td>
-            </tr>
-            </tbody>
-            </table>
-            """)
-
-            # Detailed issues
-            report.append("### Detailed Findings:")
-            report.append("""
-            <div style='background-color: #f9fafb; border: 1px solid #e5e7eb; padding: 15px; border-radius: 8px; font-family: monospace; font-size: 0.9em;'>
-            <pre style='margin: 0; white-space: pre-wrap;'>""" + contrast_report + """</pre>
-            </div>
-            """)
-        else:
-            report.append("""
-            <div style='background-color: #f0fdf4; padding: 15px; border-radius: 8px;'>
-            ✅ <strong>All adjacent objects have sufficient contrast</strong><br>
-            <em style='color: #6b7280;'>Environment meets WCAG 2.1 accessibility guidelines</em>
-            </div>
-            """)
+        report.append("## 🎨 Universal Contrast Analysis (All Adjacent Objects)")
+        report.append(contrast_report)
         report.append("")
 
-        [… 5 removed lines lost in page extraction …]
+        report.append("## 📋 Recommendations for Alzheimer's Care")
+
+        has_issues = False
+
         if results['blackspot'] and results['statistics']['blackspot']['coverage_percentage'] > 0:
-        [… 16 removed lines lost in page extraction …]
-        if
-        [… 26 removed lines lost in page extraction …]
-        # Sort by priority
-        priority_order = {'CRITICAL': 0, 'HIGH': 1, 'MEDIUM': 2}
-        recommendations.sort(key=lambda x: priority_order.get(x['priority'], 3))
-
-        for rec in recommendations:
-            if rec['priority'] == 'CRITICAL':
-                bg_color = "#fef2f2"
-                border_color = "#dc2626"
-                timeline = "IMMEDIATE ACTION REQUIRED"
-            elif rec['priority'] == 'HIGH':
-                bg_color = "#fffbeb"
-                border_color = "#f59e0b"
-                timeline = "Address Within 1 Week"
-            else:
-                bg_color = "#f0f9ff"
-                border_color = "#3b82f6"
-                timeline = "Address Within 1 Month"
-
-            report.append(f"""
-            <div style='background-color: {bg_color}; border-left: 4px solid {border_color}; padding: 15px; margin: 15px 0; border-radius: 4px;'>
-            <h3 style='margin: 0 0 10px 0; color: {border_color};'>{rec['icon']} {rec['category']}</h3>
-            <p style='margin: 0 0 10px 0; font-style: italic; color: #6b7280;'>{timeline}</p>
-            <ul style='margin: 0; padding-left: 20px;'>
-            """)
-
-            for action in rec['actions']:
-                report.append(f"<li style='margin: 5px 0;'>{action}</li>")
-
-            report.append("""
-            </ul>
-            </div>
-            """)
-        else:
-            report.append("""
-            <div style='background-color: #f0fdf4; padding: 20px; border-radius: 8px; text-align: center;'>
-            <h3 style='color: #22c55e; margin: 0;'>✅ No immediate actions required</h3>
-            <p style='margin: 10px 0 0 0;'>This environment is well-optimized for individuals with Alzheimer's or dementia.</p>
-            </div>
-            """)
-
-        # Footer
-        report.append("""
-        ---
-        <div style='text-align: center; color: #6b7280; margin-top: 30px; font-size: 0.9em;'>
-        <em>Report generated by NeuroNest v2.0 - Texas State University</em>
-        </div>
-        """)
-
+            has_issues = True
+            report.append("\n### Blackspot Mitigation:")
+            report.append("- Replace dark flooring materials with lighter alternatives (min 70% luminance)")
+            report.append("- Install 3000+ lumen LED lighting in affected areas")
+            report.append("- Use light-colored rugs or runners to cover dark spots")
+            report.append("- Add contrasting tape or markers around blackspot perimeters")
+
+        if results['contrast'] and results['statistics']['contrast']['low_contrast_pairs'] > 0:
+            has_issues = True
+            report.append("\n### Contrast Improvements:")
+
+            # Get specific recommendations based on issue types
+            contrast_issues = results['contrast']['issues']
+            critical_issues = [i for i in contrast_issues if i['severity'] == 'critical']
+            high_issues = [i for i in contrast_issues if i['severity'] == 'high']
+
+            if critical_issues:
+                report.append("\n**CRITICAL - Immediate attention required:**")
+                for issue in critical_issues[:3]:
+                    cat1, cat2 = issue['categories']
+                    wcag = issue['wcag_ratio']
+                    report.append(f"- {cat1.title()} ↔ {cat2.title()}: Current {wcag:.1f}:1, Need 7:1 minimum")
+                    report.append(f"  - Hue difference: {issue['hue_difference']:.0f}° (need >30°)")
+                    report.append(f"  - Saturation difference: {issue['saturation_difference']:.0f}% (need >50%)")
+
+            if high_issues:
+                report.append("\n**HIGH PRIORITY:**")
+                for issue in high_issues[:3]:
+                    cat1, cat2 = issue['categories']
+                    wcag = issue['wcag_ratio']
+                    report.append(f"- {cat1.title()} ↔ {cat2.title()}: Current {wcag:.1f}:1, Need 4.5:1 minimum")
+
+            report.append("\n**General recommendations:**")
+            report.append("- Use warm colors (red, yellow, orange) for important objects")
+            report.append("- Maintain 70% luminance difference between adjacent surfaces")
+            report.append("- Avoid similar hues - ensure 30+ degree separation on color wheel")
+            report.append("- Use high saturation colors, avoid pastels or muted tones")
+            report.append("- Add textured surfaces to supplement color contrast")
+
+        if not has_issues:
+            report.append("\n✅ **Excellent!** This environment appears well-optimized for individuals with Alzheimer's.")
+            report.append("No significant visual hazards detected.")
+
         return "\n".join(report)
 
-        [… 12 removed lines lost in page extraction …]
-        margin-bottom: 2rem;
-        text-align: center;
-    }
-    .main-header h1 {
-        margin: 0 0 0.5rem 0;
-        font-size: 2.5rem;
-        font-weight: 700;
-    }
-    .main-header p {
-        margin: 0;
-        opacity: 0.9;
-        font-size: 1.1rem;
-    }
-    .control-panel {
-        background-color: #f9fafb;
-        border: 1px solid #e5e7eb;
-        border-radius: 12px;
-        padding: 1.5rem;
-        margin-bottom: 1.5rem;
-    }
-    .info-box {
-        background-color: #eff6ff;
-        border: 1px solid #3b82f6;
-        border-radius: 8px;
-        padding: 1rem;
-        margin: 1rem 0;
-    }
-    .warning-box {
-        background-color: #fef3c7;
-        border: 1px solid #f59e0b;
-        border-radius: 8px;
-        padding: 1rem;
-        margin: 1rem 0;
-    }
-    .button-primary {
-        background: linear-gradient(135deg, #667eea 0%, #764ba2 100%) !important;
-        color: white !important;
-        font-size: 1.2rem !important;
-        font-weight: 600 !important;
-        padding: 14px 28px !important;
-        border-radius: 8px !important;
-        border: none !important;
-        cursor: pointer !important;
-        transition: transform 0.2s !important;
-    }
-    .button-primary:hover {
-        transform: translateY(-2px) !important;
-        box-shadow: 0 10px 20px rgba(0,0,0,0.1) !important;
-    }
-    .result-section {
-        margin-top: 2rem;
-    }
-    .markdown-text {
-        font-size: 1.05rem;
-        line-height: 1.7;
-        color: #1f2937;
-    }
-    .analysis-report {
-        background-color: #ffffff;
-        border: 1px solid #e5e7eb;
-        border-radius: 12px;
-        padding: 2rem;
-        margin-top: 1.5rem;
-        box-shadow: 0 1px 3px rgba(0,0,0,0.1);
-    }
+    title = "🧠 NeuroNest: AI-Powered Environment Safety Analysis"
+    description = """
+    **Advanced visual analysis for Alzheimer's and dementia care environments.**
+
+    **Texas State CS & Interior Design Dept. - Abheek Pradhan, Dr. Nadim Adi, Dr. Greg Lakomski**
+
+    This system provides:
+    - **Object Segmentation**: Identifies all room elements (floors, walls, furniture)
+    - **Floor-Only Blackspot Detection**: Locates dangerous dark areas ONLY on walking surfaces
+    - **Universal Contrast Analysis**: Evaluates visibility between ALL adjacent objects
+
+    *Following WCAG 2.1 guidelines and dementia-specific visual accessibility standards*
     """
 
-
-
-    gr.
-
-    <h1>🧠 NeuroNest: AI-Powered Environment Safety Analysis</h1>
-    <p>Advanced visual analysis for Alzheimer's and dementia care environments</p>
-    <p style="font-size: 0.9rem; margin-top: 0.5rem;">Texas State CS & Interior Design Dept. • Abheek Pradhan, Dr. Nadim Adi, Dr. Greg Lakomski</p>
-    </div>
-    """)
-
-        # Quick Start Guide
-        with gr.Row():
-            gr.HTML("""
-            <div class="info-box" style="width: 100%;">
-            <h3 style="margin-top: 0;">🚀 Quick Start Guide</h3>
-            <ol style="margin: 10px 0;">
-            <li><strong>Upload</strong> a well-lit photo of the room</li>
-            <li><strong>Adjust</strong> detection settings if needed (optional)</li>
-            <li><strong>Click</strong> "Analyze Environment" button</li>
-            <li><strong>Review</strong> the visual results and detailed report below</li>
-            </ol>
-            <p style="margin-bottom: 0;"><em>The analysis identifies safety hazards and provides actionable recommendations for creating dementia-friendly spaces.</em></p>
-            </div>
-            """)
+    # Create interface compatible with Gradio 3.1.7
+    with gr.Blocks(title="NeuroNest") as interface:
+        gr.Markdown(f"# {title}")
+        gr.Markdown(description)
 
-        #
+        # Information about model availability
         if not blackspot_ok:
-            gr.
-
-
-            Upload the model to HuggingFace or place in local directory to enable floor hazard detection.
-            </div>
+            gr.Markdown("""
+            ⚠️ **Note:** Blackspot detection model not available.
+            To enable blackspot detection, upload the model to HuggingFace or ensure it's in the local directory.
             """)
 
-        #
-        with gr.
-            gr.
-        [… 33 removed lines lost in page extraction …]
-            ""
-        [… 15 removed lines lost in page extraction …]
-                elem_classes="slider-custom"
-            )
-
-            gr.HTML("</div>")
-
-        # Image Upload Section
-        with gr.Group():
-            gr.Markdown("## 📸 Room Image Upload")
-
-            with gr.Row():
-                with gr.Column(scale=2):
-                    image_input = gr.Image(
-                        label="Upload Room Photo",
-                        type="filepath",
-                        height=400,
-                        elem_classes="image-upload"
-                    )
-
-                    analyze_button = gr.Button(
-                        "🔍 Analyze Environment",
-                        variant="primary",
-                        size="lg",
-                        elem_classes="button-primary"
-                    )
-
-                with gr.Column(scale=1):
-                    gr.HTML("""
-                    <div class="info-box">
-                    <h4 style="margin-top: 0;">📷 Photo Guidelines</h4>
-                    <ul style="margin: 10px 0;">
-                    <li>Stand in corner for wide view</li>
-                    <li>Include floor, walls & furniture</li>
-                    <li>Turn on all room lights</li>
-                    <li>Avoid shadows and glare</li>
-                    <li>Use horizontal orientation</li>
-                    </ul>
-                    <p style="margin-bottom: 0;"><strong>Best:</strong> Daytime photos with natural light</p>
-                    </div>
-                    """)
-
-        # Results Section
-        with gr.Group():
-            gr.Markdown("## 🔬 Visual Analysis Results")
-
-            gr.HTML("""
-            <p style="color: #6b7280; margin-bottom: 1rem;">
-            Hover over images for details • Colors indicate: 🟢 Safe • 🟡 Caution • 🔴 Hazard
-            </p>
-            """)
+        # Create two columns using Row for controls
+        with gr.Row():
+            with gr.Column():
+                gr.Markdown("### Detection Options")
+                enable_blackspot = gr.Checkbox(
+                    value=blackspot_ok,
+                    label="Enable Floor Blackspot Detection",
+                    interactive=blackspot_ok
+                )
+                blackspot_threshold = gr.Slider(
+                    minimum=0.1,
+                    maximum=0.9,
+                    value=0.5,
+                    step=0.05,
+                    label="Blackspot Sensitivity",
+                    visible=blackspot_ok
+                )
+
+            with gr.Column():
+                gr.Markdown("### Contrast Analysis")
+                enable_contrast = gr.Checkbox(
+                    value=True,
+                    label="Enable Universal Contrast Analysis"
+                )
+                contrast_threshold = gr.Slider(
+                    minimum=3.0,
+                    maximum=7.0,
+                    value=4.5,
+                    step=0.1,
+                    label="WCAG Contrast Threshold"
+                )
+
+        # Image upload and analyze button
+        with gr.Row():
+            with gr.Column():
+                image_input = gr.Image(
+                    label="📸 Upload Room Image",
+                    type="filepath"
+                )
+                analyze_button = gr.Button(
+                    "🔍 Analyze Environment",
+                    variant="primary"
+                )
+
+        # Results display
+        gr.Markdown("## Analysis Results")
+
+        with gr.Row():
+            seg_display = gr.Image(
+                label="🎯 Segmented Objects",
+                interactive=False
+            )
 
+        if blackspot_ok:
             with gr.Row():
-        [… 6 removed lines lost in page extraction …]
-                    )
-                    gr.HTML("""
-                    <p style="text-align: center; color: #6b7280; font-size: 0.9rem; margin-top: 0.5rem;">
-                    Identifies all room elements
-                    </p>
-                    """)
-
-                with gr.Column():
-                    blackspot_display = gr.Image(
-                        label="⚫ Floor Blackspot Detection",
-                        height=300,
-                        interactive=False,
-                        visible=blackspot_ok,
-                        elem_classes="result-image"
-                    )
-                    if blackspot_ok:
-                        gr.HTML("""
-                        <p style="text-align: center; color: #6b7280; font-size: 0.9rem; margin-top: 0.5rem;">
-                        Red = Hazardous dark areas
-                        </p>
-                        """)
-
-                with gr.Column():
-                    contrast_display = gr.Image(
-                        label="🎨 Contrast Analysis",
-                        height=300,
-                        interactive=False,
-                        elem_classes="result-image"
-                    )
-                    gr.HTML("""
-                    <p style="text-align: center; color: #6b7280; font-size: 0.9rem; margin-top: 0.5rem;">
-                    Colored borders = Low contrast
-                    </p>
-                    """)
-
-        # Detailed Report Section
-        with gr.Group():
-            gr.Markdown("## 📊 Comprehensive Safety Report")
+                blackspot_display = gr.Image(
+                    label="⚫ Blackspot Detection (Floor Only)",
+                    interactive=False
+                )
+        else:
+            blackspot_display = gr.Image(visible=False)
 
-        [… 4 removed lines lost in page extraction …]
-            <p>The report will include detailed findings and personalized recommendations</p>
-            </div>
-            """,
-            elem_classes="analysis-report markdown-text"
+        with gr.Row():
+            contrast_display = gr.Image(
+                label="🎨 Contrast Analysis (All Adjacent Objects)",
+                interactive=False
             )
 
-        #
-        [… 4 removed lines lost in page extraction …]
-            ### What Each Analysis Does:
-
-            **1. Object Segmentation** 🎯
-            - Identifies all elements in the room (floors, walls, furniture, etc.)
-            - Creates a foundation map for other analyses
-            - Helps understand spatial relationships
-
-            **2. Floor Blackspot Detection** ⚫
-            - Finds dark areas on walking surfaces only
-            - Ignores shadows on walls or furniture
-            - Critical for preventing falls in dementia patients
-
-            **3. Universal Contrast Analysis** 🎨
-            - Examines color differences between ALL adjacent objects
-            - Not just floor-to-furniture, but everything touching
-            - Ensures clear visual boundaries throughout the space
-
-            ### 📊 Interpreting Results:
-
-            **Severity Levels:**
-            - **🔴 Critical**: Immediate safety hazard - fix within 24 hours
-            - **🟠 High**: Significant risk - address within 1 week
-            - **🟡 Medium**: Potential confusion - fix within 1 month
-            - **🟢 Low/Safe**: Acceptable for most individuals
-
-            **WCAG Contrast Ratios:**
-            - **3:1** - Minimum for large text/graphics
-            - **4.5:1** - Standard for normal text (recommended)
-            - **7:1** - Enhanced for critical areas like stairs
-
-            ### 💡 Common Issues & Solutions:
-
-            **Low Contrast Problems:**
-            - White walls + beige furniture → Add colorful cushions
-            - Similar floor/wall colors → Install contrasting baseboards
-            - Invisible door frames → Paint doors in contrasting colors
-
-            **Blackspot Hazards:**
-            - Dark rugs on light floors → Replace with light colors
-            - Shadows from poor lighting → Add floor-level LED strips
-            - Dark flooring materials → Install light-colored overlays
-
-            ### 🏠 Room-Specific Tips:
-
-            **Living Room:**
-            - Ensure sofa contrasts with floor AND wall
-            - Coffee tables need defined edges
-            - Pathways should be clearly visible
-
-            **Bedroom:**
-            - Bed frame must contrast with floor
-            - Night path to bathroom needs good visibility
-            - Nightstands should stand out from walls
-
-            **Bathroom:**
-            - Critical contrast between floor and fixtures
-            - Non-slip surfaces in contrasting colors
-            - Grab bars in colors that pop against walls
-
-            ### 📸 Taking Better Photos:
-
-            1. **Lighting**: Turn on ALL lights, open curtains
-            2. **Angle**: Stand in corner, capture whole room
-            3. **Height**: Hold camera at eye level
-            4. **Multiple**: Take several angles if needed
-            5. **Quality**: Higher resolution = better analysis
-
-            ### ⚠️ Special Considerations:
-
-            - Patterns can be confusing - solid colors work better
-            - Glossy surfaces may create false shadows
-            - Consider time of day - shadows change
-            - Test changes with actual lighting conditions
-            """)
+        # Analysis report
+        gr.Markdown("## Detailed Analysis Report")
+        analysis_report = gr.Markdown(
+            value="Upload an image and click 'Analyze Environment' to begin."
+        )
 
-        #
-        with gr.
-            gr.
-        [… 21 removed lines lost in page extraction …]
+        # Additional information
+        with gr.Accordion("📚 Understanding the Analysis", open=False):
+            gr.Markdown("""
+            ### Color Contrast Guidelines for Alzheimer's Care:
+
+            **WCAG Contrast Requirements:**
+            - **3:1** - Absolute minimum for large graphics
+            - **4.5:1** - Standard for normal visibility
+            - **7:1** - Enhanced for critical areas (stairs, doors)
+
+            **Additional Perceptual Requirements:**
+            - **Hue Difference**: >30° on color wheel
+            - **Saturation Difference**: >50% for clear distinction
+            - **Luminance Difference**: >70% between surfaces
+
+            **Priority Areas:**
+            1. **Critical**: Floor-stairs, floor-door, wall-stairs
+            2. **High**: Floor-furniture, wall-door, wall-furniture
+            3. **Medium**: All other adjacent objects
+
+            ### Blackspot Detection:
+            - **ONLY** detects dark areas on floors, rugs, carpets, and mats
+            - **EXCLUDES** shadows on furniture, tables, ceilings, walls
+            - Dark floor areas can appear as "holes" to dementia patients
+
+            ### Best Practices:
+            - Use warm colors (red, yellow, orange) for visibility
+            - Avoid pastels and muted tones
+            - Ensure all adjacent objects have distinct colors
+            - Add texture to supplement color contrast
+            """)
 
-        [… 4 removed lines lost in page extraction …]
-        <p style='font-size: 0.9rem;'>Creating safer spaces for cognitive health through computer vision and AI</p>
-        <p style='font-size: 0.85rem; margin-top: 1rem;'>
-        © 2024 Texas State University • Computer Science & Interior Design Departments<br>
-        For research collaboration: neuronest@txstate.edu
-        </p>
-        </div>
+        gr.Markdown("""
+        ---
+        **NeuroNest** v2.0 - Creating safer environments for cognitive health through AI
+        *Strict floor-only blackspot detection & comprehensive contrast analysis*
         """)
 
-        #
+        # Connect the analyze button
         analyze_button.click(
             fn=analyze_wrapper,
             inputs=[
@@ -1260,7 +843,7 @@ def create_gradio_interface():
             ],
             outputs=[
                 seg_display,
-                blackspot_display,
+                blackspot_display if blackspot_ok else seg_display,  # Dummy output if not available
                 contrast_display,
                 analysis_report
             ]
```
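The rebuilt interface deliberately restricts itself to constructs this diff itself exercises and that exist in the Gradio 3.1.x API: `gr.Blocks`, `gr.Row`/`gr.Column`, `gr.Checkbox`, `gr.Slider`, `gr.Accordion`, and `Button.click(fn=..., inputs=..., outputs=...)`. A minimal runnable sketch of the same wiring pattern (the `analyze` function is a placeholder, not the app's real pipeline):

```python
import gradio as gr  # written against the 3.1.x-era API used in this commit

def analyze(image_path, threshold):
    # Placeholder for the real segmentation/blackspot/contrast pipeline.
    return image_path, f"Analyzed {image_path} at threshold {threshold}"

with gr.Blocks(title="Demo") as interface:
    gr.Markdown("# Demo")
    with gr.Row():
        with gr.Column():
            image_input = gr.Image(label="Upload", type="filepath")
            threshold = gr.Slider(minimum=0.1, maximum=0.9, value=0.5,
                                  step=0.05, label="Sensitivity")
            analyze_button = gr.Button("Analyze", variant="primary")
        with gr.Column():
            result_image = gr.Image(label="Result", interactive=False)
            report = gr.Markdown(value="Upload an image to begin.")
    analyze_button.click(fn=analyze,
                         inputs=[image_input, threshold],
                         outputs=[result_image, report])

interface.launch()
```

The same pattern explains the final hunk: `click()` requires a fixed list of output components, so when the blackspot model is missing the code substitutes `seg_display` as a dummy output rather than changing the output arity.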
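For reference, the WCAG 2.1 contrast ratios quoted throughout the new report and accordion (3:1, 4.5:1, 7:1) are defined over relative luminance; the published formula is reproduced below (a standalone helper, not code from this commit):

```python
def _linearize(channel: float) -> float:
    """sRGB channel in [0, 1] to linear light, per the WCAG 2.1 definition."""
    return channel / 12.92 if channel <= 0.03928 else ((channel + 0.055) / 1.055) ** 2.4

def relative_luminance(rgb) -> float:
    r, g, b = (_linearize(c / 255) for c in rgb)
    return 0.2126 * r + 0.7152 * g + 0.0722 * b

def wcag_contrast_ratio(rgb1, rgb2) -> float:
    lighter, darker = sorted((relative_luminance(rgb1), relative_luminance(rgb2)), reverse=True)
    return (lighter + 0.05) / (darker + 0.05)

print(round(wcag_contrast_ratio((255, 255, 255), (0, 0, 0)), 1))        # 21.0, the maximum
print(round(wcag_contrast_ratio((255, 255, 255), (119, 119, 119)), 2))  # ~4.5, the slider's default threshold
```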