lolout1 committed on
Commit
3311f6c
·
1 Parent(s): 2d53641

finishing up defect logic

Browse files
gradio_test.py CHANGED
@@ -67,11 +67,15 @@ ONEFORMER_CONFIG = {
67
  }
68
  }
69
 
 
 
 
 
70
  ########################################
71
  # IMPORT UNIVERSAL CONTRAST ANALYZER
72
  ########################################
73
 
74
- from universal_contrast_analyzer import UniversalContrastAnalyzer
75
 
76
  ########################################
77
  # ONEFORMER INTEGRATION
@@ -168,15 +172,44 @@ class OneFormerManager:
168
  class ImprovedBlackspotDetector:
169
  """Enhanced blackspot detector that only detects on floor surfaces"""
170
 
171
- def __init__(self, model_path: str):
172
  self.model_path = model_path
173
  self.predictor = None
174
  # Expanded floor-related classes in ADE20K
175
  self.floor_classes = [3, 4, 13, 28, 78] # floor, wood floor, rug, carpet, mat
176
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
  def initialize(self, threshold: float = 0.5) -> bool:
178
  """Initialize MaskRCNN model"""
179
  try:
 
 
 
 
 
 
 
 
180
  cfg = get_cfg()
181
  cfg.merge_from_file(
182
  model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")
@@ -363,18 +396,19 @@ class NeuroNestApp:
363
  self.contrast_analyzer = UniversalContrastAnalyzer(wcag_threshold=4.5)
364
  self.initialized = False
365
 
366
- def initialize(self, blackspot_model_path: str = "./output_floor_blackspot/model_0004999.pth"):
367
  """Initialize all components"""
368
  logger.info("Initializing NeuroNest application...")
369
 
370
  oneformer_success = self.oneformer.initialize()
371
 
 
372
  blackspot_success = False
373
- if os.path.exists(blackspot_model_path):
374
- self.blackspot_detector = ImprovedBlackspotDetector(blackspot_model_path)
375
  blackspot_success = self.blackspot_detector.initialize()
376
- else:
377
- logger.warning(f"Blackspot model not found at {blackspot_model_path}")
378
 
379
  self.initialized = oneformer_success
380
  return oneformer_success, blackspot_success
@@ -595,6 +629,25 @@ def create_gradio_interface():
595
  if results['contrast'] and results['statistics']['contrast']['low_contrast_pairs'] > 0:
596
  has_issues = True
597
  report.append("\n### Contrast Improvements:")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
598
  report.append("- Paint furniture in colors that contrast with floors/walls")
599
  report.append("- Add colored tape or markers to furniture edges")
600
  report.append("- Install LED strip lighting under furniture edges")
@@ -622,6 +675,13 @@ def create_gradio_interface():
622
  gr.Markdown(f"# {title}")
623
  gr.Markdown(description)
624
 
 
 
 
 
 
 
 
625
  # Top row: toggles and sliders
626
  with gr.Row():
627
  enable_blackspot = gr.Checkbox(
@@ -730,4 +790,3 @@ if __name__ == "__main__":
730
  except Exception as e:
731
  logger.error(f"Failed to launch application: {e}")
732
  raise
733
-
 
67
  }
68
  }
69
 
70
+ # Blackspot model configuration for HF Spaces
71
+ BLACKSPOT_MODEL_REPO = "sww35/neuronest-blackspot" # Update with your HF repo
72
+ BLACKSPOT_MODEL_FILE = "model_0004999.pth"
73
+
74
  ########################################
75
  # IMPORT UNIVERSAL CONTRAST ANALYZER
76
  ########################################
77
 
78
+ from utils.universal_contrast_analyzer import UniversalContrastAnalyzer
79
 
80
  ########################################
81
  # ONEFORMER INTEGRATION
 
172
  class ImprovedBlackspotDetector:
173
  """Enhanced blackspot detector that only detects on floor surfaces"""
174
 
175
+ def __init__(self, model_path: str = None):
176
  self.model_path = model_path
177
  self.predictor = None
178
  # Expanded floor-related classes in ADE20K
179
  self.floor_classes = [3, 4, 13, 28, 78] # floor, wood floor, rug, carpet, mat
180
 
181
+ def download_model(self) -> str:
182
+ """Download blackspot model from HuggingFace"""
183
+ try:
184
+ # Try to download from HF repo
185
+ model_path = hf_hub_download(
186
+ repo_id=BLACKSPOT_MODEL_REPO,
187
+ filename=BLACKSPOT_MODEL_FILE
188
+ )
189
+ logger.info(f"Downloaded blackspot model to: {model_path}")
190
+ return model_path
191
+ except Exception as e:
192
+ logger.warning(f"Could not download blackspot model from HF: {e}")
193
+
194
+ # Fallback to local path
195
+ local_path = f"./output_floor_blackspot/{BLACKSPOT_MODEL_FILE}"
196
+ if os.path.exists(local_path):
197
+ logger.info(f"Using local blackspot model: {local_path}")
198
+ return local_path
199
+
200
+ return None
201
+
202
  def initialize(self, threshold: float = 0.5) -> bool:
203
  """Initialize MaskRCNN model"""
204
  try:
205
+ # Get model path
206
+ if self.model_path is None:
207
+ self.model_path = self.download_model()
208
+
209
+ if self.model_path is None:
210
+ logger.error("No blackspot model available")
211
+ return False
212
+
213
  cfg = get_cfg()
214
  cfg.merge_from_file(
215
  model_zoo.get_config_file("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml")
 
396
  self.contrast_analyzer = UniversalContrastAnalyzer(wcag_threshold=4.5)
397
  self.initialized = False
398
 
399
+ def initialize(self):
400
  """Initialize all components"""
401
  logger.info("Initializing NeuroNest application...")
402
 
403
  oneformer_success = self.oneformer.initialize()
404
 
405
+ # Initialize blackspot detector with HF model
406
  blackspot_success = False
407
+ try:
408
+ self.blackspot_detector = ImprovedBlackspotDetector()
409
  blackspot_success = self.blackspot_detector.initialize()
410
+ except Exception as e:
411
+ logger.warning(f"Could not initialize blackspot detector: {e}")
412
 
413
  self.initialized = oneformer_success
414
  return oneformer_success, blackspot_success
 
629
  if results['contrast'] and results['statistics']['contrast']['low_contrast_pairs'] > 0:
630
  has_issues = True
631
  report.append("\n### Contrast Improvements:")
632
+
633
+ # Get specific recommendations based on issue types
634
+ contrast_issues = results['contrast']['issues']
635
+ critical_issues = [i for i in contrast_issues if i['severity'] == 'critical']
636
+ high_issues = [i for i in contrast_issues if i['severity'] == 'high']
637
+
638
+ if critical_issues:
639
+ report.append("\n**CRITICAL - Immediate attention required:**")
640
+ for issue in critical_issues[:3]:
641
+ cat1, cat2 = issue['categories']
642
+ report.append(f"- {cat1.title()} ↔ {cat2.title()}: Increase contrast to 7:1 minimum")
643
+
644
+ if high_issues:
645
+ report.append("\n**HIGH PRIORITY:**")
646
+ for issue in high_issues[:3]:
647
+ cat1, cat2 = issue['categories']
648
+ report.append(f"- {cat1.title()} ↔ {cat2.title()}: Increase contrast to 4.5:1 minimum")
649
+
650
+ report.append("\n**General recommendations:**")
651
  report.append("- Paint furniture in colors that contrast with floors/walls")
652
  report.append("- Add colored tape or markers to furniture edges")
653
  report.append("- Install LED strip lighting under furniture edges")
 
675
  gr.Markdown(f"# {title}")
676
  gr.Markdown(description)
677
 
678
+ # Information about model availability
679
+ if not blackspot_ok:
680
+ gr.Markdown("""
681
+ ⚠️ **Note:** Blackspot detection model not available.
682
+ To enable blackspot detection, upload the model to HuggingFace or ensure it's in the local directory.
683
+ """)
684
+
685
  # Top row: toggles and sliders
686
  with gr.Row():
687
  enable_blackspot = gr.Checkbox(
 
790
  except Exception as e:
791
  logger.error(f"Failed to launch application: {e}")
792
  raise
 
output_floor_blackspot/model_0004999.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:caa8e868334fbd8d14d8f576704be3b7ad8b1ef1473a68380ad0addd7ad56405
3
+ size 351060772
universal_contrast_analyzer.py CHANGED
@@ -83,17 +83,18 @@ class UniversalContrastAnalyzer:
83
  """Calculate WCAG 2.0 contrast ratio between two colors"""
84
  def relative_luminance(rgb):
85
  # Normalize to 0-1
86
- rgb_norm = rgb / 255.0
87
 
88
  # Apply gamma correction (linearize)
89
  rgb_linear = np.where(
90
  rgb_norm <= 0.03928,
91
  rgb_norm / 12.92,
92
- ((rgb_norm + 0.055) / 1.055) ** 2.4
93
  )
94
 
95
  # Calculate luminance using ITU-R BT.709 coefficients
96
- return np.dot(rgb_linear, [0.2126, 0.7152, 0.0722])
 
97
 
98
  lum1 = relative_luminance(color1)
99
  lum2 = relative_luminance(color2)
@@ -102,27 +103,37 @@ class UniversalContrastAnalyzer:
102
  lighter = max(lum1, lum2)
103
  darker = min(lum1, lum2)
104
 
105
- return (lighter + 0.05) / (darker + 0.05)
 
 
 
106
 
107
  def calculate_hue_difference(self, color1: np.ndarray, color2: np.ndarray) -> float:
108
  """Calculate hue difference in degrees (0-180)"""
109
- # Convert RGB to HSV
110
- hsv1 = cv2.cvtColor(color1.reshape(1, 1, 3).astype(np.uint8), cv2.COLOR_RGB2HSV)[0, 0]
111
- hsv2 = cv2.cvtColor(color2.reshape(1, 1, 3).astype(np.uint8), cv2.COLOR_RGB2HSV)[0, 0]
 
 
 
112
 
113
- # Calculate circular hue difference (0-180 range in OpenCV)
114
- hue_diff = abs(hsv1[0] - hsv2[0])
115
  if hue_diff > 90:
116
  hue_diff = 180 - hue_diff
117
 
118
  return hue_diff
119
 
120
  def calculate_saturation_difference(self, color1: np.ndarray, color2: np.ndarray) -> float:
121
- """Calculate saturation difference (0-255)"""
122
- hsv1 = cv2.cvtColor(color1.reshape(1, 1, 3).astype(np.uint8), cv2.COLOR_RGB2HSV)[0, 0]
123
- hsv2 = cv2.cvtColor(color2.reshape(1, 1, 3).astype(np.uint8), cv2.COLOR_RGB2HSV)[0, 0]
 
 
 
124
 
125
- return abs(int(hsv1[1]) - int(hsv2[1]))
 
126
 
127
  def extract_dominant_color(self, image: np.ndarray, mask: np.ndarray,
128
  sample_size: int = 1000) -> np.ndarray:
@@ -239,13 +250,15 @@ class UniversalContrastAnalyzer:
239
  # Critical: require 7:1 contrast ratio
240
  if wcag_ratio < 7.0:
241
  return False, 'critical'
242
- if hue_diff < 30 and sat_diff < 50:
 
243
  return False, 'critical'
244
 
245
  elif relationship in high_priority_pairs:
246
  # High priority: require 4.5:1 contrast ratio
247
  if wcag_ratio < 4.5:
248
  return False, 'high'
 
249
  if wcag_ratio < 7.0 and hue_diff < 20 and sat_diff < 40:
250
  return False, 'high'
251
 
@@ -253,6 +266,7 @@ class UniversalContrastAnalyzer:
253
  # Standard: require 3:1 contrast ratio minimum
254
  if wcag_ratio < 3.0:
255
  return False, 'medium'
 
256
  if wcag_ratio < 4.5 and hue_diff < 15 and sat_diff < 30:
257
  return False, 'medium'
258
 
@@ -440,12 +454,21 @@ class UniversalContrastAnalyzer:
440
  severity = issue['severity'].upper()
441
 
442
  report.append(f"{i}. [{severity}] {cat1} ↔ {cat2}")
443
- report.append(f" - WCAG Contrast Ratio: {wcag:.2f}:1 (minimum: 4.5:1)")
 
 
 
 
 
 
 
 
 
444
  report.append(f" - Hue Difference: {hue_diff:.1f}° (recommended: >30°)")
445
- report.append(f" - Saturation Difference: {sat_diff} (recommended: >50)")
446
 
447
  if issue['is_floor_object']:
448
- report.append(" - ⚠️ Object on floor - requires high visibility!")
449
 
450
  report.append(f" - Boundary size: {issue['boundary_pixels']} pixels")
451
  report.append("")
 
83
  """Calculate WCAG 2.0 contrast ratio between two colors"""
84
  def relative_luminance(rgb):
85
  # Normalize to 0-1
86
+ rgb_norm = np.array(rgb) / 255.0
87
 
88
  # Apply gamma correction (linearize)
89
  rgb_linear = np.where(
90
  rgb_norm <= 0.03928,
91
  rgb_norm / 12.92,
92
+ np.power((rgb_norm + 0.055) / 1.055, 2.4)
93
  )
94
 
95
  # Calculate luminance using ITU-R BT.709 coefficients
96
+ # L = 0.2126 * R + 0.7152 * G + 0.0722 * B
97
+ return 0.2126 * rgb_linear[0] + 0.7152 * rgb_linear[1] + 0.0722 * rgb_linear[2]
98
 
99
  lum1 = relative_luminance(color1)
100
  lum2 = relative_luminance(color2)
 
103
  lighter = max(lum1, lum2)
104
  darker = min(lum1, lum2)
105
 
106
+ # Calculate contrast ratio
107
+ contrast_ratio = (lighter + 0.05) / (darker + 0.05)
108
+
109
+ return contrast_ratio
110
 
111
  def calculate_hue_difference(self, color1: np.ndarray, color2: np.ndarray) -> float:
112
  """Calculate hue difference in degrees (0-180)"""
113
+ # Convert RGB to HSV using colorsys for accuracy
114
+ rgb1 = color1 / 255.0
115
+ rgb2 = color2 / 255.0
116
+
117
+ hsv1 = colorsys.rgb_to_hsv(rgb1[0], rgb1[1], rgb1[2])
118
+ hsv2 = colorsys.rgb_to_hsv(rgb2[0], rgb2[1], rgb2[2])
119
 
120
+ # Calculate circular hue difference (0-1 range converted to 0-180)
121
+ hue_diff = abs(hsv1[0] - hsv2[0]) * 180
122
  if hue_diff > 90:
123
  hue_diff = 180 - hue_diff
124
 
125
  return hue_diff
126
 
127
  def calculate_saturation_difference(self, color1: np.ndarray, color2: np.ndarray) -> float:
128
+ """Calculate saturation difference (0-100)"""
129
+ rgb1 = color1 / 255.0
130
+ rgb2 = color2 / 255.0
131
+
132
+ hsv1 = colorsys.rgb_to_hsv(rgb1[0], rgb1[1], rgb1[2])
133
+ hsv2 = colorsys.rgb_to_hsv(rgb2[0], rgb2[1], rgb2[2])
134
 
135
+ # Return saturation difference as percentage
136
+ return abs(hsv1[1] - hsv2[1]) * 100
137
 
138
  def extract_dominant_color(self, image: np.ndarray, mask: np.ndarray,
139
  sample_size: int = 1000) -> np.ndarray:
 
250
  # Critical: require 7:1 contrast ratio
251
  if wcag_ratio < 7.0:
252
  return False, 'critical'
253
+ # Also check perceptual differences
254
+ if wcag_ratio < 10.0 and hue_diff < 30 and sat_diff < 50:
255
  return False, 'critical'
256
 
257
  elif relationship in high_priority_pairs:
258
  # High priority: require 4.5:1 contrast ratio
259
  if wcag_ratio < 4.5:
260
  return False, 'high'
261
+ # Also check perceptual differences
262
  if wcag_ratio < 7.0 and hue_diff < 20 and sat_diff < 40:
263
  return False, 'high'
264
 
 
266
  # Standard: require 3:1 contrast ratio minimum
267
  if wcag_ratio < 3.0:
268
  return False, 'medium'
269
+ # Also check perceptual differences
270
  if wcag_ratio < 4.5 and hue_diff < 15 and sat_diff < 30:
271
  return False, 'medium'
272
 
 
454
  severity = issue['severity'].upper()
455
 
456
  report.append(f"{i}. [{severity}] {cat1} ↔ {cat2}")
457
+ report.append(f" - WCAG Contrast Ratio: {wcag:.2f}:1")
458
+
459
+ # Add recommended values based on severity
460
+ if severity == 'CRITICAL':
461
+ report.append(f" - Required: 7:1 minimum")
462
+ elif severity == 'HIGH':
463
+ report.append(f" - Required: 4.5:1 minimum")
464
+ else:
465
+ report.append(f" - Required: 3:1 minimum")
466
+
467
  report.append(f" - Hue Difference: {hue_diff:.1f}° (recommended: >30°)")
468
+ report.append(f" - Saturation Difference: {sat_diff:.1f}% (recommended: >50%)")
469
 
470
  if issue['is_floor_object']:
471
+ report.append(" - ⚠️ Floor-object boundary - high visibility required!")
472
 
473
  report.append(f" - Boundary size: {issue['boundary_pixels']} pixels")
474
  report.append("")