Commit · 0a28218
1 Parent(s): 8fa31fb
Ensure everything shaped 40x40
Browse files
app.py
CHANGED
@@ -101,7 +101,7 @@ def interpret_rgb_class(decoded_img):
|
|
101 |
ambiguous_rgb = np.array([150, 5, 61])
|
102 |
matches = np.all(np.abs(decoded_img - ambiguous_rgb) <= TOL, axis=-1)
|
103 |
match_ratio = np.count_nonzero(matches) / matches.size
|
104 |
-
return "garbage" if match_ratio > 0.
|
105 |
|
106 |
# Masking zones (Garbage and Water zone to be travelable)
|
107 |
def build_masks(seg):
|
@@ -149,7 +149,6 @@ def highlight_chunk_masks_on_frame(frame, labels, objs, color_uncollected=(0, 0,
|
|
149 |
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
|
150 |
color = color_collected if obj["col"] else color_uncollected
|
151 |
cv2.drawContours(overlay, contours, -1, color, thickness=cv2.FILLED)
|
152 |
-
|
153 |
# Blend overlay with original frame using alpha
|
154 |
return cv2.addWeighted(overlay, alpha, frame, 1 - alpha, 0)
|
155 |
|
@@ -465,13 +464,25 @@ def _pipeline(uid,img_path):
|
|
465 |
color = (0, 0, 128) if not o["col"] else (0, 128, 0)
|
466 |
x, y = o["pos"]
|
467 |
cv2.circle(frame, (x, y), 6, color, -1)
|
468 |
-
#
|
469 |
robot.step(path)
|
470 |
-
rx,ry=robot.pos
|
471 |
-
|
472 |
-
|
473 |
-
|
474 |
-
|
475 |
# collection check
|
476 |
for o in objs:
|
477 |
if not o["col"] and np.hypot(o["pos"][0]-rx,o["pos"][1]-ry)<=20:
|
|
|
101 |
ambiguous_rgb = np.array([150, 5, 61])
|
102 |
matches = np.all(np.abs(decoded_img - ambiguous_rgb) <= TOL, axis=-1)
|
103 |
match_ratio = np.count_nonzero(matches) / matches.size
|
104 |
+
return "garbage" if match_ratio > 0.3 else "sand"
|
105 |
|
106 |
# Masking zones (Garbage and Water zone to be travelable)
|
107 |
def build_masks(seg):
|
|
|
149 |
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
|
150 |
color = color_collected if obj["col"] else color_uncollected
|
151 |
cv2.drawContours(overlay, contours, -1, color, thickness=cv2.FILLED)
|
|
|
152 |
# Blend overlay with original frame using alpha
|
153 |
return cv2.addWeighted(overlay, alpha, frame, 1 - alpha, 0)
|
154 |
|
|
|
464 |
color = (0, 0, 128) if not o["col"] else (0, 128, 0)
|
465 |
x, y = o["pos"]
|
466 |
cv2.circle(frame, (x, y), 6, color, -1)
|
467 |
+
# Robot displacement
|
468 |
robot.step(path)
|
469 |
+
rx, ry = robot.pos
|
470 |
+
sp = robot.png
|
471 |
+
# Calculate actual available space (clamp to 640x640)
|
472 |
+
h, w = frame.shape[:2]
|
473 |
+
end_x = min(rx + 40, w)
|
474 |
+
end_y = min(ry + 40, h)
|
475 |
+
crop_w = end_x - rx
|
476 |
+
crop_h = end_y - ry
|
477 |
+
# Crop sprite and alpha accordingly
|
478 |
+
sprite_rgb = sp[:crop_h, :crop_w, :3]
|
479 |
+
alpha = sp[:crop_h, :crop_w, 3] / 255.0
|
480 |
+
alpha = np.stack([alpha] * 3, axis=-1)
|
481 |
+
# Crop background region
|
482 |
+
bgroi = frame[ry:end_y, rx:end_x]
|
483 |
+
blended = (alpha * sprite_rgb + (1 - alpha) * bgroi).astype(np.uint8)
|
484 |
+
# Update frame with blended result
|
485 |
+
frame[ry:end_y, rx:end_x] = blended
|
486 |
# collection check
|
487 |
for o in objs:
|
488 |
if not o["col"] and np.hypot(o["pos"][0]-rx,o["pos"][1]-ry)<=20:
|