Datasets:
import torch
import cv2
import time

# Dataset constants
BASELINE = 0.191  # Baseline in meters
HEIGHT_ORIGINAL = 1920  # Original image height
HEIGHT_DOWNSCALED = 960  # Downscaled height for disparity maps

def compute_depth_from_disparity(disparity_map: torch.Tensor) -> torch.Tensor:
    """
    Convert a disparity map to a depth map based on dataset-specific calibration.

    The depth is computed using trigonometric projection:
        depth = B * (sin(theta) / tan(disparity_rad) + cos(theta))
    where:
        - B is the baseline.
        - theta is the vertical angle corresponding to each pixel in the y-grid.
        - disparity_rad is the disparity map scaled to radians.

    Parameters:
        disparity_map (torch.Tensor): Input tensor of shape (bs, 1, h, w) or (bs, h, w).

    Returns:
        torch.Tensor: Depth map of shape (bs, h, w), or (bs, 1, h, w) if the input had a channel dimension.
    """
    # Ensure input is 3D (batch, height, width)
    has_channel_dim = disparity_map.dim() == 4 and disparity_map.shape[1] == 1
    if has_channel_dim:
        disparity_map = disparity_map.squeeze(1)
    bs, height, width = disparity_map.shape

    # Row indices in the original-resolution image for each row of the map
    # (descending, stride 2, offset 512), broadcast to (bs, height, width)
    y_grid = (
        torch.arange(512 + 2 * height - 1, 512, step=-2, device=disparity_map.device)
        .unsqueeze(0)
        .unsqueeze(-1)
        .expand(bs, -1, width)
    )

    # Convert row indices to vertical angles and disparity from pixels to radians
    theta_grid = y_grid * torch.pi / HEIGHT_ORIGINAL
    disparity_map_rad = (torch.pi / HEIGHT_DOWNSCALED) * disparity_map

    # Compute depth only where disparity is valid (non-zero); invalid pixels stay 0
    depth_map = torch.zeros_like(disparity_map, dtype=torch.float32)
    non_zero_disparity = disparity_map != 0
    depth_map[non_zero_disparity] = (
        (torch.sin(theta_grid[non_zero_disparity]) / torch.tan(disparity_map_rad[non_zero_disparity]))
        + torch.cos(theta_grid[non_zero_disparity])
    ) * BASELINE

    # Restore channel dimension if input had it
    if has_channel_dim:
        depth_map = depth_map.unsqueeze(1)
    return depth_map

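# Illustrative example (not part of the original module): applying the conversion to a
# dummy batch of disparity maps. The shape (1, 1, 64, 512) and the helper name
# `_example_depth_from_disparity` are arbitrary; the zeroed band simulates invalid
# pixels, which keep a depth of 0.
def _example_depth_from_disparity() -> torch.Tensor:
    disparity = torch.full((1, 1, 64, 512), 2.0)   # constant 2-pixel disparity
    disparity[:, :, :, :10] = 0.0                  # a band of invalid pixels
    depth = compute_depth_from_disparity(disparity)
    assert depth.shape == disparity.shape          # channel dimension is preserved
    return depth
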
def compute_disparity_from_depth(depth_map: torch.Tensor) -> torch.Tensor:
    """
    Convert a depth map to a disparity map based on dataset-specific calibration.

    This function inverts the disparity-to-depth conversion, using the relationship:
        tan(disparity_rad) = sin(theta) / (depth / B - cos(theta))
    The final disparity in pixel units is then:
        disparity = (H_downscaled / pi) * disparity_rad
    where:
        - B is the baseline.
        - theta is the vertical angle corresponding to each pixel in the y-grid.
        - disparity_rad is the angular disparity in radians.
        - H_downscaled is the downscaled image height used for disparity maps.

    Parameters:
        depth_map (torch.Tensor): Input tensor of shape (bs, 1, h, w) or (bs, h, w).

    Returns:
        torch.Tensor: Disparity map of shape (bs, h, w), or (bs, 1, h, w) if the input had a channel dimension.
    """
    # Ensure input is 3D (batch, height, width)
    has_channel_dim: bool = depth_map.dim() == 4 and depth_map.shape[1] == 1
    if has_channel_dim:
        depth_map = depth_map.squeeze(1)
    bs, height, width = depth_map.shape

    # Row indices in the original-resolution image for each row of the map
    # (descending, stride 2, offset 512), broadcast to (bs, height, width)
    y_grid = (
        torch.arange(512 + 2 * height - 1, 512, step=-2, device=depth_map.device)
        .unsqueeze(0)
        .unsqueeze(-1)
        .expand(bs, -1, width)
    )

    # Convert row indices to vertical (polar) angles
    theta_grid = y_grid * torch.pi / HEIGHT_ORIGINAL

    # Compute disparity only where depth is valid (non-zero); invalid pixels stay 0
    disparity_map = torch.zeros_like(depth_map, dtype=torch.float32)
    non_zero_depth = depth_map != 0
    tan_disparity_rad = torch.sin(theta_grid[non_zero_depth]) / (
        (depth_map[non_zero_depth] / BASELINE) - torch.cos(theta_grid[non_zero_depth])
    )
    disparity_map_rad = torch.atan(tan_disparity_rad)
    disparity_map[non_zero_depth] = (HEIGHT_DOWNSCALED / torch.pi) * disparity_map_rad

    # Restore channel dimension if input had it
    if has_channel_dim:
        disparity_map = disparity_map.unsqueeze(1)
    return disparity_map

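# Illustrative consistency check (not part of the original module): converting a
# disparity map to depth and back should recover the original values at valid pixels,
# since the two functions above are analytical inverses of each other. The helper name
# `_example_round_trip` and the tensor shape are arbitrary.
def _example_round_trip() -> None:
    disparity = torch.rand(2, 64, 512) * 5.0 + 0.5     # strictly positive disparities
    recovered = compute_disparity_from_depth(compute_depth_from_disparity(disparity))
    assert torch.allclose(recovered, disparity, atol=1e-4)
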
def disp_deg_to_disp_pix(disp_deg: float) -> float:
    """
    Convert a disparity value from degrees to pixels.

    The relationship is:
        disp_pix = (H / 180) * disp_deg
    where:
        - H is the downscaled image height used for disparity maps (HEIGHT_DOWNSCALED).
        - disp_deg is the disparity value in degrees.

    Parameters:
        disp_deg (float): Disparity in degrees.

    Returns:
        float: Disparity in pixels.
    """
    return (HEIGHT_DOWNSCALED / 180) * disp_deg

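# Worked example (illustrative): with HEIGHT_DOWNSCALED = 960, one degree of angular
# disparity corresponds to 960 / 180 ≈ 5.33 pixels, so disp_deg_to_disp_pix(0.75)
# returns 4.0; disp_pix_to_disp_deg below is the exact inverse of this conversion.
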
def disp_pix_to_disp_deg(disp_pix: float) -> float:
    """
    Convert a disparity value from pixels to degrees.

    The relationship is:
        disp_deg = (180 / H) * disp_pix
    where:
        - H is the downscaled image height used for disparity maps (HEIGHT_DOWNSCALED).
        - disp_pix is the disparity value in pixels.

    Parameters:
        disp_pix (float): Disparity in pixels.

    Returns:
        float: Disparity in degrees.
    """
    return (180 / HEIGHT_DOWNSCALED) * disp_pix

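# NOTE: `load_depth_with_infinite_retry` is called by the readers below but is not
# defined in this excerpt. The sketch here is an assumption about its behavior,
# inferred from the name and from the cv2/time imports at the top of the file:
# read a 16-bit depth/disparity image, retrying until the read succeeds. The
# `retry_delay_s` parameter is hypothetical.
def load_depth_with_infinite_retry(filename: str, retry_delay_s: float = 1.0) -> torch.Tensor:
    """Load a 16-bit image as a float32 tensor, retrying indefinitely on I/O failure."""
    while True:
        # IMREAD_ANYDEPTH preserves the 16-bit values instead of converting to 8 bits
        img = cv2.imread(filename, cv2.IMREAD_ANYDEPTH)
        if img is not None:
            return torch.from_numpy(img.astype("float32"))
        time.sleep(retry_delay_s)  # transient failure: wait and retry
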
def readDepthHelvipad(filename: str) -> tuple[torch.Tensor, torch.Tensor]:
    """
    Read a depth map from a Helvipad dataset file.

    The depth is stored in a 16-bit format and needs to be scaled by 1/256.

    Parameters:
        filename (str): Path to the depth file.

    Returns:
        tuple[torch.Tensor, torch.Tensor]:
            - Depth map tensor (H, W).
            - Validity mask tensor (H, W) indicating valid depth values.
    """
    depth = load_depth_with_infinite_retry(filename) / 256.0
    valid = depth > 0.0
    return depth, valid

def readDisparityHelvipad(filename: str) -> tuple[torch.Tensor, torch.Tensor]:
    """
    Read a disparity map from a Helvipad dataset file.

    The disparity is stored in a 16-bit format and needs to be scaled by 1/2048.

    Parameters:
        filename (str): Path to the disparity file.

    Returns:
        tuple[torch.Tensor, torch.Tensor]:
            - Disparity map tensor (H, W).
            - Validity mask tensor (H, W) indicating valid disparity values.
    """
    disp = load_depth_with_infinite_retry(filename) / 2048.0
    valid = disp > 0.0
    return disp, valid
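
# Minimal usage sketch (not part of the original module). The file path is a
# placeholder; the disparity map is assumed to be a 16-bit image as expected by
# readDisparityHelvipad above.
if __name__ == "__main__":
    disp, valid = readDisparityHelvipad("path/to/disparity.png")  # hypothetical path
    # Add a batch dimension, convert to depth, and report basic statistics
    depth = compute_depth_from_disparity(disp.unsqueeze(0)).squeeze(0)
    print(f"valid pixels: {valid.sum().item()}, max depth: {depth.max().item():.2f} m")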