add back normal map visualization

apps/infer.py  CHANGED  (+16, -5)

@@ -269,26 +269,37 @@ def generate_model(in_path, model_type):
     os.makedirs(os.path.join(config_dict['out_dir'],
                 cfg.name, "obj"), exist_ok=True)

-
+    norm_pred_F = (
         ((in_tensor["normal_F"][0].permute(1, 2, 0) + 1.0) * 255.0 / 2.0)
         .detach()
         .cpu()
         .numpy()
         .astype(np.uint8)
     )
+
+    norm_pred_B = (
+        ((in_tensor["normal_B"][0].permute(1, 2, 0) + 1.0) * 255.0 / 2.0)
+        .detach()
+        .cpu()
+        .numpy()
+        .astype(np.uint8)
+    )

-
+    norm_orig_F = unwrap(norm_pred_F, data)
+    norm_orig_B = unwrap(norm_pred_B, data)
+
     mask_orig = unwrap(
         np.repeat(
             data["mask"].permute(1, 2, 0).detach().cpu().numpy(), 3, axis=2
         ).astype(np.uint8),
         data,
     )
-
+    rgb_norm_F = blend_rgb_norm(data["ori_image"], norm_orig_F, mask_orig)
+    rgb_norm_B = blend_rgb_norm(data["ori_image"], norm_orig_B, mask_orig)

     Image.fromarray(
         np.concatenate(
-            [data["ori_image"].astype(np.uint8),
+            [data["ori_image"].astype(np.uint8), rgb_norm_F, rgb_norm_B], axis=1)
     ).save(os.path.join(config_dict['out_dir'], cfg.name, f"png/{data['name']}_overlap.png"))

     smpl_obj = trimesh.Trimesh(
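
The added block maps each predicted normal tensor (in_tensor["normal_F"], in_tensor["normal_B"]) from the [-1, 1] range into 0-255 bytes, warps it back to the original image frame with unwrap(...), and composites it over the input photo inside the person mask via blend_rgb_norm(...). The sketch below restates the two steps visible in the diff; normal_to_uint8 mirrors the inlined conversion, while blend_over_image is a hypothetical stand-in for blend_rgb_norm, whose internals are not shown here, so the mask-based compositing is an assumption:

import numpy as np
import torch


def normal_to_uint8(normal: torch.Tensor) -> np.ndarray:
    # (3, H, W) normals in [-1, 1] -> (H, W, 3) uint8 image,
    # the same mapping the added norm_pred_F / norm_pred_B lines inline.
    return (
        ((normal.permute(1, 2, 0) + 1.0) * 255.0 / 2.0)
        .detach()
        .cpu()
        .numpy()
        .astype(np.uint8)
    )


def blend_over_image(image: np.ndarray, normal_rgb: np.ndarray, mask: np.ndarray) -> np.ndarray:
    # Hypothetical stand-in for the repo's blend_rgb_norm helper: keep the
    # original photo outside the person mask and show the colorized normals
    # inside it. The real helper may composite differently (e.g. alpha blending).
    mask = (mask > 0).astype(np.uint8)
    return (image * (1 - mask) + normal_rgb * mask).astype(np.uint8)
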

@@ -448,7 +459,7 @@ def generate_model(in_path, model_type):
     dataset.render.load_meshes(
         verts_lst, faces_lst)
     dataset.render.get_rendered_video(
-        [data["ori_image"],
+        [data["ori_image"], rgb_norm_F, rgb_norm_B],
         os.path.join(config_dict['out_dir'], cfg.name,
             f"vid/{data['name']}_cloth.mp4"),
     )
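
Both blended normal overlays are reused twice: concatenated next to the original image for the _overlap.png strip, and passed as background panels to dataset.render.get_rendered_video for the _cloth.mp4 clip (its internals are not part of this diff). A horizontal concat along axis=1 only works if all panels share the same height, channel count, and dtype; below is a minimal sketch of that assembly step, with illustrative names that are not from the repo:

import numpy as np


def make_overlap_strip(*panels: np.ndarray) -> np.ndarray:
    # Tile H x W x 3 uint8 panels side by side, as the _overlap.png export does.
    frames = [np.asarray(p).astype(np.uint8) for p in panels]
    assert len({f.shape[0] for f in frames}) == 1, \
        "all panels must share the same height for an axis=1 concat"
    return np.concatenate(frames, axis=1)


# e.g. strip = make_overlap_strip(ori_image, rgb_norm_F, rgb_norm_B)
#      Image.fromarray(strip).save(out_path)
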