TensorBoard
quantaji committed commit c507eb4 (verified) · Parent: 93cc7ff

Upload folder using huggingface_hub

Note: this view is limited to 50 files because the commit contains too many changes.

Files changed (50)
  1. .gitattributes +2 -0
  2. scannet/.DS_Store +0 -0
  3. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/.DS_Store +0 -0
  4. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/.DS_Store +0 -0
  5. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/__init__.py +0 -0
  6. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/__init__.py +25 -0
  7. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/alc.py +161 -0
  8. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/arkitscenes.py +114 -0
  9. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/builder.py +15 -0
  10. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/dataloader.py +112 -0
  11. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/defaults.py +297 -0
  12. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/modelnet.py +150 -0
  13. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/nuscenes.py +120 -0
  14. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/alc/preprocess_arkitscenes_labelmaker_consensus.py +375 -0
  15. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/arkitscenes/preprocess_arkitscenes_mesh.py +87 -0
  16. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/nuscenes/preprocess_nuscenes_info.py +607 -0
  17. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/s3dis/preprocess_s3dis.py +233 -0
  18. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/sampling_chunking_data.py +149 -0
  19. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_ObjClassification-ShapeNetCore55.txt +17 -0
  20. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_SemVoxLabel-nyu40id.txt +20 -0
  21. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet200_constants.py +704 -0
  22. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet200_splits.py +625 -0
  23. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet_means.npz +3 -0
  24. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_test.txt +312 -0
  25. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_train.txt +1045 -0
  26. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_val.txt +156 -0
  27. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2-labels-old.combined.tsv +608 -0
  28. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2-labels.combined.tsv +608 -0
  29. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_test.txt +100 -0
  30. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_train.txt +1201 -0
  31. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_val.txt +312 -0
  32. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/preprocess_scannet.py +255 -0
  33. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/SensorData.py +183 -0
  34. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/compute_full_overlapping.py +91 -0
  35. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/generage_list.py +33 -0
  36. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/plyfile.py +894 -0
  37. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/point_cloud_extractor.py +98 -0
  38. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/preprocess.py +51 -0
  39. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/reader.py +33 -0
  40. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannetpp/preprocess_scannetpp.py +252 -0
  41. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/structured3d/preprocess_structured3d.py +420 -0
  42. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/waymo/3d_semseg_test_set_frames.txt +0 -0
  43. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/waymo/preprocess_waymo.py +387 -0
  44. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/s3dis.py +18 -0
  45. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannet.py +290 -0
  46. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannet_pair.py +89 -0
  47. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannetpp.py +78 -0
  48. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/semantic_kitti.py +139 -0
  49. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/shapenet_part.py +160 -0
  50. scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/structure3d.py +61 -0
.gitattributes CHANGED
@@ -36,3 +36,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 scannet200/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/train.log filter=lfs diff=lfs merge=lfs -text
 scannet200/semseg-pt-v3m1-1-ppt-extreme-with-alc/train.log filter=lfs diff=lfs merge=lfs -text
 scannet200/semseg-pt-v3m1-1-ppt-extreme-with-alc-submit/train.log filter=lfs diff=lfs merge=lfs -text
+scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/train.log filter=lfs diff=lfs merge=lfs -text
+scannet/semseg-pt-v3m1-1-ppt-extreme-with-alc/train.log filter=lfs diff=lfs merge=lfs -text
scannet/.DS_Store ADDED
Binary file (6.15 kB)

scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/.DS_Store ADDED
Binary file (6.15 kB)

scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/.DS_Store ADDED
Binary file (6.15 kB)

scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/__init__.py ADDED
File without changes
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/__init__.py ADDED
@@ -0,0 +1,25 @@
+from .defaults import DefaultDataset, ConcatDataset
+from .builder import build_dataset
+from .utils import point_collate_fn, collate_fn
+
+# indoor scene
+from .s3dis import S3DISDataset
+from .scannet import ScanNetDataset, ScanNet200Dataset
+from .scannetpp import ScanNetPPDataset
+from .scannet_pair import ScanNetPairDataset
+from .arkitscenes import ArkitScenesDataset
+from .structure3d import Structured3DDataset
+from .alc import ARKitScenesLabelMakerConsensusDataset, ARKitScenesLabelMakerScanNet200Dataset
+from .scannetpp import ScanNetPPDataset
+
+# outdoor scene
+from .semantic_kitti import SemanticKITTIDataset
+from .nuscenes import NuScenesDataset
+from .waymo import WaymoDataset
+
+# object
+from .modelnet import ModelNetDataset
+from .shapenet_part import ShapeNetPartDataset
+
+# dataloader
+from .dataloader import MultiDatasetDataloader
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/alc.py ADDED
@@ -0,0 +1,161 @@
+import glob
+import os
+from collections.abc import Sequence
+from copy import deepcopy
+
+import numpy as np
+import torch
+from labelmaker.label_data import get_wordnet
+from torch.utils.data import Dataset
+
+from pointcept.utils.cache import shared_dict
+from pointcept.utils.logger import get_root_logger
+
+from .builder import DATASETS
+from .preprocessing.alc.preprocess_arkitscenes_labelmaker_consensus import get_wordnet_compact_mapping
+from .preprocessing.scannet.meta_data.scannet200_constants import VALID_CLASS_IDS_20, VALID_CLASS_IDS_200
+from .transform import TRANSFORMS, Compose
+
+
+@DATASETS.register_module()
+class ARKitScenesLabelMakerConsensusDataset(Dataset):
+
+    label_key = "semantic_pseudo_gt_wn199"
+
+    def __init__(
+        self,
+        split="train",
+        data_root="data/alc",
+        transform=None,
+        ignore_index=-1,
+        test_mode=False,
+        test_cfg=None,
+        cache=False,
+        loop=1,
+    ):
+        super(ARKitScenesLabelMakerConsensusDataset, self).__init__()
+        self.get_class_to_id()
+
+        self.data_root = data_root
+        self.split = split
+        self.transform = Compose(transform)
+        self.cache = cache
+        self.loop = loop if not test_mode else 1  # force loop = 1 in test mode
+        self.test_mode = test_mode
+        self.test_cfg = test_cfg if test_mode else None
+
+        if test_mode:
+            self.test_voxelize = TRANSFORMS.build(self.test_cfg.voxelize)
+            self.test_crop = TRANSFORMS.build(self.test_cfg.crop) if self.test_cfg.crop else None
+            self.post_transform = Compose(self.test_cfg.post_transform)
+            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+        self.data_list = self.get_data_list()
+
+        self.ignore_index = ignore_index
+
+        logger = get_root_logger()
+        logger.info(
+            "Totally {} x {} samples in {} set.".format(
+                len(self.data_list),
+                self.loop,
+                split,
+            )
+        )
+
+    def get_class_to_id(self):
+        self.class2id = get_wordnet_compact_mapping()[0]
+
+    def get_data_list(self):
+        if isinstance(self.split, str):
+            data_list = glob.glob(os.path.join(self.data_root, self.split, "*.pth"))
+        elif isinstance(self.split, Sequence):
+            data_list = []
+            for split in self.split:
+                data_list += glob.glob(os.path.join(self.data_root, split, "*.pth"))
+        else:
+            raise NotImplementedError
+        return data_list
+
+    def get_data(self, idx):
+        data_path = self.data_list[idx % len(self.data_list)]
+
+        if not self.cache:
+            data = torch.load(data_path)
+        else:
+            data_name = data_path.replace(os.path.dirname(self.data_root), "").split(".")[0]
+            cache_name = "pointcept" + data_name.replace(os.path.sep, "-")
+            data = shared_dict(cache_name)
+
+        coord = data["coord"]
+        color = data["color"]
+        normal = data["normal"]
+        scene_id = data["scene_id"]
+        if self.label_key in data.keys():
+            segment = data[self.label_key].reshape(-1)
+        else:
+            segment = np.ones(coord.shape[0]) * -1
+        instance = np.ones(coord.shape[0]) * -1
+
+        data_dict = dict(
+            coord=coord,
+            color=color,
+            segment=segment,
+            instance=instance,
+            scene_id=scene_id,
+        )
+
+        if normal is not None:
+            data_dict["normal"] = normal
+
+        return data_dict
+
+    def get_data_name(self, idx):
+        return os.path.basename(self.data_list[idx % len(self.data_list)]).split(".")[0]
+
+    def prepare_train_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        return data_dict
+
+    def prepare_test_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        segment = data_dict.pop("segment")
+        data_dict = self.transform(data_dict)
+        data_dict_list = []
+        for aug in self.aug_transform:
+            data_dict_list.append(aug(deepcopy(data_dict)))
+
+        input_dict_list = []
+        for data in data_dict_list:
+            data_part_list = self.test_voxelize(data)
+            for data_part in data_part_list:
+                if self.test_crop:
+                    data_part = self.test_crop(data_part)
+                else:
+                    data_part = [data_part]
+                input_dict_list += data_part
+
+        for i in range(len(input_dict_list)):
+            input_dict_list[i] = self.post_transform(input_dict_list[i])
+        data_dict = dict(fragment_list=input_dict_list, segment=segment, name=self.get_data_name(idx))
+        return data_dict
+
+    def __getitem__(self, idx):
+        if self.test_mode:
+            return self.prepare_test_data(idx)
+        else:
+            return self.prepare_train_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
+
+
+@DATASETS.register_module()
+class ARKitScenesLabelMakerScanNet200Dataset(ARKitScenesLabelMakerConsensusDataset):
+    label_key = "semantic_pseudo_gt_scannet200"
+
+    def get_class_to_id(self):
+        self.class2id = np.array(VALID_CLASS_IDS_200)
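
Note (not part of the commit): a minimal sketch of the shared-memory cache key that get_data derives when cache=True; the paths below are hypothetical examples, only the string logic mirrors the method above.

import os

data_root = "data/alc"  # hypothetical
data_path = "data/alc/train/scene_0001.pth"  # hypothetical
# mirror the key construction in ARKitScenesLabelMakerConsensusDataset.get_data
data_name = data_path.replace(os.path.dirname(data_root), "").split(".")[0]
cache_name = "pointcept" + data_name.replace(os.path.sep, "-")
print(cache_name)  # "pointcept-alc-train-scene_0001" on POSIX systems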
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/arkitscenes.py ADDED
@@ -0,0 +1,114 @@
+"""
+ArkitScenes Dataset
+
+Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+Please cite our work if the code is helpful to you.
+"""
+
+import os
+import glob
+import numpy as np
+import torch
+from copy import deepcopy
+from torch.utils.data import Dataset
+
+from pointcept.utils.logger import get_root_logger
+from .builder import DATASETS
+from .transform import Compose, TRANSFORMS
+from .preprocessing.scannet.meta_data.scannet200_constants import VALID_CLASS_IDS_200
+
+
+@DATASETS.register_module()
+class ArkitScenesDataset(Dataset):
+    def __init__(
+        self,
+        split="Training",
+        data_root="data/ARKitScenesMesh",
+        transform=None,
+        test_mode=False,
+        test_cfg=None,
+        loop=1,
+    ):
+        super(ArkitScenesDataset, self).__init__()
+        self.data_root = data_root
+        self.split = split
+        self.transform = Compose(transform)
+        self.loop = (
+            loop if not test_mode else 1
+        )  # force loop = 1 in test mode
+        self.test_mode = test_mode
+        self.test_cfg = test_cfg if test_mode else None
+        self.class2id = np.array(VALID_CLASS_IDS_200)
+
+        if test_mode:
+            self.test_voxelize = TRANSFORMS.build(self.test_cfg.voxelize)
+            self.test_crop = TRANSFORMS.build(self.test_cfg.crop)
+            self.post_transform = Compose(self.test_cfg.post_transform)
+            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+        self.data_list = self.get_data_list()
+        logger = get_root_logger()
+        logger.info(
+            "Totally {} x {} samples in {} set.".format(
+                len(self.data_list), self.loop, split
+            )
+        )
+
+    def get_data_list(self):
+        if isinstance(self.split, str):
+            data_list = glob.glob(os.path.join(self.data_root, self.split, "*.pth"))
+        elif isinstance(self.split, list):
+            data_list = []
+            for split in self.split:
+                data_list += glob.glob(os.path.join(self.data_root, split, "*.pth"))
+        else:
+            raise NotImplementedError
+        return data_list
+
+    def get_data(self, idx):
+        data = torch.load(self.data_list[idx % len(self.data_list)])
+        coord = data["coord"]
+        color = data["color"]
+        normal = data["normal"]
+        segment = np.zeros(coord.shape[0])
+        data_dict = dict(coord=coord, normal=normal, color=color, segment=segment)
+        return data_dict
+
+    def get_data_name(self, idx):
+        data_idx = idx % len(self.data_list)
+        return os.path.basename(self.data_list[data_idx]).split(".")[0]
+
+    def prepare_train_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        return data_dict
+
+    def prepare_test_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        segment = data_dict.pop("segment")
+        data_dict = self.transform(data_dict)
+        data_dict_list = []
+        for aug in self.aug_transform:
+            data_dict_list.append(aug(deepcopy(data_dict)))
+
+        input_dict_list = []
+        for data in data_dict_list:
+            data_part_list = self.test_voxelize(data)
+            for data_part in data_part_list:
+                data_part_list = self.test_crop(data_part)
+                input_dict_list += data_part_list
+
+        for i in range(len(input_dict_list)):
+            input_dict_list[i] = self.post_transform(input_dict_list[i])
+        return input_dict_list, segment
+
+    def __getitem__(self, idx):
+        if self.test_mode:
+            return self.prepare_test_data(idx)
+        else:
+            return self.prepare_train_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/builder.py ADDED
@@ -0,0 +1,15 @@
+"""
+Dataset Builder
+
+Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+Please cite our work if the code is helpful to you.
+"""
+
+from pointcept.utils.registry import Registry
+
+DATASETS = Registry("datasets")
+
+
+def build_dataset(cfg):
+    """Build datasets."""
+    return DATASETS.build(cfg)
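
Note (not part of the commit): a hedged sketch of how a dataset registered above would be built from a config dict, assuming the registry follows the mmcv-style convention of resolving a "type" key; the config values are hypothetical.

from pointcept.datasets import build_dataset

cfg = dict(
    type="ARKitScenesLabelMakerConsensusDataset",  # name registered via @DATASETS.register_module()
    split="train",
    data_root="data/alc",  # hypothetical path
    transform=None,
)
dataset = build_dataset(cfg)  # looks up "type" in DATASETS and instantiates the class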
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/dataloader.py ADDED
@@ -0,0 +1,112 @@
+from functools import partial
+import weakref
+import torch
+import torch.utils.data
+
+import pointcept.utils.comm as comm
+from pointcept.datasets.utils import point_collate_fn
+from pointcept.datasets import ConcatDataset
+from pointcept.utils.env import set_seed
+
+
+class MultiDatasetDummySampler:
+    def __init__(self):
+        self.dataloader = None
+
+    def set_epoch(self, epoch):
+        if comm.get_world_size() > 1:
+            for dataloader in self.dataloader.dataloaders:
+                dataloader.sampler.set_epoch(epoch)
+        return
+
+
+class MultiDatasetDataloader:
+    """
+    Multi-dataset dataloader: each batch is drawn from a single sub-dataset, and the mixing ratio between sub-datasets is set by each sub-dataset's loop value.
+    The overall length is determined by the main (first) dataset and the loop of the concat dataset.
+    """
+
+    def __init__(
+        self,
+        concat_dataset: ConcatDataset,
+        batch_size_per_gpu: int,
+        num_worker_per_gpu: int,
+        mix_prob=0,
+        seed=None,
+    ):
+        self.datasets = concat_dataset.datasets
+        self.ratios = [dataset.loop for dataset in self.datasets]
+        # reset data loop, original loop serves as ratios
+        for dataset in self.datasets:
+            dataset.loop = 1
+        # determine union training epoch by main dataset
+        self.datasets[0].loop = concat_dataset.loop
+        # build sub-dataloaders
+        num_workers = num_worker_per_gpu // len(self.datasets)
+        self.dataloaders = []
+        for dataset_id, dataset in enumerate(self.datasets):
+            if comm.get_world_size() > 1:
+                sampler = torch.utils.data.distributed.DistributedSampler(dataset)
+            else:
+                sampler = None
+
+            init_fn = (
+                partial(
+                    self._worker_init_fn,
+                    dataset_id=dataset_id,
+                    num_workers=num_workers,
+                    num_datasets=len(self.datasets),
+                    rank=comm.get_rank(),
+                    seed=seed,
+                )
+                if seed is not None
+                else None
+            )
+            self.dataloaders.append(
+                torch.utils.data.DataLoader(
+                    dataset,
+                    batch_size=batch_size_per_gpu,
+                    shuffle=(sampler is None),
+                    num_workers=num_worker_per_gpu,
+                    sampler=sampler,
+                    collate_fn=partial(point_collate_fn, mix_prob=mix_prob),
+                    pin_memory=True,
+                    worker_init_fn=init_fn,
+                    drop_last=True,
+                    persistent_workers=True,
+                )
+            )
+        self.sampler = MultiDatasetDummySampler()
+        self.sampler.dataloader = weakref.proxy(self)
+
+    def __iter__(self):
+        iterator = [iter(dataloader) for dataloader in self.dataloaders]
+        while True:
+            for i in range(len(self.ratios)):
+                for _ in range(self.ratios[i]):
+                    try:
+                        batch = next(iterator[i])
+                    except StopIteration:
+                        if i == 0:
+                            return
+                        else:
+                            iterator[i] = iter(self.dataloaders[i])
+                            batch = next(iterator[i])
+                    yield batch
+
+    def __len__(self):
+        main_data_loader_length = len(self.dataloaders[0])
+        return (
+            main_data_loader_length // self.ratios[0] * sum(self.ratios)
+            + main_data_loader_length % self.ratios[0]
+        )
+
+    @staticmethod
+    def _worker_init_fn(worker_id, num_workers, dataset_id, num_datasets, rank, seed):
+        worker_seed = (
+            num_workers * num_datasets * rank
+            + num_workers * dataset_id
+            + worker_id
+            + seed
+        )
+        set_seed(worker_seed)
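
Note (not part of the commit): a worked example of the __len__ arithmetic above, with hypothetical values. With ratios [2, 1], the iterator yields two batches from the main dataset for every one batch from the second sub-dataset.

ratios = [2, 1]   # loop values of the sub-datasets, reused as mixing ratios
main_len = 10     # hypothetical length of the main (first) sub-dataloader
total = main_len // ratios[0] * sum(ratios) + main_len % ratios[0]
print(total)      # 15 = 10 // 2 * (2 + 1) + 10 % 2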
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/defaults.py ADDED
@@ -0,0 +1,297 @@
+"""
+Default Datasets
+
+Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+Please cite our work if the code is helpful to you.
+"""
+
+import os
+import glob
+import numpy as np
+import torch
+from copy import deepcopy
+from torch.utils.data import Dataset
+from collections.abc import Sequence
+
+from pointcept.utils.logger import get_root_logger
+from pointcept.utils.cache import shared_dict
+from .builder import DATASETS, build_dataset
+from .transform import Compose, TRANSFORMS
+
+
+@DATASETS.register_module()
+class DefaultDataset(Dataset):
+    def __init__(
+        self,
+        split="train",
+        data_root="data/dataset",
+        transform=None,
+        test_mode=False,
+        test_cfg=None,
+        loop=1,
+    ):
+        super(DefaultDataset, self).__init__()
+        self.data_root = data_root
+        self.split = split
+        self.transform = Compose(transform)
+        self.loop = loop if not test_mode else 1  # force loop = 1 in test mode
+        self.test_mode = test_mode
+        self.test_cfg = test_cfg if test_mode else None
+
+        if test_mode:
+            self.test_voxelize = TRANSFORMS.build(self.test_cfg.voxelize) if self.test_cfg.voxelize is not None else None
+            self.test_crop = TRANSFORMS.build(self.test_cfg.crop) if self.test_cfg.crop is not None else None
+            self.post_transform = Compose(self.test_cfg.post_transform)
+            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+        self.data_list = self.get_data_list()
+        logger = get_root_logger()
+        logger.info("Totally {} x {} samples in {} set.".format(len(self.data_list), self.loop, split))
+
+    def get_data_list(self):
+        if isinstance(self.split, str):
+            data_list = glob.glob(os.path.join(self.data_root, self.split, "*.pth"))
+        elif isinstance(self.split, Sequence):
+            data_list = []
+            for split in self.split:
+                data_list += glob.glob(os.path.join(self.data_root, split, "*.pth"))
+        else:
+            raise NotImplementedError
+        return data_list
+
+    def get_data(self, idx):
+        data = torch.load(self.data_list[idx % len(self.data_list)])
+        coord = data["coord"]
+        color = data["color"]
+        normal = data["normal"]
+        if "semantic_gt" in data.keys():
+            segment = data["semantic_gt"].reshape([-1])
+        else:
+            segment = np.ones(coord.shape[0]) * -1
+        data_dict = dict(coord=coord, normal=normal, color=color, segment=segment)
+        return data_dict
+
+    def get_data_name(self, idx):
+        return os.path.basename(self.data_list[idx % len(self.data_list)]).split(".")[0]
+
+    def prepare_train_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        return data_dict
+
+    def prepare_test_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        result_dict = dict(segment=data_dict.pop("segment"), name=self.get_data_name(idx))
+        if "origin_segment" in data_dict:
+            assert "inverse" in data_dict
+            result_dict["origin_segment"] = data_dict.pop("origin_segment")
+            result_dict["inverse"] = data_dict.pop("inverse")
+
+        data_dict_list = []
+        for aug in self.aug_transform:
+            data_dict_list.append(aug(deepcopy(data_dict)))
+
+        fragment_list = []
+        for data in data_dict_list:
+            if self.test_voxelize is not None:
+                data_part_list = self.test_voxelize(data)
+            else:
+                data["index"] = np.arange(data["coord"].shape[0])
+                data_part_list = [data]
+            for data_part in data_part_list:
+                if self.test_crop is not None:
+                    data_part = self.test_crop(data_part)
+                else:
+                    data_part = [data_part]
+                fragment_list += data_part
+
+        for i in range(len(fragment_list)):
+            fragment_list[i] = self.post_transform(fragment_list[i])
+        result_dict["fragment_list"] = fragment_list
+        return result_dict
+
+    def __getitem__(self, idx):
+        if self.test_mode:
+            return self.prepare_test_data(idx)
+        else:
+            return self.prepare_train_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
+
+
+@DATASETS.register_module()
+class DefaultDatasetV2(Dataset):
+    VALID_ASSETS = [
+        "coord",
+        "color",
+        "normal",
+        "strength",
+        "segment",
+        "instance",
+        "pose",
+    ]
+
+    def __init__(
+        self,
+        split="train",
+        data_root="data/dataset",
+        transform=None,
+        test_mode=False,
+        test_cfg=None,
+        cache=False,
+        ignore_index=-1,
+        loop=1,
+    ):
+        super(DefaultDatasetV2, self).__init__()
+        self.data_root = data_root
+        self.split = split
+        self.transform = Compose(transform)
+        self.cache = cache
+        self.ignore_index = ignore_index
+        self.loop = loop if not test_mode else 1  # force loop = 1 in test mode
+        self.test_mode = test_mode
+        self.test_cfg = test_cfg if test_mode else None
+
+        if test_mode:
+            self.test_voxelize = TRANSFORMS.build(self.test_cfg.voxelize)
+            self.test_crop = TRANSFORMS.build(self.test_cfg.crop) if self.test_cfg.crop else None
+            self.post_transform = Compose(self.test_cfg.post_transform)
+            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+        self.data_list = self.get_data_list()
+        logger = get_root_logger()
+        logger.info("Totally {} x {} samples in {} set.".format(len(self.data_list), self.loop, split))
+
+    def get_data_list(self):
+        if isinstance(self.split, str):
+            data_list = glob.glob(os.path.join(self.data_root, self.split, "*"))
+        elif isinstance(self.split, Sequence):
+            data_list = []
+            for split in self.split:
+                data_list += glob.glob(os.path.join(self.data_root, split, "*"))
+        else:
+            raise NotImplementedError
+        return data_list
+
+    def get_data(self, idx):
+        data_path = self.data_list[idx % len(self.data_list)]
+        name = self.get_data_name(idx)
+        if self.cache:
+            cache_name = f"pointcept-{name}"
+            return shared_dict(cache_name)
+
+        data_dict = {}
+        assets = os.listdir(data_path)
+        for asset in assets:
+            if not asset.endswith(".npy"):
+                continue
+            if asset[:-4] not in self.VALID_ASSETS:
+                continue
+            data_dict[asset[:-4]] = np.load(os.path.join(data_path, asset))
+        data_dict["name"] = name
+
+        if "coord" in data_dict.keys():
+            data_dict["coord"] = data_dict["coord"].astype(np.float32)
+
+        if "color" in data_dict.keys():
+            data_dict["color"] = data_dict["color"].astype(np.float32)
+
+        if "normal" in data_dict.keys():
+            data_dict["normal"] = data_dict["normal"].astype(np.float32)
+
+        if "segment" in data_dict.keys():
+            data_dict["segment"] = data_dict["segment"].reshape([-1]).astype(np.int32)
+        else:
+            data_dict["segment"] = np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
+
+        if "instance" in data_dict.keys():
+            data_dict["instance"] = data_dict["instance"].reshape([-1]).astype(np.int32)
+        else:
+            data_dict["instance"] = np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
+        return data_dict
+
+    def get_data_name(self, idx):
+        return os.path.basename(self.data_list[idx % len(self.data_list)])
+
+    def prepare_train_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        return data_dict
+
+    def prepare_test_data(self, idx):
+        # load data
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        result_dict = dict(segment=data_dict.pop("segment"), name=data_dict.pop("name"))
+        if "origin_segment" in data_dict:
+            assert "inverse" in data_dict
+            result_dict["origin_segment"] = data_dict.pop("origin_segment")
+            result_dict["inverse"] = data_dict.pop("inverse")
+
+        data_dict_list = []
+        for aug in self.aug_transform:
+            data_dict_list.append(aug(deepcopy(data_dict)))
+
+        fragment_list = []
+        for data in data_dict_list:
+            if self.test_voxelize is not None:
+                data_part_list = self.test_voxelize(data)
+            else:
+                data["index"] = np.arange(data["coord"].shape[0])
+                data_part_list = [data]
+            for data_part in data_part_list:
+                if self.test_crop is not None:
+                    data_part = self.test_crop(data_part)
+                else:
+                    data_part = [data_part]
+                fragment_list += data_part
+
+        for i in range(len(fragment_list)):
+            fragment_list[i] = self.post_transform(fragment_list[i])
+        result_dict["fragment_list"] = fragment_list
+        return result_dict
+
+    def __getitem__(self, idx):
+        if self.test_mode:
+            return self.prepare_test_data(idx)
+        else:
+            return self.prepare_train_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
+
+
+@DATASETS.register_module()
+class ConcatDataset(Dataset):
+    def __init__(self, datasets, loop=1):
+        super(ConcatDataset, self).__init__()
+        self.datasets = [build_dataset(dataset) for dataset in datasets]
+        self.loop = loop
+        self.data_list = self.get_data_list()
+        logger = get_root_logger()
+        logger.info("Totally {} x {} samples in the concat set.".format(len(self.data_list), self.loop))
+
+    def get_data_list(self):
+        data_list = []
+        for i in range(len(self.datasets)):
+            data_list.extend(zip(np.ones(len(self.datasets[i])) * i, np.arange(len(self.datasets[i]))))
+        return data_list
+
+    def get_data(self, idx):
+        dataset_idx, data_idx = self.data_list[idx % len(self.data_list)]
+        return self.datasets[dataset_idx][data_idx]
+
+    def get_data_name(self, idx):
+        dataset_idx, data_idx = self.data_list[idx % len(self.data_list)]
+        return self.datasets[dataset_idx].get_data_name(data_idx)
+
+    def __getitem__(self, idx):
+        return self.get_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
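
Note (not part of the commit): a minimal sketch of the per-sample folder layout that DefaultDatasetV2.get_data reads: one directory per sample containing <asset>.npy arrays named after VALID_ASSETS. Paths and shapes below are hypothetical.

import os
import numpy as np

sample_dir = "data/dataset/train/scene_0000"  # hypothetical sample directory
os.makedirs(sample_dir, exist_ok=True)
np.save(os.path.join(sample_dir, "coord.npy"), np.zeros((100, 3), dtype=np.float32))
np.save(os.path.join(sample_dir, "color.npy"), np.zeros((100, 3), dtype=np.float32))
np.save(os.path.join(sample_dir, "segment.npy"), np.zeros(100, dtype=np.int32))
# get_data() loads every recognized *.npy in the folder; a missing "segment"
# or "instance" array is filled with -1 (the ignore value).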
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/modelnet.py ADDED
@@ -0,0 +1,150 @@
+"""
+ModelNet40 Dataset
+
+get sampled point clouds of ModelNet40 (XYZ and normal from mesh, 10k points per shape)
+at "https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip"
+
+Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+Please cite our work if the code is helpful to you.
+"""
+
+import os
+import numpy as np
+import pointops
+import torch
+from torch.utils.data import Dataset
+from copy import deepcopy
+
+
+from pointcept.utils.logger import get_root_logger
+from .builder import DATASETS
+from .transform import Compose
+
+
+@DATASETS.register_module()
+class ModelNetDataset(Dataset):
+    def __init__(
+        self,
+        split="train",
+        data_root="data/modelnet40",
+        class_names=None,
+        transform=None,
+        num_points=8192,
+        uniform_sampling=True,
+        save_record=True,
+        test_mode=False,
+        test_cfg=None,
+        loop=1,
+    ):
+        super().__init__()
+        self.data_root = data_root
+        self.class_names = dict(zip(class_names, range(len(class_names))))
+        self.split = split
+        self.num_point = num_points
+        self.uniform_sampling = uniform_sampling
+        self.transform = Compose(transform)
+        self.loop = (
+            loop if not test_mode else 1
+        )  # force loop = 1 in test mode
+        self.test_mode = test_mode
+        self.test_cfg = test_cfg if test_mode else None
+        if test_mode:
+            self.post_transform = Compose(self.test_cfg.post_transform)
+            self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+        self.data_list = self.get_data_list()
+        logger = get_root_logger()
+        logger.info(
+            "Totally {} x {} samples in {} set.".format(
+                len(self.data_list), self.loop, split
+            )
+        )
+
+        # check, prepare record
+        record_name = f"modelnet40_{self.split}"
+        if num_points is not None:
+            record_name += f"_{num_points}points"
+            if uniform_sampling:
+                record_name += "_uniform"
+        record_path = os.path.join(self.data_root, f"{record_name}.pth")
+        if os.path.isfile(record_path):
+            logger.info(f"Loading record: {record_name} ...")
+            self.data = torch.load(record_path)
+        else:
+            logger.info(f"Preparing record: {record_name} ...")
+            self.data = {}
+            for idx in range(len(self.data_list)):
+                data_name = self.data_list[idx]
+                logger.info(f"Parsing data [{idx}/{len(self.data_list)}]: {data_name}")
+                self.data[data_name] = self.get_data(idx)
+            if save_record:
+                torch.save(self.data, record_path)
+
+    def get_data(self, idx):
+        data_idx = idx % len(self.data_list)
+        data_name = self.data_list[data_idx]
+        if data_name in self.data.keys():
+            return self.data[data_name]
+        else:
+            data_shape = "_".join(data_name.split("_")[0:-1])
+            data_path = os.path.join(
+                self.data_root, data_shape, self.data_list[data_idx] + ".txt"
+            )
+            data = np.loadtxt(data_path, delimiter=",").astype(np.float32)
+            if self.num_point is not None:
+                if self.uniform_sampling:
+                    with torch.no_grad():
+                        mask = pointops.farthest_point_sampling(
+                            torch.tensor(data).float().cuda(),
+                            torch.tensor([len(data)]).long().cuda(),
+                            torch.tensor([self.num_point]).long().cuda(),
+                        )
+                    data = data[mask.cpu()]
+                else:
+                    data = data[: self.num_point]
+            coord, normal = data[:, 0:3], data[:, 3:6]
+            category = np.array([self.class_names[data_shape]])
+            return dict(coord=coord, normal=normal, category=category)
+
+    def get_data_list(self):
+        assert isinstance(self.split, str)
+        split_path = os.path.join(
+            self.data_root, "modelnet40_{}.txt".format(self.split)
+        )
+        data_list = np.loadtxt(split_path, dtype="str")
+        return data_list
+
+    def get_data_name(self, idx):
+        data_idx = idx % len(self.data_list)
+        return self.data_list[data_idx]
+
+    def __getitem__(self, idx):
+        if self.test_mode:
+            return self.prepare_test_data(idx)
+        else:
+            return self.prepare_train_data(idx)
+
+    def __len__(self):
+        return len(self.data_list) * self.loop
+
+    def prepare_train_data(self, idx):
+        data_dict = self.get_data(idx)
+        data_dict = self.transform(data_dict)
+        return data_dict
+
+    def prepare_test_data(self, idx):
+        assert idx < len(self.data_list)
+        data_dict = self.get_data(idx)
+        category = data_dict.pop("category")
+        data_dict = self.transform(data_dict)
+        data_dict_list = []
+        for aug in self.aug_transform:
+            data_dict_list.append(aug(deepcopy(data_dict)))
+        for i in range(len(data_dict_list)):
+            data_dict_list[i] = self.post_transform(data_dict_list[i])
+        data_dict = dict(
+            voting_list=data_dict_list,
+            category=category,
+            name=self.get_data_name(idx),
+        )
+        return data_dict
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/nuscenes.py ADDED
@@ -0,0 +1,120 @@
+"""
+nuScenes Dataset
+
+Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com), Zheng Zhang
+Please cite our work if the code is helpful to you.
+"""
+
+import os
+import numpy as np
+from collections.abc import Sequence
+import pickle
+
+from .builder import DATASETS
+from .defaults import DefaultDataset
+
+
+@DATASETS.register_module()
+class NuScenesDataset(DefaultDataset):
+    def __init__(self, sweeps=10, ignore_index=-1, **kwargs):
+        self.sweeps = sweeps
+        self.ignore_index = ignore_index
+        self.learning_map = self.get_learning_map(ignore_index)
+        super().__init__(ignore_index=ignore_index, **kwargs)
+
+    def get_info_path(self, split):
+        assert split in ["train", "val", "test"]
+        if split == "train":
+            return os.path.join(
+                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_train.pkl"
+            )
+        elif split == "val":
+            return os.path.join(
+                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_val.pkl"
+            )
+        elif split == "test":
+            return os.path.join(
+                self.data_root, "info", f"nuscenes_infos_{self.sweeps}sweeps_test.pkl"
+            )
+        else:
+            raise NotImplementedError
+
+    def get_data_list(self):
+        if isinstance(self.split, str):
+            info_paths = [self.get_info_path(self.split)]
+        elif isinstance(self.split, Sequence):
+            info_paths = [self.get_info_path(s) for s in self.split]
+        else:
+            raise NotImplementedError
+        data_list = []
+        for info_path in info_paths:
+            with open(info_path, "rb") as f:
+                info = pickle.load(f)
+                data_list.extend(info)
+        return data_list
+
+    def get_data(self, idx):
+        data = self.data_list[idx % len(self.data_list)]
+        lidar_path = os.path.join(self.data_root, "raw", data["lidar_path"])
+        points = np.fromfile(str(lidar_path), dtype=np.float32, count=-1).reshape(
+            [-1, 5]
+        )
+        coord = points[:, :3]
+        strength = points[:, 3].reshape([-1, 1]) / 255  # scale strength to [0, 1]
+
+        if "gt_segment_path" in data.keys():
+            gt_segment_path = os.path.join(
+                self.data_root, "raw", data["gt_segment_path"]
+            )
+            segment = np.fromfile(
+                str(gt_segment_path), dtype=np.uint8, count=-1
+            ).reshape([-1])
+            segment = np.vectorize(self.learning_map.__getitem__)(segment).astype(
+                np.int64
+            )
+        else:
+            segment = np.ones((points.shape[0],), dtype=np.int64) * self.ignore_index
+        data_dict = dict(coord=coord, strength=strength, segment=segment)
+        return data_dict
+
+    def get_data_name(self, idx):
+        # return the data name for lidar segmentation; revisit when detection support is needed
+        return self.data_list[idx % len(self.data_list)]["lidar_token"]
+
+    @staticmethod
+    def get_learning_map(ignore_index):
+        learning_map = {
+            0: ignore_index,
+            1: ignore_index,
+            2: 6,
+            3: 6,
+            4: 6,
+            5: ignore_index,
+            6: 6,
+            7: ignore_index,
+            8: ignore_index,
+            9: 0,
+            10: ignore_index,
+            11: ignore_index,
+            12: 7,
+            13: ignore_index,
+            14: 1,
+            15: 2,
+            16: 2,
+            17: 3,
+            18: 4,
+            19: ignore_index,
+            20: ignore_index,
+            21: 5,
+            22: 8,
+            23: 9,
+            24: 10,
+            25: 11,
+            26: 12,
+            27: 13,
+            28: 14,
+            29: ignore_index,
+            30: 15,
+            31: ignore_index,
+        }
+        return learning_map
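
Note (not part of the commit): a minimal illustration of the remapping used in get_data, which sends raw nuScenes lidarseg ids to the compact 16-class training space (or ignore_index).

import numpy as np

learning_map = NuScenesDataset.get_learning_map(ignore_index=-1)
raw = np.array([9, 14, 31], dtype=np.uint8)  # hypothetical raw lidarseg labels
segment = np.vectorize(learning_map.__getitem__)(raw).astype(np.int64)
print(segment)  # [ 0  1 -1]; raw id 31 falls to ignore_index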
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/alc/preprocess_arkitscenes_labelmaker_consensus.py ADDED
@@ -0,0 +1,375 @@
+import warnings
+
+import torch
+
+warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+import argparse
+import glob
+import json
+import multiprocessing as mp
+import os
+from concurrent.futures import ProcessPoolExecutor
+from itertools import repeat
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import plyfile
+from labelmaker import label_mappings
+from labelmaker.label_data import get_wordnet
+from labelmaker.scannet_200_labels import VALID_CLASS_IDS_200
+from tqdm import tqdm
+
+IGNORE_INDEX = -1
+
+
+def get_wordnet_to_scannet200_mapping():
+    table = pd.read_csv(Path(os.path.dirname(os.path.realpath(label_mappings.__file__))) / "mappings" / "label_mapping.csv")
+    wordnet = get_wordnet()
+    wordnet_keys = [x["name"] for x in wordnet]
+    mapping = {}
+    for row in table.index:
+        if table["wnsynsetkey"][row] not in wordnet_keys:
+            continue
+        scannet_id = table.loc[row, "id"]
+        wordnet199_id = next(x for x in wordnet if x["name"] == table["wnsynsetkey"][row])["id"]
+
+        if scannet_id in VALID_CLASS_IDS_200:
+            mapping.setdefault(wordnet199_id, set()).add(scannet_id)
+
+    wn199_size = np.array([x["id"] for x in wordnet]).max() + 1
+    mapping_array = np.zeros(shape=(wn199_size,), dtype=np.uint16)
+    for wordnet199_id in mapping.keys():
+        mapping_array[wordnet199_id] = min(mapping[wordnet199_id])
+
+    return mapping_array
+
+
+def get_wordnet_compact_mapping():
+    wordnet_info = get_wordnet()[1:]
+    wordnet_info = sorted(wordnet_info, key=lambda x: x["id"])
+
+    class2id = np.array([item["id"] for item in wordnet_info])
+    id2class = np.array([IGNORE_INDEX] * (class2id.max() + 1))
+    for class_, id_ in enumerate(class2id):
+        id2class[id_] = class_
+
+    return class2id, id2class
+
+
+def get_scannet200_compact_mapping():
+    class2id = np.array(VALID_CLASS_IDS_200)
+    id2class = np.array([IGNORE_INDEX] * (class2id.max() + 1))
+    for class_, id_ in enumerate(VALID_CLASS_IDS_200):
+        id2class[id_] = class_
+
+    return class2id, id2class
+
+
+def get_wordnet_names():
+    wordnet_info = get_wordnet()[1:]
+    wordnet_info = sorted(wordnet_info, key=lambda x: x["id"])
+
+    names = [item["name"].split(".")[0].replace("_", " ") for item in wordnet_info]
+
+    return names
+
+
+def read_plypcd(filepath):
+    """Read ply file and return it as numpy array. Returns None if empty."""
+
+    with open(filepath, "rb") as f:
+        plydata = plyfile.PlyData.read(f)
+        if plydata.elements:
+            data = plydata.elements[0].data
+            coords = np.array([data["x"], data["y"], data["z"]], dtype=np.float32).T
+
+            colors = None
+            if ({"red", "green", "blue"} - set(data.dtype.names)) == set():
+                colors = np.array([data["red"], data["green"], data["blue"]], dtype=np.uint8).T
+
+            normals = None
+            if ({"nx", "ny", "nz"} - set(data.dtype.names)) == set():
+                normals = np.array([data["nx"], data["ny"], data["nz"]], dtype=np.float32).T
+
+    return coords, colors, normals
+
+
+def handle_process(
+    scene_dir: str,
+    output_path: str,
+    label_mapping,
+    wn199_id2class,
+    scannet200_id2class,
+):
+    scene_dir = Path(scene_dir)
+
+    print(f"Processing: {scene_dir.name} in {scene_dir.parent.name}")
+
+    coords, colors, normals = read_plypcd(str(scene_dir / "pcd_downsampled.ply"))
+    save_dict = dict(
+        coord=coords,
+        color=colors,
+        scene_id=scene_dir.name,
+        normal=normals,
+    )
+
+    label_file = scene_dir / "labels_downsampled.txt"
+    wordnet_label = np.loadtxt(str(label_file), dtype=np.uint8).reshape(-1, 1)
+    scannet200_label = label_mapping[wordnet_label]
+    save_dict["semantic_pseudo_gt_wn199"] = wn199_id2class[wordnet_label]
+    save_dict["semantic_pseudo_gt_scannet200"] = scannet200_id2class[scannet200_label]
+
+    torch.save(save_dict, output_path)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--dataset_root",
+        required=True,
+        help="Path to the ARKitScenes LabelMaker dataset containing scene folders",
+    )
+    parser.add_argument(
+        "--output_root",
+        required=True,
+        help="Output path where train/val folders will be located",
+    )
+    config = parser.parse_args()
+
+    # Create output directories
+    train_output_dir = os.path.join(config.output_root, "train")
+    os.makedirs(train_output_dir, exist_ok=True)
+    val_output_dir = os.path.join(config.output_root, "val")
+    os.makedirs(val_output_dir, exist_ok=True)
+
+    # Load label map
+    wn_scannet200_label_mapping = get_wordnet_to_scannet200_mapping()
+    _, wn199_id2class = get_wordnet_compact_mapping()
+    _, scannet200_id2class = get_scannet200_compact_mapping()
+
+    scene_dirs = []
+    output_paths = []
+
+    # Load train/val splits
+    train_folder = Path(config.dataset_root) / "Training"
+    train_scene_names = os.listdir(str(train_folder))
+    for scene in tqdm(train_scene_names):
+        file_path = train_folder / scene / "pcd_downsampled.ply"
+        if file_path.exists() and os.path.getsize(str(file_path)) <= 50 * 1024 * 1024:
+            scene_dirs.append(str(train_folder / scene))
+            output_paths.append(str(Path(config.output_root) / "train" / f"{scene}.pth"))
+
+    val_folder = Path(config.dataset_root) / "Validation"
+    val_scene_names = os.listdir(str(val_folder))
+    for scene in tqdm(val_scene_names):
+        file_path = val_folder / scene / "pcd_downsampled.ply"
+        if file_path.exists() and os.path.getsize(str(file_path)) <= 50 * 1024 * 1024:
+            scene_dirs.append(str(val_folder / scene))
+            output_paths.append(str(Path(config.output_root) / "val" / f"{scene}.pth"))
+
+    # Preprocess data.
+    print("Processing scenes...")
+    pool = ProcessPoolExecutor(max_workers=mp.cpu_count())
+    print(f"Using {mp.cpu_count()} cores")
+    # pool = ProcessPoolExecutor(max_workers=1)
+    _ = list(
+        pool.map(
+            handle_process,
+            scene_dirs,
+            output_paths,
+            repeat(wn_scannet200_label_mapping),
+            repeat(wn199_id2class),
+            repeat(scannet200_id2class),
+        )
+    )
+
+
+WORDNET_NAMES = (
+    "wall",
+    "chair",
+    "book",
+    "cabinet",
+    "door",
+    "floor",
+    "ashcan",
+    "table",
+    "window",
+    "bookshelf",
+    "display",
+    "cushion",
+    "box",
+    "picture",
+    "ceiling",
+    "doorframe",
+    "desk",
+    "swivel chair",
+    "towel",
+    "sofa",
+    "sink",
+    "backpack",
+    "lamp",
+    "chest of drawers",
+    "apparel",
+    "armchair",
+    "bed",
+    "curtain",
+    "mirror",
+    "plant",
+    "radiator",
+    "toilet tissue",
+    "shoe",
+    "bag",
+    "bottle",
+    "countertop",
+    "coffee table",
+    "toilet",
+    "computer keyboard",
+    "fridge",
+    "stool",
+    "computer",
+    "mug",
+    "telephone",
+    "light",
+    "jacket",
+    "bathtub",
+    "shower curtain",
+    "microwave",
+    "footstool",
+    "baggage",
+    "laptop",
+    "printer",
+    "shower stall",
+    "soap dispenser",
+    "stove",
+    "fan",
+    "paper",
+    "stand",
+    "bench",
+    "wardrobe",
+    "blanket",
+    "booth",
+    "duplicator",
+    "bar",
+    "soap dish",
+    "switch",
+    "coffee maker",
+    "decoration",
+    "range hood",
+    "blackboard",
+    "clock",
+    "railing",
+    "mat",
+    "seat",
+    "bannister",
+    "container",
+    "mouse",
+    "person",
+    "stairway",
+    "basket",
+    "dumbbell",
+    "column",
+    "bucket",
+    "windowsill",
+    "signboard",
+    "dishwasher",
+    "loudspeaker",
+    "washer",
+    "paper towel",
+    "clothes hamper",
+    "piano",
+    "sack",
+    "handcart",
+    "blind",
+    "dish rack",
+    "mailbox",
+    "bag",
+    "bicycle",
+    "ladder",
+    "rack",
+    "tray",
+    "toaster",
+    "paper cutter",
+    "plunger",
+    "dryer",
+    "guitar",
+    "fire extinguisher",
+    "pitcher",
+    "pipe",
+    "plate",
+    "vacuum",
+    "bowl",
+    "hat",
+    "rod",
+    "water cooler",
+    "kettle",
+    "oven",
+    "scale",
+    "broom",
+    "hand blower",
+    "coatrack",
+    "teddy",
+    "alarm clock",
+    "ironing board",
+    "fire alarm",
+    "machine",
+    "music stand",
+    "fireplace",
+    "furniture",
+    "vase",
+    "vent",
+    "candle",
+    "crate",
+    "dustpan",
+    "earphone",
+    "jar",
+    "projector",
+    "gat",
+    "step",
+    "step stool",
+    "vending machine",
+    "coat",
+    "coat hanger",
+    "drinking fountain",
+    "hamper",
+    "thermostat",
+    "banner",
+    "iron",
+    "soap",
+    "chopping board",
+    "kitchen island",
+    "shirt",
+    "sleeping bag",
+    "tire",
+    "toothbrush",
+    "bathrobe",
+    "faucet",
+    "slipper",
+    "thermos",
+    "tripod",
+    "dispenser",
+    "heater",
+    "pool table",
+    "remote control",
+    "stapler",
+    "treadmill",
+    "beanbag",
+    "dartboard",
+    "metronome",
+    "rope",
+    "sewing machine",
+    "shredder",
+    "toolbox",
+    "water heater",
+    "brush",
+    "control",
+    "dais",
+    "dollhouse",
+    "envelope",
+    "food",
+    "frying pan",
+    "helmet",
+    "tennis racket",
+    "umbrella",
+)
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/arkitscenes/preprocess_arkitscenes_mesh.py ADDED
@@ -0,0 +1,87 @@
+"""
+Preprocessing ArkitScenes
+"""
+
+import os
+import argparse
+import glob
+import plyfile
+import numpy as np
+import pandas as pd
+import multiprocessing as mp
+from concurrent.futures import ProcessPoolExecutor
+from itertools import repeat
+
+import torch
+
+
+def read_plymesh(filepath):
+    """Read ply file and return it as numpy array. Returns None if empty."""
+    with open(filepath, "rb") as f:
+        plydata = plyfile.PlyData.read(f)
+    if plydata.elements:
+        vertices = pd.DataFrame(plydata["vertex"].data).values
+        faces = np.stack(plydata["face"].data["vertex_indices"], axis=0)
+        return vertices, faces
+
+
+def face_normal(vertex, face):
+    v01 = vertex[face[:, 1]] - vertex[face[:, 0]]
+    v02 = vertex[face[:, 2]] - vertex[face[:, 0]]
+    vec = np.cross(v01, v02)
+    length = np.sqrt(np.sum(vec**2, axis=1, keepdims=True)) + 1.0e-8
+    nf = vec / length
+    area = length * 0.5
+    return nf, area
+
+
+def vertex_normal(vertex, face):
+    nf, area = face_normal(vertex, face)
+    nf = nf * area
+
+    nv = np.zeros_like(vertex)
+    for i in range(face.shape[0]):
+        nv[face[i]] += nf[i]
+
+    length = np.sqrt(np.sum(nv**2, axis=1, keepdims=True)) + 1.0e-8
+    nv = nv / length
+    return nv
+
+
+def parse_scene(scene_path, output_dir):
+    print(f"Parsing scene {scene_path}")
+    split = os.path.basename(os.path.dirname(os.path.dirname(scene_path)))
+    scene_id = os.path.basename(os.path.dirname(scene_path))
+    vertices, faces = read_plymesh(scene_path)
+    coords = vertices[:, :3]
+    colors = vertices[:, 3:6]
+    data_dict = dict(coord=coords, color=colors, scene_id=scene_id)
+    data_dict["normal"] = vertex_normal(coords, faces)
+    torch.save(data_dict, os.path.join(output_dir, split, f"{scene_id}.pth"))
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--dataset_root",
+        required=True,
+        help="Path to the ArkitScenes dataset containing scene folders",
+    )
+    parser.add_argument(
+        "--output_root",
+        required=True,
+        help="Output path where train/val folders will be located",
+    )
+    opt = parser.parse_args()
+    # Create output directories
+    train_output_dir = os.path.join(opt.output_root, "Training")
+    os.makedirs(train_output_dir, exist_ok=True)
+    val_output_dir = os.path.join(opt.output_root, "Validation")
+    os.makedirs(val_output_dir, exist_ok=True)
+    # Load scene paths
+    scene_paths = sorted(glob.glob(opt.dataset_root + "/3dod/*/*/*_mesh.ply"))
+    # Preprocess data.
+    pool = ProcessPoolExecutor(max_workers=mp.cpu_count())
+    # pool = ProcessPoolExecutor(max_workers=1)
+    print("Processing scenes...")
+    _ = list(pool.map(parse_scene, scene_paths, repeat(opt.output_root)))
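
Note (not part of the commit): a quick sanity check for the face_normal/vertex_normal helpers above, run on a single right triangle in the z = 0 plane; every vertex normal should come out as +z.

import numpy as np

vertex = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
face = np.array([[0, 1, 2]])
nv = vertex_normal(vertex, face)  # assumes the function defined in the file above
print(nv)  # approximately [[0, 0, 1], [0, 0, 1], [0, 0, 1]]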
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/nuscenes/preprocess_nuscenes_info.py ADDED
@@ -0,0 +1,607 @@
1
+ """
2
+ Preprocessing Script for nuScenes Information
3
+ modified from OpenPCDet (https://github.com/open-mmlab/OpenPCDet)
4
+
5
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
6
+ Please cite our work if the code is helpful to you.
7
+ """
8
+
9
+ import os
10
+ from pathlib import Path
11
+ import numpy as np
12
+ import argparse
13
+ import tqdm
14
+ import pickle
15
+ from functools import reduce
16
+ from pyquaternion import Quaternion
17
+ from nuscenes.nuscenes import NuScenes
18
+ from nuscenes.utils import splits
19
+ from nuscenes.utils.geometry_utils import transform_matrix
20
+
21
+
22
+ map_name_from_general_to_detection = {
23
+ "human.pedestrian.adult": "pedestrian",
24
+ "human.pedestrian.child": "pedestrian",
25
+ "human.pedestrian.wheelchair": "ignore",
26
+ "human.pedestrian.stroller": "ignore",
27
+ "human.pedestrian.personal_mobility": "ignore",
28
+ "human.pedestrian.police_officer": "pedestrian",
29
+ "human.pedestrian.construction_worker": "pedestrian",
30
+ "animal": "ignore",
31
+ "vehicle.car": "car",
32
+ "vehicle.motorcycle": "motorcycle",
33
+ "vehicle.bicycle": "bicycle",
34
+ "vehicle.bus.bendy": "bus",
35
+ "vehicle.bus.rigid": "bus",
36
+ "vehicle.truck": "truck",
37
+ "vehicle.construction": "construction_vehicle",
38
+ "vehicle.emergency.ambulance": "ignore",
39
+ "vehicle.emergency.police": "ignore",
40
+ "vehicle.trailer": "trailer",
41
+ "movable_object.barrier": "barrier",
42
+ "movable_object.trafficcone": "traffic_cone",
43
+ "movable_object.pushable_pullable": "ignore",
44
+ "movable_object.debris": "ignore",
45
+ "static_object.bicycle_rack": "ignore",
46
+ }
47
+
48
+
49
+ cls_attr_dist = {
50
+ "barrier": {
51
+ "cycle.with_rider": 0,
52
+ "cycle.without_rider": 0,
53
+ "pedestrian.moving": 0,
54
+ "pedestrian.sitting_lying_down": 0,
55
+ "pedestrian.standing": 0,
56
+ "vehicle.moving": 0,
57
+ "vehicle.parked": 0,
58
+ "vehicle.stopped": 0,
59
+ },
60
+ "bicycle": {
61
+ "cycle.with_rider": 2791,
62
+ "cycle.without_rider": 8946,
63
+ "pedestrian.moving": 0,
64
+ "pedestrian.sitting_lying_down": 0,
65
+ "pedestrian.standing": 0,
66
+ "vehicle.moving": 0,
67
+ "vehicle.parked": 0,
68
+ "vehicle.stopped": 0,
69
+ },
70
+ "bus": {
71
+ "cycle.with_rider": 0,
72
+ "cycle.without_rider": 0,
73
+ "pedestrian.moving": 0,
74
+ "pedestrian.sitting_lying_down": 0,
75
+ "pedestrian.standing": 0,
76
+ "vehicle.moving": 9092,
77
+ "vehicle.parked": 3294,
78
+ "vehicle.stopped": 3881,
79
+ },
80
+ "car": {
81
+ "cycle.with_rider": 0,
82
+ "cycle.without_rider": 0,
83
+ "pedestrian.moving": 0,
84
+ "pedestrian.sitting_lying_down": 0,
85
+ "pedestrian.standing": 0,
86
+ "vehicle.moving": 114304,
87
+ "vehicle.parked": 330133,
88
+ "vehicle.stopped": 46898,
89
+ },
90
+ "construction_vehicle": {
91
+ "cycle.with_rider": 0,
92
+ "cycle.without_rider": 0,
93
+ "pedestrian.moving": 0,
94
+ "pedestrian.sitting_lying_down": 0,
95
+ "pedestrian.standing": 0,
96
+ "vehicle.moving": 882,
97
+ "vehicle.parked": 11549,
98
+ "vehicle.stopped": 2102,
99
+ },
100
+ "ignore": {
101
+ "cycle.with_rider": 307,
102
+ "cycle.without_rider": 73,
103
+ "pedestrian.moving": 0,
104
+ "pedestrian.sitting_lying_down": 0,
105
+ "pedestrian.standing": 0,
106
+ "vehicle.moving": 165,
107
+ "vehicle.parked": 400,
108
+ "vehicle.stopped": 102,
109
+ },
110
+ "motorcycle": {
111
+ "cycle.with_rider": 4233,
112
+ "cycle.without_rider": 8326,
113
+ "pedestrian.moving": 0,
114
+ "pedestrian.sitting_lying_down": 0,
115
+ "pedestrian.standing": 0,
116
+ "vehicle.moving": 0,
117
+ "vehicle.parked": 0,
118
+ "vehicle.stopped": 0,
119
+ },
120
+ "pedestrian": {
121
+ "cycle.with_rider": 0,
122
+ "cycle.without_rider": 0,
123
+ "pedestrian.moving": 157444,
124
+ "pedestrian.sitting_lying_down": 13939,
125
+ "pedestrian.standing": 46530,
126
+ "vehicle.moving": 0,
127
+ "vehicle.parked": 0,
128
+ "vehicle.stopped": 0,
129
+ },
130
+ "traffic_cone": {
131
+ "cycle.with_rider": 0,
132
+ "cycle.without_rider": 0,
133
+ "pedestrian.moving": 0,
134
+ "pedestrian.sitting_lying_down": 0,
135
+ "pedestrian.standing": 0,
136
+ "vehicle.moving": 0,
137
+ "vehicle.parked": 0,
138
+ "vehicle.stopped": 0,
139
+ },
140
+ "trailer": {
141
+ "cycle.with_rider": 0,
142
+ "cycle.without_rider": 0,
143
+ "pedestrian.moving": 0,
144
+ "pedestrian.sitting_lying_down": 0,
145
+ "pedestrian.standing": 0,
146
+ "vehicle.moving": 3421,
147
+ "vehicle.parked": 19224,
148
+ "vehicle.stopped": 1895,
149
+ },
150
+ "truck": {
151
+ "cycle.with_rider": 0,
152
+ "cycle.without_rider": 0,
153
+ "pedestrian.moving": 0,
154
+ "pedestrian.sitting_lying_down": 0,
155
+ "pedestrian.standing": 0,
156
+ "vehicle.moving": 21339,
157
+ "vehicle.parked": 55626,
158
+ "vehicle.stopped": 11097,
159
+ },
160
+ }
161
+
162
+
163
+ def get_available_scenes(nusc):
164
+ available_scenes = []
165
+ for scene in nusc.scene:
166
+ scene_token = scene["token"]
167
+ scene_rec = nusc.get("scene", scene_token)
168
+ sample_rec = nusc.get("sample", scene_rec["first_sample_token"])
169
+ sd_rec = nusc.get("sample_data", sample_rec["data"]["LIDAR_TOP"])
170
+ has_more_frames = True
171
+ scene_not_exist = False
172
+ while has_more_frames:
173
+ lidar_path, boxes, _ = nusc.get_sample_data(sd_rec["token"])
174
+ if not Path(lidar_path).exists():
175
+ scene_not_exist = True
176
+ break
177
+ else:
178
+ break
179
+ if scene_not_exist:
180
+ continue
181
+ available_scenes.append(scene)
182
+ return available_scenes
183
+
184
+
185
+ def get_sample_data(nusc, sample_data_token, selected_anntokens=None):
186
+ """
187
+ Returns the data path as well as all annotations related to that sample_data.
188
+ Note that the boxes are transformed into the current sensor's coordinate frame.
189
+ Args:
190
+ nusc:
191
+ sample_data_token: Sample_data token.
192
+ selected_anntokens: If provided only return the selected annotation.
193
+
194
+ Returns:
195
+ (data_path, box_list, cam_intrinsic) for the given sample_data.
196
+ """
197
+ # Retrieve sensor & pose records
198
+ sd_record = nusc.get("sample_data", sample_data_token)
199
+ cs_record = nusc.get("calibrated_sensor", sd_record["calibrated_sensor_token"])
200
+ sensor_record = nusc.get("sensor", cs_record["sensor_token"])
201
+ pose_record = nusc.get("ego_pose", sd_record["ego_pose_token"])
202
+
203
+ data_path = nusc.get_sample_data_path(sample_data_token)
204
+
205
+ if sensor_record["modality"] == "camera":
206
+ cam_intrinsic = np.array(cs_record["camera_intrinsic"])
207
+ else:
208
+ cam_intrinsic = None
209
+
210
+ # Retrieve all sample annotations and map to sensor coordinate system.
211
+ if selected_anntokens is not None:
212
+ boxes = list(map(nusc.get_box, selected_anntokens))
213
+ else:
214
+ boxes = nusc.get_boxes(sample_data_token)
215
+
216
+ # Make list of Box objects including coord system transforms.
217
+ box_list = []
218
+ for box in boxes:
219
+ box.velocity = nusc.box_velocity(box.token)
220
+ # Move box to ego vehicle coord system
221
+ box.translate(-np.array(pose_record["translation"]))
222
+ box.rotate(Quaternion(pose_record["rotation"]).inverse)
223
+
224
+ # Move box to sensor coord system
225
+ box.translate(-np.array(cs_record["translation"]))
226
+ box.rotate(Quaternion(cs_record["rotation"]).inverse)
227
+
228
+ box_list.append(box)
229
+
230
+ return data_path, box_list, cam_intrinsic
231
+
232
+
233
+ def quaternion_yaw(q: Quaternion) -> float:
234
+ """
235
+ Calculate the yaw angle from a quaternion.
236
+ Note that this only works for a quaternion that represents a box in lidar or global coordinate frame.
237
+ It does not work for a box in the camera frame.
238
+ :param q: Quaternion of interest.
239
+ :return: Yaw angle in radians.
240
+ """
241
+
242
+ # Project into xy plane.
243
+ v = np.dot(q.rotation_matrix, np.array([1, 0, 0]))
244
+
245
+ # Measure yaw using arctan.
246
+ yaw = np.arctan2(v[1], v[0])
247
+
248
+ return yaw
249
+
250
+
251
+ def obtain_sensor2top(
252
+ nusc, sensor_token, l2e_t, l2e_r_mat, e2g_t, e2g_r_mat, sensor_type="lidar"
253
+ ):
254
+ """Obtain the info with RT matrix from general sensor to Top LiDAR.
255
+
256
+ Args:
257
+ nusc (class): Dataset class in the nuScenes dataset.
258
+ sensor_token (str): Sample data token corresponding to the
259
+ specific sensor type.
260
+ l2e_t (np.ndarray): Translation from lidar to ego in shape (1, 3).
261
+ l2e_r_mat (np.ndarray): Rotation matrix from lidar to ego
262
+ in shape (3, 3).
263
+ e2g_t (np.ndarray): Translation from ego to global in shape (1, 3).
264
+ e2g_r_mat (np.ndarray): Rotation matrix from ego to global
265
+ in shape (3, 3).
266
+ sensor_type (str): Sensor to calibrate. Default: "lidar".
267
+
268
+ Returns:
269
+ sweep (dict): Sweep information after transformation.
270
+ """
271
+ sd_rec = nusc.get("sample_data", sensor_token)
272
+ cs_record = nusc.get("calibrated_sensor", sd_rec["calibrated_sensor_token"])
273
+ pose_record = nusc.get("ego_pose", sd_rec["ego_pose_token"])
274
+ data_path = str(nusc.get_sample_data_path(sd_rec["token"]))
275
+ # if os.getcwd() in data_path: # path from lyftdataset is absolute path
276
+ # data_path = data_path.split(f"{os.getcwd()}/")[-1] # relative path
277
+ sweep = {
278
+ "data_path": data_path,
279
+ "type": sensor_type,
280
+ "sample_data_token": sd_rec["token"],
281
+ "sensor2ego_translation": cs_record["translation"],
282
+ "sensor2ego_rotation": cs_record["rotation"],
283
+ "ego2global_translation": pose_record["translation"],
284
+ "ego2global_rotation": pose_record["rotation"],
285
+ "timestamp": sd_rec["timestamp"],
286
+ }
287
+ l2e_r_s = sweep["sensor2ego_rotation"]
288
+ l2e_t_s = sweep["sensor2ego_translation"]
289
+ e2g_r_s = sweep["ego2global_rotation"]
290
+ e2g_t_s = sweep["ego2global_translation"]
291
+
292
+ # obtain the RT from sensor to Top LiDAR
293
+ # sweep->ego->global->ego'->lidar
294
+ l2e_r_s_mat = Quaternion(l2e_r_s).rotation_matrix
295
+ e2g_r_s_mat = Quaternion(e2g_r_s).rotation_matrix
296
+ R = (l2e_r_s_mat.T @ e2g_r_s_mat.T) @ (
297
+ np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T
298
+ )
299
+ T = (l2e_t_s @ e2g_r_s_mat.T + e2g_t_s) @ (
300
+ np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T
301
+ )
302
+ T -= (
303
+ e2g_t @ (np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T)
304
+ + l2e_t @ np.linalg.inv(l2e_r_mat).T
305
+ ).squeeze(0)
306
+ sweep["sensor2lidar_rotation"] = R.T # points @ R.T + T
307
+ sweep["sensor2lidar_translation"] = T
308
+ return sweep
309
+
310
+
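A minimal, self-contained check (synthetic poses via pyquaternion, not nuScenes data) that the closed-form R/T algebra above matches the chained homogeneous transform sweep sensor -> ego -> global -> ego' -> reference lidar; plain (3,) translation vectors are used here, so the squeeze(0) step is omitted:

import numpy as np
from pyquaternion import Quaternion

rng = np.random.default_rng(0)

def rand_pose():
    q = Quaternion(rng.normal(size=4)).normalised
    return q.rotation_matrix, rng.normal(size=3)

def hom(r, t):
    # assemble a 4x4 homogeneous transform from rotation and translation
    m = np.eye(4)
    m[:3, :3], m[:3, 3] = r, t
    return m

l2e_r_mat, l2e_t = rand_pose()      # lidar -> ego (reference sample)
e2g_r_mat, e2g_t = rand_pose()      # ego -> global (reference sample)
l2e_r_s_mat, l2e_t_s = rand_pose()  # sensor -> ego (sweep)
e2g_r_s_mat, e2g_t_s = rand_pose()  # ego' -> global (sweep)

chained = (np.linalg.inv(hom(e2g_r_mat, e2g_t) @ hom(l2e_r_mat, l2e_t))
           @ hom(e2g_r_s_mat, e2g_t_s) @ hom(l2e_r_s_mat, l2e_t_s))

inv_ref = np.linalg.inv(e2g_r_mat).T @ np.linalg.inv(l2e_r_mat).T
R = (l2e_r_s_mat.T @ e2g_r_s_mat.T) @ inv_ref
T = (l2e_t_s @ e2g_r_s_mat.T + e2g_t_s) @ inv_ref
T -= e2g_t @ inv_ref + l2e_t @ np.linalg.inv(l2e_r_mat).T

assert np.allclose(chained[:3, :3], R.T) and np.allclose(chained[:3, 3], T)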
311
+ def fill_trainval_infos(
312
+ data_path, nusc, train_scenes, test=False, max_sweeps=10, with_camera=False
313
+ ):
314
+ train_nusc_infos = []
315
+ val_nusc_infos = []
316
+ progress_bar = tqdm.tqdm(
317
+ total=len(nusc.sample), desc="create_info", dynamic_ncols=True
318
+ )
319
+
320
+ ref_chan = "LIDAR_TOP" # The reference channel of the current sample_rec that the point clouds are mapped to.
321
+ chan = "LIDAR_TOP" # The lidar channel from which we track back n sweeps to aggregate the point cloud.
322
+
323
+ for index, sample in enumerate(nusc.sample):
324
+ progress_bar.update()
325
+
326
+ ref_sd_token = sample["data"][ref_chan]
327
+ ref_sd_rec = nusc.get("sample_data", ref_sd_token)
328
+ ref_cs_rec = nusc.get(
329
+ "calibrated_sensor", ref_sd_rec["calibrated_sensor_token"]
330
+ )
331
+ ref_pose_rec = nusc.get("ego_pose", ref_sd_rec["ego_pose_token"])
332
+ ref_time = 1e-6 * ref_sd_rec["timestamp"]
333
+
334
+ ref_lidar_path, ref_boxes, _ = get_sample_data(nusc, ref_sd_token)
335
+
336
+ ref_cam_front_token = sample["data"]["CAM_FRONT"]
337
+ ref_cam_path, _, ref_cam_intrinsic = nusc.get_sample_data(ref_cam_front_token)
338
+
339
+ # Homogeneous transform from ego car frame to reference frame
340
+ ref_from_car = transform_matrix(
341
+ ref_cs_rec["translation"], Quaternion(ref_cs_rec["rotation"]), inverse=True
342
+ )
343
+
344
+ # Homogeneous transformation matrix from global to _current_ ego car frame
345
+ car_from_global = transform_matrix(
346
+ ref_pose_rec["translation"],
347
+ Quaternion(ref_pose_rec["rotation"]),
348
+ inverse=True,
349
+ )
350
+ info = {
351
+ "lidar_path": Path(ref_lidar_path).relative_to(data_path).__str__(),
352
+ "lidar_token": ref_sd_token,
353
+ "cam_front_path": Path(ref_cam_path).relative_to(data_path).__str__(),
354
+ "cam_intrinsic": ref_cam_intrinsic,
355
+ "token": sample["token"],
356
+ "sweeps": [],
357
+ "ref_from_car": ref_from_car,
358
+ "car_from_global": car_from_global,
359
+ "timestamp": ref_time,
360
+ }
361
+ if with_camera:
362
+ info["cams"] = dict()
363
+ l2e_r = ref_cs_rec["rotation"]
364
+ l2e_t = (ref_cs_rec["translation"],) # tuple -> array of shape (1, 3), matching the squeeze(0) in obtain_sensor2top
365
+ e2g_r = ref_pose_rec["rotation"]
366
+ e2g_t = ref_pose_rec["translation"]
367
+ l2e_r_mat = Quaternion(l2e_r).rotation_matrix
368
+ e2g_r_mat = Quaternion(e2g_r).rotation_matrix
369
+
370
+ # obtain 6 image's information per frame
371
+ camera_types = [
372
+ "CAM_FRONT",
373
+ "CAM_FRONT_RIGHT",
374
+ "CAM_FRONT_LEFT",
375
+ "CAM_BACK",
376
+ "CAM_BACK_LEFT",
377
+ "CAM_BACK_RIGHT",
378
+ ]
379
+ for cam in camera_types:
380
+ cam_token = sample["data"][cam]
381
+ cam_path, _, camera_intrinsics = nusc.get_sample_data(cam_token)
382
+ cam_info = obtain_sensor2top(
383
+ nusc, cam_token, l2e_t, l2e_r_mat, e2g_t, e2g_r_mat, cam
384
+ )
385
+ cam_info["data_path"] = (
386
+ Path(cam_info["data_path"]).relative_to(data_path).__str__()
387
+ )
388
+ cam_info.update(camera_intrinsics=camera_intrinsics)
389
+ info["cams"].update({cam: cam_info})
390
+
391
+ sample_data_token = sample["data"][chan]
392
+ curr_sd_rec = nusc.get("sample_data", sample_data_token)
393
+ sweeps = []
394
+ while len(sweeps) < max_sweeps - 1:
395
+ if curr_sd_rec["prev"] == "":
396
+ if len(sweeps) == 0:
397
+ sweep = {
398
+ "lidar_path": Path(ref_lidar_path)
399
+ .relative_to(data_path)
400
+ .__str__(),
401
+ "sample_data_token": curr_sd_rec["token"],
402
+ "transform_matrix": None,
403
+ "time_lag": curr_sd_rec["timestamp"] * 0,
404
+ }
405
+ sweeps.append(sweep)
406
+ else:
407
+ sweeps.append(sweeps[-1])
408
+ else:
409
+ curr_sd_rec = nusc.get("sample_data", curr_sd_rec["prev"])
410
+
411
+ # Get past pose
412
+ current_pose_rec = nusc.get("ego_pose", curr_sd_rec["ego_pose_token"])
413
+ global_from_car = transform_matrix(
414
+ current_pose_rec["translation"],
415
+ Quaternion(current_pose_rec["rotation"]),
416
+ inverse=False,
417
+ )
418
+
419
+ # Homogeneous transformation matrix from sensor coordinate frame to ego car frame.
420
+ current_cs_rec = nusc.get(
421
+ "calibrated_sensor", curr_sd_rec["calibrated_sensor_token"]
422
+ )
423
+ car_from_current = transform_matrix(
424
+ current_cs_rec["translation"],
425
+ Quaternion(current_cs_rec["rotation"]),
426
+ inverse=False,
427
+ )
428
+
429
+ tm = reduce(
430
+ np.dot,
431
+ [ref_from_car, car_from_global, global_from_car, car_from_current],
432
+ )
433
+
434
+ lidar_path = nusc.get_sample_data_path(curr_sd_rec["token"])
435
+
436
+ time_lag = ref_time - 1e-6 * curr_sd_rec["timestamp"]
437
+
438
+ sweep = {
439
+ "lidar_path": Path(lidar_path).relative_to(data_path).__str__(),
440
+ "sample_data_token": curr_sd_rec["token"],
441
+ "transform_matrix": tm,
442
+ "global_from_car": global_from_car,
443
+ "car_from_current": car_from_current,
444
+ "time_lag": time_lag,
445
+ }
446
+ sweeps.append(sweep)
447
+
448
+ info["sweeps"] = sweeps
449
+
450
+ assert len(info["sweeps"]) == max_sweeps - 1, (
451
+ f"sweep {curr_sd_rec['token']} only has {len(info['sweeps'])} sweeps, "
452
+ f"you should duplicate to sweep num {max_sweeps - 1}"
453
+ )
454
+
455
+ if not test:
456
+ # processing gt bbox
457
+ annotations = [
458
+ nusc.get("sample_annotation", token) for token in sample["anns"]
459
+ ]
460
+
461
+ # the filtering gives 0.5~1 map improvement
462
+ num_lidar_pts = np.array([anno["num_lidar_pts"] for anno in annotations])
463
+ num_radar_pts = np.array([anno["num_radar_pts"] for anno in annotations])
464
+ mask = num_lidar_pts + num_radar_pts > 0
465
+
466
+ locs = np.array([b.center for b in ref_boxes]).reshape(-1, 3)
467
+ dims = np.array([b.wlh for b in ref_boxes]).reshape(-1, 3)[
468
+ :, [1, 0, 2]
469
+ ] # wlh == > dxdydz (lwh)
470
+ velocity = np.array([b.velocity for b in ref_boxes]).reshape(-1, 3)
471
+ rots = np.array([quaternion_yaw(b.orientation) for b in ref_boxes]).reshape(
472
+ -1, 1
473
+ )
474
+ names = np.array([b.name for b in ref_boxes])
475
+ tokens = np.array([b.token for b in ref_boxes])
476
+ gt_boxes = np.concatenate([locs, dims, rots, velocity[:, :2]], axis=1)
477
+
478
+ assert len(annotations) == len(gt_boxes) == len(velocity)
479
+
480
+ info["gt_boxes"] = gt_boxes[mask, :]
481
+ info["gt_boxes_velocity"] = velocity[mask, :]
482
+ info["gt_names"] = np.array(
483
+ [map_name_from_general_to_detection[name] for name in names]
484
+ )[mask]
485
+ info["gt_boxes_token"] = tokens[mask]
486
+ info["num_lidar_pts"] = num_lidar_pts[mask]
487
+ info["num_radar_pts"] = num_radar_pts[mask]
488
+
489
+ # processing gt segment
490
+ segment_path = nusc.get("lidarseg", ref_sd_token)["filename"]
491
+ info["gt_segment_path"] = segment_path
492
+
493
+ if sample["scene_token"] in train_scenes:
494
+ train_nusc_infos.append(info)
495
+ else:
496
+ val_nusc_infos.append(info)
497
+
498
+ progress_bar.close()
499
+ return train_nusc_infos, val_nusc_infos
500
+
501
+
502
+ if __name__ == "__main__":
503
+ parser = argparse.ArgumentParser()
504
+ parser.add_argument(
505
+ "--dataset_root", required=True, help="Path to the nuScenes dataset."
506
+ )
507
+ parser.add_argument(
508
+ "--output_root",
509
+ required=True,
510
+ help="Output path where processed information located.",
511
+ )
512
+ parser.add_argument(
513
+ "--max_sweeps", default=10, type=int, help="Max number of sweeps. Default: 10."
514
+ )
515
+ parser.add_argument(
516
+ "--with_camera",
517
+ action="store_true",
518
+ default=False,
519
+ help="Whether use camera or not.",
520
+ )
521
+ config = parser.parse_args()
522
+
523
+ print("Loading nuScenes tables for version v1.0-trainval...")
524
+ nusc_trainval = NuScenes(
525
+ version="v1.0-trainval", dataroot=config.dataset_root, verbose=False
526
+ )
527
+ available_scenes_trainval = get_available_scenes(nusc_trainval)
528
+ available_scene_names_trainval = [s["name"] for s in available_scenes_trainval]
529
+ print("total scene num:", len(nusc_trainval.scene))
530
+ print("exist scene num:", len(available_scenes_trainval))
531
+ assert len(available_scenes_trainval) == len(nusc_trainval.scene) == 850
532
+
533
+ print("Loading nuScenes tables for version v1.0-test...")
534
+ nusc_test = NuScenes(
535
+ version="v1.0-test", dataroot=config.dataset_root, verbose=False
536
+ )
537
+ available_scenes_test = get_available_scenes(nusc_test)
538
+ available_scene_names_test = [s["name"] for s in available_scenes_test]
539
+ print("total scene num:", len(nusc_test.scene))
540
+ print("exist scene num:", len(available_scenes_test))
541
+ assert len(available_scenes_test) == len(nusc_test.scene) == 150
542
+
543
+ train_scenes = splits.train
544
+ train_scenes = set(
545
+ [
546
+ available_scenes_trainval[available_scene_names_trainval.index(s)]["token"]
547
+ for s in train_scenes
548
+ ]
549
+ )
550
+ test_scenes = splits.test
551
+ test_scenes = set(
552
+ [
553
+ available_scenes_test[available_scene_names_test.index(s)]["token"]
554
+ for s in test_scenes
555
+ ]
556
+ )
557
+ print("Filling trainval information...")
558
+ train_nusc_infos, val_nusc_infos = fill_trainval_infos(
559
+ config.dataset_root,
560
+ nusc_trainval,
561
+ train_scenes,
562
+ test=False,
563
+ max_sweeps=config.max_sweeps,
564
+ with_camera=config.with_camera,
565
+ )
566
+ print("Filling test information...")
567
+ test_nusc_infos, _ = fill_trainval_infos(
568
+ config.dataset_root,
569
+ nusc_test,
570
+ test_scenes,
571
+ test=True,
572
+ max_sweeps=config.max_sweeps,
573
+ with_camera=config.with_camera,
574
+ )
575
+
576
+ print("Saving nuScenes information...")
577
+ os.makedirs(os.path.join(config.output_root, "info"), exist_ok=True)
578
+ print(
579
+ f"train sample: {len(train_nusc_infos)}, val sample: {len(val_nusc_infos)}, test sample: {len(test_nusc_infos)}"
580
+ )
581
+ with open(
582
+ os.path.join(
583
+ config.output_root,
584
+ "info",
585
+ f"nuscenes_infos_{config.max_sweeps}sweeps_train.pkl",
586
+ ),
587
+ "wb",
588
+ ) as f:
589
+ pickle.dump(train_nusc_infos, f)
590
+ with open(
591
+ os.path.join(
592
+ config.output_root,
593
+ "info",
594
+ f"nuscenes_infos_{config.max_sweeps}sweeps_val.pkl",
595
+ ),
596
+ "wb",
597
+ ) as f:
598
+ pickle.dump(val_nusc_infos, f)
599
+ with open(
600
+ os.path.join(
601
+ config.output_root,
602
+ "info",
603
+ f"nuscenes_infos_{config.max_sweeps}sweeps_test.pkl",
604
+ ),
605
+ "wb",
606
+ ) as f:
607
+ pickle.dump(test_nusc_infos, f)
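For reference, a minimal sketch of consuming the generated info files; the path below is a hypothetical output_root/max_sweeps combination:

import pickle

with open("output_root/info/nuscenes_infos_10sweeps_train.pkl", "rb") as f:
    infos = pickle.load(f)

info = infos[0]
print(info["lidar_path"], info["timestamp"])
print(len(info["sweeps"]))     # always max_sweeps - 1 entries per sample
print(info["gt_boxes"].shape)  # (N, 9): x, y, z, dx, dy, dz, yaw, vx, vy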
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/s3dis/preprocess_s3dis.py ADDED
@@ -0,0 +1,233 @@
1
+ """
2
+ Preprocessing Script for S3DIS
3
+ Parsing normal vectors consumes a large amount of memory; reduce max_workers if memory is limited.
4
+
5
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
6
+ Please cite our work if the code is helpful to you.
7
+ """
8
+
9
+ import os
10
+ import argparse
11
+ import glob
12
+ import numpy as np
13
+
14
+ try:
15
+ import open3d
16
+ except ImportError:
17
+ import warnings
18
+
19
+ warnings.warn("Please install open3d for parsing normal")
20
+
21
+ try:
22
+ import trimesh
23
+ except ImportError:
24
+ import warnings
25
+
26
+ warnings.warn("Please install trimesh for parsing normal")
27
+
28
+ from concurrent.futures import ProcessPoolExecutor
29
+ from itertools import repeat
30
+
31
+ area_mesh_dict = {}
32
+
33
+
34
+ def parse_room(
35
+ room, angle, dataset_root, output_root, align_angle=True, parse_normal=False
36
+ ):
37
+ print("Parsing: {}".format(room))
38
+ classes = [
39
+ "ceiling",
40
+ "floor",
41
+ "wall",
42
+ "beam",
43
+ "column",
44
+ "window",
45
+ "door",
46
+ "table",
47
+ "chair",
48
+ "sofa",
49
+ "bookcase",
50
+ "board",
51
+ "clutter",
52
+ ]
53
+ class2label = {cls: i for i, cls in enumerate(classes)}
54
+ source_dir = os.path.join(dataset_root, room)
55
+ save_path = os.path.join(output_root, room)
56
+ os.makedirs(save_path, exist_ok=True)
57
+ object_path_list = sorted(glob.glob(os.path.join(source_dir, "Annotations/*.txt")))
58
+
59
+ room_coords = []
60
+ room_colors = []
61
+ room_normals = []
62
+ room_semantic_gt = []
63
+ room_instance_gt = []
64
+
65
+ for object_id, object_path in enumerate(object_path_list):
66
+ object_name = os.path.basename(object_path).split("_")[0]
67
+ obj = np.loadtxt(object_path)
68
+ coords = obj[:, :3]
69
+ colors = obj[:, 3:6]
70
+ # note: some rooms contain a 'stairs' class
71
+ class_name = object_name if object_name in classes else "clutter"
72
+ semantic_gt = np.repeat(class2label[class_name], coords.shape[0])
73
+ semantic_gt = semantic_gt.reshape([-1, 1])
74
+ instance_gt = np.repeat(object_id, coords.shape[0])
75
+ instance_gt = instance_gt.reshape([-1, 1])
76
+
77
+ room_coords.append(coords)
78
+ room_colors.append(colors)
79
+ room_semantic_gt.append(semantic_gt)
80
+ room_instance_gt.append(instance_gt)
81
+
82
+ room_coords = np.ascontiguousarray(np.vstack(room_coords))
83
+
84
+ if parse_normal:
85
+ x_min, z_max, y_min = np.min(room_coords, axis=0)
86
+ x_max, z_min, y_max = np.max(room_coords, axis=0)
87
+ z_max = -z_max
88
+ z_min = -z_min
89
+ max_bound = np.array([x_max, y_max, z_max]) + 0.1
90
+ min_bound = np.array([x_min, y_min, z_min]) - 0.1
91
+ bbox = open3d.geometry.AxisAlignedBoundingBox(
92
+ min_bound=min_bound, max_bound=max_bound
93
+ )
94
+ # crop room
95
+ room_mesh = (
96
+ area_mesh_dict[os.path.dirname(room)]
97
+ .crop(bbox)
98
+ .transform(
99
+ np.array([[1, 0, 0, 0], [0, 0, -1, 0], [0, 1, 0, 0], [0, 0, 0, 1]])
100
+ )
101
+ )
102
+ vertices = np.array(room_mesh.vertices)
103
+ faces = np.array(room_mesh.triangles)
104
+ vertex_normals = np.array(room_mesh.vertex_normals)
105
+ room_mesh = trimesh.Trimesh(
106
+ vertices=vertices, faces=faces, vertex_normals=vertex_normals
107
+ )
108
+ (closest_points, distances, face_id) = room_mesh.nearest.on_surface(room_coords)
109
+ room_normals = room_mesh.face_normals[face_id]
110
+
111
+ if align_angle:
112
+ angle = (2 - angle / 180) * np.pi
113
+ rot_cos, rot_sin = np.cos(angle), np.sin(angle)
114
+ rot_t = np.array([[rot_cos, -rot_sin, 0], [rot_sin, rot_cos, 0], [0, 0, 1]])
115
+ room_center = (np.max(room_coords, axis=0) + np.min(room_coords, axis=0)) / 2
116
+ room_coords = (room_coords - room_center) @ np.transpose(rot_t) + room_center
117
+ if parse_normal:
118
+ room_normals = room_normals @ np.transpose(rot_t)
119
+
120
+ room_colors = np.ascontiguousarray(np.vstack(room_colors))
121
+ room_semantic_gt = np.ascontiguousarray(np.vstack(room_semantic_gt))
122
+ room_instance_gt = np.ascontiguousarray(np.vstack(room_instance_gt))
123
+ np.save(os.path.join(save_path, "coord.npy"), room_coords.astype(np.float32))
124
+ np.save(os.path.join(save_path, "color.npy"), room_colors.astype(np.uint8))
125
+ np.save(os.path.join(save_path, "segment.npy"), room_semantic_gt.astype(np.int16))
126
+ np.save(os.path.join(save_path, "instance.npy"), room_instance_gt.astype(np.int16))
127
+
128
+ if parse_normal:
129
+ np.save(os.path.join(save_path, "normal.npy"), room_normals.astype(np.float32))
130
+
131
+
132
+ def main_process():
133
+ parser = argparse.ArgumentParser()
134
+ parser.add_argument(
135
+ "--splits",
136
+ required=True,
137
+ nargs="+",
138
+ choices=["Area_1", "Area_2", "Area_3", "Area_4", "Area_5", "Area_6"],
139
+ help="Splits need to process ([Area_1, Area_2, Area_3, Area_4, Area_5, Area_6]).",
140
+ )
141
+ parser.add_argument(
142
+ "--dataset_root", required=True, help="Path to Stanford3dDataset_v1.2 dataset"
143
+ )
144
+ parser.add_argument(
145
+ "--output_root",
146
+ required=True,
147
+ help="Output path where area folders will be located",
148
+ )
149
+ parser.add_argument(
150
+ "--raw_root",
151
+ default=None,
152
+ help="Path to Stanford2d3dDataset_noXYZ dataset (optional)",
153
+ )
154
+ parser.add_argument(
155
+ "--align_angle", action="store_true", help="Whether align room angles"
156
+ )
157
+ parser.add_argument(
158
+ "--parse_normal", action="store_true", help="Whether process normal"
159
+ )
160
+ parser.add_argument(
161
+ "--num_workers", default=1, type=int, help="Num workers for preprocessing."
162
+ )
163
+ args = parser.parse_args()
164
+
165
+ if args.parse_normal:
166
+ assert args.raw_root is not None
167
+
168
+ room_list = []
169
+ angle_list = []
170
+
171
+ # Load room information
172
+ print("Loading room information ...")
173
+ for split in args.splits:
174
+ area_info = np.loadtxt(
175
+ os.path.join(
176
+ args.dataset_root,
177
+ split,
178
+ f"{split}_alignmentAngle.txt",
179
+ ),
180
+ dtype=str,
181
+ )
182
+ room_list += [os.path.join(split, room_info[0]) for room_info in area_info]
183
+ angle_list += [int(room_info[1]) for room_info in area_info]
184
+
185
+ if args.parse_normal:
186
+ # load raw mesh file to extract normal
187
+ print("Loading raw mesh file ...")
188
+ for split in args.splits:
189
+ if split != "Area_5":
190
+ mesh_dir = os.path.join(args.raw_root, split, "3d", "rgb.obj")
191
+ mesh = open3d.io.read_triangle_mesh(mesh_dir)
192
+ mesh.triangle_uvs.clear()
193
+ else:
194
+ mesh_a_dir = os.path.join(args.raw_root, f"{split}a", "3d", "rgb.obj")
195
+ mesh_b_dir = os.path.join(args.raw_root, f"{split}b", "3d", "rgb.obj")
196
+ mesh_a = open3d.io.read_triangle_mesh(mesh_a_dir)
197
+ mesh_a.triangle_uvs.clear()
198
+ mesh_b = open3d.io.read_triangle_mesh(mesh_b_dir)
199
+ mesh_b.triangle_uvs.clear()
200
+ mesh_b = mesh_b.transform(
201
+ np.array(
202
+ [
203
+ [0, 0, -1, -4.09703582],
204
+ [0, 1, 0, 0],
205
+ [1, 0, 0, -6.22617759],
206
+ [0, 0, 0, 1],
207
+ ]
208
+ )
209
+ )
210
+ mesh = mesh_a + mesh_b
211
+ area_mesh_dict[split] = mesh
212
+ print(f"{split} mesh is loaded")
213
+
214
+ # Preprocess data.
215
+ print("Processing scenes...")
216
+ pool = ProcessPoolExecutor(
217
+ max_workers=args.num_workers
218
+ ) # peak 110G memory when parsing normal.
219
+ _ = list(
220
+ pool.map(
221
+ parse_room,
222
+ room_list,
223
+ angle_list,
224
+ repeat(args.dataset_root),
225
+ repeat(args.output_root),
226
+ repeat(args.align_angle),
227
+ repeat(args.parse_normal),
228
+ )
229
+ )
230
+
231
+
232
+ if __name__ == "__main__":
233
+ main_process()
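A minimal sketch of loading a room written by this script (room path hypothetical; shapes follow the np.save calls above):

import numpy as np

room = "output_root/Area_1/office_1"
coord = np.load(f"{room}/coord.npy")        # (N, 3) float32 xyz
color = np.load(f"{room}/color.npy")        # (N, 3) uint8 rgb
segment = np.load(f"{room}/segment.npy")    # (N, 1) int16, 13-class semantic ids
instance = np.load(f"{room}/instance.npy")  # (N, 1) int16, per-object ids
print(coord.shape, np.unique(segment))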
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/sampling_chunking_data.py ADDED
@@ -0,0 +1,149 @@
1
+ """
2
+ Chunking Data
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ import argparse
10
+ import numpy as np
11
+ import multiprocessing as mp
12
+ from concurrent.futures import ProcessPoolExecutor
13
+ from itertools import repeat
14
+ from pathlib import Path
15
+
16
+
17
+ def chunking_scene(
18
+ name,
19
+ dataset_root,
20
+ split,
21
+ grid_size=None,
22
+ chunk_range=(6, 6),
23
+ chunk_stride=(3, 3),
24
+ chunk_minimum_size=10000,
25
+ ):
26
+ print(f"Chunking scene {name} in {split} split")
27
+ dataset_root = Path(dataset_root)
28
+ scene_path = dataset_root / split / name
29
+ assets = os.listdir(scene_path)
30
+ data_dict = dict()
31
+ for asset in assets:
32
+ if not asset.endswith(".npy"):
33
+ continue
34
+ data_dict[asset[:-4]] = np.load(scene_path / asset)
35
+ coord = data_dict["coord"] - data_dict["coord"].min(axis=0)
36
+
37
+ if grid_size is not None:
38
+ grid_coord = np.floor(coord / grid_size).astype(int)
39
+ _, idx = np.unique(grid_coord, axis=0, return_index=True)
40
+ coord = coord[idx]
41
+ for key in data_dict.keys():
42
+ data_dict[key] = data_dict[key][idx]
43
+
44
+ bev_range = coord.max(axis=0)[:2]
45
+ x, y = np.meshgrid(
46
+ np.arange(0, bev_range[0] + chunk_stride[0] - chunk_range[0], chunk_stride[0]),
47
+ np.arange(0, bev_range[1] + chunk_stride[1] - chunk_range[1], chunk_stride[1]),
48
+ indexing="ij",
49
+ )
50
+ chunks = np.concatenate([x.reshape([-1, 1]), y.reshape([-1, 1])], axis=-1)
51
+ chunk_idx = 0
52
+ for chunk in chunks:
53
+ mask = (
54
+ (coord[:, 0] >= chunk[0])
55
+ & (coord[:, 0] < chunk[0] + chunk_range[0])
56
+ & (coord[:, 1] >= chunk[1])
57
+ & (coord[:, 1] < chunk[1] + chunk_range[1])
58
+ )
59
+ if np.sum(mask) < chunk_minimum_size:
60
+ continue
61
+
62
+ chunk_data_name = f"{name}_{chunk_idx}"
63
+ if grid_size is not None:
64
+ chunk_split_name = (
65
+ f"{split}_"
66
+ f"grid{grid_size * 100:.0f}mm_"
67
+ f"chunk{chunk_range[0]}x{chunk_range[1]}_"
68
+ f"stride{chunk_stride[0]}x{chunk_stride[1]}"
69
+ )
70
+ else:
71
+ chunk_split_name = (
72
+ f"{split}_"
73
+ f"chunk{chunk_range[0]}x{chunk_range[1]}_"
74
+ f"stride{chunk_stride[0]}x{chunk_stride[1]}"
75
+ )
76
+
77
+ chunk_save_path = dataset_root / chunk_split_name / chunk_data_name
78
+ chunk_save_path.mkdir(parents=True, exist_ok=True)
79
+ for key in data_dict.keys():
80
+ np.save(chunk_save_path / f"{key}.npy", data_dict[key][mask])
81
+ chunk_idx += 1
82
+
83
+
84
+ if __name__ == "__main__":
85
+ parser = argparse.ArgumentParser()
86
+ parser.add_argument(
87
+ "--dataset_root",
88
+ required=True,
89
+ help="Path to the Pointcept processed ScanNet++ dataset.",
90
+ )
91
+ parser.add_argument(
92
+ "--split",
93
+ required=True,
94
+ default="train",
95
+ type=str,
96
+ help="Split need to process.",
97
+ )
98
+ parser.add_argument(
99
+ "--grid_size",
100
+ default=None,
101
+ type=float,
102
+ help="Grid size for initial grid sampling",
103
+ )
104
+ parser.add_argument(
105
+ "--chunk_range",
106
+ default=[6, 6],
107
+ type=int,
108
+ nargs="+",
109
+ help="Range of each chunk, e.g. --chunk_range 6 6",
110
+ )
111
+ parser.add_argument(
112
+ "--chunk_stride",
113
+ default=[3, 3],
114
+ type=int,
115
+ nargs="+",
116
+ help="Stride of each chunk, e.g. --chunk_stride 3 3",
117
+ )
118
+ parser.add_argument(
119
+ "--chunk_minimum_size",
120
+ default=10000,
121
+ type=int,
122
+ help="Minimum number of points in each chunk",
123
+ )
124
+ parser.add_argument(
125
+ "--num_workers",
126
+ default=mp.cpu_count(),
127
+ type=int,
128
+ help="Num workers for preprocessing.",
129
+ )
130
+
131
+ config = parser.parse_args()
132
+ config.dataset_root = Path(config.dataset_root)
133
+ data_list = os.listdir(config.dataset_root / config.split)
134
+
135
+ print("Processing scenes...")
136
+ pool = ProcessPoolExecutor(max_workers=config.num_workers)
137
+ _ = list(
138
+ pool.map(
139
+ chunking_scene,
140
+ data_list,
141
+ repeat(config.dataset_root),
142
+ repeat(config.split),
143
+ repeat(config.grid_size),
144
+ repeat(config.chunk_range),
145
+ repeat(config.chunk_stride),
146
+ repeat(config.chunk_minimum_size),
147
+ )
148
+ )
149
+ pool.shutdown()
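To illustrate the overlapping-window logic above, a small sketch of the chunk origins produced by the meshgrid for a toy scene (pure NumPy):

import numpy as np

# A 10 m x 8 m scene with 6 m chunks at 3 m stride yields origins
# 0, 3, 6 along x and 0, 3 along y: six overlapping chunks in total.
bev_range = np.array([10.0, 8.0])
chunk_range, chunk_stride = (6, 6), (3, 3)
x, y = np.meshgrid(
    np.arange(0, bev_range[0] + chunk_stride[0] - chunk_range[0], chunk_stride[0]),
    np.arange(0, bev_range[1] + chunk_stride[1] - chunk_range[1], chunk_stride[1]),
    indexing="ij",
)
print(np.concatenate([x.reshape(-1, 1), y.reshape(-1, 1)], axis=-1))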
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_ObjClassification-ShapeNetCore55.txt ADDED
@@ -0,0 +1,17 @@
1
+ 1 trash
2
+ 3 basket
3
+ 4 bathtub
4
+ 5 bed
5
+ 9 shelf
6
+ 13 cabinet
7
+ 18 chair
8
+ 20 keyboard
9
+ 22 tv
10
+ 30 lamp
11
+ 31 laptop
12
+ 35 microwave
13
+ 39 pillow
14
+ 42 printer
15
+ 47 sofa
16
+ 48 stove
17
+ 49 table
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/classes_SemVoxLabel-nyu40id.txt ADDED
@@ -0,0 +1,20 @@
1
+ 1 wall
2
+ 2 floor
3
+ 3 cabinet
4
+ 4 bed
5
+ 5 chair
6
+ 6 sofa
7
+ 7 table
8
+ 8 door
9
+ 9 window
10
+ 10 bookshelf
11
+ 11 picture
12
+ 12 counter
13
+ 14 desk
14
+ 16 curtain
15
+ 24 refridgerator
16
+ 28 shower curtain
17
+ 33 toilet
18
+ 34 sink
19
+ 36 bathtub
20
+ 39 otherfurniture
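Both meta files above share the same whitespace-separated id/name format; a minimal parsing sketch (filename assumed to match the file above):

def load_label_map(path):
    # each line: "<id> <class name>"; the name may itself contain spaces
    label_map = {}
    with open(path) as f:
        for line in f:
            idx, name = line.strip().split(" ", 1)
            label_map[int(idx)] = name
    return label_map

print(load_label_map("classes_SemVoxLabel-nyu40id.txt")[36])  # bathtub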
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet200_constants.py ADDED
@@ -0,0 +1,704 @@
1
+ # ScanNet Benchmark constants
2
+ VALID_CLASS_IDS_20 = (
3
+ 1,
4
+ 2,
5
+ 3,
6
+ 4,
7
+ 5,
8
+ 6,
9
+ 7,
10
+ 8,
11
+ 9,
12
+ 10,
13
+ 11,
14
+ 12,
15
+ 14,
16
+ 16,
17
+ 24,
18
+ 28,
19
+ 33,
20
+ 34,
21
+ 36,
22
+ 39,
23
+ )
24
+
25
+ CLASS_LABELS_20 = (
26
+ "wall",
27
+ "floor",
28
+ "cabinet",
29
+ "bed",
30
+ "chair",
31
+ "sofa",
32
+ "table",
33
+ "door",
34
+ "window",
35
+ "bookshelf",
36
+ "picture",
37
+ "counter",
38
+ "desk",
39
+ "curtain",
40
+ "refrigerator",
41
+ "shower curtain",
42
+ "toilet",
43
+ "sink",
44
+ "bathtub",
45
+ "otherfurniture",
46
+ )
47
+
48
+ SCANNET_COLOR_MAP_20 = {
49
+ 0: (0.0, 0.0, 0.0),
50
+ 1: (174.0, 199.0, 232.0),
51
+ 2: (152.0, 223.0, 138.0),
52
+ 3: (31.0, 119.0, 180.0),
53
+ 4: (255.0, 187.0, 120.0),
54
+ 5: (188.0, 189.0, 34.0),
55
+ 6: (140.0, 86.0, 75.0),
56
+ 7: (255.0, 152.0, 150.0),
57
+ 8: (214.0, 39.0, 40.0),
58
+ 9: (197.0, 176.0, 213.0),
59
+ 10: (148.0, 103.0, 189.0),
60
+ 11: (196.0, 156.0, 148.0),
61
+ 12: (23.0, 190.0, 207.0),
62
+ 14: (247.0, 182.0, 210.0),
63
+ 15: (66.0, 188.0, 102.0),
64
+ 16: (219.0, 219.0, 141.0),
65
+ 17: (140.0, 57.0, 197.0),
66
+ 18: (202.0, 185.0, 52.0),
67
+ 19: (51.0, 176.0, 203.0),
68
+ 20: (200.0, 54.0, 131.0),
69
+ 21: (92.0, 193.0, 61.0),
70
+ 22: (78.0, 71.0, 183.0),
71
+ 23: (172.0, 114.0, 82.0),
72
+ 24: (255.0, 127.0, 14.0),
73
+ 25: (91.0, 163.0, 138.0),
74
+ 26: (153.0, 98.0, 156.0),
75
+ 27: (140.0, 153.0, 101.0),
76
+ 28: (158.0, 218.0, 229.0),
77
+ 29: (100.0, 125.0, 154.0),
78
+ 30: (178.0, 127.0, 135.0),
79
+ 32: (146.0, 111.0, 194.0),
80
+ 33: (44.0, 160.0, 44.0),
81
+ 34: (112.0, 128.0, 144.0),
82
+ 35: (96.0, 207.0, 209.0),
83
+ 36: (227.0, 119.0, 194.0),
84
+ 37: (213.0, 92.0, 176.0),
85
+ 38: (94.0, 106.0, 211.0),
86
+ 39: (82.0, 84.0, 163.0),
87
+ 40: (100.0, 85.0, 144.0),
88
+ }
89
+
90
+ # ScanNet200 Benchmark constants
91
+ VALID_CLASS_IDS_200 = (
92
+ 1,
93
+ 2,
94
+ 3,
95
+ 4,
96
+ 5,
97
+ 6,
98
+ 7,
99
+ 8,
100
+ 9,
101
+ 10,
102
+ 11,
103
+ 13,
104
+ 14,
105
+ 15,
106
+ 16,
107
+ 17,
108
+ 18,
109
+ 19,
110
+ 21,
111
+ 22,
112
+ 23,
113
+ 24,
114
+ 26,
115
+ 27,
116
+ 28,
117
+ 29,
118
+ 31,
119
+ 32,
120
+ 33,
121
+ 34,
122
+ 35,
123
+ 36,
124
+ 38,
125
+ 39,
126
+ 40,
127
+ 41,
128
+ 42,
129
+ 44,
130
+ 45,
131
+ 46,
132
+ 47,
133
+ 48,
134
+ 49,
135
+ 50,
136
+ 51,
137
+ 52,
138
+ 54,
139
+ 55,
140
+ 56,
141
+ 57,
142
+ 58,
143
+ 59,
144
+ 62,
145
+ 63,
146
+ 64,
147
+ 65,
148
+ 66,
149
+ 67,
150
+ 68,
151
+ 69,
152
+ 70,
153
+ 71,
154
+ 72,
155
+ 73,
156
+ 74,
157
+ 75,
158
+ 76,
159
+ 77,
160
+ 78,
161
+ 79,
162
+ 80,
163
+ 82,
164
+ 84,
165
+ 86,
166
+ 87,
167
+ 88,
168
+ 89,
169
+ 90,
170
+ 93,
171
+ 95,
172
+ 96,
173
+ 97,
174
+ 98,
175
+ 99,
176
+ 100,
177
+ 101,
178
+ 102,
179
+ 103,
180
+ 104,
181
+ 105,
182
+ 106,
183
+ 107,
184
+ 110,
185
+ 112,
186
+ 115,
187
+ 116,
188
+ 118,
189
+ 120,
190
+ 121,
191
+ 122,
192
+ 125,
193
+ 128,
194
+ 130,
195
+ 131,
196
+ 132,
197
+ 134,
198
+ 136,
199
+ 138,
200
+ 139,
201
+ 140,
202
+ 141,
203
+ 145,
204
+ 148,
205
+ 154,
206
+ 155,
207
+ 156,
208
+ 157,
209
+ 159,
210
+ 161,
211
+ 163,
212
+ 165,
213
+ 166,
214
+ 168,
215
+ 169,
216
+ 170,
217
+ 177,
218
+ 180,
219
+ 185,
220
+ 188,
221
+ 191,
222
+ 193,
223
+ 195,
224
+ 202,
225
+ 208,
226
+ 213,
227
+ 214,
228
+ 221,
229
+ 229,
230
+ 230,
231
+ 232,
232
+ 233,
233
+ 242,
234
+ 250,
235
+ 261,
236
+ 264,
237
+ 276,
238
+ 283,
239
+ 286,
240
+ 300,
241
+ 304,
242
+ 312,
243
+ 323,
244
+ 325,
245
+ 331,
246
+ 342,
247
+ 356,
248
+ 370,
249
+ 392,
250
+ 395,
251
+ 399,
252
+ 408,
253
+ 417,
254
+ 488,
255
+ 540,
256
+ 562,
257
+ 570,
258
+ 572,
259
+ 581,
260
+ 609,
261
+ 748,
262
+ 776,
263
+ 1156,
264
+ 1163,
265
+ 1164,
266
+ 1165,
267
+ 1166,
268
+ 1167,
269
+ 1168,
270
+ 1169,
271
+ 1170,
272
+ 1171,
273
+ 1172,
274
+ 1173,
275
+ 1174,
276
+ 1175,
277
+ 1176,
278
+ 1178,
279
+ 1179,
280
+ 1180,
281
+ 1181,
282
+ 1182,
283
+ 1183,
284
+ 1184,
285
+ 1185,
286
+ 1186,
287
+ 1187,
288
+ 1188,
289
+ 1189,
290
+ 1190,
291
+ 1191,
292
+ )
293
+
294
+ CLASS_LABELS_200 = (
295
+ "wall",
296
+ "chair",
297
+ "floor",
298
+ "table",
299
+ "door",
300
+ "couch",
301
+ "cabinet",
302
+ "shelf",
303
+ "desk",
304
+ "office chair",
305
+ "bed",
306
+ "pillow",
307
+ "sink",
308
+ "picture",
309
+ "window",
310
+ "toilet",
311
+ "bookshelf",
312
+ "monitor",
313
+ "curtain",
314
+ "book",
315
+ "armchair",
316
+ "coffee table",
317
+ "box",
318
+ "refrigerator",
319
+ "lamp",
320
+ "kitchen cabinet",
321
+ "towel",
322
+ "clothes",
323
+ "tv",
324
+ "nightstand",
325
+ "counter",
326
+ "dresser",
327
+ "stool",
328
+ "cushion",
329
+ "plant",
330
+ "ceiling",
331
+ "bathtub",
332
+ "end table",
333
+ "dining table",
334
+ "keyboard",
335
+ "bag",
336
+ "backpack",
337
+ "toilet paper",
338
+ "printer",
339
+ "tv stand",
340
+ "whiteboard",
341
+ "blanket",
342
+ "shower curtain",
343
+ "trash can",
344
+ "closet",
345
+ "stairs",
346
+ "microwave",
347
+ "stove",
348
+ "shoe",
349
+ "computer tower",
350
+ "bottle",
351
+ "bin",
352
+ "ottoman",
353
+ "bench",
354
+ "board",
355
+ "washing machine",
356
+ "mirror",
357
+ "copier",
358
+ "basket",
359
+ "sofa chair",
360
+ "file cabinet",
361
+ "fan",
362
+ "laptop",
363
+ "shower",
364
+ "paper",
365
+ "person",
366
+ "paper towel dispenser",
367
+ "oven",
368
+ "blinds",
369
+ "rack",
370
+ "plate",
371
+ "blackboard",
372
+ "piano",
373
+ "suitcase",
374
+ "rail",
375
+ "radiator",
376
+ "recycling bin",
377
+ "container",
378
+ "wardrobe",
379
+ "soap dispenser",
380
+ "telephone",
381
+ "bucket",
382
+ "clock",
383
+ "stand",
384
+ "light",
385
+ "laundry basket",
386
+ "pipe",
387
+ "clothes dryer",
388
+ "guitar",
389
+ "toilet paper holder",
390
+ "seat",
391
+ "speaker",
392
+ "column",
393
+ "bicycle",
394
+ "ladder",
395
+ "bathroom stall",
396
+ "shower wall",
397
+ "cup",
398
+ "jacket",
399
+ "storage bin",
400
+ "coffee maker",
401
+ "dishwasher",
402
+ "paper towel roll",
403
+ "machine",
404
+ "mat",
405
+ "windowsill",
406
+ "bar",
407
+ "toaster",
408
+ "bulletin board",
409
+ "ironing board",
410
+ "fireplace",
411
+ "soap dish",
412
+ "kitchen counter",
413
+ "doorframe",
414
+ "toilet paper dispenser",
415
+ "mini fridge",
416
+ "fire extinguisher",
417
+ "ball",
418
+ "hat",
419
+ "shower curtain rod",
420
+ "water cooler",
421
+ "paper cutter",
422
+ "tray",
423
+ "shower door",
424
+ "pillar",
425
+ "ledge",
426
+ "toaster oven",
427
+ "mouse",
428
+ "toilet seat cover dispenser",
429
+ "furniture",
430
+ "cart",
431
+ "storage container",
432
+ "scale",
433
+ "tissue box",
434
+ "light switch",
435
+ "crate",
436
+ "power outlet",
437
+ "decoration",
438
+ "sign",
439
+ "projector",
440
+ "closet door",
441
+ "vacuum cleaner",
442
+ "candle",
443
+ "plunger",
444
+ "stuffed animal",
445
+ "headphones",
446
+ "dish rack",
447
+ "broom",
448
+ "guitar case",
449
+ "range hood",
450
+ "dustpan",
451
+ "hair dryer",
452
+ "water bottle",
453
+ "handicap bar",
454
+ "purse",
455
+ "vent",
456
+ "shower floor",
457
+ "water pitcher",
458
+ "mailbox",
459
+ "bowl",
460
+ "paper bag",
461
+ "alarm clock",
462
+ "music stand",
463
+ "projector screen",
464
+ "divider",
465
+ "laundry detergent",
466
+ "bathroom counter",
467
+ "object",
468
+ "bathroom vanity",
469
+ "closet wall",
470
+ "laundry hamper",
471
+ "bathroom stall door",
472
+ "ceiling light",
473
+ "trash bin",
474
+ "dumbbell",
475
+ "stair rail",
476
+ "tube",
477
+ "bathroom cabinet",
478
+ "cd case",
479
+ "closet rod",
480
+ "coffee kettle",
481
+ "structure",
482
+ "shower head",
483
+ "keyboard piano",
484
+ "case of water bottles",
485
+ "coat rack",
486
+ "storage organizer",
487
+ "folded chair",
488
+ "fire alarm",
489
+ "power strip",
490
+ "calendar",
491
+ "poster",
492
+ "potted plant",
493
+ "luggage",
494
+ "mattress",
495
+ )
496
+
497
+ SCANNET_COLOR_MAP_200 = {
498
+ 0: (0.0, 0.0, 0.0),
499
+ 1: (174.0, 199.0, 232.0),
500
+ 2: (188.0, 189.0, 34.0),
501
+ 3: (152.0, 223.0, 138.0),
502
+ 4: (255.0, 152.0, 150.0),
503
+ 5: (214.0, 39.0, 40.0),
504
+ 6: (91.0, 135.0, 229.0),
505
+ 7: (31.0, 119.0, 180.0),
506
+ 8: (229.0, 91.0, 104.0),
507
+ 9: (247.0, 182.0, 210.0),
508
+ 10: (91.0, 229.0, 110.0),
509
+ 11: (255.0, 187.0, 120.0),
510
+ 13: (141.0, 91.0, 229.0),
511
+ 14: (112.0, 128.0, 144.0),
512
+ 15: (196.0, 156.0, 148.0),
513
+ 16: (197.0, 176.0, 213.0),
514
+ 17: (44.0, 160.0, 44.0),
515
+ 18: (148.0, 103.0, 189.0),
516
+ 19: (229.0, 91.0, 223.0),
517
+ 21: (219.0, 219.0, 141.0),
518
+ 22: (192.0, 229.0, 91.0),
519
+ 23: (88.0, 218.0, 137.0),
520
+ 24: (58.0, 98.0, 137.0),
521
+ 26: (177.0, 82.0, 239.0),
522
+ 27: (255.0, 127.0, 14.0),
523
+ 28: (237.0, 204.0, 37.0),
524
+ 29: (41.0, 206.0, 32.0),
525
+ 31: (62.0, 143.0, 148.0),
526
+ 32: (34.0, 14.0, 130.0),
527
+ 33: (143.0, 45.0, 115.0),
528
+ 34: (137.0, 63.0, 14.0),
529
+ 35: (23.0, 190.0, 207.0),
530
+ 36: (16.0, 212.0, 139.0),
531
+ 38: (90.0, 119.0, 201.0),
532
+ 39: (125.0, 30.0, 141.0),
533
+ 40: (150.0, 53.0, 56.0),
534
+ 41: (186.0, 197.0, 62.0),
535
+ 42: (227.0, 119.0, 194.0),
536
+ 44: (38.0, 100.0, 128.0),
537
+ 45: (120.0, 31.0, 243.0),
538
+ 46: (154.0, 59.0, 103.0),
539
+ 47: (169.0, 137.0, 78.0),
540
+ 48: (143.0, 245.0, 111.0),
541
+ 49: (37.0, 230.0, 205.0),
542
+ 50: (14.0, 16.0, 155.0),
543
+ 51: (196.0, 51.0, 182.0),
544
+ 52: (237.0, 80.0, 38.0),
545
+ 54: (138.0, 175.0, 62.0),
546
+ 55: (158.0, 218.0, 229.0),
547
+ 56: (38.0, 96.0, 167.0),
548
+ 57: (190.0, 77.0, 246.0),
549
+ 58: (208.0, 49.0, 84.0),
550
+ 59: (208.0, 193.0, 72.0),
551
+ 62: (55.0, 220.0, 57.0),
552
+ 63: (10.0, 125.0, 140.0),
553
+ 64: (76.0, 38.0, 202.0),
554
+ 65: (191.0, 28.0, 135.0),
555
+ 66: (211.0, 120.0, 42.0),
556
+ 67: (118.0, 174.0, 76.0),
557
+ 68: (17.0, 242.0, 171.0),
558
+ 69: (20.0, 65.0, 247.0),
559
+ 70: (208.0, 61.0, 222.0),
560
+ 71: (162.0, 62.0, 60.0),
561
+ 72: (210.0, 235.0, 62.0),
562
+ 73: (45.0, 152.0, 72.0),
563
+ 74: (35.0, 107.0, 149.0),
564
+ 75: (160.0, 89.0, 237.0),
565
+ 76: (227.0, 56.0, 125.0),
566
+ 77: (169.0, 143.0, 81.0),
567
+ 78: (42.0, 143.0, 20.0),
568
+ 79: (25.0, 160.0, 151.0),
569
+ 80: (82.0, 75.0, 227.0),
570
+ 82: (253.0, 59.0, 222.0),
571
+ 84: (240.0, 130.0, 89.0),
572
+ 86: (123.0, 172.0, 47.0),
573
+ 87: (71.0, 194.0, 133.0),
574
+ 88: (24.0, 94.0, 205.0),
575
+ 89: (134.0, 16.0, 179.0),
576
+ 90: (159.0, 32.0, 52.0),
577
+ 93: (213.0, 208.0, 88.0),
578
+ 95: (64.0, 158.0, 70.0),
579
+ 96: (18.0, 163.0, 194.0),
580
+ 97: (65.0, 29.0, 153.0),
581
+ 98: (177.0, 10.0, 109.0),
582
+ 99: (152.0, 83.0, 7.0),
583
+ 100: (83.0, 175.0, 30.0),
584
+ 101: (18.0, 199.0, 153.0),
585
+ 102: (61.0, 81.0, 208.0),
586
+ 103: (213.0, 85.0, 216.0),
587
+ 104: (170.0, 53.0, 42.0),
588
+ 105: (161.0, 192.0, 38.0),
589
+ 106: (23.0, 241.0, 91.0),
590
+ 107: (12.0, 103.0, 170.0),
591
+ 110: (151.0, 41.0, 245.0),
592
+ 112: (133.0, 51.0, 80.0),
593
+ 115: (184.0, 162.0, 91.0),
594
+ 116: (50.0, 138.0, 38.0),
595
+ 118: (31.0, 237.0, 236.0),
596
+ 120: (39.0, 19.0, 208.0),
597
+ 121: (223.0, 27.0, 180.0),
598
+ 122: (254.0, 141.0, 85.0),
599
+ 125: (97.0, 144.0, 39.0),
600
+ 128: (106.0, 231.0, 176.0),
601
+ 130: (12.0, 61.0, 162.0),
602
+ 131: (124.0, 66.0, 140.0),
603
+ 132: (137.0, 66.0, 73.0),
604
+ 134: (250.0, 253.0, 26.0),
605
+ 136: (55.0, 191.0, 73.0),
606
+ 138: (60.0, 126.0, 146.0),
607
+ 139: (153.0, 108.0, 234.0),
608
+ 140: (184.0, 58.0, 125.0),
609
+ 141: (135.0, 84.0, 14.0),
610
+ 145: (139.0, 248.0, 91.0),
611
+ 148: (53.0, 200.0, 172.0),
612
+ 154: (63.0, 69.0, 134.0),
613
+ 155: (190.0, 75.0, 186.0),
614
+ 156: (127.0, 63.0, 52.0),
615
+ 157: (141.0, 182.0, 25.0),
616
+ 159: (56.0, 144.0, 89.0),
617
+ 161: (64.0, 160.0, 250.0),
618
+ 163: (182.0, 86.0, 245.0),
619
+ 165: (139.0, 18.0, 53.0),
620
+ 166: (134.0, 120.0, 54.0),
621
+ 168: (49.0, 165.0, 42.0),
622
+ 169: (51.0, 128.0, 133.0),
623
+ 170: (44.0, 21.0, 163.0),
624
+ 177: (232.0, 93.0, 193.0),
625
+ 180: (176.0, 102.0, 54.0),
626
+ 185: (116.0, 217.0, 17.0),
627
+ 188: (54.0, 209.0, 150.0),
628
+ 191: (60.0, 99.0, 204.0),
629
+ 193: (129.0, 43.0, 144.0),
630
+ 195: (252.0, 100.0, 106.0),
631
+ 202: (187.0, 196.0, 73.0),
632
+ 208: (13.0, 158.0, 40.0),
633
+ 213: (52.0, 122.0, 152.0),
634
+ 214: (128.0, 76.0, 202.0),
635
+ 221: (187.0, 50.0, 115.0),
636
+ 229: (180.0, 141.0, 71.0),
637
+ 230: (77.0, 208.0, 35.0),
638
+ 232: (72.0, 183.0, 168.0),
639
+ 233: (97.0, 99.0, 203.0),
640
+ 242: (172.0, 22.0, 158.0),
641
+ 250: (155.0, 64.0, 40.0),
642
+ 261: (118.0, 159.0, 30.0),
643
+ 264: (69.0, 252.0, 148.0),
644
+ 276: (45.0, 103.0, 173.0),
645
+ 283: (111.0, 38.0, 149.0),
646
+ 286: (184.0, 9.0, 49.0),
647
+ 300: (188.0, 174.0, 67.0),
648
+ 304: (53.0, 206.0, 53.0),
649
+ 312: (97.0, 235.0, 252.0),
650
+ 323: (66.0, 32.0, 182.0),
651
+ 325: (236.0, 114.0, 195.0),
652
+ 331: (241.0, 154.0, 83.0),
653
+ 342: (133.0, 240.0, 52.0),
654
+ 356: (16.0, 205.0, 144.0),
655
+ 370: (75.0, 101.0, 198.0),
656
+ 392: (237.0, 95.0, 251.0),
657
+ 395: (191.0, 52.0, 49.0),
658
+ 399: (227.0, 254.0, 54.0),
659
+ 408: (49.0, 206.0, 87.0),
660
+ 417: (48.0, 113.0, 150.0),
661
+ 488: (125.0, 73.0, 182.0),
662
+ 540: (229.0, 32.0, 114.0),
663
+ 562: (158.0, 119.0, 28.0),
664
+ 570: (60.0, 205.0, 27.0),
665
+ 572: (18.0, 215.0, 201.0),
666
+ 581: (79.0, 76.0, 153.0),
667
+ 609: (134.0, 13.0, 116.0),
668
+ 748: (192.0, 97.0, 63.0),
669
+ 776: (108.0, 163.0, 18.0),
670
+ 1156: (95.0, 220.0, 156.0),
671
+ 1163: (98.0, 141.0, 208.0),
672
+ 1164: (144.0, 19.0, 193.0),
673
+ 1165: (166.0, 36.0, 57.0),
674
+ 1166: (212.0, 202.0, 34.0),
675
+ 1167: (23.0, 206.0, 34.0),
676
+ 1168: (91.0, 211.0, 236.0),
677
+ 1169: (79.0, 55.0, 137.0),
678
+ 1170: (182.0, 19.0, 117.0),
679
+ 1171: (134.0, 76.0, 14.0),
680
+ 1172: (87.0, 185.0, 28.0),
681
+ 1173: (82.0, 224.0, 187.0),
682
+ 1174: (92.0, 110.0, 214.0),
683
+ 1175: (168.0, 80.0, 171.0),
684
+ 1176: (197.0, 63.0, 51.0),
685
+ 1178: (175.0, 199.0, 77.0),
686
+ 1179: (62.0, 180.0, 98.0),
687
+ 1180: (8.0, 91.0, 150.0),
688
+ 1181: (77.0, 15.0, 130.0),
689
+ 1182: (154.0, 65.0, 96.0),
690
+ 1183: (197.0, 152.0, 11.0),
691
+ 1184: (59.0, 155.0, 45.0),
692
+ 1185: (12.0, 147.0, 145.0),
693
+ 1186: (54.0, 35.0, 219.0),
694
+ 1187: (210.0, 73.0, 181.0),
695
+ 1188: (221.0, 124.0, 77.0),
696
+ 1189: (149.0, 214.0, 66.0),
697
+ 1190: (72.0, 185.0, 134.0),
698
+ 1191: (42.0, 94.0, 198.0),
699
+ }
700
+
701
+ # Non-object ("stuff") categories used for instance/panoptic segmentation
702
+ VALID_PANOPTIC_IDS = (1, 3)
703
+
704
+ CLASS_LABELS_PANOPTIC = ("wall", "floor")
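A minimal sketch of using the color map to visualize a label array; the import path is an assumption about how this module is reached:

import numpy as np
from scannet200_constants import SCANNET_COLOR_MAP_200  # hypothetical import path

def colorize(segment):
    # Map raw ScanNet200 category ids to RGB; ids without an entry stay black.
    palette = np.zeros((max(SCANNET_COLOR_MAP_200) + 1, 3), dtype=np.uint8)
    for idx, rgb in SCANNET_COLOR_MAP_200.items():
        palette[idx] = rgb
    return palette[np.clip(segment, 0, len(palette) - 1)]

print(colorize(np.array([1, 2, 1191])))  # wall, chair, mattress colors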
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet200_splits.py ADDED
@@ -0,0 +1,625 @@
1
+ # This file contains the HEAD - COMMON - TAIL split category ids for ScanNet 200
2
+
3
+ HEAD_CATS_SCANNET_200 = [
4
+ "tv stand",
5
+ "curtain",
6
+ "blinds",
7
+ "shower curtain",
8
+ "bookshelf",
9
+ "tv",
10
+ "kitchen cabinet",
11
+ "pillow",
12
+ "lamp",
13
+ "dresser",
14
+ "monitor",
15
+ "object",
16
+ "ceiling",
17
+ "board",
18
+ "stove",
19
+ "closet wall",
20
+ "couch",
21
+ "office chair",
22
+ "kitchen counter",
23
+ "shower",
24
+ "closet",
25
+ "doorframe",
26
+ "sofa chair",
27
+ "mailbox",
28
+ "nightstand",
29
+ "washing machine",
30
+ "picture",
31
+ "book",
32
+ "sink",
33
+ "recycling bin",
34
+ "table",
35
+ "backpack",
36
+ "shower wall",
37
+ "toilet",
38
+ "copier",
39
+ "counter",
40
+ "stool",
41
+ "refrigerator",
42
+ "window",
43
+ "file cabinet",
44
+ "chair",
45
+ "wall",
46
+ "plant",
47
+ "coffee table",
48
+ "stairs",
49
+ "armchair",
50
+ "cabinet",
51
+ "bathroom vanity",
52
+ "bathroom stall",
53
+ "mirror",
54
+ "blackboard",
55
+ "trash can",
56
+ "stair rail",
57
+ "box",
58
+ "towel",
59
+ "door",
60
+ "clothes",
61
+ "whiteboard",
62
+ "bed",
63
+ "floor",
64
+ "bathtub",
65
+ "desk",
66
+ "wardrobe",
67
+ "clothes dryer",
68
+ "radiator",
69
+ "shelf",
70
+ ]
71
+ COMMON_CATS_SCANNET_200 = [
72
+ "cushion",
73
+ "end table",
74
+ "dining table",
75
+ "keyboard",
76
+ "bag",
77
+ "toilet paper",
78
+ "printer",
79
+ "blanket",
80
+ "microwave",
81
+ "shoe",
82
+ "computer tower",
83
+ "bottle",
84
+ "bin",
85
+ "ottoman",
86
+ "bench",
87
+ "basket",
88
+ "fan",
89
+ "laptop",
90
+ "person",
91
+ "paper towel dispenser",
92
+ "oven",
93
+ "rack",
94
+ "piano",
95
+ "suitcase",
96
+ "rail",
97
+ "container",
98
+ "telephone",
99
+ "stand",
100
+ "light",
101
+ "laundry basket",
102
+ "pipe",
103
+ "seat",
104
+ "column",
105
+ "bicycle",
106
+ "ladder",
107
+ "jacket",
108
+ "storage bin",
109
+ "coffee maker",
110
+ "dishwasher",
111
+ "machine",
112
+ "mat",
113
+ "windowsill",
114
+ "bulletin board",
115
+ "fireplace",
116
+ "mini fridge",
117
+ "water cooler",
118
+ "shower door",
119
+ "pillar",
120
+ "ledge",
121
+ "furniture",
122
+ "cart",
123
+ "decoration",
124
+ "closet door",
125
+ "vacuum cleaner",
126
+ "dish rack",
127
+ "range hood",
128
+ "projector screen",
129
+ "divider",
130
+ "bathroom counter",
131
+ "laundry hamper",
132
+ "bathroom stall door",
133
+ "ceiling light",
134
+ "trash bin",
135
+ "bathroom cabinet",
136
+ "structure",
137
+ "storage organizer",
138
+ "potted plant",
139
+ "mattress",
140
+ ]
141
+ TAIL_CATS_SCANNET_200 = [
142
+ "paper",
143
+ "plate",
144
+ "soap dispenser",
145
+ "bucket",
146
+ "clock",
147
+ "guitar",
148
+ "toilet paper holder",
149
+ "speaker",
150
+ "cup",
151
+ "paper towel roll",
152
+ "bar",
153
+ "toaster",
154
+ "ironing board",
155
+ "soap dish",
156
+ "toilet paper dispenser",
157
+ "fire extinguisher",
158
+ "ball",
159
+ "hat",
160
+ "shower curtain rod",
161
+ "paper cutter",
162
+ "tray",
163
+ "toaster oven",
164
+ "mouse",
165
+ "toilet seat cover dispenser",
166
+ "storage container",
167
+ "scale",
168
+ "tissue box",
169
+ "light switch",
170
+ "crate",
171
+ "power outlet",
172
+ "sign",
173
+ "projector",
174
+ "candle",
175
+ "plunger",
176
+ "stuffed animal",
177
+ "headphones",
178
+ "broom",
179
+ "guitar case",
180
+ "dustpan",
181
+ "hair dryer",
182
+ "water bottle",
183
+ "handicap bar",
184
+ "purse",
185
+ "vent",
186
+ "shower floor",
187
+ "water pitcher",
188
+ "bowl",
189
+ "paper bag",
190
+ "alarm clock",
191
+ "music stand",
192
+ "laundry detergent",
193
+ "dumbbell",
194
+ "tube",
195
+ "cd case",
196
+ "closet rod",
197
+ "coffee kettle",
198
+ "shower head",
199
+ "keyboard piano",
200
+ "case of water bottles",
201
+ "coat rack",
202
+ "folded chair",
203
+ "fire alarm",
204
+ "power strip",
205
+ "calendar",
206
+ "poster",
207
+ "luggage",
208
+ ]
209
+
210
+
211
+ # Given the different size of the official train and val sets, not all ScanNet200 categories are present in the validation set.
212
+ # Here we list the categories (labels and IDs) present in both the train and validation sets, followed by the remaining categories that are present in train but not in val.
213
+ # We don't evaluate on unseen validation categories in this benchmark.
214
+
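Note: despite the names, VALID_CLASS_IDS_200_VALIDATION below holds the label strings and CLASS_LABELS_200_VALIDATION holds the numeric ids; the naming is presumably kept as-is for compatibility with code that imports these constants.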
215
+ VALID_CLASS_IDS_200_VALIDATION = (
216
+ "wall",
217
+ "chair",
218
+ "floor",
219
+ "table",
220
+ "door",
221
+ "couch",
222
+ "cabinet",
223
+ "shelf",
224
+ "desk",
225
+ "office chair",
226
+ "bed",
227
+ "pillow",
228
+ "sink",
229
+ "picture",
230
+ "window",
231
+ "toilet",
232
+ "bookshelf",
233
+ "monitor",
234
+ "curtain",
235
+ "book",
236
+ "armchair",
237
+ "coffee table",
238
+ "box",
239
+ "refrigerator",
240
+ "lamp",
241
+ "kitchen cabinet",
242
+ "towel",
243
+ "clothes",
244
+ "tv",
245
+ "nightstand",
246
+ "counter",
247
+ "dresser",
248
+ "stool",
249
+ "cushion",
250
+ "plant",
251
+ "ceiling",
252
+ "bathtub",
253
+ "end table",
254
+ "dining table",
255
+ "keyboard",
256
+ "bag",
257
+ "backpack",
258
+ "toilet paper",
259
+ "printer",
260
+ "tv stand",
261
+ "whiteboard",
262
+ "blanket",
263
+ "shower curtain",
264
+ "trash can",
265
+ "closet",
266
+ "stairs",
267
+ "microwave",
268
+ "stove",
269
+ "shoe",
270
+ "computer tower",
271
+ "bottle",
272
+ "bin",
273
+ "ottoman",
274
+ "bench",
275
+ "board",
276
+ "washing machine",
277
+ "mirror",
278
+ "copier",
279
+ "basket",
280
+ "sofa chair",
281
+ "file cabinet",
282
+ "fan",
283
+ "laptop",
284
+ "shower",
285
+ "paper",
286
+ "person",
287
+ "paper towel dispenser",
288
+ "oven",
289
+ "blinds",
290
+ "rack",
291
+ "plate",
292
+ "blackboard",
293
+ "piano",
294
+ "suitcase",
295
+ "rail",
296
+ "radiator",
297
+ "recycling bin",
298
+ "container",
299
+ "wardrobe",
300
+ "soap dispenser",
301
+ "telephone",
302
+ "bucket",
303
+ "clock",
304
+ "stand",
305
+ "light",
306
+ "laundry basket",
307
+ "pipe",
308
+ "clothes dryer",
309
+ "guitar",
310
+ "toilet paper holder",
311
+ "seat",
312
+ "speaker",
313
+ "column",
314
+ "ladder",
315
+ "bathroom stall",
316
+ "shower wall",
317
+ "cup",
318
+ "jacket",
319
+ "storage bin",
320
+ "coffee maker",
321
+ "dishwasher",
322
+ "paper towel roll",
323
+ "machine",
324
+ "mat",
325
+ "windowsill",
326
+ "bar",
327
+ "toaster",
328
+ "bulletin board",
329
+ "ironing board",
330
+ "fireplace",
331
+ "soap dish",
332
+ "kitchen counter",
333
+ "doorframe",
334
+ "toilet paper dispenser",
335
+ "mini fridge",
336
+ "fire extinguisher",
337
+ "ball",
338
+ "hat",
339
+ "shower curtain rod",
340
+ "water cooler",
341
+ "paper cutter",
342
+ "tray",
343
+ "shower door",
344
+ "pillar",
345
+ "ledge",
346
+ "toaster oven",
347
+ "mouse",
348
+ "toilet seat cover dispenser",
349
+ "furniture",
350
+ "cart",
351
+ "scale",
352
+ "tissue box",
353
+ "light switch",
354
+ "crate",
355
+ "power outlet",
356
+ "decoration",
357
+ "sign",
358
+ "projector",
359
+ "closet door",
360
+ "vacuum cleaner",
361
+ "plunger",
362
+ "stuffed animal",
363
+ "headphones",
364
+ "dish rack",
365
+ "broom",
366
+ "range hood",
367
+ "dustpan",
368
+ "hair dryer",
369
+ "water bottle",
370
+ "handicap bar",
371
+ "vent",
372
+ "shower floor",
373
+ "water pitcher",
374
+ "mailbox",
375
+ "bowl",
376
+ "paper bag",
377
+ "projector screen",
378
+ "divider",
379
+ "laundry detergent",
380
+ "bathroom counter",
381
+ "object",
382
+ "bathroom vanity",
383
+ "closet wall",
384
+ "laundry hamper",
385
+ "bathroom stall door",
386
+ "ceiling light",
387
+ "trash bin",
388
+ "dumbbell",
389
+ "stair rail",
390
+ "tube",
391
+ "bathroom cabinet",
392
+ "closet rod",
393
+ "coffee kettle",
394
+ "shower head",
395
+ "keyboard piano",
396
+ "case of water bottles",
397
+ "coat rack",
398
+ "folded chair",
399
+ "fire alarm",
400
+ "power strip",
401
+ "calendar",
402
+ "poster",
403
+ "potted plant",
404
+ "mattress",
405
+ )
406
+
407
+ CLASS_LABELS_200_VALIDATION = (
408
+ 1,
409
+ 2,
410
+ 3,
411
+ 4,
412
+ 5,
413
+ 6,
414
+ 7,
415
+ 8,
416
+ 9,
417
+ 10,
418
+ 11,
419
+ 13,
420
+ 14,
421
+ 15,
422
+ 16,
423
+ 17,
424
+ 18,
425
+ 19,
426
+ 21,
427
+ 22,
428
+ 23,
429
+ 24,
430
+ 26,
431
+ 27,
432
+ 28,
433
+ 29,
434
+ 31,
435
+ 32,
436
+ 33,
437
+ 34,
438
+ 35,
439
+ 36,
440
+ 38,
441
+ 39,
442
+ 40,
443
+ 41,
444
+ 42,
445
+ 44,
446
+ 45,
447
+ 46,
448
+ 47,
449
+ 48,
450
+ 49,
451
+ 50,
452
+ 51,
453
+ 52,
454
+ 54,
455
+ 55,
456
+ 56,
457
+ 57,
458
+ 58,
459
+ 59,
460
+ 62,
461
+ 63,
462
+ 64,
463
+ 65,
464
+ 66,
465
+ 67,
466
+ 68,
467
+ 69,
468
+ 70,
469
+ 71,
470
+ 72,
471
+ 73,
472
+ 74,
473
+ 75,
474
+ 76,
475
+ 77,
476
+ 78,
477
+ 79,
478
+ 80,
479
+ 82,
480
+ 84,
481
+ 86,
482
+ 87,
483
+ 88,
484
+ 89,
485
+ 90,
486
+ 93,
487
+ 95,
488
+ 96,
489
+ 97,
490
+ 98,
491
+ 99,
492
+ 100,
493
+ 101,
494
+ 102,
495
+ 103,
496
+ 104,
497
+ 105,
498
+ 106,
499
+ 107,
500
+ 110,
501
+ 112,
502
+ 115,
503
+ 116,
504
+ 118,
505
+ 120,
506
+ 122,
507
+ 125,
508
+ 128,
509
+ 130,
510
+ 131,
511
+ 132,
512
+ 134,
513
+ 136,
514
+ 138,
515
+ 139,
516
+ 140,
517
+ 141,
518
+ 145,
519
+ 148,
520
+ 154,
521
+ 155,
522
+ 156,
523
+ 157,
524
+ 159,
525
+ 161,
526
+ 163,
527
+ 165,
528
+ 166,
529
+ 168,
530
+ 169,
531
+ 170,
532
+ 177,
533
+ 180,
534
+ 185,
535
+ 188,
536
+ 191,
537
+ 193,
538
+ 195,
539
+ 202,
540
+ 208,
541
+ 213,
542
+ 214,
543
+ 229,
544
+ 230,
545
+ 232,
546
+ 233,
547
+ 242,
548
+ 250,
549
+ 261,
550
+ 264,
551
+ 276,
552
+ 283,
553
+ 300,
554
+ 304,
555
+ 312,
556
+ 323,
557
+ 325,
558
+ 342,
559
+ 356,
560
+ 370,
561
+ 392,
562
+ 395,
563
+ 408,
564
+ 417,
565
+ 488,
566
+ 540,
567
+ 562,
568
+ 570,
569
+ 609,
570
+ 748,
571
+ 776,
572
+ 1156,
573
+ 1163,
574
+ 1164,
575
+ 1165,
576
+ 1166,
577
+ 1167,
578
+ 1168,
579
+ 1169,
580
+ 1170,
581
+ 1171,
582
+ 1172,
583
+ 1173,
584
+ 1175,
585
+ 1176,
586
+ 1179,
587
+ 1180,
588
+ 1181,
589
+ 1182,
590
+ 1184,
591
+ 1185,
592
+ 1186,
593
+ 1187,
594
+ 1188,
595
+ 1189,
596
+ 1191,
597
+ )
598
+
599
+ VALID_CLASS_IDS_200_TRAIN_ONLY = (
600
+ "bicycle",
601
+ "storage container",
602
+ "candle",
603
+ "guitar case",
604
+ "purse",
605
+ "alarm clock",
606
+ "music stand",
607
+ "cd case",
608
+ "structure",
609
+ "storage organizer",
610
+ "luggage",
611
+ )
612
+
613
+ CLASS_LABELS_200_TRAIN_ONLY = (
614
+ 121,
615
+ 221,
616
+ 286,
617
+ 331,
618
+ 399,
619
+ 572,
620
+ 581,
621
+ 1174,
622
+ 1178,
623
+ 1183,
624
+ 1190,
625
+ )
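The comment block in the file above says the benchmark does not evaluate on categories absent from the validation split. A minimal sketch of how downstream evaluation code might apply that rule, assuming the constants are importable from this meta_data module (the import path and helper name below are hypothetical, not part of the uploaded file). Note a naming quirk in the file as uploaded: VALID_CLASS_IDS_200_VALIDATION holds category names, while CLASS_LABELS_200_VALIDATION holds the numeric ScanNet200 IDs.

import numpy as np

# Hypothetical import path; the constants are defined in the file shown above.
from scannet200_splits import CLASS_LABELS_200_VALIDATION

# Numeric ScanNet200 ids present in both train and val (despite the "LABELS" name).
EVAL_IDS = np.asarray(CLASS_LABELS_200_VALIDATION)

def mask_unseen_categories(semantic_ids, ignore_index=-1):
    # Hypothetical helper: map ids absent from the validation split to
    # ignore_index so that metrics skip them, per the comment above.
    out = np.asarray(semantic_ids).copy()
    out[~np.isin(out, EVAL_IDS)] = ignore_index
    return out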
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannet_means.npz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df5c2bd40e8518e982c7d7b4b39020b07ac774695038bf49cb28b44e5760457e
+ size 676
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_test.txt ADDED
@@ -0,0 +1,312 @@
+ scene0568_00
+ scene0568_01
+ scene0568_02
+ scene0304_00
+ scene0488_00
+ scene0488_01
+ scene0412_00
+ scene0412_01
+ scene0217_00
+ scene0019_00
+ scene0019_01
+ scene0414_00
+ scene0575_00
+ scene0575_01
+ scene0575_02
+ scene0426_00
+ scene0426_01
+ scene0426_02
+ scene0426_03
+ scene0549_00
+ scene0549_01
+ scene0578_00
+ scene0578_01
+ scene0578_02
+ scene0665_00
+ scene0665_01
+ scene0050_00
+ scene0050_01
+ scene0050_02
+ scene0257_00
+ scene0025_00
+ scene0025_01
+ scene0025_02
+ scene0583_00
+ scene0583_01
+ scene0583_02
+ scene0701_00
+ scene0701_01
+ scene0701_02
+ scene0580_00
+ scene0580_01
+ scene0565_00
+ scene0169_00
+ scene0169_01
+ scene0655_00
+ scene0655_01
+ scene0655_02
+ scene0063_00
+ scene0221_00
+ scene0221_01
+ scene0591_00
+ scene0591_01
+ scene0591_02
+ scene0678_00
+ scene0678_01
+ scene0678_02
+ scene0462_00
+ scene0427_00
+ scene0595_00
+ scene0193_00
+ scene0193_01
+ scene0164_00
+ scene0164_01
+ scene0164_02
+ scene0164_03
+ scene0598_00
+ scene0598_01
+ scene0598_02
+ scene0599_00
+ scene0599_01
+ scene0599_02
+ scene0328_00
+ scene0300_00
+ scene0300_01
+ scene0354_00
+ scene0458_00
+ scene0458_01
+ scene0423_00
+ scene0423_01
+ scene0423_02
+ scene0307_00
+ scene0307_01
+ scene0307_02
+ scene0606_00
+ scene0606_01
+ scene0606_02
+ scene0432_00
+ scene0432_01
+ scene0608_00
+ scene0608_01
+ scene0608_02
+ scene0651_00
+ scene0651_01
+ scene0651_02
+ scene0430_00
+ scene0430_01
+ scene0689_00
+ scene0357_00
+ scene0357_01
+ scene0574_00
+ scene0574_01
+ scene0574_02
+ scene0329_00
+ scene0329_01
+ scene0329_02
+ scene0153_00
+ scene0153_01
+ scene0616_00
+ scene0616_01
+ scene0671_00
+ scene0671_01
+ scene0618_00
+ scene0382_00
+ scene0382_01
+ scene0490_00
+ scene0621_00
+ scene0607_00
+ scene0607_01
+ scene0149_00
+ scene0695_00
+ scene0695_01
+ scene0695_02
+ scene0695_03
+ scene0389_00
+ scene0377_00
+ scene0377_01
+ scene0377_02
+ scene0342_00
+ scene0139_00
+ scene0629_00
+ scene0629_01
+ scene0629_02
+ scene0496_00
+ scene0633_00
+ scene0633_01
+ scene0518_00
+ scene0652_00
+ scene0406_00
+ scene0406_01
+ scene0406_02
+ scene0144_00
+ scene0144_01
+ scene0494_00
+ scene0278_00
+ scene0278_01
+ scene0316_00
+ scene0609_00
+ scene0609_01
+ scene0609_02
+ scene0609_03
+ scene0084_00
+ scene0084_01
+ scene0084_02
+ scene0696_00
+ scene0696_01
+ scene0696_02
+ scene0351_00
+ scene0351_01
+ scene0643_00
+ scene0644_00
+ scene0645_00
+ scene0645_01
+ scene0645_02
+ scene0081_00
+ scene0081_01
+ scene0081_02
+ scene0647_00
+ scene0647_01
+ scene0535_00
+ scene0353_00
+ scene0353_01
+ scene0353_02
+ scene0559_00
+ scene0559_01
+ scene0559_02
+ scene0593_00
+ scene0593_01
+ scene0246_00
+ scene0653_00
+ scene0653_01
+ scene0064_00
+ scene0064_01
+ scene0356_00
+ scene0356_01
+ scene0356_02
+ scene0030_00
+ scene0030_01
+ scene0030_02
+ scene0222_00
+ scene0222_01
+ scene0338_00
+ scene0338_01
+ scene0338_02
+ scene0378_00
+ scene0378_01
+ scene0378_02
+ scene0660_00
+ scene0553_00
+ scene0553_01
+ scene0553_02
+ scene0527_00
+ scene0663_00
+ scene0663_01
+ scene0663_02
+ scene0664_00
+ scene0664_01
+ scene0664_02
+ scene0334_00
+ scene0334_01
+ scene0334_02
+ scene0046_00
+ scene0046_01
+ scene0046_02
+ scene0203_00
+ scene0203_01
+ scene0203_02
+ scene0088_00
+ scene0088_01
+ scene0088_02
+ scene0088_03
+ scene0086_00
+ scene0086_01
+ scene0086_02
+ scene0670_00
+ scene0670_01
+ scene0256_00
+ scene0256_01
+ scene0256_02
+ scene0249_00
+ scene0441_00
+ scene0658_00
+ scene0704_00
+ scene0704_01
+ scene0187_00
+ scene0187_01
+ scene0131_00
+ scene0131_01
+ scene0131_02
+ scene0207_00
+ scene0207_01
+ scene0207_02
+ scene0461_00
+ scene0011_00
+ scene0011_01
+ scene0343_00
+ scene0251_00
+ scene0077_00
+ scene0077_01
+ scene0684_00
+ scene0684_01
+ scene0550_00
+ scene0686_00
+ scene0686_01
+ scene0686_02
+ scene0208_00
+ scene0500_00
+ scene0500_01
+ scene0552_00
+ scene0552_01
+ scene0648_00
+ scene0648_01
+ scene0435_00
+ scene0435_01
+ scene0435_02
+ scene0435_03
+ scene0690_00
+ scene0690_01
+ scene0693_00
+ scene0693_01
+ scene0693_02
+ scene0700_00
+ scene0700_01
+ scene0700_02
+ scene0699_00
+ scene0231_00
+ scene0231_01
+ scene0231_02
+ scene0697_00
+ scene0697_01
+ scene0697_02
+ scene0697_03
+ scene0474_00
+ scene0474_01
+ scene0474_02
+ scene0474_03
+ scene0474_04
+ scene0474_05
+ scene0355_00
+ scene0355_01
+ scene0146_00
+ scene0146_01
+ scene0146_02
+ scene0196_00
+ scene0702_00
+ scene0702_01
+ scene0702_02
+ scene0314_00
+ scene0277_00
+ scene0277_01
+ scene0277_02
+ scene0095_00
+ scene0095_01
+ scene0015_00
+ scene0100_00
+ scene0100_01
+ scene0100_02
+ scene0558_00
+ scene0558_01
+ scene0558_02
+ scene0685_00
+ scene0685_01
+ scene0685_02
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_train.txt ADDED
@@ -0,0 +1,1045 @@
+ scene0191_00
+ scene0191_01
+ scene0191_02
+ scene0119_00
+ scene0230_00
+ scene0528_00
+ scene0528_01
+ scene0705_00
+ scene0705_01
+ scene0705_02
+ scene0415_00
+ scene0415_01
+ scene0415_02
+ scene0007_00
+ scene0141_00
+ scene0141_01
+ scene0141_02
+ scene0515_00
+ scene0515_01
+ scene0515_02
+ scene0447_00
+ scene0447_01
+ scene0447_02
+ scene0531_00
+ scene0503_00
+ scene0285_00
+ scene0069_00
+ scene0584_00
+ scene0584_01
+ scene0584_02
+ scene0581_00
+ scene0581_01
+ scene0581_02
+ scene0620_00
+ scene0620_01
+ scene0263_00
+ scene0263_01
+ scene0481_00
+ scene0481_01
+ scene0020_00
+ scene0020_01
+ scene0291_00
+ scene0291_01
+ scene0291_02
+ scene0469_00
+ scene0469_01
+ scene0469_02
+ scene0659_00
+ scene0659_01
+ scene0024_00
+ scene0024_01
+ scene0024_02
+ scene0564_00
+ scene0117_00
+ scene0027_00
+ scene0027_01
+ scene0027_02
+ scene0028_00
+ scene0330_00
+ scene0418_00
+ scene0418_01
+ scene0418_02
+ scene0233_00
+ scene0233_01
+ scene0673_00
+ scene0673_01
+ scene0673_02
+ scene0673_03
+ scene0673_04
+ scene0673_05
+ scene0585_00
+ scene0585_01
+ scene0362_00
+ scene0362_01
+ scene0362_02
+ scene0362_03
+ scene0035_00
+ scene0035_01
+ scene0358_00
+ scene0358_01
+ scene0358_02
+ scene0037_00
+ scene0194_00
+ scene0321_00
+ scene0293_00
+ scene0293_01
+ scene0623_00
+ scene0623_01
+ scene0592_00
+ scene0592_01
+ scene0569_00
+ scene0569_01
+ scene0413_00
+ scene0313_00
+ scene0313_01
+ scene0313_02
+ scene0480_00
+ scene0480_01
+ scene0401_00
+ scene0517_00
+ scene0517_01
+ scene0517_02
+ scene0032_00
+ scene0032_01
+ scene0613_00
+ scene0613_01
+ scene0613_02
+ scene0306_00
+ scene0306_01
+ scene0052_00
+ scene0052_01
+ scene0052_02
+ scene0053_00
+ scene0444_00
+ scene0444_01
+ scene0055_00
+ scene0055_01
+ scene0055_02
+ scene0560_00
+ scene0589_00
+ scene0589_01
+ scene0589_02
+ scene0610_00
+ scene0610_01
+ scene0610_02
+ scene0364_00
+ scene0364_01
+ scene0383_00
+ scene0383_01
+ scene0383_02
+ scene0006_00
+ scene0006_01
+ scene0006_02
+ scene0275_00
+ scene0451_00
+ scene0451_01
+ scene0451_02
+ scene0451_03
+ scene0451_04
+ scene0451_05
+ scene0135_00
+ scene0065_00
+ scene0065_01
+ scene0065_02
+ scene0104_00
+ scene0674_00
+ scene0674_01
+ scene0448_00
+ scene0448_01
+ scene0448_02
+ scene0502_00
+ scene0502_01
+ scene0502_02
+ scene0440_00
+ scene0440_01
+ scene0440_02
+ scene0071_00
+ scene0072_00
+ scene0072_01
+ scene0072_02
+ scene0509_00
+ scene0509_01
+ scene0509_02
+ scene0649_00
+ scene0649_01
+ scene0602_00
+ scene0694_00
+ scene0694_01
+ scene0101_00
+ scene0101_01
+ scene0101_02
+ scene0101_03
+ scene0101_04
+ scene0101_05
+ scene0218_00
+ scene0218_01
+ scene0579_00
+ scene0579_01
+ scene0579_02
+ scene0039_00
+ scene0039_01
+ scene0493_00
+ scene0493_01
+ scene0242_00
+ scene0242_01
+ scene0242_02
+ scene0083_00
+ scene0083_01
+ scene0127_00
+ scene0127_01
+ scene0662_00
+ scene0662_01
+ scene0662_02
+ scene0018_00
+ scene0087_00
+ scene0087_01
+ scene0087_02
+ scene0332_00
+ scene0332_01
+ scene0332_02
+ scene0628_00
+ scene0628_01
+ scene0628_02
+ scene0134_00
+ scene0134_01
+ scene0134_02
+ scene0238_00
+ scene0238_01
+ scene0092_00
+ scene0092_01
+ scene0092_02
+ scene0092_03
+ scene0092_04
+ scene0022_00
+ scene0022_01
+ scene0467_00
+ scene0392_00
+ scene0392_01
+ scene0392_02
+ scene0424_00
+ scene0424_01
+ scene0424_02
+ scene0646_00
+ scene0646_01
+ scene0646_02
+ scene0098_00
+ scene0098_01
+ scene0044_00
+ scene0044_01
+ scene0044_02
+ scene0510_00
+ scene0510_01
+ scene0510_02
+ scene0571_00
+ scene0571_01
+ scene0166_00
+ scene0166_01
+ scene0166_02
+ scene0563_00
+ scene0172_00
+ scene0172_01
+ scene0388_00
+ scene0388_01
+ scene0215_00
+ scene0215_01
+ scene0252_00
+ scene0287_00
+ scene0668_00
+ scene0572_00
+ scene0572_01
+ scene0572_02
+ scene0026_00
+ scene0224_00
+ scene0113_00
+ scene0113_01
+ scene0551_00
+ scene0381_00
+ scene0381_01
+ scene0381_02
+ scene0371_00
+ scene0371_01
+ scene0460_00
+ scene0118_00
+ scene0118_01
+ scene0118_02
+ scene0417_00
+ scene0008_00
+ scene0634_00
+ scene0521_00
+ scene0123_00
+ scene0123_01
+ scene0123_02
+ scene0045_00
+ scene0045_01
+ scene0511_00
+ scene0511_01
+ scene0114_00
+ scene0114_01
+ scene0114_02
+ scene0070_00
+ scene0029_00
+ scene0029_01
+ scene0029_02
+ scene0129_00
+ scene0103_00
+ scene0103_01
+ scene0002_00
+ scene0002_01
+ scene0132_00
+ scene0132_01
+ scene0132_02
+ scene0124_00
+ scene0124_01
+ scene0143_00
+ scene0143_01
+ scene0143_02
+ scene0604_00
+ scene0604_01
+ scene0604_02
+ scene0507_00
+ scene0105_00
+ scene0105_01
+ scene0105_02
+ scene0428_00
+ scene0428_01
+ scene0311_00
+ scene0140_00
+ scene0140_01
+ scene0182_00
+ scene0182_01
+ scene0182_02
+ scene0142_00
+ scene0142_01
+ scene0399_00
+ scene0399_01
+ scene0012_00
+ scene0012_01
+ scene0012_02
+ scene0060_00
+ scene0060_01
+ scene0370_00
+ scene0370_01
+ scene0370_02
+ scene0310_00
+ scene0310_01
+ scene0310_02
+ scene0661_00
+ scene0650_00
+ scene0152_00
+ scene0152_01
+ scene0152_02
+ scene0158_00
+ scene0158_01
+ scene0158_02
+ scene0482_00
+ scene0482_01
+ scene0600_00
+ scene0600_01
+ scene0600_02
+ scene0393_00
+ scene0393_01
+ scene0393_02
+ scene0562_00
+ scene0174_00
+ scene0174_01
+ scene0157_00
+ scene0157_01
+ scene0161_00
+ scene0161_01
+ scene0161_02
+ scene0159_00
+ scene0254_00
+ scene0254_01
+ scene0115_00
+ scene0115_01
+ scene0115_02
+ scene0162_00
+ scene0163_00
+ scene0163_01
+ scene0523_00
+ scene0523_01
+ scene0523_02
+ scene0459_00
+ scene0459_01
+ scene0175_00
+ scene0085_00
+ scene0085_01
+ scene0279_00
+ scene0279_01
+ scene0279_02
+ scene0201_00
+ scene0201_01
+ scene0201_02
+ scene0283_00
+ scene0456_00
+ scene0456_01
+ scene0429_00
+ scene0043_00
+ scene0043_01
+ scene0419_00
+ scene0419_01
+ scene0419_02
+ scene0368_00
+ scene0368_01
+ scene0348_00
+ scene0348_01
+ scene0348_02
+ scene0442_00
+ scene0178_00
+ scene0380_00
+ scene0380_01
+ scene0380_02
+ scene0165_00
+ scene0165_01
+ scene0165_02
+ scene0181_00
+ scene0181_01
+ scene0181_02
+ scene0181_03
+ scene0333_00
+ scene0614_00
+ scene0614_01
+ scene0614_02
+ scene0404_00
+ scene0404_01
+ scene0404_02
+ scene0185_00
+ scene0126_00
+ scene0126_01
+ scene0126_02
+ scene0519_00
+ scene0236_00
+ scene0236_01
+ scene0189_00
+ scene0075_00
+ scene0267_00
+ scene0192_00
+ scene0192_01
+ scene0192_02
+ scene0281_00
+ scene0420_00
+ scene0420_01
+ scene0420_02
+ scene0195_00
+ scene0195_01
+ scene0195_02
+ scene0597_00
+ scene0597_01
+ scene0597_02
+ scene0041_00
+ scene0041_01
+ scene0111_00
+ scene0111_01
+ scene0111_02
+ scene0666_00
+ scene0666_01
+ scene0666_02
+ scene0200_00
+ scene0200_01
+ scene0200_02
+ scene0536_00
+ scene0536_01
+ scene0536_02
+ scene0390_00
+ scene0280_00
+ scene0280_01
+ scene0280_02
+ scene0344_00
+ scene0344_01
+ scene0205_00
+ scene0205_01
+ scene0205_02
+ scene0484_00
+ scene0484_01
+ scene0009_00
+ scene0009_01
+ scene0009_02
+ scene0302_00
+ scene0302_01
+ scene0209_00
+ scene0209_01
+ scene0209_02
+ scene0210_00
+ scene0210_01
+ scene0395_00
+ scene0395_01
+ scene0395_02
+ scene0683_00
+ scene0601_00
+ scene0601_01
+ scene0214_00
+ scene0214_01
+ scene0214_02
+ scene0477_00
+ scene0477_01
+ scene0439_00
+ scene0439_01
+ scene0468_00
+ scene0468_01
+ scene0468_02
+ scene0546_00
+ scene0466_00
+ scene0466_01
+ scene0220_00
+ scene0220_01
+ scene0220_02
+ scene0122_00
+ scene0122_01
+ scene0130_00
+ scene0110_00
+ scene0110_01
+ scene0110_02
+ scene0327_00
+ scene0156_00
+ scene0266_00
+ scene0266_01
+ scene0001_00
+ scene0001_01
+ scene0228_00
+ scene0199_00
+ scene0219_00
+ scene0464_00
+ scene0232_00
+ scene0232_01
+ scene0232_02
+ scene0299_00
+ scene0299_01
+ scene0530_00
+ scene0363_00
+ scene0453_00
+ scene0453_01
+ scene0570_00
+ scene0570_01
+ scene0570_02
+ scene0183_00
+ scene0239_00
+ scene0239_01
+ scene0239_02
+ scene0373_00
+ scene0373_01
+ scene0241_00
+ scene0241_01
+ scene0241_02
+ scene0188_00
+ scene0622_00
+ scene0622_01
+ scene0244_00
+ scene0244_01
+ scene0691_00
+ scene0691_01
+ scene0206_00
+ scene0206_01
+ scene0206_02
+ scene0247_00
+ scene0247_01
+ scene0061_00
+ scene0061_01
+ scene0082_00
+ scene0250_00
+ scene0250_01
+ scene0250_02
+ scene0501_00
+ scene0501_01
+ scene0501_02
+ scene0320_00
+ scene0320_01
+ scene0320_02
+ scene0320_03
+ scene0631_00
+ scene0631_01
+ scene0631_02
+ scene0255_00
+ scene0255_01
+ scene0255_02
+ scene0047_00
+ scene0265_00
+ scene0265_01
+ scene0265_02
+ scene0004_00
+ scene0336_00
+ scene0336_01
+ scene0058_00
+ scene0058_01
+ scene0260_00
+ scene0260_01
+ scene0260_02
+ scene0243_00
+ scene0603_00
+ scene0603_01
+ scene0093_00
+ scene0093_01
+ scene0093_02
+ scene0109_00
+ scene0109_01
+ scene0434_00
+ scene0434_01
+ scene0434_02
+ scene0290_00
+ scene0627_00
+ scene0627_01
+ scene0470_00
+ scene0470_01
+ scene0137_00
+ scene0137_01
+ scene0137_02
+ scene0270_00
+ scene0270_01
+ scene0270_02
+ scene0271_00
+ scene0271_01
+ scene0504_00
+ scene0274_00
+ scene0274_01
+ scene0274_02
+ scene0036_00
+ scene0036_01
+ scene0276_00
+ scene0276_01
+ scene0272_00
+ scene0272_01
+ scene0499_00
+ scene0698_00
+ scene0698_01
+ scene0051_00
+ scene0051_01
+ scene0051_02
+ scene0051_03
+ scene0108_00
+ scene0245_00
+ scene0369_00
+ scene0369_01
+ scene0369_02
+ scene0284_00
+ scene0289_00
+ scene0289_01
+ scene0286_00
+ scene0286_01
+ scene0286_02
+ scene0286_03
+ scene0031_00
+ scene0031_01
+ scene0031_02
+ scene0545_00
+ scene0545_01
+ scene0545_02
+ scene0557_00
+ scene0557_01
+ scene0557_02
+ scene0533_00
+ scene0533_01
+ scene0116_00
+ scene0116_01
+ scene0116_02
+ scene0611_00
+ scene0611_01
+ scene0688_00
+ scene0294_00
+ scene0294_01
+ scene0294_02
+ scene0295_00
+ scene0295_01
+ scene0296_00
+ scene0296_01
+ scene0596_00
+ scene0596_01
+ scene0596_02
+ scene0532_00
+ scene0532_01
+ scene0637_00
+ scene0638_00
+ scene0121_00
+ scene0121_01
+ scene0121_02
+ scene0040_00
+ scene0040_01
+ scene0197_00
+ scene0197_01
+ scene0197_02
+ scene0410_00
+ scene0410_01
+ scene0305_00
+ scene0305_01
+ scene0615_00
+ scene0615_01
+ scene0703_00
+ scene0703_01
+ scene0555_00
+ scene0297_00
+ scene0297_01
+ scene0297_02
+ scene0582_00
+ scene0582_01
+ scene0582_02
+ scene0023_00
+ scene0094_00
+ scene0013_00
+ scene0013_01
+ scene0013_02
+ scene0136_00
+ scene0136_01
+ scene0136_02
+ scene0407_00
+ scene0407_01
+ scene0062_00
+ scene0062_01
+ scene0062_02
+ scene0386_00
+ scene0318_00
+ scene0554_00
+ scene0554_01
+ scene0497_00
+ scene0213_00
+ scene0258_00
+ scene0323_00
+ scene0323_01
+ scene0324_00
+ scene0324_01
+ scene0016_00
+ scene0016_01
+ scene0016_02
+ scene0681_00
+ scene0398_00
+ scene0398_01
+ scene0227_00
+ scene0090_00
+ scene0066_00
+ scene0262_00
+ scene0262_01
+ scene0155_00
+ scene0155_01
+ scene0155_02
+ scene0352_00
+ scene0352_01
+ scene0352_02
+ scene0038_00
+ scene0038_01
+ scene0038_02
+ scene0335_00
+ scene0335_01
+ scene0335_02
+ scene0261_00
+ scene0261_01
+ scene0261_02
+ scene0261_03
+ scene0640_00
+ scene0640_01
+ scene0640_02
+ scene0080_00
+ scene0080_01
+ scene0080_02
+ scene0403_00
+ scene0403_01
+ scene0282_00
+ scene0282_01
+ scene0282_02
+ scene0682_00
+ scene0173_00
+ scene0173_01
+ scene0173_02
+ scene0522_00
+ scene0687_00
+ scene0345_00
+ scene0345_01
+ scene0612_00
+ scene0612_01
+ scene0411_00
+ scene0411_01
+ scene0411_02
+ scene0625_00
+ scene0625_01
+ scene0211_00
+ scene0211_01
+ scene0211_02
+ scene0211_03
+ scene0676_00
+ scene0676_01
+ scene0179_00
+ scene0498_00
+ scene0498_01
+ scene0498_02
+ scene0547_00
+ scene0547_01
+ scene0547_02
+ scene0269_00
+ scene0269_01
+ scene0269_02
+ scene0366_00
+ scene0680_00
+ scene0680_01
+ scene0588_00
+ scene0588_01
+ scene0588_02
+ scene0588_03
+ scene0346_00
+ scene0346_01
+ scene0359_00
+ scene0359_01
+ scene0014_00
+ scene0120_00
+ scene0120_01
+ scene0212_00
+ scene0212_01
+ scene0212_02
+ scene0176_00
+ scene0049_00
+ scene0259_00
+ scene0259_01
+ scene0586_00
+ scene0586_01
+ scene0586_02
+ scene0309_00
+ scene0309_01
+ scene0125_00
+ scene0455_00
+ scene0177_00
+ scene0177_01
+ scene0177_02
+ scene0326_00
+ scene0372_00
+ scene0171_00
+ scene0171_01
+ scene0374_00
+ scene0654_00
+ scene0654_01
+ scene0445_00
+ scene0445_01
+ scene0475_00
+ scene0475_01
+ scene0475_02
+ scene0349_00
+ scene0349_01
+ scene0234_00
+ scene0669_00
+ scene0669_01
+ scene0375_00
+ scene0375_01
+ scene0375_02
+ scene0387_00
+ scene0387_01
+ scene0387_02
+ scene0312_00
+ scene0312_01
+ scene0312_02
+ scene0384_00
+ scene0385_00
+ scene0385_01
+ scene0385_02
+ scene0000_00
+ scene0000_01
+ scene0000_02
+ scene0376_00
+ scene0376_01
+ scene0376_02
+ scene0301_00
+ scene0301_01
+ scene0301_02
+ scene0322_00
+ scene0542_00
+ scene0079_00
+ scene0079_01
+ scene0099_00
+ scene0099_01
+ scene0476_00
+ scene0476_01
+ scene0476_02
+ scene0394_00
+ scene0394_01
+ scene0147_00
+ scene0147_01
+ scene0067_00
+ scene0067_01
+ scene0067_02
+ scene0397_00
+ scene0397_01
+ scene0337_00
+ scene0337_01
+ scene0337_02
+ scene0431_00
+ scene0223_00
+ scene0223_01
+ scene0223_02
+ scene0010_00
+ scene0010_01
+ scene0402_00
+ scene0268_00
+ scene0268_01
+ scene0268_02
+ scene0679_00
+ scene0679_01
+ scene0405_00
+ scene0128_00
+ scene0408_00
+ scene0408_01
+ scene0190_00
+ scene0107_00
+ scene0076_00
+ scene0167_00
+ scene0361_00
+ scene0361_01
+ scene0361_02
+ scene0216_00
+ scene0202_00
+ scene0303_00
+ scene0303_01
+ scene0303_02
+ scene0446_00
+ scene0446_01
+ scene0089_00
+ scene0089_01
+ scene0089_02
+ scene0360_00
+ scene0150_00
+ scene0150_01
+ scene0150_02
+ scene0421_00
+ scene0421_01
+ scene0421_02
+ scene0454_00
+ scene0626_00
+ scene0626_01
+ scene0626_02
+ scene0186_00
+ scene0186_01
+ scene0538_00
+ scene0479_00
+ scene0479_01
+ scene0479_02
+ scene0656_00
+ scene0656_01
+ scene0656_02
+ scene0656_03
+ scene0525_00
+ scene0525_01
+ scene0525_02
+ scene0308_00
+ scene0396_00
+ scene0396_01
+ scene0396_02
+ scene0624_00
+ scene0292_00
+ scene0292_01
+ scene0632_00
+ scene0253_00
+ scene0021_00
+ scene0325_00
+ scene0325_01
+ scene0437_00
+ scene0437_01
+ scene0438_00
+ scene0590_00
+ scene0590_01
+ scene0400_00
+ scene0400_01
+ scene0541_00
+ scene0541_01
+ scene0541_02
+ scene0677_00
+ scene0677_01
+ scene0677_02
+ scene0443_00
+ scene0315_00
+ scene0288_00
+ scene0288_01
+ scene0288_02
+ scene0422_00
+ scene0672_00
+ scene0672_01
+ scene0184_00
+ scene0449_00
+ scene0449_01
+ scene0449_02
+ scene0048_00
+ scene0048_01
+ scene0138_00
+ scene0452_00
+ scene0452_01
+ scene0452_02
+ scene0667_00
+ scene0667_01
+ scene0667_02
+ scene0463_00
+ scene0463_01
+ scene0078_00
+ scene0078_01
+ scene0078_02
+ scene0636_00
+ scene0457_00
+ scene0457_01
+ scene0457_02
+ scene0465_00
+ scene0465_01
+ scene0577_00
+ scene0151_00
+ scene0151_01
+ scene0339_00
+ scene0573_00
+ scene0573_01
+ scene0154_00
+ scene0096_00
+ scene0096_01
+ scene0096_02
+ scene0235_00
+ scene0168_00
+ scene0168_01
+ scene0168_02
+ scene0594_00
+ scene0587_00
+ scene0587_01
+ scene0587_02
+ scene0587_03
+ scene0229_00
+ scene0229_01
+ scene0229_02
+ scene0512_00
+ scene0106_00
+ scene0106_01
+ scene0106_02
+ scene0472_00
+ scene0472_01
+ scene0472_02
+ scene0489_00
+ scene0489_01
+ scene0489_02
+ scene0425_00
+ scene0425_01
+ scene0641_00
+ scene0526_00
+ scene0526_01
+ scene0317_00
+ scene0317_01
+ scene0544_00
+ scene0017_00
+ scene0017_01
+ scene0017_02
+ scene0042_00
+ scene0042_01
+ scene0042_02
+ scene0576_00
+ scene0576_01
+ scene0576_02
+ scene0347_00
+ scene0347_01
+ scene0347_02
+ scene0436_00
+ scene0226_00
+ scene0226_01
+ scene0485_00
+ scene0486_00
+ scene0487_00
+ scene0487_01
+ scene0619_00
+ scene0097_00
+ scene0367_00
+ scene0367_01
+ scene0491_00
+ scene0492_00
+ scene0492_01
+ scene0005_00
+ scene0005_01
+ scene0543_00
+ scene0543_01
+ scene0543_02
+ scene0657_00
+ scene0341_00
+ scene0341_01
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv1_val.txt ADDED
@@ -0,0 +1,156 @@
+ scene0534_00
+ scene0534_01
+ scene0319_00
+ scene0273_00
+ scene0273_01
+ scene0225_00
+ scene0198_00
+ scene0003_00
+ scene0003_01
+ scene0003_02
+ scene0409_00
+ scene0409_01
+ scene0331_00
+ scene0331_01
+ scene0505_00
+ scene0505_01
+ scene0505_02
+ scene0505_03
+ scene0505_04
+ scene0506_00
+ scene0057_00
+ scene0057_01
+ scene0074_00
+ scene0074_01
+ scene0074_02
+ scene0091_00
+ scene0112_00
+ scene0112_01
+ scene0112_02
+ scene0240_00
+ scene0102_00
+ scene0102_01
+ scene0513_00
+ scene0514_00
+ scene0514_01
+ scene0537_00
+ scene0516_00
+ scene0516_01
+ scene0495_00
+ scene0617_00
+ scene0133_00
+ scene0520_00
+ scene0520_01
+ scene0635_00
+ scene0635_01
+ scene0054_00
+ scene0473_00
+ scene0473_01
+ scene0524_00
+ scene0524_01
+ scene0379_00
+ scene0471_00
+ scene0471_01
+ scene0471_02
+ scene0566_00
+ scene0248_00
+ scene0248_01
+ scene0248_02
+ scene0529_00
+ scene0529_01
+ scene0529_02
+ scene0391_00
+ scene0264_00
+ scene0264_01
+ scene0264_02
+ scene0675_00
+ scene0675_01
+ scene0350_00
+ scene0350_01
+ scene0350_02
+ scene0450_00
+ scene0068_00
+ scene0068_01
+ scene0237_00
+ scene0237_01
+ scene0365_00
+ scene0365_01
+ scene0365_02
+ scene0605_00
+ scene0605_01
+ scene0539_00
+ scene0539_01
+ scene0539_02
+ scene0540_00
+ scene0540_01
+ scene0540_02
+ scene0170_00
+ scene0170_01
+ scene0170_02
+ scene0433_00
+ scene0340_00
+ scene0340_01
+ scene0340_02
+ scene0160_00
+ scene0160_01
+ scene0160_02
+ scene0160_03
+ scene0160_04
+ scene0059_00
+ scene0059_01
+ scene0059_02
+ scene0056_00
+ scene0056_01
+ scene0478_00
+ scene0478_01
+ scene0548_00
+ scene0548_01
+ scene0548_02
+ scene0204_00
+ scene0204_01
+ scene0204_02
+ scene0033_00
+ scene0145_00
+ scene0483_00
+ scene0508_00
+ scene0508_01
+ scene0508_02
+ scene0180_00
+ scene0148_00
+ scene0556_00
+ scene0556_01
+ scene0416_00
+ scene0416_01
+ scene0416_02
+ scene0416_03
+ scene0416_04
+ scene0073_00
+ scene0073_01
+ scene0073_02
+ scene0073_03
+ scene0034_00
+ scene0034_01
+ scene0034_02
+ scene0639_00
+ scene0561_00
+ scene0561_01
+ scene0298_00
+ scene0692_00
+ scene0692_01
+ scene0692_02
+ scene0692_03
+ scene0692_04
+ scene0642_00
+ scene0642_01
+ scene0642_02
+ scene0642_03
+ scene0630_00
+ scene0630_01
+ scene0630_02
+ scene0630_03
+ scene0630_04
+ scene0630_05
+ scene0630_06
+ scene0706_00
+ scene0567_00
+ scene0567_01
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2-labels-old.combined.tsv ADDED
@@ -0,0 +1,608 @@
+ id raw_category category count nyu40id eigen13id nyuClass nyu40class eigen13class ModelNet40 ModelNet10 ShapeNetCore55 synsetoffset wnsynsetid wnsynsetkey mpcat40 mpcat40index
+ 1 wall wall 8277 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 2 chair chair 4646 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 22 books book 1678 23 2 book books Books n02870526 book.n.11 objects 39
+ 3 floor floor 1553 2 5 floor floor Floor n03365592 floor.n.01 floor 2
+ 5 door door 1483 8 12 door door Wall door n03221720 door.n.01 door 4
+ 1163 object object 1313 40 7 otherprop Objects objects 39
+ 16 window window 1209 9 13 window window Window n04587648 window.n.01 window 9
+ 4 table table 1170 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 56 trash can trash can 1090 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 13 pillow pillow 937 18 7 pillow pillow Objects pillow 3938244 n03938244 pillow.n.01 cushion 8
+ 15 picture picture 862 11 8 picture picture Picture n03931044 picture.n.01 picture 6
+ 41 ceiling ceiling 806 22 3 ceiling ceiling Ceiling n02990373 ceiling.n.01 ceiling 17
+ 26 box box 775 29 7 box box Objects n02883344 box.n.01 objects 39
+ 161 doorframe doorframe 768 8 12 door door Wall door doorframe.n.01 door 4
+ 19 monitor monitor 765 40 7 monitor otherprop Objects monitor monitor tv or monitor 3211117 n03782190 monitor.n.04 objects 39
+ 7 cabinet cabinet 731 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 9 desk desk 680 14 10 desk desk Table desk desk table 4379243 n03179701 desk.n.01 table 5
+ 8 shelf shelf 641 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 10 office chair office chair 595 5 4 chair chair Chair chair chair chair 3001627 n04373704 swivel_chair.n.01 chair 3
+ 31 towel towel 570 27 7 towel towel Objects n04459362 towel.n.01 towel 20
+ 6 couch couch 502 6 9 sofa sofa Sofa sofa sofa sofa 4256520 n04256520 sofa.n.01 sofa 10
+ 14 sink sink 488 34 7 sink sink Objects sink n04223580 sink.n.01 sink 15
+ 48 backpack backpack 479 40 7 backpack otherprop Objects n02769748 backpack.n.01 objects 39
+ 28 lamp lamp 419 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
+ 11 bed bed 370 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
+ 18 bookshelf bookshelf 360 10 6 bookshelf bookshelf Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 71 mirror mirror 349 19 7 mirror mirror Objects n03773035 mirror.n.01 mirror 21
+ 21 curtain curtain 347 16 13 curtain curtain Window curtain n03151077 curtain.n.01 curtain 12
+ 40 plant plant 331 40 7 plant otherprop Objects plant n00017222 plant.n.02 plant 14
+ 52 whiteboard whiteboard 327 30 7 whiteboard whiteboard Objects n03211616 display_panel.n.01 board_panel 35
+ 96 radiator radiator 322 39 6 radiator otherfurniture Furniture n04041069 radiator.n.02 misc 40
+ 22 book book 318 23 2 book books Books n02870526 book.n.11 objects 39
+ 29 kitchen cabinet kitchen cabinet 310 3 6 cabinet cabinet Furniture n02933112 cabinet.n.01 cabinet 7
+ 49 toilet paper toilet paper 291 40 7 toilet paper otherprop Objects n15075141 toilet_tissue.n.01 objects 39
+ 29 kitchen cabinets kitchen cabinet 289 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 23 armchair armchair 281 5 4 chair chair Chair chair chair chair 3001627 n02738535 armchair.n.01 chair 3
+ 63 shoes shoe 272 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
+ 24 coffee table coffee table 258 7 10 coffee table table Table table table table 4379243 n03063968 coffee_table.n.01 table 5
+ 17 toilet toilet 256 33 7 toilet toilet Objects toilet toilet n04446276 toilet.n.01 toilet 18
+ 47 bag bag 252 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
+ 32 clothes clothes 248 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
+ 46 keyboard keyboard 246 40 7 keyboard otherprop Objects keyboard computer keyboard 3085013 n03085013 computer_keyboard.n.01 objects 39
+ 65 bottle bottle 226 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
+ 97 recycling bin recycling bin 225 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 34 nightstand nightstand 224 32 6 night stand night stand Furniture night_stand night_stand n03015254 chest_of_drawers.n.01 chest_of_drawers 13
+ 38 stool stool 221 40 7 stool otherprop Objects stool n04326896 stool.n.01 stool 19
+ 33 tv tv 219 25 11 television television TV tv or monitor 3211117 n03211117 display.n.06 tv_monitor 22
+ 75 file cabinet file cabinet 217 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 36 dresser dresser 213 17 6 dresser dresser Furniture dresser dresser n03015254 chest_of_drawers.n.01 chest_of_drawers 13
+ 64 computer tower computer tower 203 40 7 computer otherprop Objects n03082979 computer.n.01 objects 39
+ 32 clothing clothes 165 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
+ 101 telephone telephone 164 40 7 telephone otherprop Objects telephone 4401088 n04401088 telephone.n.01 objects 39
+ 130 cup cup 157 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
+ 27 refrigerator refrigerator 154 24 6 refridgerator refridgerator Furniture n04070727 refrigerator.n.01 appliances 37
+ 44 end table end table 147 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 131 jacket jacket 146 40 7 jacket otherprop Objects n03589791 jacket.n.01 clothes 38
+ 55 shower curtain shower curtain 144 28 7 shower curtain shower curtain Objects curtain n04209239 shower_curtain.n.01 curtain 12
+ 42 bathtub bathtub 144 36 7 bathtub bathtub Objects bathtub bathtub tub 2808440 n02808440 bathtub.n.01 bathtub 25
+ 59 microwave microwave 141 40 7 microwave otherprop Objects microwave 3761084 n03761084 microwave.n.02 appliances 37
+ 159 kitchen counter kitchen counter 140 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 74 sofa chair sofa chair 129 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 82 paper towel dispenser paper towel dispenser 129 40 7 paper towel dispenser otherprop Objects objects 39
+ 1164 bathroom vanity bathroom vanity 126 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 table 5
+ 93 suitcase suitcase 118 40 7 luggage otherprop Objects n02773838 bag.n.06 objects 39
+ 77 laptop laptop 111 40 7 laptop otherprop Objects laptop laptop 3642806 n03642806 laptop.n.01 objects 39
+ 67 ottoman ottoman 111 39 6 ottoman otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
+ 128 shower walls shower wall 109 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 50 printer printer 106 40 7 printer otherprop Objects printer 4004475 n04004475 printer.n.03 appliances 37
+ 35 counter counter 104 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 69 board board 100 38 7 board otherstructure Objects board_panel 35
+ 100 soap dispenser soap dispenser 99 40 7 otherprop Objects n04254120 soap_dispenser.n.01 objects 39
+ 62 stove stove 95 38 7 stove otherstructure Objects stove 4330267 n04330267 stove.n.02 appliances 37
+ 105 light light 93 38 7 light otherstructure Objects n03665366 light.n.02 lighting 28
+ 1165 closet wall closet wall 90 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 165 mini fridge mini fridge 87 24 6 refridgerator refridgerator Furniture n03273913 electric_refrigerator.n.01 appliances 37
+ 7 cabinets cabinet 79 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 5 doors door 76 8 12 door door Wall door n03221720 door.n.01 door 4
+ 76 fan fan 75 40 7 fan otherprop Objects n03320046 fan.n.01 misc 40
+ 230 tissue box tissue box 73 40 7 tissue box otherprop Objects n02883344 box.n.01 objects 39
+ 54 blanket blanket 72 40 7 blanket otherprop Objects n02849154 blanket.n.01 objects 39
+ 125 bathroom stall bathroom stall 71 38 7 otherstructure Objects n02873839 booth.n.02 misc 40
+ 72 copier copier 70 40 7 otherprop Objects n03257586 duplicator.n.01 appliances 37
+ 68 bench bench 66 39 6 bench otherfurniture Furniture bench bench 2828884 n02828884 bench.n.01 seating 34
+ 145 bar bar 66 38 7 bar otherstructure Objects n02788689 bar.n.03 misc 40
+ 157 soap dish soap dish 65 40 7 soap dish otherprop Objects n04254009 soap_dish.n.01 objects 39
+ 1166 laundry hamper laundry hamper 65 40 7 laundry basket otherprop Objects objects 39
+ 132 storage bin storage bin 63 40 7 storage bin otherprop Objects objects 39
+ 1167 bathroom stall door bathroom stall door 62 8 12 door door Wall door n03221720 door.n.01 door 4
+ 232 light switch light switch 61 38 7 light switch otherstructure Objects n04372370 switch.n.01 misc 40
+ 134 coffee maker coffee maker 61 40 7 otherprop Objects n03063338 coffee_maker.n.01 appliances 37
+ 51 tv stand tv stand 61 39 6 tv stand otherfurniture Furniture tv_stand n03290653 entertainment_center.n.01 furniture 36
+ 250 decoration decoration 60 40 7 otherprop Objects n03169390 decoration.n.01 misc 40
+ 1168 ceiling light ceiling light 59 38 7 light otherstructure Objects n03665366 light.n.02 lighting 28
+ 342 range hood range hood 59 38 7 range hood otherstructure Objects range_hood n04053677 range_hood.n.01 misc 40
+ 89 blackboard blackboard 58 38 7 blackboard otherstructure Objects n02846511 blackboard.n.01 board_panel 35
+ 103 clock clock 58 40 7 clock otherprop Objects clock 3046257 n03046257 clock.n.01 objects 39
+ 99 wardrobe closet wardrobe 54 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
+ 95 rail rail 53 38 7 railing otherstructure Objects n04047401 railing.n.01 railing 30
+ 154 bulletin board bulletin board 53 38 7 board otherstructure Objects n03211616 display_panel.n.01 board_panel 35
+ 140 mat mat 52 20 5 floor mat floor mat Floor n03727837 mat.n.01 floor 2
+ 1169 trash bin trash bin 52 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 193 ledge ledge 51 38 7 otherstructure Objects n09337253 ledge.n.01 misc 40
+ 116 seat seat 49 39 6 furniture otherfurniture Furniture n04161981 seat.n.03 furniture 36
+ 202 mouse mouse 49 40 7 mouse otherprop Objects n03793489 mouse.n.04 objects 39
+ 73 basket basket 48 40 7 basket otherprop Objects basket 2801938 n02801938 basket.n.01 objects 39
+ 78 shower shower 48 38 7 otherstructure Objects n04208936 shower.n.01 shower 23
+ 1170 dumbbell dumbbell 48 40 7 otherprop Objects n03255030 dumbbell.n.01 objects 39
+ 79 paper paper 46 26 7 paper paper Objects n14974264 paper.n.01 objects 39
+ 80 person person 46 31 7 person person Objects person n05217688 person.n.02 misc 40
+ 141 windowsill windowsill 45 38 7 otherstructure Objects n04590263 windowsill.n.01 window 9
+ 57 closet closet 45 39 6 wardrobe otherfurniture Furniture wardrobe misc 40
+ 102 bucket bucket 45 40 7 bucket otherprop Objects n02909870 bucket.n.01 misc 40
+ 261 sign sign 44 40 7 sign otherprop Objects n04217882 signboard.n.01 objects 39
+ 118 speaker speaker 43 40 7 speaker otherprop Objects speaker 3691459 n03691459 loudspeaker.n.01 objects 39
+ 136 dishwasher dishwasher 43 38 7 dishwasher otherstructure Objects dishwasher 3207941 n03207941 dishwasher.n.01 appliances 37
+ 98 container container 43 40 7 container otherprop Objects n03094503 container.n.01 objects 39
+ 1171 stair rail stair rail 42 38 7 banister otherstructure Objects n02788148 bannister.n.02 railing 30
+ 170 shower curtain rod shower curtain rod 42 40 7 otherprop Objects curtain 12
+ 1172 tube tube 41 40 7 otherprop Objects misc 40
+ 1173 bathroom cabinet bathroom cabinet 39 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 79 papers paper 39 26 7 paper paper Objects n14974264 paper.n.01 objects 39
+ 221 storage container storage container 39 40 7 container otherprop Objects objects 39
+ 570 paper bag paper bag 39 37 7 bag bag Objects n04122825 sack.n.01 objects 39
+ 138 paper towel roll paper towel roll 39 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
+ 168 ball ball 39 40 7 ball otherprop Objects objects 39
+ 276 closet doors closet door 38 8 12 door door Wall door n03221720 door.n.01 door 4
+ 106 laundry basket laundry basket 37 40 7 laundry basket otherprop Objects basket 2801938 n03050864 clothes_hamper.n.01 objects 39
+ 214 cart cart 37 40 7 cart otherprop Objects n03484083 handcart.n.01 shelving 31
+ 276 closet door closet door 35 8 12 door door Wall door n03221720 door.n.01 door 4
+ 323 dish rack dish rack 35 40 7 dish rack otherprop Objects n03207630 dish_rack.n.01 objects 39
+ 58 stairs stairs 35 38 7 stairs otherstructure Objects n04298308 stairway.n.01 stairs 16
+ 86 blinds blinds 35 13 13 blinds blinds Window n02851099 blind.n.03 blinds 32
+ 2 stack of chairs chair 35 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 399 purse purse 34 40 7 purse otherprop Objects n02774152 bag.n.04 objects 39
+ 121 bicycle bicycle 33 40 7 bicycle otherprop Objects bicycle 2834778 n02834778 bicycle.n.01 objects 39
+ 185 tray tray 32 40 7 tray otherprop Objects n04476259 tray.n.01 objects 39
+ 300 plunger plunger 30 40 7 otherprop Objects n03970156 plunger.n.03 objects 39
+ 180 paper cutter paper cutter 30 40 7 paper cutter otherprop Objects n03886940 paper_cutter.n.01 objects 39
+ 163 toilet paper dispenser toilet paper dispenser 29 40 7 otherprop Objects objects 39
+ 26 boxes box 29 29 7 box box Objects n02883344 box.n.01 objects 39
+ 66 bin bin 28 40 7 bin otherprop Objects n02839910 bin.n.01 objects 39
+ 208 toilet seat cover dispenser toilet seat cover dispenser 28 40 7 otherprop Objects objects 39
+ 112 guitar guitar 28 40 7 guitar otherprop Objects guitar guitar 3467517 n03467517 guitar.n.01 objects 39
+ 540 mailboxes mailbox 28 29 7 box box Objects mailbox 3710193 n03710193 mailbox.n.01 misc 40
+ 395 handicap bar handicap bar 27 38 7 bar otherstructure Objects misc 40
+ 166 fire extinguisher fire extinguisher 27 40 7 fire extinguisher otherprop Objects n03345837 fire_extinguisher.n.01 misc 40
+ 122 ladder ladder 27 39 6 ladder otherfurniture Furniture stairs n03632277 ladder.n.01 stairs 16
+ 120 column column 26 38 7 column otherstructure Objects n03074380 column.n.06 column 24
+ 107 pipe pipe 25 40 7 pipe otherprop Objects n03944672 pipe.n.02 misc 40
+ 283 vacuum cleaner vacuum cleaner 25 40 7 otherprop Objects n04517823 vacuum.n.04 objects 39
+ 88 plate plate 24 40 7 plate otherprop Objects n03959485 plate.n.04 objects 39
+ 90 piano piano 24 39 6 piano otherfurniture Furniture piano piano 3928116 n03928116 piano.n.01 furniture 36
+ 177 water cooler water cooler 24 39 6 water cooler otherfurniture Furniture n04559166 water_cooler.n.01 misc 40
+ 1174 cd case cd case 24 40 7 otherprop Objects objects 39
+ 562 bowl bowl 24 40 7 bowl otherprop Objects bowl bowl 2880940 n02880940 bowl.n.03 objects 39
+ 1175 closet rod closet rod 24 40 7 otherprop Objects n04100174 rod.n.01 misc 40
+ 1156 bathroom counter bathroom counter 24 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 84 oven oven 23 38 7 oven otherstructure Objects n03862676 oven.n.01 appliances 37
+ 104 stand stand 23 39 6 stand otherfurniture Furniture table table table 4379243 n04301000 stand.n.04 table 5
+ 229 scale scale 23 40 7 scale otherprop Objects n04141975 scale.n.07 objects 39
+ 70 washing machine washing machine 23 39 6 washing machine otherfurniture Furniture washing_machine 4554684 n04554684 washer.n.03 appliances 37
+ 325 broom broom 22 40 7 broom otherprop Objects n02906734 broom.n.01 objects 39
+ 169 hat hat 22 40 7 hat otherprop Objects n03497657 hat.n.01 clothes 38
+ 128 shower wall shower wall 22 1 12 wall wall Wall n04208936 shower.n.01 wall 1
+ 331 guitar case guitar case 21 40 7 guitar case otherprop Objects objects 39
+ 87 rack rack 21 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
+ 488 water pitcher water pitcher 21 40 7 pitcher otherprop Objects n03950228 pitcher.n.02 objects 39
+ 776 laundry detergent laundry detergent 21 40 7 otherprop Objects objects 39
+ 370 hair dryer hair dryer 21 40 7 hair dryer otherprop Objects n03483316 hand_blower.n.01 objects 39
+ 191 pillar pillar 21 38 7 column otherstructure Objects n03073977 column.n.07 column 24
+ 748 divider divider 20 40 7 otherprop Objects wall 1
+ 242 power outlet power outlet 19 40 7 otherprop Objects misc 40
+ 45 dining table dining table 19 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 417 shower floor shower floor 19 2 5 floor floor Floor n04208936 shower.n.01 floor 2
+ 70 washing machines washing machine 19 39 6 washing machine otherfurniture Furniture washing_machine 4554684 n04554684 washer.n.03 appliances 37
+ 188 shower door shower door 19 8 12 door door Wall door n04208936 shower.n.01 door 4
+ 1176 coffee kettle coffee kettle 18 40 7 pot otherprop Objects n03612814 kettle.n.01 objects 39
+ 1177 wardrobe cabinet wardrobe 18 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
+ 1178 structure structure 18 38 7 otherstructure Objects misc 40
+ 18 bookshelves bookshelf 17 10 6 bookshelf bookshelf Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 110 clothes dryer clothes dryer 17 39 6 otherfurniture Furniture n03251766 dryer.n.01 appliances 37
+ 148 toaster toaster 17 40 7 toaster otherprop Objects n04442312 toaster.n.02 appliances 37
+ 63 shoe shoe 17 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
+ 155 ironing board ironing board 16 39 6 ironing board otherfurniture Furniture n03586090 ironing_board.n.01 objects 39
+ 572 alarm clock alarm clock 16 40 7 alarm clock otherprop Objects clock 3046257 n02694662 alarm_clock.n.01 objects 39
+ 1179 shower head shower head 15 38 7 otherstructure Objects shower 23
+ 28 lamp base lamp 15 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
+ 392 water bottle water bottle 15 40 7 bottle otherprop Objects bottle bottle 2876657 n04557648 water_bottle.n.01 objects 39
+ 1180 keyboard piano keyboard piano 15 39 6 piano otherfurniture Furniture piano piano 3928116 n03928116 piano.n.01 furniture 36
+ 609 projector screen projector screen 15 38 7 projector screen otherstructure Objects misc 40
+ 1181 case of water bottles case of water bottles 15 40 7 otherprop Objects objects 39
+ 195 toaster oven toaster oven 14 40 7 toaster oven otherprop Objects n04442441 toaster_oven.n.01 appliances 37
+ 581 music stand music stand 14 39 6 music stand otherfurniture Furniture n03801760 music_stand.n.01 furniture 36
+ 58 staircase stairs 14 38 7 stairs otherstructure Objects n04298308 stairway.n.01 stairs 16
+ 1182 coat rack coat rack 14 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 3
+ 1183 storage organizer storage organizer 14 40 7 otherprop Objects shelving 3
+ 139 machine machine 14 40 7 machine otherprop Objects n03699975 machine.n.01 appliances 37
+ 1184 folded chair folded chair 14 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 1185 fire alarm fire alarm 14 40 7 otherprop Objects n03343737 fire_alarm.n.02 misc 40
+ 156 fireplace fireplace 13 38 7 fireplace otherstructure Objects n03346455 fireplace.n.01 fireplace 27
+ 408 vent vent 13 40 7 otherprop Objects n04526241 vent.n.01 misc 40
+ 213 furniture furniture 13 39 6 furniture otherfurniture Furniture n03405725 furniture.n.01 furniture 36
+ 1186 power strip power strip 13 40 7 otherprop Objects objects 39
+ 1187 calendar calendar 13 40 7 otherprop Objects objects 39
+ 1188 poster poster 13 11 8 picture picture Picture n03931044 picture.n.01 picture 6
+ 115 toilet paper holder toilet paper holder 13 40 7 toilet paper holder otherprop Objects objects 39
+ 1189 potted plant potted plant 12 40 7 plant otherprop Objects plant n00017222 plant.n.02 plant 14
+ 304 stuffed animal stuffed animal 12 40 7 stuffed animal otherprop Objects n04399382 teddy.n.01 objects 39
+ 1190 luggage luggage 12 40 7 luggage otherprop Objects n02774630 baggage.n.01 objects 39
+ 21 curtains curtain 12 16 13 curtain curtain Window curtain n03151077 curtain.n.01 curtain 12
+ 312 headphones headphones 12 40 7 otherprop Objects n03261776 earphone.n.01 objects 39
+ 233 crate crate 12 39 6 crate otherfurniture Furniture n03127925 crate.n.01 objects 39
214
+ 286 candle candle 12 40 7 candle otherprop Objects lamp n02948072 candle.n.01 objects 39
215
+ 264 projector projector 12 40 7 projector otherprop Objects n04009552 projector.n.02 objects 39
216
+ 110 clothes dryers clothes dryer 12 39 6 otherfurniture Furniture n03251766 dryer.n.01 appliances 37
217
+ 1191 mattress mattress 12 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
218
+ 356 dustpan dustpan 12 40 7 otherprop Objects n03259009 dustpan.n.02 objects 39
219
+ 25 drawer drawer 11 39 6 drawer otherfurniture Furniture n03233905 drawer.n.01 furniture 36
220
+ 750 rod rod 11 40 7 otherprop Objects pistol 3948459 n03427202 gat.n.01 misc 40
221
+ 269 globe globe 11 40 7 globe otherprop Objects objects 39
222
+ 307 footrest footrest 11 39 6 foot rest otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
223
+ 410 piano bench piano bench 11 39 6 piano bench otherfurniture Furniture bench bench 2828884 n02828884 bench.n.01 seating 34
224
+ 730 breakfast bar breakfast bar 11 38 7 bar otherstructure Objects counter 26
225
+ 216 step stool step stool 11 40 7 step stool otherprop Objects stool n04315713 step_stool.n.01 stool 19
226
+ 1192 hand rail hand rail 11 38 7 railing otherstructure Objects railing 30
227
+ 119 vending machine vending machine 11 40 7 machine otherprop Objects n04525305 vending_machine.n.01 appliances 37
228
+ 682 ceiling fan ceiling fan 11 40 7 fan otherprop Objects n03320046 fan.n.01 misc 40
229
+ 434 swiffer swiffer 11 40 7 otherprop Objects objects 39
230
+ 126 foosball table foosball table 11 39 6 foosball table otherfurniture Furniture table table table 4379243 n04379243 table.n.02 table 5
231
+ 919 jar jar 11 40 7 jar otherprop Objects jar 3593526 n03593526 jar.n.01 objects 39
232
+ 85 footstool footstool 11 39 6 ottoman otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
233
+ 1193 folded table folded table 10 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
234
+ 108 round table round table 10 7 10 table table Table table table table 4379243 n04114554 round_table.n.02 table 5
235
+ 135 hamper hamper 10 40 7 basket otherprop Objects basket 2801938 n03482405 hamper.n.02 objects 39
236
+ 1194 poster tube poster tube 10 40 7 otherprop Objects objects 39
237
+ 432 case case 10 40 7 case otherprop Objects objects 39
238
+ 53 carpet carpet 10 40 7 rug otherprop Objects n04118021 rug.n.01 floor 2
239
+ 1195 thermostat thermostat 10 40 7 otherprop Objects n04422875 thermostat.n.01 misc 40
240
+ 111 coat coat 10 40 7 jacket otherprop Objects n03057021 coat.n.01 clothes 38
241
+ 305 water fountain water fountain 10 38 7 water fountain otherstructure Objects n03241335 drinking_fountain.n.01 misc 40
242
+ 1125 smoke detector smoke detector 10 40 7 otherprop Objects misc 40
243
+ 13 pillows pillow 9 18 7 pillow pillow Objects pillow 3938244 n03938244 pillow.n.01 cushion 8
244
+ 1196 flip flops flip flops 9 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
245
+ 1197 cloth cloth 9 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
246
+ 1198 banner banner 9 40 7 otherprop Objects n02788021 banner.n.01 misc 40
247
+ 1199 clothes hanger clothes hanger 9 40 7 otherprop Objects n03057920 coat_hanger.n.01 objects 39
248
+ 1200 whiteboard eraser whiteboard eraser 9 40 7 otherprop Objects objects 39
249
+ 378 iron iron 9 40 7 otherprop Objects n03584829 iron.n.04 objects 39
250
+ 591 instrument case instrument case 9 40 7 case otherprop Objects objects 39
251
+ 49 toilet paper rolls toilet paper 9 40 7 toilet paper otherprop Objects n15075141 toilet_tissue.n.01 objects 39
252
+ 92 soap soap 9 40 7 soap otherprop Objects n04253437 soap.n.01 objects 39
253
+ 1098 block block 9 40 7 otherprop Objects misc 40
254
+ 291 wall hanging wall hanging 8 40 7 otherprop Objects n03491178 hanging.n.01 picture 6
255
+ 1063 kitchen island kitchen island 8 38 7 kitchen island otherstructure Objects n03620600 kitchen_island.n.01 counter 26
256
+ 107 pipes pipe 8 38 7 otherstructure Objects misc 40
257
+ 1135 toothbrush toothbrush 8 40 7 toothbrush otherprop Objects n04453156 toothbrush.n.01 objects 39
258
+ 189 shirt shirt 8 40 7 otherprop Objects n04197391 shirt.n.01 clothes 38
259
+ 245 cutting board cutting board 8 40 7 cutting board otherprop Objects n03025513 chopping_board.n.01 objects 39
260
+ 194 vase vase 8 40 7 vase otherprop Objects vase jar 3593526 n04522168 vase.n.01 objects 39
261
+ 1201 shower control valve shower control valve 8 38 7 otherstructure Objects n04208936 shower.n.01 shower 23
262
+ 386 exercise machine exercise machine 8 40 7 machine otherprop Objects gym_equipment 33
263
+ 1202 compost bin compost bin 8 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
264
+ 857 shorts shorts 8 40 7 shorts otherprop Objects clothes 38
265
+ 452 tire tire 8 40 7 otherprop Objects n04440749 tire.n.01 objects 39
266
+ 1203 teddy bear teddy bear 7 40 7 stuffed animal otherprop Objects n04399382 teddy.n.01 objects 39
267
+ 346 bathrobe bathrobe 7 40 7 otherprop Objects n02807616 bathrobe.n.01 clothes 38
268
+ 152 handrail handrail 7 38 7 railing otherstructure Objects n02788148 bannister.n.02 railing 30
269
+ 83 faucet faucet 7 40 7 faucet otherprop Objects faucet 3325088 n03325088 faucet.n.01 misc 40
270
+ 1204 pantry wall pantry wall 7 1 12 wall wall Wall n04546855 wall.n.01 wall 1
271
+ 726 thermos thermos 7 40 7 flask otherprop Objects bottle bottle 2876657 n04422727 thermos.n.01 objects 39
272
+ 61 rug rug 7 40 7 rug otherprop Objects n04118021 rug.n.01 floor 2
273
+ 39 couch cushions cushion 7 18 7 pillow pillow Objects n03151500 cushion.n.03 cushion 8
274
+ 1117 tripod tripod 7 39 6 stand otherfurniture Furniture n04485082 tripod.n.01 objects 39
275
+ 540 mailbox mailbox 7 29 7 box box Objects mailbox 3710193 n03710193 mailbox.n.01 misc 40
276
+ 1205 tupperware tupperware 7 40 7 otherprop Objects objects 39
277
+ 415 shoe rack shoe rack 7 40 7 shoe rack otherprop Objects shelving 31
278
+ 31 towels towel 6 27 7 towel towel Objects n04459362 towel.n.01 towel 20
279
+ 1206 beer bottles beer bottle 6 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
280
+ 153 treadmill treadmill 6 39 6 treadmill otherfurniture Furniture n04477387 treadmill.n.01 gym_equipment 33
281
+ 1207 salt salt 6 40 7 otherprop Objects objects 39
282
+ 129 chest chest 6 39 6 chest otherfurniture Furniture dresser dresser chest_of_drawers 13
283
+ 220 dispenser dispenser 6 40 7 otherprop Objects n03210683 dispenser.n.01 objects 39
284
+ 1208 mirror doors mirror door 6 8 12 door door Wall door n03221720 door.n.01 door 4
285
+ 231 remote remote 6 40 7 otherprop Objects remote_control 4074963 n04074963 remote_control.n.01 objects 39
286
+ 1209 folded ladder folded ladder 6 39 6 ladder otherfurniture Furniture stairs n03632277 ladder.n.01 misc 40
287
+ 39 cushion cushion 6 18 7 pillow pillow Objects n03151500 cushion.n.03 cushion 8
288
+ 1210 carton carton 6 40 7 otherprop Objects objects 39
289
+ 117 step step 6 38 7 otherstructure Objects n04314914 step.n.04 misc 40
290
+ 822 drying rack drying rack 6 39 6 drying rack otherfurniture Furniture shelving 31
291
+ 238 slippers slipper 6 40 7 shoe otherprop Objects n04241394 slipper.n.01 clothes 38
292
+ 143 pool table pool table 6 39 6 pool table otherfurniture Furniture table table table 4379243 n03982430 pool_table.n.01 table 5
293
+ 1211 soda stream soda stream 6 40 7 otherprop Objects objects 39
294
+ 228 toilet brush toilet brush 6 40 7 toilet brush otherprop Objects objects 39
295
+ 494 loft bed loft bed 6 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
296
+ 226 cooking pot cooking pot 6 40 7 pot otherprop Objects objects 39
297
+ 91 heater heater 6 39 6 heater otherfurniture Furniture n03508101 heater.n.01 misc 40
298
+ 1072 messenger bag messenger bag 6 37 7 bag bag Objects objects 39
299
+ 435 stapler stapler 6 40 7 stapler otherprop Objects n04303497 stapler.n.01 objects 39
300
+ 1165 closet walls closet wall 5 1 12 wall wall Wall n04546855 wall.n.01 wall 1
301
+ 345 scanner scanner 5 40 7 otherprop Objects appliances 37
302
+ 893 elliptical machine elliptical machine 5 40 7 machine otherprop Objects gym_equipment 33
303
+ 621 kettle kettle 5 40 7 pot otherprop Objects n03612814 kettle.n.01 objects 39
304
+ 1212 metronome metronome 5 40 7 otherprop Objects n03757604 metronome.n.01 objects 39
305
+ 297 dumbell dumbell 5 40 7 otherprop Objects objects 39
306
+ 1213 music book music book 5 23 2 book books Books n02870526 book.n.11 objects 39
307
+ 1214 rice cooker rice cooker 5 40 7 otherprop Objects objects 39
308
+ 1215 dart board dart board 5 38 7 board otherstructure Objects n03162940 dartboard.n.01 objects 39
309
+ 529 sewing machine sewing machine 5 40 7 sewing machine otherprop Objects n04179913 sewing_machine.n.01 objects 39
310
+ 1216 grab bar grab bar 5 38 7 railing otherstructure Objects railing 30
311
+ 1217 flowerpot flowerpot 5 40 7 vase otherprop Objects vase jar 3593526 n04522168 vase.n.01 objects 39
312
+ 1218 painting painting 5 11 8 picture picture Picture n03931044 picture.n.01 picture 6
313
+ 1219 railing railing 5 38 7 railing otherstructure Objects n04047401 railing.n.01 railing 30
314
+ 1220 stair stair 5 38 7 stairs otherstructure Objects stairs n04314914 step.n.04 stairs 16
315
+ 525 toolbox toolbox 5 39 6 chest otherfurniture Furniture n04452615 toolbox.n.01 objects 39
316
+ 204 nerf gun nerf gun 5 40 7 otherprop Objects objects 39
317
+ 693 binders binder 5 40 7 binder otherprop Objects objects 39
318
+ 179 desk lamp desk lamp 5 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
319
+ 1221 quadcopter quadcopter 5 40 7 otherprop Objects objects 39
320
+ 1222 pitcher pitcher 5 40 7 pitcher otherprop Objects n03950228 pitcher.n.02 objects 39
321
+ 1223 hanging hanging 5 40 7 otherprop Objects misc 40
322
+ 1224 mail mail 5 40 7 otherprop Objects misc 40
323
+ 1225 closet ceiling closet ceiling 5 22 3 ceiling ceiling Ceiling n02990373 ceiling.n.01 ceiling 17
324
+ 1226 hoverboard hoverboard 5 40 7 otherprop Objects objects 39
325
+ 1227 beanbag chair beanbag chair 5 39 6 bean bag otherfurniture Furniture n02816656 beanbag.n.01 chair 3
326
+ 571 water heater water heater 5 40 7 water heater otherprop Objects n04560113 water_heater.n.01 misc 40
327
+ 1228 spray bottle spray bottle 5 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
328
+ 556 rope rope 5 40 7 rope otherprop Objects n04108268 rope.n.01 objects 39
329
+ 280 plastic container plastic container 5 40 7 container otherprop Objects objects 39
330
+ 1229 soap bottle soap bottle 5 40 7 soap otherprop Objects objects 39
331
+ 1230 ikea bag ikea bag 4 37 7 bag bag Objects 2773838 n02773838 bag.n.06 objects 39
332
+ 1231 sleeping bag sleeping bag 4 40 7 otherprop Objects n04235860 sleeping_bag.n.01 objects 39
333
+ 1232 duffel bag duffel bag 4 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
334
+ 746 frying pan frying pan 4 40 7 frying pan otherprop Objects n03400231 frying_pan.n.01 objects 39
335
+ 1233 oven mitt oven mitt 4 40 7 otherprop Objects objects 39
336
+ 1234 pot pot 4 40 7 pot otherprop Objects n04235860 sleeping_bag.n.01 objects 39
337
+ 144 hand dryer hand dryer 4 40 7 otherprop Objects objects 39
338
+ 282 dollhouse dollhouse 4 39 6 doll house otherfurniture Furniture n03219483 dollhouse.n.01 objects 39
339
+ 167 shampoo bottle shampoo bottle 4 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
340
+ 1235 hair brush hair brush 4 40 7 otherprop Objects n02908217 brush.n.02 objects 39
341
+ 1236 tennis racket tennis racket 4 40 7 otherprop Objects n04409806 tennis_racket.n.01 objects 39
342
+ 1237 display case display case 4 40 7 case otherprop Objects objects 39
343
+ 234 ping pong table ping pong table 4 39 6 ping pong table otherfurniture Furniture table table table 4379243 n04379243 table.n.02 table 5
344
+ 563 boiler boiler 4 40 7 otherprop Objects misc 40
345
+ 1238 bag of coffee beans bag of coffee beans 4 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
346
+ 1239 bananas banana 4 40 7 otherprop Objects n00021265 food.n.01 objects 39
347
+ 1240 carseat carseat 4 40 7 otherprop Objects misc 40
348
+ 366 helmet helmet 4 40 7 otherprop Objects helmet 3513137 n03513137 helmet.n.02 clothes 38
349
+ 816 umbrella umbrella 4 40 7 umbrella otherprop Objects n04507155 umbrella.n.01 objects 39
350
+ 1241 coffee box coffee box 4 40 7 otherprop Objects objects 39
351
+ 719 envelope envelope 4 40 7 envelope otherprop Objects n03291819 envelope.n.01 objects 39
352
+ 284 wet floor sign wet floor sign 4 40 7 sign otherprop Objects misc 40
353
+ 1242 clothing rack clothing rack 4 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
354
+ 247 controller controller 4 40 7 otherprop Objects n03096960 control.n.09 objects 39
355
+ 1243 bath walls bathroom wall 4 1 12 wall wall Wall n04546855 wall.n.01 wall 1
356
+ 1244 podium podium 4 39 6 otherfurniture Furniture n03159640 dais.n.01 furniture 36
357
+ 1245 storage box storage box 4 29 7 box box Objects n02883344 box.n.01 objects 39
358
+ 1246 dolly dolly 4 40 7 otherprop Objects misc 40
359
+ 1247 shampoo shampoo 3 40 7 otherprop Objects n04183516 shampoo.n.01 objects 39
360
+ 592 paper tray paper tray 3 40 7 paper tray otherprop Objects objects 39
361
+ 385 cabinet door cabinet door 3 8 12 door door Wall door door 4
362
+ 1248 changing station changing station 3 40 7 otherprop Objects misc 40
363
+ 1249 poster printer poster printer 3 40 7 printer otherprop Objects printer 4004475 n04004475 printer.n.03 appliances 37
364
+ 133 screen screen 3 40 7 otherprop Objects n03151077 curtain.n.01 curtain 12
365
+ 301 soap bar soap bar 3 38 7 bar otherstructure Objects objects 39
366
+ 1250 crutches crutches 3 40 7 otherprop Objects n03141823 crutch.n.01 objects 39
367
+ 379 studio light studio light 3 38 7 light otherstructure Objects lighting 28
368
+ 130 stack of cups cup 3 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
369
+ 1251 toilet flush button toilet flush button 3 40 7 otherprop Objects objects 39
370
+ 450 trunk trunk 3 40 7 otherprop Objects misc 40
371
+ 1252 grocery bag grocery bag 3 37 7 bag bag Objects suitcase 2773838 n03461288 grocery_bag.n.01 objects 39
372
+ 316 plastic bin plastic bin 3 40 7 bin otherprop Objects objects 39
373
+ 1253 pizza box pizza box 3 29 7 box box Objects objects 39
374
+ 385 cabinet doors cabinet door 3 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 door 4
375
+ 1254 legs legs 3 31 7 person person Objects person n05217688 person.n.02 misc 40
376
+ 461 car car 3 40 7 car otherprop Objects car car 2958343 n02958343 car.n.01 misc 40
377
+ 1255 shaving cream shaving cream 3 40 7 otherprop Objects n04186051 shaving_cream.n.01 objects 39
378
+ 1256 luggage stand luggage stand 3 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
379
+ 599 shredder shredder 3 40 7 otherprop Objects n04210120 shredder.n.01 objects 39
380
+ 281 statue statue 3 40 7 sculpture otherprop Objects n04306847 statue.n.01 misc 40
381
+ 1257 urinal urinal 3 33 7 toilet toilet Objects toilet toilet n04515991 urinal.n.01 toilet 18
382
+ 1258 hose hose 3 40 7 otherprop Objects n03539875 hose.n.03 misc 40
383
+ 1259 bike pump bike pump 3 40 7 otherprop Objects objects 39
384
+ 319 coatrack coatrack 3 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
385
+ 1260 bear bear 3 40 7 otherprop Objects objects 39
386
+ 28 wall lamp lamp 3 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
387
+ 1261 humidifier humidifier 3 40 7 otherprop Objects objects 39
388
+ 546 toothpaste toothpaste 3 40 7 toothpaste otherprop Objects objects 39
389
+ 1262 mouthwash bottle mouthwash bottle 3 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
390
+ 1263 poster cutter poster cutter 3 40 7 otherprop Objects objects 39
391
+ 1264 golf bag golf bag 3 37 7 bag bag Objects suitcase 2773838 n03445617 golf_bag.n.01 objects 39
392
+ 1265 food container food container 3 40 7 container otherprop Objects n03094503 container.n.01 objects 39
393
+ 1266 camera camera 3 40 7 otherprop Objects objects 39
394
+ 28 table lamp lamp 3 35 7 lamp lamp Objects lamp lamp 3636649 n04380533 table_lamp.n.01 lighting 28
395
+ 1267 yoga mat yoga mat 3 20 5 floor mat floor mat Floor n03727837 mat.n.01 floor 2
396
+ 1268 card card 3 40 7 otherprop Objects objects 39
397
+ 1269 mug mug 3 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
398
+ 188 shower doors shower door 3 38 7 otherstructure Objects n04208936 shower.n.01 door 4
399
+ 689 cardboard cardboard 3 40 7 otherprop Objects objects 39
400
+ 1270 rack stand rack stand 3 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
401
+ 1271 boxes of paper boxes of paper 3 29 7 box box Objects n02883344 box.n.01 objects 39
402
+ 1272 flag flag 3 40 7 otherprop Objects misc 40
403
+ 354 futon futon 3 39 6 mattress otherfurniture Furniture n03408444 futon.n.01 sofa 10
404
+ 339 magazine magazine 3 40 7 magazine otherprop Objects n06595351 magazine.n.01 objects 39
405
+ 1009 exit sign exit sign 3 40 7 exit sign otherprop Objects misc 40
406
+ 1273 rolled poster rolled poster 3 40 7 otherprop Objects objects 39
407
+ 1274 wheel wheel 3 40 7 otherprop Objects objects 39
408
+ 15 pictures picture 3 11 8 picture picture Picture n03931044 picture.n.01 picture 6
409
+ 1275 blackboard eraser blackboard eraser 3 40 7 eraser otherprop Objects n03294833 eraser.n.01 objects 39
410
+ 361 organizer organizer 3 40 7 otherprop Objects n03918737 personal_digital_assistant.n.01 objects 39
411
+ 1276 doll doll 3 40 7 toy otherprop Objects n03219135 doll.n.01 objects 39
412
+ 326 book rack book rack 3 39 6 bookrack otherfurniture Furniture objects 39
413
+ 1277 laundry bag laundry bag 3 40 7 laundry basket otherprop Objects basket 2801938 n03050864 clothes_hamper.n.01 objects 39
414
+ 1278 sponge sponge 3 40 7 otherprop Objects n01906749 sponge.n.04 objects 39
415
+ 116 seating seat 3 39 6 furniture otherfurniture Furniture n04161981 seat.n.03 furniture 36
416
+ 1184 folded chairs folded chair 2 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
417
+ 1279 lotion bottle lotion bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
418
+ 212 can can 2 40 7 can otherprop Objects can 2946921 n02946921 can.n.01 objects 39
419
+ 1280 lunch box lunch box 2 40 7 otherprop Objects objects 39
420
+ 1281 food display food display 2 40 7 otherprop Objects misc 40
421
+ 794 storage shelf storage shelf 2 40 7 otherprop Objects shelving 31
422
+ 1282 sliding wood door sliding wood door 2 40 7 otherprop Objects door 4
423
+ 955 pants pants 2 40 7 otherprop Objects n04489008 trouser.n.01 clothes 38
424
+ 387 wood wood 2 40 7 otherprop Objects misc 40
425
+ 69 boards board 2 38 7 board otherstructure Objects board_panel 35
426
+ 65 bottles bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
427
+ 523 washcloth washcloth 2 40 7 otherprop Objects n04554523 washcloth.n.01 towel 20
428
+ 389 workbench workbench 2 39 6 bench otherfurniture Furniture bench table 4379243 n04600486 workbench.n.01 table 5
429
+ 29 open kitchen cabinet kitchen cabinet 2 3 6 cabinet cabinet Furniture n02933112 cabinet.n.01 cabinet 7
430
+ 1283 organizer shelf organizer shelf 2 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
431
+ 146 frame frame 2 38 7 otherstructure Objects misc 40
432
+ 130 cups cup 2 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
433
+ 372 exercise ball exercise ball 2 40 7 ball otherprop Objects n04285146 sports_equipment.n.01 gym_equipment 33
434
+ 289 easel easel 2 39 6 stand otherfurniture Furniture n03262809 easel.n.01 furniture 36
435
+ 440 garbage bag garbage bag 2 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
436
+ 321 roomba roomba 2 40 7 otherprop Objects objects 39
437
+ 976 garage door garage door 2 38 7 garage door otherstructure Objects door door 4
438
+ 1256 luggage rack luggage stand 2 39 6 stand otherfurniture Furniture n04038440 shelving 31
439
+ 1284 bike lock bike lock 2 40 7 otherprop Objects objects 39
440
+ 1285 briefcase briefcase 2 40 7 otherprop Objects n02900705 briefcase.n.01 objects 39
441
+ 357 hand towel hand towel 2 27 7 towel towel Objects n03490006 hand_towel.n.01 towel 20
442
+ 1286 bath products bath product 2 40 7 otherprop Objects objects 39
443
+ 1287 star star 2 40 7 otherprop Objects n09444783 star.n.03 misc 40
444
+ 365 map map 2 40 7 map otherprop Objects n03720163 map.n.01 misc 40
445
+ 1288 coffee bean bag coffee bean bag 2 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
446
+ 81 headboard headboard 2 39 6 headboard otherfurniture Furniture n03502200 headboard.n.01 bed 11
447
+ 1289 ipad ipad 2 40 7 otherprop Objects objects 39
448
+ 1290 display rack display rack 2 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
449
+ 948 traffic cone traffic cone 2 40 7 cone otherprop Objects cone objects 39
450
+ 174 toiletry toiletry 2 40 7 otherprop Objects n04447443 toiletry.n.01 objects 39
451
+ 1028 canopy canopy 2 40 7 otherprop Objects misc 40
452
+ 1291 massage chair massage chair 2 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
453
+ 1292 paper organizer paper organizer 2 40 7 otherprop Objects objects 39
454
+ 1005 barricade barricade 2 40 7 otherprop Objects misc 40
455
+ 235 platform platform 2 38 7 otherstructure Objects misc 40
456
+ 1293 cap cap 2 40 7 hat otherprop Objects n03497657 hat.n.01 clothes 38
457
+ 1294 dumbbell plates dumbbell plates 2 40 7 otherprop Objects objects 39
458
+ 1295 elevator elevator 2 38 7 otherstructure Objects misc 40
459
+ 1296 cooking pan cooking pan 2 40 7 pan otherprop Objects n03880531 pan.n.01 objects 39
460
+ 1297 trash bag trash bag 2 37 7 bag bag Objects objects 39
461
+ 1298 santa santa 2 40 7 otherprop Objects misc 40
462
+ 1299 jewelry box jewelry box 2 29 7 box box Objects n02883344 box.n.01 objects 39
463
+ 1300 boat boat 2 40 7 otherprop Objects misc 40
464
+ 1301 sock sock 2 21 7 clothes clothes Objects n04254777 sock.n.01 clothes 38
465
+ 1051 kinect kinect 2 40 7 kinect otherprop Objects objects 39
466
+ 566 crib crib 2 39 6 crib otherfurniture Furniture furniture 36
467
+ 1302 plastic storage bin plastic storage bin 2 40 7 container otherprop Objects n03094503 container.n.01 objects 39
468
+ 1062 cooler cooler 2 24 6 refridgerator refridgerator Furniture n03102654 cooler.n.01 appliances 37
469
+ 1303 kitchen apron kitchen apron 2 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
470
+ 1304 dishwashing soap bottle dishwashing soap bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
471
+ 1305 xbox controller xbox controller 2 40 7 otherprop Objects objects 39
472
+ 1306 banana holder banana holder 2 40 7 otherprop Objects objects 39
473
+ 298 ping pong paddle ping pong paddle 2 40 7 otherprop Objects table 5
474
+ 1307 airplane airplane 2 40 7 otherprop Objects misc 40
475
+ 1308 conditioner bottle conditioner bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
476
+ 1309 tea kettle tea kettle 2 40 7 tea kettle otherprop Objects n04397768 teakettle.n.01 objects 39
477
+ 43 bedframe bedframe 2 39 6 otherfurniture Furniture n02822579 bedstead.n.01 bed 11
478
+ 1310 wood beam wood beam 2 38 7 otherstructure Objects beam 29
479
+ 593 toilet paper package toilet paper package 2 40 7 otherprop Objects objects 39
480
+ 1311 wall mounted coat rack wall mounted coat rack 2 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
481
+ 1312 film light film light 2 40 7 otherprop Objects lighting 28
482
+ 749 ceiling lamp ceiling lamp 1 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
483
+ 623 chain chain 1 40 7 otherprop Objects chair 3
484
+ 1313 sofa sofa 1 6 9 sofa sofa Sofa sofa sofa sofa 4256520 n04256520 sofa.n.01 sofa 10
485
+ 99 closet wardrobe wardrobe 1 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
486
+ 265 sweater sweater 1 40 7 otherprop Objects n04370048 sweater.n.01 clothes 38
487
+ 1314 kitchen mixer kitchen mixer 1 40 7 otherprop Objects appliances 37
488
+ 99 wardrobe wardrobe 1 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
489
+ 1315 water softener water softener 1 40 7 otherprop Objects misc 40
490
+ 448 banister banister 1 38 7 banister otherstructure Objects n02788148 bannister.n.02 railing 30
491
+ 257 trolley trolley 1 40 7 trolley otherprop Objects n04335435 streetcar.n.01 misc 40
492
+ 1316 pantry shelf pantry shelf 1 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
493
+ 786 sofa bed sofa bed 1 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
494
+ 801 loofa loofa 1 40 7 otherprop Objects objects 39
495
+ 972 shower faucet handle shower faucet handle 1 40 7 handle otherprop Objects shower 23
496
+ 1317 toy piano toy piano 1 40 7 toy otherprop Objects n03964744 plaything.n.01 objects 39
497
+ 1318 fish fish 1 40 7 otherprop Objects n02512053 fish.n.01 objects 39
498
+ 75 file cabinets file cabinet 1 3 6 cabinet cabinet Furniture cabinet 2933112 n03337140 file.n.03 cabinet 7
499
+ 657 cat litter box cat litter box 1 29 7 box box Objects objects 39
500
+ 561 electric panel electric panel 1 40 7 otherprop Objects misc 40
501
+ 93 suitcases suitcase 1 40 7 luggage otherprop Objects n02774630 baggage.n.01 objects 39
502
+ 513 curtain rod curtain rod 1 38 7 curtain rod otherstructure Objects curtain 12
503
+ 411 bunk bed bunk bed 1 39 6 bunk bed otherfurniture Furniture bed bed bed 2818832 n02920259 bunk_bed.n.01 bed 11
504
+ 1122 chandelier chandelier 1 38 7 chandelier otherstructure Objects n03005285 chandelier.n.01 lighting 28
505
+ 922 tape tape 1 40 7 tape otherprop Objects objects 39
506
+ 88 plates plate 1 40 7 otherprop Objects n03959485 plate.n.04 objects 39
507
+ 518 alarm alarm 1 40 7 alarm otherprop Objects clock 3046257 n02694662 alarm_clock.n.01 objects 39
508
+ 814 fire hose fire hose 1 40 7 otherprop Objects n03346004 fire_hose.n.01 misc 40
509
+ 1319 toy dinosaur toy dinosaur 1 40 7 toy otherprop Objects n03964744 plaything.n.01 objects 39
510
+ 1320 cone cone 1 40 7 otherprop Objects objects 39
511
+ 649 glass doors glass door 1 8 12 door door Wall door n03221720 door.n.01 door 4
512
+ 607 hatrack hatrack 1 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
513
+ 819 subwoofer subwoofer 1 40 7 speaker otherprop Objects speaker 3691459 n04349401 subwoofer.n.01 objects 39
514
+ 1321 fire sprinkler fire sprinkler 1 40 7 otherprop Objects misc 40
515
+ 1322 trash cabinet trash cabinet 1 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
516
+ 1204 pantry walls pantry wall 1 1 12 wall wall Wall n04546855 wall.n.01 wall 1
517
+ 227 photo photo 1 40 7 photo otherprop Objects n03925226 photograph.n.01 picture 6
518
+ 817 barrier barrier 1 40 7 otherprop Objects n02796623 barrier.n.01 misc 40
519
+ 130 stacks of cups cup 1 40 7 otherprop Objects n03147509 cup.n.01 objects 39
520
+ 712 beachball beachball 1 40 7 ball otherprop Objects n02814224 beach_ball.n.01 objects 39
521
+ 1323 folded boxes folded boxes 1 40 7 otherprop Objects objects 39
522
+ 1324 contact lens solution bottle contact lens solution bottle 1 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
523
+ 673 covered box covered box 1 29 7 box box Objects objects 39
524
+ 459 folder folder 1 40 7 folder otherprop Objects n03376279 folder.n.02 objects 39
525
+ 643 mail trays mail tray 1 40 7 mail tray otherprop Objects objects 39
526
+ 238 slipper slipper 1 40 7 otherprop Objects n04241394 slipper.n.01 clothes 38
527
+ 765 magazine rack magazine rack 1 39 6 stand otherfurniture Furniture n03704549 magazine_rack.n.01 shelving 31
528
+ 1008 sticker sticker 1 40 7 sticker otherprop Objects n07272545 gummed_label.n.01 objects 39
529
+ 225 lotion lotion 1 40 7 otherprop Objects n03690938 lotion.n.01 objects 39
530
+ 1083 buddha buddha 1 40 7 otherprop Objects objects 39
531
+ 813 file organizer file organizer 1 40 7 otherprop Objects objects 39
532
+ 138 paper towel rolls paper towel roll 1 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
533
+ 1145 night lamp night lamp 1 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
534
+ 796 fuse box fuse box 1 40 7 otherprop Objects misc 40
535
+ 1325 knife block knife block 1 40 7 otherprop Objects objects 39
536
+ 363 furnace furnace 1 39 6 furnace otherfurniture Furniture n03404449 furnace.n.01
537
+ 1174 cd cases cd case 1 40 7 otherprop Objects objects 39
538
+ 38 stools stool 1 40 7 stool otherprop Objects stool n04326896 stool.n.01 stool 19
539
+ 1326 hand sanitzer dispenser hand sanitzer dispenser 1 40 7 otherprop Objects n04254120 soap_dispenser.n.01 objects 39
540
+ 997 teapot teapot 1 40 7 tea pot otherprop Objects n04398044 teapot.n.01 objects 39
541
+ 1327 pen holder pen holder 1 40 7 otherprop Objects objects 39
542
+ 1328 tray rack tray rack 1 40 7 otherprop Objects objects 39
543
+ 1329 wig wig 1 40 7 otherprop Objects n04584207 wig.n.01 objects 39
544
+ 182 switch switch 1 40 7 otherprop Objects n04372370 switch.n.01 misc 40
545
+ 280 plastic containers plastic container 1 40 7 container otherprop Objects n03094503 container.n.01 objects 39
546
+ 1330 night light night light 1 40 7 otherprop Objects lighting 28
547
+ 1331 notepad notepad 1 40 7 otherprop Objects objects 39
548
+ 1332 mail bin mail bin 1 40 7 otherprop Objects misc 40
549
+ 1333 elevator button elevator button 1 40 7 otherprop Objects misc 40
550
+ 939 gaming wheel gaming wheel 1 40 7 otherprop Objects objects 39
551
+ 1334 drum set drum set 1 40 7 otherprop Objects objects 39
552
+ 480 cosmetic bag cosmetic bag 1 37 7 bag bag Objects objects 39
553
+ 907 coffee mug coffee mug 1 40 7 vessel otherprop Objects cup or mug 3797390 n03063599 coffee_mug.n.01 objects 39
554
+ 1335 closet shelf closet shelf 1 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
555
+ 1336 baby mobile baby mobile 1 40 7 otherprop Objects objects 39
556
+ 829 diaper bin diaper bin 1 40 7 bin otherprop Objects objects 39
557
+ 947 door wall door wall 1 1 12 wall wall Wall wall 1
558
+ 1116 stepstool stepstool 1 40 7 step stool otherprop Objects objects 39
559
+ 599 paper shredder shredder 1 40 7 otherprop Objects n04210120 shredder.n.01 objects 39
560
+ 733 dress rack dress rack 1 40 7 otherprop Objects n03238762 dress_rack.n.01 misc 40
561
+ 123 cover cover 1 40 7 blanket otherprop Objects objects 39
562
+ 506 shopping bag shopping bag 1 37 7 bag bag Objects n04204081 shopping_bag.n.01 objects 39
563
+ 569 sliding door sliding door 1 8 12 door door Wall door n04239074 sliding_door.n.01 door 4
564
+ 1337 exercise bike exercise bike 1 40 7 machine otherprop Objects n04210120 shredder.n.01 gym_equipment 33
565
+ 1338 recliner chair recliner chair 1 5 4 chair chair Chair chair chair chair 3001627 n03238762 dress_rack.n.01 chair 3
566
+ 1314 kitchenaid mixer kitchen mixer 1 40 7 otherprop Objects appliances 37
567
+ 1339 soda can soda can 1 40 7 can otherprop Objects can 2946921 n02946921 can.n.01 objects 39
568
+ 1340 stovetop stovetop 1 38 7 stove otherstructure Objects stove 4330267 n04330267 stove.n.02 appliances 37
569
+ 851 stepladder stepladder 1 39 6 ladder otherfurniture Furniture stairs n04315599 step_ladder.n.01 stairs 16
570
+ 142 tap tap 1 40 7 faucet otherprop Objects faucet 3325088 n04559451 water_faucet.n.01 objects 39
571
+ 436 cable cable 1 40 7 cables otherprop Objects objects 39
572
+ 1341 baby changing station baby changing station 1 39 6 otherfurniture Furniture furniture 36
573
+ 1342 costume costume 1 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
574
+ 885 rocking chair rocking chair 1 5 4 chair chair Chair chair chair chair 3001627 n04099969 rocking_chair.n.01 chair 3
575
+ 693 binder binder 1 40 7 binder otherprop Objects objects 39
576
+ 815 media center media center 1 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
577
+ 401 towel rack towel rack 1 40 7 otherprop Objects n04459773 towel_rack.n.01 misc 40
578
+ 1343 medal medal 1 40 7 otherprop Objects objects 39
579
+ 1184 stack of folded chairs folded chair 1 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
580
+ 1344 telescope telescope 1 40 7 otherprop Objects n04403638 telescope.n.01 objects 39
581
+ 1345 closet doorframe closet doorframe 1 8 12 door door Wall door door 4
582
+ 160 glass glass 1 38 7 glass otherstructure Objects n03438257 glass.n.02 misc 40
583
+ 1126 baseball cap baseball cap 1 40 7 otherprop Objects cap 2954340 n02799323 baseball_cap.n.01 clothes 38
584
+ 1346 battery disposal jar battery disposal jar 1 40 7 jar otherprop Objects jar 3593526 n03593526 jar.n.01 objects 39
585
+ 332 mop mop 1 40 7 otherprop Objects n04367480 swab.n.02 objects 39
586
+ 397 tank tank 1 40 7 otherprop Objects objects 39
587
+ 643 mail tray mail tray 1 40 7 mail tray otherprop Objects objects 39
588
+ 551 centerpiece centerpiece 1 40 7 centerpiece otherprop Objects n02994419 centerpiece.n.02 objects 39
589
+ 1163 stick stick 1 40 7 stick otherprop Objects objects 39
590
+ 1347 closet floor closet floor 1 2 5 floor floor Floor n03365592 floor.n.01 floor 2
591
+ 1348 dryer sheets dryer sheets 1 40 7 otherprop Objects objects 39
592
+ 803 bycicle bycicle 1 40 7 otherprop Objects misc 40
593
+ 484 flower stand flower stand 1 39 6 stand otherfurniture Furniture furniture 36
594
+ 1349 air mattress air mattress 1 4 1 bed bed Bed bed bed bed 2818832 n02690809 air_mattress.n.01 bed 11
595
+ 1350 clip clip 1 40 7 otherprop Objects objects 39
596
+ 222 side table side table 1 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
597
+ 1253 pizza boxes pizza box 1 29 7 box box Objects n02883344 box.n.01 objects 39
598
+ 1351 display display 1 39 7 otherfurniture Furniture n03211117 display.n.06 misc 40
599
+ 1352 postcard postcard 1 40 7 otherprop Objects objects 39
600
+ 828 display sign display sign 1 40 7 sign otherprop Objects misc 40
601
+ 1353 paper towel paper towel 1 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
602
+ 612 boots boot 1 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
603
+ 1354 tennis racket bag tennis racket bag 1 40 7 otherprop Objects objects 39
604
+ 1355 air hockey table air hockey table 1 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
605
+ 1301 socks sock 1 21 7 clothes clothes Objects n04254777 sock.n.01 clothes 38
606
+ 1356 food bag food bag 1 37 7 bag bag Objects objects 39
607
+ 1199 clothes hangers clothes hanger 1 40 7 otherprop Objects n03057920 coat_hanger.n.01 misc 40
608
+ 1357 starbucks cup starbucks cup 1 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
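Both label TSVs in this commit share the same 17-column header (`id raw_category category count nyu40id eigen13id nyuClass nyu40class eigen13class ModelNet40 ModelNet10 ShapeNetCore55 synsetoffset wnsynsetid wnsynsetkey mpcat40 mpcat40index`; it is repeated below at the start of the second file). Columns without a NYU/ModelNet/ShapeNet/WordNet match are left empty, and a few rows (e.g. `furnace` above) end early, so the rows are ragged. As a minimal sketch of how such a file can be consumed — illustrative only, not part of the uploaded code; just the file path comes from this commit — a raw-category-to-NYU40 lookup can be built with the standard `csv` module:

```python
import csv

# Path of the label file as uploaded in this commit; adjust to your checkout.
TSV_PATH = (
    "scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/"
    "preprocessing/scannet/meta_data/scannetv2-labels.combined.tsv"
)

# Build a raw ScanNet category -> NYU40 class id lookup.
# Rows can be ragged and fields can be empty, so parse defensively.
raw_to_nyu40 = {}
with open(TSV_PATH, newline="") as f:
    for row in csv.DictReader(f, delimiter="\t"):
        nyu40id = (row.get("nyu40id") or "").strip()
        if nyu40id:
            raw_to_nyu40[row["raw_category"]] = int(nyu40id)

print(raw_to_nyu40["office chair"])  # -> 5, i.e. the NYU40 "chair" class
```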
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2-labels.combined.tsv ADDED
@@ -0,0 +1,608 @@
+ id raw_category category count nyu40id eigen13id nyuClass nyu40class eigen13class ModelNet40 ModelNet10 ShapeNetCore55 synsetoffset wnsynsetid wnsynsetkey mpcat40 mpcat40index
+ 1 wall wall 8277 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 2 chair chair 4646 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 22 books book 1678 23 2 book books Books n02870526 book.n.11 objects 39
+ 3 floor floor 1553 2 5 floor floor Floor n03365592 floor.n.01 floor 2
+ 5 door door 1483 8 12 door door Wall door n03221720 door.n.01 door 4
+ 1163 object object 1313 40 7 otherprop Objects objects 39
+ 16 window window 1209 9 13 window window Window n04587648 window.n.01 window 9
+ 4 table table 1170 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 56 trash can trash can 1090 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 13 pillow pillow 937 18 7 pillow pillow Objects pillow 3938244 n03938244 pillow.n.01 cushion 8
+ 15 picture picture 862 11 8 picture picture Picture n03931044 picture.n.01 picture 6
+ 41 ceiling ceiling 806 22 3 ceiling ceiling Ceiling n02990373 ceiling.n.01 ceiling 17
+ 26 box box 775 29 7 box box Objects n02883344 box.n.01 objects 39
+ 161 doorframe doorframe 768 8 12 door door Wall door doorframe.n.01 door 4
+ 19 monitor monitor 765 40 7 monitor otherprop Objects monitor monitor tv or monitor 3211117 n03782190 monitor.n.04 objects 39
+ 7 cabinet cabinet 731 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 9 desk desk 680 14 10 desk desk Table desk desk table 4379243 n03179701 desk.n.01 table 5
+ 8 shelf shelf 641 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 10 office chair office chair 595 5 4 chair chair Chair chair chair chair 3001627 n04373704 swivel_chair.n.01 chair 3
+ 31 towel towel 570 27 7 towel towel Objects n04459362 towel.n.01 towel 20
+ 6 couch couch 502 6 9 sofa sofa Sofa sofa sofa sofa 4256520 n04256520 sofa.n.01 sofa 10
+ 14 sink sink 488 34 7 sink sink Objects sink n04223580 sink.n.01 sink 15
+ 48 backpack backpack 479 40 7 backpack otherprop Objects n02769748 backpack.n.01 objects 39
+ 28 lamp lamp 419 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
+ 11 bed bed 370 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
+ 18 bookshelf bookshelf 360 10 6 bookshelf bookshelf Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 71 mirror mirror 349 19 7 mirror mirror Objects n03773035 mirror.n.01 mirror 21
+ 21 curtain curtain 347 16 13 curtain curtain Window curtain n03151077 curtain.n.01 curtain 12
+ 40 plant plant 331 40 7 plant otherprop Objects plant n00017222 plant.n.02 plant 14
+ 52 whiteboard whiteboard 327 30 7 whiteboard whiteboard Objects n03211616 display_panel.n.01 board_panel 35
+ 96 radiator radiator 322 39 6 radiator otherfurniture Furniture n04041069 radiator.n.02 misc 40
+ 22 book book 318 23 2 book books Books n02870526 book.n.11 objects 39
+ 29 kitchen cabinet kitchen cabinet 310 3 6 cabinet cabinet Furniture n02933112 cabinet.n.01 cabinet 7
+ 49 toilet paper toilet paper 291 40 7 toilet paper otherprop Objects n15075141 toilet_tissue.n.01 objects 39
+ 29 kitchen cabinets kitchen cabinet 289 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 23 armchair armchair 281 5 4 chair chair Chair chair chair chair 3001627 n02738535 armchair.n.01 chair 3
+ 63 shoes shoe 272 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
+ 24 coffee table coffee table 258 7 10 coffee table table Table table table table 4379243 n03063968 coffee_table.n.01 table 5
+ 17 toilet toilet 256 33 7 toilet toilet Objects toilet toilet n04446276 toilet.n.01 toilet 18
+ 47 bag bag 252 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
+ 32 clothes clothes 248 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
+ 46 keyboard keyboard 246 40 7 keyboard otherprop Objects keyboard computer keyboard 3085013 n03085013 computer_keyboard.n.01 objects 39
+ 65 bottle bottle 226 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
+ 97 recycling bin recycling bin 225 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 34 nightstand nightstand 224 32 6 night stand night stand Furniture night_stand night_stand n03015254 chest_of_drawers.n.01 chest_of_drawers 13
+ 38 stool stool 221 40 7 stool otherprop Objects stool n04326896 stool.n.01 stool 19
+ 33 tv tv 219 25 11 television television TV tv or monitor 3211117 n03211117 display.n.06 tv_monitor 22
+ 75 file cabinet file cabinet 217 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 36 dresser dresser 213 17 6 dresser dresser Furniture dresser dresser n03015254 chest_of_drawers.n.01 chest_of_drawers 13
+ 64 computer tower computer tower 203 40 7 computer otherprop Objects n03082979 computer.n.01 objects 39
+ 32 clothing clothes 165 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
+ 101 telephone telephone 164 40 7 telephone otherprop Objects telephone 4401088 n04401088 telephone.n.01 objects 39
+ 130 cup cup 157 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
+ 27 refrigerator refrigerator 154 24 6 refridgerator refridgerator Furniture n04070727 refrigerator.n.01 appliances 37
+ 44 end table end table 147 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 131 jacket jacket 146 40 7 jacket otherprop Objects n03589791 jacket.n.01 clothes 38
+ 55 shower curtain shower curtain 144 28 7 shower curtain shower curtain Objects curtain n04209239 shower_curtain.n.01 curtain 12
+ 42 bathtub bathtub 144 36 7 bathtub bathtub Objects bathtub bathtub tub 2808440 n02808440 bathtub.n.01 bathtub 25
+ 59 microwave microwave 141 40 7 microwave otherprop Objects microwave 3761084 n03761084 microwave.n.02 appliances 37
+ 159 kitchen counter kitchen counter 140 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 74 sofa chair sofa chair 129 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 82 paper towel dispenser paper towel dispenser 129 40 7 paper towel dispenser otherprop Objects objects 39
+ 1164 bathroom vanity bathroom vanity 126 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 table 5
+ 93 suitcase suitcase 118 40 7 luggage otherprop Objects n02773838 bag.n.06 objects 39
+ 77 laptop laptop 111 40 7 laptop otherprop Objects laptop laptop 3642806 n03642806 laptop.n.01 objects 39
+ 67 ottoman ottoman 111 39 6 ottoman otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
+ 128 shower walls shower wall 109 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 50 printer printer 106 40 7 printer otherprop Objects printer 4004475 n04004475 printer.n.03 appliances 37
+ 35 counter counter 104 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 69 board board 100 38 7 board otherstructure Objects board_panel 35
+ 100 soap dispenser soap dispenser 99 40 7 otherprop Objects n04254120 soap_dispenser.n.01 objects 39
+ 62 stove stove 95 38 7 stove otherstructure Objects stove 4330267 n04330267 stove.n.02 appliances 37
+ 105 light light 93 38 7 light otherstructure Objects n03665366 light.n.02 lighting 28
+ 1165 closet wall closet wall 90 1 12 wall wall Wall n04546855 wall.n.01 wall 1
+ 165 mini fridge mini fridge 87 24 6 refridgerator refridgerator Furniture n03273913 electric_refrigerator.n.01 appliances 37
+ 7 cabinets cabinet 79 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 5 doors door 76 8 12 door door Wall door n03221720 door.n.01 door 4
+ 76 fan fan 75 40 7 fan otherprop Objects n03320046 fan.n.01 misc 40
+ 230 tissue box tissue box 73 40 7 tissue box otherprop Objects n02883344 box.n.01 objects 39
+ 54 blanket blanket 72 40 7 blanket otherprop Objects n02849154 blanket.n.01 objects 39
+ 125 bathroom stall bathroom stall 71 38 7 otherstructure Objects n02873839 booth.n.02 misc 40
+ 72 copier copier 70 40 7 otherprop Objects n03257586 duplicator.n.01 appliances 37
+ 68 bench bench 66 39 6 bench otherfurniture Furniture bench bench 2828884 n02828884 bench.n.01 seating 34
+ 145 bar bar 66 38 7 bar otherstructure Objects n02788689 bar.n.03 misc 40
+ 157 soap dish soap dish 65 40 7 soap dish otherprop Objects n04254009 soap_dish.n.01 objects 39
+ 1166 laundry hamper laundry hamper 65 40 7 laundry basket otherprop Objects objects 39
+ 132 storage bin storage bin 63 40 7 storage bin otherprop Objects objects 39
+ 1167 bathroom stall door bathroom stall door 62 8 12 door door Wall door n03221720 door.n.01 door 4
+ 232 light switch light switch 61 38 7 light switch otherstructure Objects n04372370 switch.n.01 misc 40
+ 134 coffee maker coffee maker 61 40 7 otherprop Objects n03063338 coffee_maker.n.01 appliances 37
+ 51 tv stand tv stand 61 39 6 tv stand otherfurniture Furniture tv_stand n03290653 entertainment_center.n.01 furniture 36
+ 250 decoration decoration 60 40 7 otherprop Objects n03169390 decoration.n.01 misc 40
+ 1168 ceiling light ceiling light 59 38 7 light otherstructure Objects n03665366 light.n.02 lighting 28
+ 342 range hood range hood 59 38 7 range hood otherstructure Objects range_hood n04053677 range_hood.n.01 misc 40
+ 89 blackboard blackboard 58 38 7 blackboard otherstructure Objects n02846511 blackboard.n.01 board_panel 35
+ 103 clock clock 58 40 7 clock otherprop Objects clock 3046257 n03046257 clock.n.01 objects 39
+ 99 wardrobe closet wardrobe 54 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
+ 95 rail rail 53 38 7 railing otherstructure Objects n04047401 railing.n.01 railing 30
+ 154 bulletin board bulletin board 53 38 7 board otherstructure Objects n03211616 display_panel.n.01 board_panel 35
+ 140 mat mat 52 20 5 floor mat floor mat Floor n03727837 mat.n.01 floor 2
+ 1169 trash bin trash bin 52 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
+ 193 ledge ledge 51 38 7 otherstructure Objects n09337253 ledge.n.01 misc 40
+ 116 seat seat 49 39 6 furniture otherfurniture Furniture n04161981 seat.n.03 furniture 36
+ 202 mouse mouse 49 40 7 mouse otherprop Objects n03793489 mouse.n.04 objects 39
+ 73 basket basket 48 40 7 basket otherprop Objects basket 2801938 n02801938 basket.n.01 objects 39
+ 78 shower shower 48 38 7 otherstructure Objects n04208936 shower.n.01 shower 23
+ 1170 dumbbell dumbbell 48 40 7 otherprop Objects n03255030 dumbbell.n.01 objects 39
+ 79 paper paper 46 26 7 paper paper Objects n14974264 paper.n.01 objects 39
+ 80 person person 46 31 7 person person Objects person n05217688 person.n.02 misc 40
+ 141 windowsill windowsill 45 38 7 otherstructure Objects n04590263 windowsill.n.01 window 9
+ 57 closet closet 45 39 6 wardrobe otherfurniture Furniture wardrobe misc 40
+ 102 bucket bucket 45 40 7 bucket otherprop Objects n02909870 bucket.n.01 misc 40
+ 261 sign sign 44 40 7 sign otherprop Objects n04217882 signboard.n.01 objects 39
+ 118 speaker speaker 43 40 7 speaker otherprop Objects speaker 3691459 n03691459 loudspeaker.n.01 objects 39
+ 136 dishwasher dishwasher 43 38 7 dishwasher otherstructure Objects dishwasher 3207941 n03207941 dishwasher.n.01 appliances 37
+ 98 container container 43 40 7 container otherprop Objects n03094503 container.n.01 objects 39
+ 1171 stair rail stair rail 42 38 7 banister otherstructure Objects n02788148 bannister.n.02 railing 30
+ 170 shower curtain rod shower curtain rod 42 40 7 otherprop Objects curtain 12
+ 1172 tube tube 41 40 7 otherprop Objects misc 40
+ 1173 bathroom cabinet bathroom cabinet 39 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
+ 79 papers paper 39 26 7 paper paper Objects n14974264 paper.n.01 objects 39
+ 221 storage container storage container 39 40 7 container otherprop Objects objects 39
+ 570 paper bag paper bag 39 37 7 bag bag Objects n04122825 sack.n.01 objects 39
+ 138 paper towel roll paper towel roll 39 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
+ 168 ball ball 39 40 7 ball otherprop Objects objects 39
+ 276 closet doors closet door 38 8 12 door door Wall door n03221720 door.n.01 door 4
+ 106 laundry basket laundry basket 37 40 7 laundry basket otherprop Objects basket 2801938 n03050864 clothes_hamper.n.01 objects 39
+ 214 cart cart 37 40 7 cart otherprop Objects n03484083 handcart.n.01 shelving 31
+ 276 closet door closet door 35 8 12 door door Wall door n03221720 door.n.01 door 4
+ 323 dish rack dish rack 35 40 7 dish rack otherprop Objects n03207630 dish_rack.n.01 objects 39
+ 58 stairs stairs 35 38 7 stairs otherstructure Objects n04298308 stairway.n.01 stairs 16
+ 86 blinds blinds 35 13 13 blinds blinds Window n02851099 blind.n.03 blinds 32
+ 2 stack of chairs chair 35 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 399 purse purse 34 40 7 purse otherprop Objects n02774152 bag.n.04 objects 39
+ 121 bicycle bicycle 33 40 7 bicycle otherprop Objects bicycle 2834778 n02834778 bicycle.n.01 objects 39
+ 185 tray tray 32 40 7 tray otherprop Objects n04476259 tray.n.01 objects 39
+ 300 plunger plunger 30 40 7 otherprop Objects n03970156 plunger.n.03 objects 39
+ 180 paper cutter paper cutter 30 40 7 paper cutter otherprop Objects n03886940 paper_cutter.n.01 objects 39
+ 163 toilet paper dispenser toilet paper dispenser 29 40 7 otherprop Objects objects 39
+ 26 boxes box 29 29 7 box box Objects n02883344 box.n.01 objects 39
+ 66 bin bin 28 40 7 bin otherprop Objects n02839910 bin.n.01 objects 39
+ 208 toilet seat cover dispenser toilet seat cover dispenser 28 40 7 otherprop Objects objects 39
+ 112 guitar guitar 28 40 7 guitar otherprop Objects guitar guitar 3467517 n03467517 guitar.n.01 objects 39
+ 540 mailboxes mailbox 28 29 7 box box Objects mailbox 3710193 n03710193 mailbox.n.01 misc 40
+ 395 handicap bar handicap bar 27 38 7 bar otherstructure Objects misc 40
+ 166 fire extinguisher fire extinguisher 27 40 7 fire extinguisher otherprop Objects n03345837 fire_extinguisher.n.01 misc 40
+ 122 ladder ladder 27 39 6 ladder otherfurniture Furniture stairs n03632277 ladder.n.01 stairs 16
+ 120 column column 26 38 7 column otherstructure Objects n03074380 column.n.06 column 24
+ 107 pipe pipe 25 40 7 pipe otherprop Objects n03944672 pipe.n.02 misc 40
+ 283 vacuum cleaner vacuum cleaner 25 40 7 otherprop Objects n04517823 vacuum.n.04 objects 39
+ 88 plate plate 24 40 7 plate otherprop Objects n03959485 plate.n.04 objects 39
+ 90 piano piano 24 39 6 piano otherfurniture Furniture piano piano 3928116 n03928116 piano.n.01 furniture 36
+ 177 water cooler water cooler 24 39 6 water cooler otherfurniture Furniture n04559166 water_cooler.n.01 misc 40
+ 1174 cd case cd case 24 40 7 otherprop Objects objects 39
+ 562 bowl bowl 24 40 7 bowl otherprop Objects bowl bowl 2880940 n02880940 bowl.n.03 objects 39
+ 1175 closet rod closet rod 24 40 7 otherprop Objects n04100174 rod.n.01 misc 40
+ 1156 bathroom counter bathroom counter 24 12 6 counter counter Furniture table table table 4379243 n03116530 counter.n.01 counter 26
+ 84 oven oven 23 38 7 oven otherstructure Objects n03862676 oven.n.01 appliances 37
+ 104 stand stand 23 39 6 stand otherfurniture Furniture table table table 4379243 n04301000 stand.n.04 table 5
+ 229 scale scale 23 40 7 scale otherprop Objects n04141975 scale.n.07 objects 39
+ 70 washing machine washing machine 23 39 6 washing machine otherfurniture Furniture washing_machine 4554684 n04554684 washer.n.03 appliances 37
+ 325 broom broom 22 40 7 broom otherprop Objects n02906734 broom.n.01 objects 39
+ 169 hat hat 22 40 7 hat otherprop Objects n03497657 hat.n.01 clothes 38
+ 128 shower wall shower wall 22 1 12 wall wall Wall n04208936 shower.n.01 wall 1
+ 331 guitar case guitar case 21 40 7 guitar case otherprop Objects objects 39
+ 87 rack rack 21 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
+ 488 water pitcher water pitcher 21 40 7 pitcher otherprop Objects n03950228 pitcher.n.02 objects 39
+ 776 laundry detergent laundry detergent 21 40 7 otherprop Objects objects 39
+ 370 hair dryer hair dryer 21 40 7 hair dryer otherprop Objects n03483316 hand_blower.n.01 objects 39
+ 191 pillar pillar 21 38 7 column otherstructure Objects n03073977 column.n.07 column 24
+ 748 divider divider 20 40 7 otherprop Objects wall 1
+ 242 power outlet power outlet 19 40 7 otherprop Objects misc 40
+ 45 dining table dining table 19 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
+ 417 shower floor shower floor 19 2 5 floor floor Floor n04208936 shower.n.01 floor 2
+ 70 washing machines washing machine 19 39 6 washing machine otherfurniture Furniture washing_machine 4554684 n04554684 washer.n.03 appliances 37
+ 188 shower door shower door 19 8 12 door door Wall door n04208936 shower.n.01 door 4
+ 1176 coffee kettle coffee kettle 18 40 7 pot otherprop Objects n03612814 kettle.n.01 objects 39
+ 1177 wardrobe cabinet wardrobe 18 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
+ 1178 structure structure 18 38 7 otherstructure Objects misc 40
+ 18 bookshelves bookshelf 17 10 6 bookshelf bookshelf Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
+ 110 clothes dryer clothes dryer 17 39 6 otherfurniture Furniture n03251766 dryer.n.01 appliances 37
+ 148 toaster toaster 17 40 7 toaster otherprop Objects n04442312 toaster.n.02 appliances 37
+ 63 shoe shoe 17 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
+ 155 ironing board ironing board 16 39 6 ironing board otherfurniture Furniture n03586090 ironing_board.n.01 objects 39
+ 572 alarm clock alarm clock 16 40 7 alarm clock otherprop Objects clock 3046257 n02694662 alarm_clock.n.01 objects 39
+ 1179 shower head shower head 15 38 7 otherstructure Objects shower 23
+ 28 lamp base lamp 15 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
+ 392 water bottle water bottle 15 40 7 bottle otherprop Objects bottle bottle 2876657 n04557648 water_bottle.n.01 objects 39
+ 1180 keyboard piano keyboard piano 15 39 6 piano otherfurniture Furniture piano piano 3928116 n03928116 piano.n.01 furniture 36
+ 609 projector screen projector screen 15 38 7 projector screen otherstructure Objects misc 40
+ 1181 case of water bottles case of water bottles 15 40 7 otherprop Objects objects 39
+ 195 toaster oven toaster oven 14 40 7 toaster oven otherprop Objects n04442441 toaster_oven.n.01 appliances 37
+ 581 music stand music stand 14 39 6 music stand otherfurniture Furniture n03801760 music_stand.n.01 furniture 36
+ 58 staircase stairs 14 38 7 stairs otherstructure Objects n04298308 stairway.n.01 stairs 16
+ 1182 coat rack coat rack 14 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 3
+ 1183 storage organizer storage organizer 14 40 7 otherprop Objects shelving 3
+ 139 machine machine 14 40 7 machine otherprop Objects n03699975 machine.n.01 appliances 37
+ 1184 folded chair folded chair 14 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
+ 1185 fire alarm fire alarm 14 40 7 otherprop Objects n03343737 fire_alarm.n.02 misc 40
201
+ 156 fireplace fireplace 13 38 7 fireplace otherstructure Objects n03346455 fireplace.n.01 fireplace 27
202
+ 408 vent vent 13 40 7 otherprop Objects n04526241 vent.n.01 misc 40
203
+ 213 furniture furniture 13 39 6 furniture otherfurniture Furniture n03405725 furniture.n.01 furniture 36
204
+ 1186 power strip power strip 13 40 7 otherprop Objects objects 39
205
+ 1187 calendar calendar 13 40 7 otherprop Objects objects 39
206
+ 1188 poster poster 13 11 8 picture picture Picture n03931044 picture.n.01 picture 6
207
+ 115 toilet paper holder toilet paper holder 13 40 7 toilet paper holder otherprop Objects objects 39
208
+ 1189 potted plant potted plant 12 40 7 plant otherprop Objects plant n00017222 plant.n.02 plant 14
209
+ 304 stuffed animal stuffed animal 12 40 7 stuffed animal otherprop Objects n04399382 teddy.n.01 objects 39
210
+ 1190 luggage luggage 12 40 7 luggage otherprop Objects n02774630 baggage.n.01 objects 39
211
+ 21 curtains curtain 12 16 13 curtain curtain Window curtain n03151077 curtain.n.01 curtain 12
212
+ 312 headphones headphones 12 40 7 otherprop Objects n03261776 earphone.n.01 objects 39
213
+ 233 crate crate 12 39 6 crate otherfurniture Furniture n03127925 crate.n.01 objects 39
214
+ 286 candle candle 12 40 7 candle otherprop Objects lamp n02948072 candle.n.01 objects 39
215
+ 264 projector projector 12 40 7 projector otherprop Objects n04009552 projector.n.02 objects 39
216
+ 110 clothes dryers clothes dryer 12 39 6 otherfurniture Furniture n03251766 dryer.n.01 appliances 37
217
+ 1191 mattress mattress 12 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
218
+ 356 dustpan dustpan 12 40 7 otherprop Objects n03259009 dustpan.n.02 objects 39
219
+ 25 drawer drawer 11 39 6 drawer otherfurniture Furniture n03233905 drawer.n.01 furniture 36
220
+ 750 rod rod 11 40 7 otherprop Objects pistol 3948459 n03427202 gat.n.01 misc 40
221
+ 269 globe globe 11 40 7 globe otherprop Objects objects 39
222
+ 307 footrest footrest 11 39 6 foot rest otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
223
+ 410 piano bench piano bench 11 39 6 piano bench otherfurniture Furniture bench bench 2828884 n02828884 bench.n.01 seating 34
224
+ 730 breakfast bar breakfast bar 11 38 7 bar otherstructure Objects counter 26
225
+ 216 step stool step stool 11 40 7 step stool otherprop Objects stool n04315713 step_stool.n.01 stool 19
226
+ 1192 hand rail hand rail 11 38 7 railing otherstructure Objects railing 30
227
+ 119 vending machine vending machine 11 40 7 machine otherprop Objects n04525305 vending_machine.n.01 appliances 37
228
+ 682 ceiling fan ceiling fan 11 40 7 fan otherprop Objects n03320046 fan.n.01 misc 40
229
+ 434 swiffer swiffer 11 40 7 otherprop Objects objects 39
230
+ 126 foosball table foosball table 11 39 6 foosball table otherfurniture Furniture table table table 4379243 n04379243 table.n.02 table 5
231
+ 919 jar jar 11 40 7 jar otherprop Objects jar 3593526 n03593526 jar.n.01 objects 39
232
+ 85 footstool footstool 11 39 6 ottoman otherfurniture Furniture stool n03380724 footstool.n.01 stool 19
233
+ 1193 folded table folded table 10 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
234
+ 108 round table round table 10 7 10 table table Table table table table 4379243 n04114554 round_table.n.02 table 5
235
+ 135 hamper hamper 10 40 7 basket otherprop Objects basket 2801938 n03482405 hamper.n.02 objects 39
236
+ 1194 poster tube poster tube 10 40 7 otherprop Objects objects 39
237
+ 432 case case 10 40 7 case otherprop Objects objects 39
238
+ 53 carpet carpet 10 40 7 rug otherprop Objects n04118021 rug.n.01 floor 2
239
+ 1195 thermostat thermostat 10 40 7 otherprop Objects n04422875 thermostat.n.01 misc 40
240
+ 111 coat coat 10 40 7 jacket otherprop Objects n03057021 coat.n.01 clothes 38
241
+ 305 water fountain water fountain 10 38 7 water fountain otherstructure Objects n03241335 drinking_fountain.n.01 misc 40
242
+ 1125 smoke detector smoke detector 10 40 7 otherprop Objects misc 40
243
+ 13 pillows pillow 9 18 7 pillow pillow Objects pillow 3938244 n03938244 pillow.n.01 cushion 8
244
+ 1196 flip flops flip flops 9 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
245
+ 1197 cloth cloth 9 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
246
+ 1198 banner banner 9 40 7 otherprop Objects n02788021 banner.n.01 misc 40
247
+ 1199 clothes hanger clothes hanger 9 40 7 otherprop Objects n03057920 coat_hanger.n.01 objects 39
248
+ 1200 whiteboard eraser whiteboard eraser 9 40 7 otherprop Objects objects 39
249
+ 378 iron iron 9 40 7 otherprop Objects n03584829 iron.n.04 objects 39
250
+ 591 instrument case instrument case 9 40 7 case otherprop Objects objects 39
251
+ 49 toilet paper rolls toilet paper 9 40 7 toilet paper otherprop Objects n15075141 toilet_tissue.n.01 objects 39
252
+ 92 soap soap 9 40 7 soap otherprop Objects n04253437 soap.n.01 objects 39
253
+ 1098 block block 9 40 7 otherprop Objects misc 40
254
+ 291 wall hanging wall hanging 8 40 7 otherprop Objects n03491178 hanging.n.01 picture 6
255
+ 1063 kitchen island kitchen island 8 38 7 kitchen island otherstructure Objects n03620600 kitchen_island.n.01 counter 26
256
+ 107 pipes pipe 8 38 7 otherstructure Objects misc 40
257
+ 1135 toothbrush toothbrush 8 40 7 toothbrush otherprop Objects n04453156 toothbrush.n.01 objects 39
258
+ 189 shirt shirt 8 40 7 otherprop Objects n04197391 shirt.n.01 clothes 38
259
+ 245 cutting board cutting board 8 40 7 cutting board otherprop Objects n03025513 chopping_board.n.01 objects 39
260
+ 194 vase vase 8 40 7 vase otherprop Objects vase jar 3593526 n04522168 vase.n.01 objects 39
261
+ 1201 shower control valve shower control valve 8 38 7 otherstructure Objects n04208936 shower.n.01 shower 23
262
+ 386 exercise machine exercise machine 8 40 7 machine otherprop Objects gym_equipment 33
263
+ 1202 compost bin compost bin 8 39 6 garbage bin otherfurniture Furniture trash_bin 2747177 n02747177 ashcan.n.01 objects 39
264
+ 857 shorts shorts 8 40 7 shorts otherprop Objects clothes 38
265
+ 452 tire tire 8 40 7 otherprop Objects n04440749 tire.n.01 objects 39
266
+ 1203 teddy bear teddy bear 7 40 7 stuffed animal otherprop Objects n04399382 teddy.n.01 objects 39
267
+ 346 bathrobe bathrobe 7 40 7 otherprop Objects n02807616 bathrobe.n.01 clothes 38
268
+ 152 handrail handrail 7 38 7 railing otherstructure Objects n02788148 bannister.n.02 railing 30
269
+ 83 faucet faucet 7 40 7 faucet otherprop Objects faucet 3325088 n03325088 faucet.n.01 misc 40
270
+ 1204 pantry wall pantry wall 7 1 12 wall wall Wall n04546855 wall.n.01 wall 1
271
+ 726 thermos thermos 7 40 7 flask otherprop Objects bottle bottle 2876657 n04422727 thermos.n.01 objects 39
272
+ 61 rug rug 7 40 7 rug otherprop Objects n04118021 rug.n.01 floor 2
273
+ 39 couch cushions cushion 7 18 7 pillow pillow Objects n03151500 cushion.n.03 cushion 8
274
+ 1117 tripod tripod 7 39 6 stand otherfurniture Furniture n04485082 tripod.n.01 objects 39
275
+ 540 mailbox mailbox 7 29 7 box box Objects mailbox 3710193 n03710193 mailbox.n.01 misc 40
276
+ 1205 tupperware tupperware 7 40 7 otherprop Objects objects 39
277
+ 415 shoe rack shoe rack 7 40 7 shoe rack otherprop Objects shelving 31
278
+ 31 towels towel 6 27 7 towel towel Objects n04459362 towel.n.01 towel 20
279
+ 1206 beer bottles beer bottle 6 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
280
+ 153 treadmill treadmill 6 39 6 treadmill otherfurniture Furniture n04477387 treadmill.n.01 gym_equipment 33
281
+ 1207 salt salt 6 40 7 otherprop Objects objects 39
282
+ 129 chest chest 6 39 6 chest otherfurniture Furniture dresser dresser chest_of_drawers 13
283
+ 220 dispenser dispenser 6 40 7 otherprop Objects n03210683 dispenser.n.01 objects 39
284
+ 1208 mirror doors mirror door 6 8 12 door door Wall door n03221720 door.n.01 door 4
285
+ 231 remote remote 6 40 7 otherprop Objects remote_control 4074963 n04074963 remote_control.n.01 objects 39
286
+ 1209 folded ladder folded ladder 6 39 6 ladder otherfurniture Furniture stairs n03632277 ladder.n.01 misc 40
287
+ 39 cushion cushion 6 18 7 pillow pillow Objects n03151500 cushion.n.03 cushion 8
288
+ 1210 carton carton 6 40 7 otherprop Objects objects 39
289
+ 117 step step 6 38 7 otherstructure Objects n04314914 step.n.04 misc 40
290
+ 822 drying rack drying rack 6 39 6 drying rack otherfurniture Furniture shelving 31
291
+ 238 slippers slipper 6 40 7 shoe otherprop Objects n04241394 slipper.n.01 clothes 38
292
+ 143 pool table pool table 6 39 6 pool table otherfurniture Furniture table table table 4379243 n03982430 pool_table.n.01 table 5
293
+ 1211 soda stream soda stream 6 40 7 otherprop Objects objects 39
294
+ 228 toilet brush toilet brush 6 40 7 toilet brush otherprop Objects objects 39
295
+ 494 loft bed loft bed 6 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
296
+ 226 cooking pot cooking pot 6 40 7 pot otherprop Objects objects 39
297
+ 91 heater heater 6 39 6 heater otherfurniture Furniture n03508101 heater.n.01 misc 40
298
+ 1072 messenger bag messenger bag 6 37 7 bag bag Objects objects 39
299
+ 435 stapler stapler 6 40 7 stapler otherprop Objects n04303497 stapler.n.01 objects 39
300
+ 1165 closet walls closet wall 5 1 12 wall wall Wall n04546855 wall.n.01 wall 1
301
+ 345 scanner scanner 5 40 7 otherprop Objects appliances 37
302
+ 893 elliptical machine elliptical machine 5 40 7 machine otherprop Objects gym_equipment 33
303
+ 621 kettle kettle 5 40 7 pot otherprop Objects n03612814 kettle.n.01 objects 39
304
+ 1212 metronome metronome 5 40 7 otherprop Objects n03757604 metronome.n.01 objects 39
305
+ 297 dumbell dumbell 5 40 7 otherprop Objects objects 39
306
+ 1213 music book music book 5 23 2 book books Books n02870526 book.n.11 objects 39
307
+ 1214 rice cooker rice cooker 5 40 7 otherprop Objects objects 39
308
+ 1215 dart board dart board 5 38 7 board otherstructure Objects n03162940 dartboard.n.01 objects 39
309
+ 529 sewing machine sewing machine 5 40 7 sewing machine otherprop Objects n04179913 sewing_machine.n.01 objects 39
310
+ 1216 grab bar grab bar 5 38 7 railing otherstructure Objects railing 30
311
+ 1217 flowerpot flowerpot 5 40 7 vase otherprop Objects vase jar 3593526 n04522168 vase.n.01 objects 39
312
+ 1218 painting painting 5 11 8 picture picture Picture n03931044 picture.n.01 picture 6
313
+ 1219 railing railing 5 38 7 railing otherstructure Objects n04047401 railing.n.01 railing 30
314
+ 1220 stair stair 5 38 7 stairs otherstructure Objects stairs n04314914 step.n.04 stairs 16
315
+ 525 toolbox toolbox 5 39 6 chest otherfurniture Furniture n04452615 toolbox.n.01 objects 39
316
+ 204 nerf gun nerf gun 5 40 7 otherprop Objects objects 39
317
+ 693 binders binder 5 40 7 binder otherprop Objects objects 39
318
+ 179 desk lamp desk lamp 5 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
319
+ 1221 quadcopter quadcopter 5 40 7 otherprop Objects objects 39
320
+ 1222 pitcher pitcher 5 40 7 pitcher otherprop Objects n03950228 pitcher.n.02 objects 39
321
+ 1223 hanging hanging 5 40 7 otherprop Objects misc 40
322
+ 1224 mail mail 5 40 7 otherprop Objects misc 40
323
+ 1225 closet ceiling closet ceiling 5 22 3 ceiling ceiling Ceiling n02990373 ceiling.n.01 ceiling 17
324
+ 1226 hoverboard hoverboard 5 40 7 otherprop Objects objects 39
325
+ 1227 beanbag chair beanbag chair 5 39 6 bean bag otherfurniture Furniture n02816656 beanbag.n.01 chair 3
326
+ 571 water heater water heater 5 40 7 water heater otherprop Objects n04560113 water_heater.n.01 misc 40
327
+ 1228 spray bottle spray bottle 5 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
328
+ 556 rope rope 5 40 7 rope otherprop Objects n04108268 rope.n.01 objects 39
329
+ 280 plastic container plastic container 5 40 7 container otherprop Objects objects 39
330
+ 1229 soap bottle soap bottle 5 40 7 soap otherprop Objects objects 39
331
+ 1230 ikea bag ikea bag 4 37 7 bag bag Objects 2773838 n02773838 bag.n.06 objects 39
332
+ 1231 sleeping bag sleeping bag 4 40 7 otherprop Objects n04235860 sleeping_bag.n.01 objects 39
333
+ 1232 duffel bag duffel bag 4 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
334
+ 746 frying pan frying pan 4 40 7 frying pan otherprop Objects n03400231 frying_pan.n.01 objects 39
335
+ 1233 oven mitt oven mitt 4 40 7 otherprop Objects objects 39
336
+ 1234 pot pot 4 40 7 pot otherprop Objects n04235860 sleeping_bag.n.01 objects 39
337
+ 144 hand dryer hand dryer 4 40 7 otherprop Objects objects 39
338
+ 282 dollhouse dollhouse 4 39 6 doll house otherfurniture Furniture n03219483 dollhouse.n.01 objects 39
339
+ 167 shampoo bottle shampoo bottle 4 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
340
+ 1235 hair brush hair brush 4 40 7 otherprop Objects n02908217 brush.n.02 objects 39
341
+ 1236 tennis racket tennis racket 4 40 7 otherprop Objects n04409806 tennis_racket.n.01 objects 39
342
+ 1237 display case display case 4 40 7 case otherprop Objects objects 39
343
+ 234 ping pong table ping pong table 4 39 6 ping pong table otherfurniture Furniture table table table 4379243 n04379243 table.n.02 table 5
344
+ 563 boiler boiler 4 40 7 otherprop Objects misc 40
345
+ 1238 bag of coffee beans bag of coffee beans 4 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
346
+ 1239 bananas banana 4 40 7 otherprop Objects n00021265 food.n.01 objects 39
347
+ 1240 carseat carseat 4 40 7 otherprop Objects misc 40
348
+ 366 helmet helmet 4 40 7 otherprop Objects helmet 3513137 n03513137 helmet.n.02 clothes 38
349
+ 816 umbrella umbrella 4 40 7 umbrella otherprop Objects n04507155 umbrella.n.01 objects 39
350
+ 1241 coffee box coffee box 4 40 7 otherprop Objects objects 39
351
+ 719 envelope envelope 4 40 7 envelope otherprop Objects n03291819 envelope.n.01 objects 39
352
+ 284 wet floor sign wet floor sign 4 40 7 sign otherprop Objects misc 40
353
+ 1242 clothing rack clothing rack 4 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
354
+ 247 controller controller 4 40 7 otherprop Objects n03096960 control.n.09 objects 39
355
+ 1243 bath walls bathroom wall 4 1 12 wall wall Wall n04546855 wall.n.01 wall 1
356
+ 1244 podium podium 4 39 6 otherfurniture Furniture n03159640 dais.n.01 furniture 36
357
+ 1245 storage box storage box 4 29 7 box box Objects n02883344 box.n.01 objects 39
358
+ 1246 dolly dolly 4 40 7 otherprop Objects misc 40
359
+ 1247 shampoo shampoo 3 40 7 otherprop Objects n04183516 shampoo.n.01 objects 39
360
+ 592 paper tray paper tray 3 40 7 paper tray otherprop Objects objects 39
361
+ 385 cabinet door cabinet door 3 8 12 door door Wall door door 4
362
+ 1248 changing station changing station 3 40 7 otherprop Objects misc 40
363
+ 1249 poster printer poster printer 3 40 7 printer otherprop Objects printer 4004475 n04004475 printer.n.03 appliances 37
364
+ 133 screen screen 3 40 7 otherprop Objects n03151077 curtain.n.01 curtain 12
365
+ 301 soap bar soap bar 3 38 7 bar otherstructure Objects objects 39
366
+ 1250 crutches crutches 3 40 7 otherprop Objects n03141823 crutch.n.01 objects 39
367
+ 379 studio light studio light 3 38 7 light otherstructure Objects lighting 28
368
+ 130 stack of cups cup 3 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
369
+ 1251 toilet flush button toilet flush button 3 40 7 otherprop Objects objects 39
370
+ 450 trunk trunk 3 40 7 otherprop Objects misc 40
371
+ 1252 grocery bag grocery bag 3 37 7 bag bag Objects suitcase 2773838 n03461288 grocery_bag.n.01 objects 39
372
+ 316 plastic bin plastic bin 3 40 7 bin otherprop Objects objects 39
373
+ 1253 pizza box pizza box 3 29 7 box box Objects objects 39
374
+ 385 cabinet doors cabinet door 3 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 door 4
375
+ 1254 legs legs 3 31 7 person person Objects person n05217688 person.n.02 misc 40
376
+ 461 car car 3 40 7 car otherprop Objects car car 2958343 n02958343 car.n.01 misc 40
377
+ 1255 shaving cream shaving cream 3 40 7 otherprop Objects n04186051 shaving_cream.n.01 objects 39
378
+ 1256 luggage stand luggage stand 3 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
379
+ 599 shredder shredder 3 40 7 otherprop Objects n04210120 shredder.n.01 objects 39
380
+ 281 statue statue 3 40 7 sculpture otherprop Objects n04306847 statue.n.01 misc 40
381
+ 1257 urinal urinal 3 33 7 toilet toilet Objects toilet toilet n04515991 urinal.n.01 toilet 18
382
+ 1258 hose hose 3 40 7 otherprop Objects n03539875 hose.n.03 misc 40
383
+ 1259 bike pump bike pump 3 40 7 otherprop Objects objects 39
384
+ 319 coatrack coatrack 3 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
385
+ 1260 bear bear 3 40 7 otherprop Objects objects 39
386
+ 28 wall lamp lamp 3 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
387
+ 1261 humidifier humidifier 3 40 7 otherprop Objects objects 39
388
+ 546 toothpaste toothpaste 3 40 7 toothpaste otherprop Objects objects 39
389
+ 1262 mouthwash bottle mouthwash bottle 3 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
390
+ 1263 poster cutter poster cutter 3 40 7 otherprop Objects objects 39
391
+ 1264 golf bag golf bag 3 37 7 bag bag Objects suitcase 2773838 n03445617 golf_bag.n.01 objects 39
392
+ 1265 food container food container 3 40 7 container otherprop Objects n03094503 container.n.01 objects 39
393
+ 1266 camera camera 3 40 7 otherprop Objects objects 39
394
+ 28 table lamp lamp 3 35 7 lamp lamp Objects lamp lamp 3636649 n04380533 table_lamp.n.01 lighting 28
395
+ 1267 yoga mat yoga mat 3 20 5 floor mat floor mat Floor n03727837 mat.n.01 floor 2
396
+ 1268 card card 3 40 7 otherprop Objects objects 39
397
+ 1269 mug mug 3 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
398
+ 188 shower doors shower door 3 38 7 otherstructure Objects n04208936 shower.n.01 door 4
399
+ 689 cardboard cardboard 3 40 7 otherprop Objects objects 39
400
+ 1270 rack stand rack stand 3 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
401
+ 1271 boxes of paper boxes of paper 3 29 7 box box Objects n02883344 box.n.01 objects 39
402
+ 1272 flag flag 3 40 7 otherprop Objects misc 40
403
+ 354 futon futon 3 39 6 mattress otherfurniture Furniture n03408444 futon.n.01 sofa 10
404
+ 339 magazine magazine 3 40 7 magazine otherprop Objects n06595351 magazine.n.01 objects 39
405
+ 1009 exit sign exit sign 3 40 7 exit sign otherprop Objects misc 40
406
+ 1273 rolled poster rolled poster 3 40 7 otherprop Objects objects 39
407
+ 1274 wheel wheel 3 40 7 otherprop Objects objects 39
408
+ 15 pictures picture 3 11 8 picture picture Picture n03931044 picture.n.01 picture 6
409
+ 1275 blackboard eraser blackboard eraser 3 40 7 eraser otherprop Objects n03294833 eraser.n.01 objects 39
410
+ 361 organizer organizer 3 40 7 otherprop Objects n03918737 personal_digital_assistant.n.01 objects 39
411
+ 1276 doll doll 3 40 7 toy otherprop Objects n03219135 doll.n.01 objects 39
412
+ 326 book rack book rack 3 39 6 bookrack otherfurniture Furniture objects 39
413
+ 1277 laundry bag laundry bag 3 40 7 laundry basket otherprop Objects basket 2801938 n03050864 clothes_hamper.n.01 objects 39
414
+ 1278 sponge sponge 3 40 7 otherprop Objects n01906749 sponge.n.04 objects 39
415
+ 116 seating seat 3 39 6 furniture otherfurniture Furniture n04161981 seat.n.03 furniture 36
416
+ 1184 folded chairs folded chair 2 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
417
+ 1279 lotion bottle lotion bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
418
+ 212 can can 2 40 7 can otherprop Objects can 2946921 n02946921 can.n.01 objects 39
419
+ 1280 lunch box lunch box 2 40 7 otherprop Objects objects 39
420
+ 1281 food display food display 2 40 7 otherprop Objects misc 40
421
+ 794 storage shelf storage shelf 2 40 7 otherprop Objects shelving 31
422
+ 1282 sliding wood door sliding wood door 2 40 7 otherprop Objects door 4
423
+ 955 pants pants 2 40 7 otherprop Objects n04489008 trouser.n.01 clothes 38
424
+ 387 wood wood 2 40 7 otherprop Objects misc 40
425
+ 69 boards board 2 38 7 board otherstructure Objects board_panel 35
426
+ 65 bottles bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
427
+ 523 washcloth washcloth 2 40 7 otherprop Objects n04554523 washcloth.n.01 towel 20
428
+ 389 workbench workbench 2 39 6 bench otherfurniture Furniture bench table 4379243 n04600486 workbench.n.01 table 5
429
+ 29 open kitchen cabinet kitchen cabinet 2 3 6 cabinet cabinet Furniture n02933112 cabinet.n.01 cabinet 7
430
+ 1283 organizer shelf organizer shelf 2 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
431
+ 146 frame frame 2 38 7 otherstructure Objects misc 40
432
+ 130 cups cup 2 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
433
+ 372 exercise ball exercise ball 2 40 7 ball otherprop Objects n04285146 sports_equipment.n.01 gym_equipment 33
434
+ 289 easel easel 2 39 6 stand otherfurniture Furniture n03262809 easel.n.01 furniture 36
435
+ 440 garbage bag garbage bag 2 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
436
+ 321 roomba roomba 2 40 7 otherprop Objects objects 39
437
+ 976 garage door garage door 2 38 7 garage door otherstructure Objects door door 4
438
+ 1256 luggage rack luggage stand 2 39 6 stand otherfurniture Furniture n04038440 shelving 31
439
+ 1284 bike lock bike lock 2 40 7 otherprop Objects objects 39
440
+ 1285 briefcase briefcase 2 40 7 otherprop Objects n02900705 briefcase.n.01 objects 39
441
+ 357 hand towel hand towel 2 27 7 towel towel Objects n03490006 hand_towel.n.01 towel 20
442
+ 1286 bath products bath product 2 40 7 otherprop Objects objects 39
443
+ 1287 star star 2 40 7 otherprop Objects n09444783 star.n.03 misc 40
444
+ 365 map map 2 40 7 map otherprop Objects n03720163 map.n.01 misc 40
445
+ 1288 coffee bean bag coffee bean bag 2 37 7 bag bag Objects suitcase 2773838 n02773838 bag.n.06 objects 39
446
+ 81 headboard headboard 2 39 6 headboard otherfurniture Furniture n03502200 headboard.n.01 bed 11
447
+ 1289 ipad ipad 2 40 7 otherprop Objects objects 39
448
+ 1290 display rack display rack 2 39 6 stand otherfurniture Furniture n04038440 rack.n.05 shelving 31
449
+ 948 traffic cone traffic cone 2 40 7 cone otherprop Objects cone objects 39
450
+ 174 toiletry toiletry 2 40 7 otherprop Objects n04447443 toiletry.n.01 objects 39
451
+ 1028 canopy canopy 2 40 7 otherprop Objects misc 40
452
+ 1291 massage chair massage chair 2 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
453
+ 1292 paper organizer paper organizer 2 40 7 otherprop Objects objects 39
454
+ 1005 barricade barricade 2 40 7 otherprop Objects misc 40
455
+ 235 platform platform 2 38 7 otherstructure Objects misc 40
456
+ 1293 cap cap 2 40 7 hat otherprop Objects n03497657 hat.n.01 clothes 38
457
+ 1294 dumbbell plates dumbbell plates 2 40 7 otherprop Objects objects 39
458
+ 1295 elevator elevator 2 38 7 otherstructure Objects misc 40
459
+ 1296 cooking pan cooking pan 2 40 7 pan otherprop Objects n03880531 pan.n.01 objects 39
460
+ 1297 trash bag trash bag 2 37 7 bag bag Objects objects 39
461
+ 1298 santa santa 2 40 7 otherprop Objects misc 40
462
+ 1299 jewelry box jewelry box 2 29 7 box box Objects n02883344 box.n.01 objects 39
463
+ 1300 boat boat 2 40 7 otherprop Objects misc 40
464
+ 1301 sock sock 2 21 7 clothes clothes Objects n04254777 sock.n.01 clothes 38
465
+ 1051 kinect kinect 2 40 7 kinect otherprop Objects objects 39
466
+ 566 crib crib 2 39 6 crib otherfurniture Furniture furniture 36
467
+ 1302 plastic storage bin plastic storage bin 2 40 7 container otherprop Objects n03094503 container.n.01 objects 39
468
+ 1062 cooler cooler 2 24 6 refridgerator refridgerator Furniture n03102654 cooler.n.01 appliances 37
469
+ 1303 kitchen apron kitchen apron 2 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
470
+ 1304 dishwashing soap bottle dishwashing soap bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
471
+ 1305 xbox controller xbox controller 2 40 7 otherprop Objects objects 39
472
+ 1306 banana holder banana holder 2 40 7 otherprop Objects objects 39
473
+ 298 ping pong paddle ping pong paddle 2 40 7 otherprop Objects table 5
474
+ 1307 airplane airplane 2 40 7 otherprop Objects misc 40
475
+ 1308 conditioner bottle conditioner bottle 2 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
476
+ 1309 tea kettle tea kettle 2 40 7 tea kettle otherprop Objects n04397768 teakettle.n.01 objects 39
477
+ 43 bedframe bedframe 2 39 6 otherfurniture Furniture n02822579 bedstead.n.01 bed 11
478
+ 1310 wood beam wood beam 2 38 7 otherstructure Objects beam 29
479
+ 593 toilet paper package toilet paper package 2 40 7 otherprop Objects objects 39
480
+ 1311 wall mounted coat rack wall mounted coat rack 2 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
481
+ 1312 film light film light 2 40 7 otherprop Objects lighting 28
482
+ 749 ceiling lamp ceiling lamp 1 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
483
+ 623 chain chain 1 40 7 otherprop Objects chair 3
484
+ 1313 sofa sofa 1 6 9 sofa sofa Sofa sofa sofa sofa 4256520 n04256520 sofa.n.01 sofa 10
485
+ 99 closet wardrobe wardrobe 1 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
486
+ 265 sweater sweater 1 40 7 otherprop Objects n04370048 sweater.n.01 clothes 38
487
+ 1314 kitchen mixer kitchen mixer 1 40 7 otherprop Objects appliances 37
488
+ 99 wardrobe wardrobe 1 39 6 wardrobe otherfurniture Furniture wardrobe n04550184 wardrobe.n.01 furniture 36
489
+ 1315 water softener water softener 1 40 7 otherprop Objects misc 40
490
+ 448 banister banister 1 38 7 banister otherstructure Objects n02788148 bannister.n.02 railing 30
491
+ 257 trolley trolley 1 40 7 trolley otherprop Objects n04335435 streetcar.n.01 misc 40
492
+ 1316 pantry shelf pantry shelf 1 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
493
+ 786 sofa bed sofa bed 1 4 1 bed bed Bed bed bed bed 2818832 n02818832 bed.n.01 bed 11
494
+ 801 loofa loofa 1 40 7 otherprop Objects objects 39
495
+ 972 shower faucet handle shower faucet handle 1 40 7 handle otherprop Objects shower 23
496
+ 1317 toy piano toy piano 1 40 7 toy otherprop Objects n03964744 plaything.n.01 objects 39
497
+ 1318 fish fish 1 40 7 otherprop Objects n02512053 fish.n.01 objects 39
498
+ 75 file cabinets file cabinet 1 3 6 cabinet cabinet Furniture cabinet 2933112 n03337140 file.n.03 cabinet 7
499
+ 657 cat litter box cat litter box 1 29 7 box box Objects objects 39
500
+ 561 electric panel electric panel 1 40 7 otherprop Objects misc 40
501
+ 93 suitcases suitcase 1 40 7 luggage otherprop Objects n02774630 baggage.n.01 objects 39
502
+ 513 curtain rod curtain rod 1 38 7 curtain rod otherstructure Objects curtain 12
503
+ 411 bunk bed bunk bed 1 39 6 bunk bed otherfurniture Furniture bed bed bed 2818832 n02920259 bunk_bed.n.01 bed 11
504
+ 1122 chandelier chandelier 1 38 7 chandelier otherstructure Objects n03005285 chandelier.n.01 lighting 28
505
+ 922 tape tape 1 40 7 tape otherprop Objects objects 39
506
+ 88 plates plate 1 40 7 otherprop Objects n03959485 plate.n.04 objects 39
507
+ 518 alarm alarm 1 40 7 alarm otherprop Objects clock 3046257 n02694662 alarm_clock.n.01 objects 39
508
+ 814 fire hose fire hose 1 40 7 otherprop Objects n03346004 fire_hose.n.01 misc 40
509
+ 1319 toy dinosaur toy dinosaur 1 40 7 toy otherprop Objects n03964744 plaything.n.01 objects 39
510
+ 1320 cone cone 1 40 7 otherprop Objects objects 39
511
+ 649 glass doors glass door 1 8 12 door door Wall door n03221720 door.n.01 door 4
512
+ 607 hatrack hatrack 1 40 7 otherprop Objects n03059103 coatrack.n.01 shelving 31
513
+ 819 subwoofer subwoofer 1 40 7 speaker otherprop Objects speaker 3691459 n04349401 subwoofer.n.01 objects 39
514
+ 1321 fire sprinkler fire sprinkler 1 40 7 otherprop Objects misc 40
515
+ 1322 trash cabinet trash cabinet 1 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
516
+ 1204 pantry walls pantry wall 1 1 12 wall wall Wall n04546855 wall.n.01 wall 1
517
+ 227 photo photo 1 40 7 photo otherprop Objects n03925226 photograph.n.01 picture 6
518
+ 817 barrier barrier 1 40 7 otherprop Objects n02796623 barrier.n.01 misc 40
519
+ 130 stacks of cups cup 1 40 7 otherprop Objects n03147509 cup.n.01 objects 39
520
+ 712 beachball beachball 1 40 7 ball otherprop Objects n02814224 beach_ball.n.01 objects 39
521
+ 1323 folded boxes folded boxes 1 40 7 otherprop Objects objects 39
522
+ 1324 contact lens solution bottle contact lens solution bottle 1 40 7 bottle otherprop Objects bottle bottle 2876657 n02876657 bottle.n.01 objects 39
523
+ 673 covered box covered box 1 29 7 box box Objects objects 39
524
+ 459 folder folder 1 40 7 folder otherprop Objects n03376279 folder.n.02 objects 39
525
+ 643 mail trays mail tray 1 40 7 mail tray otherprop Objects objects 39
526
+ 238 slipper slipper 1 40 7 otherprop Objects n04241394 slipper.n.01 clothes 38
527
+ 765 magazine rack magazine rack 1 39 6 stand otherfurniture Furniture n03704549 magazine_rack.n.01 shelving 31
528
+ 1008 sticker sticker 1 40 7 sticker otherprop Objects n07272545 gummed_label.n.01 objects 39
529
+ 225 lotion lotion 1 40 7 otherprop Objects n03690938 lotion.n.01 objects 39
530
+ 1083 buddha buddha 1 40 7 otherprop Objects objects 39
531
+ 813 file organizer file organizer 1 40 7 otherprop Objects objects 39
532
+ 138 paper towel rolls paper towel roll 1 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
533
+ 1145 night lamp night lamp 1 35 7 lamp lamp Objects lamp lamp 3636649 n03636649 lamp.n.02 lighting 28
534
+ 796 fuse box fuse box 1 40 7 otherprop Objects misc 40
535
+ 1325 knife block knife block 1 40 7 otherprop Objects objects 39
536
+ 363 furnace furnace 1 39 6 furnace otherfurniture Furniture n03404449 furnace.n.01
537
+ 1174 cd cases cd case 1 40 7 otherprop Objects objects 39
538
+ 38 stools stool 1 40 7 stool otherprop Objects stool n04326896 stool.n.01 stool 19
539
+ 1326 hand sanitzer dispenser hand sanitzer dispenser 1 40 7 otherprop Objects n04254120 soap_dispenser.n.01 objects 39
540
+ 997 teapot teapot 1 40 7 tea pot otherprop Objects n04398044 teapot.n.01 objects 39
541
+ 1327 pen holder pen holder 1 40 7 otherprop Objects objects 39
542
+ 1328 tray rack tray rack 1 40 7 otherprop Objects objects 39
543
+ 1329 wig wig 1 40 7 otherprop Objects n04584207 wig.n.01 objects 39
544
+ 182 switch switch 1 40 7 otherprop Objects n04372370 switch.n.01 misc 40
545
+ 280 plastic containers plastic container 1 40 7 container otherprop Objects n03094503 container.n.01 objects 39
546
+ 1330 night light night light 1 40 7 otherprop Objects lighting 28
547
+ 1331 notepad notepad 1 40 7 otherprop Objects objects 39
548
+ 1332 mail bin mail bin 1 40 7 otherprop Objects misc 40
549
+ 1333 elevator button elevator button 1 40 7 otherprop Objects misc 40
550
+ 939 gaming wheel gaming wheel 1 40 7 otherprop Objects objects 39
551
+ 1334 drum set drum set 1 40 7 otherprop Objects objects 39
552
+ 480 cosmetic bag cosmetic bag 1 37 7 bag bag Objects objects 39
553
+ 907 coffee mug coffee mug 1 40 7 vessel otherprop Objects cup or mug 3797390 n03063599 coffee_mug.n.01 objects 39
554
+ 1335 closet shelf closet shelf 1 15 6 shelves shelves Furniture bookshelf bookshelf 2871439 n02871439 bookshelf.n.01 shelving 31
555
+ 1336 baby mobile baby mobile 1 40 7 otherprop Objects objects 39
556
+ 829 diaper bin diaper bin 1 40 7 bin otherprop Objects objects 39
557
+ 947 door wall door wall 1 1 12 wall wall Wall wall 1
558
+ 1116 stepstool stepstool 1 40 7 step stool otherprop Objects objects 39
559
+ 599 paper shredder shredder 1 40 7 otherprop Objects n04210120 shredder.n.01 objects 39
560
+ 733 dress rack dress rack 1 40 7 otherprop Objects n03238762 dress_rack.n.01 misc 40
561
+ 123 cover cover 1 40 7 blanket otherprop Objects objects 39
562
+ 506 shopping bag shopping bag 1 37 7 bag bag Objects n04204081 shopping_bag.n.01 objects 39
563
+ 569 sliding door sliding door 1 8 12 door door Wall door n04239074 sliding_door.n.01 door 4
564
+ 1337 exercise bike exercise bike 1 40 7 machine otherprop Objects n04210120 shredder.n.01 gym_equipment 33
565
+ 1338 recliner chair recliner chair 1 5 4 chair chair Chair chair chair chair 3001627 n03238762 dress_rack.n.01 chair 3
566
+ 1314 kitchenaid mixer kitchen mixer 1 40 7 otherprop Objects appliances 37
567
+ 1339 soda can soda can 1 40 7 can otherprop Objects can 2946921 n02946921 can.n.01 objects 39
568
+ 1340 stovetop stovetop 1 38 7 stove otherstructure Objects stove 4330267 n04330267 stove.n.02 appliances 37
569
+ 851 stepladder stepladder 1 39 6 ladder otherfurniture Furniture stairs n04315599 step_ladder.n.01 stairs 16
570
+ 142 tap tap 1 40 7 faucet otherprop Objects faucet 3325088 n04559451 water_faucet.n.01 objects 39
571
+ 436 cable cable 1 40 7 cables otherprop Objects objects 39
572
+ 1341 baby changing station baby changing station 1 39 6 otherfurniture Furniture furniture 36
573
+ 1342 costume costume 1 21 7 clothes clothes Objects n02728440 apparel.n.01 clothes 38
574
+ 885 rocking chair rocking chair 1 5 4 chair chair Chair chair chair chair 3001627 n04099969 rocking_chair.n.01 chair 3
575
+ 693 binder binder 1 40 7 binder otherprop Objects objects 39
576
+ 815 media center media center 1 3 6 cabinet cabinet Furniture cabinet 2933112 n02933112 cabinet.n.01 cabinet 7
577
+ 401 towel rack towel rack 1 40 7 otherprop Objects n04459773 towel_rack.n.01 misc 40
578
+ 1343 medal medal 1 40 7 otherprop Objects objects 39
579
+ 1184 stack of folded chairs folded chair 1 5 4 chair chair Chair chair chair chair 3001627 n03001627 chair.n.01 chair 3
580
+ 1344 telescope telescope 1 40 7 otherprop Objects n04403638 telescope.n.01 objects 39
581
+ 1345 closet doorframe closet doorframe 1 8 12 door door Wall door door 4
582
+ 160 glass glass 1 38 7 glass otherstructure Objects n03438257 glass.n.02 misc 40
583
+ 1126 baseball cap baseball cap 1 40 7 otherprop Objects cap 2954340 n02799323 baseball_cap.n.01 clothes 38
584
+ 1346 battery disposal jar battery disposal jar 1 40 7 jar otherprop Objects jar 3593526 n03593526 jar.n.01 objects 39
585
+ 332 mop mop 1 40 7 otherprop Objects n04367480 swab.n.02 objects 39
586
+ 397 tank tank 1 40 7 otherprop Objects objects 39
587
+ 643 mail tray mail tray 1 40 7 mail tray otherprop Objects objects 39
588
+ 551 centerpiece centerpiece 1 40 7 centerpiece otherprop Objects n02994419 centerpiece.n.02 objects 39
589
+ 1163 object stick 1 40 7 stick otherprop Objects objects 39
590
+ 1347 closet floor closet floor 1 2 5 floor floor Floor n03365592 floor.n.01 floor 2
591
+ 1348 dryer sheets dryer sheets 1 40 7 otherprop Objects objects 39
592
+ 803 bycicle bycicle 1 40 7 otherprop Objects misc 40
593
+ 484 flower stand flower stand 1 39 6 stand otherfurniture Furniture furniture 36
594
+ 1349 air mattress air mattress 1 4 1 bed bed Bed bed bed bed 2818832 n02690809 air_mattress.n.01 bed 11
595
+ 1350 clip clip 1 40 7 otherprop Objects objects 39
596
+ 222 side table side table 1 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
597
+ 1253 pizza boxes pizza box 1 29 7 box box Objects n02883344 box.n.01 objects 39
598
+ 1351 display display 1 39 7 otherfurniture Furniture n03211117 display.n.06 misc 40
599
+ 1352 postcard postcard 1 40 7 otherprop Objects objects 39
600
+ 828 display sign display sign 1 40 7 sign otherprop Objects misc 40
601
+ 1353 paper towel paper towel 1 40 7 paper towel otherprop Objects n03887697 paper_towel.n.01 towel 20
602
+ 612 boots boot 1 40 7 shoe otherprop Objects n04199027 shoe.n.01 clothes 38
603
+ 1354 tennis racket bag tennis racket bag 1 40 7 otherprop Objects objects 39
604
+ 1355 air hockey table air hockey table 1 7 10 table table Table table table table 4379243 n04379243 table.n.02 table 5
605
+ 1301 socks sock 1 21 7 clothes clothes Objects n04254777 sock.n.01 clothes 38
606
+ 1356 food bag food bag 1 37 7 bag bag Objects objects 39
607
+ 1199 clothes hangers clothes hanger 1 40 7 otherprop Objects n03057920 coat_hanger.n.01 misc 40
608
+ 1357 starbucks cup starbucks cup 1 40 7 cup otherprop Objects cup cup or mug 3797390 n03797390 mug.n.04 objects 39
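
The rows above close out scannetv2-labels.combined.tsv, which maps each raw ScanNet category string to the coarser label spaces (NYU40, Eigen13, ModelNet/ShapeNet synsets, and the trailing mpcat40 class and index on most rows). A minimal sketch of loading that mapping, assuming the published ScanNet convention of a tab-separated file whose header row includes raw_category and nyu40id columns (the header row and tab characters are flattened in this diff view, and the path below is illustrative):

import csv

def load_raw_to_nyu40(tsv_path="scannetv2-labels.combined.tsv"):
    """Sketch: map raw ScanNet category strings to NYU40 ids."""
    mapping = {}
    with open(tsv_path, newline="") as f:
        # Assumption: tab-separated with a header row naming the columns.
        for row in csv.DictReader(f, delimiter="\t"):
            nyu40id = (row.get("nyu40id") or "").strip()
            if nyu40id:  # defensively skip rows with an empty field
                mapping[row["raw_category"]] = int(nyu40id)
    return mapping

# e.g. load_raw_to_nyu40()["laundry basket"] -> 40, matching the first row above.

Note that raw_category (the second column), not id, is the unique key: the same id repeats across singular/plural spellings, e.g. id 1184 covers "folded chair", "folded chairs", and "stack of folded chairs" in the rows above.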
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_test.txt ADDED
@@ -0,0 +1,100 @@
+ scene0707_00
+ scene0708_00
+ scene0709_00
+ scene0710_00
+ scene0711_00
+ scene0712_00
+ scene0713_00
+ scene0714_00
+ scene0715_00
+ scene0716_00
+ scene0717_00
+ scene0718_00
+ scene0719_00
+ scene0720_00
+ scene0721_00
+ scene0722_00
+ scene0723_00
+ scene0724_00
+ scene0725_00
+ scene0726_00
+ scene0727_00
+ scene0728_00
+ scene0729_00
+ scene0730_00
+ scene0731_00
+ scene0732_00
+ scene0733_00
+ scene0734_00
+ scene0735_00
+ scene0736_00
+ scene0737_00
+ scene0738_00
+ scene0739_00
+ scene0740_00
+ scene0741_00
+ scene0742_00
+ scene0743_00
+ scene0744_00
+ scene0745_00
+ scene0746_00
+ scene0747_00
+ scene0748_00
+ scene0749_00
+ scene0750_00
+ scene0751_00
+ scene0752_00
+ scene0753_00
+ scene0754_00
+ scene0755_00
+ scene0756_00
+ scene0757_00
+ scene0758_00
+ scene0759_00
+ scene0760_00
+ scene0761_00
+ scene0762_00
+ scene0763_00
+ scene0764_00
+ scene0765_00
+ scene0766_00
+ scene0767_00
+ scene0768_00
+ scene0769_00
+ scene0770_00
+ scene0771_00
+ scene0772_00
+ scene0773_00
+ scene0774_00
+ scene0775_00
+ scene0776_00
+ scene0777_00
+ scene0778_00
+ scene0779_00
+ scene0780_00
+ scene0781_00
+ scene0782_00
+ scene0783_00
+ scene0784_00
+ scene0785_00
+ scene0786_00
+ scene0787_00
+ scene0788_00
+ scene0789_00
+ scene0790_00
+ scene0791_00
+ scene0792_00
+ scene0793_00
+ scene0794_00
+ scene0795_00
+ scene0796_00
+ scene0797_00
+ scene0798_00
+ scene0799_00
+ scene0800_00
+ scene0801_00
+ scene0802_00
+ scene0803_00
+ scene0804_00
+ scene0805_00
+ scene0806_00
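
scannetv2_test.txt lists one scene id per line, scene0707_00 through scene0806_00 (100 scenes, matching the hunk header). A minimal sketch of loading such a split file, with an illustrative path:

def load_split(path):
    # One scene id (e.g. "scene0707_00") per non-empty line.
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

test_scenes = load_split("meta_data/scannetv2_test.txt")
# len(test_scenes) == 100 for the file above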
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_train.txt ADDED
@@ -0,0 +1,1201 @@
+ scene0191_00
+ scene0191_01
+ scene0191_02
+ scene0119_00
+ scene0230_00
+ scene0528_00
+ scene0528_01
+ scene0705_00
+ scene0705_01
+ scene0705_02
+ scene0415_00
+ scene0415_01
+ scene0415_02
+ scene0007_00
+ scene0141_00
+ scene0141_01
+ scene0141_02
+ scene0515_00
+ scene0515_01
+ scene0515_02
+ scene0447_00
+ scene0447_01
+ scene0447_02
+ scene0531_00
+ scene0503_00
+ scene0285_00
+ scene0069_00
+ scene0584_00
+ scene0584_01
+ scene0584_02
+ scene0581_00
+ scene0581_01
+ scene0581_02
+ scene0620_00
+ scene0620_01
+ scene0263_00
+ scene0263_01
+ scene0481_00
+ scene0481_01
+ scene0020_00
+ scene0020_01
+ scene0291_00
+ scene0291_01
+ scene0291_02
+ scene0469_00
+ scene0469_01
+ scene0469_02
+ scene0659_00
+ scene0659_01
+ scene0024_00
+ scene0024_01
+ scene0024_02
+ scene0564_00
+ scene0117_00
+ scene0027_00
+ scene0027_01
+ scene0027_02
+ scene0028_00
+ scene0330_00
+ scene0418_00
+ scene0418_01
+ scene0418_02
+ scene0233_00
+ scene0233_01
+ scene0673_00
+ scene0673_01
+ scene0673_02
+ scene0673_03
+ scene0673_04
+ scene0673_05
+ scene0585_00
+ scene0585_01
+ scene0362_00
+ scene0362_01
+ scene0362_02
+ scene0362_03
+ scene0035_00
+ scene0035_01
+ scene0358_00
+ scene0358_01
+ scene0358_02
+ scene0037_00
+ scene0194_00
+ scene0321_00
+ scene0293_00
+ scene0293_01
+ scene0623_00
+ scene0623_01
+ scene0592_00
+ scene0592_01
+ scene0569_00
+ scene0569_01
+ scene0413_00
+ scene0313_00
+ scene0313_01
+ scene0313_02
+ scene0480_00
+ scene0480_01
+ scene0401_00
+ scene0517_00
+ scene0517_01
+ scene0517_02
+ scene0032_00
+ scene0032_01
+ scene0613_00
+ scene0613_01
+ scene0613_02
+ scene0306_00
+ scene0306_01
+ scene0052_00
+ scene0052_01
+ scene0052_02
+ scene0053_00
+ scene0444_00
+ scene0444_01
+ scene0055_00
+ scene0055_01
+ scene0055_02
+ scene0560_00
+ scene0589_00
+ scene0589_01
+ scene0589_02
+ scene0610_00
+ scene0610_01
+ scene0610_02
+ scene0364_00
+ scene0364_01
+ scene0383_00
+ scene0383_01
+ scene0383_02
+ scene0006_00
+ scene0006_01
+ scene0006_02
+ scene0275_00
+ scene0451_00
+ scene0451_01
+ scene0451_02
+ scene0451_03
+ scene0451_04
+ scene0451_05
+ scene0135_00
+ scene0065_00
+ scene0065_01
+ scene0065_02
+ scene0104_00
+ scene0674_00
+ scene0674_01
+ scene0448_00
+ scene0448_01
+ scene0448_02
+ scene0502_00
+ scene0502_01
+ scene0502_02
+ scene0440_00
+ scene0440_01
+ scene0440_02
+ scene0071_00
+ scene0072_00
+ scene0072_01
+ scene0072_02
+ scene0509_00
+ scene0509_01
+ scene0509_02
+ scene0649_00
+ scene0649_01
+ scene0602_00
+ scene0694_00
+ scene0694_01
+ scene0101_00
+ scene0101_01
+ scene0101_02
+ scene0101_03
+ scene0101_04
+ scene0101_05
+ scene0218_00
+ scene0218_01
+ scene0579_00
+ scene0579_01
+ scene0579_02
+ scene0039_00
+ scene0039_01
+ scene0493_00
+ scene0493_01
+ scene0242_00
+ scene0242_01
+ scene0242_02
+ scene0083_00
+ scene0083_01
+ scene0127_00
+ scene0127_01
+ scene0662_00
+ scene0662_01
+ scene0662_02
+ scene0018_00
+ scene0087_00
+ scene0087_01
+ scene0087_02
+ scene0332_00
+ scene0332_01
+ scene0332_02
+ scene0628_00
+ scene0628_01
+ scene0628_02
+ scene0134_00
+ scene0134_01
+ scene0134_02
+ scene0238_00
+ scene0238_01
+ scene0092_00
+ scene0092_01
+ scene0092_02
+ scene0092_03
+ scene0092_04
+ scene0022_00
+ scene0022_01
+ scene0467_00
+ scene0392_00
+ scene0392_01
+ scene0392_02
+ scene0424_00
+ scene0424_01
+ scene0424_02
+ scene0646_00
+ scene0646_01
+ scene0646_02
+ scene0098_00
+ scene0098_01
+ scene0044_00
+ scene0044_01
+ scene0044_02
+ scene0510_00
+ scene0510_01
+ scene0510_02
+ scene0571_00
+ scene0571_01
+ scene0166_00
+ scene0166_01
+ scene0166_02
+ scene0563_00
+ scene0172_00
+ scene0172_01
+ scene0388_00
+ scene0388_01
+ scene0215_00
+ scene0215_01
+ scene0252_00
+ scene0287_00
+ scene0668_00
+ scene0572_00
+ scene0572_01
+ scene0572_02
+ scene0026_00
+ scene0224_00
+ scene0113_00
+ scene0113_01
+ scene0551_00
+ scene0381_00
+ scene0381_01
+ scene0381_02
+ scene0371_00
+ scene0371_01
+ scene0460_00
+ scene0118_00
+ scene0118_01
+ scene0118_02
+ scene0417_00
+ scene0008_00
+ scene0634_00
+ scene0521_00
+ scene0123_00
+ scene0123_01
+ scene0123_02
+ scene0045_00
+ scene0045_01
+ scene0511_00
+ scene0511_01
+ scene0114_00
+ scene0114_01
+ scene0114_02
+ scene0070_00
+ scene0029_00
+ scene0029_01
+ scene0029_02
+ scene0129_00
+ scene0103_00
+ scene0103_01
+ scene0002_00
+ scene0002_01
+ scene0132_00
+ scene0132_01
+ scene0132_02
+ scene0124_00
+ scene0124_01
+ scene0143_00
+ scene0143_01
+ scene0143_02
+ scene0604_00
+ scene0604_01
+ scene0604_02
+ scene0507_00
+ scene0105_00
+ scene0105_01
+ scene0105_02
+ scene0428_00
+ scene0428_01
+ scene0311_00
+ scene0140_00
+ scene0140_01
+ scene0182_00
+ scene0182_01
+ scene0182_02
+ scene0142_00
+ scene0142_01
+ scene0399_00
+ scene0399_01
+ scene0012_00
+ scene0012_01
+ scene0012_02
+ scene0060_00
+ scene0060_01
+ scene0370_00
+ scene0370_01
+ scene0370_02
+ scene0310_00
+ scene0310_01
+ scene0310_02
+ scene0661_00
+ scene0650_00
+ scene0152_00
+ scene0152_01
+ scene0152_02
+ scene0158_00
+ scene0158_01
+ scene0158_02
+ scene0482_00
+ scene0482_01
+ scene0600_00
+ scene0600_01
+ scene0600_02
+ scene0393_00
+ scene0393_01
+ scene0393_02
+ scene0562_00
+ scene0174_00
+ scene0174_01
+ scene0157_00
+ scene0157_01
+ scene0161_00
+ scene0161_01
+ scene0161_02
+ scene0159_00
+ scene0254_00
+ scene0254_01
+ scene0115_00
+ scene0115_01
+ scene0115_02
+ scene0162_00
+ scene0163_00
+ scene0163_01
+ scene0523_00
+ scene0523_01
+ scene0523_02
+ scene0459_00
+ scene0459_01
+ scene0175_00
+ scene0085_00
+ scene0085_01
+ scene0279_00
+ scene0279_01
+ scene0279_02
+ scene0201_00
+ scene0201_01
+ scene0201_02
+ scene0283_00
+ scene0456_00
+ scene0456_01
+ scene0429_00
+ scene0043_00
+ scene0043_01
+ scene0419_00
+ scene0419_01
+ scene0419_02
+ scene0368_00
+ scene0368_01
+ scene0348_00
+ scene0348_01
+ scene0348_02
+ scene0442_00
+ scene0178_00
+ scene0380_00
+ scene0380_01
+ scene0380_02
+ scene0165_00
+ scene0165_01
+ scene0165_02
+ scene0181_00
+ scene0181_01
+ scene0181_02
+ scene0181_03
+ scene0333_00
+ scene0614_00
+ scene0614_01
+ scene0614_02
+ scene0404_00
+ scene0404_01
+ scene0404_02
+ scene0185_00
+ scene0126_00
+ scene0126_01
+ scene0126_02
+ scene0519_00
+ scene0236_00
+ scene0236_01
+ scene0189_00
+ scene0075_00
+ scene0267_00
+ scene0192_00
+ scene0192_01
+ scene0192_02
+ scene0281_00
+ scene0420_00
+ scene0420_01
+ scene0420_02
+ scene0195_00
+ scene0195_01
+ scene0195_02
+ scene0597_00
+ scene0597_01
+ scene0597_02
+ scene0041_00
+ scene0041_01
+ scene0111_00
+ scene0111_01
+ scene0111_02
+ scene0666_00
+ scene0666_01
+ scene0666_02
+ scene0200_00
+ scene0200_01
+ scene0200_02
+ scene0536_00
+ scene0536_01
+ scene0536_02
+ scene0390_00
+ scene0280_00
+ scene0280_01
+ scene0280_02
+ scene0344_00
+ scene0344_01
+ scene0205_00
+ scene0205_01
+ scene0205_02
+ scene0484_00
+ scene0484_01
+ scene0009_00
+ scene0009_01
+ scene0009_02
+ scene0302_00
+ scene0302_01
+ scene0209_00
+ scene0209_01
+ scene0209_02
+ scene0210_00
+ scene0210_01
+ scene0395_00
+ scene0395_01
+ scene0395_02
+ scene0683_00
+ scene0601_00
+ scene0601_01
+ scene0214_00
+ scene0214_01
+ scene0214_02
+ scene0477_00
+ scene0477_01
+ scene0439_00
+ scene0439_01
+ scene0468_00
+ scene0468_01
+ scene0468_02
+ scene0546_00
+ scene0466_00
+ scene0466_01
+ scene0220_00
+ scene0220_01
+ scene0220_02
+ scene0122_00
+ scene0122_01
+ scene0130_00
+ scene0110_00
+ scene0110_01
+ scene0110_02
+ scene0327_00
+ scene0156_00
+ scene0266_00
+ scene0266_01
+ scene0001_00
+ scene0001_01
+ scene0228_00
+ scene0199_00
+ scene0219_00
+ scene0464_00
+ scene0232_00
+ scene0232_01
+ scene0232_02
+ scene0299_00
+ scene0299_01
+ scene0530_00
+ scene0363_00
+ scene0453_00
+ scene0453_01
+ scene0570_00
+ scene0570_01
+ scene0570_02
+ scene0183_00
+ scene0239_00
+ scene0239_01
+ scene0239_02
+ scene0373_00
+ scene0373_01
+ scene0241_00
+ scene0241_01
+ scene0241_02
+ scene0188_00
+ scene0622_00
+ scene0622_01
+ scene0244_00
+ scene0244_01
+ scene0691_00
+ scene0691_01
+ scene0206_00
+ scene0206_01
+ scene0206_02
+ scene0247_00
+ scene0247_01
+ scene0061_00
+ scene0061_01
+ scene0082_00
+ scene0250_00
+ scene0250_01
+ scene0250_02
+ scene0501_00
+ scene0501_01
+ scene0501_02
+ scene0320_00
+ scene0320_01
+ scene0320_02
+ scene0320_03
+ scene0631_00
+ scene0631_01
+ scene0631_02
+ scene0255_00
+ scene0255_01
+ scene0255_02
+ scene0047_00
+ scene0265_00
+ scene0265_01
+ scene0265_02
+ scene0004_00
+ scene0336_00
+ scene0336_01
+ scene0058_00
+ scene0058_01
+ scene0260_00
+ scene0260_01
+ scene0260_02
+ scene0243_00
+ scene0603_00
+ scene0603_01
+ scene0093_00
+ scene0093_01
+ scene0093_02
+ scene0109_00
+ scene0109_01
+ scene0434_00
+ scene0434_01
+ scene0434_02
+ scene0290_00
+ scene0627_00
+ scene0627_01
+ scene0470_00
+ scene0470_01
+ scene0137_00
+ scene0137_01
+ scene0137_02
+ scene0270_00
+ scene0270_01
+ scene0270_02
+ scene0271_00
+ scene0271_01
+ scene0504_00
+ scene0274_00
+ scene0274_01
+ scene0274_02
+ scene0036_00
+ scene0036_01
+ scene0276_00
+ scene0276_01
+ scene0272_00
+ scene0272_01
+ scene0499_00
+ scene0698_00
+ scene0698_01
+ scene0051_00
+ scene0051_01
+ scene0051_02
+ scene0051_03
+ scene0108_00
+ scene0245_00
+ scene0369_00
+ scene0369_01
+ scene0369_02
+ scene0284_00
+ scene0289_00
+ scene0289_01
+ scene0286_00
+ scene0286_01
+ scene0286_02
+ scene0286_03
+ scene0031_00
+ scene0031_01
+ scene0031_02
+ scene0545_00
+ scene0545_01
+ scene0545_02
+ scene0557_00
+ scene0557_01
+ scene0557_02
+ scene0533_00
+ scene0533_01
+ scene0116_00
+ scene0116_01
+ scene0116_02
+ scene0611_00
+ scene0611_01
+ scene0688_00
+ scene0294_00
+ scene0294_01
+ scene0294_02
+ scene0295_00
+ scene0295_01
+ scene0296_00
+ scene0296_01
+ scene0596_00
+ scene0596_01
+ scene0596_02
+ scene0532_00
+ scene0532_01
+ scene0637_00
+ scene0638_00
+ scene0121_00
+ scene0121_01
+ scene0121_02
+ scene0040_00
+ scene0040_01
+ scene0197_00
+ scene0197_01
+ scene0197_02
+ scene0410_00
+ scene0410_01
+ scene0305_00
+ scene0305_01
+ scene0615_00
+ scene0615_01
+ scene0703_00
+ scene0703_01
+ scene0555_00
+ scene0297_00
+ scene0297_01
+ scene0297_02
+ scene0582_00
+ scene0582_01
+ scene0582_02
+ scene0023_00
+ scene0094_00
+ scene0013_00
+ scene0013_01
+ scene0013_02
+ scene0136_00
+ scene0136_01
+ scene0136_02
+ scene0407_00
+ scene0407_01
+ scene0062_00
+ scene0062_01
+ scene0062_02
+ scene0386_00
+ scene0318_00
+ scene0554_00
+ scene0554_01
+ scene0497_00
+ scene0213_00
+ scene0258_00
+ scene0323_00
+ scene0323_01
+ scene0324_00
+ scene0324_01
+ scene0016_00
+ scene0016_01
+ scene0016_02
+ scene0681_00
+ scene0398_00
+ scene0398_01
+ scene0227_00
+ scene0090_00
+ scene0066_00
+ scene0262_00
+ scene0262_01
+ scene0155_00
+ scene0155_01
+ scene0155_02
+ scene0352_00
+ scene0352_01
+ scene0352_02
+ scene0038_00
+ scene0038_01
+ scene0038_02
+ scene0335_00
+ scene0335_01
+ scene0335_02
+ scene0261_00
+ scene0261_01
+ scene0261_02
+ scene0261_03
+ scene0640_00
+ scene0640_01
+ scene0640_02
+ scene0080_00
+ scene0080_01
+ scene0080_02
+ scene0403_00
+ scene0403_01
+ scene0282_00
+ scene0282_01
+ scene0282_02
+ scene0682_00
+ scene0173_00
+ scene0173_01
+ scene0173_02
+ scene0522_00
+ scene0687_00
+ scene0345_00
+ scene0345_01
+ scene0612_00
+ scene0612_01
+ scene0411_00
+ scene0411_01
+ scene0411_02
+ scene0625_00
+ scene0625_01
+ scene0211_00
+ scene0211_01
+ scene0211_02
+ scene0211_03
+ scene0676_00
+ scene0676_01
+ scene0179_00
+ scene0498_00
+ scene0498_01
+ scene0498_02
+ scene0547_00
+ scene0547_01
+ scene0547_02
+ scene0269_00
+ scene0269_01
+ scene0269_02
+ scene0366_00
+ scene0680_00
+ scene0680_01
+ scene0588_00
+ scene0588_01
+ scene0588_02
+ scene0588_03
+ scene0346_00
+ scene0346_01
+ scene0359_00
+ scene0359_01
+ scene0014_00
+ scene0120_00
+ scene0120_01
+ scene0212_00
+ scene0212_01
+ scene0212_02
+ scene0176_00
+ scene0049_00
+ scene0259_00
+ scene0259_01
+ scene0586_00
+ scene0586_01
+ scene0586_02
+ scene0309_00
+ scene0309_01
+ scene0125_00
+ scene0455_00
+ scene0177_00
+ scene0177_01
+ scene0177_02
+ scene0326_00
+ scene0372_00
+ scene0171_00
+ scene0171_01
+ scene0374_00
+ scene0654_00
+ scene0654_01
+ scene0445_00
+ scene0445_01
+ scene0475_00
+ scene0475_01
+ scene0475_02
+ scene0349_00
+ scene0349_01
+ scene0234_00
+ scene0669_00
+ scene0669_01
+ scene0375_00
+ scene0375_01
+ scene0375_02
818
+ scene0387_00
819
+ scene0387_01
820
+ scene0387_02
821
+ scene0312_00
822
+ scene0312_01
823
+ scene0312_02
824
+ scene0384_00
825
+ scene0385_00
826
+ scene0385_01
827
+ scene0385_02
828
+ scene0000_00
829
+ scene0000_01
830
+ scene0000_02
831
+ scene0376_00
832
+ scene0376_01
833
+ scene0376_02
834
+ scene0301_00
835
+ scene0301_01
836
+ scene0301_02
837
+ scene0322_00
838
+ scene0542_00
839
+ scene0079_00
840
+ scene0079_01
841
+ scene0099_00
842
+ scene0099_01
843
+ scene0476_00
844
+ scene0476_01
845
+ scene0476_02
846
+ scene0394_00
847
+ scene0394_01
848
+ scene0147_00
849
+ scene0147_01
850
+ scene0067_00
851
+ scene0067_01
852
+ scene0067_02
853
+ scene0397_00
854
+ scene0397_01
855
+ scene0337_00
856
+ scene0337_01
857
+ scene0337_02
858
+ scene0431_00
859
+ scene0223_00
860
+ scene0223_01
861
+ scene0223_02
862
+ scene0010_00
863
+ scene0010_01
864
+ scene0402_00
865
+ scene0268_00
866
+ scene0268_01
867
+ scene0268_02
868
+ scene0679_00
869
+ scene0679_01
870
+ scene0405_00
871
+ scene0128_00
872
+ scene0408_00
873
+ scene0408_01
874
+ scene0190_00
875
+ scene0107_00
876
+ scene0076_00
877
+ scene0167_00
878
+ scene0361_00
879
+ scene0361_01
880
+ scene0361_02
881
+ scene0216_00
882
+ scene0202_00
883
+ scene0303_00
884
+ scene0303_01
885
+ scene0303_02
886
+ scene0446_00
887
+ scene0446_01
888
+ scene0089_00
889
+ scene0089_01
890
+ scene0089_02
891
+ scene0360_00
892
+ scene0150_00
893
+ scene0150_01
894
+ scene0150_02
895
+ scene0421_00
896
+ scene0421_01
897
+ scene0421_02
898
+ scene0454_00
899
+ scene0626_00
900
+ scene0626_01
901
+ scene0626_02
902
+ scene0186_00
903
+ scene0186_01
904
+ scene0538_00
905
+ scene0479_00
906
+ scene0479_01
907
+ scene0479_02
908
+ scene0656_00
909
+ scene0656_01
910
+ scene0656_02
911
+ scene0656_03
912
+ scene0525_00
913
+ scene0525_01
914
+ scene0525_02
915
+ scene0308_00
916
+ scene0396_00
917
+ scene0396_01
918
+ scene0396_02
919
+ scene0624_00
920
+ scene0292_00
921
+ scene0292_01
922
+ scene0632_00
923
+ scene0253_00
924
+ scene0021_00
925
+ scene0325_00
926
+ scene0325_01
927
+ scene0437_00
928
+ scene0437_01
929
+ scene0438_00
930
+ scene0590_00
931
+ scene0590_01
932
+ scene0400_00
933
+ scene0400_01
934
+ scene0541_00
935
+ scene0541_01
936
+ scene0541_02
937
+ scene0677_00
938
+ scene0677_01
939
+ scene0677_02
940
+ scene0443_00
941
+ scene0315_00
942
+ scene0288_00
943
+ scene0288_01
944
+ scene0288_02
945
+ scene0422_00
946
+ scene0672_00
947
+ scene0672_01
948
+ scene0184_00
949
+ scene0449_00
950
+ scene0449_01
951
+ scene0449_02
952
+ scene0048_00
953
+ scene0048_01
954
+ scene0138_00
955
+ scene0452_00
956
+ scene0452_01
957
+ scene0452_02
958
+ scene0667_00
959
+ scene0667_01
960
+ scene0667_02
961
+ scene0463_00
962
+ scene0463_01
963
+ scene0078_00
964
+ scene0078_01
965
+ scene0078_02
966
+ scene0636_00
967
+ scene0457_00
968
+ scene0457_01
969
+ scene0457_02
970
+ scene0465_00
971
+ scene0465_01
972
+ scene0577_00
973
+ scene0151_00
974
+ scene0151_01
975
+ scene0339_00
976
+ scene0573_00
977
+ scene0573_01
978
+ scene0154_00
979
+ scene0096_00
980
+ scene0096_01
981
+ scene0096_02
982
+ scene0235_00
983
+ scene0168_00
984
+ scene0168_01
985
+ scene0168_02
986
+ scene0594_00
987
+ scene0587_00
988
+ scene0587_01
989
+ scene0587_02
990
+ scene0587_03
991
+ scene0229_00
992
+ scene0229_01
993
+ scene0229_02
994
+ scene0512_00
995
+ scene0106_00
996
+ scene0106_01
997
+ scene0106_02
998
+ scene0472_00
999
+ scene0472_01
1000
+ scene0472_02
1001
+ scene0489_00
1002
+ scene0489_01
1003
+ scene0489_02
1004
+ scene0425_00
1005
+ scene0425_01
1006
+ scene0641_00
1007
+ scene0526_00
1008
+ scene0526_01
1009
+ scene0317_00
1010
+ scene0317_01
1011
+ scene0544_00
1012
+ scene0017_00
1013
+ scene0017_01
1014
+ scene0017_02
1015
+ scene0042_00
1016
+ scene0042_01
1017
+ scene0042_02
1018
+ scene0576_00
1019
+ scene0576_01
1020
+ scene0576_02
1021
+ scene0347_00
1022
+ scene0347_01
1023
+ scene0347_02
1024
+ scene0436_00
1025
+ scene0226_00
1026
+ scene0226_01
1027
+ scene0485_00
1028
+ scene0486_00
1029
+ scene0487_00
1030
+ scene0487_01
1031
+ scene0619_00
1032
+ scene0097_00
1033
+ scene0367_00
1034
+ scene0367_01
1035
+ scene0491_00
1036
+ scene0492_00
1037
+ scene0492_01
1038
+ scene0005_00
1039
+ scene0005_01
1040
+ scene0543_00
1041
+ scene0543_01
1042
+ scene0543_02
1043
+ scene0657_00
1044
+ scene0341_00
1045
+ scene0341_01
1046
+ scene0534_00
1047
+ scene0534_01
1048
+ scene0319_00
1049
+ scene0273_00
1050
+ scene0273_01
1051
+ scene0225_00
1052
+ scene0198_00
1053
+ scene0003_00
1054
+ scene0003_01
1055
+ scene0003_02
1056
+ scene0409_00
1057
+ scene0409_01
1058
+ scene0331_00
1059
+ scene0331_01
1060
+ scene0505_00
1061
+ scene0505_01
1062
+ scene0505_02
1063
+ scene0505_03
1064
+ scene0505_04
1065
+ scene0506_00
1066
+ scene0057_00
1067
+ scene0057_01
1068
+ scene0074_00
1069
+ scene0074_01
1070
+ scene0074_02
1071
+ scene0091_00
1072
+ scene0112_00
1073
+ scene0112_01
1074
+ scene0112_02
1075
+ scene0240_00
1076
+ scene0102_00
1077
+ scene0102_01
1078
+ scene0513_00
1079
+ scene0514_00
1080
+ scene0514_01
1081
+ scene0537_00
1082
+ scene0516_00
1083
+ scene0516_01
1084
+ scene0495_00
1085
+ scene0617_00
1086
+ scene0133_00
1087
+ scene0520_00
1088
+ scene0520_01
1089
+ scene0635_00
1090
+ scene0635_01
1091
+ scene0054_00
1092
+ scene0473_00
1093
+ scene0473_01
1094
+ scene0524_00
1095
+ scene0524_01
1096
+ scene0379_00
1097
+ scene0471_00
1098
+ scene0471_01
1099
+ scene0471_02
1100
+ scene0566_00
1101
+ scene0248_00
1102
+ scene0248_01
1103
+ scene0248_02
1104
+ scene0529_00
1105
+ scene0529_01
1106
+ scene0529_02
1107
+ scene0391_00
1108
+ scene0264_00
1109
+ scene0264_01
1110
+ scene0264_02
1111
+ scene0675_00
1112
+ scene0675_01
1113
+ scene0350_00
1114
+ scene0350_01
1115
+ scene0350_02
1116
+ scene0450_00
1117
+ scene0068_00
1118
+ scene0068_01
1119
+ scene0237_00
1120
+ scene0237_01
1121
+ scene0365_00
1122
+ scene0365_01
1123
+ scene0365_02
1124
+ scene0605_00
1125
+ scene0605_01
1126
+ scene0539_00
1127
+ scene0539_01
1128
+ scene0539_02
1129
+ scene0540_00
1130
+ scene0540_01
1131
+ scene0540_02
1132
+ scene0170_00
1133
+ scene0170_01
1134
+ scene0170_02
1135
+ scene0433_00
1136
+ scene0340_00
1137
+ scene0340_01
1138
+ scene0340_02
1139
+ scene0160_00
1140
+ scene0160_01
1141
+ scene0160_02
1142
+ scene0160_03
1143
+ scene0160_04
1144
+ scene0059_00
1145
+ scene0059_01
1146
+ scene0059_02
1147
+ scene0056_00
1148
+ scene0056_01
1149
+ scene0478_00
1150
+ scene0478_01
1151
+ scene0548_00
1152
+ scene0548_01
1153
+ scene0548_02
1154
+ scene0204_00
1155
+ scene0204_01
1156
+ scene0204_02
1157
+ scene0033_00
1158
+ scene0145_00
1159
+ scene0483_00
1160
+ scene0508_00
1161
+ scene0508_01
1162
+ scene0508_02
1163
+ scene0180_00
1164
+ scene0148_00
1165
+ scene0556_00
1166
+ scene0556_01
1167
+ scene0416_00
1168
+ scene0416_01
1169
+ scene0416_02
1170
+ scene0416_03
1171
+ scene0416_04
1172
+ scene0073_00
1173
+ scene0073_01
1174
+ scene0073_02
1175
+ scene0073_03
1176
+ scene0034_00
1177
+ scene0034_01
1178
+ scene0034_02
1179
+ scene0639_00
1180
+ scene0561_00
1181
+ scene0561_01
1182
+ scene0298_00
1183
+ scene0692_00
1184
+ scene0692_01
1185
+ scene0692_02
1186
+ scene0692_03
1187
+ scene0692_04
1188
+ scene0642_00
1189
+ scene0642_01
1190
+ scene0642_02
1191
+ scene0642_03
1192
+ scene0630_00
1193
+ scene0630_01
1194
+ scene0630_02
1195
+ scene0630_03
1196
+ scene0630_04
1197
+ scene0630_05
1198
+ scene0630_06
1199
+ scene0706_00
1200
+ scene0567_00
1201
+ scene0567_01
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_val.txt ADDED
@@ -0,0 +1,312 @@
+ scene0568_00
+ scene0568_01
+ scene0568_02
+ scene0304_00
+ scene0488_00
+ scene0488_01
+ scene0412_00
+ scene0412_01
+ scene0217_00
+ scene0019_00
+ scene0019_01
+ scene0414_00
+ scene0575_00
+ scene0575_01
+ scene0575_02
+ scene0426_00
+ scene0426_01
+ scene0426_02
+ scene0426_03
+ scene0549_00
+ scene0549_01
+ scene0578_00
+ scene0578_01
+ scene0578_02
+ scene0665_00
+ scene0665_01
+ scene0050_00
+ scene0050_01
+ scene0050_02
+ scene0257_00
+ scene0025_00
+ scene0025_01
+ scene0025_02
+ scene0583_00
+ scene0583_01
+ scene0583_02
+ scene0701_00
+ scene0701_01
+ scene0701_02
+ scene0580_00
+ scene0580_01
+ scene0565_00
+ scene0169_00
+ scene0169_01
+ scene0655_00
+ scene0655_01
+ scene0655_02
+ scene0063_00
+ scene0221_00
+ scene0221_01
+ scene0591_00
+ scene0591_01
+ scene0591_02
+ scene0678_00
+ scene0678_01
+ scene0678_02
+ scene0462_00
+ scene0427_00
+ scene0595_00
+ scene0193_00
+ scene0193_01
+ scene0164_00
+ scene0164_01
+ scene0164_02
+ scene0164_03
+ scene0598_00
+ scene0598_01
+ scene0598_02
+ scene0599_00
+ scene0599_01
+ scene0599_02
+ scene0328_00
+ scene0300_00
+ scene0300_01
+ scene0354_00
+ scene0458_00
+ scene0458_01
+ scene0423_00
+ scene0423_01
+ scene0423_02
+ scene0307_00
+ scene0307_01
+ scene0307_02
+ scene0606_00
+ scene0606_01
+ scene0606_02
+ scene0432_00
+ scene0432_01
+ scene0608_00
+ scene0608_01
+ scene0608_02
+ scene0651_00
+ scene0651_01
+ scene0651_02
+ scene0430_00
+ scene0430_01
+ scene0689_00
+ scene0357_00
+ scene0357_01
+ scene0574_00
+ scene0574_01
+ scene0574_02
+ scene0329_00
+ scene0329_01
+ scene0329_02
+ scene0153_00
+ scene0153_01
+ scene0616_00
+ scene0616_01
+ scene0671_00
+ scene0671_01
+ scene0618_00
+ scene0382_00
+ scene0382_01
+ scene0490_00
+ scene0621_00
+ scene0607_00
+ scene0607_01
+ scene0149_00
+ scene0695_00
+ scene0695_01
+ scene0695_02
+ scene0695_03
+ scene0389_00
+ scene0377_00
+ scene0377_01
+ scene0377_02
+ scene0342_00
+ scene0139_00
+ scene0629_00
+ scene0629_01
+ scene0629_02
+ scene0496_00
+ scene0633_00
+ scene0633_01
+ scene0518_00
+ scene0652_00
+ scene0406_00
+ scene0406_01
+ scene0406_02
+ scene0144_00
+ scene0144_01
+ scene0494_00
+ scene0278_00
+ scene0278_01
+ scene0316_00
+ scene0609_00
+ scene0609_01
+ scene0609_02
+ scene0609_03
+ scene0084_00
+ scene0084_01
+ scene0084_02
+ scene0696_00
+ scene0696_01
+ scene0696_02
+ scene0351_00
+ scene0351_01
+ scene0643_00
+ scene0644_00
+ scene0645_00
+ scene0645_01
+ scene0645_02
+ scene0081_00
+ scene0081_01
+ scene0081_02
+ scene0647_00
+ scene0647_01
+ scene0535_00
+ scene0353_00
+ scene0353_01
+ scene0353_02
+ scene0559_00
+ scene0559_01
+ scene0559_02
+ scene0593_00
+ scene0593_01
+ scene0246_00
+ scene0653_00
+ scene0653_01
+ scene0064_00
+ scene0064_01
+ scene0356_00
+ scene0356_01
+ scene0356_02
+ scene0030_00
+ scene0030_01
+ scene0030_02
+ scene0222_00
+ scene0222_01
+ scene0338_00
+ scene0338_01
+ scene0338_02
+ scene0378_00
+ scene0378_01
+ scene0378_02
+ scene0660_00
+ scene0553_00
+ scene0553_01
+ scene0553_02
+ scene0527_00
+ scene0663_00
+ scene0663_01
+ scene0663_02
+ scene0664_00
+ scene0664_01
+ scene0664_02
+ scene0334_00
+ scene0334_01
+ scene0334_02
+ scene0046_00
+ scene0046_01
+ scene0046_02
+ scene0203_00
+ scene0203_01
+ scene0203_02
+ scene0088_00
+ scene0088_01
+ scene0088_02
+ scene0088_03
+ scene0086_00
+ scene0086_01
+ scene0086_02
+ scene0670_00
+ scene0670_01
+ scene0256_00
+ scene0256_01
+ scene0256_02
+ scene0249_00
+ scene0441_00
+ scene0658_00
+ scene0704_00
+ scene0704_01
+ scene0187_00
+ scene0187_01
+ scene0131_00
+ scene0131_01
+ scene0131_02
+ scene0207_00
+ scene0207_01
+ scene0207_02
+ scene0461_00
+ scene0011_00
+ scene0011_01
+ scene0343_00
+ scene0251_00
+ scene0077_00
+ scene0077_01
+ scene0684_00
+ scene0684_01
+ scene0550_00
+ scene0686_00
+ scene0686_01
+ scene0686_02
+ scene0208_00
+ scene0500_00
+ scene0500_01
+ scene0552_00
+ scene0552_01
+ scene0648_00
+ scene0648_01
+ scene0435_00
+ scene0435_01
+ scene0435_02
+ scene0435_03
+ scene0690_00
+ scene0690_01
+ scene0693_00
+ scene0693_01
+ scene0693_02
+ scene0700_00
+ scene0700_01
+ scene0700_02
+ scene0699_00
+ scene0231_00
+ scene0231_01
+ scene0231_02
+ scene0697_00
+ scene0697_01
+ scene0697_02
+ scene0697_03
+ scene0474_00
+ scene0474_01
+ scene0474_02
+ scene0474_03
+ scene0474_04
+ scene0474_05
+ scene0355_00
+ scene0355_01
+ scene0146_00
+ scene0146_01
+ scene0146_02
+ scene0196_00
+ scene0702_00
+ scene0702_01
+ scene0702_02
+ scene0314_00
+ scene0277_00
+ scene0277_01
+ scene0277_02
+ scene0095_00
+ scene0095_01
+ scene0015_00
+ scene0100_00
+ scene0100_01
+ scene0100_02
+ scene0558_00
+ scene0558_01
+ scene0558_02
+ scene0685_00
+ scene0685_01
+ scene0685_02
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/preprocess_scannet.py ADDED
@@ -0,0 +1,255 @@
+ """
+ Preprocessing Script for ScanNet 20/200
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import warnings
+
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+
+ import os
+ import argparse
+ import glob
+ import json
+ import plyfile
+ import numpy as np
+ import pandas as pd
+ import multiprocessing as mp
+ from concurrent.futures import ProcessPoolExecutor
+ from itertools import repeat
+
+ # Load external constants
+ from meta_data.scannet200_constants import VALID_CLASS_IDS_200, VALID_CLASS_IDS_20
+
+ CLOUD_FILE_PFIX = "_vh_clean_2"
+ SEGMENTS_FILE_PFIX = ".0.010000.segs.json"
+ AGGREGATIONS_FILE_PFIX = ".aggregation.json"
+ CLASS_IDS200 = VALID_CLASS_IDS_200
+ CLASS_IDS20 = VALID_CLASS_IDS_20
+ IGNORE_INDEX = -1
+
+
+ def read_plymesh(filepath):
+     """Read ply file and return it as numpy array. Returns None if empty."""
+     with open(filepath, "rb") as f:
+         plydata = plyfile.PlyData.read(f)
+     if plydata.elements:
+         vertices = pd.DataFrame(plydata["vertex"].data).values
+         faces = np.stack(plydata["face"].data["vertex_indices"], axis=0)
+         return vertices, faces
+
+
+ # Map the raw category id to the point cloud
+ def point_indices_from_group(seg_indices, group, labels_pd):
+     group_segments = np.array(group["segments"])
+     label = group["label"]
+
+     # Map the category name to id
+     label_id20 = labels_pd[labels_pd["raw_category"] == label]["nyu40id"]
+     label_id20 = int(label_id20.iloc[0]) if len(label_id20) > 0 else 0
+     label_id200 = labels_pd[labels_pd["raw_category"] == label]["id"]
+     label_id200 = int(label_id200.iloc[0]) if len(label_id200) > 0 else 0
+
+     # Only store the valid categories
+     if label_id20 in CLASS_IDS20:
+         label_id20 = CLASS_IDS20.index(label_id20)
+     else:
+         label_id20 = IGNORE_INDEX
+
+     if label_id200 in CLASS_IDS200:
+         label_id200 = CLASS_IDS200.index(label_id200)
+     else:
+         label_id200 = IGNORE_INDEX
+
+     # Get points whose segment indices (points labelled with segment ids) are in the group segment list
+     point_idx = np.where(np.isin(seg_indices, group_segments))[0]
+     return point_idx, label_id20, label_id200
+
+
+ def face_normal(vertex, face):
+     v01 = vertex[face[:, 1]] - vertex[face[:, 0]]
+     v02 = vertex[face[:, 2]] - vertex[face[:, 0]]
+     vec = np.cross(v01, v02)
+     length = np.sqrt(np.sum(vec**2, axis=1, keepdims=True)) + 1.0e-8
+     nf = vec / length
+     area = length * 0.5
+     return nf, area
+
+
+ def vertex_normal(vertex, face):
+     # Area-weighted average of the adjacent face normals, then renormalize
+     nf, area = face_normal(vertex, face)
+     nf = nf * area
+
+     nv = np.zeros_like(vertex)
+     for i in range(face.shape[0]):
+         nv[face[i]] += nf[i]
+
+     length = np.sqrt(np.sum(nv**2, axis=1, keepdims=True)) + 1.0e-8
+     nv = nv / length
+     return nv
+
+
+ def handle_process(
+     scene_path, output_path, labels_pd, train_scenes, val_scenes, parse_normals=True
+ ):
+     scene_id = os.path.basename(scene_path)
+     mesh_path = os.path.join(scene_path, f"{scene_id}{CLOUD_FILE_PFIX}.ply")
+     segments_file = os.path.join(
+         scene_path, f"{scene_id}{CLOUD_FILE_PFIX}{SEGMENTS_FILE_PFIX}"
+     )
+     aggregations_file = os.path.join(scene_path, f"{scene_id}{AGGREGATIONS_FILE_PFIX}")
+     info_file = os.path.join(scene_path, f"{scene_id}.txt")
+
+     if scene_id in train_scenes:
+         output_path = os.path.join(output_path, "train", f"{scene_id}")
+         split_name = "train"
+     elif scene_id in val_scenes:
+         output_path = os.path.join(output_path, "val", f"{scene_id}")
+         split_name = "val"
+     else:
+         output_path = os.path.join(output_path, "test", f"{scene_id}")
+         split_name = "test"
+
+     print(f"Processing: {scene_id} in {split_name}")
+
+     vertices, faces = read_plymesh(mesh_path)
+     coords = vertices[:, :3]
+     colors = vertices[:, 3:6]
+     save_dict = dict(
+         coord=coords.astype(np.float32),
+         color=colors.astype(np.uint8),
+     )
+
+     # # Rotating the mesh to axis aligned
+     # info_dict = {}
+     # with open(info_file) as f:
+     #     for line in f:
+     #         (key, val) = line.split(" = ")
+     #         info_dict[key] = np.fromstring(val, sep=' ')
+     #
+     # if 'axisAlignment' not in info_dict:
+     #     rot_matrix = np.identity(4)
+     # else:
+     #     rot_matrix = info_dict['axisAlignment'].reshape(4, 4)
+     # r_coords = coords.transpose()
+     # r_coords = np.append(r_coords, np.ones((1, r_coords.shape[1])), axis=0)
+     # r_coords = np.dot(rot_matrix, r_coords)
+     # coords = r_coords
+
+     # Parse Normals
+     if parse_normals:
+         save_dict["normal"] = vertex_normal(coords, faces).astype(np.float32)
+
+     # Load segments file (only train/val carry labels)
+     if split_name != "test":
+         with open(segments_file) as f:
+             segments = json.load(f)
+             seg_indices = np.array(segments["segIndices"])
+
+         # Load Aggregations file
+         with open(aggregations_file) as f:
+             aggregation = json.load(f)
+             seg_groups = np.array(aggregation["segGroups"])
+
+         # Generate new labels
+         semantic_gt20 = np.ones((vertices.shape[0]), dtype=np.int16) * IGNORE_INDEX
+         semantic_gt200 = np.ones((vertices.shape[0]), dtype=np.int16) * IGNORE_INDEX
+         instance_ids = np.ones((vertices.shape[0]), dtype=np.int16) * IGNORE_INDEX
+         for group in seg_groups:
+             point_idx, label_id20, label_id200 = point_indices_from_group(
+                 seg_indices, group, labels_pd
+             )
+
+             semantic_gt20[point_idx] = label_id20
+             semantic_gt200[point_idx] = label_id200
+             instance_ids[point_idx] = group["id"]
+
+         semantic_gt20 = semantic_gt20.astype(int)
+         semantic_gt200 = semantic_gt200.astype(int)
+         instance_ids = instance_ids.astype(int)
+
+         save_dict["segment20"] = semantic_gt20
+         save_dict["segment200"] = semantic_gt200
+         save_dict["instance"] = instance_ids
+
+         # Concatenate with original cloud
+         processed_vertices = np.hstack((semantic_gt200, instance_ids))
+
+         if np.any(np.isnan(processed_vertices)) or not np.all(
+             np.isfinite(processed_vertices)
+         ):
+             raise ValueError(f"Found NaN in scene: {scene_id}")
+
+     # Save processed data
+     os.makedirs(output_path, exist_ok=True)
+     for key in save_dict.keys():
+         np.save(os.path.join(output_path, f"{key}.npy"), save_dict[key])
+
+
+ if __name__ == "__main__":
+     parser = argparse.ArgumentParser()
+     parser.add_argument(
+         "--dataset_root",
+         required=True,
+         help="Path to the ScanNet dataset containing scene folders",
+     )
+     parser.add_argument(
+         "--output_root",
+         required=True,
+         help="Output path where train/val folders will be located",
+     )
+     parser.add_argument(
+         "--parse_normals", default=True, type=bool, help="Whether to parse point normals"
+     )
+     parser.add_argument(
+         "--num_workers",
+         default=mp.cpu_count(),
+         type=int,
+         help="Num workers for preprocessing.",
+     )
+     config = parser.parse_args()
+
+     # Load label map
+     labels_pd = pd.read_csv(
+         "pointcept/datasets/preprocessing/scannet/meta_data/scannetv2-labels.combined.tsv",
+         sep="\t",
+         header=0,
+     )
+
+     # Load train/val splits
+     with open(
+         "pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_train.txt"
+     ) as train_file:
+         train_scenes = train_file.read().splitlines()
+     with open(
+         "pointcept/datasets/preprocessing/scannet/meta_data/scannetv2_val.txt"
+     ) as val_file:
+         val_scenes = val_file.read().splitlines()
+
+     # Create output directories
+     train_output_dir = os.path.join(config.output_root, "train")
+     os.makedirs(train_output_dir, exist_ok=True)
+     val_output_dir = os.path.join(config.output_root, "val")
+     os.makedirs(val_output_dir, exist_ok=True)
+     test_output_dir = os.path.join(config.output_root, "test")
+     os.makedirs(test_output_dir, exist_ok=True)
+
+     # Load scene paths
+     scene_paths = sorted(glob.glob(config.dataset_root + "/scans*/scene*"))
+
+     # Preprocess data.
+     print("Processing scenes...")
+     pool = ProcessPoolExecutor(max_workers=config.num_workers)
+     _ = list(
+         pool.map(
+             handle_process,
+             scene_paths,
+             repeat(config.output_root),
+             repeat(labels_pd),
+             repeat(train_scenes),
+             repeat(val_scenes),
+             repeat(config.parse_normals),
+         )
+     )
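
A minimal invocation sketch for the script above, assuming it is launched from the Pointcept repository root (so the hardcoded meta_data paths resolve) and that RAW_DIR and PROCESSED_DIR are placeholder paths; the raw download is expected to live under RAW_DIR/scans*/scene*:

    python pointcept/datasets/preprocessing/scannet/preprocess_scannet.py \
        --dataset_root $RAW_DIR \
        --output_root $PROCESSED_DIR \
        --num_workers 16

Each scene is then written as per-key .npy files (coord, color, normal, plus segment20/segment200/instance for labelled splits) under train/, val/, or test/.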
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/SensorData.py ADDED
@@ -0,0 +1,183 @@
+ import os, struct
+ import numpy as np
+ import zlib
+ import imageio
+ import cv2
+
+ COMPRESSION_TYPE_COLOR = {-1: "unknown", 0: "raw", 1: "png", 2: "jpeg"}
+ COMPRESSION_TYPE_DEPTH = {
+     -1: "unknown",
+     0: "raw_ushort",
+     1: "zlib_ushort",
+     2: "occi_ushort",
+ }
+
+
+ class RGBDFrame:
+     def load(self, file_handle):
+         self.camera_to_world = np.asarray(
+             struct.unpack("f" * 16, file_handle.read(16 * 4)), dtype=np.float32
+         ).reshape(4, 4)
+         self.timestamp_color = struct.unpack("Q", file_handle.read(8))[0]
+         self.timestamp_depth = struct.unpack("Q", file_handle.read(8))[0]
+         self.color_size_bytes = struct.unpack("Q", file_handle.read(8))[0]
+         self.depth_size_bytes = struct.unpack("Q", file_handle.read(8))[0]
+         self.color_data = b"".join(
+             struct.unpack(
+                 "c" * self.color_size_bytes, file_handle.read(self.color_size_bytes)
+             )
+         )
+         self.depth_data = b"".join(
+             struct.unpack(
+                 "c" * self.depth_size_bytes, file_handle.read(self.depth_size_bytes)
+             )
+         )
+
+     def decompress_depth(self, compression_type):
+         if compression_type == "zlib_ushort":
+             return self.decompress_depth_zlib()
+         else:
+             raise ValueError(f"unsupported depth compression: {compression_type}")
+
+     def decompress_depth_zlib(self):
+         return zlib.decompress(self.depth_data)
+
+     def decompress_color(self, compression_type):
+         if compression_type == "jpeg":
+             return self.decompress_color_jpeg()
+         else:
+             raise ValueError(f"unsupported color compression: {compression_type}")
+
+     def decompress_color_jpeg(self):
+         return imageio.imread(self.color_data)
+
+
+ class SensorData:
+     def __init__(self, filename):
+         self.version = 4
+         self.load(filename)
+
+     def load(self, filename):
+         with open(filename, "rb") as f:
+             version = struct.unpack("I", f.read(4))[0]
+             assert self.version == version
+             strlen = struct.unpack("Q", f.read(8))[0]
+             self.sensor_name = b"".join(struct.unpack("c" * strlen, f.read(strlen)))
+             self.intrinsic_color = np.asarray(
+                 struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
+             ).reshape(4, 4)
+             self.extrinsic_color = np.asarray(
+                 struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
+             ).reshape(4, 4)
+             self.intrinsic_depth = np.asarray(
+                 struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
+             ).reshape(4, 4)
+             self.extrinsic_depth = np.asarray(
+                 struct.unpack("f" * 16, f.read(16 * 4)), dtype=np.float32
+             ).reshape(4, 4)
+             self.color_compression_type = COMPRESSION_TYPE_COLOR[
+                 struct.unpack("i", f.read(4))[0]
+             ]
+             self.depth_compression_type = COMPRESSION_TYPE_DEPTH[
+                 struct.unpack("i", f.read(4))[0]
+             ]
+             self.color_width = struct.unpack("I", f.read(4))[0]
+             self.color_height = struct.unpack("I", f.read(4))[0]
+             self.depth_width = struct.unpack("I", f.read(4))[0]
+             self.depth_height = struct.unpack("I", f.read(4))[0]
+             self.depth_shift = struct.unpack("f", f.read(4))[0]
+             num_frames = struct.unpack("Q", f.read(8))[0]
+             self.frames = []
+             for i in range(num_frames):
+                 frame = RGBDFrame()
+                 frame.load(f)
+                 self.frames.append(frame)
+
+     def export_depth_images(self, output_path, image_size=None, frame_skip=1):
+         if not os.path.exists(output_path):
+             os.makedirs(output_path)
+         print(
+             "exporting", len(self.frames) // frame_skip, "depth frames to", output_path
+         )
+         for f in range(0, len(self.frames), frame_skip):
+             if os.path.exists(os.path.join(output_path, str(f) + ".png")):
+                 continue
+             if f % 100 == 0:
+                 print(
+                     "exporting",
+                     f,
+                     "th depth frame to",
+                     os.path.join(output_path, str(f) + ".png"),
+                 )
+
+             depth_data = self.frames[f].decompress_depth(self.depth_compression_type)
+             # np.fromstring is deprecated for binary input; frombuffer is the drop-in replacement
+             depth = np.frombuffer(depth_data, dtype=np.uint16).reshape(
+                 self.depth_height, self.depth_width
+             )
+             if image_size is not None:
+                 depth = cv2.resize(
+                     depth,
+                     (image_size[1], image_size[0]),
+                     interpolation=cv2.INTER_NEAREST,
+                 )
+             imageio.imwrite(os.path.join(output_path, str(f) + ".png"), depth)
+
+     def export_color_images(self, output_path, image_size=None, frame_skip=1):
+         if not os.path.exists(output_path):
+             os.makedirs(output_path)
+         print(
+             "exporting", len(self.frames) // frame_skip, "color frames to", output_path
+         )
+         for f in range(0, len(self.frames), frame_skip):
+             if os.path.exists(os.path.join(output_path, str(f) + ".png")):
+                 continue
+             if f % 100 == 0:
+                 print(
+                     "exporting",
+                     f,
+                     "th color frame to",
+                     os.path.join(output_path, str(f) + ".png"),
+                 )
+             color = self.frames[f].decompress_color(self.color_compression_type)
+             if image_size is not None:
+                 color = cv2.resize(
+                     color,
+                     (image_size[1], image_size[0]),
+                     interpolation=cv2.INTER_NEAREST,
+                 )
+             # imageio.imwrite(os.path.join(output_path, str(f) + '.jpg'), color)
+             imageio.imwrite(os.path.join(output_path, str(f) + ".png"), color)
+
+     def save_mat_to_file(self, matrix, filename):
+         with open(filename, "w") as f:
+             for line in matrix:
+                 np.savetxt(f, line[np.newaxis], fmt="%f")
+
+     def export_poses(self, output_path, frame_skip=1):
+         if not os.path.exists(output_path):
+             os.makedirs(output_path)
+         print(
+             "exporting", len(self.frames) // frame_skip, "camera poses to", output_path
+         )
+         for f in range(0, len(self.frames), frame_skip):
+             self.save_mat_to_file(
+                 self.frames[f].camera_to_world,
+                 os.path.join(output_path, str(f) + ".txt"),
+             )
+
+     def export_intrinsics(self, output_path):
+         if not os.path.exists(output_path):
+             os.makedirs(output_path)
+         print("exporting camera intrinsics to", output_path)
+         self.save_mat_to_file(
+             self.intrinsic_color, os.path.join(output_path, "intrinsic_color.txt")
+         )
+         self.save_mat_to_file(
+             self.extrinsic_color, os.path.join(output_path, "extrinsic_color.txt")
+         )
+         self.save_mat_to_file(
+             self.intrinsic_depth, os.path.join(output_path, "intrinsic_depth.txt")
+         )
+         self.save_mat_to_file(
+             self.extrinsic_depth, os.path.join(output_path, "extrinsic_depth.txt")
+         )
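
A short usage sketch for the reader above, with scene0000_00.sens as a hypothetical input path; the export methods mirror the API defined in this file:

    sd = SensorData("scene0000_00.sens")
    sd.export_depth_images("out/depth", frame_skip=25)   # 16-bit PNG depth maps
    sd.export_color_images("out/color", frame_skip=25)   # decoded RGB frames
    sd.export_poses("out/pose", frame_skip=25)           # 4x4 camera-to-world matrices
    sd.export_intrinsics("out/intrinsic")                # color/depth intrinsics and extrinsics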
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/compute_full_overlapping.py ADDED
@@ -0,0 +1,91 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+ import copy
+ import torch
+ import numpy as np
+ import math
+ import glob, os
+ import argparse
+ import open3d as o3d
+
+
+ def make_open3d_point_cloud(xyz, color=None, voxel_size=None):
+     if np.isnan(xyz).any():
+         return None
+
+     xyz = xyz[:, :3]
+     pcd = o3d.geometry.PointCloud()
+     pcd.points = o3d.utility.Vector3dVector(xyz)
+     if color is not None:
+         pcd.colors = o3d.utility.Vector3dVector(color)
+     if voxel_size is not None:
+         pcd = pcd.voxel_down_sample(voxel_size)
+
+     return pcd
+
+
+ def compute_overlap_ratio(pcd0, pcd1, voxel_size):
+     pcd0_down = pcd0.voxel_down_sample(voxel_size)
+     pcd1_down = pcd1.voxel_down_sample(voxel_size)
+     # get_matching_indices expects a KDTree as its second argument, so build one per target
+     matching01 = get_matching_indices(
+         pcd0_down, o3d.geometry.KDTreeFlann(pcd1_down), voxel_size * 1.5, 1
+     )
+     matching10 = get_matching_indices(
+         pcd1_down, o3d.geometry.KDTreeFlann(pcd0_down), voxel_size * 1.5, 1
+     )
+     overlap0 = float(len(matching01)) / float(len(pcd0_down.points))
+     overlap1 = float(len(matching10)) / float(len(pcd1_down.points))
+     return max(overlap0, overlap1)
+
+
+ def get_matching_indices(source, pcd_tree, search_voxel_size, K=None):
+     match_inds = []
+     for i, point in enumerate(source.points):
+         [_, idx, _] = pcd_tree.search_radius_vector_3d(point, search_voxel_size)
+         if K is not None:
+             idx = idx[:K]
+         for j in idx:
+             match_inds.append((i, j))
+     return match_inds
+
+
+ def compute_full_overlapping(data_root, scene_id, voxel_size=0.05):
+     _points = [
+         (
+             pcd_name,
+             make_open3d_point_cloud(
+                 torch.load(pcd_name)["coord"], voxel_size=voxel_size
+             ),
+         )
+         for pcd_name in glob.glob(os.path.join(data_root, scene_id, "pcd", "*.pth"))
+     ]
+     points = [(pcd_name, pcd) for (pcd_name, pcd) in _points if pcd is not None]
+     print(
+         "loaded {} point clouds ({} invalid ones filtered out), computing matching/overlapping".format(
+             len(points), len(_points) - len(points)
+         )
+     )
+
+     matching_matrix = np.zeros((len(points), len(points)))
+     for i, (pcd0_name, pcd0) in enumerate(points):
+         print("matching to...{}".format(pcd0_name))
+         pcd0_tree = o3d.geometry.KDTreeFlann(copy.deepcopy(pcd0))
+         for j, (pcd1_name, pcd1) in enumerate(points):
+             if i == j:
+                 continue
+             matching_matrix[i, j] = float(
+                 len(get_matching_indices(pcd1, pcd0_tree, 1.5 * voxel_size, 1))
+             ) / float(len(pcd1.points))
+
+     # write to file
+     with open(os.path.join(data_root, scene_id, "pcd", "overlap.txt"), "w") as f:
+         for i, (pcd0_name, pcd0) in enumerate(points):
+             for j, (pcd1_name, pcd1) in enumerate(points):
+                 if i < j:
+                     overlap = max(matching_matrix[i, j], matching_matrix[j, i])
+                     f.write(
+                         "{} {} {}\n".format(
+                             pcd0_name.replace(data_root, ""),
+                             pcd1_name.replace(data_root, ""),
+                             overlap,
+                         )
+                     )
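
A minimal driving sketch for the function above, assuming data_root/<scene_id>/pcd/ already holds *.pth fragments saved as dicts with a "coord" key (the format torch.load above expects):

    compute_full_overlapping("data/scannet_pair/", "scene0000_00", voxel_size=0.05)
    # writes data/scannet_pair/scene0000_00/pcd/overlap.txt, one "pcd0 pcd1 overlap" line per fragment pair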
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/generage_list.py ADDED
@@ -0,0 +1,33 @@
+ # Copyright (c) Facebook, Inc. and its affiliates.
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+
+
+ import argparse
+ import glob, os, sys
+
+ from SensorData import SensorData
+
+ # params
+ parser = argparse.ArgumentParser()
+ # data paths
+ parser.add_argument("--target_dir", required=True, help="path to the target dir")
+
+ opt = parser.parse_args()
+ print(opt)
+
+
+ def main():
+     overlaps = glob.glob(os.path.join(opt.target_dir, "*/pcd/overlap.txt"))
+     with open(os.path.join(opt.target_dir, "overlap30.txt"), "w") as f:
+         for fo in overlaps:
+             for line in open(fo):
+                 pcd0, pcd1, op = line.strip().split()
+                 if float(op) >= 0.3:
+                     print("{} {} {}".format(pcd0, pcd1, op), file=f)
+     print("done")
+
+
+ if __name__ == "__main__":
+     main()
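
A usage sketch, assuming compute_full_overlapping.py has already written a pcd/overlap.txt per scene under the target directory (data/scannet_pair is a placeholder path):

    python generage_list.py --target_dir data/scannet_pair

This gathers every fragment pair with overlap >= 0.3 into a single overlap30.txt at the top of the target directory.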
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/plyfile.py ADDED
@@ -0,0 +1,894 @@
+ # Copyright 2014 Darsh Ranjan
+ #
+ # This file is part of python-plyfile.
+ #
+ # python-plyfile is free software: you can redistribute it and/or
+ # modify it under the terms of the GNU General Public License as
+ # published by the Free Software Foundation, either version 3 of the
+ # License, or (at your option) any later version.
+ #
+ # python-plyfile is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ # General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with python-plyfile.  If not, see
+ # <http://www.gnu.org/licenses/>.
+
+ from itertools import islice as _islice
+
+ import numpy as _np
+ from sys import byteorder as _byteorder
+
+
+ try:
+     _range = xrange
+ except NameError:
+     _range = range
+
+
+ # Many-many relation
+ _data_type_relation = [
+     ("int8", "i1"),
+     ("char", "i1"),
+     ("uint8", "u1"),
+     ("uchar", "b1"),
+     ("uchar", "u1"),
+     ("int16", "i2"),
+     ("short", "i2"),
+     ("uint16", "u2"),
+     ("ushort", "u2"),
+     ("int32", "i4"),
+     ("int", "i4"),
+     ("uint32", "u4"),
+     ("uint", "u4"),
+     ("float32", "f4"),
+     ("float", "f4"),
+     ("float64", "f8"),
+     ("double", "f8"),
+ ]
+
+ _data_types = dict(_data_type_relation)
+ _data_type_reverse = dict((b, a) for (a, b) in _data_type_relation)
+
+ _types_list = []
+ _types_set = set()
+ for _a, _b in _data_type_relation:
+     if _a not in _types_set:
+         _types_list.append(_a)
+         _types_set.add(_a)
+     if _b not in _types_set:
+         _types_list.append(_b)
+         _types_set.add(_b)
+
+
+ _byte_order_map = {"ascii": "=", "binary_little_endian": "<", "binary_big_endian": ">"}
+
+ _byte_order_reverse = {"<": "binary_little_endian", ">": "binary_big_endian"}
+
+ _native_byte_order = {"little": "<", "big": ">"}[_byteorder]
+
+
+ def _lookup_type(type_str):
+     if type_str not in _data_type_reverse:
+         try:
+             type_str = _data_types[type_str]
+         except KeyError:
+             raise ValueError("field type %r not in %r" % (type_str, _types_list))
+
+     return _data_type_reverse[type_str]
+
+
+ def _split_line(line, n):
+     fields = line.split(None, n)
+     if len(fields) == n:
+         fields.append("")
+
+     assert len(fields) == n + 1
+
+     return fields
+
+
+ def make2d(array, cols=None, dtype=None):
+     """
+     Make a 2D array from an array of arrays.  The `cols' and `dtype'
+     arguments can be omitted if the array is not empty.
+
+     """
+     if (cols is None or dtype is None) and not len(array):
+         raise RuntimeError("cols and dtype must be specified for empty array")
+
+     if cols is None:
+         cols = len(array[0])
+
+     if dtype is None:
+         dtype = array[0].dtype
+
+     return _np.fromiter(array, [("_", dtype, (cols,))], count=len(array))["_"]
+
+
+ class PlyParseError(Exception):
+     """
+     Raised when a PLY file cannot be parsed.
+
+     The attributes `element', `row', `property', and `message' give
+     additional information.
+
+     """
+
+     def __init__(self, message, element=None, row=None, prop=None):
+         self.message = message
+         self.element = element
+         self.row = row
+         self.prop = prop
+
+         s = ""
+         if self.element:
+             s += "element %r: " % self.element.name
+         if self.row is not None:
+             s += "row %d: " % self.row
+         if self.prop:
+             s += "property %r: " % self.prop.name
+         s += self.message
+
+         Exception.__init__(self, s)
+
+     def __repr__(self):
+         # note: the %-formatting must apply to the full tuple, not just the message
+         return "PlyParseError(%r, element=%r, row=%r, prop=%r)" % (
+             self.message,
+             self.element,
+             self.row,
+             self.prop,
+         )
+
+
+ class PlyData(object):
+     """
+     PLY file header and data.
+
+     A PlyData instance is created in one of two ways: by the static
+     method PlyData.read (to read a PLY file), or directly from __init__
+     given a sequence of elements (which can then be written to a PLY
+     file).
+
+     """
+
+     def __init__(
+         self, elements=[], text=False, byte_order="=", comments=[], obj_info=[]
+     ):
+         """
+         elements: sequence of PlyElement instances.
+
+         text: whether the resulting PLY file will be text (True) or
+             binary (False).
+
+         byte_order: '<' for little-endian, '>' for big-endian, or '='
+             for native.  This is only relevant if `text' is False.
+
+         comments: sequence of strings that will be placed in the header
+             between the 'ply' and 'format ...' lines.
+
+         obj_info: like comments, but will be placed in the header with
+             "obj_info ..." instead of "comment ...".
+
+         """
+         if byte_order == "=" and not text:
+             byte_order = _native_byte_order
+
+         self.byte_order = byte_order
+         self.text = text
+
+         self.comments = list(comments)
+         self.obj_info = list(obj_info)
+         self.elements = elements
+
+     def _get_elements(self):
+         return self._elements
+
+     def _set_elements(self, elements):
+         self._elements = tuple(elements)
+         self._index()
+
+     elements = property(_get_elements, _set_elements)
+
+     def _get_byte_order(self):
+         return self._byte_order
+
+     def _set_byte_order(self, byte_order):
+         if byte_order not in ["<", ">", "="]:
+             raise ValueError("byte order must be '<', '>', or '='")
+
+         self._byte_order = byte_order
+
+     byte_order = property(_get_byte_order, _set_byte_order)
+
+     def _index(self):
+         self._element_lookup = dict((elt.name, elt) for elt in self._elements)
+         if len(self._element_lookup) != len(self._elements):
+             raise ValueError("two elements with same name")
+
+     @staticmethod
+     def _parse_header(stream):
+         """
+         Parse a PLY header from a readable file-like stream.
+
+         """
+         lines = []
+         comments = {"comment": [], "obj_info": []}
+         while True:
+             line = stream.readline().decode("ascii").strip()
+             fields = _split_line(line, 1)
+
+             if fields[0] == "end_header":
+                 break
+
+             elif fields[0] in comments.keys():
+                 lines.append(fields)
+             else:
+                 lines.append(line.split())
+
+         a = 0
+         if lines[a] != ["ply"]:
+             raise PlyParseError("expected 'ply'")
+
+         a += 1
+         while lines[a][0] in comments.keys():
+             comments[lines[a][0]].append(lines[a][1])
+             a += 1
+
+         if lines[a][0] != "format":
+             raise PlyParseError("expected 'format'")
+
+         if lines[a][2] != "1.0":
+             raise PlyParseError("expected version '1.0'")
+
+         if len(lines[a]) != 3:
+             raise PlyParseError("too many fields after 'format'")
+
+         fmt = lines[a][1]
+
+         if fmt not in _byte_order_map:
+             raise PlyParseError("don't understand format %r" % fmt)
+
+         byte_order = _byte_order_map[fmt]
+         text = fmt == "ascii"
+
+         a += 1
+         while a < len(lines) and lines[a][0] in comments.keys():
+             comments[lines[a][0]].append(lines[a][1])
+             a += 1
+
+         return PlyData(
+             PlyElement._parse_multi(lines[a:]),
+             text,
+             byte_order,
+             comments["comment"],
+             comments["obj_info"],
+         )
+
+     @staticmethod
+     def read(stream):
+         """
+         Read PLY data from a readable file-like object or filename.
+
+         """
+         (must_close, stream) = _open_stream(stream, "read")
+         try:
+             data = PlyData._parse_header(stream)
+             for elt in data:
+                 elt._read(stream, data.text, data.byte_order)
+         finally:
+             if must_close:
+                 stream.close()
+
+         return data
+
+     def write(self, stream):
+         """
+         Write PLY data to a writeable file-like object or filename.
+
+         """
+         (must_close, stream) = _open_stream(stream, "write")
+         try:
+             stream.write(self.header.encode("ascii"))
+             stream.write(b"\r\n")
+             for elt in self:
+                 elt._write(stream, self.text, self.byte_order)
+         finally:
+             if must_close:
+                 stream.close()
+
+     @property
+     def header(self):
+         """
+         Provide PLY-formatted metadata for the instance.
+
+         """
+         lines = ["ply"]
+
+         if self.text:
+             lines.append("format ascii 1.0")
+         else:
+             lines.append("format " + _byte_order_reverse[self.byte_order] + " 1.0")
+
+         # Some information is lost here, since all comments are placed
+         # between the 'format' line and the first element.
+         for c in self.comments:
+             lines.append("comment " + c)
+
+         for c in self.obj_info:
+             lines.append("obj_info " + c)
+
+         lines.extend(elt.header for elt in self.elements)
+         lines.append("end_header")
+         return "\r\n".join(lines)
+
+     def __iter__(self):
+         return iter(self.elements)
+
+     def __len__(self):
+         return len(self.elements)
+
+     def __contains__(self, name):
+         return name in self._element_lookup
+
+     def __getitem__(self, name):
+         return self._element_lookup[name]
+
+     def __str__(self):
+         return self.header
+
+     def __repr__(self):
+         return "PlyData(%r, text=%r, byte_order=%r, comments=%r, obj_info=%r)" % (
+             self.elements,
+             self.text,
+             self.byte_order,
+             self.comments,
+             self.obj_info,
+         )
+
+
+ def _open_stream(stream, read_or_write):
+     if hasattr(stream, read_or_write):
+         return (False, stream)
+     try:
+         return (True, open(stream, read_or_write[0] + "b"))
+     except TypeError:
+         raise RuntimeError("expected open file or filename")
+
+
+ class PlyElement(object):
+     """
+     PLY file element.
+
+     A client of this library doesn't normally need to instantiate this
+     directly, so the following is only for the sake of documenting the
+     internals.
+
+     Creating a PlyElement instance is generally done in one of two ways:
+     as a byproduct of PlyData.read (when reading a PLY file) and by
+     PlyElement.describe (before writing a PLY file).
+
+     """
+
+     def __init__(self, name, properties, count, comments=[]):
+         """
+         This is not part of the public interface.  The preferred methods
+         of obtaining PlyElement instances are PlyData.read (to read from
+         a file) and PlyElement.describe (to construct from a numpy
+         array).
+
+         """
+         self._name = str(name)
+         self._check_name()
+         self._count = count
+
+         self._properties = tuple(properties)
+         self._index()
+
+         self.comments = list(comments)
+
+         self._have_list = any(isinstance(p, PlyListProperty) for p in self.properties)
+
+     @property
+     def count(self):
+         return self._count
+
+     def _get_data(self):
+         return self._data
+
+     def _set_data(self, data):
+         self._data = data
+         self._count = len(data)
+         self._check_sanity()
+
+     data = property(_get_data, _set_data)
+
+     def _check_sanity(self):
+         for prop in self.properties:
+             if prop.name not in self._data.dtype.fields:
+                 raise ValueError("dangling property %r" % prop.name)
+
+     def _get_properties(self):
+         return self._properties
+
+     def _set_properties(self, properties):
+         self._properties = tuple(properties)
+         self._check_sanity()
+         self._index()
+
+     properties = property(_get_properties, _set_properties)
+
+     def _index(self):
+         self._property_lookup = dict((prop.name, prop) for prop in self._properties)
+         if len(self._property_lookup) != len(self._properties):
+             raise ValueError("two properties with same name")
+
+     def ply_property(self, name):
+         return self._property_lookup[name]
+
+     @property
+     def name(self):
+         return self._name
+
+     def _check_name(self):
+         if any(c.isspace() for c in self._name):
+             msg = "element name %r contains spaces" % self._name
+             raise ValueError(msg)
+
+     def dtype(self, byte_order="="):
+         """
+         Return the numpy dtype of the in-memory representation of the
+         data.  (If there are no list properties, and the PLY format is
+         binary, then this also accurately describes the on-disk
+         representation of the element.)
+
+         """
+         return [(prop.name, prop.dtype(byte_order)) for prop in self.properties]
+
+     @staticmethod
+     def _parse_multi(header_lines):
+         """
+         Parse a list of PLY element definitions.
+
+         """
+         elements = []
+         while header_lines:
+             (elt, header_lines) = PlyElement._parse_one(header_lines)
+             elements.append(elt)
+
+         return elements
+
+     @staticmethod
+     def _parse_one(lines):
+         """
+         Consume one element definition.  The unconsumed input is
+         returned along with a PlyElement instance.
+
+         """
+         a = 0
+         line = lines[a]
+
+         if line[0] != "element":
+             raise PlyParseError("expected 'element'")
+         if len(line) > 3:
+             raise PlyParseError("too many fields after 'element'")
+         if len(line) < 3:
+             raise PlyParseError("too few fields after 'element'")
+
+         (name, count) = (line[1], int(line[2]))
+
+         comments = []
+         properties = []
+         while True:
+             a += 1
+             if a >= len(lines):
+                 break
+
+             if lines[a][0] == "comment":
+                 comments.append(lines[a][1])
+             elif lines[a][0] == "property":
+                 properties.append(PlyProperty._parse_one(lines[a]))
+             else:
+                 break
+
+         return (PlyElement(name, properties, count, comments), lines[a:])
+
+     @staticmethod
+     def describe(data, name, len_types={}, val_types={}, comments=[]):
+         """
+         Construct a PlyElement from an array's metadata.
+
+         len_types and val_types can be given as mappings from list
+         property names to type strings (like 'u1', 'f4', etc., or
+         'int8', 'float32', etc.).  These can be used to define the length
+         and value types of list properties.  List property lengths
+         always default to type 'u1' (8-bit unsigned integer), and value
+         types default to 'i4' (32-bit integer).
+
+         """
+         if not isinstance(data, _np.ndarray):
+             raise TypeError("only numpy arrays are supported")
+
+         if len(data.shape) != 1:
+             raise ValueError("only one-dimensional arrays are supported")
+
+         count = len(data)
+
+         properties = []
+         descr = data.dtype.descr
+
+         for t in descr:
+             if not isinstance(t[1], str):
+                 raise ValueError("nested records not supported")
+
+             if not t[0]:
+                 raise ValueError("field with empty name")
+
+             if len(t) != 2 or t[1][1] == "O":
+                 # non-scalar field, which corresponds to a list
+                 # property in PLY.
+
+                 if t[1][1] == "O":
+                     if len(t) != 2:
+                         raise ValueError("non-scalar object fields not supported")
+
+                 len_str = _data_type_reverse[len_types.get(t[0], "u1")]
+                 if t[1][1] == "O":
+                     val_type = val_types.get(t[0], "i4")
+                     val_str = _lookup_type(val_type)
+                 else:
+                     val_str = _lookup_type(t[1][1:])
+
+                 prop = PlyListProperty(t[0], len_str, val_str)
+             else:
+                 val_str = _lookup_type(t[1][1:])
+                 prop = PlyProperty(t[0], val_str)
+
+             properties.append(prop)
+
+         elt = PlyElement(name, properties, count, comments)
+         elt.data = data
+
+         return elt
+
+     def _read(self, stream, text, byte_order):
+         """
+         Read the actual data from a PLY file.
+
+         """
+         if text:
+             self._read_txt(stream)
+         else:
+             if self._have_list:
+                 # There are list properties, so a simple load is
+                 # impossible.
+                 self._read_bin(stream, byte_order)
+             else:
+                 # There are no list properties, so loading the data is
+                 # much more straightforward.
+                 self._data = _np.fromfile(stream, self.dtype(byte_order), self.count)
+
+                 if len(self._data) < self.count:
+                     k = len(self._data)
+                     del self._data
+                     raise PlyParseError("early end-of-file", self, k)
+
+         self._check_sanity()
+
+     def _write(self, stream, text, byte_order):
+         """
+         Write the data to a PLY file.
+
+         """
+         if text:
+             self._write_txt(stream)
+         else:
+             if self._have_list:
+                 # There are list properties, so serialization is
+                 # slightly complicated.
+                 self._write_bin(stream, byte_order)
+             else:
+                 # no list properties, so serialization is
+                 # straightforward.
+                 self.data.astype(self.dtype(byte_order), copy=False).tofile(stream)
+
+     def _read_txt(self, stream):
+         """
+         Load a PLY element from an ASCII-format PLY file.  The element
+         may contain list properties.
+
+         """
+         self._data = _np.empty(self.count, dtype=self.dtype())
+
+         k = 0
+         for line in _islice(iter(stream.readline, b""), self.count):
+             fields = iter(line.strip().split())
+             for prop in self.properties:
+                 try:
+                     self._data[prop.name][k] = prop._from_fields(fields)
+                 except StopIteration:
+                     raise PlyParseError("early end-of-line", self, k, prop)
+                 except ValueError:
+                     raise PlyParseError("malformed input", self, k, prop)
+             try:
+                 next(fields)
+             except StopIteration:
+                 pass
+             else:
+                 raise PlyParseError("expected end-of-line", self, k)
+             k += 1
+
+         if k < self.count:
+             del self._data
+             raise PlyParseError("early end-of-file", self, k)
+
+     def _write_txt(self, stream):
+         """
+         Save a PLY element to an ASCII-format PLY file.  The element may
+         contain list properties.
+
+         """
+         for rec in self.data:
+             fields = []
+             for prop in self.properties:
+                 fields.extend(prop._to_fields(rec[prop.name]))
+
+             _np.savetxt(stream, [fields], "%.18g", newline="\r\n")
+
+     def _read_bin(self, stream, byte_order):
+         """
+         Load a PLY element from a binary PLY file.  The element may
+         contain list properties.
+
+         """
+         self._data = _np.empty(self.count, dtype=self.dtype(byte_order))
+
+         for k in _range(self.count):
+             for prop in self.properties:
+                 try:
+                     self._data[prop.name][k] = prop._read_bin(stream, byte_order)
+                 except StopIteration:
+                     raise PlyParseError("early end-of-file", self, k, prop)
+
+     def _write_bin(self, stream, byte_order):
+         """
+         Save a PLY element to a binary PLY file.  The element may
+         contain list properties.
+
+         """
+         for rec in self.data:
+             for prop in self.properties:
+                 prop._write_bin(rec[prop.name], stream, byte_order)
+
+     @property
+     def header(self):
+         """
+         Format this element's metadata as it would appear in a PLY
+         header.
+
+         """
+         lines = ["element %s %d" % (self.name, self.count)]
+
+         # Some information is lost here, since all comments are placed
+         # between the 'element' line and the first property definition.
+         for c in self.comments:
+             lines.append("comment " + c)
+
+         lines.extend(list(map(str, self.properties)))
+
+         return "\r\n".join(lines)
+
+     def __getitem__(self, key):
+         return self.data[key]
+
+     def __setitem__(self, key, value):
+         self.data[key] = value
+
+     def __str__(self):
+         return self.header
+
+     def __repr__(self):
+         return "PlyElement(%r, %r, count=%d, comments=%r)" % (
+             self.name,
+             self.properties,
+             self.count,
+             self.comments,
+         )
+
+
+ class PlyProperty(object):
+     """
+     PLY property description.  This class is pure metadata; the data
+     itself is contained in PlyElement instances.
+
+     """
+
+     def __init__(self, name, val_dtype):
+         self._name = str(name)
+         self._check_name()
+         self.val_dtype = val_dtype
+
+     def _get_val_dtype(self):
+         return self._val_dtype
+
+     def _set_val_dtype(self, val_dtype):
+ self._val_dtype = _data_types[_lookup_type(val_dtype)]
718
+
719
+ val_dtype = property(_get_val_dtype, _set_val_dtype)
720
+
721
+ @property
722
+ def name(self):
723
+ return self._name
724
+
725
+ def _check_name(self):
726
+ if any(c.isspace() for c in self._name):
727
+ msg = "Error: property name %r contains spaces" % self._name
728
+ raise RuntimeError(msg)
729
+
730
+ @staticmethod
731
+ def _parse_one(line):
732
+ assert line[0] == "property"
733
+
734
+ if line[1] == "list":
735
+ if len(line) > 5:
736
+ raise PlyParseError("too many fields after " "'property list'")
737
+ if len(line) < 5:
738
+ raise PlyParseError("too few fields after " "'property list'")
739
+
740
+ return PlyListProperty(line[4], line[2], line[3])
741
+
742
+ else:
743
+ if len(line) > 3:
744
+ raise PlyParseError("too many fields after " "'property'")
745
+ if len(line) < 3:
746
+ raise PlyParseError("too few fields after " "'property'")
747
+
748
+ return PlyProperty(line[2], line[1])
749
+
750
+ def dtype(self, byte_order="="):
751
+ """
752
+ Return the numpy dtype description for this property (as a tuple
753
+ of strings).
754
+
755
+ """
756
+ return byte_order + self.val_dtype
757
+
758
+ def _from_fields(self, fields):
759
+ """
760
+ Parse from generator. Raise StopIteration if the property could
761
+ not be read.
762
+
763
+ """
764
+ return _np.dtype(self.dtype()).type(next(fields))
765
+
766
+ def _to_fields(self, data):
767
+ """
768
+ Return generator over one item.
769
+
770
+ """
771
+ yield _np.dtype(self.dtype()).type(data)
772
+
773
+ def _read_bin(self, stream, byte_order):
774
+ """
775
+ Read data from a binary stream. Raise StopIteration if the
776
+ property could not be read.
777
+
778
+ """
779
+ try:
780
+ return _np.fromfile(stream, self.dtype(byte_order), 1)[0]
781
+ except IndexError:
782
+ raise StopIteration
783
+
784
+ def _write_bin(self, data, stream, byte_order):
785
+ """
786
+ Write data to a binary stream.
787
+
788
+ """
789
+ _np.dtype(self.dtype(byte_order)).type(data).tofile(stream)
790
+
791
+ def __str__(self):
792
+ val_str = _data_type_reverse[self.val_dtype]
793
+ return "property %s %s" % (val_str, self.name)
794
+
795
+ def __repr__(self):
796
+ return "PlyProperty(%r, %r)" % (self.name, _lookup_type(self.val_dtype))
797
+
798
+
799
+ class PlyListProperty(PlyProperty):
800
+ """
801
+ PLY list property description.
802
+
803
+ """
804
+
805
+ def __init__(self, name, len_dtype, val_dtype):
806
+ PlyProperty.__init__(self, name, val_dtype)
807
+
808
+ self.len_dtype = len_dtype
809
+
810
+ def _get_len_dtype(self):
811
+ return self._len_dtype
812
+
813
+ def _set_len_dtype(self, len_dtype):
814
+ self._len_dtype = _data_types[_lookup_type(len_dtype)]
815
+
816
+ len_dtype = property(_get_len_dtype, _set_len_dtype)
817
+
818
+ def dtype(self, byte_order="="):
819
+ """
820
+ List properties always have a numpy dtype of "object".
821
+
822
+ """
823
+ return "|O"
824
+
825
+ def list_dtype(self, byte_order="="):
826
+ """
827
+ Return the pair (len_dtype, val_dtype) (both numpy-friendly
828
+ strings).
829
+
830
+ """
831
+ return (byte_order + self.len_dtype, byte_order + self.val_dtype)
832
+
833
+ def _from_fields(self, fields):
834
+ (len_t, val_t) = self.list_dtype()
835
+
836
+ n = int(_np.dtype(len_t).type(next(fields)))
837
+
838
+ data = _np.loadtxt(list(_islice(fields, n)), val_t, ndmin=1)
839
+ if len(data) < n:
840
+ raise StopIteration
841
+
842
+ return data
843
+
844
+ def _to_fields(self, data):
845
+ """
846
+ Return generator over the (numerical) PLY representation of the
847
+ list data (length followed by actual data).
848
+
849
+ """
850
+ (len_t, val_t) = self.list_dtype()
851
+
852
+ data = _np.asarray(data, dtype=val_t).ravel()
853
+
854
+ yield _np.dtype(len_t).type(data.size)
855
+ for x in data:
856
+ yield x
857
+
858
+ def _read_bin(self, stream, byte_order):
859
+ (len_t, val_t) = self.list_dtype(byte_order)
860
+
861
+ try:
862
+ n = _np.fromfile(stream, len_t, 1)[0]
863
+ except IndexError:
864
+ raise StopIteration
865
+
866
+ data = _np.fromfile(stream, val_t, n)
867
+ if len(data) < n:
868
+ raise StopIteration
869
+
870
+ return data
871
+
872
+ def _write_bin(self, data, stream, byte_order):
873
+ """
874
+ Write data to a binary stream.
875
+
876
+ """
877
+ (len_t, val_t) = self.list_dtype(byte_order)
878
+
879
+ data = _np.asarray(data, dtype=val_t).ravel()
880
+
881
+ _np.array(data.size, dtype=len_t).tofile(stream)
882
+ data.tofile(stream)
883
+
884
+ def __str__(self):
885
+ len_str = _data_type_reverse[self.len_dtype]
886
+ val_str = _data_type_reverse[self.val_dtype]
887
+ return "property list %s %s %s" % (len_str, val_str, self.name)
888
+
889
+ def __repr__(self):
890
+ return "PlyListProperty(%r, %r, %r)" % (
891
+ self.name,
892
+ _lookup_type(self.len_dtype),
893
+ _lookup_type(self.val_dtype),
894
+ )
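
As a usage sketch for the element API above: PlyElement.describe builds an element straight from a structured numpy array, with object-dtype fields becoming list properties. This assumes the PlyData container defined earlier in this module; the array contents and output file name are illustrative.

import numpy as np

vertex = np.array(
    [(0.0, 0.0, 0.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)],
    dtype=[("x", "f4"), ("y", "f4"), ("z", "f4")],
)
face = np.empty(1, dtype=[("vertex_indices", "O")])
face["vertex_indices"][0] = np.array([0, 1, 2], dtype="i4")

# scalar fields -> PlyProperty, object fields -> PlyListProperty
el_vertex = PlyElement.describe(vertex, "vertex")
el_face = PlyElement.describe(face, "face", val_types={"vertex_indices": "i4"})
PlyData([el_vertex, el_face], text=True).write("triangle.ply")
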
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/point_cloud_extractor.py ADDED
@@ -0,0 +1,98 @@
1
+ # Copyright (c) Facebook, Inc. and its affiliates.
2
+ #
3
+ # This source code is licensed under the MIT license found in the
4
+ # LICENSE file in the root directory of this source tree.
5
+
6
+
7
+ import glob, os
8
+ import numpy as np
9
+ import cv2
10
+ import torch
11
+
12
+
13
+ def extractor(input_path, output_path):
14
+ if not os.path.exists(output_path):
15
+ os.mkdir(output_path)
16
+
17
+ # Load Depth Camera Intrinsic
18
+ depth_intrinsic = np.loadtxt(input_path + "/intrinsic/intrinsic_depth.txt")
19
+ print("Depth intrinsic: ")
20
+ print(depth_intrinsic)
21
+
22
+ # Compute camera distance (just for demo, so you can choose the camera distance in frame sampling)
23
+ poses = sorted(
24
+ glob.glob(input_path + "/pose/*.txt"),
25
+ key=lambda a: int(os.path.basename(a).split(".")[0]),
26
+ )
27
+ depths = sorted(
28
+ glob.glob(input_path + "/depth/*.png"),
29
+ key=lambda a: int(os.path.basename(a).split(".")[0]),
30
+ )
31
+ colors = sorted(
32
+ glob.glob(input_path + "/color/*.png"),
33
+ key=lambda a: int(os.path.basename(a).split(".")[0]),
34
+ )
35
+
36
+ # # Get Aligned Point Clouds.
37
+ for ind, (pose, depth, color) in enumerate(zip(poses, depths, colors)):
38
+ name = os.path.basename(pose).split(".")[0]
39
+
40
+ if os.path.exists(output_path + "/{}.npz".format(name)):
41
+ continue
42
+
43
+ try:
44
+ print("=" * 50, ": {}".format(pose))
45
+ depth_img = cv2.imread(depth, -1) # read 16bit grayscale image
46
+ mask = depth_img != 0
47
+ color_image = cv2.imread(color)
48
+ color_image = cv2.resize(color_image, (640, 480))
49
+ color_image = np.reshape(color_image[mask], [-1, 3])
50
+ color_rgb = np.zeros_like(color_image)  # convert OpenCV BGR to RGB; avoid shadowing the `colors` file list
51
+ color_rgb[:, 0] = color_image[:, 2]
52
+ color_rgb[:, 1] = color_image[:, 1]
53
+ color_rgb[:, 2] = color_image[:, 0]
54
+
55
+ pose = np.loadtxt(poses[ind])
56
+ print("Camera pose: ")
57
+ print(pose)
58
+
59
+ depth_shift = 1000.0
60
+ x, y = np.meshgrid(
61
+ np.linspace(0, depth_img.shape[1] - 1, depth_img.shape[1]),
62
+ np.linspace(0, depth_img.shape[0] - 1, depth_img.shape[0]),
63
+ )
64
+ uv_depth = np.zeros((depth_img.shape[0], depth_img.shape[1], 3))
65
+ uv_depth[:, :, 0] = x
66
+ uv_depth[:, :, 1] = y
67
+ uv_depth[:, :, 2] = depth_img / depth_shift
68
+ uv_depth = np.reshape(uv_depth, [-1, 3])
69
+ uv_depth = uv_depth[np.where(uv_depth[:, 2] != 0), :].squeeze()
70
+
71
+ intrinsic_inv = np.linalg.inv(depth_intrinsic)
72
+ fx = depth_intrinsic[0, 0]
73
+ fy = depth_intrinsic[1, 1]
74
+ cx = depth_intrinsic[0, 2]
75
+ cy = depth_intrinsic[1, 2]
76
+ bx = depth_intrinsic[0, 3]
77
+ by = depth_intrinsic[1, 3]
78
+ point_list = []
79
+ n = uv_depth.shape[0]
80
+ points = np.ones((n, 4))
81
+ X = (uv_depth[:, 0] - cx) * uv_depth[:, 2] / fx + bx
82
+ Y = (uv_depth[:, 1] - cy) * uv_depth[:, 2] / fy + by
83
+ points[:, 0] = X
84
+ points[:, 1] = Y
85
+ points[:, 2] = uv_depth[:, 2]
86
+ points_world = np.dot(points, np.transpose(pose))
87
+ print(points_world.shape)
88
+
89
+ pcd = dict(coord=points_world[:, :3], color=color_rgb)
90
+ # pcd_save = np.zeros((points_world.shape[0], 7))
91
+ # pcd_save[:, :3] = points_world[:, :3]
92
+ # pcd_save[:, 3:6] = color_rgb
93
+
94
+ # print('Saving npz file...')
95
+ # np.savez(output_path + '/{}.npz'.format(name), pcd=pcd_save)
96
+ torch.save(pcd, output_path + "/{}.pth".format(name))
97
+ except Exception:
98
+ continue
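
The back-projection in extractor() is the standard pinhole model: a pixel (u, v) with depth z maps to camera coordinates X = (u - cx) * z / fx, Y = (v - cy) * z / fy, Z = z, then lifts to world coordinates through the 4x4 camera-to-world pose. A minimal single-pixel sketch, with made-up intrinsics:

import numpy as np

fx, fy, cx, cy = 577.6, 578.3, 319.5, 239.5  # illustrative depth intrinsics
u, v, z = 400.0, 300.0, 2.0                  # pixel coordinates, depth in metres
point_cam = np.array([(u - cx) * z / fx, (v - cy) * z / fy, z, 1.0])
pose = np.eye(4)                             # stand-in for a pose/*.txt matrix
point_world = pose @ point_cam               # homogeneous world coordinates
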
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/preprocess.py ADDED
@@ -0,0 +1,51 @@
1
+ import os
2
+ import argparse
3
+ import glob
4
+ import multiprocessing as mp
5
+ from concurrent.futures import ProcessPoolExecutor
6
+ from itertools import repeat
7
+ from reader import reader
8
+ from point_cloud_extractor import extractor
9
+ from compute_full_overlapping import compute_full_overlapping
10
+
11
+
12
+ frame_skip = 25
13
+
14
+
15
+ def parse_sens(sens_dir, output_dir):
16
+ scene_id = os.path.basename(os.path.dirname(sens_dir))
17
+ print(f"Parsing sens data{sens_dir}")
18
+ reader(
19
+ sens_dir,
20
+ os.path.join(output_dir, scene_id),
21
+ frame_skip,
22
+ export_color_images=True,
23
+ export_depth_images=True,
24
+ export_poses=True,
25
+ export_intrinsics=True,
26
+ )
27
+ extractor(
28
+ os.path.join(output_dir, scene_id), os.path.join(output_dir, scene_id, "pcd")
29
+ )
30
+ compute_full_overlapping(output_dir, scene_id)
31
+
32
+
33
+ if __name__ == "__main__":
34
+ parser = argparse.ArgumentParser()
35
+ parser.add_argument(
36
+ "--dataset_root",
37
+ required=True,
38
+ help="Path to the ScanNet dataset containing scene folders",
39
+ )
40
+ parser.add_argument(
41
+ "--output_root",
42
+ required=True,
43
+ help="Output path where train/val folders will be located",
44
+ )
45
+ opt = parser.parse_args()
46
+ sens_list = sorted(glob.glob(os.path.join(opt.dataset_root, "scans/scene*/*.sens")))
47
+ # Preprocess data.
48
+ pool = ProcessPoolExecutor(max_workers=mp.cpu_count())
49
+ # pool = ProcessPoolExecutor(max_workers=1)
50
+ print("Processing scenes...")
51
+ _ = list(pool.map(parse_sens, sens_list, repeat(opt.output_root)))
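
A hypothetical single-scene run of the pipeline above; the paths are illustrative, and parse_sens expects the standard `<root>/scans/<scene>/<scene>.sens` layout:

parse_sens(
    "/data/scannet/scans/scene0000_00/scene0000_00.sens",
    "/data/scannet_pair",
)
# expected output layout:
# /data/scannet_pair/scene0000_00/{color,depth,pose,intrinsic,pcd}
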
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannet/scannet_pair/reader.py ADDED
@@ -0,0 +1,33 @@
1
+ import argparse
2
+ import os, sys
3
+
4
+ from SensorData import SensorData
5
+
6
+
7
+ def reader(
8
+ filename,
9
+ output_path,
10
+ frame_skip,
11
+ export_color_images=False,
12
+ export_depth_images=False,
13
+ export_poses=False,
14
+ export_intrinsics=False,
15
+ ):
16
+ if not os.path.exists(output_path):
17
+ os.makedirs(output_path)
18
+
19
+ # load the data
20
+ print("loading %s..." % filename)
21
+ sd = SensorData(filename)
22
+ if export_depth_images:
23
+ sd.export_depth_images(
24
+ os.path.join(output_path, "depth"), frame_skip=frame_skip
25
+ )
26
+ if export_color_images:
27
+ sd.export_color_images(
28
+ os.path.join(output_path, "color"), frame_skip=frame_skip
29
+ )
30
+ if export_poses:
31
+ sd.export_poses(os.path.join(output_path, "pose"), frame_skip=frame_skip)
32
+ if export_intrinsics:
33
+ sd.export_intrinsics(os.path.join(output_path, "intrinsic"))
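
For example, a sketch of exporting every 25th color/depth frame plus poses and intrinsics from one capture (file names illustrative; SensorData comes from the ScanNet SDK):

reader(
    "scene0000_00.sens",
    "out/scene0000_00",
    frame_skip=25,
    export_color_images=True,
    export_depth_images=True,
    export_poses=True,
    export_intrinsics=True,
)
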
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/scannetpp/preprocess_scannetpp.py ADDED
@@ -0,0 +1,252 @@
1
+ """
2
+ Preprocessing Script for ScanNet++
3
+ modified from official preprocess code.
4
+
5
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
6
+ Please cite our work if the code is helpful to you.
7
+ """
8
+
9
+ import argparse
10
+ import json
11
+ import numpy as np
12
+ import pandas as pd
13
+ import open3d as o3d
14
+ import multiprocessing as mp
15
+ from collections import OrderedDict
16
+ from concurrent.futures import ProcessPoolExecutor
17
+ from itertools import repeat
18
+ from pathlib import Path
19
+
20
+
21
+ def parse_scene(
22
+ name,
23
+ split,
24
+ dataset_root,
25
+ output_root,
26
+ label_mapping,
27
+ class2idx,
28
+ ignore_index=-1,
29
+ ):
30
+ print(f"Parsing scene {name} in {split} split")
31
+ dataset_root = Path(dataset_root)
32
+ output_root = Path(output_root)
33
+ scene_path = dataset_root / "data" / name / "scans"
34
+ mesh_path = scene_path / "mesh_aligned_0.05.ply"
35
+ segs_path = scene_path / "segments.json"
36
+ anno_path = scene_path / "segments_anno.json"
37
+
38
+ # load mesh vertices and colors
39
+ mesh = o3d.io.read_triangle_mesh(str(mesh_path))
40
+
41
+ # extract mesh information
42
+ mesh.compute_vertex_normals(normalized=True)
43
+ coord = np.array(mesh.vertices).astype(np.float32)
44
+ color = (np.array(mesh.vertex_colors) * 255).astype(np.uint8)
45
+ normal = np.array(mesh.vertex_normals).astype(np.float32)
46
+
47
+ save_path = output_root / split / name
48
+ save_path.mkdir(parents=True, exist_ok=True)
49
+ np.save(save_path / "coord.npy", coord)
50
+ np.save(save_path / "color.npy", color)
51
+ np.save(save_path / "normal.npy", normal)
52
+
53
+ if split == "test":
54
+ return
55
+
56
+ # get label on vertices
57
+ # load segments = vertices per segment ID
58
+ with open(segs_path) as f:
59
+ segments = json.load(f)
60
+ # load anno = (instance, groups of segments)
61
+ with open(anno_path) as f:
62
+ anno = json.load(f)
63
+ seg_indices = np.array(segments["segIndices"], dtype=np.uint32)
64
+ num_vertices = len(seg_indices)
65
+ assert num_vertices == len(coord)
66
+ semantic_gt = np.ones((num_vertices, 3), dtype=np.int16) * ignore_index
67
+ instance_gt = np.ones((num_vertices, 3), dtype=np.int16) * ignore_index
68
+
69
+ # keep track of the size of the instance (#vertices) assigned to each vertex;
70
+ # later, keep the label of the smallest instance as the major label of a vertex
71
+ # (store inf initially so that we can pick the smallest instance)
72
+ instance_size = np.ones((num_vertices, 3), dtype=np.int16) * np.inf
73
+
74
+ # number of labels used per vertex, initially 0;
75
+ # incremented each time a new label is added
76
+ labels_used = np.zeros(num_vertices, dtype=np.int16)
77
+
78
+ for idx, instance in enumerate(anno["segGroups"]):
79
+ label = instance["label"]
80
+ instance["label_orig"] = label
81
+ # remap label
82
+ instance["label"] = label_mapping.get(label, None)
83
+ instance["label_index"] = class2idx.get(label, ignore_index)
84
+
85
+ if instance["label_index"] == ignore_index:
86
+ continue
87
+ # get all the vertices with segment index in this instance
88
+ # and max number of labels not yet applied
89
+ mask = np.isin(seg_indices, instance["segments"]) & (labels_used < 3)
90
+ size = mask.sum()
91
+ if size == 0:
92
+ continue
93
+
94
+ # get the position to add the label - 0, 1, 2
95
+ label_position = labels_used[mask]
96
+ semantic_gt[mask, label_position] = instance["label_index"]
97
+ # store all valid instances (including ignored instances)
98
+ instance_gt[mask, label_position] = instance["objectId"]
99
+ instance_size[mask, label_position] = size
100
+ labels_used[mask] += 1
101
+
102
+ # major label is the label of smallest instance for each vertex
103
+ # use major label for single class segmentation
104
+ # shift major label to the first column
105
+ mask = labels_used > 1
106
+ if mask.sum() > 0:
107
+ major_label_position = np.argmin(instance_size[mask], axis=1)
108
+
109
+ major_semantic_label = semantic_gt[mask, major_label_position]
110
+ semantic_gt[mask, major_label_position] = semantic_gt[:, 0][mask]
111
+ semantic_gt[:, 0][mask] = major_semantic_label
112
+
113
+ major_instance_label = instance_gt[mask, major_label_position]
114
+ instance_gt[mask, major_label_position] = instance_gt[:, 0][mask]
115
+ instance_gt[:, 0][mask] = major_instance_label
116
+
117
+ np.save(save_path / "segment.npy", semantic_gt)
118
+ np.save(save_path / "instance.npy", instance_gt)
119
+
120
+
121
+ def filter_map_classes(mapping, count_thresh, count_type, mapping_type):
122
+ mapping = mapping[mapping[count_type] >= count_thresh]
123
+ if mapping_type == "semantic":
124
+ map_key = "semantic_map_to"
125
+ elif mapping_type == "instance":
126
+ map_key = "instance_map_to"
127
+ else:
128
+ raise NotImplementedError
129
+ # create a dict with classes to be mapped
130
+ # classes that don't have mapping are entered as x->x
131
+ # otherwise x->y
132
+ map_dict = OrderedDict()
133
+
134
+ for i in range(mapping.shape[0]):
135
+ row = mapping.iloc[i]
136
+ class_name = row["class"]
137
+ map_target = row[map_key]
138
+
139
+ # map to None or some other label -> don't add this class to the label list
140
+ try:
141
+ if len(map_target) > 0:
142
+ # map to None -> don't use this class
143
+ if map_target == "None":
144
+ pass
145
+ else:
146
+ # map to something else -> use this class
147
+ map_dict[class_name] = map_target
148
+ except TypeError:
149
+ # nan values -> no mapping, keep label as is
150
+ if class_name not in map_dict:
151
+ map_dict[class_name] = class_name
152
+
153
+ return map_dict
154
+
155
+
156
+ if __name__ == "__main__":
157
+ parser = argparse.ArgumentParser()
158
+ parser.add_argument(
159
+ "--dataset_root",
160
+ required=True,
161
+ help="Path to the ScanNet++ dataset containing data/metadata/splits.",
162
+ )
163
+ parser.add_argument(
164
+ "--output_root",
165
+ required=True,
166
+ help="Output path where train/val/test folders will be located.",
167
+ )
168
+ parser.add_argument(
169
+ "--ignore_index",
170
+ default=-1,
171
+ type=int,
172
+ help="Default ignore index.",
173
+ )
174
+ parser.add_argument(
175
+ "--num_workers",
176
+ default=mp.cpu_count(),
177
+ type=int,
178
+ help="Num workers for preprocessing.",
179
+ )
180
+ config = parser.parse_args()
181
+
182
+ print("Loading meta data...")
183
+ config.dataset_root = Path(config.dataset_root)
184
+ config.output_root = Path(config.output_root)
185
+
186
+ train_list = np.loadtxt(
187
+ config.dataset_root / "splits" / "nvs_sem_train.txt",
188
+ dtype=str,
189
+ )
190
+ print("Num samples in training split:", len(train_list))
191
+
192
+ val_list = np.loadtxt(
193
+ config.dataset_root / "splits" / "nvs_sem_val.txt",
194
+ dtype=str,
195
+ )
196
+ print("Num samples in validation split:", len(val_list))
197
+
198
+ test_list = np.loadtxt(
199
+ config.dataset_root / "splits" / "sem_test.txt",
200
+ dtype=str,
201
+ )
202
+ print("Num samples in testing split:", len(test_list))
203
+
204
+ data_list = np.concatenate([train_list, val_list, test_list])
205
+ split_list = np.concatenate(
206
+ [
207
+ np.full_like(train_list, "train"),
208
+ np.full_like(val_list, "val"),
209
+ np.full_like(test_list, "test"),
210
+ ]
211
+ )
212
+
213
+ # Parsing label information and mapping
214
+ segment_class_names = np.loadtxt(
215
+ config.dataset_root / "metadata" / "semantic_benchmark" / "top100.txt",
216
+ dtype=str,
217
+ delimiter=".", # dummy delimiter to replace " "
218
+ )
219
+ print("Num classes in segment class list:", len(segment_class_names))
220
+
221
+ instance_class_names = np.loadtxt(
222
+ config.dataset_root / "metadata" / "semantic_benchmark" / "top100_instance.txt",
223
+ dtype=str,
224
+ delimiter=".", # dummy delimiter to replace " "
225
+ )
226
+ print("Num classes in instance class list:", len(instance_class_names))
227
+
228
+ label_mapping = pd.read_csv(
229
+ config.dataset_root / "metadata" / "semantic_benchmark" / "map_benchmark.csv"
230
+ )
231
+ label_mapping = filter_map_classes(
232
+ label_mapping, count_thresh=0, count_type="count", mapping_type="semantic"
233
+ )
234
+ class2idx = {
235
+ class_name: idx for (idx, class_name) in enumerate(segment_class_names)
236
+ }
237
+
238
+ print("Processing scenes...")
239
+ pool = ProcessPoolExecutor(max_workers=config.num_workers)
240
+ _ = list(
241
+ pool.map(
242
+ parse_scene,
243
+ data_list,
244
+ split_list,
245
+ repeat(config.dataset_root),
246
+ repeat(config.output_root),
247
+ repeat(label_mapping),
248
+ repeat(class2idx),
249
+ repeat(config.ignore_index),
250
+ )
251
+ )
252
+ pool.shutdown()
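
A toy check of the major-label shuffle in parse_scene(): when a vertex carries more than one label, the label of the smallest instance is swapped into column 0. All values below are made up.

import numpy as np

semantic_gt = np.array([[3, 7, -1]], dtype=np.int16)  # two labels on one vertex
instance_size = np.array([[500.0, 12.0, np.inf]])     # instance with label 7 is smaller
labels_used = np.array([2], dtype=np.int16)

mask = labels_used > 1
major = np.argmin(instance_size[mask], axis=1)        # -> column 1
major_label = semantic_gt[mask, major]
semantic_gt[mask, major] = semantic_gt[:, 0][mask]
semantic_gt[:, 0][mask] = major_label
print(semantic_gt)  # [[ 7  3 -1]]
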
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/structured3d/preprocess_structured3d.py ADDED
@@ -0,0 +1,420 @@
1
+ """
2
+ Preprocessing Script for Structured3D
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import argparse
9
+ import io
10
+ import os
11
+ import PIL
12
+ from PIL import Image
13
+ import cv2
14
+ import zipfile
15
+ import numpy as np
16
+ import multiprocessing as mp
17
+ from concurrent.futures import ProcessPoolExecutor
18
+ from itertools import repeat
19
+
20
+
21
+ VALID_CLASS_IDS_25 = (
22
+ 1,
23
+ 2,
24
+ 3,
25
+ 4,
26
+ 5,
27
+ 6,
28
+ 7,
29
+ 8,
30
+ 9,
31
+ 11,
32
+ 14,
33
+ 15,
34
+ 16,
35
+ 17,
36
+ 18,
37
+ 19,
38
+ 22,
39
+ 24,
40
+ 25,
41
+ 32,
42
+ 34,
43
+ 35,
44
+ 38,
45
+ 39,
46
+ 40,
47
+ )
48
+ CLASS_LABELS_25 = (
49
+ "wall",
50
+ "floor",
51
+ "cabinet",
52
+ "bed",
53
+ "chair",
54
+ "sofa",
55
+ "table",
56
+ "door",
57
+ "window",
58
+ "picture",
59
+ "desk",
60
+ "shelves",
61
+ "curtain",
62
+ "dresser",
63
+ "pillow",
64
+ "mirror",
65
+ "ceiling",
66
+ "refrigerator",
67
+ "television",
68
+ "nightstand",
69
+ "sink",
70
+ "lamp",
71
+ "otherstructure",
72
+ "otherfurniture",
73
+ "otherprop",
74
+ )
75
+
76
+
77
+ def normal_from_cross_product(points_2d: np.ndarray) -> np.ndarray:
78
+ xyz_points_pad = np.pad(points_2d, ((0, 1), (0, 1), (0, 0)), mode="symmetric")
79
+ xyz_points_ver = (xyz_points_pad[:, :-1, :] - xyz_points_pad[:, 1:, :])[:-1, :, :]
80
+ xyz_points_hor = (xyz_points_pad[:-1, :, :] - xyz_points_pad[1:, :, :])[:, :-1, :]
81
+ xyz_normal = np.cross(xyz_points_hor, xyz_points_ver)
82
+ xyz_dist = np.linalg.norm(xyz_normal, axis=-1, keepdims=True)
83
+ xyz_normal = np.divide(
84
+ xyz_normal, xyz_dist, out=np.zeros_like(xyz_normal), where=xyz_dist != 0
85
+ )
86
+ return xyz_normal
87
+
88
+
89
+ class Structured3DReader:
90
+ def __init__(self, files):
91
+ super().__init__()
92
+ if isinstance(files, str):
93
+ files = [files]
94
+ self.readers = [zipfile.ZipFile(f, "r") for f in files]
95
+ self.names_mapper = dict()
96
+ for idx, reader in enumerate(self.readers):
97
+ for name in reader.namelist():
98
+ self.names_mapper[name] = idx
99
+
100
+ def filelist(self):
101
+ return list(self.names_mapper.keys())
102
+
103
+ def listdir(self, dir_name):
104
+ dir_name = dir_name.lstrip(os.path.sep).rstrip(os.path.sep)
105
+ file_list = list(
106
+ np.unique(
107
+ [
108
+ f.replace(dir_name + os.path.sep, "", 1).split(os.path.sep)[0]
109
+ for f in self.filelist()
110
+ if f.startswith(dir_name + os.path.sep)
111
+ ]
112
+ )
113
+ )
114
+ if "" in file_list:
115
+ file_list.remove("")
116
+ return file_list
117
+
118
+ def read(self, file_name):
119
+ split = self.names_mapper[file_name]
120
+ return self.readers[split].read(file_name)
121
+
122
+ def read_camera(self, camera_path):
123
+ z2y_top_m = np.array([[0, 1, 0], [0, 0, 1], [1, 0, 0]], dtype=np.float32)
124
+ cam_extr = np.fromstring(self.read(camera_path), dtype=np.float32, sep=" ")
125
+ cam_t = np.matmul(z2y_top_m, cam_extr[:3] / 1000)
126
+ if cam_extr.shape[0] > 3:
127
+ cam_front, cam_up = cam_extr[3:6], cam_extr[6:9]
128
+ cam_n = np.cross(cam_front, cam_up)
129
+ cam_r = np.stack((cam_front, cam_up, cam_n), axis=1).astype(np.float32)
130
+ cam_r = np.matmul(z2y_top_m, cam_r)
131
+ cam_f = cam_extr[9:11]
132
+ else:
133
+ cam_r = np.eye(3, dtype=np.float32)
134
+ cam_f = None
135
+ return cam_r, cam_t, cam_f
136
+
137
+ def read_depth(self, depth_path):
138
+ depth = cv2.imdecode(
139
+ np.frombuffer(self.read(depth_path), np.uint8), cv2.IMREAD_UNCHANGED
140
+ )[..., np.newaxis]
141
+ depth[depth == 0] = 65535
142
+ return depth
143
+
144
+ def read_color(self, color_path):
145
+ color = cv2.imdecode(
146
+ np.frombuffer(self.read(color_path), np.uint8), cv2.IMREAD_UNCHANGED
147
+ )[..., :3][..., ::-1]
148
+ return color
149
+
150
+ def read_segment(self, segment_path):
151
+ segment = np.array(PIL.Image.open(io.BytesIO(self.read(segment_path))))[
152
+ ..., np.newaxis
153
+ ]
154
+ return segment
155
+
156
+
157
+ def parse_scene(
158
+ scene,
159
+ dataset_root,
160
+ output_root,
161
+ ignore_index=-1,
162
+ grid_size=None,
163
+ fuse_prsp=True,
164
+ fuse_pano=True,
165
+ vis=False,
166
+ ):
167
+ assert fuse_prsp or fuse_pano
168
+ reader = Structured3DReader(
169
+ [
170
+ os.path.join(dataset_root, f)
171
+ for f in os.listdir(dataset_root)
172
+ if f.endswith(".zip")
173
+ ]
174
+ )
175
+ scene_id = int(os.path.basename(scene).split("_")[-1])
176
+ if scene_id < 3000:
177
+ split = "train"
178
+ elif 3000 <= scene_id < 3250:
179
+ split = "val"
180
+ else:
181
+ split = "test"
182
+
183
+ print(f"Processing: {scene} in {split}")
184
+ rooms = reader.listdir(os.path.join("Structured3D", scene, "2D_rendering"))
185
+ for room in rooms:
186
+ room_path = os.path.join("Structured3D", scene, "2D_rendering", room)
187
+ coord_list = list()
188
+ color_list = list()
189
+ normal_list = list()
190
+ segment_list = list()
191
+ if fuse_prsp:
192
+ prsp_path = os.path.join(room_path, "perspective", "full")
193
+ frames = reader.listdir(prsp_path)
194
+
195
+ for frame in frames:
196
+ try:
197
+ cam_r, cam_t, cam_f = reader.read_camera(
198
+ os.path.join(prsp_path, frame, "camera_pose.txt")
199
+ )
200
+ depth = reader.read_depth(
201
+ os.path.join(prsp_path, frame, "depth.png")
202
+ )
203
+ color = reader.read_color(
204
+ os.path.join(prsp_path, frame, "rgb_rawlight.png")
205
+ )
206
+ segment = reader.read_segment(
207
+ os.path.join(prsp_path, frame, "semantic.png")
208
+ )
209
+ except Exception:
210
+ print(
211
+ f"Skipping {scene}_room{room}_frame{frame} perspective view due to loading error"
212
+ )
213
+ else:
214
+ fx, fy = cam_f
215
+ height, width = depth.shape[0], depth.shape[1]
216
+ pixel = np.transpose(np.indices((width, height)), (2, 1, 0))
217
+ pixel = pixel.reshape((-1, 2))
218
+ pixel = np.hstack((pixel, np.ones((pixel.shape[0], 1))))
219
+ k = np.diag([1.0, 1.0, 1.0])
220
+
221
+ k[0, 2] = width / 2
222
+ k[1, 2] = height / 2
223
+
224
+ k[0, 0] = k[0, 2] / np.tan(fx)
225
+ k[1, 1] = k[1, 2] / np.tan(fy)
226
+ coord = (
227
+ depth.reshape((-1, 1)) * (np.linalg.inv(k) @ pixel.T).T
228
+ ).reshape(height, width, 3)
229
+ coord = coord @ np.array([[0, 0, 1], [0, -1, 0], [1, 0, 0]])
230
+ normal = normal_from_cross_product(coord)
231
+
232
+ # Filtering invalid points
233
+ view_dist = np.maximum(
234
+ np.linalg.norm(coord, axis=-1, keepdims=True), float(10e-5)
235
+ )
236
+ cosine_dist = np.sum(
237
+ (coord * normal / view_dist), axis=-1, keepdims=True
238
+ )
239
+ cosine_dist = np.abs(cosine_dist)
240
+ mask = ((cosine_dist > 0.15) & (depth < 65535) & (segment > 0))[
241
+ ..., 0
242
+ ].reshape(-1)
243
+
244
+ coord = np.matmul(coord / 1000, cam_r.T) + cam_t
245
+ normal = normal_from_cross_product(coord)
246
+
247
+ if sum(mask) > 0:
248
+ coord_list.append(coord.reshape(-1, 3)[mask])
249
+ color_list.append(color.reshape(-1, 3)[mask])
250
+ normal_list.append(normal.reshape(-1, 3)[mask])
251
+ segment_list.append(segment.reshape(-1, 1)[mask])
252
+ else:
253
+ print(
254
+ f"Skipping {scene}_room{room}_frame{frame} perspective view due to all points are filtered out"
255
+ )
256
+
257
+ if fuse_pano:
258
+ pano_path = os.path.join(room_path, "panorama")
259
+ try:
260
+ _, cam_t, _ = reader.read_camera(
261
+ os.path.join(pano_path, "camera_xyz.txt")
262
+ )
263
+ depth = reader.read_depth(os.path.join(pano_path, "full", "depth.png"))
264
+ color = reader.read_color(
265
+ os.path.join(pano_path, "full", "rgb_rawlight.png")
266
+ )
267
+ segment = reader.read_segment(
268
+ os.path.join(pano_path, "full", "semantic.png")
269
+ )
270
+ except Exception:
271
+ print(f"Skipping {scene}_room{room} panorama view due to loading error")
272
+ else:
273
+ p_h, p_w = depth.shape[:2]
274
+ p_a = np.arange(p_w, dtype=np.float32) / p_w * 2 * np.pi - np.pi
275
+ p_b = np.arange(p_h, dtype=np.float32) / p_h * np.pi * -1 + np.pi / 2
276
+ p_a = np.tile(p_a[None], [p_h, 1])[..., np.newaxis]
277
+ p_b = np.tile(p_b[:, None], [1, p_w])[..., np.newaxis]
278
+ p_a_sin, p_a_cos, p_b_sin, p_b_cos = (
279
+ np.sin(p_a),
280
+ np.cos(p_a),
281
+ np.sin(p_b),
282
+ np.cos(p_b),
283
+ )
284
+ x = depth * p_a_cos * p_b_cos
285
+ y = depth * p_b_sin
286
+ z = depth * p_a_sin * p_b_cos
287
+ coord = np.concatenate([x, y, z], axis=-1) / 1000
288
+ normal = normal_from_cross_product(coord)
289
+
290
+ # Filtering invalid points
291
+ view_dist = np.maximum(
292
+ np.linalg.norm(coord, axis=-1, keepdims=True), float(10e-5)
293
+ )
294
+ cosine_dist = np.sum(
295
+ (coord * normal / view_dist), axis=-1, keepdims=True
296
+ )
297
+ cosine_dist = np.abs(cosine_dist)
298
+ mask = ((cosine_dist > 0.15) & (depth < 65535) & (segment > 0))[
299
+ ..., 0
300
+ ].reshape(-1)
301
+ coord = coord + cam_t
302
+
303
+ if sum(mask) > 0:
304
+ coord_list.append(coord.reshape(-1, 3)[mask])
305
+ color_list.append(color.reshape(-1, 3)[mask])
306
+ normal_list.append(normal.reshape(-1, 3)[mask])
307
+ segment_list.append(segment.reshape(-1, 1)[mask])
308
+ else:
309
+ print(
310
+ f"Skipping {scene}_room{room} panorama view due to all points are filtered out"
311
+ )
312
+
313
+ if len(coord_list) > 0:
314
+ coord = np.concatenate(coord_list, axis=0)
315
+ coord = coord @ np.array([[1, 0, 0], [0, 0, 1], [0, 1, 0]])
316
+ color = np.concatenate(color_list, axis=0)
317
+ normal = np.concatenate(normal_list, axis=0)
318
+ normal = normal @ np.array([[1, 0, 0], [0, 0, 1], [0, 1, 0]])
319
+ segment = np.concatenate(segment_list, axis=0)
320
+ segment25 = np.ones_like(segment, dtype=np.int64) * ignore_index
321
+ for idx, value in enumerate(VALID_CLASS_IDS_25):
322
+ mask = np.all(segment == value, axis=-1)
323
+ segment25[mask] = idx
324
+
325
+ data_dict = dict(
326
+ coord=coord.astype(np.float32),
327
+ color=color.astype(np.uint8),
328
+ normal=normal.astype(np.float32),
329
+ segment=segment25.astype(np.int16),
330
+ )
331
+ # Grid sampling data
332
+ if grid_size is not None:
333
+ grid_coord = np.floor(coord / grid_size).astype(int)
334
+ _, idx = np.unique(grid_coord, axis=0, return_index=True)
335
+ coord = coord[idx]
336
+ for key in data_dict.keys():
337
+ data_dict[key] = data_dict[key][idx]
338
+
339
+ # Save data
340
+ save_path = os.path.join(
341
+ output_root, split, os.path.basename(scene), f"room_{room}"
342
+ )
343
+ os.makedirs(save_path, exist_ok=True)
344
+ for key in data_dict.keys():
345
+ np.save(os.path.join(save_path, f"{key}.npy"), data_dict[key])
346
+
347
+ if vis:
348
+ from pointcept.utils.visualization import save_point_cloud
349
+
350
+ os.makedirs("./vis", exist_ok=True)
351
+ save_point_cloud(
352
+ coord, color / 255, f"./vis/{scene}_room{room}_color.ply"
353
+ )
354
+ save_point_cloud(
355
+ coord, (normal + 1) / 2, f"./vis/{scene}_room{room}_normal.ply"
356
+ )
357
+ else:
358
+ print(f"Skipping {scene}_room{room} due to no valid points")
359
+
360
+
361
+ if __name__ == "__main__":
362
+ parser = argparse.ArgumentParser()
363
+ parser.add_argument(
364
+ "--dataset_root",
365
+ required=True,
366
+ help="Path to the ScanNet dataset containing scene folders.",
367
+ )
368
+ parser.add_argument(
369
+ "--output_root",
370
+ required=True,
371
+ help="Output path where train/val folders will be located.",
372
+ )
373
+ parser.add_argument(
374
+ "--num_workers",
375
+ default=mp.cpu_count(),
376
+ type=int,
377
+ help="Num workers for preprocessing.",
378
+ )
379
+ parser.add_argument(
380
+ "--grid_size", default=None, type=float, help="Grid size for grid sampling."
381
+ )
382
+ parser.add_argument("--ignore_index", default=-1, type=float, help="Ignore index.")
383
+ parser.add_argument(
384
+ "--fuse_prsp", action="store_true", help="Whether fuse perspective view."
385
+ )
386
+ parser.add_argument(
387
+ "--fuse_pano", action="store_true", help="Whether fuse panorama view."
388
+ )
389
+ config = parser.parse_args()
390
+
391
+ reader = Structured3DReader(
392
+ [
393
+ os.path.join(config.dataset_root, f)
394
+ for f in os.listdir(config.dataset_root)
395
+ if f.endswith(".zip")
396
+ ]
397
+ )
398
+
399
+ scenes_list = reader.listdir("Structured3D")
400
+ scenes_list = sorted(scenes_list)
401
+ os.makedirs(os.path.join(config.output_root, "train"), exist_ok=True)
402
+ os.makedirs(os.path.join(config.output_root, "val"), exist_ok=True)
403
+ os.makedirs(os.path.join(config.output_root, "test"), exist_ok=True)
404
+
405
+ # Preprocess data.
406
+ print("Processing scenes...")
407
+ pool = ProcessPoolExecutor(max_workers=config.num_workers)
408
+ _ = list(
409
+ pool.map(
410
+ parse_scene,
411
+ scenes_list,
412
+ repeat(config.dataset_root),
413
+ repeat(config.output_root),
414
+ repeat(config.ignore_index),
415
+ repeat(config.grid_size),
416
+ repeat(config.fuse_prsp),
417
+ repeat(config.fuse_pano),
418
+ )
419
+ )
420
+ pool.shutdown()
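
A single-pixel sketch of the panorama back-projection used above: the column index maps to azimuth, the row index to elevation, and the depth (millimetres) scales the unit direction vector. Numbers are illustrative.

import numpy as np

p_h, p_w = 512, 1024                        # panorama height/width
row, col, depth_mm = 256, 512, 3000.0
a = col / p_w * 2 * np.pi - np.pi           # azimuth in [-pi, pi)
b = row / p_h * np.pi * -1 + np.pi / 2      # elevation in (-pi/2, pi/2]
direction = np.array([np.cos(b) * np.cos(a), np.sin(b), np.cos(b) * np.sin(a)])
xyz = depth_mm / 1000.0 * direction         # image centre -> [3, 0, 0]
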
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/waymo/3d_semseg_test_set_frames.txt ADDED
The diff for this file is too large to render. See raw diff
 
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/preprocessing/waymo/preprocess_waymo.py ADDED
@@ -0,0 +1,387 @@
1
+ """
2
+ Preprocessing Script for the Waymo Open Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import warnings
9
+
10
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
11
+
12
+ import os
13
+
14
+ os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
15
+ os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
16
+
17
+ import argparse
18
+ import numpy as np
19
+ import tensorflow.compat.v1 as tf
20
+ from pathlib import Path
21
+ from waymo_open_dataset.utils import frame_utils
22
+ from waymo_open_dataset.utils import transform_utils
23
+ from waymo_open_dataset.utils import range_image_utils
24
+ from waymo_open_dataset import dataset_pb2 as open_dataset
25
+ import glob
26
+ import multiprocessing as mp
27
+ from concurrent.futures import ProcessPoolExecutor
28
+ from itertools import repeat
29
+
30
+
31
+ def create_lidar(frame):
32
+ """Parse and save the lidar data in psd format.
33
+ Args:
34
+ frame (:obj:`Frame`): Open dataset frame proto.
35
+ """
36
+ (
37
+ range_images,
38
+ camera_projections,
39
+ segmentation_labels,
40
+ range_image_top_pose,
41
+ ) = frame_utils.parse_range_image_and_camera_projection(frame)
42
+
43
+ points, cp_points, valid_masks = convert_range_image_to_point_cloud(
44
+ frame,
45
+ range_images,
46
+ camera_projections,
47
+ range_image_top_pose,
48
+ keep_polar_features=True,
49
+ )
50
+ points_ri2, cp_points_ri2, valid_masks_ri2 = convert_range_image_to_point_cloud(
51
+ frame,
52
+ range_images,
53
+ camera_projections,
54
+ range_image_top_pose,
55
+ ri_index=1,
56
+ keep_polar_features=True,
57
+ )
58
+
59
+ # 3d points in vehicle frame.
60
+ points_all = np.concatenate(points, axis=0)
61
+ points_all_ri2 = np.concatenate(points_ri2, axis=0)
62
+ # point labels.
63
+
64
+ points_all = np.concatenate([points_all, points_all_ri2], axis=0)
65
+
66
+ velodyne = np.c_[points_all[:, 3:6], points_all[:, 1]]
67
+ velodyne = velodyne.reshape((velodyne.shape[0] * velodyne.shape[1]))
68
+
69
+ valid_masks = [valid_masks, valid_masks_ri2]
70
+ return velodyne, valid_masks
71
+
72
+
73
+ def create_label(frame):
74
+ (
75
+ range_images,
76
+ camera_projections,
77
+ segmentation_labels,
78
+ range_image_top_pose,
79
+ ) = frame_utils.parse_range_image_and_camera_projection(frame)
80
+
81
+ point_labels = convert_range_image_to_point_cloud_labels(
82
+ frame, range_images, segmentation_labels
83
+ )
84
+ point_labels_ri2 = convert_range_image_to_point_cloud_labels(
85
+ frame, range_images, segmentation_labels, ri_index=1
86
+ )
87
+
88
+ # point labels.
89
+ point_labels_all = np.concatenate(point_labels, axis=0)
90
+ point_labels_all_ri2 = np.concatenate(point_labels_ri2, axis=0)
91
+ point_labels_all = np.concatenate([point_labels_all, point_labels_all_ri2], axis=0)
92
+
93
+ labels = point_labels_all
94
+ return labels
95
+
96
+
97
+ def convert_range_image_to_cartesian(
98
+ frame, range_images, range_image_top_pose, ri_index=0, keep_polar_features=False
99
+ ):
100
+ """Convert range images from polar coordinates to Cartesian coordinates.
101
+
102
+ Args:
103
+ frame: open dataset frame
104
+ range_images: A dict of {laser_name, [range_image_first_return,
105
+ range_image_second_return]}.
106
+ range_image_top_pose: range image pixel pose for top lidar.
107
+ ri_index: 0 for the first return, 1 for the second return.
108
+ keep_polar_features: If true, keep the features from the polar range image
109
+ (i.e. range, intensity, and elongation) as the first features in the
110
+ output range image.
111
+
112
+ Returns:
113
+ dict of {laser_name, (H, W, D)} range images in Cartesian coordinates. D
114
+ will be 3 if keep_polar_features is False (x, y, z) and 6 if
115
+ keep_polar_features is True (range, intensity, elongation, x, y, z).
116
+ """
117
+ cartesian_range_images = {}
118
+ frame_pose = tf.convert_to_tensor(
119
+ value=np.reshape(np.array(frame.pose.transform), [4, 4])
120
+ )
121
+
122
+ # [H, W, 6]
123
+ range_image_top_pose_tensor = tf.reshape(
124
+ tf.convert_to_tensor(value=range_image_top_pose.data),
125
+ range_image_top_pose.shape.dims,
126
+ )
127
+ # [H, W, 3, 3]
128
+ range_image_top_pose_tensor_rotation = transform_utils.get_rotation_matrix(
129
+ range_image_top_pose_tensor[..., 0],
130
+ range_image_top_pose_tensor[..., 1],
131
+ range_image_top_pose_tensor[..., 2],
132
+ )
133
+ range_image_top_pose_tensor_translation = range_image_top_pose_tensor[..., 3:]
134
+ range_image_top_pose_tensor = transform_utils.get_transform(
135
+ range_image_top_pose_tensor_rotation, range_image_top_pose_tensor_translation
136
+ )
137
+
138
+ for c in frame.context.laser_calibrations:
139
+ range_image = range_images[c.name][ri_index]
140
+ if len(c.beam_inclinations) == 0: # pylint: disable=g-explicit-length-test
141
+ beam_inclinations = range_image_utils.compute_inclination(
142
+ tf.constant([c.beam_inclination_min, c.beam_inclination_max]),
143
+ height=range_image.shape.dims[0],
144
+ )
145
+ else:
146
+ beam_inclinations = tf.constant(c.beam_inclinations)
147
+
148
+ beam_inclinations = tf.reverse(beam_inclinations, axis=[-1])
149
+ extrinsic = np.reshape(np.array(c.extrinsic.transform), [4, 4])
150
+
151
+ range_image_tensor = tf.reshape(
152
+ tf.convert_to_tensor(value=range_image.data), range_image.shape.dims
153
+ )
154
+ pixel_pose_local = None
155
+ frame_pose_local = None
156
+ if c.name == open_dataset.LaserName.TOP:
157
+ pixel_pose_local = range_image_top_pose_tensor
158
+ pixel_pose_local = tf.expand_dims(pixel_pose_local, axis=0)
159
+ frame_pose_local = tf.expand_dims(frame_pose, axis=0)
160
+ range_image_cartesian = range_image_utils.extract_point_cloud_from_range_image(
161
+ tf.expand_dims(range_image_tensor[..., 0], axis=0),
162
+ tf.expand_dims(extrinsic, axis=0),
163
+ tf.expand_dims(tf.convert_to_tensor(value=beam_inclinations), axis=0),
164
+ pixel_pose=pixel_pose_local,
165
+ frame_pose=frame_pose_local,
166
+ )
167
+
168
+ range_image_cartesian = tf.squeeze(range_image_cartesian, axis=0)
169
+
170
+ if keep_polar_features:
171
+ # If we want to keep the polar coordinate features of range, intensity,
172
+ # and elongation, concatenate them to be the initial dimensions of the
173
+ # returned Cartesian range image.
174
+ range_image_cartesian = tf.concat(
175
+ [range_image_tensor[..., 0:3], range_image_cartesian], axis=-1
176
+ )
177
+
178
+ cartesian_range_images[c.name] = range_image_cartesian
179
+
180
+ return cartesian_range_images
181
+
182
+
183
+ def convert_range_image_to_point_cloud(
184
+ frame,
185
+ range_images,
186
+ camera_projections,
187
+ range_image_top_pose,
188
+ ri_index=0,
189
+ keep_polar_features=False,
190
+ ):
191
+ """Convert range images to point cloud.
192
+
193
+ Args:
194
+ frame: open dataset frame
195
+ range_images: A dict of {laser_name, [range_image_first_return,
196
+ range_image_second_return]}.
197
+ camera_projections: A dict of {laser_name,
198
+ [camera_projection_from_first_return,
199
+ camera_projection_from_second_return]}.
200
+ range_image_top_pose: range image pixel pose for top lidar.
201
+ ri_index: 0 for the first return, 1 for the second return.
202
+ keep_polar_features: If true, keep the features from the polar range image
203
+ (i.e. range, intensity, and elongation) as the first features in the
204
+ output range image.
205
+
206
+ Returns:
207
+ points: {[N, 3]} list of 3d lidar points of length 5 (number of lidars).
208
+ (NOTE: Will be {[N, 6]} if keep_polar_features is true.
209
+ cp_points: {[N, 6]} list of camera projections of length 5
210
+ (number of lidars).
211
+ """
212
+ calibrations = sorted(frame.context.laser_calibrations, key=lambda c: c.name)
213
+ points = []
214
+ cp_points = []
215
+ valid_masks = []
216
+
217
+ cartesian_range_images = convert_range_image_to_cartesian(
218
+ frame, range_images, range_image_top_pose, ri_index, keep_polar_features
219
+ )
220
+
221
+ for c in calibrations:
222
+ range_image = range_images[c.name][ri_index]
223
+ range_image_tensor = tf.reshape(
224
+ tf.convert_to_tensor(value=range_image.data), range_image.shape.dims
225
+ )
226
+ range_image_mask = range_image_tensor[..., 0] > 0
227
+
228
+ range_image_cartesian = cartesian_range_images[c.name]
229
+ points_tensor = tf.gather_nd(
230
+ range_image_cartesian, tf.compat.v1.where(range_image_mask)
231
+ )
232
+
233
+ cp = camera_projections[c.name][ri_index]
234
+ cp_tensor = tf.reshape(tf.convert_to_tensor(value=cp.data), cp.shape.dims)
235
+ cp_points_tensor = tf.gather_nd(cp_tensor, tf.compat.v1.where(range_image_mask))
236
+ points.append(points_tensor.numpy())
237
+ cp_points.append(cp_points_tensor.numpy())
238
+ valid_masks.append(range_image_mask.numpy())
239
+
240
+ return points, cp_points, valid_masks
241
+
242
+
243
+ def convert_range_image_to_point_cloud_labels(
244
+ frame, range_images, segmentation_labels, ri_index=0
245
+ ):
246
+ """Convert segmentation labels from range images to point clouds.
247
+
248
+ Args:
249
+ frame: open dataset frame
250
+ range_images: A dict of {laser_name, [range_image_first_return,
251
+ range_image_second_return]}.
252
+ segmentation_labels: A dict of {laser_name, [range_image_first_return,
253
+ range_image_second_return]}.
254
+ ri_index: 0 for the first return, 1 for the second return.
255
+
256
+ Returns:
257
+ point_labels: {[N, 2]} list of 3d lidar points's segmentation labels. 0 for
258
+ points that are not labeled.
259
+ """
260
+ calibrations = sorted(frame.context.laser_calibrations, key=lambda c: c.name)
261
+ point_labels = []
262
+ for c in calibrations:
263
+ range_image = range_images[c.name][ri_index]
264
+ range_image_tensor = tf.reshape(
265
+ tf.convert_to_tensor(range_image.data), range_image.shape.dims
266
+ )
267
+ range_image_mask = range_image_tensor[..., 0] > 0
268
+
269
+ if c.name in segmentation_labels:
270
+ sl = segmentation_labels[c.name][ri_index]
271
+ sl_tensor = tf.reshape(tf.convert_to_tensor(sl.data), sl.shape.dims)
272
+ sl_points_tensor = tf.gather_nd(sl_tensor, tf.where(range_image_mask))
273
+ else:
274
+ num_valid_point = tf.math.reduce_sum(tf.cast(range_image_mask, tf.int32))
275
+ sl_points_tensor = tf.zeros([num_valid_point, 2], dtype=tf.int32)
276
+
277
+ point_labels.append(sl_points_tensor.numpy())
278
+ return point_labels
279
+
280
+
281
+ def handle_process(file_path, output_root, test_frame_list):
282
+ file = os.path.basename(file_path)
283
+ split = os.path.basename(os.path.dirname(file_path))
284
+ print(f"Parsing {split}/{file}")
285
+ save_path = Path(output_root) / split / file.split(".")[0]
286
+
287
+ data_group = tf.data.TFRecordDataset(file_path, compression_type="")
288
+ for data in data_group:
289
+ frame = open_dataset.Frame()
290
+ frame.ParseFromString(bytearray(data.numpy()))
291
+ context_name = frame.context.name
292
+ timestamp = str(frame.timestamp_micros)
293
+
294
+ if split != "testing":
295
+ # for training and validation frame, extract labelled frame
296
+ if not frame.lasers[0].ri_return1.segmentation_label_compressed:
297
+ continue
298
+ else:
299
+ # for testing frame, extract frame in test_frame_list
300
+ if f"{context_name},{timestamp}" not in test_frame_list:
301
+ continue
302
+
303
+ os.makedirs(save_path / timestamp, exist_ok=True)
304
+
305
+ # extract frame pass above check
306
+ point_cloud, valid_masks = create_lidar(frame)
307
+ point_cloud = point_cloud.reshape(-1, 4)
308
+ coord = point_cloud[:, :3]
309
+ strength = np.tanh(point_cloud[:, -1].reshape([-1, 1]))
310
+ pose = np.array(frame.pose.transform, np.float32).reshape(4, 4)
311
+ mask = np.array(valid_masks, dtype=object)
312
+
313
+ np.save(save_path / timestamp / "coord.npy", coord)
314
+ np.save(save_path / timestamp / "strength.npy", strength)
315
+ np.save(save_path / timestamp / "pose.npy", pose)
316
+
317
+ # save mask for reverse prediction
318
+ if split != "training":
319
+ np.save(save_path / timestamp / "mask.npy", mask)
320
+
321
+ # save label
322
+ if split != "testing":
323
+ # ignore TYPE_UNDEFINED, ignore_index 0 -> -1
324
+ label = create_label(frame)[:, 1].reshape([-1]) - 1
325
+ np.save(save_path / timestamp / "segment.npy", label)
326
+
327
+
328
+ if __name__ == "__main__":
329
+ parser = argparse.ArgumentParser()
330
+ parser.add_argument(
331
+ "--dataset_root",
332
+ required=True,
333
+ help="Path to the Waymo dataset",
334
+ )
335
+ parser.add_argument(
336
+ "--output_root",
337
+ required=True,
338
+ help="Output path where train/val folders will be located",
339
+ )
340
+ parser.add_argument(
341
+ "--splits",
342
+ required=True,
343
+ nargs="+",
344
+ choices=["training", "validation", "testing"],
345
+ help="Splits need to process ([training, validation, testing]).",
346
+ )
347
+ parser.add_argument(
348
+ "--num_workers",
349
+ default=mp.cpu_count(),
350
+ type=int,
351
+ help="Num workers for preprocessing.",
352
+ )
353
+ config = parser.parse_args()
354
+
355
+ # load file list
356
+ file_list = glob.glob(
357
+ os.path.join(os.path.abspath(config.dataset_root), "*", "*.tfrecord")
358
+ )
359
+ assert len(file_list) == 1150
360
+
361
+ # Create output directories
362
+ for split in config.splits:
363
+ os.makedirs(os.path.join(config.output_root, split), exist_ok=True)
364
+
365
+ file_list = [
366
+ file
367
+ for file in file_list
368
+ if os.path.basename(os.path.dirname(file)) in config.splits
369
+ ]
370
+
371
+ # Load test frame list
372
+ test_frame_file = os.path.join(
373
+ os.path.dirname(__file__), "3d_semseg_test_set_frames.txt"
374
+ )
375
+ test_frame_list = [x.rstrip() for x in (open(test_frame_file, "r").readlines())]
376
+
377
+ # Preprocess data.
378
+ print("Processing scenes...")
379
+ pool = ProcessPoolExecutor(max_workers=config.num_workers)
380
+ _ = list(
381
+ pool.map(
382
+ handle_process,
383
+ file_list,
384
+ repeat(config.output_root),
385
+ repeat(test_frame_list),
386
+ )
387
+ )
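
Two small conventions in handle_process() worth calling out: lidar intensity is squashed with tanh before saving, and semantic labels are shifted by one so that TYPE_UNDEFINED (0) becomes the ignore index -1. A toy illustration with made-up values:

import numpy as np

intensity = np.array([0.0, 0.5, 4.0, 50.0])
strength = np.tanh(intensity).reshape([-1, 1])  # squashed into [0, 1)

raw_label = np.array([0, 1, 14, 22])  # second column of create_label() output
segment = raw_label - 1               # -> [-1, 0, 13, 21]
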
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/s3dis.py ADDED
@@ -0,0 +1,18 @@
1
+ """
2
+ S3DIS Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import os
9
+ from .defaults import DefaultDataset
10
+ from .builder import DATASETS
11
+
12
+
13
+ @DATASETS.register_module()
14
+ class S3DISDataset(DefaultDataset):
15
+ def get_data_name(self, idx):
16
+ remain, room_name = os.path.split(self.data_list[idx % len(self.data_list)])
17
+ remain, area_name = os.path.split(remain)
18
+ return f"{area_name}-{room_name}"
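
The two os.path.split calls recover the area and room names from a data_list entry; for a path like data/s3dis/Area_1/office_1 (layout assumed):

import os

path = "data/s3dis/Area_1/office_1"
remain, room_name = os.path.split(path)
remain, area_name = os.path.split(remain)
print(f"{area_name}-{room_name}")  # Area_1-office_1
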
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannet.py ADDED
@@ -0,0 +1,290 @@
1
+ """
2
+ ScanNet20 / ScanNet200 / ScanNet Data Efficient Dataset
3
+
4
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
5
+ Please cite our work if the code is helpful to you.
6
+ """
7
+
8
+ import glob
9
+ import os
10
+ from collections.abc import Sequence
11
+ from copy import deepcopy
12
+
13
+ import numpy as np
14
+ import torch
15
+ from torch.utils.data import Dataset
16
+
17
+ from pointcept.utils.cache import shared_dict
18
+ from pointcept.utils.logger import get_root_logger
19
+
20
+ from .builder import DATASETS
21
+ from .defaults import DefaultDatasetV2
22
+ from .preprocessing.scannet.meta_data.scannet200_constants import VALID_CLASS_IDS_20, VALID_CLASS_IDS_200
23
+ from .transform import TRANSFORMS, Compose
24
+
25
+
26
+ @DATASETS.register_module()
27
+ class ScanNetDataset(Dataset):
28
+ class2id = np.array(VALID_CLASS_IDS_20)
29
+
30
+ def __init__(
31
+ self,
32
+ split="train",
33
+ data_root="data/scannet",
34
+ transform=None,
35
+ lr_file=None,
36
+ la_file=None,
37
+ ignore_index=-1,
38
+ test_mode=False,
39
+ test_cfg=None,
40
+ cache=False,
41
+ loop=1,
42
+ ):
43
+ super(ScanNetDataset, self).__init__()
44
+ self.data_root = data_root
45
+ self.split = split
46
+ self.transform = Compose(transform)
47
+ self.cache = cache
48
+ self.loop = loop if not test_mode else 1  # force loop = 1 in test mode
49
+ self.test_mode = test_mode
50
+ self.test_cfg = test_cfg if test_mode else None
51
+
52
+ if test_mode:
53
+ self.test_voxelize = TRANSFORMS.build(self.test_cfg.voxelize)
54
+ self.test_crop = TRANSFORMS.build(
55
+ self.test_cfg.crop) if self.test_cfg.crop else None
56
+ self.post_transform = Compose(self.test_cfg.post_transform)
57
+ self.aug_transform = [
58
+ Compose(aug) for aug in self.test_cfg.aug_transform
59
+ ]
60
+
61
+ if lr_file:
62
+ self.data_list = [
63
+ os.path.join(data_root, "train", name + ".pth")
64
+ for name in np.loadtxt(lr_file, dtype=str)
65
+ ]
66
+ else:
67
+ self.data_list = self.get_data_list()
68
+ self.la = torch.load(la_file) if la_file else None
69
+ self.ignore_index = ignore_index
70
+ logger = get_root_logger()
71
+ logger.info("Totally {} x {} samples in {} set.".format(
72
+ len(self.data_list), self.loop, split))
73
+
74
+ def get_data_list(self):
75
+ if isinstance(self.split, str):
76
+ data_list = glob.glob(
77
+ os.path.join(self.data_root, self.split, "*.pth"))
78
+ elif isinstance(self.split, Sequence):
79
+ data_list = []
80
+ for split in self.split:
81
+ data_list += glob.glob(
82
+ os.path.join(self.data_root, split, "*.pth"))
83
+ else:
84
+ raise NotImplementedError
85
+ return data_list
86
+
87
+ def get_data(self, idx):
88
+ data_path = self.data_list[idx % len(self.data_list)]
89
+ if not self.cache:
90
+ data = torch.load(data_path)
91
+ else:
92
+ data_name = data_path.replace(os.path.dirname(self.data_root),
93
+ "").split(".")[0]
94
+ cache_name = "pointcept" + data_name.replace(os.path.sep, "-")
95
+ data = shared_dict(cache_name)
96
+ coord = data["coord"]
97
+ color = data["color"]
98
+ normal = data["normal"]
99
+ scene_id = data["scene_id"]
100
+ if "semantic_gt20" in data.keys():
101
+ segment = data["semantic_gt20"].reshape([-1])
102
+ else:
103
+ segment = np.ones(coord.shape[0]) * -1
104
+ if "instance_gt" in data.keys():
105
+ instance = data["instance_gt"].reshape([-1])
106
+ else:
107
+ instance = np.ones(coord.shape[0]) * -1
108
+ data_dict = dict(
109
+ coord=coord,
110
+ normal=normal,
111
+ color=color,
112
+ segment=segment,
113
+ instance=instance,
114
+ scene_id=scene_id,
115
+ )
116
+ if self.la:
117
+ sampled_index = self.la[self.get_data_name(idx)]
118
+ mask = np.ones_like(segment).astype(np.bool)
119
+ mask[sampled_index] = False
120
+ segment[mask] = self.ignore_index
121
+ data_dict["segment"] = segment
122
+ data_dict["sampled_index"] = sampled_index
123
+ return data_dict
124
+
125
+ def get_data_name(self, idx):
126
+ return os.path.basename(
127
+ self.data_list[idx % len(self.data_list)]).split(".")[0]
128
+
129
+ def prepare_train_data(self, idx):
130
+ # load data
131
+ data_dict = self.get_data(idx)
132
+ data_dict = self.transform(data_dict)
133
+ return data_dict
134
+
135
+ def prepare_test_data(self, idx):
136
+ # load data
137
+ data_dict = self.get_data(idx)
138
+ segment = data_dict.pop("segment")
139
+ data_dict = self.transform(data_dict)
140
+ data_dict_list = []
141
+ for aug in self.aug_transform:
142
+ data_dict_list.append(aug(deepcopy(data_dict)))
143
+
144
+ input_dict_list = []
145
+ for data in data_dict_list:
146
+ data_part_list = self.test_voxelize(data)
147
+ for data_part in data_part_list:
148
+ if self.test_crop:
149
+ data_part = self.test_crop(data_part)
150
+ else:
151
+ data_part = [data_part]
152
+ input_dict_list += data_part
153
+
154
+ for i in range(len(input_dict_list)):
155
+ input_dict_list[i] = self.post_transform(input_dict_list[i])
156
+ data_dict = dict(fragment_list=input_dict_list,
157
+ segment=segment,
158
+ name=self.get_data_name(idx))
159
+ return data_dict
160
+
161
+ def __getitem__(self, idx):
162
+ if self.test_mode:
163
+ return self.prepare_test_data(idx)
164
+ else:
165
+ return self.prepare_train_data(idx)
166
+
167
+ def __len__(self):
168
+ return len(self.data_list) * self.loop
169
+
170
+
171
+ @DATASETS.register_module()
172
+ class ScanNet200Dataset(ScanNetDataset):
173
+ class2id = np.array(VALID_CLASS_IDS_200)
174
+
175
+ def get_data(self, idx):
176
+ data = torch.load(self.data_list[idx % len(self.data_list)])
177
+ coord = data["coord"]
178
+ color = data["color"]
179
+ normal = data["normal"]
180
+ scene_id = data["scene_id"]
181
+ if "semantic_gt200" in data.keys():
182
+ segment = data["semantic_gt200"].reshape([-1])
183
+ else:
184
+ segment = np.ones(coord.shape[0]) * -1
185
+ if "instance_gt" in data.keys():
186
+ instance = data["instance_gt"].reshape([-1])
187
+ else:
188
+ instance = np.ones(coord.shape[0]) * -1
189
+ data_dict = dict(
190
+ coord=coord,
191
+ normal=normal,
192
+ color=color,
193
+ segment=segment,
194
+ instance=instance,
195
+ scene_id=scene_id,
196
+ )
197
+ if self.la:
198
+ sampled_index = self.la[self.get_data_name(idx)]
199
+ segment[sampled_index] = self.ignore_index
200
+ data_dict["segment"] = segment
201
+ data_dict["sampled_index"] = sampled_index
202
+ return data_dict
203
+
204
+
205
+ @DATASETS.register_module()
206
+ class ScanNetDatasetV2(DefaultDatasetV2):
207
+ VALID_ASSETS = [
208
+ "coord",
209
+ "color",
210
+ "normal",
211
+ "segment20",
212
+ "instance",
213
+ ]
214
+ class2id = np.array(VALID_CLASS_IDS_20)
215
+
216
+ def __init__(
217
+ self,
218
+ lr_file=None,
219
+ la_file=None,
220
+ **kwargs,
221
+ ):
222
+ self.lr = np.loadtxt(lr_file,
223
+ dtype=str) if lr_file is not None else None
224
+ self.la = torch.load(la_file) if la_file is not None else None
225
+ super().__init__(**kwargs)
226
+
227
+ def get_data_list(self):
228
+ if self.lr is None:
229
+ data_list = super().get_data_list()
230
+ else:
231
+ data_list = [
232
+ os.path.join(self.data_root, "train", name) for name in self.lr
233
+ ]
234
+ return data_list
235
+
236
+ def get_data(self, idx):
237
+ data_path = self.data_list[idx % len(self.data_list)]
238
+ name = self.get_data_name(idx)
239
+ if self.cache:
240
+ cache_name = f"pointcept-{name}"
241
+ return shared_dict(cache_name)
242
+
243
+ data_dict = {}
244
+ assets = os.listdir(data_path)
245
+ for asset in assets:
246
+ if not asset.endswith(".npy"):
247
+ continue
248
+ if asset[:-4] not in self.VALID_ASSETS:
249
+ continue
250
+ data_dict[asset[:-4]] = np.load(os.path.join(data_path, asset))
251
+ data_dict["name"] = name
252
+ data_dict["coord"] = data_dict["coord"].astype(np.float32)
253
+ data_dict["color"] = data_dict["color"].astype(np.float32)
254
+ data_dict["normal"] = data_dict["normal"].astype(np.float32)
255
+
256
+ if "segment20" in data_dict.keys():
257
+ data_dict["segment"] = (data_dict.pop("segment20").reshape(
258
+ [-1]).astype(np.int32))
259
+ elif "segment200" in data_dict.keys():
260
+ data_dict["segment"] = (data_dict.pop("segment200").reshape(
261
+ [-1]).astype(np.int32))
262
+ else:
263
+ data_dict["segment"] = (
264
+ np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1)
265
+
266
+ if "instance" in data_dict.keys():
267
+ data_dict["instance"] = (data_dict.pop("instance").reshape(
268
+ [-1]).astype(np.int32))
269
+ else:
270
+ data_dict["instance"] = (
271
+ np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1)
272
+ if self.la:
273
+ sampled_index = self.la[self.get_data_name(idx)]
274
+ mask = np.ones_like(data_dict["segment"], dtype=bool)
275
+ mask[sampled_index] = False
276
+ data_dict["segment"][mask] = self.ignore_index
277
+ data_dict["sampled_index"] = sampled_index
278
+ return data_dict
279
+
280
+
281
+ @DATASETS.register_module()
282
+ class ScanNet200DatasetV2(ScanNetDatasetV2):
283
+ VALID_ASSETS = [
284
+ "coord",
285
+ "color",
286
+ "normal",
287
+ "segment200",
288
+ "instance",
289
+ ]
290
+ class2id = np.array(VALID_CLASS_IDS_200)
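
Note the division of labor between the two data-efficient options: lr_file shrinks the scene list (limited reconstructions), while la_file keeps annotations only at sampled point indices and maps everything else to ignore_index (limited annotations). A hedged sketch of an la_file's assumed structure, inferred from how get_data consumes it:

import numpy as np
import torch

# Assumed: a dict from scene name to the indices of annotated points.
la = {"scene0000_00": np.array([10, 42, 733])}
torch.save(la, "points_la.pth")  # hypothetical file name
# ScanNetDataset then sets segment[i] = ignore_index for every i not listed.
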
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannet_pair.py ADDED
@@ -0,0 +1,89 @@
+ """
+ ScanNet Pair Dataset (Frame-level contrastive view)
+
+ Refer to PointContrast
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import os
+ import glob
+ import numpy as np
+ import torch
+ from copy import deepcopy
+ from torch.utils.data import Dataset
+
+ from pointcept.utils.logger import get_root_logger
+ from .builder import DATASETS
+ from .transform import Compose, TRANSFORMS
+
+
+ @DATASETS.register_module()
+ class ScanNetPairDataset(Dataset):
+     def __init__(
+         self,
+         data_root="data/scannet_pair",
+         overlap_threshold=0.3,
+         view1_transform=None,
+         view2_transform=None,
+         loop=1,
+         **kwargs
+     ):
+         super(ScanNetPairDataset, self).__init__()
+         self.data_root = data_root
+         self.overlap_threshold = overlap_threshold
+         self.view1_transform = Compose(view1_transform)
+         self.view2_transform = Compose(view2_transform)
+         self.loop = loop
+         self.data_list = self.get_data_list()
+         logger = get_root_logger()
+         logger.info("Totally {} x {} samples.".format(len(self.data_list), self.loop))
+
+     def get_data_list(self):
+         data_list = []
+         overlap_list = glob.glob(os.path.join(self.data_root, "*", "pcd", "overlap.txt"))
+         for overlap_file in overlap_list:
+             with open(overlap_file) as f:
+                 overlap = f.readlines()
+             overlap = [pair.strip().split() for pair in overlap]
+             data_list.extend(
+                 [pair[:2] for pair in overlap if float(pair[2]) > self.overlap_threshold]
+             )
+         return data_list
+
+     def get_data(self, idx):
+         pair = self.data_list[idx % len(self.data_list)]
+         view1_dict = torch.load(self.data_root + pair[0])
+         view2_dict = torch.load(self.data_root + pair[1])
+         return view1_dict, view2_dict
+
+     def get_data_name(self, idx):
+         return os.path.basename(self.data_list[idx % len(self.data_list)]).split(".")[0]
+
+     def prepare_train_data(self, idx):
+         # load data
+         view1_dict, view2_dict = self.get_data(idx)
+         view1_dict = self.view1_transform(view1_dict)
+         view2_dict = self.view2_transform(view2_dict)
+         data_dict = dict()
+         for key, value in view1_dict.items():
+             data_dict["view1_" + key] = value
+         for key, value in view2_dict.items():
+             data_dict["view2_" + key] = value
+         return data_dict
+
+     def prepare_test_data(self, idx):
+         raise NotImplementedError
+
+     def __getitem__(self, idx):
+         return self.prepare_train_data(idx)
+
+     def __len__(self):
+         return len(self.data_list) * self.loop
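
get_data_list parses each overlap.txt line as two .pth paths followed by an overlap ratio; since get_data concatenates data_root + pair[i] directly, the stored paths are expected to start with a separator. A sketch with made-up contents:

# overlap.txt (hypothetical): pairs at or below the 0.3 threshold are dropped
#   /scene0000_00/pcd/000100.pth /scene0000_00/pcd/000250.pth 0.45
#   /scene0000_00/pcd/000100.pth /scene0000_00/pcd/001900.pth 0.12
with open("overlap.txt") as f:
    pairs = [line.split() for line in f]
kept = [p[:2] for p in pairs if float(p[2]) > 0.3]  # keeps only the first pair
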
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/scannetpp.py ADDED
@@ -0,0 +1,78 @@
+ """
+ ScanNet++ dataset
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import os
+ import numpy as np
+ import glob
+
+ from pointcept.utils.cache import shared_dict
+
+ from .builder import DATASETS
+ from .defaults import DefaultDatasetV2
+
+
+ @DATASETS.register_module()
+ class ScanNetPPDataset(DefaultDatasetV2):
+     VALID_ASSETS = [
+         "coord",
+         "color",
+         "normal",
+         "segment",
+         "instance",
+     ]
+
+     def __init__(
+         self,
+         multilabel=False,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.multilabel = multilabel
+
+     def get_data(self, idx):
+         data_path = self.data_list[idx % len(self.data_list)]
+         name = self.get_data_name(idx)
+         if self.cache:
+             cache_name = f"pointcept-{name}"
+             return shared_dict(cache_name)
+
+         data_dict = {}
+         assets = os.listdir(data_path)
+         for asset in assets:
+             if not asset.endswith(".npy"):
+                 continue
+             if asset[:-4] not in self.VALID_ASSETS:
+                 continue
+             data_dict[asset[:-4]] = np.load(os.path.join(data_path, asset))
+         data_dict["name"] = name
+
+         if "coord" in data_dict.keys():
+             data_dict["coord"] = data_dict["coord"].astype(np.float32)
+
+         if "color" in data_dict.keys():
+             data_dict["color"] = data_dict["color"].astype(np.float32)
+
+         if "normal" in data_dict.keys():
+             data_dict["normal"] = data_dict["normal"].astype(np.float32)
+
+         if not self.multilabel:
+             if "segment" in data_dict.keys():
+                 data_dict["segment"] = data_dict["segment"][:, 0].astype(np.int32)
+             else:
+                 data_dict["segment"] = (
+                     np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
+                 )
+
+             if "instance" in data_dict.keys():
+                 data_dict["instance"] = data_dict["instance"][:, 0].astype(np.int32)
+             else:
+                 data_dict["instance"] = (
+                     np.ones(data_dict["coord"].shape[0], dtype=np.int32) * -1
+                 )
+         else:
+             raise NotImplementedError
+         return data_dict
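
Each scene is a folder of per-point .npy assets; segment and instance carry one column per label candidate, and get_data keeps only column 0 unless multilabel is requested. A sketch of the assumed layout (file names illustrative):

import numpy as np

# data/scannetpp/train/<scene>/{coord,color,normal,segment,instance}.npy
segment = np.load("segment.npy")               # assumed shape (N, K)
single_label = segment[:, 0].astype(np.int32)  # what get_data keeps by default
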
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/semantic_kitti.py ADDED
@@ -0,0 +1,139 @@
+ """
+ Semantic KITTI dataset
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import os
+ import numpy as np
+
+ from .builder import DATASETS
+ from .defaults import DefaultDataset
+
+
+ @DATASETS.register_module()
+ class SemanticKITTIDataset(DefaultDataset):
+     def __init__(self, ignore_index=-1, **kwargs):
+         self.ignore_index = ignore_index
+         self.learning_map = self.get_learning_map(ignore_index)
+         self.learning_map_inv = self.get_learning_map_inv(ignore_index)
+         super().__init__(ignore_index=ignore_index, **kwargs)
+
+     def get_data_list(self):
+         split2seq = dict(
+             train=[0, 1, 2, 3, 4, 5, 6, 7, 9, 10],
+             val=[8],
+             test=[11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21],
+         )
+         if isinstance(self.split, str):
+             seq_list = split2seq[self.split]
+         elif isinstance(self.split, list):
+             seq_list = []
+             for split in self.split:
+                 seq_list += split2seq[split]
+         else:
+             raise NotImplementedError
+
+         data_list = []
+         for seq in seq_list:
+             seq = str(seq).zfill(2)
+             seq_folder = os.path.join(self.data_root, "dataset", "sequences", seq)
+             seq_files = sorted(os.listdir(os.path.join(seq_folder, "velodyne")))
+             data_list += [
+                 os.path.join(seq_folder, "velodyne", file) for file in seq_files
+             ]
+         return data_list
+
+     def get_data(self, idx):
+         data_path = self.data_list[idx % len(self.data_list)]
+         with open(data_path, "rb") as b:
+             scan = np.fromfile(b, dtype=np.float32).reshape(-1, 4)
+         coord = scan[:, :3]
+         strength = scan[:, -1].reshape([-1, 1])
+
+         label_file = data_path.replace("velodyne", "labels").replace(".bin", ".label")
+         if os.path.exists(label_file):
+             with open(label_file, "rb") as a:
+                 segment = np.fromfile(a, dtype=np.int32).reshape(-1)
+                 segment = np.vectorize(self.learning_map.__getitem__)(
+                     segment & 0xFFFF
+                 ).astype(np.int32)
+         else:
+             segment = np.zeros(scan.shape[0]).astype(np.int32)
+         data_dict = dict(coord=coord, strength=strength, segment=segment)
+         return data_dict
+
+     def get_data_name(self, idx):
+         file_path = self.data_list[idx % len(self.data_list)]
+         dir_path, file_name = os.path.split(file_path)
+         sequence_name = os.path.basename(os.path.dirname(dir_path))
+         frame_name = os.path.splitext(file_name)[0]
+         data_name = f"{sequence_name}_{frame_name}"
+         return data_name
+
+     @staticmethod
+     def get_learning_map(ignore_index):
+         learning_map = {
+             0: ignore_index,  # "unlabeled"
+             1: ignore_index,  # "outlier" mapped to "unlabeled" --------------------------mapped
+             10: 0,  # "car"
+             11: 1,  # "bicycle"
+             13: 4,  # "bus" mapped to "other-vehicle" --------------------------mapped
+             15: 2,  # "motorcycle"
+             16: 4,  # "on-rails" mapped to "other-vehicle" ---------------------mapped
+             18: 3,  # "truck"
+             20: 4,  # "other-vehicle"
+             30: 5,  # "person"
+             31: 6,  # "bicyclist"
+             32: 7,  # "motorcyclist"
+             40: 8,  # "road"
+             44: 9,  # "parking"
+             48: 10,  # "sidewalk"
+             49: 11,  # "other-ground"
+             50: 12,  # "building"
+             51: 13,  # "fence"
+             52: ignore_index,  # "other-structure" mapped to "unlabeled" ------------------mapped
+             60: 8,  # "lane-marking" to "road" ---------------------------------mapped
+             70: 14,  # "vegetation"
+             71: 15,  # "trunk"
+             72: 16,  # "terrain"
+             80: 17,  # "pole"
+             81: 18,  # "traffic-sign"
+             99: ignore_index,  # "other-object" to "unlabeled" ----------------------------mapped
+             252: 0,  # "moving-car" to "car" ------------------------------------mapped
+             253: 6,  # "moving-bicyclist" to "bicyclist" ------------------------mapped
+             254: 5,  # "moving-person" to "person" ------------------------------mapped
+             255: 7,  # "moving-motorcyclist" to "motorcyclist" ------------------mapped
+             256: 4,  # "moving-on-rails" mapped to "other-vehicle" --------------mapped
+             257: 4,  # "moving-bus" mapped to "other-vehicle" -------------------mapped
+             258: 3,  # "moving-truck" to "truck" --------------------------------mapped
+             259: 4,  # "moving-other-vehicle" to "other-vehicle" ----------------mapped
+         }
+         return learning_map
+
+     @staticmethod
+     def get_learning_map_inv(ignore_index):
+         learning_map_inv = {
+             ignore_index: ignore_index,  # "unlabeled"
+             0: 10,  # "car"
+             1: 11,  # "bicycle"
+             2: 15,  # "motorcycle"
+             3: 18,  # "truck"
+             4: 20,  # "other-vehicle"
+             5: 30,  # "person"
+             6: 31,  # "bicyclist"
+             7: 32,  # "motorcyclist"
+             8: 40,  # "road"
+             9: 44,  # "parking"
+             10: 48,  # "sidewalk"
+             11: 49,  # "other-ground"
+             12: 50,  # "building"
+             13: 51,  # "fence"
+             14: 70,  # "vegetation"
+             15: 71,  # "trunk"
+             16: 72,  # "terrain"
+             17: 80,  # "pole"
+             18: 81,  # "traffic-sign"
+         }
+         return learning_map_inv
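
The `segment & 0xFFFF` above is load-bearing: SemanticKITTI stores each label as 32 bits, with the semantic class in the lower 16 bits and the per-object instance id in the upper 16, so masking recovers the raw class id before the learning map is applied. A standalone illustration:

import numpy as np

raw = np.fromfile("000000.label", dtype=np.int32)  # hypothetical frame
semantic = raw & 0xFFFF  # lower 16 bits: raw class id (e.g. 10 = "car")
instance = raw >> 16     # upper 16 bits: instance id, unused by this dataset
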
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/shapenet_part.py ADDED
@@ -0,0 +1,160 @@
+ """
+ ShapeNet Part Dataset (Unmaintained)
+
+ Get the processed ShapeNetPart dataset at
+ "https://shapenet.cs.stanford.edu/media/shapenetcore_partanno_segmentation_benchmark_v0_normal.zip"
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import os
+ import json
+ import torch
+ import numpy as np
+ from copy import deepcopy
+ from torch.utils.data import Dataset
+
+ from pointcept.utils.logger import get_root_logger
+ from .builder import DATASETS
+ from .transform import Compose
+
+
+ @DATASETS.register_module()
+ class ShapeNetPartDataset(Dataset):
+     def __init__(
+         self,
+         split="train",
+         data_root="data/shapenetcore_partanno_segmentation_benchmark_v0_normal",
+         transform=None,
+         test_mode=False,
+         test_cfg=None,
+         loop=1,
+     ):
+         super(ShapeNetPartDataset, self).__init__()
+         self.data_root = data_root
+         self.split = split
+         self.transform = Compose(transform)
+         self.loop = loop if not test_mode else 1  # force loop = 1 in test mode
+         self.test_mode = test_mode
+         self.test_cfg = test_cfg if test_mode else None
+         self.cache = {}
+
+         # load categories file
+         self.categories = []
+         self.category2part = {
+             "Airplane": [0, 1, 2, 3],
+             "Bag": [4, 5],
+             "Cap": [6, 7],
+             "Car": [8, 9, 10, 11],
+             "Chair": [12, 13, 14, 15],
+             "Earphone": [16, 17, 18],
+             "Guitar": [19, 20, 21],
+             "Knife": [22, 23],
+             "Lamp": [24, 25, 26, 27],
+             "Laptop": [28, 29],
+             "Motorbike": [30, 31, 32, 33, 34, 35],
+             "Mug": [36, 37],
+             "Pistol": [38, 39, 40],
+             "Rocket": [41, 42, 43],
+             "Skateboard": [44, 45, 46],
+             "Table": [47, 48, 49],
+         }
+         self.token2category = {}
+         with open(os.path.join(self.data_root, "synsetoffset2category.txt"), "r") as f:
+             for line in f:
+                 ls = line.strip().split()
+                 self.token2category[ls[1]] = len(self.categories)
+                 self.categories.append(ls[0])
+
+         if test_mode:
+             self.post_transform = Compose(self.test_cfg.post_transform)
+             self.aug_transform = [Compose(aug) for aug in self.test_cfg.aug_transform]
+
+         # load data list
+         if isinstance(self.split, str):
+             self.data_list = self.load_data_list(self.split)
+         elif isinstance(self.split, list):
+             self.data_list = []
+             for s in self.split:
+                 self.data_list += self.load_data_list(s)
+         else:
+             raise NotImplementedError
+
+         logger = get_root_logger()
+         logger.info(
+             "Totally {} x {} samples in {} set.".format(len(self.data_list), self.loop, split)
+         )
+
+     def load_data_list(self, split):
+         split_file = os.path.join(
+             self.data_root,
+             "train_test_split",
+             "shuffled_{}_file_list.json".format(split),
+         )
+         if not os.path.isfile(split_file):
+             raise RuntimeError("Split file does not exist: " + split_file)
+         with open(split_file, "r") as f:
+             # drop "shape_data/" and append ".txt"
+             data_list = [
+                 os.path.join(self.data_root, data[11:] + ".txt") for data in json.load(f)
+             ]
+         return data_list
+
+     def prepare_train_data(self, idx):
+         # load data
+         data_idx = idx % len(self.data_list)
+         if data_idx in self.cache:
+             coord, norm, segment, cls_token = self.cache[data_idx]
+         else:
+             data = np.loadtxt(self.data_list[data_idx]).astype(np.float32)
+             cls_token = self.token2category[
+                 os.path.basename(os.path.dirname(self.data_list[data_idx]))
+             ]
+             coord, norm, segment = (
+                 data[:, :3],
+                 data[:, 3:6],
+                 data[:, 6].astype(np.int32),
+             )
+             self.cache[data_idx] = (coord, norm, segment, cls_token)
+
+         data_dict = dict(coord=coord, norm=norm, segment=segment, cls_token=cls_token)
+         data_dict = self.transform(data_dict)
+         return data_dict
+
+     def prepare_test_data(self, idx):
+         # load data
+         data_idx = idx % len(self.data_list)
+         data = np.loadtxt(self.data_list[data_idx]).astype(np.float32)
+         cls_token = self.token2category[
+             os.path.basename(os.path.dirname(self.data_list[data_idx]))
+         ]
+         coord, norm, segment = data[:, :3], data[:, 3:6], data[:, 6].astype(np.int32)
+
+         data_dict = dict(coord=coord, norm=norm, cls_token=cls_token)
+         data_dict = self.transform(data_dict)
+         data_dict_list = []
+         for aug in self.aug_transform:
+             data_dict_list.append(self.post_transform(aug(deepcopy(data_dict))))
+         data_dict = dict(
+             fragment_list=data_dict_list, segment=segment, name=self.get_data_name(idx)
+         )
+         return data_dict
+
+     def get_data_name(self, idx):
+         data_idx = idx % len(self.data_list)
+         return os.path.basename(self.data_list[data_idx]).split(".")[0]
+
+     def __getitem__(self, idx):
+         if self.test_mode:
+             return self.prepare_test_data(idx)
+         else:
+             return self.prepare_train_data(idx)
+
+     def __len__(self):
+         return len(self.data_list) * self.loop
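
Because all 50 part labels share one flat id space, category2part records which part ids are legal for each category, and token2category maps each synset folder name to a category index. A quick sketch (02691156 is ShapeNet's airplane synset; the lookup itself is illustrative and assumes the default data_root exists):

dataset = ShapeNetPartDataset(split="train")
cls_token = dataset.token2category["02691156"]                      # -> index of "Airplane"
valid_parts = dataset.category2part[dataset.categories[cls_token]]  # -> [0, 1, 2, 3]
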
scannet/insseg-pointgroup-v1m1-pt-v3m1-ppt-ft/code/pointcept/datasets/structure3d.py ADDED
@@ -0,0 +1,61 @@
+ """
+ Structured3D Datasets
+
+ Author: Xiaoyang Wu (xiaoyang.wu.cs@gmail.com)
+ Please cite our work if the code is helpful to you.
+ """
+
+ import glob
+ import os
+ from collections.abc import Sequence
+
+ from .builder import DATASETS
+ from .defaults import DefaultDataset, DefaultDatasetV2
+
+
+ @DATASETS.register_module()
+ class Structured3DDataset(DefaultDataset):
+
+     def get_data_list(self):
+         if isinstance(self.split, str):
+             data_list = glob.glob(os.path.join(self.data_root, self.split, "scene_*/room_*"))
+         elif isinstance(self.split, Sequence):
+             data_list = []
+             for split in self.split:
+                 data_list += glob.glob(os.path.join(self.data_root, split, "scene_*/room_*"))
+         else:
+             raise NotImplementedError
+         return data_list
+
+     def get_data_name(self, idx):
+         file_path = self.data_list[idx % len(self.data_list)]
+         dir_path, room_name = os.path.split(file_path)
+         scene_name = os.path.basename(dir_path)
+         data_name = f"{scene_name}_{room_name}"
+         return data_name
+
+
+ @DATASETS.register_module()
+ class Structured3DDatasetV2(DefaultDatasetV2):
+
+     def get_data_list(self):
+         if isinstance(self.split, str):
+             data_list = glob.glob(os.path.join(self.data_root, self.split, "scene_*/room_*"))
+         elif isinstance(self.split, Sequence):
+             data_list = []
+             for split in self.split:
+                 data_list += glob.glob(os.path.join(self.data_root, split, "scene_*/room_*"))
+         else:
+             raise NotImplementedError
+         return data_list
+
+     def get_data_name(self, idx):
+         file_path = self.data_list[idx % len(self.data_list)]
+         dir_path, room_name = os.path.split(file_path)
+         scene_name = os.path.basename(dir_path)
+         data_name = f"{scene_name}_{room_name}"
+         return data_name
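
Both variants derive the sample name from the last two path components, so the behavior can be sanity-checked without any data on disk (path below is hypothetical):

import os

path = "data/structured3d/train/scene_00000/room_01"
dir_path, room_name = os.path.split(path)
print(f"{os.path.basename(dir_path)}_{room_name}")  # -> scene_00000_room_01
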