diff --git a/.gitignore b/.gitignore
index 9c25236..feac957 100644
--- a/.gitignore
+++ b/.gitignore
@@ -127,4 +127,8 @@ dmypy.json
/data/
/checkpoint/
/shapenet/
-/vis/
+/vis/data/
+/vis/checkpoint
+/predict/data/
+/predict/checkpoint/
+
diff --git a/.idea/pointnet2-pytorch.iml b/.idea/pointnet2-pytorch.iml
index d1ad953..a9a200c 100644
--- a/.idea/pointnet2-pytorch.iml
+++ b/.idea/pointnet2-pytorch.iml
@@ -4,7 +4,8 @@
-
+
+
diff --git a/dataset/shapenet.py b/dataset/shapenet.py
index 931237c..480c04e 100644
--- a/dataset/shapenet.py
+++ b/dataset/shapenet.py
@@ -14,9 +14,8 @@ import re
def save_names(name_list, path):
- with open(path, 'wb'):
- pass
-
+ with open(path, 'wb') as f:
+ f.writelines(name_list)
class CustomShapeNet(InMemoryDataset):
@@ -119,20 +118,16 @@ class CustomShapeNet(InMemoryDataset):
points = torch.tensor(src, dtype=None).squeeze()
if not len(points.shape) > 1:
continue
- # pos = points[:, :3]
- # norm = points[:, 3:]
y_all = [y_raw] * points.shape[0]
y = torch.as_tensor(y_all, dtype=torch.int)
- # points = torch.as_tensor(points, dtype=torch.float)
- # norm = torch.as_tensor(norm, dtype=torch.float)
if self.collate_per_element:
- data = Data(y=y, pos=points[:, :3])
+ data = Data(y=y, pos=points[:, :3], points=points, norm=points[:, 3:])
else:
if not data:
data = defaultdict(list)
- for key, val in dict(y=y, pos= points[:, :3]).items():
+ for key, val in dict(y=y, pos=points[:, :3], points=points, norm=points[:, 3:]).items():
data[key].append(val)
- # , points=points, norm=points[:3], )
+
data = self._transform_and_filter(data)
if self.collate_per_element:
datasets[data_folder].append(data)
@@ -160,7 +155,7 @@ class ShapeNetPartSegDataset(Dataset):
def __getitem__(self, index):
data = self.dataset[index]
- points, labels = data.pos, data.y
+ points, labels, _, norm = data.pos, data.y, data.points, data.norm
# Resample to fixed number of points
try:
@@ -168,13 +163,14 @@ class ShapeNetPartSegDataset(Dataset):
except ValueError:
choice = []
- points, labels = points[choice, :], labels[choice]
+ points, labels, norm = points[choice, :], labels[choice], norm[choice]
labels -= 1 if self.num_classes() in labels else 0 # Map label from [1, C] to [0, C-1]
sample = {
'points': points, # torch.Tensor (n, 3)
- 'labels': labels # torch.Tensor (n,)
+ 'labels': labels, # torch.Tensor (n,)
+ 'normals': norm # torch.Tensor (n,)
}
return sample
@@ -188,11 +184,12 @@ class ShapeNetPartSegDataset(Dataset):
class PredictionShapeNet(InMemoryDataset):
- def __init__(self, root, transform=None, pre_filter=None, pre_transform=None, headers=True):
+ def __init__(self, root, transform=None, pre_filter=None, pre_transform=None, headers=True, refresh=False):
self.has_headers = headers
+ self.refresh = refresh
super(PredictionShapeNet, self).__init__(root, transform, pre_transform, pre_filter)
path = self.processed_paths[0]
- self.data, self.slices = torch.load(path)
+ self.data, self.slices = self._load_dataset()
print("Initialized")
@property
@@ -217,9 +214,18 @@ class PredictionShapeNet(InMemoryDataset):
def _load_dataset(self):
data, slices = None, None
+ filepath = os.path.join(self.processed_dir, self.processed_file_names[0])
+ if self.refresh:
+ try:
+ os.remove(filepath)
+ print('Processed Location "Refreshed" (We deleted the Files)')
+ except FileNotFoundError:
+                print('You meant to refresh the already processed dataset, but there were none...')
+ print('continue processing')
+ pass
+
while True:
try:
- filepath = os.path.join(self.root, self.processed_dir, f'{"train" if self.train else "test"}.pt')
data, slices = torch.load(filepath)
print('Dataset Loaded')
break
@@ -243,7 +249,7 @@ class PredictionShapeNet(InMemoryDataset):
for pointcloud in tqdm(os.scandir(path_to_clouds)):
if not os.path.isdir(pointcloud):
continue
- full_cloud_pattern = '\d+?_pc\.(xyz|dat)'
+ full_cloud_pattern = '(^\d+?_|^)pc\.(xyz|dat)'
pattern = re.compile(full_cloud_pattern)
for file in os.scandir(pointcloud.path):
if not pattern.match(file.name):
@@ -267,7 +273,7 @@ class PredictionShapeNet(InMemoryDataset):
y = torch.as_tensor(y_fake_all, dtype=torch.int)
# points = torch.as_tensor(points, dtype=torch.float)
# norm = torch.as_tensor(norm, dtype=torch.float)
- data = Data(y=y, pos=points[:, :3])
+ data = Data(y=y, pos=points[:, :3], points=points, norm=points[:, 3:])
# , points=points, norm=points[:3], )
# ToDo: ANy filter to apply? Then do it here.
if self.pre_filter is not None and not self.pre_filter(data):
@@ -293,29 +299,20 @@ class PredictNetPartSegDataset(Dataset):
Resample raw point cloud to fixed number of points.
Map raw label from range [1, N] to [0, N-1].
"""
- def __init__(self, root_dir, num_classes, transform=None, npoints=2048, headers=True):
+ def __init__(self, root_dir, num_classes, transform=None, npoints=2048, headers=True, refresh=False):
super(PredictNetPartSegDataset, self).__init__()
self.npoints = npoints
self._num_classes = num_classes
- self.dataset = PredictionShapeNet(root=root_dir, transform=transform, headers=headers)
+ self.dataset = PredictionShapeNet(root=root_dir, transform=transform, headers=headers, refresh=refresh)
def __getitem__(self, index):
data = self.dataset[index]
- points, labels = data.pos, data.y
-
- # Resample to fixed number of points
- try:
- choice = np.random.choice(points.shape[0], self.npoints, replace=True)
- except ValueError:
- choice = []
-
- points, labels = points[choice, :], labels[choice]
-
- labels -= 1 if self.num_classes() in labels else 0 # Map label from [1, C] to [0, C-1]
+ points, labels, _, norm = data.pos, data.y, data.points, data.norm
sample = {
'points': points, # torch.Tensor (n, 3)
- 'labels': labels # torch.Tensor (n,)
+ 'labels': labels, # torch.Tensor (n,)
+ 'normals': norm # torch.Tensor (n,)
}
return sample
diff --git a/predict/data/processed/predict.pt b/predict/data/processed/predict.pt
deleted file mode 100644
index f7386b1..0000000
Binary files a/predict/data/processed/predict.pt and /dev/null differ
diff --git a/predict/predict.py b/predict/predict.py
index 739c23d..a28d5aa 100644
--- a/predict/predict.py
+++ b/predict/predict.py
@@ -28,7 +28,8 @@ if __name__ == '__main__':
root_dir=opt.dataset,
num_classes=4,
transform=None,
- npoints=opt.npoints
+ npoints=opt.npoints,
+ refresh=True
)
num_classes = test_dataset.num_classes()
diff --git a/vis/show_seg_res.py b/vis/show_seg_res.py
index 7c75122..72bc5a3 100644
--- a/vis/show_seg_res.py
+++ b/vis/show_seg_res.py
@@ -134,7 +134,7 @@ if __name__ == '__main__':
print(diff_labels)
view_points_labels(points, diff_labels)
- if False:
+ if True:
print('View pred labels ..')
print(pred_labels)
view_points_labels(points, pred_labels)