Added normals to prediction DataObject

Si11ium 2019-08-09 10:25:16 +02:00
parent 4e1fcdfd43
commit 8eb165f76c
7 changed files with 101464 additions and 2060 deletions

@@ -46,8 +46,9 @@ class CustomShapeNet(InMemoryDataset):
     def download(self):
         dir_count = len([name for name in os.listdir(self.raw_dir) if os.path.isdir(os.path.join(self.raw_dir, name))])
-        print(f'{dir_count} folders have been found....')
         if dir_count:
+            print(f'{dir_count} folders have been found....')
             return dir_count
         raise IOError("No raw pointclouds have been found.")
@@ -179,6 +180,7 @@ class ShapeNetPartSegDataset(Dataset):
     Resample raw point cloud to fixed number of points.
     Map raw label from range [1, N] to [0, N-1].
     """
     def __init__(self, root_dir, npoints=1024, mode='train', **kwargs):
         super(ShapeNetPartSegDataset, self).__init__()
         self.mode = mode
@@ -191,7 +193,8 @@ class ShapeNetPartSegDataset(Dataset):
         # Resample to fixed number of points
         try:
-            choice = np.random.choice(data.pos.shape[0], self.npoints, replace=True)
+            npoints = self.npoints if self.mode != 'predict' else data.pos.shape[0]
+            choice = np.random.choice(data.pos.shape[0], npoints, replace=False)
         except ValueError:
             choice = []
@@ -204,7 +207,7 @@ class ShapeNetPartSegDataset(Dataset):
             'labels': labels  # torch.Tensor (n,)
         }
         if self.mode == 'predict':
-            normals = data.normals[choice]
+            normals = data.normals[choice, :]
             sample.update(normals=normals)
         return sample
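
With this change, a sample drawn in 'predict' mode keeps every point of the raw cloud instead of resampling to npoints, and switching to replace=False means the draw now fails (and falls into the existing except branch) when the cloud has fewer points than requested. A minimal sketch of that resampling rule, using only what the hunk above shows (the function name is illustrative):

import numpy as np

def resample_indices(num_points_in_cloud, npoints, mode):
    # Mirrors the logic added in this commit: keep all points when predicting,
    # otherwise draw npoints indices without replacement.
    n = npoints if mode != 'predict' else num_points_in_cloud
    try:
        return np.random.choice(num_points_in_cloud, n, replace=False)
    except ValueError:  # cloud has fewer points than requested
        return []

# e.g. resample_indices(5000, 1024, 'train')   -> 1024 unique indices
#      resample_indices(5000, 1024, 'predict') -> all 5000 indices (permuted)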

@@ -33,10 +33,10 @@ parser.add_argument('--npoints', type=int, default=1024, help='resample points number')
 parser.add_argument('--model', type=str, default='', help='model path')
 parser.add_argument('--nepoch', type=int, default=250, help='number of epochs to train for')
 parser.add_argument('--outf', type=str, default='checkpoint', help='output folder')
-parser.add_argument('--labels_within', type=strtobool, default=False, help='defines the label location')
+parser.add_argument('--labels_within', type=strtobool, default=True, help='defines the label location')
 parser.add_argument('--batch_size', type=int, default=8, help='input batch size')
 parser.add_argument('--test_per_batches', type=int, default=1000, help='run a test batch per training batches number')
-parser.add_argument('--num_workers', type=int, default=4, help='number of data loading workers')
+parser.add_argument('--num_workers', type=int, default=1, help='number of data loading workers')
 parser.add_argument('--headers', type=strtobool, default=True, help='if raw files come with headers')
 parser.add_argument('--collate_per_segment', type=strtobool, default=True, help='whether to look at pointclouds or sub')
 parser.add_argument('--has_variations', type=strtobool, default=False,
@@ -129,7 +129,6 @@ if __name__ == '__main__':
     net.train()
     # ToDo: We need different dataloader here to train the network in multiple iterations, maybe move the loop down
-    # for dataloader in ...
     for batch_idx, sample in enumerate(dataLoader):
         # points: (batch_size, n, 3)
         # labels: (batch_size, n)

@@ -8,7 +8,7 @@ from torch_geometric.utils.num_nodes import maybe_num_nodes
 from torch_geometric.data.data import Data
 from torch_scatter import scatter_add, scatter_max

-GLOBAL_POINT_FEATURES = 3
+GLOBAL_POINT_FEATURES = 6

 class PointNet2SAModule(torch.nn.Module):
     def __init__(self, sample_radio, radius, max_num_neighbors, mlp):
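
Raising GLOBAL_POINT_FEATURES from 3 to 6 tells the PointNet++ modules to expect six channels per point, which lines up with positions plus the normals the dataset now returns in predict mode. A hedged sketch of assembling such a per-point feature tensor (variable names are illustrative, not taken from this file):

import torch

GLOBAL_POINT_FEATURES = 6  # value set by this commit: x, y, z, nx, ny, nz

pos = torch.rand(1024, 3)      # point positions
normals = torch.rand(1024, 3)  # per-point normals, e.g. the 'normals' entry of a predict sample
features = torch.cat([pos, normals], dim=-1)  # shape (1024, 6)
assert features.shape[-1] == GLOBAL_POINT_FEATURES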

predict/clusters.txt (new file, 200 lines added):
48
1.000000000000000000e+00
1 6
7.199833552042643747e-01 1.481056722005208437e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
5 6
3.199843406677246316e-01 1.547723388671875089e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
4.533166567484537834e-01 1.414390055338541563e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 1.614390055338541741e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
2.533176740010579242e-01 1.614390055338541741e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 1.481056722005208437e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
2 6
7.199833552042643747e-01 1.414390055338541563e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
6.533166885375976118e-01 1.481056722005208437e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
3.000000000000000000e+00
4 6
2.533176740010579242e-01 1.414390055338541563e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.199843406677246316e-01 1.347723388671874911e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
2.533176740010579242e-01 1.281056722005208259e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 1.281056722005208259e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
11 6
3.199843406677246316e-01 1.947723388671875000e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 1.947723388671875000e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 1.814390055338541696e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
5.199833234151204353e-01 2.014390055338541874e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.866499900817871316e-01 2.014390055338541874e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
5.199833234151204353e-01 2.147723388671875178e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 2.014390055338541874e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.199843406677246316e-01 1.814390055338541696e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 1.881056722005208348e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 2.081056722005208304e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.866499900817871316e-01 1.881056722005208348e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
8 6
3.199843406677246316e-01 2.214390055338541607e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 2.147723388671875178e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
4.533166567484537834e-01 2.081056722005208304e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
4.533166567484537834e-01 2.214390055338541607e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
5.199833234151204353e-01 2.014390055338541874e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
2.533176740010579242e-01 2.281056722005208481e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 2.014390055338541874e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
5.199833234151204353e-01 2.147723388671875178e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
4 6
7.199833552042643747e-01 1.947723388671875000e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
6.533166885375976118e-01 2.014390055338541874e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
7.199833552042643747e-01 2.081056722005208304e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
6.533166885375976118e-01 2.147723388671875178e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.866510073343912723e-01 2.281056722005208481e+00 5.645600001017252456e-01 -4.267321706433224227e-04 9.999984323090402860e-01 -1.718510726318397703e-03
1.000000000000000000e+00
1 6
5.199833234151204353e-01 2.281056722005208481e+00 5.645600001017252456e-01 -4.267321706433224227e-04 9.999984323090402860e-01 -1.718510726318397703e-03
1.000000000000000000e+00
1 6
6.533166885375976118e-01 1.747723388671875044e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
5.331766605377197266e-02 2.214390055338541607e+00 6.312266667683918975e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
1.199843327204386384e-01 2.147723388671875178e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 2.014390055338541874e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
2 6
5.331766605377197266e-02 2.081056722005208304e+00 6.312266667683918975e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.199843327204386384e-01 2.081056722005208304e+00 5.645600001017252456e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.614390055338541741e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.947723388671875000e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
5 6
3.199843406677246316e-01 2.281056722005208481e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.199843406677246316e-01 2.147723388671875178e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.866499900817871316e-01 2.214390055338541607e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 2.281056722005208481e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 2.214390055338541607e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
5 6
5.199833234151204353e-01 1.547723388671875089e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.866499900817871316e-01 1.681056722005208393e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 1.614390055338541741e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
4.533166567484537834e-01 1.481056722005208437e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
5.866500218709309600e-01 1.614390055338541741e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.000000000000000000e+00
1 6
5.331766605377197266e-02 1.881056722005208348e+00 6.312266667683918975e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
7.199833552042643747e-01 1.747723388671875044e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
5.866500218709309600e-01 1.747723388671875044e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
2 6
5.866500218709309600e-01 1.614390055338541741e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
5.199833234151204353e-01 1.547723388671875089e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
8 6
5.199833234151204353e-01 1.814390055338541696e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.866510073343912723e-01 1.881056722005208348e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
2.533176740010579242e-01 1.947723388671875000e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.199843406677246316e-01 1.881056722005208348e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
4.533166567484537834e-01 1.747723388671875044e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
5.866500218709309600e-01 1.881056722005208348e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
4.533166567484537834e-01 1.881056722005208348e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.866499900817871316e-01 1.814390055338541696e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.000000000000000000e+00
2 6
5.199833234151204353e-01 1.281056722005208259e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
5.866500218709309600e-01 1.347723388671874911e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
5.866500218709309600e-01 1.414390055338541563e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
3 6
6.533166885375976118e-01 2.147723388671875178e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
6.533166885375976118e-01 2.281056722005208481e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
7.199833552042643747e-01 2.214390055338541607e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.000000000000000000e+00
1 6
5.199833234151204353e-01 1.281056722005208259e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
3.000000000000000000e+00
6 6
3.866499900817871316e-01 1.347723388671874911e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 1.347723388671874911e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.199843406677246316e-01 1.414390055338541563e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 1.481056722005208437e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
2.533176740010579242e-01 1.614390055338541741e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.199843406677246316e-01 1.547723388671875089e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.814390055338541696e+00 5.645600001017252456e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.414390055338541563e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.000000000000000000e+00
1 6
1.199843327204386384e-01 1.281056722005208259e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 2.147723388671875178e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
3.000000000000000000e+00
1 6
1.199843327204386384e-01 1.347723388671874911e+00 5.645600001017252456e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
7.199833552042643747e-01 1.614390055338541741e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
7.199833552042643747e-01 2.281056722005208481e+00 6.312266667683918975e-01 -4.267321706433224227e-04 9.999984323090402860e-01 -1.718510726318397703e-03
1.000000000000000000e+00
1 6
5.866500218709309600e-01 1.881056722005208348e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.547723388671875089e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.866510073343912723e-01 1.747723388671875044e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
2.533176740010579242e-01 2.081056722005208304e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
3.000000000000000000e+00
1 6
7.199833552042643747e-01 1.281056722005208259e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
7.199833552042643747e-01 1.881056722005208348e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.747723388671875044e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
6.533166885375976118e-01 2.014390055338541874e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.481056722005208437e+00 5.645600001017252456e-01 -9.993677590519846055e-01 -4.875568776990416931e-04 -3.555059008941406640e-02
1.000000000000000000e+00
1 6
1.199843327204386384e-01 1.414390055338541563e+00 4.978933334350585938e-01 3.555141376242736823e-02 -1.702250673669388707e-03 -9.993663989359141686e-01
1.000000000000000000e+00
1 6
2.533176740010579242e-01 1.747723388671875044e+00 6.312266667683918975e-01 -3.555141376242736823e-02 1.702250673669388707e-03 9.993663989359141686e-01
1.000000000000000000e+00
1 6
5.866500218709309600e-01 2.281056722005208481e+00 6.312266667683918975e-01 -2.872351790370336957e-02 6.003545025065727403e-01 7.992180120838876523e-01
1.000000000000000000e+00
1 6
3.866499900817871316e-01 2.281056722005208481e+00 5.645600001017252456e-01 -4.267321706433224227e-04 9.999984323090402860e-01 -1.718510726318397703e-03
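
The file above looks like a plain-text dump of the predicted clusters: a first line with the number of clusters (48), then for each cluster one or more scalar label lines, a '<point_count> 6' header, and that many rows of x y z nx ny nz. This reading of the format is an assumption inferred from the values themselves, not documented in the commit; a tolerant reader sketch under that assumption:

import numpy as np

def read_clusters(path='predict/clusters.txt'):
    # Hedged sketch: parse the assumed layout described above.
    with open(path) as f:
        tokens = [line.split() for line in f if line.strip()]
    total = int(float(tokens[0][0]))  # first line: number of clusters
    clusters, i = [], 1
    while i < len(tokens):
        labels = []
        while i < len(tokens) and len(tokens[i]) == 1:  # scalar label line(s)
            labels.append(float(tokens[i][0]))
            i += 1
        if i >= len(tokens):
            break
        count, dims = int(tokens[i][0]), int(tokens[i][1])  # '<point_count> 6' header
        i += 1
        points = np.array([[float(v) for v in row] for row in tokens[i:i + count]])
        i += count
        clusters.append({'labels': labels, 'points': points.reshape(count, dims)})
    assert len(clusters) == total  # 48 for the file added here
    return clusters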

File diff suppressed because it is too large.

predict/pointclouds/1_pc.xyz (new file, 101251 lines added)
File diff suppressed because it is too large.

@@ -227,7 +227,7 @@ def draw_clusters(clusters):
 def draw_sample_data(sample_data, colored_normals = False):
     cloud = o3d.PointCloud()
-    cloud.points = o3d.Vector3dVector(sample_data[:,:3])
+    cloud.points = o3d.Vector3dVector(sample_data[:, :3])
     cloud.colors = \
         o3d.Vector3dVector(label2color(sample_data[:, 6].astype(int)) if not colored_normals else sample_data[:, 3:6])
@@ -243,7 +243,7 @@ sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../') # add proj
 parser = argparse.ArgumentParser()
 parser.add_argument('--npoints', type=int, default=2048, help='resample points number')
-parser.add_argument('--model', type=str, default='./checkpoint/seg_model_custom_30.pth', help='model path')
+parser.add_argument('--model', type=str, default='./checkpoint/seg_model_custom_3.pth', help='model path')
 parser.add_argument('--sample_idx', type=int, default=0, help='select a sample to segment and view result')
 parser.add_argument('--headers', type=strtobool, default=True, help='if raw files come with headers')
 parser.add_argument('--collate_per_segment', type=strtobool, default=True, help='whether to look at pointclouds or sub')
@@ -260,7 +260,7 @@ if __name__ == '__main__':
     print('Create data set ..')
     dataset_folder = './data/raw/predict/'
-    pointcloud_file = './pointclouds/0_pc.xyz'
+    pointcloud_file = './pointclouds/1_pc.xyz'

     # Load and pre-process point cloud
     pcloud = pc.read_pointcloud(pointcloud_file)
@@ -304,7 +304,7 @@ if __name__ == '__main__':
         mode='predict',
         root_dir='data',
         npoints=opt.npoints,
-        refresh=False,
+        refresh=True,
         collate_per_segment=opt.collate_per_segment,
         has_variations=opt.has_variations,
         headers=opt.headers