File based header detection, collate_per_PC training.
This commit is contained in:

Changed files: main.py (7 changed lines)
@@ -37,6 +37,7 @@ parser.add_argument('--batch_size', type=int, default=8, help='input batch size')
 parser.add_argument('--test_per_batches', type=int, default=1000, help='run a test batch per training batches number')
 parser.add_argument('--num_workers', type=int, default=4, help='number of data loading workers')
 parser.add_argument('--headers', type=strtobool, default=True, help='if raw files come with headers')
+parser.add_argument('--collate_per_segment', type=strtobool, default=True, help='whether to look at pointclouds or sub')


 opt = parser.parse_args()
@@ -68,10 +69,12 @@ if __name__ == '__main__':
     train_transform = GT.Compose([GT.NormalizeScale(), RotTransform, TransTransform])
     test_transform = GT.Compose([GT.NormalizeScale(), ])

-    dataset = ShapeNetPartSegDataset(root_dir=opt.dataset, train=True, transform=train_transform, npoints=opt.npoints, headers=opt.headers)
+    dataset = ShapeNetPartSegDataset(root_dir=opt.dataset, collate_per_segment=opt.collate_per_segment,
+                                     train=True, transform=train_transform, npoints=opt.npoints, headers=opt.headers)
     dataLoader = DataLoader(dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.num_workers)

-    test_dataset = ShapeNetPartSegDataset(root_dir=opt.dataset, train=False, transform=test_transform, npoints=opt.npoints, headers=opt.headers)
+    test_dataset = ShapeNetPartSegDataset(root_dir=opt.dataset, collate_per_segment=opt.collate_per_segment,
+                                          train=False, transform=test_transform, npoints=opt.npoints, headers=opt.headers)
     test_dataLoader = DataLoader(test_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=opt.num_workers)

     num_classes = dataset.num_classes()
|
Reference in New Issue
Block a user