Labeling-Primitives-With-Po…/predict/predict.py

import sys
import os
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../') # add project root directory
from dataset.shapenet import ShapeNetPartSegDataset
from model.pointnet2_part_seg import PointNet2PartSegmentNet
import torch_geometric.transforms as GT
import torch
from distutils.util import strtobool
import numpy as np
import argparse
##
parser = argparse.ArgumentParser()
parser.add_argument('--npoints', type=int, default=2048, help='number of points to resample each cloud to')
parser.add_argument('--model', type=str, default='./checkpoint/seg_model_custom_3.pth', help='model path')
parser.add_argument('--sample_idx', type=int, default=0, help='select a sample to segment and view the result')
parser.add_argument('--headers', type=strtobool, default=True, help='whether the raw files come with headers')
parser.add_argument('--collate_per_segment', type=strtobool, default=True,
                    help='whether to collate per segment (sub-pointclouds) or per whole pointcloud')
parser.add_argument('--has_variations', type=strtobool, default=False,
                    help='whether a single pointcloud has variations named int(id)_pc.(xyz|dat)')
opt = parser.parse_args()
print(opt)
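# Example invocation (illustrative only; the paths and flag values just mirror the defaults above):
#   python predict/predict.py --npoints 2048 --model ./checkpoint/seg_model_custom_3.pth \
#       --collate_per_segment True --has_variations False --headers True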
if __name__ == '__main__':
    # Load dataset
    print('Construct dataset ..')
    test_transform = GT.Compose([GT.NormalizeScale(), ])
    test_dataset = ShapeNetPartSegDataset(
        mode='predict',
        root_dir='data',
        npoints=opt.npoints,
        refresh=True,
        collate_per_segment=opt.collate_per_segment,
        has_variations=opt.has_variations,
        headers=opt.headers
    )
    num_classes = test_dataset.num_classes()
    print('test dataset size: ', len(test_dataset))

    # Load model
    print('Construct model ..')
    device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    dtype = torch.float

    # net = PointNetPartSegmentNet(num_classes)
    net = PointNet2PartSegmentNet(num_classes)

    net.load_state_dict(torch.load(opt.model, map_location=device.type))
    net = net.to(device, dtype)
    net.eval()

    ##
    def eval_sample(net, sample):
        '''
        sample: { 'points': tensor(n, 3), 'labels': tensor(n,) }
        return: (pred_label, gt_label) with labels shape (n,)
        '''
        net.eval()
        with torch.no_grad():
            # points: (n, 3)
            points, gt_label = sample['points'], sample['labels']
            n = points.shape[0]

            points = points.view(1, n, 3)  # make a batch
            points = points.transpose(1, 2).contiguous()
            points = points.to(device, dtype)

            pred = net(points)  # (batch_size, n, num_classes)

            pred_label = pred.max(2)[1]
            pred_label = pred_label.view(-1).cpu()  # (n,)

            assert pred_label.shape == gt_label.shape

            return (pred_label, gt_label)

    # Iterate over all the samples
    for sample in test_dataset:
        print('Eval test sample ..')
        pred_label, gt_label = eval_sample(net, sample)
        print('Eval done ..')

        pred_labels = pred_label.numpy()
        print(pred_labels)
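
The --sample_idx argument is parsed but never used by the loop above. A minimal sketch of segmenting just that one sample instead, assuming ShapeNetPartSegDataset supports integer indexing and that the 'labels' it returns in predict mode are real ground truth (otherwise the accuracy line is meaningless):

sample = test_dataset[opt.sample_idx]
pred_label, gt_label = eval_sample(net, sample)
# per-point accuracy; only meaningful if gt_label holds real annotations
acc = (pred_label == gt_label).float().mean().item()
print('Per-point accuracy on sample %d: %.4f' % (opt.sample_idx, acc))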