File based header detection, collate_per_PC training.

commit 47a76dc978
parent 0b9d03a25d
Author: Si11ium
Date: 2019-07-31 12:55:47 +02:00

6 changed files with 218 additions and 44 deletions
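The collate_per_PC part of this commit lives in one of the six changed files that are not shown in this excerpt. As a rough, hedged sketch of what a per-point-cloud collate function for a PyTorch DataLoader usually looks like (every name below is illustrative, not taken from the repository):

import torch

def collate_per_pc(samples):
    # samples: list of dicts, each with 'points' (n, 3) and 'labels' (n,) tensors,
    # where n is the same for every cloud because inputs are resampled to npoints.
    return {
        'points': torch.stack([s['points'] for s in samples], dim=0),  # (B, n, 3)
        'labels': torch.stack([s['labels'] for s in samples], dim=0),  # (B, n)
    }

# Possible usage:
# loader = torch.utils.data.DataLoader(dataset, batch_size=4, collate_fn=collate_per_pc)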

View File

@@ -1,11 +1,11 @@
 # Warning: import open3d may lead crash, try to to import open3d first here
-from view import view_points_labels
+from vis.view import view_points_labels
 import sys
 import os
 sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../') # add project root directory
-from dataset.shapenet import ShapeNetPartSegDataset
+from dataset.shapenet import PredictNetPartSegDataset
 from model.pointnet2_part_seg import PointNet2PartSegmentNet
 import torch_geometric.transforms as GT
 import torch
@@ -17,9 +17,8 @@ import argparse
 parser = argparse.ArgumentParser()
 parser.add_argument('--dataset', type=str, default='data', help='dataset path')
 parser.add_argument('--npoints', type=int, default=50, help='resample points number')
-parser.add_argument('--model', type=str, default='./checkpoint/seg_model_Airplane_24.pth', help='model path')
+parser.add_argument('--model', type=str, default='./checkpoint/seg_model_custom_8.pth', help='model path')
 parser.add_argument('--sample_idx', type=int, default=0, help='select a sample to segment and view result')
 opt = parser.parse_args()
 print(opt)
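With the new default, the script segments with a custom checkpoint instead of the Airplane-category model. Assuming it is invoked directly (the file name is not part of this excerpt; predict.py is only a placeholder), a typical call would be:

python predict.py --dataset data --npoints 50 --model ./checkpoint/seg_model_custom_8.pth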
@@ -29,9 +28,8 @@ if __name__ == '__main__':
     print('Construct dataset ..')
     test_transform = GT.Compose([GT.NormalizeScale(),])
-    test_dataset = ShapeNetPartSegDataset(
+    test_dataset = PredictNetPartSegDataset(
         root_dir=opt.dataset,
         train=False,
         transform=test_transform,
         npoints=opt.npoints
     )
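For orientation, the dataset constructed here is indexed further down and yields one dict per cloud; a minimal hedged inspection sketch (the 'points' key is taken from the evaluation loop below, everything else is an assumption):

# Assumes test_dataset has been built as above; shapes are expectations, not guarantees.
sample = test_dataset[0]
points = sample['points']  # expected (npoints, 3) float tensor
print(len(test_dataset), points.shape)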
@@ -49,7 +47,7 @@ if __name__ == '__main__':
     # net = PointNetPartSegmentNet(num_classes)
     net = PointNet2PartSegmentNet(num_classes)
-    net.load_state_dict(torch.load(opt.model))
+    net.load_state_dict(torch.load(opt.model, map_location=device.type))
     net = net.to(device, dtype)
     net.eval()
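The added map_location is what lets a checkpoint saved on a GPU machine be loaded on a CPU-only one: device.type is just the device string ('cuda' or 'cpu'), and torch.load accepts such a string directly. A minimal standalone illustration, reusing the checkpoint path from the argparse default above:

import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Remap every tensor in the checkpoint onto whichever device is actually available.
state_dict = torch.load('./checkpoint/seg_model_custom_8.pth', map_location=device.type)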
@@ -104,30 +102,35 @@ if __name__ == '__main__':
     # Get one sample and eval
-    sample = test_dataset[opt.sample_idx]
+    #sample = test_dataset[opt.sample_idx]
+    r_idx = np.random.randint(0, len(test_dataset), 20)
+    for idx in r_idx:
+        sample = test_dataset[int(idx)]
-    print('Eval test sample ..')
-    pred_label, gt_label = eval_sample(net, sample)
-    print('Eval done ..')
+        print('Eval test sample ..')
+        pred_label, gt_label = eval_sample(net, sample)
+        print('Eval done ..')
-    # Get sample result
-    print('Compute mIoU ..')
-    points = sample['points'].numpy()
-    pred_labels = pred_label.numpy()
-    gt_labels = gt_label.numpy()
-    diff_labels = label_diff(pred_labels, gt_labels)
+        # Get sample result
+        print('Compute mIoU ..')
+        points = sample['points'].numpy()
+        pred_labels = pred_label.numpy()
+        gt_labels = gt_label.numpy()
+        diff_labels = label_diff(pred_labels, gt_labels)
-    print('mIoU: ', compute_mIoU(pred_labels, gt_labels))
+        print('mIoU: ', compute_mIoU(pred_labels, gt_labels))
-    # View result
+        # View result
-    # print('View gt labels ..')
-    # view_points_labels(points, gt_labels)
+        # print('View gt labels ..')
+        # view_points_labels(points, gt_labels)
-    # print('View diff labels ..')
-    # view_points_labels(points, diff_labels)
+        print('View diff labels ..')
+        print(diff_labels)
+        view_points_labels(points, diff_labels)
-    print('View pred labels ..')
-    view_points_labels(points, pred_labels)
+        # print('View pred labels ..')
+        # print(pred_labels)
+        # view_points_labels(points, pred_labels)
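compute_mIoU and label_diff are helpers defined earlier in this script and are untouched by the diff; as a hedged sketch of what a per-sample mIoU over part labels typically computes (not the repository's exact code):

import numpy as np

def compute_miou_sketch(pred_labels, gt_labels):
    # Mean IoU over every part id occurring in either prediction or ground truth.
    part_ids = np.union1d(np.unique(pred_labels), np.unique(gt_labels))
    ious = []
    for part in part_ids:
        inter = np.sum((pred_labels == part) & (gt_labels == part))
        union = np.sum((pred_labels == part) | (gt_labels == part))
        ious.append(inter / union if union > 0 else 1.0)
    return float(np.mean(ious))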

View File

@@ -28,7 +28,7 @@ def view_points(points, colors=None):
     '''
     cloud = o3d.PointCloud()
     cloud.points = o3d.Vector3dVector(points)
     # frame = o3d.create_mesh_coordinate_frame(-1, -1, -1)
     if colors is not None:
         if isinstance(colors, np.ndarray):
             cloud.colors = o3d.Vector3dVector(colors)
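o3d.PointCloud, o3d.Vector3dVector and o3d.draw_geometries are the flat names used by open3d releases before 0.8; newer versions expose the same objects through submodules. A hedged equivalent of this function's core with the post-0.8 namespace (behaviour unchanged):

import numpy as np
import open3d as o3d

points = np.random.rand(1024, 3)
cloud = o3d.geometry.PointCloud()
cloud.points = o3d.utility.Vector3dVector(points)
cloud.colors = o3d.utility.Vector3dVector(np.random.rand(1024, 3))  # per-point RGB in [0, 1]
o3d.visualization.draw_geometries([cloud])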
@@ -37,6 +37,7 @@ def view_points(points, colors=None):
     o3d.draw_geometries([cloud])
 def label2color(labels):
     '''
     labels: np.ndarray with shape (n, )