import sys
import os
import shutil
import math
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../') # add project root directory
from dataset.shapenet import ShapeNetPartSegDataset
from model.pointnet2_part_seg import PointNet2PartSegmentNet
import torch_geometric.transforms as GT
import torch
import argparse
from distutils.util import strtobool
import numpy as np
from sklearn.cluster import DBSCAN
from sklearn.preprocessing import StandardScaler
import open3d as o3d
import pointcloud as pc


def eval_sample(net, sample):
    '''
    sample: { 'points': tensor(n, 3), 'labels': tensor(n,) }
    return: (pred_label, gt_label) with labels shape (n,)
    '''
    net.eval()
    with torch.no_grad():
        # points: (n, 3)
        points, gt_label = sample['points'], sample['labels']
        n = points.shape[0]

        points = points.view(1, n, 3)  # make a batch
        points = points.transpose(1, 2).contiguous()
        # device and dtype are module-level globals defined in the __main__ block below
        points = points.to(device, dtype)

        pred = net(points)  # (batch_size, n, num_classes)
        pred_label = pred.max(2)[1]
        pred_label = pred_label.view(-1).cpu()  # (n,)

        assert pred_label.shape == gt_label.shape
        return (pred_label, gt_label)


def mini_color_table(index, norm=True):
    colors = [
        [0.5000, 0.5400, 0.5300], [0.8900, 0.1500, 0.2100], [0.6400, 0.5800, 0.5000],
        [1.0000, 0.3800, 0.0100], [1.0000, 0.6600, 0.1400], [0.4980, 1.0000, 0.0000],
        [0.4980, 1.0000, 0.8314], [0.9412, 0.9725, 1.0000], [0.5412, 0.1686, 0.8863],
        [0.5765, 0.4392, 0.8588], [0.3600, 0.1400, 0.4300], [0.5600, 0.3700, 0.6000],
    ]

    color = colors[index % len(colors)]
    if not norm:
        color[0] *= 255
        color[1] *= 255
        color[2] *= 255
    return color


def label2color(labels):
    '''
    labels: np.ndarray with shape (n, )
    colors(return): np.ndarray with shape (n, 3)
    '''
    num = labels.shape[0]
    colors = np.zeros((num, 3))

    minl, maxl = np.min(labels), np.max(labels)
    for l in range(minl, maxl + 1):
        colors[labels == l, :] = mini_color_table(l)

    return colors


def clusterToColor(cluster, cluster_idx):
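    '''
    Return an (n, 3) color array that assigns every point of the cluster
    the same color, picked from mini_color_table by cluster_idx.
    '''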
    colors = np.zeros(shape=(len(cluster), 3))
    point_idx = 0
    for point in cluster:
        colors[point_idx, :] = mini_color_table(cluster_idx)
        point_idx += 1

    return colors


def normalize_pointcloud(pc):
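    '''
    Scale the xyz coordinates (columns 0:3) by the largest bounding-box extent and
    normalize the normal vectors (columns 3:6) to unit length. Operates in place.
    '''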
    pc_max = pc.max(axis=0)
    pc_min = pc.min(axis=0)

    f = np.max([abs(pc_max[0] - pc_min[0]), abs(pc_max[1] - pc_min[1]), abs(pc_max[2] - pc_min[2])])

    pc[:, 0:3] /= f
    pc[:, 3:6] /= (np.linalg.norm(pc[:, 3:6], ord=2, axis=1, keepdims=True))

    return pc


def farthest_point_sampling(pts, K):
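    '''
    Farthest point sampling: greedily pick K points, each time taking the point with
    the largest distance to the points selected so far (distances use the xyz columns only).
    If the cloud has fewer than K points it is returned unchanged.
    '''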
    if pts.shape[0] < K:
        return pts

    def calc_distances(p0, points):
        return ((p0[:3] - points[:, :3]) ** 2).sum(axis=1)

    farthest_pts = np.zeros((K, pts.shape[1]))
    farthest_pts[0] = pts[np.random.randint(len(pts))]
    distances = calc_distances(farthest_pts[0], pts)

    for i in range(1, K):
        farthest_pts[i] = pts[np.argmax(distances)]
        distances = np.minimum(distances, calc_distances(farthest_pts[i], pts))

    return farthest_pts


def append_onehotencoded_type(data, factor = 1.0):
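    '''
    One-hot encode the integer type label stored in column 6 and append it as extra
    columns scaled by `factor` (four columns, matching the number of part types this
    pipeline expects).
    '''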
    types = data[:, 6].astype(int)
    res = np.zeros((len(types), 4))
    res[np.arange(len(types)), types] = factor
    return np.column_stack((data, res))


def append_normal_angles(data):
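    '''
    Convert the unit normals in columns 3:6 to spherical angles, theta = acos(z) / pi
    and phi = (atan2(y, x) + pi) / (2 * pi), both in [0, 1], and append them as two
    extra columns.
    '''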
    def func(x):
        theta = math.acos(x[2]) / math.pi
        phi = (math.atan2(x[1], x[0]) + math.pi) / (2.0 * math.pi)
        return (theta, phi)

    res = np.array([func(xi) for xi in data[:, 3:6]])
    print(res)

    return np.column_stack((data, res))


def extract_cube_clusters(data, cluster_dims, max_points_per_cluster):
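    '''
    Split the bounding box of the cloud into a cluster_dims[0] x cluster_dims[1] x cluster_dims[2]
    grid of cells, bucket the points by cell, and downsample every bucket with farthest
    point sampling to at most max_points_per_cluster points.
    '''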
    bb_max = data[:, :3].max(axis=0)
    bb_min = data[:, :3].min(axis=0)
    # Pad the bounding box by 1% of its extent so boundary points fall inside the last cell.
    padding = (bb_max - bb_min) * 0.01
    bb_max += padding
    bb_min -= padding
    size = (bb_max - bb_min)

    clusters = {}

    cluster_size = size / np.array(cluster_dims, dtype=np.float32)

    print('Min: ' + str(bb_min) + ' Max: ' + str(bb_max))
    print('Cluster Size: ' + str(cluster_size))

    for row in data:
        # print('Row: ' + str(row))
        cluster_pos = ((row[:3] - bb_min) / cluster_size).astype(int)
        cluster_idx = cluster_dims[0] * cluster_dims[2] * cluster_pos[1] + cluster_dims[0] * cluster_pos[2] + cluster_pos[0]
        clusters.setdefault(cluster_idx, []).append(row)

    # Apply farthest point sampling to each cluster
    for key, cluster in clusters.items():
        c = np.vstack(cluster)
        clusters[key] = farthest_point_sampling(c, max_points_per_cluster)

    return clusters.values()


def extract_clusters(data, selected_indices, eps, min_samples, metric='euclidean', algo='auto'):
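    '''
    Cluster the points with DBSCAN on the feature columns given by selected_indices.
    min_samples is interpreted as a fraction of the dataset size, and points labelled
    as noise (-1) are dropped. Returns a list of numpy arrays, one per cluster.
    '''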
    min_samples = min_samples * len(data)

    print('Clustering. Min Samples: ' + str(min_samples) + ' EPS: ' + str(eps))

    # Column layout of the feature matrix:
    # 0,1,2 : pos
    # 3,4,5 : normal
    # 6: type index
    # 7,8,9,10: type index one hot encoded
    # 11,12: normal as angles
    db_res = DBSCAN(eps=eps, metric=metric, n_jobs=-1, algorithm=algo, min_samples=min_samples).fit(data[:, selected_indices])

    labels = db_res.labels_
    n_clusters = len(set(labels)) - (1 if -1 in labels else 0)
    n_noise = list(labels).count(-1)
    print("Noise: " + str(n_noise) + " Clusters: " + str(n_clusters))

    clusters = {}
    for idx, l in enumerate(labels):
        if l == -1:  # skip noise points ('is' would silently fail on numpy ints)
            continue
        clusters.setdefault(str(l), []).append(data[idx, :])

    npClusters = []
    for cluster in clusters.values():
        npClusters.append(np.array(cluster))

    return npClusters


def draw_clusters(clusters):
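    '''
    Render all clusters in a single Open3D window, one color per cluster
    (uses the top-level o3d.PointCloud / o3d.draw_geometries API of older Open3D releases).
    '''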
    clouds = []

    cluster_idx = 0
    for cluster in clusters:
        cloud = o3d.PointCloud()
        cloud.points = o3d.Vector3dVector(cluster[:, :3])
        cloud.colors = o3d.Vector3dVector(clusterToColor(cluster, cluster_idx))
        clouds.append(cloud)
        cluster_idx += 1

    o3d.draw_geometries(clouds)


def draw_sample_data(sample_data, colored_normals = False):
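    '''
    Show a single sample in an Open3D window, colored either by the label in column 6
    or, if colored_normals is set, by the normals in columns 3:6.
    '''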
    cloud = o3d.PointCloud()
    cloud.points = o3d.Vector3dVector(sample_data[:, :3])
    cloud.colors = \
        o3d.Vector3dVector(label2color(sample_data[:, 6].astype(int)) if not colored_normals else sample_data[:, 3:6])

    o3d.draw_geometries([cloud])


def recreate_folder(folder):
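    '''
    Delete the folder if it exists and create it again empty.
    '''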
    if os.path.exists(folder) and os.path.isdir(folder):
        shutil.rmtree(folder)
    os.mkdir(folder)


parser = argparse.ArgumentParser()
parser.add_argument('--npoints', type=int, default=2048, help='resample points number')
parser.add_argument('--model', type=str, default='./checkpoint/seg_model_custom_3.pth', help='model path')
parser.add_argument('--sample_idx', type=int, default=0, help='select a sample to segment and view result')
parser.add_argument('--headers', type=strtobool, default=True, help='whether the raw files come with headers')
parser.add_argument('--collate_per_segment', type=strtobool, default=True,
                    help='whether to collate the data per segment or per whole pointcloud')
parser.add_argument('--has_variations', type=strtobool, default=False,
                    help='whether a single pointcloud has variations named int(id)_pc.(xyz|dat)')
opt = parser.parse_args()
print(opt)
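
# Example invocation (the script filename is a placeholder; the paths follow the defaults above):
#   python predict.py --npoints 2048 --model ./checkpoint/seg_model_custom_3.pth
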
if __name__ == '__main__':
    # Create dataset
    print('Create data set ..')
    dataset_folder = './data/raw/predict/'
    pointcloud_file = './pointclouds/0_pc.xyz'

    pcloud = pc.read_pointcloud(pointcloud_file)
    pcloud = normalize_pointcloud(pcloud)
    pcloud = append_normal_angles(pcloud)

    # pcloud = StandardScaler().fit_transform(pcloud)

    recreate_folder(dataset_folder)

    # Add full point cloud to prediction folder.
    recreate_folder(dataset_folder + '0_0' + '/')
    pc_fps = farthest_point_sampling(pcloud, opt.npoints)
    pc.write_pointcloud(dataset_folder + '0_0' + '/pc.xyz', pc_fps)

    pc_clusters = extract_cube_clusters(pcloud, [4, 4, 4], 1024)
    # pc_clusters = extract_clusters(pcloud, [0, 1, 2, 3, 4, 5], eps=0.1, min_samples=0.0001, metric='euclidean', algo='auto')

    # Add cluster point clouds to prediction folder.
    for idx, pcc in enumerate(pc_clusters):
        pcc = farthest_point_sampling(pcc, opt.npoints)
        recreate_folder(dataset_folder + str(idx) + '/')
        pc.write_pointcloud(dataset_folder + str(idx) + '/pc.xyz', pcc)
        # draw_sample_data(pcc, False)

    draw_clusters(pc_clusters)

    # Load dataset
    print('Load dataset ..')
    test_transform = GT.Compose([GT.NormalizeScale(), ])

    test_dataset = ShapeNetPartSegDataset(
        mode='predict',
        root_dir='data',
        npoints=opt.npoints,
        refresh=True,
        collate_per_segment=opt.collate_per_segment,
        has_variations=opt.has_variations,
        headers=opt.headers
    )

    num_classes = test_dataset.num_classes()
    print('test dataset size: ', len(test_dataset))

    # Load model
    print('Construct model ..')
    device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    dtype = torch.float

    # net = PointNetPartSegmentNet(num_classes)
    net = PointNet2PartSegmentNet(num_classes)

    net.load_state_dict(torch.load(opt.model, map_location=device.type))
    net = net.to(device, dtype)
    net.eval()

    result_clusters = []

    # Iterate over all the samples
    for sample in test_dataset:
        print('Eval test sample ..')
        pred_label, gt_label = eval_sample(net, sample)
        sample_data = np.column_stack((sample["points"].numpy(), sample["normals"].numpy(), pred_label.numpy()))
        print('Eval done.')

        sample_data = normalize_pointcloud(sample_data)
        sample_data = append_onehotencoded_type(sample_data, 1.0)
        sample_data = append_normal_angles(sample_data)

        print('Clustering ..')
        print('Shape: ' + str(sample_data.shape))

        clusters = extract_clusters(sample_data, [0, 1, 2, 3, 4, 5, 7, 8, 9, 10], eps=0.1, min_samples=0.0001, metric='euclidean', algo='auto')
        print('Clustering done. ' + str(len(clusters)) + " Clusters.")
        print(sample_data[:, 6])

        draw_sample_data(sample_data, False)

        result_clusters.extend(clusters)
        # result_clusters.append(sample_data)

    # draw_clusters(result_clusters)