dataset modification

This commit is contained in:
Si11ium
2020-06-19 19:00:07 +02:00
parent b3c67bab40
commit a19bd9cafd
4 changed files with 36 additions and 20 deletions


@@ -57,7 +57,7 @@ def predict_prim_type(input_pc, model):
 if __name__ == '__main__':
-    input_pc_path = Path('data') / 'pc' / 'pc.txt'
+    input_pc_path = Path('data') / 'pc' / 'test.xyz'
     model_path = Path('output') / 'PN2' / 'PN_26512907a2de0664bfad2349a6bffee3' / 'version_0'
     # config_filename = 'config.ini'
@@ -66,15 +66,19 @@ if __name__ == '__main__':
     loaded_model = restore_logger_and_model(model_path)
     loaded_model.eval()
-    input_pc = read_pointcloud(input_pc_path, ' ', False)
+    #input_pc = read_pointcloud(input_pc_path, ' ', False)
-    input_pc = normalize_pointcloud(input_pc)
+    # input_pc = normalize_pointcloud(input_pc)
-    grid_clusters = cluster_cubes(input_pc, [1,1,1], 1024)
+    # TEST DATASET
+    test_dataset = ShapeNetPartSegDataset('data', mode=GlobalVar.data_split.predict, collate_per_segment=False,
+                                          npoints=1024, refresh=True)
+    grid_clusters = cluster_cubes(test_dataset[0], [3, 3, 3], max_points_per_cluster=1024)
     ps.init()
-    for i,grid_cluster_pc in enumerate(grid_clusters):
+    for i, grid_cluster_pc in enumerate(grid_clusters):
         print("Cluster pointcloud size: {}".format(grid_cluster_pc.shape[0]))