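"""Predict per-point primitive types for a point cloud and visualize them.

The input cloud is read from disk, normalized, split into grid clusters, run
through a restored Lightning model, and displayed in polyscope with one color
per predicted primitive type.
"""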
from pathlib import Path

import torch
import polyscope as ps
import numpy as np

from torch.utils.data import DataLoader

# Dataset and Dataloaders
# =============================================================================

# Transforms
from ml_lib.point_toolset.point_io import BatchToData
from ml_lib.utils.model_io import SavedLightningModels

# Datasets
from datasets.shapenet import ShapeNetPartSegDataset
from utils.pointcloud import read_pointcloud, normalize_pointcloud, cluster_cubes, append_onehotencoded_type, \
    label2color
from utils.project_settings import GlobalVar


def prepare_dataloader(config_obj):
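    """Build a non-shuffled DataLoader over the ShapeNet part-segmentation test split."""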
    dataset = ShapeNetPartSegDataset(config_obj.data.root, split=GlobalVar.data_split.test,
                                     setting=GlobalVar.settings[config_obj.model.type])
    # noinspection PyTypeChecker
    return DataLoader(dataset, batch_size=config_obj.train.batch_size,
                      num_workers=config_obj.data.worker, shuffle=False)


def restore_logger_and_model(log_dir):
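    """Restore the saved Lightning model from ``log_dir`` and move it to the GPU if one is available."""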
    model = SavedLightningModels.load_checkpoint(models_root_path=log_dir, n=-1)
    model = model.restore()
    if torch.cuda.is_available():
        model.cuda()
    else:
        model.cpu()
    return model


def predict_prim_type(input_pc, model):
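    """Run ``model`` on a single cluster and append the predicted primitive-type label as an extra column.

    Columns 0-2 of ``input_pc`` are taken as xyz positions, columns 3-5 as normals.
    """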
    input_data = dict(norm=torch.tensor(np.array([input_pc[:, 3:6]], np.float64)),
                      pos=torch.tensor(input_pc[:, 0:3]),
                      y=np.zeros(input_pc.shape[0])
                      )

    batch_to_data = BatchToData()

    data = batch_to_data(input_data)
    y = model(data.to(device='cuda' if torch.cuda.is_available() else 'cpu'))
    y_primary = torch.argmax(y.main_out, dim=-1).squeeze().cpu().numpy()

    return np.concatenate((input_pc, y_primary.reshape(-1, 1)), axis=1)


if __name__ == '__main__':
    input_pc_path = Path('data') / 'pc' / 'pc.txt'

    model_path = Path('output') / 'PN2' / 'PN_26512907a2de0664bfad2349a6bffee3' / 'version_0'
    # config_filename = 'config.ini'
    # config = ThisConfig()
    # config.read_file((Path(model_path) / config_filename).open('r'))
    loaded_model = restore_logger_and_model(model_path)
    loaded_model.eval()

    input_pc = read_pointcloud(input_pc_path, ' ', False)
    input_pc = normalize_pointcloud(input_pc)

    grid_clusters = cluster_cubes(input_pc, [1, 1, 1], 2048)

    ps.init()

    for i, grid_cluster_pc in enumerate(grid_clusters):

        print("Cluster pointcloud size: {}".format(grid_cluster_pc.shape[0]))

        pc_with_prim_type = predict_prim_type(grid_cluster_pc, loaded_model)

        # pc_with_prim_type = polytopes_to_planes(pc_with_prim_type)

        pc_with_prim_type = append_onehotencoded_type(pc_with_prim_type)

        # Color each registered cluster by its predicted primitive type.
        pc = ps.register_point_cloud("points_" + str(i), pc_with_prim_type[:, :3], radius=0.01)
        pc.add_color_quantity("prim types", label2color(pc_with_prim_type[:, 6].astype(np.int64)), True)

    ps.show()

    print('Done')