eval running - offline logger implemented -> Test it!
@@ -5,7 +5,7 @@ from abc import ABC
 from pathlib import Path
 
 from torch.utils.data import Dataset
-from ml_lib.point_toolset.sampling import FarthestpointSampling
+from ml_lib.point_toolset.sampling import FarthestpointSampling, RandomSampling
 
 import numpy as np
 
@@ -22,16 +22,21 @@ class _Point_Dataset(ABC, Dataset):
         raise NotImplementedError
 
     headers = ['x', 'y', 'z', 'xn', 'yn', 'zn', 'label', 'cl_idx']
+    samplers = dict(fps=FarthestpointSampling, rnd=RandomSampling)
 
-    def __init__(self, root=Path('data'), sampling_k=2048, transforms=None, load_preprocessed=True, *args, **kwargs):
+    def __init__(self, root=Path('data'), norm_as_feature=True, sampling_k=2048, sampling='rnd',
+                 transforms=None, load_preprocessed=True, split='train', dense_output=False, *args, **kwargs):
         super(_Point_Dataset, self).__init__()
 
+        self.dense_output = dense_output
+        self.split = split
+        self.norm_as_feature = norm_as_feature
         self.load_preprocessed = load_preprocessed
         self.transforms = transforms if transforms else lambda x: x
         self.sampling_k = sampling_k
-        self.sampling = FarthestpointSampling(K=self.sampling_k)
+        self.sampling = self.samplers[sampling](K=self.sampling_k)
         self.root = Path(root)
-        self.raw = self.root / 'raw'
+        self.raw = self.root / 'raw' / self.split
         self.processed_ext = '.pik'
         self.raw_ext = '.xyz'
         self.processed = self.root / self.setting
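A minimal, self-contained sketch of the sampler-registry dispatch introduced above. The K-keyword constructor and the index-returning __call__ are assumptions read off this diff; RandomSampling here is an illustrative stand-in for the ml_lib implementation:

    import numpy as np

    class RandomSampling:
        def __init__(self, K):
            self.K = K  # number of points to keep per cloud

        def __call__(self, points):
            # uniformly sample K point indices without replacement
            return np.random.choice(points.shape[0], self.K, replace=False)

    samplers = dict(rnd=RandomSampling)        # registry: string key -> sampler class
    sampling = samplers['rnd'](K=2048)         # mirrors self.samplers[sampling](K=self.sampling_k)
    sample_idxs = sampling(np.random.rand(4096, 3))  # -> array of 2048 indices

Keeping the registry as a class attribute means subclasses can extend or override the available samplers without touching __init__.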
@@ -9,6 +9,7 @@ from ._point_dataset import _Point_Dataset
 class FullCloudsDataset(_Point_Dataset):
 
     setting = 'pc'
+    split: str
 
     def __init__(self, *args, **kwargs):
         super(FullCloudsDataset, self).__init__(*args, **kwargs)
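With split now threaded through the base class, the on-disk layout implied by `self.raw = self.root / 'raw' / self.split` and `self.processed = self.root / self.setting` looks roughly like this (split names beyond the 'train' default are illustrative):

    data/
    ├── raw/
    │   ├── train/   # *.xyz raw clouds, selected via split='train'
    │   └── test/
    └── pc/          # *.pik processed files for the 'pc' setting

    train_set = FullCloudsDataset(root='data', split='train')  # reads data/raw/train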
@@ -21,13 +22,15 @@ class FullCloudsDataset(_Point_Dataset):
 
         with processed_file_path.open('rb') as processed_file:
             pointcloud = pickle.load(processed_file)
-        points = np.stack((pointcloud['x'], pointcloud['y'], pointcloud['z'],
-                           pointcloud['xn'], pointcloud['yn'], pointcloud['zn']
-                           ),
-                          axis=-1)
-        # When you want to return points and normals separately
-        # normal = np.stack((pointcloud['xn'], pointcloud['yn'], pointcloud['zn']), axis=-1)
-        label = pointcloud['label']
-        sample_idxs = self.sampling(points)
-
-        return points[sample_idxs].astype(np.float), label[sample_idxs].astype(np.int)
+        position = np.stack((pointcloud['x'], pointcloud['y'], pointcloud['z']), axis=-1)
+
+        normal = np.stack((pointcloud['xn'], pointcloud['yn'], pointcloud['zn']), axis=-1)
+
+        label = pointcloud['label']
+
+        sample_idxs = self.sampling(position)
+
+        return (normal[sample_idxs].astype(np.float),
+                position[sample_idxs].astype(np.float),
+                label[sample_idxs].astype(np.int))
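A sketch of consuming the new (normal, position, label) triple in a training loop; the import path is hypothetical, and the shapes assume the default sampling_k=2048:

    import torch
    from torch.utils.data import DataLoader
    from datasets.full_pointclouds import FullCloudsDataset  # hypothetical module path

    dataset = FullCloudsDataset(root='data', split='train', sampling='rnd', sampling_k=2048)
    loader = DataLoader(dataset, batch_size=4, shuffle=True)

    for normal, position, label in loader:
        # normal, position: (4, 2048, 3); label: (4, 2048)
        # concatenate normals onto positions when they are used as extra features
        features = torch.cat([position, normal], dim=-1)  # (4, 2048, 6)

Note that sampling is driven by position only, so the same sample_idxs keep positions, normals, and labels aligned point-for-point.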
@@ -26,7 +26,7 @@ class FullCloudsDataset(_Point_Dataset):
 
         # When you want to return points and normals separately
         # normal = np.stack((pointcloud['xn'], pointcloud['yn'], pointcloud['zn']), axis=-1)
-        label = np.stack((pointcloud['label'], pointcloud['cl_idx']))
+        label = pointcloud['cl_idx']
         sample_idxs = self.sampling(points)
 
         return points[sample_idxs], label[sample_idxs]