data shapes retrieval and hparam passing
@@ -15,6 +15,10 @@ class BinaryMasksDataset(Dataset):
     _to_label['mask'] = V.MASK
     settings = ['test', 'devel', 'train']
 
+    @property
+    def sample_shape(self):
+        return self[0][0].shape
+
     def __init__(self, data_root, setting, transforms=None):
         assert isinstance(setting, str), f'Setting has to be a string, but was: {setting}.'
         assert setting in self.settings, f'Setting must match one of: {self.settings}.'
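The new property probes the first item instead of hard-coding a shape, so downstream code can size the model from the data itself. A minimal runnable sketch of the same pattern, using a hypothetical ToyDataset with placeholder tensors (an assumption for illustration; the real BinaryMasksDataset loads mask data from data_root):

import torch
from torch.utils.data import Dataset

class ToyDataset(Dataset):
    def __init__(self, n=8, shape=(1, 64, 64)):
        self.data = torch.zeros(n, *shape)   # placeholder samples
        self.labels = torch.zeros(n)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        # (sample, label) tuples, as sample_shape assumes
        return self.data[idx], self.labels[idx]

    @property
    def sample_shape(self):
        # Shape of one sample's data tensor, probed via the first item
        return self[0][0].shape

dataset = ToyDataset()
print(tuple(dataset.sample_shape))  # (1, 64, 64)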
main.py
@@ -108,7 +108,9 @@ def run_lightning_loop(config_obj):
     # Model
     # =============================================================================
     # Build and Init its Weights
-    model: LightningBaseModule = config_obj.build_and_init_model(weight_init_function=torch.nn.init.xavier_normal_)
+    config_obj.set('model', 'in_shape', str(tuple(train_dataset.sample_shape)))
+
+    model: LightningBaseModule = config_obj.build_and_init_model(weight_init_function=torch.nn.init.xavier_normal_)
 
     # Trainer
     # =============================================================================
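This is the hand-off that gives the commit its name: main.py probes train_dataset.sample_shape and writes it into the config before the model is built, so the model can read it back through its hparams. A hedged sketch of that flow with a hypothetical MiniConfig stand-in (config_obj's real implementation is not part of this diff); note the shape is stored as a string here, so the model side presumably parses it back before use:

from argparse import Namespace

class MiniConfig:
    # Hypothetical stand-in for config_obj; only the set-then-read
    # flow mirrors the diff, the storage details are assumptions.
    def __init__(self):
        self._store = {}

    def set(self, section, key, value):
        self._store.setdefault(section, {})[key] = value

    def build_hparams(self):
        # Expose the 'model' section the way the model reads it:
        # self.hparams.model_params.in_shape
        return Namespace(model_params=Namespace(**self._store.get('model', {})))

config = MiniConfig()
sample_shape = (1, 64, 64)  # stand-in for train_dataset.sample_shape
config.set('model', 'in_shape', str(tuple(sample_shape)))
hparams = config.build_hparams()
print(hparams.model_params.in_shape)  # "(1, 64, 64)"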
@@ -27,7 +27,10 @@ class BinaryClassifier(LightningBaseModule):
         return dict(val_loss=val_loss)
 
     def validation_epoch_end(self, outputs):
-        over_all_val_loss = torch.mean(torch.stack([output['val_loss'] for output in outputs]))
+        overall_val_loss = torch.mean(torch.stack([output['val_loss'] for output in outputs]))
+        return dict(log=dict(
+            mean_val_loss=overall_val_loss
+        ))
 
     def __init__(self, hparams):
         super(BinaryClassifier, self).__init__(hparams)
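Besides the rename, validation_epoch_end now returns the aggregated loss under a log key. A standalone check of the aggregation step, using made-up loss values:

import torch

# Per-batch outputs as produced by validation_step above
outputs = [dict(val_loss=torch.tensor(v)) for v in (0.9, 0.7, 0.5)]

# Stack the scalar losses and average them, as validation_epoch_end does
overall_val_loss = torch.mean(torch.stack([o['val_loss'] for o in outputs]))
print(overall_val_loss)  # tensor(0.7000)

log_dict = dict(log=dict(mean_val_loss=overall_val_loss))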
@@ -35,14 +38,12 @@ class BinaryClassifier(LightningBaseModule):
         self.criterion = nn.BCELoss()
 
         # Additional parameters
-        self.in_shape = ()
+        self.in_shape = self.hparams.model_params.in_shape
 
-        #
-
         # Model Modules
-        self.conv_1 = ConvModule(self.in_shape, 32, 5, )
-        self.conv_2 = ConvModule(64)
-        self.conv_3 = ConvModule(128)
+        self.conv_1 = ConvModule(self.in_shape, 32, 5, conv_stride=4, **hparams.model_params)
+        self.conv_2 = ConvModule(self.conv_1.shape, 64, 7, conv_stride=2, **hparams.model_params)
+        self.conv_3 = ConvModule(self.conv_2.shape, 128, 9, conv_stride=2, **hparams.model_params)
 
     def forward(self, batch, **kwargs):
         return batch
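The conv stack can now be chained because each module exposes the output shape it produces. ConvModule itself is not shown in this diff, so the following ShapeConv is a hypothetical reconstruction of that shape-propagation pattern only; the real module presumably also handles the extra **model_params keyword arguments:

import torch
from torch import nn

class ShapeConv(nn.Module):
    def __init__(self, in_shape, filters, kernel, conv_stride=1, **_ignored):
        super().__init__()
        # in_shape is (channels, height, width); extra kwargs are swallowed,
        # mirroring the **hparams.model_params pass-through in the diff
        self.conv = nn.Conv2d(in_shape[0], filters, kernel, stride=conv_stride)
        # Propagated output shape, computed once with a dummy forward pass
        # so the conv arithmetic never has to be derived by hand
        with torch.no_grad():
            out = self.conv(torch.zeros(1, *in_shape))
        self.shape = tuple(out.shape[1:])

    def forward(self, x):
        return self.conv(x)

conv_1 = ShapeConv((1, 64, 64), 32, 5, conv_stride=4)
conv_2 = ShapeConv(conv_1.shape, 64, 7, conv_stride=2)
print(conv_1.shape, conv_2.shape)  # (32, 15, 15) (64, 5, 5)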