from argparse import Namespace

from torch import nn
from torch.nn import ModuleList

from ml_lib.modules.blocks import ConvModule, LinearModule, ResidualModule
from ml_lib.modules.util import LightningBaseModule
from util.module_mixins import (BaseOptimizerMixin, BaseTrainMixin, BaseValMixin, DatasetMixin,
                                BaseDataloadersMixin)


class ResidualConvClassifier(DatasetMixin,
                             BaseDataloadersMixin,
                             BaseTrainMixin,
                             BaseValMixin,
                             BaseOptimizerMixin,
                             LightningBaseModule
                             ):

    def __init__(self, hparams):
        super(ResidualConvClassifier, self).__init__(hparams)

        # Dataset
        # =============================================================================
        self.dataset = self.build_dataset()

        # Model Parameters
        # =============================================================================
        # Additional parameters
        self.in_shape = self.dataset.train_dataset.sample_shape
        self.conv_filters = self.params.filters

        # Modules with Parameters
        self.conv_list = ModuleList()
        last_shape = self.in_shape
        k = 3  # Base kernel size

        # Copy the shared module kwargs so the update below does not mutate them in place.
        conv_module_params = dict(self.params.module_kwargs)
        conv_module_params.update(conv_kernel=(k, k), conv_stride=(1, 1), conv_padding=1)

        # Initial strided convolution for down-sampling.
        self.conv_list.append(ConvModule(last_shape, self.conv_filters[0], (k, k), conv_stride=(2, 2),
                                         conv_padding=1, **self.params.module_kwargs))
        last_shape = self.conv_list[-1].shape

        # Per filter stage: a 2-block residual module, followed by a strided
        # convolution that transitions to the next filter count. The IndexError
        # guard skips that transition after the last stage.
        for idx in range(len(self.conv_filters)):
            conv_module_params.update(conv_filters=self.conv_filters[idx])
            self.conv_list.append(ResidualModule(last_shape, ConvModule, 2, **conv_module_params))
            last_shape = self.conv_list[-1].shape
            try:
                self.conv_list.append(ConvModule(last_shape, self.conv_filters[idx + 1], (k, k), conv_stride=(2, 2),
                                                 conv_padding=2, **self.params.module_kwargs))
                last_shape = self.conv_list[-1].shape
            except IndexError:
                pass

        # Fully connected head.
        self.full_1 = LinearModule(self.conv_list[-1].shape, self.params.lat_dim, **self.params.module_kwargs)
        self.full_2 = LinearModule(self.full_1.shape, self.full_1.shape * 2, **self.params.module_kwargs)
        self.full_3 = LinearModule(self.full_2.shape, self.full_2.shape // 2, **self.params.module_kwargs)

        # Single sigmoid output for binary classification.
        self.full_out = LinearModule(self.full_3.shape, 1, bias=self.params.bias, activation=nn.Sigmoid)

    def forward(self, batch, **kwargs):
        tensor = batch
        for conv in self.conv_list:
            tensor = conv(tensor)

        tensor = self.full_1(tensor)
        tensor = self.full_2(tensor)
        tensor = self.full_3(tensor)
        tensor = self.full_out(tensor)
        return Namespace(main_out=tensor)
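

# The sketch below is a standalone illustration (no ml_lib required) of the layer
# pattern the constructor builds: an initial strided ConvModule for down-sampling,
# then, for every entry in `filters`, a 2-block ResidualModule followed by a strided
# ConvModule that transitions to the next filter count; the final transition is
# skipped, which is what the IndexError guard handles. The `filters` value here is a
# hypothetical example, not taken from any project config.
if __name__ == '__main__':
    filters = [16, 32, 64]  # hypothetical example for params.filters
    layers = [f'ConvModule(stride=2, out={filters[0]})']
    for idx, n_filters in enumerate(filters):
        layers.append(f'ResidualModule(2 x ConvModule, filters={n_filters})')
        if idx + 1 < len(filters):
            layers.append(f'ConvModule(stride=2, out={filters[idx + 1]})')
    print('\n'.join(layers))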