Parameter Adjustments and Ensemble Model Implementation
This commit is contained in:
models/ensemble.py  55 additions  (Normal file)
@@ -0,0 +1,55 @@
from argparse import Namespace
from pathlib import Path

import torch
from torch import nn
from torch.nn import ModuleList

from ml_lib.modules.utils import LightningBaseModule
from ml_lib.utils.config import Config
from ml_lib.utils.model_io import SavedLightningModels

from util.module_mixins import (BaseOptimizerMixin, BaseTrainMixin, BaseValMixin, BinaryMaskDatasetFunction,
                                BaseDataloadersMixin)


class Ensemble(BinaryMaskDatasetFunction,
               BaseDataloadersMixin,
               BaseTrainMixin,
               BaseValMixin,
               BaseOptimizerMixin,
               LightningBaseModule
               ):

    def __init__(self, hparams):
        super(Ensemble, self).__init__(hparams)

        # Dataset
        # =============================================================================
        self.dataset = self.build_dataset()

        # Model Parameters
        # =============================================================================
        # Additional parameters
        self.in_shape = self.dataset.train_dataset.sample_shape
        self.conv_filters = self.params.filters
        self.criterion = nn.BCELoss()

        # Pre-trained models
        out_path = Path('output') / self.params.secondary_type
        # exp_paths = list(out_path.rglob(f'*{self.params.exp_fingerprint}'))
        exp_paths = list(out_path.rglob('*e87b8f455ba134504b1ae17114ac2a2a'))
        config_ini_files = sum([list(exp_path.rglob('config.ini')) for exp_path in exp_paths], [])

        self.model_list = ModuleList()

        # Restore every saved sub-model from its experiment config and register it for ensembling
        configs = [Config() for _ in range(len(config_ini_files))]
        for config, ini_file in zip(configs, config_ini_files):
            config.read_file(ini_file.open('r'))
            model = SavedLightningModels.load_checkpoint(models_root_path=config.exp_path / config.version).restore()
            self.model_list.append(model)

    def forward(self, batch, **kwargs):
        # Average the predictions of all restored sub-models
        ys = [model(batch).main_out for model in self.model_list]
        tensor = torch.stack(ys).mean(dim=0)

        return Namespace(main_out=tensor)
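
Below is a standalone sketch (not part of this commit) illustrating the output-averaging strategy that Ensemble.forward uses: per-model predictions are stacked and averaged along the model dimension. ToyModel and average_predictions are hypothetical stand-ins for the restored checkpoints and the mixin-backed module above; the sketch only needs torch to run.

# Standalone sketch of the ensembling step; names here are illustrative only.
from argparse import Namespace

import torch
from torch import nn
from torch.nn import ModuleList


class ToyModel(nn.Module):
    """Stand-in for a restored sub-model that returns a Namespace with `main_out`."""
    def __init__(self, in_features: int):
        super().__init__()
        self.head = nn.Sequential(nn.Linear(in_features, 1), nn.Sigmoid())

    def forward(self, batch):
        return Namespace(main_out=self.head(batch))


def average_predictions(models: ModuleList, batch: torch.Tensor) -> Namespace:
    # Same reduction as Ensemble.forward: stack per-model outputs, then average over models.
    ys = [model(batch).main_out for model in models]
    return Namespace(main_out=torch.stack(ys).mean(dim=0))


if __name__ == '__main__':
    models = ModuleList([ToyModel(in_features=16) for _ in range(3)])
    batch = torch.randn(4, 16)
    out = average_predictions(models, batch)
    print(out.main_out.shape)  # torch.Size([4, 1])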