initial
lib/utils/__init__.py (new file, empty)
lib/utils/__pycache__/__init__.cpython-37.pyc (new binary file, not shown)
lib/utils/__pycache__/config.cpython-37.pyc (new binary file, not shown)
lib/utils/__pycache__/logging.cpython-37.pyc (new binary file, not shown)
lib/utils/config.py (new file, 96 lines)
import ast

from argparse import Namespace
from collections import defaultdict
from configparser import ConfigParser
from pathlib import Path


def is_jsonable(x):
    import json
    try:
        json.dumps(x)
        return True
    except TypeError:
        return False


class Config(ConfigParser):

    # TODO: Do this programmatically; this did not work:
    # Initialize default sections
    # for section in self.default_sections:
    #     self.__setattr__(section, property(lambda x: x._get_namespace_for_section(section)))

    @property
    def main(self):
        return self._get_namespace_for_section('main')

    @property
    def model(self):
        return self._get_namespace_for_section('model')

    @property
    def train(self):
        return self._get_namespace_for_section('train')

    @property
    def data(self):
        return self._get_namespace_for_section('data')

    @property
    def project(self):
        return self._get_namespace_for_section('project')
    ###################################################

    @property
    def tags(self):
        return [f'{key}: {val}' for key, val in self.serializable.items()]

    @property
    def serializable(self):
        return {f'{section}_{key}': val for section, params in self._sections.items()
                for key, val in params.items() if is_jsonable(val)}

    @property
    def as_dict(self):
        return self._sections

    def _get_namespace_for_section(self, item):
        return Namespace(**{key: self.get(item, key) for key in self[item]})

    def __init__(self, **kwargs):
        super(Config, self).__init__(**kwargs)

    @classmethod
    def read_namespace(cls, namespace: Namespace):
        # Rebuild a sectioned config from flat 'section_key' attribute names.
        space_dict = defaultdict(dict)
        for key in namespace.__dict__:
            section, *attr_name = key.split('_')
            attr_name = '_'.join(attr_name)
            value = str(namespace.__getattribute__(key))

            space_dict[section][attr_name] = value
        new_config = cls()
        new_config.read_dict(space_dict)
        return new_config

    def get(self, *args, **kwargs):
        # Return values parsed as Python literals where possible, raw strings otherwise.
        item = super(Config, self).get(*args, **kwargs)
        try:
            return ast.literal_eval(item)
        except (SyntaxError, ValueError):
            return item

    def write(self, filepath, **kwargs):
        path = Path(filepath)  # Path() takes no exist_ok argument; mkdir below creates missing parents
        path.parent.mkdir(parents=True, exist_ok=True)

        with path.open('w') as configfile:
            super().write(configfile)
        return True
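A minimal usage sketch for Config, assuming an INI-style layout with the [main], [model], [train] and [project] sections the properties above expect; the concrete option names and the output path are illustrative, not part of the commit:

# Hypothetical usage sketch; section layout follows the properties above,
# but the option names and output path are made up for illustration.
from lib.utils.config import Config

config = Config()
config.read_dict({
    'main':  {'seed': '42', 'debug': 'False'},
    'model': {'type': 'cnn_classifier', 'activation': 'relu'},
    'train': {'lr': '1e-3', 'outpath': 'output'},
})

print(config.main.seed)        # 42 -- get() runs ast.literal_eval, so numeric strings come back typed
print(config.model.type)       # 'cnn_classifier' -- non-literals fall back to the raw string
print(config.serializable)     # flat {'section_key': value} dict, e.g. for experiment trackers
config.write('output/config.ini')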
lib/utils/logging.py (new file, 69 lines)
from pathlib import Path

from pytorch_lightning.logging.base import LightningLoggerBase
from pytorch_lightning.logging.neptune import NeptuneLogger
from pytorch_lightning.logging.test_tube import TestTubeLogger

from lib.utils.config import Config


class Logger(LightningLoggerBase):

    @property
    def experiment(self):
        if self.debug:
            return self.testtubelogger.experiment
        else:
            return self.neptunelogger.experiment

    @property
    def name(self):
        return self.config.model.type

    @property
    def project_name(self):
        return f"{self.config.project.owner}/{self.config.project.name}"

    @property
    def version(self):
        return f"version_{self.config.get('main', 'seed')}"

    @property
    def outpath(self):
        # ToDo: Add further path modification such as dataset config etc.
        return Path(self.config.train.outpath)

    def __init__(self, config: Config, debug=False):
        """
        params (dict|None): Optional. Parameters of the experiment. After experiment creation params are read-only.
            Parameters are displayed in the experiment's Parameters section, and each key-value pair can be
            viewed in the experiments view as a column.
        properties (dict|None): Optional, default is {}. Properties of the experiment.
            They are editable after the experiment is created. Properties are displayed in the experiment's
            Details section, and each key-value pair can be viewed in the experiments view as a column.
        tags (list|None): Optional, default is []. Must be a list of str. Tags of the experiment.
            They are editable after the experiment is created (see: append_tag() and remove_tag()).
            Tags are displayed in the experiment's Details section and can be viewed in the experiments view as a column.
        """
        super(Logger, self).__init__()

        self.debug = debug
        self.config = config
        self._testtube_kwargs = dict(save_dir=self.outpath, version=self.version, name=self.name)
        self._neptune_kwargs = dict(offline_mode=not self.debug,
                                    api_key=self.config.project.neptune_key,
                                    project_name=self.project_name,
                                    name=self.name,
                                    upload_source_files=list())
        self.neptunelogger = NeptuneLogger(**self._neptune_kwargs)
        self.testtubelogger = TestTubeLogger(**self._testtube_kwargs)

    def log_hyperparams(self, params):
        # Fan out to both backends.
        self.neptunelogger.log_hyperparams(params)
        self.testtubelogger.log_hyperparams(params)

    def log_metrics(self, metrics, step_num):
        self.neptunelogger.log_metrics(metrics, step_num)
        self.testtubelogger.log_metrics(metrics, step_num)
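A sketch of how this Logger is presumably wired into a pytorch_lightning Trainer of that era (pre-1.0, where loggers lived under pytorch_lightning.logging); the config file path and its contents are assumptions:

# Hypothetical wiring sketch; the config file and its values are assumptions.
from pytorch_lightning import Trainer

from lib.utils.config import Config
from lib.utils.logging import Logger

config = Config()
config.read('output/config.ini')     # needs [main], [model], [train] and [project] (owner, name, neptune_key)

logger = Logger(config, debug=True)  # debug=True routes .experiment to the TestTube backend
trainer = Trainer(logger=logger)     # log_hyperparams / log_metrics then fan out to both backends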
lib/utils/model_io.py (new file, 76 lines)
from argparse import Namespace
from pathlib import Path
from natsort import natsorted
from torch import nn


# Hyperparameter object
class ModelParameters(Namespace):

    _activations = dict(
        leaky_relu=nn.LeakyReLU,
        relu=nn.ReLU,
        sigmoid=nn.Sigmoid,
        tanh=nn.Tanh
    )

    @property
    def model_param(self):
        return self._model_param

    @property
    def train_param(self):
        return self._train_param

    @property
    def data_param(self):
        return self._data_param

    def __init__(self, model_param, train_param, data_param):
        self._model_param = model_param
        self._train_param = train_param
        self._data_param = data_param
        kwargs = vars(model_param)
        kwargs.update(vars(train_param))
        kwargs.update(vars(data_param))
        super(ModelParameters, self).__init__(**kwargs)

    def __getattribute__(self, item):
        if item == 'activation':
            # Map the stored activation name (e.g. 'tanh') to its nn module class;
            # fall back to ReLU if the name is missing or unknown.
            try:
                name = super(ModelParameters, self).__getattribute__(item)
                return self._activations[name]
            except (AttributeError, KeyError):
                return nn.ReLU
        return super(ModelParameters, self).__getattribute__(item)


class SavedLightningModels(object):

    @classmethod
    def load_checkpoint(cls, models_root_path, model, n=-1, tags_file_path=''):
        models_root_path = Path(models_root_path)
        assert models_root_path.exists(), f'The path {models_root_path.absolute()} does not exist!'
        found_checkpoints = list(models_root_path.rglob('*.ckpt'))

        found_checkpoints = natsorted(found_checkpoints, key=lambda y: y.name)

        if not tags_file_path:
            tag_files = models_root_path.rglob('meta_tags.csv')
            tags_file_path = list(tag_files)[0]

        return cls(weights=found_checkpoints[n], model=model, tags=tags_file_path)

    def __init__(self, **kwargs):
        self.weights: str = kwargs.get('weights', '')
        self.tags: str = kwargs.get('tags', '')

        self.model = kwargs.get('model', None)
        assert self.model is not None

    def restore(self):
        pretrained_model = self.model.load_from_metrics(
            weights_path=self.weights,
            tags_csv=self.tags
        )
        pretrained_model.eval()
        pretrained_model.freeze()
        return pretrained_model
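To show how the two classes fit together, a hedged sketch: ModelParameters merges three argparse namespaces into one flat hyperparameter object, and SavedLightningModels picks the newest *.ckpt under a run directory. MyLightningModule, the parameter names and the checkpoint path are placeholders, not part of the commit.

# Hypothetical usage sketch; MyLightningModule, the parameter names and the
# checkpoint directory are placeholders.
from argparse import Namespace
from pathlib import Path

from lib.utils.model_io import ModelParameters, SavedLightningModels

params = ModelParameters(
    model_param=Namespace(type='cnn_classifier', activation='tanh'),
    train_param=Namespace(lr=1e-3, epochs=10),
    data_param=Namespace(batch_size=64),
)
print(params.lr)          # 0.001 -- merged in from train_param
print(params.activation)  # nn.Tanh, resolved through the _activations lookup

loaded = SavedLightningModels.load_checkpoint(Path('output/cnn_classifier/version_42'),
                                              model=MyLightningModule)  # placeholder LightningModule class
model = loaded.restore()  # eval() + freeze(), ready for inference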
lib/utils/transforms.py (new file, 12 lines)
import numpy as np


class AsArray(object):
    def __init__(self, width, height):
        self.width = width
        self.height = height

    def __call__(self, x):
        # Placeholder transform: currently ignores the input x and returns an
        # all-zero array of the configured (width, height) shape.
        array = np.zeros((self.width, self.height))

        return array
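For completeness, a small sketch of the transform in use; note that in its current stub form AsArray ignores its input:

# Minimal sketch: the transform currently ignores its input and yields zeros.
from lib.utils.transforms import AsArray

to_array = AsArray(width=64, height=64)
out = to_array('anything')
print(out.shape)   # (64, 64)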