Steffen Illium 2020-02-13 20:28:20 +01:00
commit 91ecf157d6
45 changed files with 1319 additions and 0 deletions

2
.idea/.gitignore generated vendored Normal file

@@ -0,0 +1,2 @@
# Default ignored files
/workspace.xml

8
.idea/hom_traj_gen.iml generated Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$" />
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>

6
.idea/inspectionProfiles/profiles_settings.xml generated Normal file

@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

7
.idea/misc.xml generated Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="JavaScriptSettings">
    <option name="languageLevel" value="ES6" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml generated Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/hom_traj_gen.iml" filepath="$PROJECT_DIR$/.idea/hom_traj_gen.iml" />
    </modules>
  </component>
</project>

6
.idea/vcs.xml generated Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

0
README.md Normal file

11
build_data.py Normal file

@@ -0,0 +1,11 @@
from pathlib import Path

from lib.objects.map import Map
from preprocessing.generator import Generator

if __name__ == '__main__':
    data_root = Path() / 'data'
    maps_root = Path() / 'res' / 'maps'
    map_object = Map('Tate').from_image(maps_root / 'tate_sw.bmp')

    generator = Generator(data_root, map_object)
    generator.generate_n_trajectories_m_alternatives(100, 10, 'test')

Binary file not shown.

96
dataset/dataset.py Normal file

@@ -0,0 +1,96 @@
import shelve
from pathlib import Path

import torch
from torch.utils.data import ConcatDataset, Dataset

from lib.objects.map import Map
from preprocessing.generator import Generator


class TrajDataset(Dataset):
    def __init__(self, data):
        super(TrajDataset, self).__init__()
        self.alternatives = data['alternatives']
        self.trajectory = data['trajectory']
        self.labels = data['labels']

    def __len__(self):
        return len(self.alternatives)

    def __getitem__(self, item):
        return self.trajectory.vertices, self.alternatives[item].vertices, self.labels[item]


class DataSetMapping(Dataset):
    def __init__(self, dataset, mapping):
        self._dataset = dataset
        self._mapping = mapping

    def __len__(self):
        return self._mapping.shape[0]

    def __getitem__(self, item):
        return self._dataset[self._mapping[item]]


class TrajData(object):
    @property
    def name(self):
        return self.__class__.__name__

    def __init__(self, data_root, mapname='tate_sw', trajectories=1000, alternatives=10,
                 train_val_test_split=(0.6, 0.2, 0.2), rebuild=False, equal_samples=True, **_):
        self.rebuild = rebuild
        self.equal_samples = equal_samples
        self._alternatives = alternatives
        self._trajectories = trajectories
        self.mapname = mapname
        self.train_split, self.val_split, self.test_split = train_val_test_split
        self.data_root = Path(data_root)
        self._dataset = None
        self._dataset, self._train_map, self._val_map, self._test_map = self._load_dataset()

    def _build_data_on_demand(self):
        maps_root = Path() / 'res' / 'maps'
        assert maps_root.exists()
        map_object = Map(self.mapname).from_image(maps_root / f'{self.mapname}.bmp')
        dataset_file = Path(self.data_root) / f'{self.mapname}.pik'
        if dataset_file.exists() and self.rebuild:
            dataset_file.unlink()
        if not dataset_file.exists():
            generator = Generator(self.data_root, map_object)
            generator.generate_n_trajectories_m_alternatives(self._trajectories, self._alternatives,
                                                             self.mapname, equal_samples=self.equal_samples)
        return True

    def _load_dataset(self):
        assert self._build_data_on_demand()
        with shelve.open(str(self.data_root / f'{self.mapname}.pik')) as d:
            dataset = ConcatDataset([TrajDataset(d[key]) for key in d.keys() if key != 'map'])
        indices = torch.randperm(len(dataset))
        train_size = int(len(dataset) * self.train_split)
        val_size = int(len(dataset) * self.val_split)
        # Slice the permutation into consecutive, non-overlapping index ranges.
        train_map = indices[:train_size]
        val_map = indices[train_size:train_size + val_size]
        test_map = indices[train_size + val_size:]
        return dataset, train_map, val_map, test_map

    @property
    def train_dataset(self):
        return DataSetMapping(self._dataset, self._train_map)

    @property
    def val_dataset(self):
        return DataSetMapping(self._dataset, self._val_map)

    @property
    def test_dataset(self):
        return DataSetMapping(self._dataset, self._test_map)

    def get_datasets(self):
        return self.train_dataset, self.val_dataset, self.test_dataset
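
A usage sketch for the split API above (hedged, not part of the commit: assumes the shelve lives under data/ for the tate_sw map; the first call builds it on demand and can take a while):

from dataset.dataset import TrajData

data = TrajData('data', mapname='tate_sw')
train, val, test = data.get_datasets()
trajectory_vertices, alternative_vertices, label = train[0]
print(len(train), len(val), len(test), label)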


@@ -0,0 +1,37 @@
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc


class ROCEvaluation(object):

    BINARY_PROBLEM = 2
    linewidth = 2

    def __init__(self, save_fig=True):
        self.save_fig = save_fig
        self.epoch = 0

    def __call__(self, prediction, label, prepare_fig=True):
        # Compute ROC curve and ROC area; sklearn expects (y_true, y_score).
        fpr, tpr, _ = roc_curve(label, prediction)
        roc_auc = auc(fpr, tpr)

        if prepare_fig:
            ax = self._prepare_fig()
            ax.plot(fpr, tpr, color='darkorange',
                    lw=self.linewidth, label=f'ROC curve (area = {roc_auc:.2f})')
            ax.legend(loc='lower right')
        return roc_auc

    def _prepare_fig(self):
        # Draw the chance diagonal and set up axes/labels on the current axes.
        ax = plt.gca()
        ax.plot([0, 1], [0, 1], color='navy', lw=self.linewidth, linestyle='--')
        ax.set_xlim([0.0, 1.0])
        ax.set_ylim([0.0, 1.05])
        ax.set_xlabel('False Positive Rate')
        ax.set_ylabel('True Positive Rate')
        return ax
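
A quick smoke test for the evaluator above (hedged sketch: random scores, so the area should land near 0.5; assumes the class is importable from wherever this file lives):

import numpy as np

evaluation = ROCEvaluation(save_fig=False)
y_true = np.random.randint(0, 2, size=256)
y_score = np.random.rand(256)
print(f'AUC on random scores: {evaluation(y_score, y_true, prepare_fig=False):.2f}')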

0
lib/__init__.py Normal file

Binary file not shown.

0
lib/models/__init__.py Normal file

468
lib/models/blocks.py Normal file

@@ -0,0 +1,468 @@
from abc import ABC
from functools import reduce
from operator import mul
from pathlib import Path
from typing import List, Union

import torch
from torch import nn
import torch.nn.functional as F
import pytorch_lightning as pl

# Utility - Modules
###################
from torch.utils.data import DataLoader

from dataset.dataset import TrajData


class Flatten(nn.Module):
    def __init__(self, to=(-1, )):
        super(Flatten, self).__init__()
        # Stored as _to so the target shape does not shadow nn.Module.to().
        self._to = to

    def forward(self, x):
        return x.view(x.size(0), *self._to)


class Interpolate(nn.Module):
    def __init__(self, size=None, scale_factor=None, mode='nearest', align_corners=None):
        super(Interpolate, self).__init__()
        self.interp = nn.functional.interpolate
        self.size = size
        self.scale_factor = scale_factor
        self.align_corners = align_corners
        self.mode = mode

    def forward(self, x):
        x = self.interp(x, size=self.size, scale_factor=self.scale_factor,
                        mode=self.mode, align_corners=self.align_corners)
        return x


class AutoPad(nn.Module):
    def __init__(self, interpolations=3, base=2):
        super(AutoPad, self).__init__()
        self.fct = base ** interpolations

    def forward(self, x):
        # Pad width and height up to the next multiple of base ** interpolations.
        x = F.pad(x,
                  [0,
                   (x.shape[-1] // self.fct + 1) * self.fct - x.shape[-1] if x.shape[-1] % self.fct != 0 else 0,
                   (x.shape[-2] // self.fct + 1) * self.fct - x.shape[-2] if x.shape[-2] % self.fct != 0 else 0,
                   0])
        return x

class LightningBaseModule(pl.LightningModule, ABC):

    @classmethod
    def name(cls):
        raise NotImplementedError('Give your model a name!')

    @property
    def shape(self):
        try:
            x = torch.randn(self.in_shape).unsqueeze(0)
            output = self(x)
            return output.shape[1:]
        except Exception as e:
            print(e)
            return -1

    def __init__(self, params):
        super(LightningBaseModule, self).__init__()
        self.hparams = params

        # Data loading
        # =============================================================================
        # Dataset
        self.dataset = TrajData('data')

    def size(self):
        return self.shape

    def _move_to_model_device(self, x):
        return x.cuda() if next(self.parameters()).is_cuda else x.cpu()

    def save_to_disk(self, model_path):
        model_path = Path(model_path)
        model_path.mkdir(parents=True, exist_ok=True)
        if not (model_path / 'model_class.obj').exists():
            with (model_path / 'model_class.obj').open('wb') as f:
                torch.save(self.__class__, f)
        return True

    @pl.data_loader
    def train_dataloader(self):
        return DataLoader(dataset=self.dataset.train_dataset, shuffle=True,
                          batch_size=self.hparams.data_param.batchsize,
                          num_workers=self.hparams.data_param.worker)

    @pl.data_loader
    def test_dataloader(self):
        return DataLoader(dataset=self.dataset.test_dataset, shuffle=True,
                          batch_size=self.hparams.data_param.batchsize,
                          num_workers=self.hparams.data_param.worker)

    @pl.data_loader
    def val_dataloader(self):
        return DataLoader(dataset=self.dataset.val_dataset, shuffle=True,
                          batch_size=self.hparams.data_param.batchsize,
                          num_workers=self.hparams.data_param.worker)

    def configure_optimizers(self):
        raise NotImplementedError

    def forward(self, *args, **kwargs):
        raise NotImplementedError

    def validation_step(self, *args, **kwargs):
        raise NotImplementedError

    def validation_end(self, outputs):
        raise NotImplementedError

    def training_step(self, batch_xy, batch_nb, *args, **kwargs):
        raise NotImplementedError

    def test_step(self, *args, **kwargs):
        raise NotImplementedError

    def test_end(self, outputs):
        from sklearn.metrics import roc_auc_score

        y_scores, y_true = [], []
        for output in outputs:
            y_scores.append(output['y_pred'])
            y_true.append(output['y_true'])

        y_true = torch.cat(y_true, dim=0)
        # FIXME: What did this do? Do I need it?
        # y_true = (y_true != V.HOMOTOPIC).long()
        y_scores = torch.cat(y_scores, dim=0)

        roc_auc_scores = roc_auc_score(y_true.cpu().numpy(), y_scores.cpu().numpy())
        print(f'AUC Score: {roc_auc_scores}')
        return {'roc_auc_scores': roc_auc_scores}

    def init_weights(self):
        def _weight_init(m):
            if hasattr(m, 'weight'):
                if isinstance(m.weight, torch.Tensor):
                    torch.nn.init.xavier_uniform_(m.weight)
            if hasattr(m, 'bias'):
                if isinstance(m.bias, torch.Tensor):
                    m.bias.data.fill_(0.01)
        self.apply(_weight_init)

#
# Sub - Modules
###################
class ConvModule(nn.Module):

    @property
    def shape(self):
        x = torch.randn(self.in_shape).unsqueeze(0)
        output = self(x)
        return output.shape[1:]

    def __init__(self, in_shape, activation: nn.Module = nn.ELU, pooling_size=None, use_bias=True, use_norm=True,
                 dropout: Union[int, float] = 0,
                 conv_filters=64, conv_kernel=5, conv_stride=1, conv_padding=0):
        super(ConvModule, self).__init__()

        # Module Parameters
        self.in_shape = in_shape
        in_channels, height, width = in_shape[0], in_shape[1], in_shape[2]
        self.activation = activation()

        # Convolution Parameters
        self.padding = conv_padding
        self.stride = conv_stride

        # Modules
        self.dropout = nn.Dropout2d(dropout) if dropout else False
        self.pooling = nn.MaxPool2d(pooling_size) if pooling_size else False
        self.norm = nn.BatchNorm2d(in_channels, eps=1e-04, affine=False) if use_norm else False
        self.conv = nn.Conv2d(in_channels, conv_filters, conv_kernel, bias=use_bias,
                              padding=self.padding, stride=self.stride
                              )

    def forward(self, x):
        x = self.norm(x) if self.norm else x

        tensor = self.conv(x)
        tensor = self.dropout(tensor) if self.dropout else tensor
        tensor = self.pooling(tensor) if self.pooling else tensor
        tensor = self.activation(tensor)
        return tensor


class DeConvModule(nn.Module):

    @property
    def shape(self):
        x = torch.randn(self.in_shape).unsqueeze(0)
        output = self(x)
        return output.shape[1:]

    def __init__(self, in_shape, conv_filters=3, conv_kernel=5, conv_stride=1, conv_padding=0,
                 dropout: Union[int, float] = 0, autopad=False,
                 activation: Union[None, nn.Module] = nn.ReLU, interpolation_scale=None,
                 use_bias=True, normalize=False):
        super(DeConvModule, self).__init__()
        in_channels, height, width = in_shape[0], in_shape[1], in_shape[2]
        self.padding = conv_padding
        self.stride = conv_stride
        self.in_shape = in_shape
        self.conv_filters = conv_filters

        self.autopad = AutoPad() if autopad else False
        self.interpolation = Interpolate(scale_factor=interpolation_scale) if interpolation_scale else False
        self.norm = nn.BatchNorm2d(in_channels, eps=1e-04, affine=False) if normalize else False
        self.dropout = nn.Dropout2d(dropout) if dropout else False
        self.de_conv = nn.ConvTranspose2d(in_channels, self.conv_filters, conv_kernel, bias=use_bias,
                                          padding=self.padding, stride=self.stride)
        self.activation = activation() if activation else None

    def forward(self, x):
        x = self.norm(x) if self.norm else x
        x = self.dropout(x) if self.dropout else x
        x = self.autopad(x) if self.autopad else x
        x = self.interpolation(x) if self.interpolation else x

        tensor = self.de_conv(x)
        tensor = self.activation(tensor) if self.activation else tensor
        return tensor

    def size(self):
        return self.shape

#
# Full Model Parts
###################
class Generator(nn.Module):

    @property
    def shape(self):
        x = torch.randn(self.lat_dim).unsqueeze(0)
        output = self(x)
        return output.shape[1:]

    # noinspection PyUnresolvedReferences
    def __init__(self, out_channels, re_shape, lat_dim, use_norm=False, use_bias=True, dropout: Union[int, float] = 0,
                 filters: List[int] = None, activation=nn.ReLU):
        super(Generator, self).__init__()
        assert filters, '"filters" has to be a list of three ints'
        self.filters = filters
        self.activation = activation
        self.inner_activation = activation()
        self.out_activation = None
        self.lat_dim = lat_dim
        self.dropout = dropout
        self.l1 = nn.Linear(self.lat_dim, reduce(mul, re_shape), bias=use_bias)
        # re_shape = (self.lat_dim // reduce(mul, re_shape[1:]), ) + tuple(re_shape[1:])

        self.flat = Flatten(to=re_shape)

        self.deconv1 = DeConvModule(re_shape, conv_filters=self.filters[0],
                                    conv_kernel=5,
                                    conv_padding=2,
                                    conv_stride=1,
                                    normalize=use_norm,
                                    activation=self.activation,
                                    interpolation_scale=2,
                                    dropout=self.dropout
                                    )
        self.deconv2 = DeConvModule(self.deconv1.shape, conv_filters=self.filters[1],
                                    conv_kernel=3,
                                    conv_padding=1,
                                    conv_stride=1,
                                    normalize=use_norm,
                                    activation=self.activation,
                                    interpolation_scale=2,
                                    dropout=self.dropout
                                    )
        self.deconv3 = DeConvModule(self.deconv2.shape, conv_filters=self.filters[2],
                                    conv_kernel=3,
                                    conv_padding=1,
                                    conv_stride=1,
                                    normalize=use_norm,
                                    activation=self.activation,
                                    interpolation_scale=2,
                                    dropout=self.dropout
                                    )
        self.deconv4 = DeConvModule(self.deconv3.shape, conv_filters=out_channels,
                                    conv_kernel=3,
                                    conv_padding=1,
                                    # normalize=use_norm,
                                    activation=self.out_activation
                                    )

    def forward(self, z):
        tensor = self.l1(z)
        tensor = self.inner_activation(tensor)
        tensor = self.flat(tensor)
        tensor = self.deconv1(tensor)
        tensor = self.deconv2(tensor)
        tensor = self.deconv3(tensor)
        tensor = self.deconv4(tensor)
        return tensor

    def size(self):
        return self.shape


class UnitGenerator(Generator):

    def __init__(self, *args, **kwargs):
        kwargs.update(use_norm=True)
        super(UnitGenerator, self).__init__(*args, **kwargs)
        self.norm_f = nn.BatchNorm1d(self.l1.out_features, eps=1e-04, affine=False)
        self.norm1 = nn.BatchNorm2d(self.deconv1.conv_filters, eps=1e-04, affine=False)
        self.norm2 = nn.BatchNorm2d(self.deconv2.conv_filters, eps=1e-04, affine=False)
        self.norm3 = nn.BatchNorm2d(self.deconv3.conv_filters, eps=1e-04, affine=False)

    def forward(self, z_c1_c2_c3):
        z, c1, c2, c3 = z_c1_c2_c3
        tensor = self.l1(z)
        tensor = self.inner_activation(tensor)
        tensor = self.norm_f(tensor)
        tensor = self.flat(tensor)

        tensor = self.deconv1(tensor) + c3
        tensor = self.inner_activation(tensor)
        tensor = self.norm1(tensor)

        tensor = self.deconv2(tensor) + c2
        tensor = self.inner_activation(tensor)
        tensor = self.norm2(tensor)

        tensor = self.deconv3(tensor) + c1
        tensor = self.inner_activation(tensor)
        tensor = self.norm3(tensor)

        tensor = self.deconv4(tensor)
        return tensor

class BaseEncoder(nn.Module):

    @property
    def shape(self):
        x = torch.randn(self.in_shape).unsqueeze(0)
        output = self(x)
        return output.shape[1:]

    # noinspection PyUnresolvedReferences
    def __init__(self, in_shape, lat_dim=256, use_bias=True, use_norm=False, dropout: Union[int, float] = 0,
                 latent_activation: Union[nn.Module, None] = None, activation: nn.Module = nn.ELU,
                 filters: List[int] = None):
        super(BaseEncoder, self).__init__()
        assert filters, '"filters" has to be a list of three ints'

        # Optional padding for odd image sizes
        # Obsolete, already done by the autopadding module on incoming tensors
        # in_shape = [x+1 if x % 2 != 0 and idx else x for idx, x in enumerate(in_shape)]

        # Parameters
        self.lat_dim = lat_dim
        self.in_shape = in_shape
        self.use_bias = use_bias
        self.latent_activation = latent_activation() if latent_activation else None

        # Modules
        self.conv1 = ConvModule(self.in_shape, conv_filters=filters[0],
                                conv_kernel=3,
                                conv_padding=1,
                                conv_stride=1,
                                pooling_size=2,
                                use_norm=use_norm,
                                dropout=dropout,
                                activation=activation
                                )
        self.conv2 = ConvModule(self.conv1.shape, conv_filters=filters[1],
                                conv_kernel=3,
                                conv_padding=1,
                                conv_stride=1,
                                pooling_size=2,
                                use_norm=use_norm,
                                dropout=dropout,
                                activation=activation
                                )
        self.conv3 = ConvModule(self.conv2.shape, conv_filters=filters[2],
                                conv_kernel=5,
                                conv_padding=2,
                                conv_stride=1,
                                pooling_size=2,
                                use_norm=use_norm,
                                dropout=dropout,
                                activation=activation
                                )
        self.flat = Flatten()

    def forward(self, x):
        tensor = self.conv1(x)
        tensor = self.conv2(tensor)
        tensor = self.conv3(tensor)
        tensor = self.flat(tensor)
        return tensor


class UnitEncoder(BaseEncoder):
    # noinspection PyUnresolvedReferences
    def __init__(self, *args, **kwargs):
        kwargs.update(use_norm=True)
        super(UnitEncoder, self).__init__(*args, **kwargs)
        self.l1 = nn.Linear(reduce(mul, self.conv3.shape), self.lat_dim, bias=self.use_bias)

    def forward(self, x):
        c1 = self.conv1(x)
        c2 = self.conv2(c1)
        c3 = self.conv3(c2)
        tensor = self.flat(c3)
        l1 = self.l1(tensor)
        return c1, c2, c3, l1


class VariationalEncoder(BaseEncoder):
    # noinspection PyUnresolvedReferences
    def __init__(self, *args, **kwargs):
        super(VariationalEncoder, self).__init__(*args, **kwargs)

        self.logvar = nn.Linear(reduce(mul, self.conv3.shape), self.lat_dim, bias=self.use_bias)
        self.mu = nn.Linear(reduce(mul, self.conv3.shape), self.lat_dim, bias=self.use_bias)

    @staticmethod
    def reparameterize(mu, logvar):
        # Sample z = mu + eps * sigma via the reparameterization trick.
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def forward(self, x):
        tensor = super(VariationalEncoder, self).forward(x)
        mu = self.mu(tensor)
        logvar = self.logvar(tensor)
        z = self.reparameterize(mu, logvar)
        return mu, logvar, z


class Encoder(BaseEncoder):
    # noinspection PyUnresolvedReferences
    def __init__(self, *args, **kwargs):
        super(Encoder, self).__init__(*args, **kwargs)

        self.l1 = nn.Linear(reduce(mul, self.conv3.shape), self.lat_dim, bias=self.use_bias)

    def forward(self, x):
        tensor = super(Encoder, self).forward(x)
        tensor = self.l1(tensor)
        tensor = self.latent_activation(tensor) if self.latent_activation else tensor
        return tensor
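
A shape sanity check for the encoder/generator pair above (hedged sketch: the 1x64x64 input, latent size 16 and filter counts are invented for illustration):

import torch
from lib.models.blocks import Encoder, Generator

encoder = Encoder(in_shape=(1, 64, 64), lat_dim=16, filters=[32, 16, 4])
z = encoder(torch.randn(2, 1, 64, 64))    # three pool-by-2 stages: 64 -> 8
# Three interpolation-by-2 stages take re_shape (4, 8, 8) back to 64x64.
generator = Generator(out_channels=1, re_shape=(4, 8, 8), lat_dim=16, filters=[16, 32, 64])
x_hat = generator(z)
print(z.shape, x_hat.shape)               # (2, 16), (2, 1, 64, 64)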

0
lib/objects/__init__.py Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

125
lib/objects/map.py Normal file

@@ -0,0 +1,125 @@
from pathlib import Path
import copy
from math import sqrt

import numpy as np
from PIL import Image, ImageDraw
import networkx as nx
from matplotlib import pyplot as plt

from lib.objects.trajectory import Trajectory


class Map(object):

    # Pixel values that count as walkable (white) and blocked (black).
    white = [1, 255]
    black = [0]

    def __copy__(self):
        return copy.deepcopy(self)

    @property
    def shape(self):
        return self.map_array.shape

    @property
    def width(self):
        # map_array is indexed (row, column) = (y, x).
        return self.shape[1]

    @property
    def height(self):
        return self.shape[0]

    @property
    def as_graph(self):
        return self._G

    @property
    def as_array(self):
        return self.map_array

    def __init__(self, name='', array_like_map_representation=None):
        self.map_array: np.ndarray = array_like_map_representation
        self.name = name

    def __setattr__(self, key, value):
        super(Map, self).__setattr__(key, value)
        # Rebuild the walkability graph whenever the underlying array changes.
        if key == 'map_array' and self.map_array is not None:
            self._G = self._build_graph()

    def _build_graph(self, full_neighbors=True):
        graph = nx.Graph()
        # Do checks in order: up - left - upper-left - lower-left
        neighbors = [(0, -1, 1), (-1, 0, 1), (-1, -1, sqrt(2)), (-1, 1, sqrt(2))]

        # Check pixels for their color (determine if walkable)
        for idx, value in np.ndenumerate(self.map_array):
            if value in self.white:
                y, x = idx
                # If walkable, add node
                graph.add_node((y, x), count=0)
                # Fully connect to all surrounding neighbors
                for n, (xdif, ydif, weight) in enumerate(neighbors):
                    # Differentiate between 8 and 4 neighbors
                    if not full_neighbors and n >= 2:
                        break
                    query_node = (y + ydif, x + xdif)
                    if graph.has_node(query_node):
                        graph.add_edge(idx, query_node, weight=weight)
        return graph

    @classmethod
    def from_image(cls, imagepath: Path):
        with Image.open(imagepath) as image:
            return cls(name=imagepath.name, array_like_map_representation=np.array(image))

    def simple_trajectory_between(self, start, dest):
        vertices = list(nx.shortest_path(self._G, start, dest))
        trajectory = Trajectory(vertices)
        return trajectory

    def get_valid_position(self):
        not_found, valid_position = True, (-9999, -9999)
        while not_found:
            valid_position = int(np.random.choice(self.height, 1)), int(np.random.choice(self.width, 1))
            if self._G.has_node(valid_position):
                not_found = False
        return valid_position

    def get_trajectory_from_vertices(self, *args):
        coords = list()
        for start, dest in zip(args[:-1], args[1:]):
            coords.extend(nx.shortest_path(self._G, start, dest))
        return Trajectory(coords)

    def get_random_trajectory(self):
        start = self.get_valid_position()
        dest = self.get_valid_position()
        return self.simple_trajectory_between(start, dest)

    def are_homotopic(self, trajectory, other_trajectory):
        if not all(isinstance(x, Trajectory) for x in [trajectory, other_trajectory]):
            raise TypeError
        polyline = trajectory.vertices.copy()
        polyline.extend(reversed(other_trajectory.vertices))

        # Rasterize the closed region between the two paths.
        # Vertices are stored as (y, x); PIL expects (x, y).
        img = Image.new('L', (self.width, self.height), 0)
        ImageDraw.Draw(img).polygon([(x, y) for y, x in polyline], outline=1, fill=1)

        # The paths are homotopic iff the enclosed region contains no
        # obstacle (non-walkable) pixel.
        obstacles = ~np.isin(self.map_array, self.white)
        a = (np.array(img).astype(bool) & obstacles).sum()
        return a < 1

    def draw(self):
        fig, ax = plt.gcf(), plt.gca()
        # The standard colormaps also all have reversed versions.
        # They have the same names with _r tacked on to the end.
        # https://matplotlib.org/api/pyplot_summary.html?highlight=colormaps
        img = ax.imshow(self.as_array, cmap='Greys_r')
        return dict(img=img, fig=fig, ax=ax)
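
A usage sketch for Map (hedged: assumes res/maps/tate_sw.bmp exists relative to the working directory):

from pathlib import Path
from lib.objects.map import Map

map_object = Map.from_image(Path('res') / 'maps' / 'tate_sw.bmp')
trajectory = map_object.get_random_trajectory()
start, dest = trajectory.endpoints
# Same endpoints, rerouted through one random waypoint - mirrors the generator's 'one_patching' mode.
alternative = map_object.get_trajectory_from_vertices(start, map_object.get_valid_position(), dest)
print(map_object.are_homotopic(trajectory, alternative))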

65
lib/objects/trajectory.py Normal file

@@ -0,0 +1,65 @@
from math import atan2
from typing import List, Tuple, Union

from matplotlib import pyplot as plt

from lib.objects import variables as V


class Trajectory(object):

    @property
    def endpoints(self):
        return self.start, self.dest

    @property
    def start(self):
        return self.vertices[0]

    @property
    def dest(self):
        return self.vertices[-1]

    @property
    def xs(self):
        # Vertices are stored as (y, x).
        return [vertex[1] for vertex in self.vertices]

    @property
    def ys(self):
        return [vertex[0] for vertex in self.vertices]

    @property
    def as_paired_list(self):
        return list(zip(self.vertices[:-1], self.vertices[1:]))

    def __init__(self, vertices: Union[List[Tuple[int, int]], None] = None):
        assert any((isinstance(vertices, list), vertices is None))
        if vertices is not None:
            self.vertices = vertices

    def is_equal_to(self, other_trajectory):
        # ToDo: do further equality checks here
        return self.vertices == other_trajectory.vertices

    def draw(self, highlights=True, label=None, **kwargs):
        if label is not None:
            kwargs.update(color='red' if label == V.HOMOTOPIC else 'green',
                          label='Homotopic' if label == V.HOMOTOPIC else 'Alternative')
        if highlights:
            kwargs.update(marker='o')
        fig, ax = plt.gcf(), plt.gca()
        img = plt.plot(self.xs, self.ys, **kwargs)
        return dict(img=img, fig=fig, ax=ax)

    def min_vertices(self):
        # Drop intermediate vertices that do not change the heading.
        vertices, last_angle = [self.start], 0
        for (x1, y1), (x2, y2) in self.as_paired_list:
            current_angle = atan2(x1 - x2, y1 - y2)
            if current_angle != last_angle:
                vertices.append((x2, y2))
                last_angle = current_angle
        if vertices[-1] != self.dest:
            vertices.append(self.dest)
        return self.__class__(vertices=vertices)
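
A worked example for min_vertices (hedged: the vertices are invented; collinear points collapse, turns survive):

from lib.objects.trajectory import Trajectory

trajectory = Trajectory([(0, 0), (1, 1), (2, 2), (3, 3), (3, 4), (3, 5)])
print(trajectory.min_vertices().vertices)
# -> [(0, 0), (1, 1), (3, 4), (3, 5)]
# (1, 1) survives because the first heading differs from the angle seed 0;
# (3, 4) is the first vertex after the turn at (3, 3).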

9
lib/objects/variables.py Normal file

@@ -0,0 +1,9 @@
from pathlib import Path

_ROOT = Path('..')
HOMOTOPIC = 0
ALTERNATIVE = 1

_key_1 = 'eyJhcGlfYWRkcmVzcyI6Imh0dHBzOi8vdWkubmVwdHVuZS5haSIsImFwaV91cmwiOiJodHRwczovL3VpLm'
_key_2 = '5lcHR1bmUuYWkiLCJhcGlfa2V5IjoiZmI0OGMzNzUtOTg1NS00Yzg2LThjMzYtMWFiYjUwMDUyMjVlIn0='
NEPTUNE_KEY = _key_1 + _key_2

0
lib/utils/__init__.py Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

96
lib/utils/config.py Normal file

@@ -0,0 +1,96 @@
import ast
from argparse import Namespace
from collections import defaultdict
from configparser import ConfigParser
from pathlib import Path


def is_jsonable(x):
    import json
    try:
        json.dumps(x)
        return True
    except TypeError:
        return False


class Config(ConfigParser):

    # TODO: Do this programmatically; this did not work:
    # Initialize default sections
    # for section in self.default_sections:
    #     self.__setattr__(section, property(lambda x: x._get_namespace_for_section(section)))

    @property
    def main(self):
        return self._get_namespace_for_section('main')

    @property
    def model(self):
        return self._get_namespace_for_section('model')

    @property
    def train(self):
        return self._get_namespace_for_section('train')

    @property
    def data(self):
        return self._get_namespace_for_section('data')

    @property
    def project(self):
        return self._get_namespace_for_section('project')

    ###################################################

    @property
    def tags(self):
        return [f'{key}: {val}' for key, val in self.serializable.items()]

    @property
    def serializable(self):
        return {f'{section}_{key}': val for section, params in self._sections.items()
                for key, val in params.items() if is_jsonable(val)}

    @property
    def as_dict(self):
        return self._sections

    def _get_namespace_for_section(self, item):
        return Namespace(**{key: self.get(item, key) for key in self[item]})

    def __init__(self, **kwargs):
        super(Config, self).__init__(**kwargs)

    @classmethod
    def read_namespace(cls, namespace: Namespace):
        # Split argparse keys such as "data_batchsize" into section "data", option "batchsize".
        space_dict = defaultdict(dict)
        for key in namespace.__dict__:
            section, *attr_name = key.split('_')
            attr_name = '_'.join(attr_name)
            value = str(namespace.__getattribute__(key))
            space_dict[section][attr_name] = value
        new_config = cls()
        new_config.read_dict(space_dict)
        return new_config

    def get(self, *args, **kwargs):
        item = super(Config, self).get(*args, **kwargs)
        try:
            # Recover python literals (ints, floats, lists, bools) from their strings.
            return ast.literal_eval(item)
        except (SyntaxError, ValueError):
            return item

    def write(self, filepath, **kwargs):
        path = Path(filepath)
        path.parent.mkdir(parents=True, exist_ok=True)
        with path.open('w') as configfile:
            super().write(configfile)
        return True
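
A round-trip sketch for Config.read_namespace (hedged: the flag names mirror the argparse defaults in main.py below):

from argparse import Namespace
from lib.utils.config import Config

args = Namespace(main_seed=69, data_batchsize=100, data_worker=10)
config = Config.read_namespace(args)
print(config.main.seed, config.data.batchsize + 1)  # get() literal_evals '100' back into an int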

69
lib/utils/logging.py Normal file

@@ -0,0 +1,69 @@
from pathlib import Path

from pytorch_lightning.logging.base import LightningLoggerBase
from pytorch_lightning.logging.neptune import NeptuneLogger
from pytorch_lightning.logging.test_tube import TestTubeLogger

from lib.utils.config import Config


class Logger(LightningLoggerBase):

    @property
    def experiment(self):
        if self.debug:
            return self.testtubelogger.experiment
        else:
            return self.neptunelogger.experiment

    @property
    def name(self):
        return self.config.model.type

    @property
    def project_name(self):
        return f"{self.config.project.owner}/{self.config.project.name}"

    @property
    def version(self):
        return f"version_{self.config.get('main', 'seed')}"

    @property
    def outpath(self):
        # ToDo: Add further path modification such as dataset config etc.
        return Path(self.config.train.outpath)

    def __init__(self, config: Config, debug=False):
        """
        params (dict|None): Optional. Parameters of the experiment. After experiment creation, params are read-only.
            Parameters are displayed in the experiment's Parameters section, and each key-value pair can be
            viewed in the experiments view as a column.
        properties (dict|None): Optional, default is {}. Properties of the experiment.
            They are editable after the experiment is created. Properties are displayed in the experiment's Details,
            and each key-value pair can be viewed in the experiments view as a column.
        tags (list|None): Optional, default []. Must be a list of str. Tags of the experiment.
            They are editable after the experiment is created (see: append_tag() and remove_tag()).
            Tags are displayed in the experiment's Details and can be viewed in the experiments view as a column.
        """
        super(Logger, self).__init__()

        self.debug = debug
        self.config = config
        self._testtube_kwargs = dict(save_dir=self.outpath, version=self.version, name=self.name)
        self._neptune_kwargs = dict(offline_mode=not self.debug,
                                    api_key=self.config.project.neptune_key,
                                    project_name=self.project_name,
                                    name=self.name,
                                    upload_source_files=list())
        self.neptunelogger = NeptuneLogger(**self._neptune_kwargs)
        self.testtubelogger = TestTubeLogger(**self._testtube_kwargs)

    def log_hyperparams(self, params):
        self.neptunelogger.log_hyperparams(params)
        self.testtubelogger.log_hyperparams(params)

    def log_metrics(self, metrics, step_num):
        self.neptunelogger.log_metrics(metrics, step_num)
        self.testtubelogger.log_metrics(metrics, step_num)
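
A construction sketch for the combined logger (hedged: the section/option names mirror main.py; with debug=False this particular setup puts Neptune into offline mode, so no real key is needed):

from lib.utils.config import Config
from lib.utils.logging import Logger

config = Config()
config.read_dict(dict(main=dict(seed='69'), model=dict(type='LeNetAE'),
                      train=dict(outpath='output'),
                      project=dict(name='traj-gen', owner='si11ium', neptune_key='<your-key>')))
logger = Logger(config, debug=False)
print(logger.name, logger.version, logger.outpath)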

76
lib/utils/model_io.py Normal file

@@ -0,0 +1,76 @@
from argparse import Namespace
from pathlib import Path

from natsort import natsorted
from torch import nn


# Hyperparameter Object
class ModelParameters(Namespace):

    _activations = dict(
        leaky_relu=nn.LeakyReLU,
        relu=nn.ReLU,
        sigmoid=nn.Sigmoid,
        tanh=nn.Tanh
    )

    @property
    def model_param(self):
        return self._model_param

    @property
    def train_param(self):
        return self._train_param

    @property
    def data_param(self):
        return self._data_param

    def __init__(self, model_param, train_param, data_param):
        self._model_param = model_param
        self._train_param = train_param
        self._data_param = data_param
        kwargs = vars(model_param)
        kwargs.update(vars(train_param))
        kwargs.update(vars(data_param))
        super(ModelParameters, self).__init__(**kwargs)

    def __getattribute__(self, item):
        if item == 'activation':
            # Map the configured activation name (e.g. 'relu') to its nn class.
            try:
                activation_name = super(ModelParameters, self).__getattribute__(item)
                return self._activations[activation_name]
            except (AttributeError, KeyError):
                return nn.ReLU
        return super(ModelParameters, self).__getattribute__(item)


class SavedLightningModels(object):

    @classmethod
    def load_checkpoint(cls, models_root_path, model, n=-1, tags_file_path=''):
        models_root_path = Path(models_root_path)
        assert models_root_path.exists(), f'The path {models_root_path.absolute()} does not exist!'
        found_checkpoints = list(models_root_path.rglob('*.ckpt'))
        found_checkpoints = natsorted(found_checkpoints, key=lambda y: y.name)

        if not tags_file_path:
            tag_files = models_root_path.rglob('meta_tags.csv')
            tags_file_path = list(tag_files)[0]

        return cls(weights=found_checkpoints[n], model=model, tags=tags_file_path)

    def __init__(self, **kwargs):
        self.weights: str = kwargs.get('weights', '')
        self.tags: str = kwargs.get('tags', '')
        self.model = kwargs.get('model', None)
        assert self.model is not None

    def restore(self):
        pretrained_model = self.model.load_from_metrics(
            weights_path=self.weights,
            tags_csv=self.tags
        )
        pretrained_model.eval()
        pretrained_model.freeze()
        return pretrained_model
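
A restore sketch (hedged: 'output' is a placeholder checkpoint root and MyModel stands in for a concrete LightningModule subclass offering load_from_metrics, the pytorch-lightning 0.x API used here):

from pathlib import Path
from lib.utils.model_io import SavedLightningModels

saved = SavedLightningModels.load_checkpoint(Path('output'), model=MyModel)  # MyModel: your module class
model = saved.restore()  # returns the newest checkpoint's model in eval mode, weights frozen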

12
lib/utils/transforms.py Normal file

@@ -0,0 +1,12 @@
import numpy as np


class AsArray(object):
    def __init__(self, width, height):
        self.width = width
        self.height = height

    def __call__(self, x):
        # Note: stub for now - returns an empty canvas of the target size
        # without rasterizing x into it yet.
        array = np.zeros((self.width, self.height))
        return array

71
main.py Normal file

@@ -0,0 +1,71 @@
# Imports
# =============================================================================
import os
from distutils.util import strtobool
from pathlib import Path
from argparse import ArgumentParser

import warnings

from pytorch_lightning import Trainer
from torch.utils.data import DataLoader

from dataset.dataset import TrajData
from lib.utils.config import Config
from lib.utils.logging import Logger

warnings.filterwarnings('ignore', category=FutureWarning)
warnings.filterwarnings('ignore', category=UserWarning)

_ROOT = Path(__file__).parent

# Parameter Configuration
# =============================================================================
# Argument Parser
main_arg_parser = ArgumentParser(description="parser for fast-neural-style")

# Main Parameters
main_arg_parser.add_argument("--main_debug", type=strtobool, default=False, help="")
main_arg_parser.add_argument("--main_eval", type=strtobool, default=False, help="")
main_arg_parser.add_argument("--main_seed", type=int, default=69, help="")

# Data Parameters
main_arg_parser.add_argument("--data_worker", type=int, default=10, help="")
main_arg_parser.add_argument("--data_batchsize", type=int, default=100, help="")
main_arg_parser.add_argument("--data_root", type=str, default='../data/rpoot', help="")

# Transformations
main_arg_parser.add_argument("--transformations_to_tensor", type=strtobool, default=False, help="")

# Training
main_arg_parser.add_argument("--train_outpath", type=str, default="output", help="")
main_arg_parser.add_argument("--train_version", type=strtobool, required=False, help="")
main_arg_parser.add_argument("--train_epochs", type=int, default=10, help="")
main_arg_parser.add_argument("--train_batch_size", type=int, default=512, help="")
main_arg_parser.add_argument("--train_lr", type=float, default=0.002, help="")

# Model
main_arg_parser.add_argument("--model_type", type=str, default="LeNetAE", help="")
main_arg_parser.add_argument("--model_activation", type=str, default="relu", help="")
main_arg_parser.add_argument("--model_filters", type=str, default="[32, 16, 4]", help="")
main_arg_parser.add_argument("--model_use_bias", type=strtobool, default=True, help="")
main_arg_parser.add_argument("--model_use_norm", type=strtobool, default=True, help="")
main_arg_parser.add_argument("--model_dropout", type=float, default=0.00, help="")

# Project
main_arg_parser.add_argument("--project_name", type=str, default='traj-gen', help="")
main_arg_parser.add_argument("--project_owner", type=str, default='si11ium', help="")
main_arg_parser.add_argument("--project_neptune_key", type=str, default=os.getenv('NEPTUNE_KEY'), help="")

# Parse it
args = main_arg_parser.parse_args()
config = Config.read_namespace(args)

# Trainer loading
# =============================================================================
trainer = Trainer(logger=Logger(config, debug=True))

if __name__ == '__main__':
    # Smoke test: build the dataset and fetch one batch.
    dataset = TrajData(config.data.root)
    train_dataloader = DataLoader(dataset.train_dataset, shuffle=True,
                                  batch_size=config.data.batchsize,
                                  num_workers=config.data.worker)
    print(next(iter(train_dataloader)))

Binary file not shown.

121
preprocessing/generator.py Normal file

@@ -0,0 +1,121 @@
import multiprocessing as mp
import pickle
import shelve
from collections import defaultdict
from pathlib import Path
from typing import Union

from tqdm import trange

from lib.objects.map import Map


class Generator:
    possible_modes = ['one_patching']

    def __init__(self, data_root, map_obj, binary=True):
        self.binary: bool = binary
        self.map: Map = map_obj
        self.data_root = Path(data_root)

    def generate_n_trajectories_m_alternatives(self, n, m, dataset_name='', **kwargs):
        trajectories_with_alternatives = list()
        for _ in trange(n, desc='Processing Trajectories'):
            trajectory = self.map.get_random_trajectory()
            alternatives, labels = self.generate_n_alternatives(trajectory, m, dataset_name=dataset_name, **kwargs)
            trajectories_with_alternatives.append(
                dict(trajectory=trajectory, alternatives=alternatives, labels=labels)
            )
        return trajectories_with_alternatives

    def generate_alternatives(self, trajectory, output: Union[mp.Queue, None] = None, mode='one_patching'):
        start, dest = trajectory.endpoints
        if mode == 'one_patching':
            # Reroute the trajectory through a single random intermediate point.
            patch = self.map.get_valid_position()
            alternative = self.map.get_trajectory_from_vertices(start, patch, dest)
        else:
            raise RuntimeError(f'Mode "{mode}" is not implemented.')

        if output:
            output.put(alternative)
        return alternative

    def generate_n_alternatives(self, trajectory, n, dataset_name: Union[str, Path] = '',
                                mode='one_patching', equal_samples=True):
        assert mode in self.possible_modes, f'Parameter "mode" must be one of {self.possible_modes}, but was {mode}.'
        # Define an output queue
        output = mp.Queue()
        # Set up a list of processes that we want to run
        processes = [mp.Process(target=self.generate_alternatives,
                                kwargs=dict(trajectory=trajectory, output=output, mode=mode))
                     for _ in range(n)]
        # Run processes
        for p in processes:
            p.start()
        # Exit the completed processes
        for p in processes:
            p.join()
        # Get process results from the output queue
        results = [output.get() for _ in processes]

        # Label per homotopic class: class 0 holds everything homotopic to
        # the original trajectory; each new class gets the next free label.
        homotopy_classes = defaultdict(list)
        homotopy_classes[0].append(trajectory)
        for alternative in results:
            class_not_found = True
            # Check the alternative against a representative of every known class.
            for label in homotopy_classes.keys():
                if self.map.are_homotopic(homotopy_classes[label][0], alternative):
                    homotopy_classes[label].append(alternative)
                    class_not_found = False
                    break
            if class_not_found:
                label = len(homotopy_classes)
                homotopy_classes[label].append(alternative)

        # There should be as many homotopic samples as non-homotopic samples
        if equal_samples:
            homotopy_classes = self._remove_unequal(homotopy_classes)

        # Compose flat lists of alternatives with their labels
        alternatives, labels = list(), list()
        for key in homotopy_classes.keys():
            alternatives.extend(homotopy_classes[key])
            labels.extend([key] * len(homotopy_classes[key]))

        # Write to disk
        if dataset_name:
            self.write_to_disk(dataset_name, trajectory, alternatives, labels)

        return alternatives, labels

    def write_to_disk(self, filepath, trajectory, alternatives, labels):
        dataset_name = filepath if str(filepath).endswith('.pik') else f'{filepath}.pik'
        self.data_root.mkdir(exist_ok=True, parents=True)
        with shelve.open(str(self.data_root / dataset_name), protocol=pickle.HIGHEST_PROTOCOL) as f:
            new_key = len(f)
            f[f'trajectory_{new_key}'] = dict(alternatives=alternatives,
                                              trajectory=trajectory,
                                              labels=labels)
            if 'map' not in f:
                f['map'] = dict(map=self.map, name=f'map_{self.map.name}')

    @staticmethod
    def _remove_unequal(hom_dict):
        # Balance the classes: keep only as many alternative samples
        # (labels > 0) as there are homotopic samples (label 0), dropping
        # alternatives round-robin across the non-homotopic classes.
        hom_dict = hom_dict.copy()
        alternative_keys = [key for key in hom_dict.keys() if key != 0]
        i = 0
        while alternative_keys and sum(len(hom_dict[key]) for key in alternative_keys) > len(hom_dict[0]):
            key = alternative_keys[i % len(alternative_keys)]
            del hom_dict[key][-1]
            if not hom_dict[key]:
                del hom_dict[key]
                alternative_keys.remove(key)
            else:
                i += 1
        return hom_dict
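
A sketch of reading back what write_to_disk stores (hedged: assumes build_data.py above was run, producing data/test.pik; shelve may add a platform-specific suffix to the file on disk):

import shelve

with shelve.open('data/test.pik') as db:
    print(db['map']['name'])                 # 'map_tate_sw.bmp'
    sample = db['trajectory_0']
    print(len(sample['alternatives']), sample['labels'][:5])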

BIN
res/maps/Map.bmp Normal file

Binary file not shown.


BIN
res/maps/doom.bmp Normal file

Binary file not shown.


BIN
res/maps/home.bmp Normal file

Binary file not shown.


BIN
res/maps/maze.bmp Normal file

Binary file not shown.


BIN
res/maps/oet.bmp Normal file

Binary file not shown.


BIN
res/maps/priz.bmp Normal file

Binary file not shown.


BIN
res/maps/tate.bmp Normal file

Binary file not shown.


BIN
res/maps/tate_sw.bmp Normal file

Binary file not shown.


BIN
res/maps/tum.bmp Normal file

Binary file not shown.


0
visualization/bars.py Normal file

26
visualization/tools.py Normal file

@@ -0,0 +1,26 @@
from pathlib import Path

import matplotlib.pyplot as plt


class Plotter(object):
    def __init__(self, root_path=''):
        self.root_path = Path(root_path)

    def save_current_figure(self, path, extension='.png'):
        fig, _ = plt.gcf(), plt.gca()
        # Prepare the save location and check the image file extension
        path = self.root_path / Path(path if str(path).endswith(extension) else f'{str(path)}{extension}')
        path.parent.mkdir(exist_ok=True, parents=True)
        fig.savefig(path)
        fig.clf()

    def show_current_figure(self):
        fig, _ = plt.gcf(), plt.gca()
        fig.show()
        fig.clf()


if __name__ == '__main__':
    output_root = Path('..') / 'output'
    p = Plotter(output_root)
    p.save_current_figure('test.png')