Train Active

Author: Si11ium
Date: 2020-03-03 15:10:17 +01:00
Parent: 44f6589259
Commit: 1f612a968c
13 changed files with 102 additions and 98 deletions


@@ -18,21 +18,12 @@ class ConvHomDetector(LightningBaseModule):
def configure_optimizers(self):
return Adam(self.parameters(), lr=self.hparams.lr)
def validation_step(self, *args, **kwargs):
pass
def validation_end(self, outputs):
pass
def training_step(self, batch_xy, batch_nb, *args, **kwargs):
batch_x, batch_y = batch_xy
pred_y = self(batch_x)
loss = F.binary_cross_entropy(pred_y, batch_y)
loss = F.binary_cross_entropy(pred_y, batch_y.float())
return {'loss': loss, 'log': dict(loss=loss)}
def test_step(self, *args, **kwargs):
pass
def __init__(self, *params):
super(ConvHomDetector, self).__init__(*params)
@@ -75,8 +66,9 @@ class ConvHomDetector(LightningBaseModule):
#
self.linear = nn.Linear(reduce(mul, self.flatten.shape), self.hparams.model_param.classes * 10)
self.classifier = nn.Linear(self.hparams.model_param.classes * 10, self.hparams.model_param.classes)
self.softmax = nn.Softmax()
# Comments on Multi Class labels
self.classifier = nn.Linear(self.hparams.model_param.classes * 10, 1) # self.hparams.model_param.classes)
self.out_activation = nn.Sigmoid() # nn.Softmax
def forward(self, x):
tensor = self.map_conv_0(x)
@@ -88,5 +80,5 @@ class ConvHomDetector(LightningBaseModule):
tensor = self.flatten(tensor)
tensor = self.linear(tensor)
tensor = self.classifier(tensor)
tensor = self.softmax(tensor)
tensor = self.out_activation(tensor)
return tensor
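
Note on the single-logit head above: nn.Sigmoid feeds F.binary_cross_entropy, which expects probabilities in (0, 1) and float targets. A minimal standalone sketch of that pairing (batch, feature size, and names are illustrative, not taken from this repo):

import torch
import torch.nn as nn
import torch.nn.functional as F

features = torch.randn(4, 320)               # hypothetical flattened conv features
labels = torch.tensor([0, 1, 1, 0])          # integer class labels

head = nn.Sequential(
    nn.Linear(320, 1),                       # single output unit for the binary decision
    nn.Sigmoid(),                            # squash the logit to a probability
)

pred = head(features).squeeze(-1)            # shape: (4,)
loss = F.binary_cross_entropy(pred, labels.float())   # targets must be float, not long

For a multi-class output one would instead keep a softmax-style head, or better, feed raw logits to F.cross_entropy.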


@@ -106,7 +106,7 @@ class ResidualModule(nn.Module):
self.in_shape = in_shape
module_paramters.update(in_shape=in_shape)
self.activation = activation() if activation else lambda x: x
self.residual_block = [module_class(**module_paramters) for _ in range(n)]
self.residual_block = nn.ModuleList([module_class(**module_paramters) for _ in range(n)])
assert self.in_shape == self.shape, f'The in_shape: {self.in_shape} - must match the out_shape: {self.shape}.'
def forward(self, x):
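
Note on the nn.ModuleList above: unlike a plain Python list, it registers each block as a submodule, so the weights show up in .parameters() (and therefore reach the optimizer) and follow .to(device). A small illustrative sketch (Stack and the layer sizes are made up):

import torch.nn as nn

class Stack(nn.Module):
    def __init__(self, n=3):
        super().__init__()
        # A plain list would hide these layers from .parameters() and .to();
        # nn.ModuleList registers each entry as a proper submodule.
        self.blocks = nn.ModuleList([nn.Linear(8, 8) for _ in range(n)])

    def forward(self, x):
        for block in self.blocks:
            x = block(x)
        return x

print(sum(p.numel() for p in Stack().parameters()))  # 3 * (8*8 + 8) = 216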


@@ -133,12 +133,6 @@ class LightningBaseModule(pl.LightningModule, ABC):
def forward(self, *args, **kwargs):
raise NotImplementedError
def validation_step(self, *args, **kwargs):
raise NotImplementedError
def validation_end(self, outputs):
raise NotImplementedError
def training_step(self, batch_xy, batch_nb, *args, **kwargs):
raise NotImplementedError
@@ -146,21 +140,7 @@ class LightningBaseModule(pl.LightningModule, ABC):
raise NotImplementedError
def test_end(self, outputs):
from sklearn.metrics import roc_auc_score
y_scores, y_true = [], []
for output in outputs:
y_scores.append(output['y_pred'])
y_true.append(output['y_true'])
y_true = torch.cat(y_true, dim=0)
# FIXME: What did this do? Do I need it?
# y_true = (y_true != V.HOMOTOPIC).long()
y_scores = torch.cat(y_scores, dim=0)
roc_auc_scores = roc_auc_score(y_true.cpu().numpy(), y_scores.cpu().numpy())
print(f'AUC Score: {roc_auc_scores}')
return {'roc_auc_scores': roc_auc_scores}
raise NotImplementedError
def init_weights(self):
def _weight_init(m):
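
For reference, computing ROC AUC over collected test outputs, as in the test_end body shown above, amounts to concatenating per-batch scores and labels before calling sklearn. A standalone sketch with made-up tensors:

import torch
from sklearn.metrics import roc_auc_score

# Hypothetical per-batch outputs, as a test loop might collect them.
outputs = [
    {'y_pred': torch.tensor([0.9, 0.2]), 'y_true': torch.tensor([1, 0])},
    {'y_pred': torch.tensor([0.7, 0.4]), 'y_true': torch.tensor([1, 1])},
]

y_scores = torch.cat([o['y_pred'] for o in outputs], dim=0)
y_true = torch.cat([o['y_true'] for o in outputs], dim=0)
auc = roc_auc_score(y_true.cpu().numpy(), y_scores.cpu().numpy())
print(f'AUC Score: {auc}')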


@@ -29,11 +29,11 @@ class Map(object):
@property
def width(self):
return self.shape[0]
return self.shape[-2]
@property
def height(self):
return self.shape[1]
return self.shape[-1]
@property
def as_graph(self):
@@ -43,6 +43,10 @@ class Map(object):
def as_array(self):
return self.map_array
@property
def as_2d_array(self):
return self.map_array[1:]
def __init__(self, name='', array_like_map_representation=None):
if array_like_map_representation is not None:
if array_like_map_representation.ndim == 2:
@@ -72,22 +76,26 @@ class Map(object):
# Differentiate between 8 and 4 neighbors
if not full_neighbors and n >= 2:
break
# TODO: make this explicit and less ugly
query_node = idx[:1] + (idx[1] + ydif,) + (idx[2] + xdif,)
if graph.has_node(query_node):
graph.add_edge(idx, query_node, weight=weight)
return graph
@classmethod
def from_image(cls, imagepath: Path):
def from_image(cls, imagepath: Path, embedding_size=None):
with Image.open(imagepath) as image:
# Turn the image to single Channel Greyscale
if image.mode != 'L':
image = image.convert('L')
map_array = np.expand_dims(np.array(image), axis=0)
return cls(name=imagepath.name, array_like_map_representation=map_array)
if embedding_size:
assert isinstance(embedding_size, tuple), f'embedding_size was of type: {type(embedding_size)}'
embedding = np.zeros(embedding_size)
embedding[:map_array.shape[0], :map_array.shape[1], :map_array.shape[2]] = map_array
map_array = embedding
return cls(name=imagepath.name, array_like_map_representation=map_array)
def simple_trajectory_between(self, start, dest):
vertices = list(nx.shortest_path(self._G, start, dest))
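
Note on the embedding_size branch in from_image above: the grayscale map of shape (1, H, W) is copied into the corner of a larger zero array, i.e. zero-padded to a fixed shape; the surrounding assert implies embedding_size must be a tuple at least as large as the map in every dimension. A tiny standalone sketch (shapes are made up):

import numpy as np

map_array = np.ones((1, 3, 4))          # hypothetical single-channel 3x4 map
embedding_size = (1, 5, 5)              # fixed target shape, >= map shape per axis

embedding = np.zeros(embedding_size)
embedding[:map_array.shape[0], :map_array.shape[1], :map_array.shape[2]] = map_array
print(embedding.shape)                  # (1, 5, 5); the map sits in the corner, rest is zero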
@@ -105,36 +113,46 @@ class Map(object):
return Trajectory(coords)
def get_random_trajectory(self):
start = self.get_valid_position()
dest = self.get_valid_position()
return self.simple_trajectory_between(start, dest)
simple_trajectory = None
while simple_trajectory is None:
try:
start = self.get_valid_position()
dest = self.get_valid_position()
simple_trajectory = self.simple_trajectory_between(start, dest)
except nx.exception.NetworkXNoPath:
pass
return simple_trajectory
def generate_alternative(self, trajectory, mode='one_patching'):
start, dest = trajectory.endpoints
if mode == 'one_patching':
patch = self.get_valid_position()
alternative = self.get_trajectory_from_vertices(start, patch, dest)
else:
raise RuntimeError(f'mode checking went wrong...')
alternative = None
while alternative is None:
try:
if mode == 'one_patching':
patch = self.get_valid_position()
alternative = self.get_trajectory_from_vertices(start, patch, dest)
else:
raise RuntimeError(f'mode checking went wrong...')
except nx.exception.NetworkXNoPath:
pass
return alternative
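
Note on the retry loops in get_random_trajectory and generate_alternative above: randomly sampled endpoints can land in different connected components, in which case nx.shortest_path raises NetworkXNoPath; catching it and resampling keeps drawing endpoints until a path exists. A standalone sketch with a deliberately disconnected toy graph (random.choice stands in for get_valid_position):

import random
import networkx as nx

# Hypothetical graph with two disconnected components, so some endpoint
# pairs have no connecting path at all.
G = nx.Graph()
G.add_edges_from([(1, 2), (2, 3), (10, 11)])
nodes = list(G.nodes)

path = None
while path is None:
    try:
        start, dest = random.choice(nodes), random.choice(nodes)
        path = nx.shortest_path(G, start, dest)
    except nx.exception.NetworkXNoPath:
        pass  # endpoints fell into different components; resample and retry
print(path)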
def are_homotopic(self, trajectory, other_trajectory):
if not all(isinstance(x, Trajectory) for x in [trajectory, other_trajectory]):
raise TypeError
polyline = trajectory.vertices.copy()
polyline.extend(reversed(other_trajectory.vertices))
polyline = trajectory.xy_vertices
polyline.extend(reversed(other_trajectory.xy_vertices))
img = Image.new('L', (self.height, self.width), 0)
draw = ImageDraw.Draw(img)
draw.polygon(polyline, outline=255, fill=255)
a = (np.array(img) * np.where(self.map_array == self.white, 0, 1)).sum()
a = (np.asarray(img) * np.where(self.as_2d_array == self.white, 0, 1)).sum()
if a >= 1:
return False
if a:
return False # Non-homotopic
else:
return True
return True # Homotopic
def draw(self):
fig, ax = plt.gcf(), plt.gca()
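
Note on are_homotopic above: the two trajectories are closed into a single loop (one forward, the other reversed), the loop is rasterized onto a blank image, and the filled polygon is intersected with the obstacle mask; any overlap means an obstacle lies inside the loop, so the trajectories are not homotopic. A self-contained sketch of the same idea (grid, obstacle, and loop coordinates are made up):

import numpy as np
from PIL import Image, ImageDraw

white = 255
grid = np.full((6, 6), white)            # hypothetical occupancy grid: 255 = free
grid[2, 3] = 0                           # one obstacle cell

# Closed loop: one path followed by the reverse of the other, as (x, y) pairs.
polyline = [(0, 0), (5, 0), (5, 5), (0, 5)]

img = Image.new('L', (grid.shape[1], grid.shape[0]), 0)
ImageDraw.Draw(img).polygon(polyline, outline=255, fill=255)

# Any overlap between the filled loop and an obstacle means "not homotopic".
overlap = (np.asarray(img) * np.where(grid == white, 0, 1)).sum()
print(overlap == 0)                      # False here: the loop encloses the obstacle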


@@ -8,43 +8,51 @@ import numpy as np
class Trajectory(object):
@property
def vertices(self):
return self._vertices
@property
def xy_vertices(self):
return [(x, y) for _, x, y in self._vertices]
@property
def endpoints(self):
return self.start, self.dest
@property
def start(self):
return self.vertices[0]
return self._vertices[0]
@property
def dest(self):
return self.vertices[-1]
return self._vertices[-1]
@property
def xs(self):
return [x[1] for x in self.vertices]
return [x[1] for x in self._vertices]
@property
def ys(self):
return [x[0] for x in self.vertices]
return [x[0] for x in self._vertices]
@property
def as_paired_list(self):
return list(zip(self.vertices[:-1], self.vertices[1:]))
return list(zip(self._vertices[:-1], self._vertices[1:]))
@property
def np_vertices(self):
return [np.array(vertice) for vertice in self.vertices]
return [np.array(vertice) for vertice in self._vertices]
def __init__(self, vertices: Union[List[Tuple[int]], None] = None):
assert any((isinstance(vertices, list), vertices is None))
if vertices is not None:
self.vertices = vertices
self._vertices = vertices
pass
def is_equal_to(self, other_trajectory):
# ToDo: do further equality Checks here
return self.vertices == other_trajectory.vertices
return self._vertices == other_trajectory.vertices
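
Usage note: the raw vertices are stored as 3-tuples (the leading entry looks like the channel/layer index of the 3D map array), and xy_vertices strips that entry for 2D use. A small sketch, assuming the Trajectory class above is importable and using made-up coordinates:

# Hypothetical vertices in (layer, x, y) form, mirroring the 3-tuple graph nodes.
traj = Trajectory([(0, 1, 1), (0, 2, 1), (0, 2, 2)])

print(traj.start, traj.dest)     # (0, 1, 1) (0, 2, 2)
print(traj.xy_vertices)          # [(1, 1), (2, 1), (2, 2)] -- leading index stripped
print(traj.as_paired_list)       # consecutive vertex pairs, one per segment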
def draw(self, highlights=True, label=None, **kwargs):
if label is not None: