Box and stuff

Si11ium
2019-03-13 15:30:54 +01:00
parent 5c7a646d69
commit d4f0024f4b
34 changed files with 413 additions and 211 deletions

View File

@@ -0,0 +1,36 @@
ParticleDecorator activation='linear' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='linear' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='linear' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='sigmoid' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='sigmoid' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='sigmoid' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='relu' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='relu' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
ParticleDecorator activation='relu' use_bias='False'
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 100}
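Each pair of lines above is one configuration followed by its counter summary over all trials. A minimal sketch of the classification that produces such a summary, assuming the predicate methods (is_diverged, is_fixpoint, is_zero) from this repo's network classes; the exact branch order and the second-order fixpoint check are assumptions:

def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}

def count(counters, net, notable_nets=[]):
    # Sort one finished net into exactly one bucket of the counter dict.
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):  # second-order fixpoint; this signature is an assumption
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets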

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,30 @@
variation 10e-0
avg time to vergence 3.72
avg time as fixpoint 0
variation 10e-1
avg time to vergence 5.13
avg time as fixpoint 0
variation 10e-2
avg time to vergence 6.53
avg time as fixpoint 0
variation 10e-3
avg time to vergence 8.09
avg time as fixpoint 0
variation 10e-4
avg time to vergence 9.81
avg time as fixpoint 0.06
variation 10e-5
avg time to vergence 11.43
avg time as fixpoint 1.51
variation 10e-6
avg time to vergence 13.15
avg time as fixpoint 3.34
variation 10e-7
avg time to vergence 14.57
avg time as fixpoint 4.79
variation 10e-8
avg time to vergence 22.41
avg time as fixpoint 12.37
variation 10e-9
avg time to vergence 26.17
avg time as fixpoint 16.11
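A note on the labels: 'variation 10e-d' here denotes a perturbation scale of 10**(-d), since the experiment below starts at current_scale = 1.0 and divides by ten per depth step (as a Python literal, 10e-0 would be 10.0, so read the label as notation, not code). A small sketch of how the per-scale averages are taken from the flat result lists, mirroring the logging loop in the script below:

from statistics import mean

def summarize(ys, zs, trials=100, depth=10):
    # One entry per trial, ordered by scale bucket: bucket d covers
    # indices [d * trials, (d + 1) * trials).
    for d in range(depth):
        bucket = slice(d * trials, (d + 1) * trials)
        print('variation 10e-' + str(d))
        print('avg time to vergence ' + str(mean(ys[bucket])))
        print('avg time as fixpoint ' + str(mean(zs[bucket])))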

View File

@@ -0,0 +1,8 @@
ParticleDecorator activation='linear' use_bias=False
{'divergent': 0, 'fix_zero': 0, 'fix_other': 19, 'fix_sec': 0, 'other': 1}
ParticleDecorator activation='linear' use_bias=False
{'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 20}

View File

@@ -28,34 +28,38 @@ def count(counters, net, notable_nets=[]):
        counters['other'] += 1
    return counters, notable_nets
with Experiment('fixpoint-density') as exp:
    exp.trials = 100
    exp.epsilon = 1e-4
    net_generators = []
    for activation in ['linear', 'sigmoid', 'relu']:
        net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
        net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
        net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
    all_counters = []
    all_notable_nets = []
    all_names = []
    for net_generator_id, net_generator in enumerate(net_generators):
        counters = generate_counters()
        notable_nets = []
        for _ in tqdm(range(exp.trials)):
            net = net_generator().with_params(epsilon=exp.epsilon)
            name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
            count(counters, net, notable_nets)
            keras.backend.clear_session()
        all_counters += [counters]
        all_notable_nets += [notable_nets]
        all_names += [name]
    exp.save(all_counters=all_counters)
    exp.save(all_notable_nets=all_notable_nets)
    exp.save(all_names=all_names)
    for exp_id, counter in enumerate(all_counters):
        exp.log(all_names[exp_id])
        exp.log(all_counters[exp_id])
        exp.log('\n')
print('Done')
if __name__ == '__main__':
    with Experiment('fixpoint-density') as exp:
        exp.trials = 100
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear', 'sigmoid', 'relu']:
            net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
            # net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
        all_counters = []
        all_notable_nets = []
        all_names = []
        for net_generator_id, net_generator in enumerate(net_generators):
            counters = generate_counters()
            notable_nets = []
            for _ in tqdm(range(exp.trials)):
                net = net_generator().with_params(epsilon=exp.epsilon)
                net = ParticleDecorator(net)
                name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
                count(counters, net, notable_nets)
                keras.backend.clear_session()
            all_counters += [counters]
            all_notable_nets += [notable_nets]
            all_names += [name]
        exp.save(all_counters=all_counters)
        exp.save(all_notable_nets=all_notable_nets)
        exp.save(all_names=all_names)
        for exp_id, counter in enumerate(all_counters):
            exp.log(all_names[exp_id])
            exp.log(all_counters[exp_id])
            exp.log('\n')
    print('Done')
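The recurring change in this commit is wrapping every net in ParticleDecorator while calls such as get_keras_params(), self_attack() and is_fixpoint() continue to work on the wrapper. A minimal sketch of a decorator with that pass-through behavior, assuming pure delegation; the real class presumably also records per-particle state, which is not shown here:

class ParticleDecorator:
    def __init__(self, net):
        self.net = net

    def __getattr__(self, name):
        # Invoked only when normal lookup fails, so every attribute and
        # method not defined on the wrapper is delegated to the wrapped net.
        return getattr(self.net, name)

This also explains why the density logs above report 'ParticleDecorator' as the class name: name is built from net.__class__.__name__ after wrapping.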

View File

@@ -5,7 +5,6 @@ import os
# Concat top Level dir to system environmental variables
sys.path += os.path.join('..', '.')
from util import *
from experiment import *
from network import *
@@ -16,19 +15,22 @@ import keras.backend
from statistics import mean
avg = mean
def generate_fixpoint_weights():
    return [
        np.array([[1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], dtype=np.float32),
        np.array([[1.0, 0.0], [0.0, 0.0]], dtype=np.float32),
        np.array([[1.0], [0.0]], dtype=np.float32)
    ]
def generate_fixpoint_net():
    net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='sigmoid')
    net.set_weights(generate_fixpoint_weights())
    return net
def vary(old_weights, e=1.0):
    new_weights = copy.deepcopy(old_weights)
    for layer_id, layer in enumerate(new_weights):
@@ -40,45 +42,49 @@ def vary(old_weights, e=1.0):
                new_weights[layer_id][cell_id][weight_id] = weight - prng() * e
    return new_weights
with Experiment('known-fixpoint-variation') as exp:
    exp.depth = 10
    exp.trials = 100
    exp.max_steps = 100
    exp.epsilon = 1e-4
    exp.xs = []
    exp.ys = []
    exp.zs = []
    exp.notable_nets = []
    current_scale = 1.0
    for _ in range(exp.depth):
        print('variation scale ' + str(current_scale))
        for _ in tqdm(range(exp.trials)):
            net = generate_fixpoint_net().with_params(epsilon=exp.epsilon)
            net.set_weights(vary(net.get_weights(), current_scale))
            time_to_something = 0
            time_as_fixpoint = 0
            still_fixpoint = True
            for _ in range(exp.max_steps):
                net.self_attack()
                if net.is_zero() or net.is_diverged():
                    break
                if net.is_fixpoint():
                    if still_fixpoint:
                        time_as_fixpoint += 1
                    else:
                        print('remarkable')
                        exp.notable_nets += [net.get_weights()]
                        still_fixpoint = True
                else:
                    still_fixpoint = False
                time_to_something += 1
            exp.xs += [current_scale]
            exp.ys += [time_to_something]  # time steps taken to reach divergence or zero (reaching another fix-point basically never happens)
            exp.zs += [time_as_fixpoint]  # time steps still regarded as the initial fix-point
            keras.backend.clear_session()
        current_scale /= 10.0
    for d in range(exp.depth):
        exp.log('variation 10e-' + str(d))
        exp.log('avg time to vergence ' + str(avg(exp.ys[d*exp.trials:(d+1)*exp.trials])))
        exp.log('avg time as fixpoint ' + str(avg(exp.zs[d*exp.trials:(d+1)*exp.trials])))

if __name__ == '__main__':
    with Experiment('known-fixpoint-variation') as exp:
        exp.depth = 10
        exp.trials = 100
        exp.max_steps = 100
        exp.epsilon = 1e-4
        exp.xs = []
        exp.ys = []
        exp.zs = []
        exp.notable_nets = []
        current_scale = 1.0
        for _ in range(exp.depth):
            print('variation scale ' + str(current_scale))
            for _ in tqdm(range(exp.trials)):
                net = generate_fixpoint_net().with_params(epsilon=exp.epsilon)
                net = ParticleDecorator(net)
                net.set_weights(vary(net.get_weights(), current_scale))
                time_to_something = 0
                time_as_fixpoint = 0
                still_fixpoint = True
                for _ in range(exp.max_steps):
                    net.self_attack()
                    if net.is_zero() or net.is_diverged():
                        break
                    if net.is_fixpoint():
                        if still_fixpoint:
                            time_as_fixpoint += 1
                        else:
                            print('remarkable')
                            exp.notable_nets += [net.get_weights()]
                            still_fixpoint = True
                    else:
                        still_fixpoint = False
                    time_to_something += 1
                exp.xs += [current_scale]
                # time steps taken to reach divergence or zero (reaching another fix-point basically never happens)
                exp.ys += [time_to_something]
                # time steps still regarded as the initial fix-point
                exp.zs += [time_as_fixpoint]
                keras.backend.clear_session()
            current_scale /= 10.0
        for d in range(exp.depth):
            exp.log('variation 10e-' + str(d))
            exp.log('avg time to vergence ' + str(avg(exp.ys[d*exp.trials:(d+1) * exp.trials])))
            exp.log('avg time as fixpoint ' + str(avg(exp.zs[d*exp.trials:(d+1) * exp.trials])))
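The hunk above shows only the tail of vary(). For orientation, a hedged reconstruction of the whole perturbation helper: prng() is assumed to return a float in [0, 1), and the addition branch is assumed as the mirror image of the subtraction line that is shown:

import copy
import random

prng = random.random  # assumption about the repo's prng()

def vary(old_weights, e=1.0):
    # Perturb every weight by up to e, in a random direction.
    new_weights = copy.deepcopy(old_weights)
    for layer_id, layer in enumerate(new_weights):
        for cell_id, cell in enumerate(layer):
            for weight_id, weight in enumerate(cell):
                if prng() < 0.5:
                    new_weights[layer_id][cell_id][weight_id] = weight + prng() * e
                else:
                    new_weights[layer_id][cell_id][weight_id] = weight - prng() * e
    return new_weights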

View File

@@ -3,6 +3,9 @@ import os
from typing import Tuple
# Concat top Level dir to system environmental variables
sys.path += os.path.join('..', '.')
from util import *
from experiment import *
from network import *
@@ -10,10 +13,6 @@ from network import *
import keras.backend
# Concat top Level dir to system environmental variables
sys.path += os.path.join('..', '.')
def generate_counters():
    """
    Initial build of the counter dict, to store counts.
@@ -51,46 +50,52 @@ def count(counters, net, notable_nets=[]):
        counters['other'] += 1
    return counters, notable_nets
if __name__ == '__main__':

    with Experiment('mixed-self-fixpoints') as exp:
        exp.trials = 20
        exp.selfattacks = 4
        exp.trains_per_selfattack_values = [100 * i for i in range(11)]
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear', 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_names = []
        all_data = []
        for net_generator_id, net_generator in enumerate(net_generators):
            xs = []
            ys = []
            for trains_per_selfattack in exp.trains_per_selfattack_values:
                counters = generate_counters()
                notable_nets = []
                for _ in tqdm(range(exp.trials)):
                    net = TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon)
                    name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                    for selfattack_id in range(exp.selfattacks):
                        net.self_attack()
                        for train_id in range(trains_per_selfattack):
                            loss = net.compiled().train(epoch=selfattack_id*trains_per_selfattack+train_id)
                            if net.is_diverged() or net.is_fixpoint():
                                break
                    count(counters, net, notable_nets)
                    keras.backend.clear_session()
                xs += [trains_per_selfattack]
                ys += [float(counters['fix_zero'] + counters['fix_other']) / float(exp.trials)]
            all_names += [name]
            all_data += [{'xs': xs, 'ys': ys}]  # xs: how many trains per self-attack from exp.trains_per_selfattack_values, ys: average amount of fixpoints found
        exp.save(all_names=all_names)
        exp.save(all_data=all_data)
        for exp_id, name in enumerate(all_names):
            exp.log(all_names[exp_id])
            exp.log(all_data[exp_id])
            exp.log('\n')

    with Experiment('mixed-self-fixpoints') as exp:
        exp.trials = 20
        exp.selfattacks = 4
        exp.trains_per_selfattack_values = [100 * i for i in range(11)]
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # , 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_names = []
        all_data = []
        for net_generator_id, net_generator in enumerate(net_generators):
            xs = []
            ys = []
            for trains_per_selfattack in exp.trains_per_selfattack_values:
                counters = generate_counters()
                notable_nets = []
                for _ in tqdm(range(exp.trials)):
                    net = ParticleDecorator(net_generator())
                    net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
                    name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                    for selfattack_id in range(exp.selfattacks):
                        net.self_attack()
                        for train_id in range(trains_per_selfattack):
                            loss = net.compiled().train(epoch=selfattack_id*trains_per_selfattack+train_id)
                            if net.is_diverged() or net.is_fixpoint():
                                break
                    count(counters, net, notable_nets)
                    keras.backend.clear_session()
                xs += [trains_per_selfattack]
                ys += [float(counters['fix_zero'] + counters['fix_other']) / float(exp.trials)]
            all_names += [name]
            # xs: how many trains per self-attack from exp.trains_per_selfattack_values
            # ys: average amount of fixpoints found
            all_data += [{'xs': xs, 'ys': ys}]
        exp.save(all_names=all_names)
        exp.save(all_data=all_data)
        for exp_id, name in enumerate(all_names):
            exp.log(all_names[exp_id])
            exp.log(all_data[exp_id])
            exp.log('\n')
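A note on the generator lambdas used throughout these scripts: the activation=activation, use_bias=use_bias default arguments are what bind the current loop values at definition time. A plain closure would look the names up at call time, so every generator would see only the last iteration's values:

late = [lambda: activation for activation in ['linear', 'sigmoid', 'relu']]
bound = [lambda activation=activation: activation for activation in ['linear', 'sigmoid', 'relu']]

print([f() for f in late])   # ['relu', 'relu', 'relu']
print([f() for f in bound])  # ['linear', 'sigmoid', 'relu']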

View File

@@ -8,7 +8,7 @@ from util import *
from experiment import *
from network import *
import keras.backend
import keras.backend as K
def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
@@ -29,36 +29,40 @@ def count(counters, net, notable_nets=[]):
        counters['other'] += 1
    return counters, notable_nets
with Experiment('training_fixpoint') as exp:
    exp.trials = 5
    exp.run_count = 500
    exp.epsilon = 1e-4
    net_generators = []
    for activation in ['linear', 'sigmoid', 'relu']:
        for use_bias in [False]:
            net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
            net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
    all_counters = []
    all_notable_nets = []
    all_names = []
    for net_generator_id, net_generator in enumerate(net_generators):
        counters = generate_counters()
        notable_nets = []
        for _ in tqdm(range(exp.trials)):
            net = TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon)
            name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
            for run_id in range(exp.run_count):
                loss = net.compiled().train(epoch=run_id+1)
            count(counters, net, notable_nets)
            keras.backend.clear_session()
        all_counters += [counters]
        all_notable_nets += [notable_nets]
        all_names += [name]
    exp.save(all_counters=all_counters)  # net types reached in the end
    exp.save(all_notable_nets=all_notable_nets)
    exp.save(all_names=all_names)  # experiment setups
    for exp_id, counter in enumerate(all_counters):
        exp.log(all_names[exp_id])
        exp.log(all_counters[exp_id])
        exp.log('\n')
if __name__ == '__main__':
    with Experiment('training_fixpoint') as exp:
        exp.trials = 20
        exp.run_count = 500
        exp.epsilon = 1e-4
        net_generators = []
        for activation in ['linear']:  # , 'sigmoid', 'relu']:
            for use_bias in [False]:
                net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
                # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
        all_counters = []
        all_notable_nets = []
        all_names = []
        for net_generator_id, net_generator in enumerate(net_generators):
            counters = generate_counters()
            notable_nets = []
            for _ in tqdm(range(exp.trials)):
                net = ParticleDecorator(net_generator())
                net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=exp.epsilon)
                name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
                for run_id in range(exp.run_count):
                    loss = net.compiled().train(epoch=run_id+1)
                count(counters, net, notable_nets)
            all_counters += [counters]
            all_notable_nets += [notable_nets]
            all_names += [name]
            K.clear_session()
        exp.save(all_counters=all_counters)  # net types reached in the end
        # exp.save(all_notable_nets=all_notable_nets)
        exp.save(all_names=all_names)  # experiment setups
        for exp_id, counter in enumerate(all_counters):
            exp.log(all_names[exp_id])
            exp.log(all_counters[exp_id])
            exp.log('\n')
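To make the wrapping order in the new code explicit, a usage sketch assembled only from calls that appear in this diff: the particle bookkeeping wraps the raw network and the training decorator wraps the particle, so net.net names the ParticleDecorator, which is why the logs in this commit report 'ParticleDecorator' as the class name:

net = WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation='linear', use_bias=False)
net = ParticleDecorator(net)
net = TrainingNeuralNetworkDecorator(net).with_params(epsilon=1e-4)
for run_id in range(500):
    loss = net.compiled().train(epoch=run_id + 1)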