commit 090546520e
parent 1e5bec814d
Author: Si11ium
Date:   2019-03-15 13:42:55 +01:00

    soup trajectory and box plot

70 changed files with 97 additions and 143 deletions


@@ -1 +0,0 @@
-{'divergent': 0, 'fix_zero': 10, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
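For orientation: this one-line file (and its sibling below) is a run tally — 'divergent' counts runs whose weights blew up, 'fix_zero' runs that settled on the trivial all-zero fixpoint, 'fix_other' and 'fix_sec' other and (presumably second-degree) fixpoints, and 'other' the rest. A minimal runnable sketch of such a classification, assuming it is driven by the final weights and the experiment's epsilon; the classify helper is hypothetical, not this repository's API:

import numpy as np

def classify(weights, epsilon=1e-4):
    # hypothetical classifier; the repo's real predicates may differ
    w = np.asarray(weights, dtype=float)
    if not np.all(np.isfinite(w)):
        return 'divergent'
    if np.allclose(w, 0, atol=epsilon):
        return 'fix_zero'
    # telling 'fix_other'/'fix_sec' apart needs the net's self-application
    # behaviour, which this sketch does not model
    return 'other'

counters = {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
for w in ([0.0, 0.0], [float('inf'), 1.0], [0.3, -0.2]):
    counters[classify(w)] += 1
print(counters)  # {'divergent': 1, 'fix_zero': 1, 'fix_other': 0, 'fix_sec': 0, 'other': 1}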

File diff suppressed because one or more lines are too long


@@ -0,0 +1,30 @@
+variation 10e-0
+avg time to vergence 3.63
+avg time as fixpoint 0
+variation 10e-1
+avg time to vergence 5.02
+avg time as fixpoint 0
+variation 10e-2
+avg time to vergence 6.46
+avg time as fixpoint 0
+variation 10e-3
+avg time to vergence 8.04
+avg time as fixpoint 0
+variation 10e-4
+avg time to vergence 9.61
+avg time as fixpoint 0.04
+variation 10e-5
+avg time to vergence 11.23
+avg time as fixpoint 1.38
+variation 10e-6
+avg time to vergence 12.99
+avg time as fixpoint 3.23
+variation 10e-7
+avg time to vergence 14.58
+avg time as fixpoint 4.84
+variation 10e-8
+avg time to vergence 21.95
+avg time as fixpoint 11.91
+variation 10e-9
+avg time to vergence 26.45
+avg time as fixpoint 16.47
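The commit message mentions a box plot, which needs per-run distributions rather than the averages above, so the raw times presumably sit in the suppressed long files. The text format shown here is still easy to read back; a minimal sketch (the file name and plot styling are assumptions, not from the commit):

import matplotlib.pyplot as plt

def parse_results(path):
    variations, vergence, fixpoint = [], [], []
    with open(path) as f:
        lines = [line.strip() for line in f if line.strip()]
    for i in range(0, len(lines), 3):   # records are 3-line groups
        variations.append(lines[i].split()[-1])           # e.g. '10e-0'
        vergence.append(float(lines[i + 1].split()[-1]))  # avg time to vergence
        fixpoint.append(float(lines[i + 2].split()[-1]))  # avg time as fixpoint
    return variations, vergence, fixpoint

variations, vergence, fixpoint = parse_results('variation_results.txt')
plt.plot(variations, vergence, marker='o', label='avg time to vergence')
plt.plot(variations, fixpoint, marker='s', label='avg time as fixpoint')
plt.xlabel('noise variation')
plt.ylabel('avg time (steps)')
plt.legend()
plt.show()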

File diff suppressed because one or more lines are too long


@@ -1 +0,0 @@
-{'divergent': 6, 'fix_zero': 4, 'fix_other': 0, 'fix_sec': 0, 'other': 0}


@@ -36,11 +36,11 @@ if __name__ == '__main__':
     exp.trials = 100000
     exp.epsilon = 1e-4
     net_generators = []
-    for activation in ['linear', 'sigmoid', 'relu']:
+    for activation in ['linear']:
         net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
         net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
-        #net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
-        net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+        # net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+        # net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
     all_counters = []
     all_notable_nets = []
     all_names = []
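Aside: the lambda activation=activation pattern used by these generators is the standard workaround for Python's late-binding closures — without the default argument, every lambda would see the loop variable's final value. A self-contained illustration:

# late binding: all three closures read 'act' only after the loop finished
late = [lambda: act for act in ('linear', 'sigmoid', 'relu')]
print([f() for f in late])    # ['relu', 'relu', 'relu']

# default argument: each lambda captures the value current at definition time
bound = [lambda act=act: act for act in ('linear', 'sigmoid', 'relu')]
print([f() for f in bound])   # ['linear', 'sigmoid', 'relu']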


@@ -60,8 +60,8 @@ if __name__ == '__main__':
     net_generators = []
     for activation in ['linear']: # , 'sigmoid', 'relu']:
         for use_bias in [False]:
-            # net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
             # net_generators += [lambda activation=activation, use_bias=use_bias: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
             # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]


@@ -19,14 +19,15 @@ if __name__ == '__main__':
     if True:
         # WeightWise Neural Network
-        with FixpointExperiment() as exp:
-            for run_id in tqdm(range(10)):
-                net = ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)
-                                        .with_keras_params(activation='linear'))
-                run_exp(net)
-                K.clear_session()
-            exp.log(exp.counters)
-            exp.save(trajectorys=exp.without_particles())
+        for _ in range(10):
+            with FixpointExperiment() as exp:
+                for run_id in tqdm(range(20)):
+                    net = ParticleDecorator(WeightwiseNeuralNetwork(width=2, depth=2)
+                                            .with_keras_params(activation='linear'))
+                    run_exp(net)
+                    K.clear_session()
+                exp.log(exp.counters)
+                exp.save(trajectorys=exp.without_particles())
     if False:
         # Aggregating Neural Network
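The restructuring above trades one 10-run experiment for ten independent experiments of 20 runs each, so every repetition gets its own log and saved trajectories. Keeping K.clear_session() inside the inner loop matters because each ParticleDecorator run builds a fresh Keras model; stripped down to plain Keras (a sketch, not the repo's classes), the pattern is:

from keras import backend as K
from keras.models import Sequential
from keras.layers import Dense

for run_id in range(20):
    # every iteration registers a new model in the current TensorFlow graph
    model = Sequential([Dense(2, input_dim=4, use_bias=False)])
    # ... run the self-application experiment with 'model' here ...
    K.clear_session()  # drop the accumulated graph so memory stays flat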


@@ -40,8 +40,8 @@ if __name__ == '__main__':
     for activation in ['linear']: # , 'sigmoid', 'relu']:
         for use_bias in [False]:
             net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
     all_counters = []
     all_notable_nets = []
     all_names = []
@@ -59,7 +59,9 @@ if __name__ == '__main__':
            all_notable_nets += [notable_nets]
            all_names += [name]
            K.clear_session()
-        exp.save(all_counters=all_counters) #net types reached in the end
+        exp.save(all_counters=all_counters)
+        exp.save(trajectorys=exp.without_particles())
+        # net types reached in the end
         # exp.save(all_notable_nets=all_notable_nets)
         exp.save(all_names=all_names) #experiment setups
         for exp_id, counter in enumerate(all_counters):
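A note on the exp.save(name=value) calls used throughout: each keyword evidently becomes a named artifact of the experiment, which is why the new trajectory dump is just another keyword (trajectorys=exp.without_particles()). One plausible shape for such a method — a sketch under that assumption, not the repository's actual Experiment class:

import os
import pickle

class Experiment:
    # sketch of a keyword-driven saver; the real class may differ
    def __init__(self, directory='results'):
        self.dir = directory
        os.makedirs(self.dir, exist_ok=True)

    def save(self, **kwargs):
        for name, value in kwargs.items():  # one file per keyword
            with open(os.path.join(self.dir, name + '.pkl'), 'wb') as f:
                pickle.dump(value, f)

exp = Experiment()
exp.save(all_counters=[{'divergent': 0, 'fix_zero': 10}])  # -> results/all_counters.pkl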