foundations
3 binary files changed (not shown).
@@ -0,0 +1,8 @@
+ParticleDecorator activation='linear' use_bias=False
+{'xs': [0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000], 'ys': [0.45, 0.4, 0.6, 0.8, 0.95, 0.85, 0.95, 0.85, 0.9, 1.0, 0.8]}
+
+
+ParticleDecorator activation='linear' use_bias=False
+{'xs': [0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000], 'ys': [0.95, 0.9, 0.9, 0.9, 0.95, 0.8, 0.9, 0.9, 0.85, 0.85, 0.9]}
+
+
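Each record in this new dump pairs a sweep position (xs) with a per-trial fixpoint ratio (ys). A minimal sketch of how such a record could be visualized, assuming matplotlib is available (none of the repo's own plotting code appears in this commit):

    import matplotlib.pyplot as plt

    record = {'xs': [0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000],
              'ys': [0.45, 0.4, 0.6, 0.8, 0.95, 0.85, 0.95, 0.85, 0.9, 1.0, 0.8]}

    # One curve per record: sweep value on x, fixpoint ratio on y.
    plt.plot(record['xs'], record['ys'], marker='o')
    plt.xlabel('sweep value')
    plt.ylabel('fixpoint ratio')
    plt.ylim(0.0, 1.05)
    plt.show()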
@@ -37,7 +37,7 @@ if __name__ == '__main__':
     for activation in ['linear', 'sigmoid', 'relu']:
         net_generators += [lambda activation=activation: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
         net_generators += [lambda activation=activation: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
-        net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
+        # net_generators += [lambda activation=activation: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
         # net_generators += [lambda activation=activation: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=False)]
     all_counters = []
     all_notable_nets = []
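Each generator pins the loop variable through a default argument (lambda activation=activation: ...). This is deliberate: Python closures bind names late, so without the default every generator would be built with the last value of the loop. A standalone illustration:

    # Late binding: every closure reads the loop variable after the
    # loop has finished, so all of them see 'relu'.
    late = [lambda: activation for activation in ['linear', 'sigmoid', 'relu']]
    print([f() for f in late])    # ['relu', 'relu', 'relu']

    # Default-argument capture: each lambda freezes its own value at
    # definition time, which is what the experiment code relies on.
    bound = [lambda activation=activation: activation
             for activation in ['linear', 'sigmoid', 'relu']]
    print([f() for f in bound])   # ['linear', 'sigmoid', 'relu']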
@@ -52,7 +52,7 @@ if __name__ == '__main__':
             count(counters, net, notable_nets)
             keras.backend.clear_session()
         all_counters += [counters]
-        all_notable_nets += [notable_nets]
+        # all_notable_nets += [notable_nets]
         all_names += [name]
     exp.save(all_counters=all_counters)
     exp.save(all_notable_nets=all_notable_nets)
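keras.backend.clear_session() runs after every net because each generator builds a fresh Keras model; without it the underlying TensorFlow graph keeps growing across trials. A minimal sketch of the pattern (build_net is a stand-in for one of the net generators above, not repo code):

    import keras.backend
    from keras.layers import Dense
    from keras.models import Sequential

    def build_net():
        # Tiny stand-in for a generated net.
        model = Sequential([Dense(2, input_dim=2, activation='linear', use_bias=False)])
        model.compile(optimizer='sgd', loss='mse')
        return model

    for trial in range(3):
        net = build_net()
        # ... evaluate the net, update counters ...
        # Drop the accumulated graph so repeated model construction
        # does not leak memory across trials.
        keras.backend.clear_session()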
@@ -61,15 +61,15 @@ def count(counters, soup, notable_nets=[]):

 with SoupExperiment('learn-from-soup') as exp:
     exp.soup_size = 10
-    exp.soup_life = 100
-    exp.trials = 10
+    exp.soup_life = 1000
+    exp.trials = 20
     exp.learn_from_severity_values = [10 * i for i in range(11)]
     exp.epsilon = 1e-4
     net_generators = []
     for activation in ['sigmoid']:  # ['linear', 'sigmoid', 'relu']
         for use_bias in [False]:
             net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
-            # net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
             # net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]

     all_names = []
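One caveat visible in the hunk header: count(counters, soup, notable_nets=[]) uses a mutable default argument, which Python evaluates once at definition time, so every call that relies on the default appends into the same shared list. The experiment loops sidestep this by passing notable_nets explicitly; the conventional defensive rewrite would be:

    def count(counters, soup, notable_nets=None):
        # Create a fresh list per call instead of one list shared
        # across every invocation that omits the argument.
        if notable_nets is None:
            notable_nets = []
        ...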
@@ -95,7 +95,10 @@ with SoupExperiment('learn-from-soup') as exp:
             ys += [float(counters['fix_zero']) / float(exp.trials)]
             zs += [float(counters['fix_other']) / float(exp.trials)]
         all_names += [name]
-        all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]  # xs: learn_from_severity according to exp.learn_from_severity_values, ys: zero-fixpoints after life time, zs: non-zero-fixpoints after life time
+        # xs: learn_from_severity according to exp.learn_from_severity_values
+        # ys: zero-fixpoints after life time
+        # zs: non-zero-fixpoints after life time
+        all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]

     exp.save(all_names=all_names)
     exp.save(all_data=all_data)
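exp.save is invoked once per keyword (all_counters, all_names, all_data, ...), which suggests a save helper keyed on argument names. The repo's actual Experiment.save is not part of this diff; a minimal sketch of what such a helper might look like, assuming pickle-based dumps into an experiment directory (self.dir is an assumed attribute):

    import os
    import pickle

    class Experiment:
        ...
        def save(self, **kwargs):
            # Persist each keyword argument under its own name,
            # e.g. exp.save(all_data=all_data) -> <dir>/all_data.dump
            for name, value in kwargs.items():
                with open(os.path.join(self.dir, name + '.dump'), 'wb') as out_file:
                    pickle.dump(value, out_file)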
@@ -1,6 +1,7 @@
 import sys
 import os

+# Add the top-level directory to the module search path
 sys.path += os.path.join('..', '.')

 from typing import Tuple
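Note that sys.path += os.path.join('..', '.') extends the path list with the individual characters of the string ('.', '.', '/', '.'), because list += iterable splices in each element of its right-hand side; the parent directory never lands on the path as a single entry. The usual idiom would be:

    import os
    import sys

    # Append the parent directory as a single search-path entry;
    # `sys.path += '<string>'` would instead append each character.
    sys.path.append(os.path.join('..', '.'))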
@@ -13,10 +14,6 @@ from soup import *
 import keras.backend

-
-# Concat top Level dir to system environmental variables
-sys.path += os.path.join('..', '.')
-

 def generate_counters():
     """
     Initial build of the counter dict, to store counts.
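Only the docstring of generate_counters() is visible in this hunk. Judging from the keys read later (counters['fix_zero'], counters['fix_other']), it plausibly returns a dict of zeroed outcome categories; a sketch under that assumption:

    def generate_counters():
        """
        Initial build of the counter dict, to store counts.
        """
        # Keys inferred from later reads; the actual dict may
        # track additional outcome categories.
        return {'fix_zero': 0, 'fix_other': 0}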
@@ -57,7 +54,7 @@ def count(counters, soup, notable_nets=[]):


 with Experiment('mixed-self-fixpoints') as exp:
-    exp.trials = 10
+    exp.trials = 100
     exp.soup_size = 10
     exp.soup_life = 5
     exp.trains_per_selfattack_values = [10 * i for i in range(11)]
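The sweep lists used throughout, [10 * i for i in range(11)], step a parameter from 0 to 100 in increments of 10:

    >>> [10 * i for i in range(11)]
    [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]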
@@ -91,7 +88,10 @@ with Experiment('mixed-self-fixpoints') as exp:
             ys += [float(counters['fix_zero']) / float(exp.trials)]
             zs += [float(counters['fix_other']) / float(exp.trials)]
         all_names += [name]
-        all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]  # xs: how many trains per self-attack from exp.trains_per_selfattack_values, ys: average amount of zero-fixpoints found, zs: average amount of non-zero fixpoints
+        # xs: how many trains per self-attack from exp.trains_per_selfattack_values
+        # ys: average amount of zero-fixpoints found
+        # zs: average amount of non-zero fixpoints
+        all_data += [{'xs': xs, 'ys': ys, 'zs': zs}]

     exp.save(all_names=all_names)
     exp.save(all_data=all_data)
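Because ys and zs divide raw counts by exp.trials, every data point is a per-trial rate in [0, 1]; with the new exp.trials = 100, for example, 95 trials ending in a zero fixpoint yield a ys entry of 95 / 100 = 0.95, the same granularity as the records in the dump above.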