Added third experiment: training-fixpoints setup
@@ -697,8 +697,6 @@ if __name__ == '__main__':
         exp.historical_particles[i] = net
         K.clear_session()
     if False:
-        # this does not work as the aggregation function screws over the fixpoint computation....
-        # TODO: check for fixpoint in aggregated space...
         with FixpointExperiment() as exp:
             run_count = 1000
             net = TrainingNeuralNetworkDecorator(AggregatingNeuralNetwork(4, width=2, depth=2)).with_params(epsilon=0.1e-6)
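The two comments removed here describe a real limitation rather than a TODO that got done: a fixpoint check performed in aggregated weight space is unreliable, because aggregation throws information away. Below is a minimal, self-contained sketch of the failure mode, using plain NumPy and a simple mean aggregator (the aggregator choice is an assumption for illustration, not the repo's AggregatingNeuralNetwork implementation):

# Sketch: mean-aggregation is not injective, so unequal weight vectors
# can look identical after aggregation.
import numpy as np

def aggregate(weights, num_aggregates=4):
    # split the flat weight vector into num_aggregates chunks and average each
    return np.array([chunk.mean() for chunk in np.array_split(weights, num_aggregates)])

w_before = np.array([0.1, 0.3, 0.2, 0.2, 0.5, 0.1, 0.0, 0.4])
w_after  = np.array([0.2, 0.2, 0.3, 0.1, 0.2, 0.4, 0.4, 0.0])  # clearly different weights

print(np.allclose(w_before, w_after))                        # False: not a fixpoint weight-wise
print(np.allclose(aggregate(w_before), aggregate(w_after)))  # True: looks like one after aggregation

Two weight vectors that are clearly not equal weight-wise can still agree on every aggregate, so a comparison done in aggregated space would wrongly report a fixpoint.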
@@ -41,7 +41,7 @@ with Experiment('fixpoint-density') as exp:
         notable_nets = []
         for _ in tqdm(range(exp.trials)):
             net = net_generator().with_params(epsilon=exp.epsilon)
-            name = str(net.__class__.__name__) + " " + str(net.get_keras_params().get('activation'))
+            name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
             count(counters, net, notable_nets)
         all_counters += [counters]
         all_notable_nets += [notable_nets]
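The changed line only extends the per-network label so that the bias setting is logged alongside the activation. A standalone illustration of the resulting label format, using a hypothetical stub object in place of the repo's network classes:

# Hypothetical stand-in for a network object, just to show the label format.
class DummyNet:
    def get_keras_params(self):
        return {'activation': 'linear', 'use_bias': False}

net = DummyNet()
name = str(net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) \
       + "' use_bias='" + str(net.get_keras_params().get('use_bias')) + "'"
print(name)  # DummyNet activation='linear' use_bias='False'

An f-string would express the same label more compactly, but the concatenation style matches the other setup scripts in this commit.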
@@ -10,13 +10,6 @@ from soup import prng
 from statistics import mean
 avg = mean
 
-def generate(e=0.0, f=0.0):
-    return [
-        np.array([[1.0+e, 0.0+f], [0.0+f, 0.0+f], [0.0+f, 0.0+f], [0.0+f, 0.0+f]], dtype=np.float32),
-        np.array([[1.0+e, 0.0+f], [0.0+f, 0.0+f]], dtype=np.float32),
-        np.array([[1.0+e], [0.0+f]], dtype=np.float32)
-    ]
-
 def generate_fixpoint_weights():
     return [
         np.array([[1.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], dtype=np.float32),
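For reference, the helper deleted in this hunk produced the same matrices as generate_fixpoint_weights(), just with optional perturbations: e nudges the leading 1.0 entries and f nudges the zero entries, so generate(0, 0) reduces to the unperturbed fixpoint weights (assuming the remaining matrices of generate_fixpoint_weights(), which lie outside this hunk, mirror the shapes shown above). A self-contained copy for illustration:

# Copy of the deleted helper, kept here only to document its relationship
# to generate_fixpoint_weights().
import numpy as np

def generate(e=0.0, f=0.0):
    return [
        np.array([[1.0+e, 0.0+f], [0.0+f, 0.0+f], [0.0+f, 0.0+f], [0.0+f, 0.0+f]], dtype=np.float32),
        np.array([[1.0+e, 0.0+f], [0.0+f, 0.0+f]], dtype=np.float32),
        np.array([[1.0+e], [0.0+f]], dtype=np.float32),
    ]

unperturbed = generate()              # e = f = 0: the exact fixpoint weights
nudged = generate(e=1e-6, f=1e-6)     # slightly perturbed variant
print(np.abs(unperturbed[0] - nudged[0]).max())  # ~1e-6 deviation per entry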
code/setups/training-fixpoints.py (new file, 59 lines)
@@ -0,0 +1,59 @@
+import sys
+
+sys.path += ['../', './']
+
+from util import *
+from experiment import *
+from network import *
+
+def generate_counters():
+    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}
+
+def count(counters, net, notable_nets=[]):
+    if net.is_diverged():
+        counters['divergent'] += 1
+    elif net.is_fixpoint():
+        if net.is_zero():
+            counters['fix_zero'] += 1
+        else:
+            counters['fix_other'] += 1
+            notable_nets += [net]
+    elif net.is_fixpoint(2):
+        counters['fix_sec'] += 1
+        notable_nets += [net]
+    else:
+        counters['other'] += 1
+    return counters, notable_nets
+
+with Experiment('training_fixpoint') as exp:
+    exp.trials = 5
+    exp.run_count = 500
+    exp.epsilon = 1e-4
+    net_generators = []
+    for activation in ['linear', 'sigmoid', 'relu']:
+        for use_bias in [False]:
+            net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+            net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
+    all_counters = []
+    all_notable_nets = []
+    all_names = []
+    for net_generator_id, net_generator in enumerate(net_generators):
+        counters = generate_counters()
+        notable_nets = []
+        for _ in tqdm(range(exp.trials)):
+            net = TrainingNeuralNetworkDecorator(net_generator()).with_params(epsilon=exp.epsilon)
+            name = str(net.net.__class__.__name__) + " activation='" + str(net.get_keras_params().get('activation')) + "' use_bias=" + str(net.get_keras_params().get('use_bias'))
+            for run_id in range(exp.run_count):
+                loss = net.compiled().train(epoch=run_id+1)
+            count(counters, net, notable_nets)
+        all_counters += [counters]
+        all_notable_nets += [notable_nets]
+        all_names += [name]
+    exp.save(all_counters=all_counters)
+    exp.save(all_notable_nets=all_notable_nets)
+    exp.save(all_names=all_names)
+    for exp_id, counter in enumerate(all_counters):
+        exp.log(all_names[exp_id])
+        exp.log(all_counters[exp_id])
+        exp.log('\n')
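To make the bookkeeping of the new setup easier to follow in isolation, here is a self-contained sketch of its generate_counters()/count() logic driven by a hypothetical StubNet; the stub and its constructor flags are assumptions for illustration, while the real script feeds trained networks from the repo into the same functions:

# Classify stand-in networks into divergent / fix_zero / fix_other / fix_sec / other.
class StubNet:
    def __init__(self, diverged=False, fixpoint=False, zero=False, second_order=False):
        self._diverged, self._fixpoint = diverged, fixpoint
        self._zero, self._second_order = zero, second_order
    def is_diverged(self):
        return self._diverged
    def is_fixpoint(self, degree=1):
        return self._second_order if degree == 2 else self._fixpoint
    def is_zero(self):
        return self._zero

def generate_counters():
    return {'divergent': 0, 'fix_zero': 0, 'fix_other': 0, 'fix_sec': 0, 'other': 0}

def count(counters, net, notable_nets=[]):
    if net.is_diverged():
        counters['divergent'] += 1
    elif net.is_fixpoint():
        if net.is_zero():
            counters['fix_zero'] += 1
        else:
            counters['fix_other'] += 1
            notable_nets += [net]
    elif net.is_fixpoint(2):
        counters['fix_sec'] += 1
        notable_nets += [net]
    else:
        counters['other'] += 1
    return counters, notable_nets

counters, notable = generate_counters(), []
for net in [StubNet(diverged=True), StubNet(fixpoint=True, zero=True),
            StubNet(fixpoint=True), StubNet(second_order=True), StubNet()]:
    count(counters, net, notable)
print(counters)      # {'divergent': 1, 'fix_zero': 1, 'fix_other': 1, 'fix_sec': 1, 'other': 1}
print(len(notable))  # 2 (the non-zero fixpoint and the second-order fixpoint)

Each network lands in exactly one bucket, and only the non-zero and second-order fixpoints are kept as notable nets.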