weightwise experiments

This commit is contained in:
Si11ium
2019-03-14 22:31:48 +01:00
parent 252716c420
commit 36d6579e4f
56 changed files with 66 additions and 134 deletions
code
experiments
exp-FixpointExperiment-_6511565650566781-0
exp-FixpointExperiment-_6511565800569721-0
exp-FixpointExperiment-_6511565864900101-0
exp-FixpointExperiment-_813945717034465-0
setups
weightwise_experiments
exp-fixpoint-density-_3256002726938743-0
exp-known-fixpoint-variation-_3256002757529753-0
exp-learn-from-soup-_6512006057392099-0
exp-mixed-self-fixpoints-_3256025691277105-0
exp-mixed-self-fixpoints-_6512040991372981-0
exp-training_fixpoint-_6512069271603393-0

@@ -60,8 +60,8 @@ if __name__ == '__main__':
net_generators = []
for activation in ['linear']: # , 'sigmoid', 'relu']:
for use_bias in [False]:
net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
# net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
# net_generators += [lambda activation=activation, use_bias=use_bias: WeightwiseNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
net_generators += [lambda activation=activation, use_bias=use_bias: AggregatingNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
# net_generators += [lambda activation=activation, use_bias=use_bias: FFTNeuralNetwork(aggregates=4, width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]
# net_generators += [lambda activation=activation, use_bias=use_bias: RecurrentNeuralNetwork(width=2, depth=2).with_keras_params(activation=activation, use_bias=use_bias)]