def test_sync_in_out_synapses(self):
  num_in = 3
  num_out = 2
  num_states = 2
  env = blur_env.tf_env
  in_out_synapse = tf.random.normal(shape=(num_in + 1, num_out, num_states))
  out_in_synapse = tf.random.normal(shape=(num_out, num_in + 1, num_states))
  synapse = synapse_util.combine_in_out_synapses(
      in_out_synapse, out_in_synapse, env)
  synapse_synced = synapse_util.sync_in_and_out_synapse(synapse, num_in, env)
  fwd_sync_submatrix = synapse_util.synapse_submatrix(
      synapse_synced,
      num_in,
      synapse_util.UpdateType.FORWARD,
      include_bias=True)
  bkw_sync_submatrix = synapse_util.synapse_submatrix(
      synapse_synced,
      num_in,
      synapse_util.UpdateType.BACKWARD,
      include_bias=True)
  with tf.Session() as s:
    bwd, fwd, inp = s.run([
        synapse_util.transpose_synapse(bkw_sync_submatrix, env),
        fwd_sync_submatrix,
        in_out_synapse,
    ])
  # After syncing, both the forward submatrix and the transposed backward
  # submatrix must equal the original forward weights.
  self.assertAllEqual(fwd, inp)
  self.assertAllEqual(bwd, inp)
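
# A minimal numpy sketch of the invariant checked above, assuming (purely for
# illustration; this need not match synapse_util's exact packing) that the
# combined synapse is the block matrix [[0, W_fwd], [W_bwd, 0]]: "syncing"
# overwrites the backward block with the transpose of the forward block, so
# extracting and transposing it recovers W_fwd.
def _sync_block_matrix_sketch():
  import numpy as np
  num_in, num_out = 3, 2
  w_fwd = np.random.randn(num_in + 1, num_out)  # +1 row for the bias channel
  w_bwd = np.random.randn(num_out, num_in + 1)
  combined = np.block([
      [np.zeros((num_in + 1, num_in + 1)), w_fwd],
      [w_bwd, np.zeros((num_out, num_out))],
  ])
  # Sync: copy the forward block, transposed, into the backward block.
  combined[num_in + 1:, :num_in + 1] = combined[:num_in + 1, num_in + 1:].T
  bwd = combined[num_in + 1:, :num_in + 1]
  np.testing.assert_allclose(bwd.T, w_fwd)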
def test_synapse_derivative(self):
  tf.reset_default_graph()
  tf.disable_v2_behavior()
  n_in, n_out = 10, 5
  inp, out, w = random_dense(n_in, n_out)
  env = blur_env.tf_env
  pre, post, synapse, genome, network_spec = get_blur_state(env, inp, out, w)
  # Zero out the postsynaptic state so the neuron update is driven purely by
  # the forward pass through the synapse.
  post = np.concatenate(
      2 * [np.zeros_like(out)], axis=-1).astype(blur_env.NP_FLOATING_TYPE)
  pre_fwd, post_fwd = blur.dense_neuron_update(
      pre,
      post,
      synapse,
      inp_act=None,
      out_act=sigmoid_with_grad,
      neuron_genome=genome.neuron,
      update_type=synapse_util.UpdateType.FORWARD,
      global_spec=network_spec,
      env=env)
  hebbian_update = blur.get_hebbian_update(
      pre_fwd, post_fwd, genome.synapse.transform, network_spec, env)
  hebbian_update_submatrix = synapse_util.synapse_submatrix(
      hebbian_update, n_in, synapse_util.UpdateType.FORWARD)
  # Reference: the gradient of sigmoid(x @ w) with respect to w, computed
  # with TensorFlow autodiff on the bias-augmented input.
  inp = tf.constant(inp.astype(blur_env.NP_FLOATING_TYPE))
  inp_with_bias = tf.concat([inp[Ellipsis, 0], [[[[1]]]]], axis=-1)
  ww = tf.constant(w.astype(blur_env.NP_FLOATING_TYPE))
  out = tf.nn.sigmoid(inp_with_bias @ ww)
  grad_w = tf.gradients(out, ww)
  with tf.Session() as s:
    s.run(tf.initialize_all_variables())
    hebb_update, grad_w_val = s.run(
        [hebbian_update_submatrix[Ellipsis, 0], grad_w[0]])
  self.assertAllClose(hebb_update, grad_w_val)
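
# The test above hinges on the identity d/dw sum(sigmoid(x @ w)) =
# x^T @ sigmoid'(x @ w): a Hebbian outer product between the bias-augmented
# input and the output's local gradient. A self-contained numpy check of that
# identity against finite differences (all names here are illustrative):
def _hebbian_equals_gradient_sketch():
  import numpy as np
  rng = np.random.default_rng(0)
  x = rng.normal(size=(1, 4))  # one bias-augmented input row
  w = rng.normal(size=(4, 3))
  sig = lambda z: 1.0 / (1.0 + np.exp(-z))
  s = sig(x @ w)
  analytic = x.T @ (s * (1 - s))  # x^T @ sigmoid'(x @ w)
  eps = 1e-5
  numeric = np.zeros_like(w)
  for i in range(w.shape[0]):
    for j in range(w.shape[1]):
      dw = np.zeros_like(w)
      dw[i, j] = eps
      numeric[i, j] = (sig(x @ (w + dw)).sum() -
                       sig(x @ (w - dw)).sum()) / (2 * eps)
  np.testing.assert_allclose(analytic, numeric, atol=1e-6)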
def test_get_hebbian_update(self):
  tf.reset_default_graph()
  tf.disable_v2_behavior()
  n_in, n_out = 10, 5
  inp, out, w = random_dense(n_in, n_out)
  env = blur_env.tf_env
  pre, post, _, genome, network_spec = get_blur_state(env, inp, out, w)
  hebbian_update = blur.get_hebbian_update(
      pre, post, genome.synapse.transform, network_spec, env)
  hebbian_update_submatrix = synapse_util.synapse_submatrix(
      hebbian_update, n_in, synapse_util.UpdateType.FORWARD)
  # Reference: the Hebbian update is the outer product of the bias-augmented
  # presynaptic activity and the postsynaptic activity.
  inp = tf.constant(inp.astype(blur_env.NP_FLOATING_TYPE))
  out = tf.constant(out.astype(blur_env.NP_FLOATING_TYPE))
  inp_transpose = tf.transpose(inp[Ellipsis, 0], (0, 1, 3, 2))
  exp_result = env.concat_row(inp_transpose) @ out[Ellipsis, 0]
  with tf.Session() as s:
    s.run(tf.initialize_all_variables())
    self.assertAllClose(hebbian_update_submatrix[Ellipsis, 0], exp_result)
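
# The reference value above is just an outer product. Assuming env.concat_row
# appends a constant row of ones (the bias channel), the 2-D analogue is:
def _outer_product_reference_sketch():
  import numpy as np
  batch, n_in, n_out = 4, 10, 5
  pre = np.random.randn(batch, n_in)
  post = np.random.randn(batch, n_out)
  # Bias-augmented presynaptic activity, transposed, times postsynaptic
  # activity: one (n_in + 1, n_out) update summed over the batch.
  pre_t = np.concatenate([pre, np.ones((batch, 1))], axis=-1).T
  hebb = pre_t @ post
  assert hebb.shape == (n_in + 1, n_out)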
def single_synaptic_update(synapse,
                           input_layer,
                           in_channels,
                           update_type,
                           transform_gn,
                           synapse_transform_fn=None,
                           *,
                           env):
  """Computes a one-way (forward or backward) synaptic update."""
  include_bias = False
  if update_type == synapse_util.UpdateType.FORWARD:
    # Input channels are +1 to include a constant "1" channel that simulates
    # a bias.
    input_layer = env.concat_row(input_layer)
    include_bias = True
  subsynapse = synapse_util.synapse_submatrix(
      synapse,
      in_channels=in_channels,
      update_type=update_type,
      include_bias=include_bias)
  if synapse_transform_fn is not None:
    subsynapse = synapse_transform_fn(subsynapse)
  update = env.einsum(FC_UPDATE_PATTERN, input_layer, transform_gn, subsynapse)
  return update
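
# A simplified sketch of the einsum performed in single_synaptic_update. The
# pattern below is hypothetical (FC_UPDATE_PATTERN itself is defined elsewhere
# in this module); the point is the shape of the contraction: input
# activations are mixed across per-neuron states by the genome transform and
# pushed through the synapse submatrix.
def _fc_update_sketch():
  import numpy as np
  batch, n_in, n_out, n_states = 2, 4, 3, 2
  input_layer = np.random.randn(batch, n_in, n_states)
  transform_gn = np.random.randn(n_states, n_states)  # genome state mixing
  subsynapse = np.random.randn(n_in, n_out, n_states)
  # Hypothetical pattern: contract input channels, mix states via the genome.
  update = np.einsum('bis,st,iot->bot', input_layer, transform_gn, subsynapse)
  assert update.shape == (batch, n_out, n_states)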