def __init__(self, in_features, out_features, on, radius, sigma_surround, sigma_center=1.0):
    # Builds a difference-of-Gaussians receptive field: center and surround
    # Gaussian weight matrices are circularly masked, normalized and
    # subtracted. `on` selects the polarity of the subtraction.
    super(DifferenceOfGaussiansLinear, self).__init__(in_features, out_features, bias=False)
    self.on = on
    self.sigma_surround = sigma_surround
    self.sigma_center = sigma_center
    self.radius = radius
    sigma_center_weights_matrix = normalize(
        apply_circular_mask_to_weights(
            get_gaussian_weights(in_features, out_features, sigma_center),
            radius=radius))
    sigma_surround_weights_matrix = normalize(
        apply_circular_mask_to_weights(
            get_gaussian_weights(in_features, out_features, sigma_surround),
            radius=radius))
    if on:
        # On-center: excitatory center, inhibitory surround.
        diff = sigma_center_weights_matrix - sigma_surround_weights_matrix
    else:
        # Off-center: the opposite polarity.
        diff = sigma_surround_weights_matrix - sigma_center_weights_matrix
    self.weight = torch.nn.Parameter(diff)
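# A self-contained 1-D sketch of the on/off-center construction above. The
# sigma values and the plain unit-mass normalization are illustrative
# assumptions, not taken from the class: the "on" variant peaks at the center
# (center minus surround), the "off" variant is its mirror image.
import torch

xs = torch.arange(-5.0, 6.0)
center = torch.exp(-xs ** 2 / (2 * 1.0 ** 2))    # sigma_center = 1.0
surround = torch.exp(-xs ** 2 / (2 * 3.0 ** 2))  # sigma_surround = 3.0
center = center / center.sum()                   # normalize to unit mass
surround = surround / surround.sum()

on_weights = center - surround   # positive peak at the center, negative ring
off_weights = surround - center  # opposite polarity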
def test_normalize(self, matrix, norm, axis):
    # Checks that the project's `normalize` agrees with sklearn's.
    if norm > 2:
        raise ValueError('unsupported norm: {}'.format(norm))
    str_norm = 'l1' if norm == 1 else 'l2' if norm == 2 else 'max'
    assert np.allclose(
        normalize(matrix, norm, axis).numpy(),
        sklearn_normalize(matrix.numpy(), str_norm, axis))
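# A minimal, self-contained illustration of the equivalence the test asserts,
# using plain NumPy in place of the project's `normalize`: L1-normalizing the
# columns of a matrix by hand matches sklearn with norm='l1', axis=0.
import numpy as np
from sklearn.preprocessing import normalize as sklearn_normalize

m = np.array([[1.0, 2.0], [3.0, 4.0]])
manual = m / np.abs(m).sum(axis=0, keepdims=True)
assert np.allclose(manual, sklearn_normalize(m, norm='l1', axis=0))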
def _hebbian_learning(weights, input, output, learning_rate, radius):
    # Calculates the Hebbian delta, applies the connective radius mask and
    # updates the weights, normalizing them afterwards.
    # Weight adaptation of a single neuron:
    # w'_pq,ij = (w_pq,ij + alpha * input_pq * output_ij)
    #            / sum_uv (w_uv,ij + alpha * input_uv * output_ij)
    delta = learning_rate * mm(input.data.t(), output.data)
    apply_circular_mask_to_weights(delta.t_(), radius)  # mask in (units, afferents) orientation
    weights.data.add_(delta.t_())  # second in-place transpose restores the original orientation
    weights.data = normalize(weights.data, norm=1, axis=0)
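# A minimal sketch of the normalized Hebbian step above, without the circular
# mask. The shapes are assumptions for illustration: `inp` is
# (batch, in_features) and `out` is (batch, out_features), so inp.t() @ out
# correlates each afferent with each unit, matching the formula in the comment.
import torch

inp = torch.rand(1, 4)   # one presentation, 4 afferents
out = torch.rand(1, 3)   # 3 units
w = torch.rand(4, 3)     # one column of afferent weights per unit
alpha = 0.1

w = w + alpha * inp.t() @ out        # Hebbian delta: co-activity raises weights
w = w / w.sum(dim=0, keepdim=True)   # divisive (L1) normalization per unit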
def linear_decay(w, start, epoch, final_epoch):
    # Interpolates the connective radius linearly from `start` at epoch 0 to
    # 1.0 at `final_epoch`, then re-masks and re-normalizes the weights.
    # Note: `normalize` returns a new tensor (see the uses above), so its
    # result must be assigned back rather than discarded.
    radius = start + epoch * (1.0 - start) / final_epoch
    w.data = normalize(apply_circular_mask_to_weights(w.data.t_(), radius=radius))
    w.data.t_()  # transpose back in place to the original orientation
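# Hypothetical illustration of the radius schedule alone; the values of
# `start` and `final_epoch` are made up. The radius moves linearly from
# `start` toward 1.0 as `epoch` approaches `final_epoch`.
start, final_epoch = 5.0, 10
for epoch in (0, 5, 10):
    print(start + epoch * (1.0 - start) / final_epoch)  # 5.0, 3.0, 1.0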
def __init__(self, in_features, out_features, radius, sigma=1.0):
    # Initializes the sheet's weights via the parent class (which takes the
    # Gaussian `sigma`), then restricts them to the circular connective
    # radius and normalizes them.
    super(Cortex, self).__init__(in_features, out_features, sigma=sigma)
    self.radius = radius
    self.weight.data = normalize(
        apply_circular_mask_to_weights(self.weight.data, radius=radius))
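# A minimal sketch (an assumption, not the repository's
# `apply_circular_mask_to_weights`) of the mask-then-normalize initialization
# used above. Each row of `w` is treated as one unit's flattened square sheet
# of afferent weights, and the circle is centered on the sheet midpoint for
# simplicity; a retinotopic implementation would center it on each unit's own
# position instead.
import torch

def mask_and_normalize_sketch(w, radius):
    side = int(w.shape[1] ** 0.5)  # assume the afferents form a side x side sheet
    ys, xs = torch.meshgrid(torch.arange(side, dtype=torch.float32),
                            torch.arange(side, dtype=torch.float32),
                            indexing='ij')
    c = (side - 1) / 2.0
    mask = (((ys - c) ** 2 + (xs - c) ** 2) <= radius ** 2).flatten()
    w = w * mask                                  # zero weights outside the circle
    return w / w.abs().sum(dim=1, keepdim=True)   # L1-normalize each unit's weights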