def get_layers(self):
    """Build and return all layers of the merged network.

    Each sub-network's dense-softmax head is replaced by a dense-linear
    feature layer, all sub-networks are rewired to share one input layer,
    their heads are concatenated, and a final dense softmax over
    ``self.n_classes`` is stacked on top.

    Returns:
        list: every layer of the merged network, as produced by
        ``lasagne.layers.get_all_layers`` on the final softmax layer.
    """
    # Broadcast a single nonlinearity to one entry per sub-network.
    if hasattr(self.nonlin_before_merge, '__len__'):
        nonlins_before_merge = self.nonlin_before_merge
    else:
        nonlins_before_merge = (self.nonlin_before_merge,) * len(self.networks)
    layers_per_net = [net.get_layers() for net in self.networks]
    # Every sub-network must produce the same number of sample predictions.
    expected_preds = get_n_sample_preds(layers_per_net[0][-1])
    for net_layers in layers_per_net:
        assert get_n_sample_preds(net_layers[-1]) == expected_preds
    # Swap each dense-softmax output for a dense-linear feature layer.
    reduced_layers = []
    for net_layers, n_features, nonlin in zip(
            layers_per_net, self.n_features_per_net, nonlins_before_merge):
        reduced_layers.append(replace_dense_softmax_by_dense_linear(
            net_layers, n_features,
            nonlin_before_merge=nonlin,
            batch_norm_before_merge=self.batch_norm_before_merge))
    # hopefully still works with new method below:)
    use_same_input_layer(reduced_layers)
    heads = [net_layers[-1] for net_layers in reduced_layers]
    merged = ConcatLayer(heads)
    merged = DenseLayer(merged, num_units=self.n_classes, nonlinearity=softmax)
    return lasagne.layers.get_all_layers(merged)
def crop_layer_1(self):
    """Fixture: ConcatLayer over two mocked incomings, concatenated on
    axis 1 with 'lower' cropping applied to both dimensions."""
    from lasagne.layers.merge import ConcatLayer
    incomings = [Mock(), Mock()]
    return ConcatLayer(incomings, axis=1, cropping=['lower', 'lower'])
def layer(self):
    """Fixture: plain ConcatLayer over two mocked incomings on axis 1
    (no cropping)."""
    from lasagne.layers.merge import ConcatLayer
    mocked_incomings = [Mock(), Mock()]
    return ConcatLayer(mocked_incomings, axis=1)
def crop_layer_1(self):
    """Fixture: ConcatLayer over two shape-carrying mocks, axis 1, with
    'lower' cropping on both dimensions.

    NOTE(review): the mocks set ``output_shapes`` (plural) — presumably
    intentional for these tests; confirm against how the code under test
    reads layer shapes.
    """
    from lasagne.layers.merge import ConcatLayer
    left = Mock(output_shapes=((None, None), ))
    right = Mock(output_shapes=((None, None), ))
    return ConcatLayer((left, right), axis=1, cropping=['lower', 'lower'])
def layer(self, axis):
    """Fixture: ConcatLayer over two shape-carrying mocks, concatenated
    along the supplied ``axis`` (no cropping)."""
    from lasagne.layers.merge import ConcatLayer
    first = Mock(output_shapes=((None, None), ))
    second = Mock(output_shapes=((None, None), ))
    return ConcatLayer((first, second), axis=axis)