Example #1
 import theano
 from numpy.testing import assert_array_equal
 from lasagne.layers.merge import autocrop

 def crop_test(cropping, inputs, expected):
     """Check that autocrop crops each input to the matching expected array."""
     # Shared variables give autocrop symbolic tensors it can slice.
     inputs = [theano.shared(x) for x in inputs]
     outs = autocrop(inputs, cropping)
     outs = [o.eval() for o in outs]  # evaluate symbolic outputs to numpy
     assert len(outs) == len(expected)
     for o, e in zip(outs, expected):
         assert_array_equal(o, e)
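A hypothetical invocation of the helper above (array values are illustrative; autocrop accepts None, 'lower', 'upper' or 'center' per axis):

 import numpy as np

 # Axis 0 is left alone (both inputs already agree); axis 1 is cropped to
 # the smallest size, keeping the 'lower' (leading) slice.
 x0 = np.arange(6).reshape((2, 3))   # [[0, 1, 2], [3, 4, 5]]
 x1 = np.arange(4).reshape((2, 2))   # [[0, 1], [2, 3]]
 crop_test([None, 'lower'], [x0, x1],
           [x0[:, :2], x1])          # x0 loses its last column; x1 is kept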
Example #2
 def setOutputs_abs(self):
     from lasagne.layers.merge import autocrop
     # For each stream, collect the per-layer outputs, crop them to a
     # common shape, and concatenate along the merge axis (tt is
     # presumably theano.tensor, imported elsewhere in the module).
     for s in self.streams:
         outs = [l.outputs[s] for l in self.layers]
         self.outputs[s] = tt.concatenate(autocrop(outs, self.croppings),
                                          axis=self.axis)
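The same crop-then-concatenate pattern works on raw Theano tensors; a minimal sketch (variable names and shapes are illustrative):

 import theano.tensor as tt
 from lasagne.layers.merge import autocrop

 # Crop two 4-D feature maps to a common spatial size before joining them
 # along the channel axis; 'center' keeps the middle of axes 2 and 3.
 a = tt.tensor4('a')  # e.g. (batch, 16, 32, 32)
 b = tt.tensor4('b')  # e.g. (batch, 32, 30, 30)
 cropped = autocrop([a, b], cropping=(None, None, 'center', 'center'))
 merged = tt.concatenate(cropped, axis=1)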
Example #3
 def get_output_for(self, inputs, **kwargs):
     # Crop all inputs to a common shape, then fold them together
     # pairwise with the layer's merge function (e.g. T.add or T.mul).
     inputs = autocrop(inputs, self.cropping)
     output = None
     for input in inputs:
         if output is not None:
             output = self.merge_function(output, input)
         else:
             output = input
     return output
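This matches the merge step of Lasagne's ElemwiseMergeLayer; a usage sketch (layer names and shapes are hypothetical):

 import theano.tensor as T
 from lasagne.layers import InputLayer, ElemwiseMergeLayer

 # Sum two feature maps whose spatial sizes differ slightly; the 'center'
 # cropping trims both to the smaller spatial size before merging.
 l_a = InputLayer((None, 16, 32, 32))
 l_b = InputLayer((None, 16, 30, 30))
 l_sum = ElemwiseMergeLayer([l_a, l_b], merge_function=T.add,
                            cropping=(None, None, 'center', 'center'))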
Example #4
    def get_output_for(self,
                       inputs,
                       deterministic=False,
                       batch_norm_use_averages=None,
                       batch_norm_update_averages=None,
                       **kwargs):

        # Crop the two inputs to a common spatial size ('center' keeps the
        # middle of axes 2 and 3), then compute per-feature statistics.
        input, features = autocrop(inputs,
                                   cropping=(None, None, "center", "center"))

        input_mean, input_inv_std = feature_statistics(input, features)

        # Decide whether to use the stored averages or mini-batch statistics
        if batch_norm_use_averages is None:
            batch_norm_use_averages = deterministic
        use_averages = batch_norm_use_averages

        if use_averages:
            mean = self.mean
            inv_std = self.inv_std
        else:
            mean = input_mean
            inv_std = input_inv_std

        # Decide whether to update the stored averages
        if batch_norm_update_averages is None:
            batch_norm_update_averages = not deterministic
        update_averages = batch_norm_update_averages

        if update_averages:
            # Trick: To update the stored statistics, we create memory-aliased
            # clones of the stored statistics:
            running_mean = theano.clone(self.mean, share_inputs=False)
            running_inv_std = theano.clone(self.inv_std, share_inputs=False)
            # set a default update for them:
            running_mean.default_update = ((1 - self.alpha) * running_mean +
                                           self.alpha * input_mean)
            running_inv_std.default_update = (
                (1 - self.alpha) * running_inv_std +
                self.alpha * input_inv_std)
            # and make sure they end up in the graph without participating in
            # the computation (this way their default_update will be collected
            # and applied, but the computation will be optimized away):
            mean += 0 * running_mean
            inv_std += 0 * running_inv_std

        # prepare dimshuffle pattern inserting broadcastable axes as needed
        param_axes = iter(range(input.ndim - len(self.axes)))
        pattern = [
            'x' if input_axis in self.axes else next(param_axes)
            for input_axis in range(input.ndim)
        ]

        # apply the dimshuffle pattern to all parameters so they broadcast
        # against the input
        beta = 0 if self.beta is None else self.beta.dimshuffle(pattern)
        gamma = 1 if self.gamma is None else self.gamma.dimshuffle(pattern)
        mean = mean.dimshuffle(pattern)
        inv_std = inv_std.dimshuffle(pattern)

        # normalize the input with the chosen statistics and learned scale/shift
        normalized = feature_normalization(input, features, mean, inv_std,
                                           gamma, beta)
        return normalized
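For context, a sketch of how these flags are typically driven; in stock Lasagne, keyword arguments to get_output propagate to every layer's get_output_for (the network variable net is hypothetical):

 import lasagne

 # Training: mini-batch statistics, running averages get updated.
 train_expr = lasagne.layers.get_output(net, deterministic=False)
 # Inference: stored running statistics, no updates.
 test_expr = lasagne.layers.get_output(net, deterministic=True)
 # Explicit override: batch statistics without touching the averages.
 frozen_expr = lasagne.layers.get_output(net, deterministic=False,
                                         batch_norm_use_averages=False,
                                         batch_norm_update_averages=False)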