Example 1
    def __init__(self, LayerParameter):
        super(GNLayer, self).__init__(LayerParameter)
        gn_param = LayerParameter.group_norm_param
        scale_param = LayerParameter.scale_param
        self._param = {
            'group': int(gn_param.group),
            'eps': gn_param.eps,
            'axis': 1
        }
        scope = LayerParameter.name
        scale = Tensor(scope + '/param:0')
        scale_diff = Tensor(scope + '/param:0_grad')
        bias = Tensor(scope + '/param:1')
        bias_diff = Tensor(scope + '/param:1_grad')

        if scale_param.HasField('filler'):
            self.Fill(scale, scale_param, 'filler')
        else:
            scale.Constant(value=1.0)
        self.Fill(bias, scale_param, 'bias_filler')
        self.scale_blobs = [{
            'data': scale,
            'diff': scale_diff
        }, {
            'data': bias,
            'diff': bias_diff
        }]
        self._blobs.extend(self.scale_blobs)
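
All of the constructors on this page share the same initialization pattern: if the prototxt provides a filler, use it to fill the parameter blob; otherwise fall back to a layer-specific constant default (1.0 for scales, 0.25 for the PReLU slope, 0.0 for running statistics). The sketch below mirrors that control flow in plain NumPy; `init_param` and the dict-style filler spec are hypothetical stand-ins for the framework's `Fill()`/`Constant()` helpers, not the library's API.

import numpy as np

# Illustrative sketch of the filler-or-constant-default pattern used above.
def init_param(shape, filler=None, default_value=1.0):
    if filler is None:
        # No filler in the prototxt: fall back to the layer-specific constant.
        return np.full(shape, default_value, dtype='float32')
    if filler.get('type') == 'constant':
        return np.full(shape, filler.get('value', 0.0), dtype='float32')
    if filler.get('type') == 'gaussian':
        return np.random.normal(filler.get('mean', 0.0),
                                filler.get('std', 1.0),
                                size=shape).astype('float32')
    raise ValueError('unsupported filler: %r' % filler)

# e.g. the GN/BN scale defaults to ones, the bias is taken from 'bias_filler':
scale = init_param((64,), filler=None, default_value=1.0)
bias = init_param((64,), filler={'type': 'constant', 'value': 0.0})
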
Example 2
    def __init__(self, LayerParameter):
        super(BNLayer, self).__init__(LayerParameter)
        bn_param = LayerParameter.batch_norm_param
        scale_param = LayerParameter.scale_param
        self._param = {'use_stats': int(bn_param.use_global_stats)
                                        if bn_param.HasField('use_global_stats') else -1,
                       'momentum': bn_param.moving_average_fraction,
                       'eps': bn_param.eps}
        mean = Tensor(LayerParameter.name + '@param0').Constant(value=0.0)
        var = Tensor(LayerParameter.name + '@param1').Constant(value=0.0)
        scale = Tensor(LayerParameter.name + '@param2')
        scale_diff = Tensor(LayerParameter.name + '@param2_grad')
        bias = Tensor(LayerParameter.name + '@param3')
        bias_diff = Tensor(LayerParameter.name + '@param3_grad')

        if scale_param.HasField('filler'):
            self.Fill(scale, scale_param, 'filler')
        else:
            scale.Constant(value=1.0)
        self.Fill(bias, scale_param, 'bias_filler')
        self.norm_blobs = [{'data': mean, 'diff': None},
                           {'data': var, 'diff': None}]
        self.scale_blobs = [{'data': scale, 'diff': scale_diff},
                            {'data': bias, 'diff': bias_diff}]
        self._blobs.extend(self.norm_blobs)
        self._blobs.extend(self.scale_blobs)
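
For reference, the four blobs registered above (running mean, running variance, scale, bias) parameterize the usual inference-time batch-norm transform. Below is a minimal NumPy sketch of that transform, assuming NCHW inputs and per-channel statistics; it is an illustration, not the framework's kernel, and `batch_norm_inference` is a hypothetical helper name.

import numpy as np

# Sketch of inference-time batch norm with the four blobs above.
def batch_norm_inference(x, mean, var, scale, bias, eps=1e-5):
    shape = (1, -1, 1, 1)  # broadcast along the channel axis of an NCHW tensor
    x_hat = (x - mean.reshape(shape)) / np.sqrt(var.reshape(shape) + eps)
    return x_hat * scale.reshape(shape) + bias.reshape(shape)
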
Example 3
    def __init__(self, LayerParameter):
        super(PReLULayer, self).__init__(LayerParameter)
        param = LayerParameter.prelu_param
        self._param = {
            'channel_shared': param.channel_shared,
            'data_format': 'NCHW'
        }
        slope = Tensor(LayerParameter.name + '@param0')
        slope_diff = Tensor(LayerParameter.name + '@param0_grad')
        if param.HasField('filler'):
            self.Fill(slope, param, 'filler')
        else:
            slope.Constant(value=0.25)
        self._blobs.append({'data': slope, 'diff': slope_diff})
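
The slope blob above is the learnable parameter of the PReLU activation; with the default value of 0.25 it behaves like a leaky ReLU. A NumPy sketch of the forward pass for illustration only (`prelu` is a hypothetical helper, not the framework's operator):

import numpy as np

# PReLU: identity for positive inputs, slope-scaled for negative inputs.
def prelu(x, slope=0.25):
    return np.maximum(0.0, x) + slope * np.minimum(0.0, x)

print(prelu(np.array([-2.0, -0.5, 0.0, 3.0])))  # -0.5, -0.125, 0.0, 3.0
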
Example 4
    def __init__(self, LayerParameter):
        super(NormalizeLayer, self).__init__(LayerParameter)
        param = LayerParameter.normalize_param
        self._l2norm_param = {
            'axis': 1,
            'num_axes': -1 if param.across_spatial else 1,
            'eps': param.eps
        }
        self._scale_param = {
            'axis': 1,
            'num_axes': 0 if param.channel_shared else 1
        }
        scale = Tensor(LayerParameter.name + '@param0')
        if param.HasField('scale_filler'):
            self.Fill(scale, param, 'scale_filler')
        else:
            scale.Constant(value=1.0)
        self.scale_blobs = [{'data': scale, 'diff': Tensor(scale.name + '_grad')}]
        self._blobs.extend(self.scale_blobs)
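
The parameters above configure an SSD-style Normalize layer: L2-normalize the features, then multiply by the learned scale. A hedged NumPy sketch of that computation for the across_spatial=False, per-channel-scale case, assuming NCHW inputs; the exact eps placement may differ from the framework's formula, and `normalize_forward` is a hypothetical helper.

import numpy as np

# L2 normalization across channels followed by a learned per-channel scale.
def normalize_forward(x, scale, eps=1e-10):
    norm = np.sqrt((x ** 2).sum(axis=1, keepdims=True) + eps)  # (N, 1, H, W)
    return x / norm * scale.reshape(1, -1, 1, 1)
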
Example 5
    def __init__(self, LayerParameter):
        super(ScaleLayer, self).__init__(LayerParameter)
        param = LayerParameter.scale_param
        self._param = {'axis': param.axis, 'num_axes': param.num_axes}
        scale = Tensor(LayerParameter.name + '@param0')
        scale_diff = Tensor(LayerParameter.name + '@param0_grad')
        if param.HasField('filler'):
            self.Fill(scale, param, 'filler')
        else:
            scale.Constant(value=1.0)
        self._blobs.append({'data': scale, 'diff': scale_diff})
        if param.bias_term:
            bias = Tensor(LayerParameter.name + '@param1')
            bias_diff = Tensor(LayerParameter.name + '@param1_grad')
            # The bias is automatically filled with 0 if no bias_filler is specified
            self.Fill(bias, param, 'bias_filler')
            self._blobs.append({'data': bias, 'diff': bias_diff})
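
The Scale layer itself is just an element-wise multiply by the learned blob, plus the optional bias, broadcast over the input starting at `axis`. A NumPy sketch for the common per-channel case on NCHW data (axis=1, num_axes=1); illustration only, with `scale_forward` as a hypothetical helper name.

import numpy as np

# Per-channel scale (and optional bias) broadcast over an NCHW input.
def scale_forward(x, scale, bias=None):
    y = x * scale.reshape(1, -1, 1, 1)
    if bias is not None:
        y = y + bias.reshape(1, -1, 1, 1)
    return y
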
Example 6
    def __init__(self, LayerParameter):
        super(GNLayer, self).__init__(LayerParameter)
        gn_param = LayerParameter.group_norm_param
        scale_param = LayerParameter.scale_param
        self._param = {
            'group': int(gn_param.group),
            'use_stats': int(gn_param.use_global_stats)
                if gn_param.HasField('use_global_stats') else -1,
            'momentum': gn_param.moving_average_fraction,
            'eps': gn_param.eps,
            'axis': 1
        }
        scope = LayerParameter.name
        mean = Tensor(scope + '/param:0').Constant(value=0.0)
        var = Tensor(scope + '/param:1').Constant(value=0.0)
        scale = Tensor(scope + '/param:2')
        scale_diff = Tensor(scope + '/param:2_grad')
        bias = Tensor(scope + '/param:3')
        bias_diff = Tensor(scope + '/param:3_grad')

        if scale_param.HasField('filler'):
            self.Fill(scale, scale_param, 'filler')
        else:
            scale.Constant(value=1.0)
        self.Fill(bias, scale_param, 'bias_filler')
        self.norm_blobs = [{
            'data': mean,
            'diff': None
        }, {
            'data': var,
            'diff': None
        }]
        self.scale_blobs = [{
            'data': scale,
            'diff': scale_diff
        }, {
            'data': bias,
            'diff': bias_diff
        }]
        self._blobs.extend(self.norm_blobs)
        self._blobs.extend(self.scale_blobs)
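
The group count, eps, scale, and bias collected above map onto the standard group-normalization computation. Below is a hedged NumPy sketch, assuming NCHW inputs and a channel count divisible by `group`; `group_norm` is a hypothetical helper, not the framework's implementation.

import numpy as np

# Group normalization: normalize each group of channels over (C//G, H, W),
# then apply the learned per-channel scale (gamma) and bias (beta).
def group_norm(x, gamma, beta, group, eps=1e-5):
    n, c, h, w = x.shape
    xg = x.reshape(n, group, c // group, h, w)
    mean = xg.mean(axis=(2, 3, 4), keepdims=True)
    var = xg.var(axis=(2, 3, 4), keepdims=True)
    xg = (xg - mean) / np.sqrt(var + eps)
    return xg.reshape(n, c, h, w) * gamma.reshape(1, -1, 1, 1) \
        + beta.reshape(1, -1, 1, 1)
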