Example #1
def extract_batch_norm(
        self,
        input_name,
        output_name,
        scope_id,
        data_format="NCHW",
        axis=1,
        eps=1e-3,
        layer_names=["bn", "moving_mean", "moving_variance"]):
    # only the NCHW data layout is supported by this conversion path
    assert data_format == "NCHW"
    # fetch the moving mean and variance stored under this scope
    mean, var = self.get_weights(scope_id, layer_names)
    # emit a Caffe BatchNorm layer fed by the converted input blob
    layer = caffe_net.LayerParameter(name=output_name,
                                     type='BatchNorm',
                                     bottom=[input_name],
                                     top=[output_name])
    layer.batch_norm_param(axis=axis, eps=eps)
    layer.add_data(mean, var)
    self.caffe_model.add_layer(layer)
    # if reference activations are being propagated, run the batch norm on
    # them as well so the converted model can be checked numerically
    if self.data_dict[input_name] is not None:
        input_data, input_shape, inv_transpose_dims = self.preprocess_nchwc8_nchw_input(
            input_name, axis)
        output_data = Operators.batch_norm(input_data, mean, var, eps,
                                           output_name)
        self.data_dict[output_name] = self.postprocess_nchwc8_nchw_output(
            output_data, input_shape, inv_transpose_dims)
    else:
        self.data_dict[output_name] = None
    # a gamma weight means the source batch norm has a learned affine part;
    # Caffe keeps scale/shift in a separate Scale layer, so append one and
    # return its output name instead
    gamma = self.get_weights(scope_id, [layer_names[0], "gamma"])
    if gamma is not None:
        scale_name = self.extract_scale(output_name, output_name + "_s",
                                        scope_id, data_format, axis,
                                        [layer_names[0], "gamma", "beta"])
        self.data_dict[output_name] = self.data_dict[scale_name]
        output_name = scale_name
    return output_name
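
For reference, here is a minimal NumPy sketch of the arithmetic that the emitted BatchNorm + Scale pair performs at inference time: normalization with the stored moving statistics, followed by the optional gamma/beta affine step. The reference_batch_norm helper below is purely illustrative; it is not part of the converter and does not claim to match the internals of Operators.batch_norm.

import numpy as np

def reference_batch_norm(x, mean, var, eps=1e-3, gamma=None, beta=None, axis=1):
    # reshape per-channel statistics so they broadcast along `axis` (axis=1 for NCHW)
    shape = [1] * x.ndim
    shape[axis] = -1
    mean = np.reshape(mean, shape)
    var = np.reshape(var, shape)
    # BatchNorm layer: normalize with the stored moving statistics
    y = (x - mean) / np.sqrt(var + eps)
    # Scale layer: optional affine part taken from the gamma/beta weights
    if gamma is not None:
        y = y * np.reshape(gamma, shape)
    if beta is not None:
        y = y + np.reshape(beta, shape)
    return y

# example: a 1x4x2x2 NCHW tensor normalized with per-channel statistics
x = np.random.randn(1, 4, 2, 2).astype(np.float32)
mean = np.zeros(4, dtype=np.float32)
var = np.ones(4, dtype=np.float32)
print(reference_batch_norm(x, mean, var).shape)  # (1, 4, 2, 2)

Caffe's BatchNorm layer only performs the normalization step, which is why the method above emits an additional Scale layer whenever a gamma weight is found under the scope.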