Example #1
 def extract_layer_norm(self,
                        input_name,
                        layer_norm_name,
                        scope_id,
                        layer_names=None):
     if layer_names is None:
         # Default scope layout: <scope>/LayerNorm/gamma and <scope>/LayerNorm/beta.
         self.scopes[scope_id] = "LayerNorm"
         self.scopes[scope_id + 1] = "gamma"
         gamma_name = self.generate_name(self.scopes, scope_id + 2)
         self.scopes[scope_id + 1] = "beta"
         beta_name = self.generate_name(self.scopes, scope_id + 2)
     else:
         # Caller-supplied scope names: [norm_scope, gamma_name, beta_name].
         self.scopes[scope_id] = layer_names[0]
         self.scopes[scope_id + 1] = layer_names[1]
         gamma_name = self.generate_name(self.scopes, scope_id + 2)
         self.scopes[scope_id + 1] = layer_names[2]
         beta_name = self.generate_name(self.scopes, scope_id + 2)
     # Look up the learned scale (gamma) and shift (beta) tensors by their generated names.
     gamma = self.get_tensor(gamma_name)
     beta = self.get_tensor(beta_name)
     # Build the Caffe LayerNorm layer, attach the weights, and register it with the model.
     layer = caffe_net.LayerParameter(name=layer_norm_name,
                                      type='LayerNorm',
                                      bottom=[input_name],
                                      top=[layer_norm_name])
     layer.add_data(gamma, beta)
     self.caffe_model.add_layer(layer)
     # Run the operator on the cached input so this layer's output is available to later layers.
     self.data_dict[layer_norm_name] = Operators.layer_norm(
         self.data_dict[input_name], gamma, beta, layer_norm_name)
     return layer_norm_name
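
The extractor above delegates the numeric work to Operators.layer_norm. For orientation, standard layer normalization normalizes each input vector over its last axis and then applies the learned scale gamma and shift beta. A minimal NumPy sketch under that assumption (the function name layer_norm_ref and the epsilon value are illustrative, not part of the original code):

 import numpy as np

 def layer_norm_ref(x, gamma, beta, eps=1e-6):
     # Normalize over the last axis, then apply the learned scale and shift.
     mean = x.mean(axis=-1, keepdims=True)
     var = x.var(axis=-1, keepdims=True)
     return gamma * (x - mean) / np.sqrt(var + eps) + beta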
Example #2
 def extract_layer_norm(self, input_name, output_name, scope_id,
         layer_names=["LayerNorm", "gamma", "beta"]):
     # Resolve the gamma/beta scope names and fetch both weight tensors in one call.
     gamma, beta = self.get_weights(scope_id, layer_names)
     layer = caffe_net.LayerParameter(name=output_name, type='LayerNorm',
                 bottom=[input_name], top=[output_name])
     layer.add_data(gamma, beta)
     self.caffe_model.add_layer(layer)
     self.data_dict[output_name] = Operators.layer_norm(
         self.data_dict[input_name], gamma, beta, output_name)
     return output_name
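
Example #2 is a more compact variant of the same extractor: the gamma/beta lookup is folded into a get_weights helper and the default scope names are supplied through the layer_names default. A hypothetical call during conversion might look like the following (the converter instance and tensor names are illustrative, not from the original code):

 # Hypothetical usage; "encoder_out" must already be a key in converter.data_dict.
 top = converter.extract_layer_norm("encoder_out", "encoder_layer_norm", scope_id=2)
 assert top == "encoder_layer_norm"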