Code example #1
File: tensorflow2caffe.py  Project: yyqgood/bolt
 def add_attention(self, input_name, attention_num, from_seq_length, to_seq_length, output_name):
     # Build a Caffe 'Attention' layer that reads the input blob and
     # writes a single output blob with the given name.
     layer = caffe_net.LayerParameter(name=output_name, type='Attention',
                 bottom=[input_name], top=[output_name])
     layer.attention_param(attention_num, from_seq_length, to_seq_length)
     self.caffe_model.add_layer(layer)
     # Run the reference attention operator and record its result in
     # data_dict so intermediate outputs can be checked later.
     self.data_dict[output_name] = Operators.attention(self.data_dict[input_name],
                                       attention_num, from_seq_length, to_seq_length,
                                       output_name)
     return output_name
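
A minimal usage sketch, assuming a converter instance (here called `converter`) whose `data_dict` already holds the input blob; the blob names, head count, and sequence lengths below are illustrative, not from the source:

     # Hypothetical call: append a 12-head Attention layer over a
     # 128-token sequence attending to itself.
     out_name = converter.add_attention("encoder_input", 12, 128, 128,
                                        "encoder_attention")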
Code example #2
 def add_attention(self, input_name, output_name):
     # Variant that takes no shape arguments: the head count comes from
     # the converter itself (self.num_attentions).
     layer = caffe_net.LayerParameter(name=output_name, type='Attention',
                 bottom=[input_name], top=[output_name])
     layer.attention_param(self.num_attentions)
     self.caffe_model.add_layer(layer)
     self.data_dict[output_name] = Operators.attention(self.data_dict[input_name],
                                       self.num_attentions, output_name)
     return output_name
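
Unlike example #1, this variant omits the per-call attention parameters; `self.num_attentions` is presumably set when the converter is constructed. A minimal usage sketch under that assumption, with illustrative names:

     # Hypothetical call: num_attentions was fixed at construction time,
     # so only the input and output blob names are needed.
     out_name = converter.add_attention("encoder_input", "encoder_attention")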