def add_attention_mask(self, input_name, output_name, attn_trunc_len, same_length, mask):
    """Append an AttentionMask layer to the caffe model and simulate its output.

    Builds a LayerParameter of type 'AttentionMask' reading from *input_name*
    and writing to *output_name*, registers it on ``self.caffe_model``, and
    stores the numerically-simulated result (via ``Operators.attention_mask``)
    in ``self.data_dict`` so downstream layers can consume it.

    Args:
        input_name: key of the layer's input tensor in ``self.data_dict``.
        output_name: name for both the new layer and its top blob.
        attn_trunc_len: truncation length forwarded to the layer's param.
        same_length: flag forwarded to the layer's param.
        mask: mask value forwarded to the layer's param.

    Returns:
        output_name, so calls can be chained.
    """
    mask_layer = caffe_net.LayerParameter(
        name=output_name,
        type='AttentionMask',
        bottom=[input_name],
        top=[output_name])
    mask_layer.attention_mask_param(attn_trunc_len, same_length, mask)
    self.caffe_model.add_layer(mask_layer)
    # Keep the simulated tensor in sync with the model graph.
    self.data_dict[output_name] = Operators.attention_mask(
        self.data_dict[input_name],
        attn_trunc_len,
        same_length,
        mask,
        output_name)
    return output_name