def forward(self, src_ast_encoding, prediction_target):
        """Decode `prediction_target` after injecting attention memory.

        Computes the attention memory tensors for `src_ast_encoding`
        (assumed dict-like — it must support ``update``; TODO confirm
        against callers) and merges them in place before delegating the
        actual decoding to the base class.
        """
        # Merge the attention tensors into the encoding so the base
        # decoder can attend over them during decoding.
        src_ast_encoding.update(self.get_attention_memory(src_ast_encoding))

        return RecurrentSubtokenDecoder.forward(
            self, src_ast_encoding, prediction_target)
    def default_params(cls):
        params = RecurrentSubtokenDecoder.default_params()
        params.update({
            'remove_duplicates_in_prediction': True,
            'context_encoding_size': 128,
            'attention_target': 'ast_nodes'  # terminal_nodes
        })

        return params