Code example #1
0
File: att_bilstm_base.py  Project: nicolay-r/AREkit
 def __init__(self, keys_count, att_support_zero_length):
     """Set up the base attention-BiLSTM configuration.

     keys_count: number of keys for the interactive MLP attention config.
     att_support_zero_length: whether the attention model must accept
     zero-length inputs (stored for later use during reinit).
     """
     super(AttentionBiLSTMBaseConfig, self).__init__()
     assert isinstance(att_support_zero_length, bool)
     # The attention model itself is built later, during the
     # config-dependent reinitialization step.
     self.__attention = None
     self.__attention_config = InteractiveMLPAttentionConfig(
         keys_count=keys_count)
     self.__att_support_zero_length = att_support_zero_length
Code example #2
0
File: att_sef_cnn.py  Project: nicolay-r/AREkit
class AttentionSynonymEndsAndFramesCNNConfig(AttentionCNNBaseConfig):
    """CNN configuration whose attention keys cover the frames and the
    synonym ends found within a context.
    """

    def __init__(self):
        super(AttentionSynonymEndsAndFramesCNNConfig, self).__init__()
        # Built lazily in reinit_config_dependent_parameters.
        self.__attention = None
        # One key per frame, plus two keys (both ends) per synonym.
        keys_count = self.FramesPerContext + 2 * self.SynonymsPerContext
        self.__attention_config = InteractiveMLPAttentionConfig(
            keys_count=keys_count)

    # region properties

    @property
    def AttentionModel(self):
        """The attention model, or None until the config-dependent
        reinitialization has been performed."""
        return self.__attention

    # endregion

    # region public methods

    def get_attention_parameters(self):
        """Expose the parameters of the underlying attention config."""
        return self.__attention_config.get_parameters()

    def reinit_config_dependent_parameters(self):
        """(Re)create the attention model from the current configuration."""
        super(AttentionSynonymEndsAndFramesCNNConfig,
              self).reinit_config_dependent_parameters()

        attention = InteractiveMLPAttention(
            cfg=self.__attention_config,
            batch_size=self.BatchSize,
            terms_per_context=self.TermsPerContext,
            support_zero_length=False)
        self.__attention = attention

    # endregion
Code example #3
0
File: att_bilstm_base.py  Project: nicolay-r/AREkit
class AttentionBiLSTMBaseConfig(BiLSTMConfig):
    """BiLSTM configuration based on an interactive MLP attention model.

    Parameters
    ----------
    keys_count:
        Number of keys for the interactive MLP attention configuration.
    att_support_zero_length:
        Whether the attention model must accept zero-length inputs.

    Raises
    ------
    TypeError
        If ``att_support_zero_length`` is not a bool.
    """
    def __init__(self, keys_count, att_support_zero_length):
        super(AttentionBiLSTMBaseConfig, self).__init__()
        # Validate explicitly rather than via `assert`, which is silently
        # stripped when Python runs with the -O flag.
        if not isinstance(att_support_zero_length, bool):
            raise TypeError(
                "att_support_zero_length must be a bool, got {}".format(
                    type(att_support_zero_length).__name__))
        # The attention model is built lazily in
        # reinit_config_dependent_parameters.
        self.__attention = None
        self.__attention_config = InteractiveMLPAttentionConfig(
            keys_count=keys_count)
        self.__att_support_zero_length = att_support_zero_length

    # region properties

    @property
    def AttentionModel(self):
        """The attention model, or None until the config-dependent
        reinitialization has been performed."""
        return self.__attention

    # endregion

    # region public methods

    def reinit_config_dependent_parameters(self):
        """(Re)create the attention model from the current configuration."""
        super(AttentionBiLSTMBaseConfig,
              self).reinit_config_dependent_parameters()

        self.__attention = InteractiveMLPAttention(
            cfg=self.__attention_config,
            batch_size=self.BatchSize,
            terms_per_context=self.TermsPerContext,
            support_zero_length=self.__att_support_zero_length)

    def _internal_get_parameters(self):
        """Extend the parent's parameter list with the attention-config
        parameters."""
        parameters = super(AttentionBiLSTMBaseConfig,
                           self)._internal_get_parameters()
        parameters += self.__attention_config.get_parameters()
        return parameters

    # endregion
Code example #4
0
File: att_sef_cnn.py  Project: nicolay-r/AREkit
 def __init__(self):
     """Set up the config; attention keys cover the frames and both
     ends of every synonym within a context."""
     super(AttentionSynonymEndsAndFramesCNNConfig, self).__init__()
     # Built lazily during config-dependent reinitialization.
     self.__attention = None
     keys_count = self.FramesPerContext + 2 * self.SynonymsPerContext
     self.__attention_config = InteractiveMLPAttentionConfig(
         keys_count=keys_count)
Code example #5
0
File: att_ef_cnn.py  Project: nicolay-r/AREkit
 def __init__(self):
     """Set up the config; attention keys cover the frames plus two
     extra keys (presumably the attitude ends — TODO confirm)."""
     super(AttentionEndsAndFramesCNNConfig, self).__init__()
     # Built lazily during config-dependent reinitialization.
     self.__attention = None
     keys_count = self.FramesPerContext + 2
     self.__attention_config = InteractiveMLPAttentionConfig(keys_count)
Code example #6
0
 def __init__(self):
     """Set up the config; two attention keys per synonym (both ends)
     within a context."""
     super(AttentionSynonymEndsCNNConfig, self).__init__()
     # Built lazily during config-dependent reinitialization.
     self.__attention = None
     keys_count = self.SynonymsPerContext * 2
     self.__attention_config = InteractiveMLPAttentionConfig(keys_count)