def _match(self):
    """Fuse question information into the passage encoding.

    Selects the matching mechanism according to ``self.algo`` ('MLSTM'
    or 'BIDAF'), matches the passage encoding against the question
    encoding, re-encodes the matched sequence with a single-layer
    bi-LSTM, and finally applies dropout when enabled. The result is
    stored in ``self.match_p_encodes``.

    Raises:
        NotImplementedError: if ``self.algo`` is neither 'MLSTM' nor 'BIDAF'.
    """
    with tf.variable_scope('match'):
        if self.algo == 'MLSTM':
            matcher = MatchLSTMLayer(self.hidden_size)
        elif self.algo == 'BIDAF':
            matcher = AttentionFlowMatchLayer(self.hidden_size)
        else:
            raise NotImplementedError(
                'The algorithm {} is not implemented.'.format(self.algo))
        # Question-aware matching of the passage encoding.
        self.match_p_encodes, _ = matcher.match(
            self.sep_p_encodes, self.sep_q_encodes,
            self.p_t_length, self.q_t_length)
        # Re-encode the matched passage with one bi-LSTM layer.
        self.match_p_encodes, _ = rnn(
            'bi-lstm', self.match_p_encodes, self.p_t_length,
            self.hidden_size, layer_num=1)
        if self.use_dropout:
            self.match_p_encodes = tf.nn.dropout(
                self.match_p_encodes, self.dropout_keep_prob)
def _match(self): if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer5(self.hidden_size) else: raise NotImplementedError('The algorithm {} is not implemented.'.format(self.algo)) self.match_p_encodes, _ = match_layer.match(self.sep_p_encodes, self.sep_q_encodes,self.c_mask,self.q_mask,1-self.dropout,self.p_length, self.q_length)
def _match(self): """ The core of QDR model, get the query-aware passage encoding with either BIDAF or MLSTM """ if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer(self.hidden_size) else: raise NotImplementedError('The algorithm {} is not implemented.'.format(self.algo)) self.match_p_encodes, _ = match_layer.match(self.sep_p_encodes, self.sep_q_encodes, self.p_length, self.q_length) if self.use_dropout: self.match_p_encodes = tf.nn.dropout(self.match_p_encodes, self.dropout_keep_prob)
def _match(self): """ The core of RC model, get the question-aware passage encoding with either BIDAF or MLSTM """ if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer(self.hidden_size) else: raise NotImplementedError('The algorithm {} is not implemented.'.format(self.algo)) self.match_p_encodes, _ = match_layer.match(self.sep_p_encodes, self.sep_q_encodes, self.p_length, self.q_length) if self.use_dropout: self.match_p_encodes = tf.nn.dropout(self.match_p_encodes, self.dropout_keep_prob)
def _match(self): """ 阅读理解模型的核心.使用BiDAF或MLSTM来获取文章对问题的感知情况 """ if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer(self.hidden_size) self.match_p_encodes, self.context2question_attn, self.question2context_attn, _ = match_layer.match( self.sep_p_encodes, self.sep_q_encodes, self.p_length, self.q_length) if self.use_dropout: self.match_p_encodes = tf.nn.dropout(self.match_p_encodes, self.dropout_keep_prob)
def _self_att(self):
    """Self-attention over the matched passage encoding.

    Reuses the configured match layer type (MLSTM or BIDAF) with the
    matched passage attending to itself, then applies dropout when
    enabled. The result is stored in ``self.self_att_p_encodes``.

    Raises:
        NotImplementedError: if ``self.algo`` is neither 'MLSTM' nor 'BIDAF'.
    """
    with tf.variable_scope('self_att'):
        if self.algo == 'MLSTM':
            att_layer = MatchLSTMLayer(self.hidden_size)
        elif self.algo == 'BIDAF':
            att_layer = AttentionFlowMatchLayer(self.hidden_size)
        else:
            raise NotImplementedError(
                'The algorithm {} is not implemented.'.format(self.algo))
        # Passage attends to itself: both match arguments are the same
        # tensor and both lengths are the passage lengths.
        self.self_att_p_encodes, _ = att_layer.match(
            self.match_p_encodes, self.match_p_encodes,
            self.p_t_length, self.p_t_length)
        if self.use_dropout:
            self.self_att_p_encodes = tf.nn.dropout(
                self.self_att_p_encodes, self.dropout_keep_prob)
def _match(self): """ The core of RC model, get the question-aware passage encoding with either BIDAF or MLSTM """ if self.simple_net in [0]: return if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer(self.hidden_size) else: raise NotImplementedError( 'The algorithm {} is not implemented.'.format(self.algo)) self.match_p_encodes, self.sim_matrix, self.context2question_attn, self.b, self.question2context_attn = match_layer.match( self.sep_p_encodes, self.sep_q_encodes, self.p_length, self.q_length) if self.use_dropout: self.match_p_encodes = tf.nn.dropout(self.match_p_encodes, self.dropout_keep_prob)
def _match(self): if self.algo == 'MLSTM': match_layer = MatchLSTMLayer(self.hidden_size) elif self.algo == 'BIDAF': match_layer = AttentionFlowMatchLayer(self.hidden_size) else: raise NotImplementedError( 'The algorithm {} is not implemented.'.format(self.algo)) self.match_p_encodes, _ = match_layer.match(self.sep_p_encodes, self.sep_q_encodes, self.p_length, self.q_length) self.match_p_encodes = tf.layers.dense(self.match_p_encodes, self.hidden_size * 2, activation=tf.nn.relu) if self.use_dropout: self.match_p_encodes = tf.nn.dropout(self.match_p_encodes, 1 - self.dropout)
#!/usr/bin/python3