Example #1
    # Assumes TF 1.x, with: import tensorflow as tf; from tensorflow.contrib import rnn
    def bilstm_layer(self, lstm_inputs):
        """
        BiLSTM layer
        :param lstm_inputs: [batch_size, num_steps, emb_size]
        :return: [batch_size, num_steps, 2*lstm_dim]
        """
        # project the embeddings to lstm_dim so the residual additions
        # below have matching shapes
        lstm_inputs = tf.layers.dense(lstm_inputs,
                                      self.lstm_dim,
                                      use_bias=True)
        lstm_inputs = tf.nn.relu(lstm_inputs)

        with tf.variable_scope("char_BiLSTM"):
            lstm_cell = {}
            for direction in ["forward", "backward"]:
                with tf.variable_scope(direction):
                    lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                        self.lstm_dim,  # number of units inside each LSTM cell (the hidden state dimension)
                        use_peepholes=True,
                        initializer=self.initializer,
                        state_is_tuple=True)
            outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
                lstm_cell["forward"],  # 前向传播cell
                lstm_cell["backward"],  # 后向传播cell
                lstm_inputs,
                dtype=tf.float32,
                sequence_length=self.lengths,
                time_major=False)
        output_forward, output_backward = outputs  # fw/bw outputs, each [batch_size, num_steps, lstm_dim]
        output_h = tf.add(output_forward, output_backward)
        mix_input = tf.add(tf.nn.relu(output_h), lstm_inputs)  # residual connection to the projected inputs

        with tf.variable_scope("char_BiLSTM_2"):
            lstm_cell = {}
            for direction in ["forward", "backward"]:
                with tf.variable_scope(direction):
                    lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                        self.lstm_dim,  # number of units inside each LSTM cell (the hidden state dimension)
                        use_peepholes=True,
                        initializer=self.initializer,
                        state_is_tuple=True)
            outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
                lstm_cell["forward"],  # 前向传播cell
                lstm_cell["backward"],  # 后向传播cell
                mix_input,
                dtype=tf.float32,
                sequence_length=self.lengths,
                time_major=False)
        output_forward, output_backward = outputs
        output_h = tf.add(output_forward, output_backward)

        mix_input = tf.add(tf.nn.relu(mix_input), tf.nn.relu(output_h))

        return tf.concat([output_h, mix_input], axis=2)
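
The two residual additions above only type-check because the dense projection maps the embeddings to lstm_dim, the same size as the summed forward/backward outputs. A minimal shape walk-through of the tail of this layer, with illustrative dimensions (the concrete numbers are assumptions, not the author's settings):

import tensorflow as tf

batch_size, num_steps, lstm_dim = 4, 20, 100  # illustrative values only
output_forward = tf.zeros([batch_size, num_steps, lstm_dim])
output_backward = tf.zeros([batch_size, num_steps, lstm_dim])
lstm_inputs = tf.zeros([batch_size, num_steps, lstm_dim])  # after the dense projection

output_h = tf.add(output_forward, output_backward)      # [4, 20, 100]
mix_input = tf.add(tf.nn.relu(output_h), lstm_inputs)   # [4, 20, 100]
final = tf.concat([output_h, mix_input], axis=2)        # [4, 20, 200]
print(final.shape)  # (4, 20, 200) == [batch_size, num_steps, 2*lstm_dim]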
Example #2
def bi_lstm_layer(lstm_inputs, lstm_dim):
    """
    Encode a sentence with a bidirectional LSTM and take the encoding at
    the last time step as the sentence representation.
    lstm_inputs: [batch_size, num_steps, emb_size]
    lstm_dim: the last dimension of the concatenated output is 2*lstm_dim;
        since we want a result of shape [batch_size, emb_size], lstm_dim
        should be embedding_size / 2.
    return: [batch_size, 2*lstm_dim]
    """
    with tf.variable_scope("bi_lstm"):
        lstm_cell = {}
        for direction in ["forward", "backward"]:
            with tf.variable_scope(direction):
                lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                    lstm_dim,
                    use_peepholes=True,
                    initializer=tf.random_normal_initializer(stddev=0.1),
                    state_is_tuple=True)
        outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
            lstm_cell["forward"],
            lstm_cell["backward"],
            lstm_inputs,
            dtype=tf.float32)
    # take the output at the last time step as the sentence embedding for downstream layers
    out = tf.concat(outputs, axis=2)
    res = out[:, -1, :]
    return res
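
One caveat: this call passes no sequence_length, so for padded batches the RNN also runs over the padding and out[:, -1, :] reflects it. A hedged alternative sketch, assuming a per-sentence lengths vector is available, reads the final hidden states instead (with state_is_tuple=True, each direction's final state is an LSTMStateTuple):

    outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
        lstm_cell["forward"],
        lstm_cell["backward"],
        lstm_inputs,
        dtype=tf.float32,
        sequence_length=lengths)       # lengths: [batch_size] int32 vector (assumed available)
    state_fw, state_bw = final_states  # each an LSTMStateTuple(c, h)
    sentence_emb = tf.concat([state_fw.h, state_bw.h], axis=1)  # [batch_size, 2*lstm_dim]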
Example #3
 def biLSTM_layer(self, model_inputs, lstm_dim, lengths, name=None):
     """
     :param model_inputs: [batch_size, num_steps, emb_size]
     :param lengths:
     :param name:
     :return: [batch_size, num_step, 2*lstm_dim]
     """
     with tf.variable_scope("char_BiLSTM" if not name else name):
         lstm_cell = {}
         for direction in ["forward", "backward"]:
             with tf.variable_scope(direction):
                 lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                     lstm_dim,
                     use_peepholes=True,
                     initializer=self.initializer,
                     state_is_tuple=True
                 )
         outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
             lstm_cell["forward"],
             lstm_cell["backward"],
             model_inputs,
             dtype=tf.float32,
             sequence_length=lengths
         )
         return tf.concat(outputs, axis=2)
Example #4
    def biLSTM_layer(self, lstm_inputs, lstm_dim, lengths, name=None):
        """
        :param lstm_inputs: [batch_size, num_steps, emb_size]
        :return: [batch_size, num_steps, 2*lstm_dim]
        """
        with tf.variable_scope('char_lstm' if not name else name):
            lstm_cell = {}
            for direction in ['forward', 'backward']:
                with tf.variable_scope(direction):
                    lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                        lstm_dim,
                        use_peepholes=True,
                        initializer=self.initializer,
                        state_is_tuple=True)
            outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
                lstm_cell['forward'],
                lstm_cell['backward'],
                lstm_inputs,
                dtype=tf.float32,
                sequence_length=lengths
            )

            return tf.concat(outputs, axis=2)
Example #5
 def bilstm_layer(self, lstm_inputs):
     """
     BiLSTM layer
     :param lstm_inputs: [batch_size, num_steps, emb_size]
     :return: [batch_size, num_steps, 2*lstm_dim]
     """
     with tf.variable_scope("char_BiLSTM"):
         lstm_cell = {}
         for direction in ["forward", "backward"]:
             with tf.variable_scope(direction):
                 lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                     self.lstm_dim,   # number of units inside each LSTM cell (the hidden state dimension)
                     use_peepholes=True, initializer=self.initializer, state_is_tuple=True)
         outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
             lstm_cell["forward"],   # 前向传播cell
             lstm_cell["backward"],  # 后向传播cell
             lstm_inputs,
             dtype=tf.float32,
             sequence_length=self.lengths)
     return tf.concat(outputs, axis=2)
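
The self.lengths fed to sequence_length above is the true token count per sentence. A common way to derive it from padded id inputs, sketched here under the assumptions that the pad id is 0 and the placeholder is named char_inputs (both hypothetical):

char_inputs = tf.placeholder(tf.int32, [None, None], name="char_inputs")
used = tf.sign(tf.abs(char_inputs))                        # 1 at real tokens, 0 at padding (pad id 0 assumed)
lengths = tf.cast(tf.reduce_sum(used, axis=1), tf.int32)   # [batch_size] true lengths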
Example #6
 def biLSTMLayer(self, lstmInputs, lstmDim, lengths, name=None):
     """
     :param lstmInputs: [batchSize, numSteps, emb_size] 
     :return: [batchSize, numSteps, 2*lstmDim] 
     """
     with tf.variable_scope("char_BiLSTM" if not name else name):
         lstmCell = {}
         for direction in ["forward", "backward"]:
             with tf.variable_scope(direction):
                 lstmCell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                     lstmDim,
                     use_peepholes=True,   # keyword names must match the TF API, even in camelCase code
                     initializer=self.initializer,
                     state_is_tuple=True)
         outputs, finalStates = tf.nn.bidirectional_dynamic_rnn(
             lstmCell["forward"],
             lstmCell["backward"],
             lstmInputs,
             dtype=tf.float32,
             sequence_length=lengths)
     return tf.concat(outputs, axis=2)
Example #7
 def biLSTM_layer(self, lstm_inputs, lstm_dim, lengths, name=None):
     """
     :param lstm_inputs: [batch_size, num_steps, emb_size]   num_steps = MaxLenSentence
     :return: [batch_size, num_steps, 2*lstm_dim] 
     """
     with tf.variable_scope("char_BiLSTM" if not name else name):
         lstm_cell = {}  # the forward/backward LSTM cells
         for direction in ["forward", "backward"]:
             with tf.variable_scope(direction):
                 lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                     lstm_dim,  # hidden state size of each cell
                     use_peepholes=True,
                     initializer=self.initializer,
                     state_is_tuple=True)  # if True, cell state and hidden state are returned as a 2-tuple; if False they are concatenated along the feature axis
         outputs, final_states = tf.nn.bidirectional_dynamic_rnn(  # build the Bi-LSTM network
             lstm_cell["forward"],
             lstm_cell["backward"],
             lstm_inputs,
             dtype=tf.float32,
             sequence_length=lengths)  # [batch_size] vector of true sentence lengths (not the batch size itself)
     return tf.concat(outputs, axis=2)
Example #8
 def outadaptation_layer(self, lstm_inputs, lstm_dim, lengths, name=None):
     """
     :param lstm_inputs: [batch_size, num_steps, emb_size]
     :return: [batch_size, num_steps, 2*lstm_dim]
     """
     with tf.variable_scope("outadaptation" if not name else name) as scope:
         lstm_cell = {}
         for direction in ["forward", "backward"]:
             with tf.variable_scope(direction, reuse=tf.AUTO_REUSE):  # reuse=True fails if the variables do not exist yet; AUTO_REUSE creates or reuses as needed
                 lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                     lstm_dim,
                     use_peepholes=True,
                     initializer=self.initializer,
                     state_is_tuple=True)
         outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
             lstm_cell["forward"],
             lstm_cell["backward"],
             lstm_inputs,
             dtype=tf.float32,
             sequence_length=lengths)
         hidden_lstm = tf.concat(outputs, axis=2)
     return hidden_lstm
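
One nuance of reuse here: the cells create their variables only when they are called inside bidirectional_dynamic_rnn, under the enclosing scope, so sharing weights across two calls requires reuse on that enclosing scope as well. A hedged sketch (the names model, src, tgt, and the lengths tensors are hypothetical):

with tf.variable_scope("shared", reuse=tf.AUTO_REUSE):
    h_src = model.outadaptation_layer(src, lstm_dim, src_lengths)  # creates the variables
    h_tgt = model.outadaptation_layer(tgt, lstm_dim, tgt_lengths)  # reuses them (reuse is inherited by child scopes)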
Example #9
def biLSTM_layer_2(inputs, lstm_dim, lengths, name=None):
    """
    :param lstm_inputs: [batch_size, num_steps, emb_size]
    :return: [batch_size, num_steps, 2*lstm_dim]
    """
    initializer = initializers.xavier_initializer()
    # sess = tf.Session()
    # sess.run(tf.global_variables_initializer())
    with tf.variable_scope("BiLSTM" if not name else name):
        lstm_cell = {}
        for direction in ["forward", "backward"]:
            with tf.variable_scope(direction):
                lstm_cell[direction] = rnn.CoupledInputForgetGateLSTMCell(
                    lstm_dim,
                    use_peepholes=True,
                    initializer=initializer,
                    state_is_tuple=True)
        outputs, final_states = tf.nn.bidirectional_dynamic_rnn(
            lstm_cell["forward"],
            lstm_cell["backward"],
            inputs,
            dtype=tf.float32,
            sequence_length=lengths)
    return tf.concat(outputs, axis=2)
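
All of the snippets above depend on tf.contrib, which was removed in TensorFlow 2.x. A rough modern equivalent, substituting a standard LSTM cell for CoupledInputForgetGateLSTMCell and masking for explicit sequence_length (a sketch, not a drop-in replacement):

import tensorflow as tf

def bilstm_layer_keras(lstm_dim):
    # the Bidirectional wrapper concatenates forward and backward outputs on the last axis
    return tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(lstm_dim, return_sequences=True),
        merge_mode="concat")

layer = bilstm_layer_keras(100)
x = tf.random.normal([4, 20, 100])  # [batch_size, num_steps, emb_size], illustrative sizes
print(layer(x).shape)               # (4, 20, 200) == [batch_size, num_steps, 2*lstm_dim]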