def _wordvec_embedding(self, inputs):
    """Map integer token ids to pretrained word vectors.

    Builds (or reuses, under variable scoping) a variable named
    'embedding' initialized from the pretrained matrix, then gathers
    rows for `inputs`.

    Args:
        inputs: integer tensor of token ids.

    Returns:
        Tensor of looked-up word vectors (one vector per id in `inputs`).
    """
    pretrained = load_pretrained_vec()
    vocab_size, vec_dim = pretrained.shape[0], pretrained.shape[1]
    table = tf.get_variable(
        'embedding', [vocab_size, vec_dim],
        initializer=tf.constant_initializer(pretrained, tf.float32))
    return tf.nn.embedding_lookup(table, inputs)
def __init__(self, **kwargs):
    """Create the layer and its pretrained word-embedding table.

    The embedding weights are initialized from the matrix returned by
    `load_pretrained_vec()`; vocabulary size and vector dimension are
    taken from that matrix's shape.
    """
    super(Embedding_Layer, self).__init__(**kwargs)
    pretrained = load_pretrained_vec()
    self.word_embedding = tf.keras.layers.Embedding(
        input_dim=pretrained.shape[0],
        output_dim=pretrained.shape[1],
        embeddings_initializer=tf.constant_initializer(pretrained, tf.float32),
        name='word_embedding')
def _build_layers(self):
    """Build all sub-layers of the parser model.

    Creates embedding tables (POS tags, dependency relations, action
    history, pretrained words), two fully-connected projections, three
    identical multi-layer LSTM stacks (stack / buffer / action), and the
    final classification layer.

    Fix over the original: the multi-layer LSTM stack construction was
    copy-pasted three times verbatim; it is extracted into a local
    helper. Behavior and variable/layer names are unchanged.
    """
    self.p_embedding = tf.keras.layers.Embedding(
        Config.model.pos_num, Config.model.pos_embedding_size,
        name='pos_embedding')
    # Each dependency relation gets a left- and right-arc entry, hence * 2.
    self.rel_embedding = tf.keras.layers.Embedding(
        Config.model.dep_num * 2, Config.model.comp_action_embedding_size,
        name='rel_embedding')
    # 3 structural actions + 4 variants per dependency label.
    # NOTE(review): the sibling builder uses 4 + dep_num * 4 — confirm the
    # action inventory for this model really has 3 label-free actions.
    self.history_a_embedding = tf.keras.layers.Embedding(
        3 + Config.model.dep_num * 4,
        Config.model.history_action_embedding_size,
        name='action_embedding')
    wordvec = load_pretrained_vec()
    self.w_embedding = tf.keras.layers.Embedding(
        wordvec.shape[0], wordvec.shape[1],
        tf.constant_initializer(wordvec, tf.float32),
        name='word_embedding')
    self.embedding_dense = tf.keras.layers.Dense(
        Config.model.embedding_fc_unit, tf.nn.relu, name='embedding_fc')
    self.recurse_dense = tf.keras.layers.Dense(
        Config.model.embedding_fc_unit, tf.nn.tanh, name='recurse_fc')

    def make_lstm_stack():
        # One multi-layer LSTM stack, orthogonally initialized. Called
        # once per structure so each stack gets its own variables.
        cells = [
            tf.nn.rnn_cell.LSTMCell(Config.model.lstm_unit,
                                    initializer=tf.orthogonal_initializer())
            for _ in range(Config.model.lstm_layer_num)
        ]
        return tf.nn.rnn_cell.MultiRNNCell(cells)

    self.stack_lstm = make_lstm_stack()
    self.buff_lstm = make_lstm_stack()
    self.action_lstm = make_lstm_stack()
    # 2 structural actions + left/right arc per dependency label; raw
    # logits (no activation) feeding a downstream softmax.
    self.final_dense = tf.keras.layers.Dense(
        2 + 2 * Config.model.dep_num, name='softmax_fc')
def __init__(self, **kwargs):
    """Create embedding, dropout, 3-D conv, pooling, and dense sub-layers.

    Word embeddings are initialized from `load_pretrained_vec()`; POS
    embeddings are learned from scratch. The conv stack uses [1, 3, 3]
    kernels throughout (no mixing across the first spatial axis).
    """
    super(Embedding_Layer, self).__init__(**kwargs)
    # Keep rate decays with the epoch, so the drop rate grows over
    # training. NOTE(review): assumes Config.train.epoch is updated per
    # epoch before layer construction — confirm with the training loop.
    self.dropout = tf.keras.layers.Dropout(
        1 - tf.pow(Config.train.dropout_decay, Config.train.epoch))
    pretrained = load_pretrained_vec()
    self.word_embedding = tf.keras.layers.Embedding(
        input_dim=pretrained.shape[0],
        output_dim=pretrained.shape[1],
        embeddings_initializer=tf.constant_initializer(pretrained, tf.float32),
        name='word_embedding')
    self.pos_embedding = tf.keras.layers.Embedding(
        input_dim=Config.model.pos_num,
        output_dim=Config.model.pos_embedding_size,
        name='pos_embedding')

    def relu_conv(filters, tag):
        # All conv layers share kernel shape, padding, and activation;
        # only filter count and name vary.
        return tf.keras.layers.Conv3D(filters, [1, 3, 3], padding='SAME',
                                      activation=tf.nn.relu, name=tag)

    self.conv3d_0 = relu_conv(16, 'conv3d_0')
    self.conv3d_1 = relu_conv(16, 'conv3d_1')
    self.maxpool3d_0 = tf.keras.layers.MaxPool3D([1, 1, 2], [1, 1, 2],
                                                 'SAME')  # pool width only
    self.maxpool3d_1 = tf.keras.layers.MaxPool3D([1, 2, 2], [1, 2, 2], 'SAME')
    self.conv3d_2 = relu_conv(32, 'conv3d_2')
    self.conv3d_3 = relu_conv(32, 'conv3d_3')
    self.conv3d_4 = relu_conv(64, 'conv3d_4')
    self.conv3d_5 = relu_conv(64, 'conv3d_5')
    self.cnn_dense = tf.keras.layers.Dense(Config.model.cnn_dense_units,
                                           tf.nn.relu, name='cnn_dense')
def _embedding(self, word_id, pos_id):
    """Embed word and POS ids, concatenate, and apply dropout.

    Word vectors come from a table initialized with pretrained vectors
    ('word_embedding'); POS vectors are learned from scratch
    ('pos_embedding'). Dropout is active only when `self.is_training`.

    Args:
        word_id: integer tensor of word ids.
        pos_id: integer tensor of POS-tag ids.

    Returns:
        Tensor of concatenated [word ; pos] embeddings after dropout.
    """
    pretrained = load_pretrained_vec()
    word_table = tf.get_variable(
        'word_embedding', [pretrained.shape[0], pretrained.shape[1]],
        initializer=tf.constant_initializer(pretrained, tf.float32))
    word_vecs = tf.nn.embedding_lookup(word_table, word_id)
    pos_table = tf.get_variable(
        'pos_embedding',
        [Config.model.pos_num, Config.model.pos_embedding_size])
    pos_vecs = tf.nn.embedding_lookup(pos_table, pos_id)
    combined = tf.concat([word_vecs, pos_vecs], -1)
    return slim.dropout(combined, Config.model.embedding_keep_prob,
                        is_training=self.is_training)
def _build_layers(self):
    """Build all sub-layers of the parser model.

    Creates embedding tables (POS, action history, pretrained words), a
    learned-word projection, a tree-LSTM composition cell, three 2-layer
    LSTM stacks (stack / action / deque), two bidirectional LSTM cell
    pairs, and the final classification layer.

    Fix over the original: the 2-layer MultiRNNCell construction was
    copy-pasted three times and the four named bi-LSTM cells were built
    with four identical calls; both are factored into local helpers.
    Behavior, names, and variables are unchanged.
    """
    self.p_embedding = tf.keras.layers.Embedding(
        Config.model.pos_num, Config.model.pos_embedding_size,
        name='pos_embedding')
    # 4 structural actions + 4 variants per dependency label.
    self.history_a_embedding = tf.keras.layers.Embedding(
        4 + Config.model.dep_num * 4,
        Config.model.history_action_embedding_size,
        name='action_embedding')
    wordvec = load_pretrained_vec()
    self.w_embedding = tf.keras.layers.Embedding(
        wordvec.shape[0], wordvec.shape[1],
        tf.constant_initializer(wordvec, tf.float32),
        name='word_embedding')
    self.learned_word_dense = tf.keras.layers.Dense(
        Config.model.embedding_fc_unit, tf.nn.relu, name='learned_word_fc')
    self.tree_lstm_cell = TreeLSTMCell(Config.model.lstm_unit)

    def make_cell(tag=None):
        # Single orthogonally-initialized LSTM cell; named when used
        # stand-alone in a bidirectional pair.
        return tf.nn.rnn_cell.LSTMCell(Config.model.lstm_unit,
                                       initializer=tf.orthogonal_initializer(),
                                       name=tag)

    def make_stack():
        # 2-layer LSTM stack; called once per structure so each stack
        # gets its own variables.
        return tf.nn.rnn_cell.MultiRNNCell([make_cell() for _ in range(2)])

    self.stack_lstm = make_stack()
    self.fw_lstm_cell0 = make_cell('fw_cell0')
    self.bw_lstm_cell0 = make_cell('bw_cell0')
    self.fw_lstm_cell1 = make_cell('fw_cell1')
    self.bw_lstm_cell1 = make_cell('bw_cell1')
    self.action_lstm = make_stack()
    self.deque_lstm = make_stack()
    # NOTE(review): relu on the pre-softmax layer zeroes negative logits;
    # the sibling model's 'softmax_fc' has no activation — confirm the
    # relu here is intentional before changing it.
    self.final_dense = tf.keras.layers.Dense(4 + 4 * Config.model.dep_num,
                                             tf.nn.relu, name='softmax_fc')