def get_history_sum_embedded(self):
    # TODO: add mask info for this operation
    his_days = ['one', 'two', 'three', 'four', 'five', 'six', 'seven',
                'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen']
    # Look up mask-aware chapter embeddings for every history day.
    for fir in his_days:
        key = "history_" + fir + "_chap_ph"
        embed_key = "history_" + fir + "_chap_embedded"
        setattr(self, embed_key,
                get_mask_zero_embedded(self.chapters_embeddings_var,
                                       getattr(self, key)))
    # Same for section embeddings.
    for fir in his_days:
        key = "history_" + fir + "_sec_ph"
        embed_key = "history_" + fir + "_sec_embedded"
        setattr(self, embed_key,
                get_mask_zero_embedded(self.sections_embeddings_var,
                                       getattr(self, key)))
    # Pool each day's item axis: b*x*128 -> b*128, then concat to b*(128*14).
    chap = tf.reduce_mean(self.history_one_chap_embedded, axis=-2)
    sec = tf.reduce_mean(self.history_one_sec_embedded, axis=-2)
    # his_days[:0:-1] walks the remaining days in reverse ('fourteen' .. 'two');
    # the order is harmless here because a mean over the day axis follows.
    for fir in his_days[:0:-1]:
        key_c = "history_" + fir + "_chap_embedded"
        chap = tf.concat([chap, tf.reduce_mean(getattr(self, key_c), axis=-2)], axis=-1)
        key_s = "history_" + fir + "_sec_embedded"
        sec = tf.concat([sec, tf.reduce_mean(getattr(self, key_s), axis=-2)], axis=-1)
    # Restore the day axis and average across the HIS_DAYS history window.
    history_chap_emb = tf.reshape(chap, [-1, HIS_DAYS, EMBEDDING_DIM])
    history_sec_emb = tf.reshape(sec, [-1, HIS_DAYS, EMBEDDING_DIM])
    chap_mean = tf.reduce_mean(history_chap_emb, axis=-2)
    sec_mean = tf.reduce_mean(history_sec_emb, axis=-2)
    return chap_mean, sec_mean
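# get_mask_zero_embedded is used above but not defined in this excerpt. A
# minimal sketch of what it presumably does, assuming id 0 is the padding
# token: look the ids up in the embedding table, then zero out the vectors at
# padded positions so they contribute nothing downstream. (Name and behavior
# are assumptions, not confirmed project code.) Note that the plain
# reduce_mean above still divides by the full padded length, which is likely
# what the "add mask info" TODO refers to.
def get_mask_zero_embedded(embeddings_var, ids_ph):
    embedded = tf.nn.embedding_lookup(embeddings_var, ids_ph)  # b*x*dim
    mask = tf.cast(tf.not_equal(ids_ph, 0), embedded.dtype)    # b*x, 1 for real ids
    return embedded * tf.expand_dims(mask, -1)                 # zero the padded rows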
def diagonal_lstm(inputs, scope='diagonal_lstm'):
    with tf.compat.v1.variable_scope(scope):
        skewed_inputs = skew(inputs, scope="skewed_i")
        # 1x1 masked convolution produces the input-to-state term for all
        # four gates at once (64 = 4 gates * 16 hidden units).
        input_to_state = conv2d(skewed_inputs, 64, [1, 1], mask_type="b", scope="i_to_s")
        # Process the skewed map column by column: [b, h, w', c] -> [b, w', h, c],
        # so each diagonal of the original image becomes one RNN step.
        column_wise_inputs = tf.transpose(input_to_state, perm=[0, 2, 1, 3])
        batch, width, height, channel = column_wise_inputs.get_shape().as_list()
        rnn_inputs = tf.reshape(column_wise_inputs, [-1, width, height * channel])
        cell = DiagonalLSTMCell(16, height, channel)
        outputs, states = tf.compat.v1.nn.dynamic_rnn(cell, inputs=rnn_inputs,
                                                      dtype=tf.float32)
        # Undo the column-major layout and the skew.
        width_first_outputs = tf.reshape(outputs, [-1, width, height, 16])
        skewed_outputs = tf.transpose(width_first_outputs, perm=[0, 2, 1, 3])
        outputs = unskew(skewed_outputs)
    return outputs
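# A shape-level smoke test for diagonal_lstm, assuming the masked conv2d,
# skew, unskew, and DiagonalLSTMCell helpers in this file are available. The
# 28x28x1 input is illustrative only.
if __name__ == "__main__":
    tf.compat.v1.disable_eager_execution()
    images = tf.compat.v1.placeholder(tf.float32, [None, 28, 28, 1])
    out = diagonal_lstm(images, scope="demo_diag_lstm")
    print(out.get_shape().as_list())  # expected: [None, 28, 28, 16]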
def __call__(self, i_to_s, state, scope="DiagonalBiLSTMCell"):
    # state packs [c, h]; split it back into the two halves.
    c_prev = tf.slice(state, [0, 0], [-1, self._num_units])
    h_prev = tf.slice(state, [0, self._num_units], [-1, self._num_units])
    with tf.compat.v1.variable_scope(scope):
        conv1d_inputs = tf.reshape(h_prev,
                                   [-1, self._height, 1, self._hidden_dims],
                                   name='conv1d_inputs')
        # State-to-state: a size-2 1-D convolution along the height axis
        # computes all four gate pre-activations from the previous column.
        conv_s_to_s = conv1d(conv1d_inputs, 4 * self._hidden_dims, 2, scope='s_to_s')
        s_to_s = tf.reshape(conv_s_to_s, [-1, self._height * self._hidden_dims * 4])
        # Note: sigmoid is applied to all four splits here, including the
        # candidate g; the PixelRNN paper uses tanh for the candidate.
        lstm_matrix = tf.sigmoid(s_to_s + i_to_s)
        i, g, f, o = tf.split(lstm_matrix, 4, 1)
        c = f * c_prev + i * g
        h = tf.multiply(o, tf.tanh(c), name='hid')
    new_state = tf.concat([c, h], 1)
    return h, new_state
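# The __call__ above assumes a cell class roughly like the following sketch.
# Attribute names are inferred from their uses; the (hidden_dims, height,
# channel) constructor order matches the DiagonalLSTMCell(16, height, channel)
# call site in diagonal_lstm. This is an assumption about the surrounding
# class, not confirmed code.
class DiagonalLSTMCell(tf.compat.v1.nn.rnn_cell.RNNCell):
    def __init__(self, hidden_dims, height, channel):
        self._height = height
        self._channel = channel
        self._hidden_dims = hidden_dims
        self._num_units = self._hidden_dims * self._height  # units per column step
        self._state_size = self._num_units * 2               # [c, h] concatenated
        self._output_size = self._num_units

    @property
    def state_size(self):
        return self._state_size

    @property
    def output_size(self):
        return self._output_size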
def get_history_sum_embedded(self):
    # TODO: add mask info for this operation
    his_days = ['one', 'two', 'three', 'four', 'five', 'six', 'seven',
                'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen']
    # Look up chapter, section, and style embeddings for every history day.
    for fir in his_days:
        key_c = "history_" + fir + "_chap_ph"
        embed_key_c = "history_" + fir + "_chap_embedded"
        setattr(self, embed_key_c,
                get_mask_zero_embedded(self.chapters_embeddings_var,
                                       getattr(self, key_c)))
        key_s = "history_" + fir + "_sec_ph"
        embed_key_s = "history_" + fir + "_sec_embedded"
        setattr(self, embed_key_s,
                get_mask_zero_embedded(self.sections_embeddings_var,
                                       getattr(self, key_s)))
        key_st = "style_" + fir + "_ph"
        embed_key_st = "history_" + fir + "_sty_embedded"
        setattr(self, embed_key_st,
                tf.nn.embedding_lookup(self.style_embeddings_var,
                                       getattr(self, key_st)))
    # Summarize each day with a shared RNN: b*x*128 -> b*128, concat to b*(128*14).
    chap = get_rnn_sum(self.history_one_chap_embedded, "rnncha")
    sec = get_rnn_sum(self.history_one_sec_embedded, "rnnsec")
    sty = self.history_one_sty_embedded
    # his_days[:0:-1] walks the remaining days in reverse; harmless, since a
    # mean over the day axis follows.
    for fir in his_days[:0:-1]:
        key_c = "history_" + fir + "_chap_embedded"
        chap = tf.concat([chap, get_rnn_sum(getattr(self, key_c), "rnncha")], axis=-1)
        key_s = "history_" + fir + "_sec_embedded"
        sec = tf.concat([sec, get_rnn_sum(getattr(self, key_s), "rnnsec")], axis=-1)
        key_st = "history_" + fir + "_sty_embedded"
        sty = tf.concat([sty, getattr(self, key_st)], axis=-1)
    # Restore the day axis and average across the HIS_DAYS window.
    history_chap_emb = tf.reshape(chap, [-1, HIS_DAYS, EMBEDDING_DIM])
    history_sec_emb = tf.reshape(sec, [-1, HIS_DAYS, EMBEDDING_DIM])
    history_sty_emb = tf.reshape(sty, [-1, HIS_DAYS, EMBEDDING_DIM])
    chap_mean = tf.reduce_mean(history_chap_emb, axis=-2)
    sec_mean = tf.reduce_mean(history_sec_emb, axis=-2)
    sty_mean = tf.reduce_mean(history_sty_emb, axis=-2)
    return tf.concat([chap_mean, sec_mean, sty_mean], axis=-1)
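# get_rnn_sum is referenced above but not defined in this excerpt. A minimal
# sketch of a plausible implementation, assuming it runs a GRU over the item
# axis and returns the final state as the day's summary (b*x*128 -> b*128),
# with AUTO_REUSE so every call under the same name ("rnncha", "rnnsec")
# shares weights. This is an assumption, not the project's confirmed helper.
def get_rnn_sum(sequence_embedded, name):
    with tf.compat.v1.variable_scope(name, reuse=tf.compat.v1.AUTO_REUSE):
        cell = tf.compat.v1.nn.rnn_cell.GRUCell(EMBEDDING_DIM)
        _, state = tf.compat.v1.nn.dynamic_rnn(cell, sequence_embedded,
                                               dtype=tf.float32)
    return state  # b*EMBEDDING_DIM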
def skew(inputs, scope="skew"):
    with tf.compat.v1.name_scope(scope):
        batch, height, width, channel = inputs.get_shape().as_list()
        rows = tf.split(inputs, height, 1)
        new_width = width + height - 1
        new_rows = []
        # Shift row idx right by idx pixels so diagonals become columns.
        for idx, row in enumerate(rows):
            transposed_row = tf.transpose(tf.squeeze(row, [1]), perm=[0, 2, 1])
            squeezed_row = tf.reshape(transposed_row, [-1, width])
            padded_row = tf.pad(squeezed_row,
                                paddings=((0, 0), (idx, height - 1 - idx)))
            unsqueezed_row = tf.reshape(padded_row, [-1, channel, new_width])
            untransposed_row = tf.transpose(unsqueezed_row, perm=[0, 2, 1])
            new_rows.append(untransposed_row)
        outputs = tf.stack(new_rows, axis=1, name="output")
    return outputs
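# unskew, used by diagonal_lstm above, is not defined in this excerpt. A
# minimal sketch of the inverse of skew: row idx keeps the `width` columns
# starting at offset idx, undoing the per-row shift. (Assumed implementation;
# the default width=height covers square feature maps.)
def unskew(inputs, width=None, scope="unskew"):
    with tf.compat.v1.name_scope(scope):
        batch, height, skewed_width, channel = inputs.get_shape().as_list()
        width = width if width else height
        rows = tf.split(inputs, height, 1)
        new_rows = []
        for idx, row in enumerate(rows):
            new_rows.append(tf.slice(row, [0, 0, idx, 0], [-1, -1, width, -1]))
        outputs = tf.concat(new_rows, axis=1, name="output")  # [b, h, w, c]
    return outputs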