# Shared imports assumed by the encoder methods below (Dict/Any and tf.Tensor
# appear in every signature); write_to_feed_dict and tfutils are repo-local helpers.
from typing import Any, Dict
import tensorflow as tf

def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    write_to_feed_dict(feed_dict, self.placeholders['tokens'], batch_data['tokens'])
    write_to_feed_dict(feed_dict, self.placeholders['tokens_mask'], batch_data['tokens_mask'])
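# Each of these minibatch_to_feed_dict variants routes values into the TF1 feed
# dict through a write_to_feed_dict helper whose implementation is not shown
# here. A minimal sketch of what such a helper plausibly does follows; the name
# and the numpy coercion are assumptions, not the repo's confirmed behavior.
import numpy as np

def write_to_feed_dict_sketch(feed_dict, placeholder, value) -> None:
    # Hypothetical stand-in: coerce `value` to an array and bind it to
    # `placeholder` for the next session.run call.
    feed_dict[placeholder] = np.asarray(value)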
def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    # Dropout is only active during training; at test time the keep rates are 1.0.
    feed_dict[self.placeholders['rnn_dropout_keep_rate']] = \
        self.get_hyper('rnn_dropout_keep_rate') if is_train else 1.0
    feed_dict[self.placeholders['rnn_recurrent_dropout_keep_rate']] = \
        self.get_hyper('rnn_recurrent_dropout_keep_rate') if is_train else 1.0
    write_to_feed_dict(feed_dict, self.placeholders['tokens'], batch_data['tokens'])
    write_to_feed_dict(feed_dict, self.placeholders['tokens_lengths'], batch_data['tokens_lengths'])
def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    node_masks = batch_data['node_masks']
    node_token_ids = batch_data['tokens']
    if node_masks:
        # Pad every sample to the length of the longest one in the batch:
        # masks are padded with 0 (ignore) and token ids with the -1 sentinel.
        max_tokens = max(len(x) for x in node_masks)
        node_masks = [n + [0] * (max_tokens - len(n)) for n in node_masks]
        node_token_ids = [n + [-1] * (max_tokens - len(n)) for n in node_token_ids]
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['node_masks'], node_masks)
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['tokens'], node_token_ids)
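# A quick worked example of the padding above (standalone, illustrative only):
# two ragged samples of lengths 2 and 4 become a rectangular 2x4 batch.
masks = [[1, 1], [1, 1, 1, 1]]
token_ids = [[7, 3], [5, 2, 9, 4]]
max_tokens = max(len(m) for m in masks)
padded_masks = [m + [0] * (max_tokens - len(m)) for m in masks]           # [[1, 1, 0, 0], [1, 1, 1, 1]]
padded_token_ids = [t + [-1] * (max_tokens - len(t)) for t in token_ids]  # [[7, 3, -1, -1], [5, 2, 9, 4]]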
def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    node_type_ids = batch_data['node_type_ids']
    children = batch_data['children']
    # Pad node and child arrays so every tree in the batch has the same shape.
    node_type_ids, children = tbcnn_network.pad_batch(node_type_ids, children)
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['node_type_ids'], node_type_ids)
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['children'], children)
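# tbcnn_network.pad_batch is defined elsewhere in the repo. The sketch below is
# a hypothetical reconstruction of the usual TBCNN batching scheme (pad node
# lists to a common node count and child lists to a common fan-out), not the
# repo's confirmed implementation.
from typing import List, Tuple

def pad_batch_sketch(node_type_ids: List[List[int]],
                     children: List[List[List[int]]]) -> Tuple[List[List[int]], List[List[List[int]]]]:
    max_nodes = max(len(n) for n in node_type_ids)
    max_children = max((len(c) for sample in children for c in sample), default=0)
    # Pad node type lists with a 0 id, and give every node a fixed-width
    # child list (missing trees/children are filled with zeros).
    padded_nodes = [n + [0] * (max_nodes - len(n)) for n in node_type_ids]
    padded_children = [
        [c + [0] * (max_children - len(c)) for c in sample] +
        [[0] * max_children] * (max_nodes - len(sample))
        for sample in children
    ]
    return padded_nodes, padded_children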
def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    if self.get_hyper('use_token_embeddings'):
        # Feed precomputed embeddings directly instead of token ids.
        write_to_feed_dict(feed_dict, self.placeholders['token_embeddings'], batch_data['token_embeddings'])
    else:
        write_to_feed_dict(feed_dict, self.placeholders['tokens'], batch_data['tokens'])
    write_to_feed_dict(feed_dict, self.placeholders['tokens_mask'], batch_data['tokens_mask'])
def minibatch_to_feed_dict(self, batch_data: Dict[str, Any], feed_dict: Dict[tf.Tensor, Any], is_train: bool) -> None:
    super().minibatch_to_feed_dict(batch_data, feed_dict, is_train)
    node_masks = batch_data['node_masks']
    node_token_ids = batch_data['node_token_ids']
    edges = batch_data['edges']
    # seq_masks = batch_data['seq_masks']
    # seq_token_ids = batch_data['seq_token_ids']
    if node_masks:
        # Pad batches so that every sample has the same number of nodes:
        # masks get 0, token ids get the -1 sentinel.
        max_tokens = max(len(x) for x in node_masks)
        node_masks = [list(n) + [0] * (max_tokens - len(n)) for n in node_masks]
        node_token_ids = [list(n) + [-1] * (max_tokens - len(n)) for n in node_token_ids]
        # Flatten per-sample edge lists into global [edge_type, batch_id, v, u]
        # rows, keeping only edges whose endpoints fall inside the padded node range.
        edges = [
            [edge_type, batch_id, v, u]
            for batch_id, edge_pack in enumerate(edges)
            for edge_type, v, u in edge_pack
            if v < max_tokens and u < max_tokens
        ]
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['node_masks'], node_masks)
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['node_token_ids'], node_token_ids)
    tfutils.write_to_feed_dict(feed_dict, self.placeholders['edges'], edges)
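# Worked example of the edge flattening above (standalone, illustrative only).
# Two samples, each holding per-sample (edge_type, v, u) triples, become one
# flat list of [edge_type, batch_id, v, u] rows for the graph placeholders:
edges = [
    [(0, 0, 1), (1, 1, 2)],  # sample 0
    [(0, 0, 2)],             # sample 1
]
max_tokens = 3
flat_edges = [
    [edge_type, batch_id, v, u]
    for batch_id, edge_pack in enumerate(edges)
    for edge_type, v, u in edge_pack
    if v < max_tokens and u < max_tokens
]
# flat_edges == [[0, 0, 0, 1], [1, 0, 1, 2], [0, 1, 0, 2]]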