def build_graph(self):
    """Build the CRF sequence-tagging graph.

    Runs the base network, decodes the best tag sequence with Viterbi,
    and exposes it as ``self.predictions`` (also named 'prediction' for
    fetching by name). In TRAIN mode adds loss + train op; in EVAL mode
    registers placeholder-backed P/R/F1 summaries (scores are computed
    outside the graph and fed in through the *_ph placeholders).
    """
    graph = Graph(self.mode)
    logits = graph.build(self.features)
    # CRF transition matrix; fc_unit is presumably the number of tags —
    # NOTE(review): confirm against Config.
    transition_params = tf.get_variable(
        "transitions", [Config.model.fc_unit, Config.model.fc_unit])
    viterbi_sequence, _ = tf.contrib.crf.crf_decode(
        logits, transition_params, self.features['length'])
    self.predictions = viterbi_sequence
    tf.identity(self.predictions, 'prediction')

    if self.mode != tf.estimator.ModeKeys.PREDICT:
        self._build_loss(logits, transition_params)
        if self.mode == tf.estimator.ModeKeys.TRAIN:
            self._build_train_op()
        else:
            # EVAL: segmentation scores fed externally, summarized under 'prf'.
            seg_precision = tf.placeholder(tf.float32, None, 'seg_p_ph')
            seg_recall = tf.placeholder(tf.float32, None, 'seg_r_ph')
            seg_f1_measure = tf.placeholder(tf.float32, None, 'seg_f1_ph')
            tf.summary.scalar('seg_precision', seg_precision, ['prf'], 'seg_score')
            tf.summary.scalar('seg_recall', seg_recall, ['prf'], 'seg_score')
            tf.summary.scalar('seg_f1_measure', seg_f1_measure, ['prf'], 'seg_score')

            tag_precision = tf.placeholder(tf.float32, None, 'tag_p_ph')
            tag_recall = tf.placeholder(tf.float32, None, 'tag_r_ph')
            tag_f1_measure = tf.placeholder(tf.float32, None, 'tag_f1_ph')
            # Fixed TensorBoard tag typo: was 'tag_precison'. The fed
            # placeholder name ('tag_p_ph') is unchanged, so feeding code
            # is unaffected; only the displayed summary name changes.
            tf.summary.scalar('tag_precision', tag_precision, ['prf'], 'tag_score')
            tf.summary.scalar('tag_recall', tag_recall, ['prf'], 'tag_score')
            tf.summary.scalar('tag_f1_measure', tag_f1_measure, ['prf'], 'tag_score')
def build_graph(self):
    """Build the classification graph.

    Predictions are the argmax over the class dimension of the network's
    logits. For non-PREDICT modes, also wires up loss, train op and
    evaluation metrics.
    """
    logits = Graph(self.mode).build(self.inputs)
    self.predictions = tf.argmax(logits, 1)

    # Nothing further to build at inference time.
    if self.mode == tf.estimator.ModeKeys.PREDICT:
        return
    self._build_loss(logits)
    self._build_train_op()
    self._build_metric()
def build_graph(self):
    """Build the detection graph.

    Predictions are an ordered mapping with the per-feature-level softmax
    outputs first ('softmax_feat1', 'softmax_feat2', ...) followed by the
    location outputs ('locs_feat1', 'locs_feat2', ...), 1-based.
    """
    graph = Graph(self.mode)
    logits, locs, softmax_logits = graph.build(self.inputs)

    named_outputs = OrderedDict()
    for level, tensor in enumerate(softmax_logits, start=1):
        named_outputs[f'softmax_feat{level}'] = tensor
    for level, tensor in enumerate(locs, start=1):
        named_outputs[f'locs_feat{level}'] = tensor
    self.predictions = named_outputs

    if self.mode != tf.estimator.ModeKeys.PREDICT:
        self._build_loss(logits, locs)
        self._build_optimizer()
def build_graph(self):
    """Build the bidirectional language-model graph.

    A single softmax projection (w, b) is shared by the forward and
    backward directions. TRAIN mode delegates loss/train-op construction
    to helpers; otherwise the loss is the mean of the forward and
    backward sparse-softmax cross-entropies against 'for_labels' /
    'back_labels'.
    """
    graph = Graph(self.mode)
    outputs = graph.build(self.inputs)

    # Shared output projection onto the vocabulary.
    softmax_w = tf.get_variable(
        'w', [Config.model.vocab_num, Config.model.embedding_size],
        tf.float32, slim.xavier_initializer())
    softmax_b = tf.get_variable(
        'b', [Config.model.vocab_num], tf.float32,
        tf.constant_initializer(0.0))

    if self.mode == tf.estimator.ModeKeys.TRAIN:
        self._build_loss(outputs, softmax_w, softmax_b)
        self._build_train_op()
        return

    def _direction_loss(hidden, labels):
        # Project hidden states to vocab logits, then score against labels.
        direction_logits = tf.tensordot(
            hidden, tf.transpose(softmax_w), [[2], [0]])
        direction_logits = tf.nn.bias_add(direction_logits, softmax_b)
        return tf.losses.sparse_softmax_cross_entropy(
            labels, direction_logits)

    forward = _direction_loss(outputs[0], self.targets['for_labels'])
    backward = _direction_loss(outputs[1], self.targets['back_labels'])
    self.loss = 0.5 * (forward + backward)
def build_graph(self):
    """Build the dependency-parsing graph (arc and label scoring).

    Exposes arc/label logits both as named tensors and in
    ``self.predictions``. TRAIN adds loss + train op; EVAL registers
    placeholder-backed UAS/LAS summaries (accuracies are computed outside
    the graph and fed via 'arc_ph' / 'label_ph').
    """
    arc_logits, label_logits = Graph(self.mode).build(self.inputs)
    tf.identity(arc_logits, 'arc_logits')
    tf.identity(label_logits, 'label_logits')
    self.predictions = {
        'arc_logits': arc_logits,
        'label_logits': label_logits,
    }

    if self.mode == tf.estimator.ModeKeys.PREDICT:
        return
    self._build_loss(arc_logits, label_logits)
    if self.mode == tf.estimator.ModeKeys.TRAIN:
        self._build_train_op()
    else:
        # EVAL: attachment scores are fed in, summarized under 'acc'.
        arc_acc = tf.placeholder(tf.float32, None, 'arc_ph')
        label_acc = tf.placeholder(tf.float32, None, 'label_ph')
        tf.summary.scalar('UAS', arc_acc, ['acc'], 'score')
        tf.summary.scalar('LAS', label_acc, ['acc'], 'score')