def _sg_losses(self, ops=None, suffix=''):
    """Compute scene-graph losses: Fast-RCNN losses plus a relation loss
    whose scores are re-weighted by a masked softmax over gathered Xr rows.

    Args:
        ops: Optional dict accumulating loss/intermediate tensors. Defaults
            to a fresh dict per call. (The original used a mutable default
            ``ops={}``, which is shared across calls — a classic Python bug
            that silently leaks entries between invocations.)
        suffix: Name suffix selecting which network outputs to read and
            how to name the produced losses.

    Returns:
        The ``ops`` dict extended with intermediate tensors and
        ``'loss_rel' + suffix``.
    """
    ops = {} if ops is None else ops
    # Start from the standard Fast-RCNN losses (cls + optional bbox reg).
    ops = self._frc_losses(ops, suffix)

    rel_score = self.get_output('rel_score' + suffix)
    cls_score = self.get_output('cls_score' + suffix)
    ops['cls_score'] = cls_score
    ops['rel_score'] = rel_score
    ops['cls_pred'] = self.get_output('cls_pred' + suffix)
    ops['predicates'] = self.data['predicates']
    ops['rel_rois'] = self.data['rel_rois']
    ops['relations'] = self.data['relations']
    relations = self.data['relations']
    ops['labels'] = self.data['labels']
    ops['Xr'] = self.Xr

    # For each relation, gather the predicted class of its subject
    # (column 0) and object (column 1) and pair them into one
    # (subject_class, object_class) row per relation.
    cls_pred = self.get_output('cls_pred' + suffix)
    sbjs = tf.gather(cls_pred, relations[:, 0])
    sbjs = tf.expand_dims(sbjs, 1)
    objs = tf.gather(cls_pred, relations[:, 1])
    objs = tf.expand_dims(objs, 1)
    # ``concat_dim=`` is the pre-TF-1.0 signature (modern tf.concat takes
    # (values, axis)); kept to match the TF version this file targets.
    ops['indices'] = tf.cast(
        tf.concat(concat_dim=1, values=[sbjs, objs]), tf.int32)
    # NOTE(review): ops['indices'] is already a single concatenated tensor;
    # wrapping it in a second tf.concat looks redundant — confirm against
    # the TF version in use before simplifying.
    ops['Xr'] = tf.gather_nd(
        params=self.Xr,
        indices=tf.concat(concat_dim=1, values=ops['indices']))
    # Normalize the gathered Xr rows so they act as per-class weights.
    ops['Xr'] = tf.nn.softmax(ops['Xr'])

    # Blend: where mask == 1 scale rel_score by softmaxed Xr; elsewhere
    # pass rel_score through unchanged (res = 1 - mask is the identity
    # term of the blend).
    res = 1 - self.mask
    rel_score = rel_score * (res + (self.mask * ops['Xr']))

    ops['loss_rel' + suffix] = losses.sparse_softmax(
        rel_score, self.data['predicates'],
        name='rel_loss' + suffix, ignore_bg=True)
    return ops
def _sg_losses(self, ops=None, suffix=''):
    """Compute scene-graph losses: Fast-RCNN losses plus the relation loss.

    Args:
        ops: Optional dict accumulating loss tensors. Defaults to a fresh
            dict per call. (The original used a mutable default ``ops={}``,
            which is shared across calls and leaks entries between
            invocations — fixed by the ``None`` sentinel idiom.)
        suffix: Name suffix selecting which network outputs to read and
            how to name the produced losses.

    Returns:
        The ``ops`` dict extended with ``'loss_rel' + suffix``.
    """
    ops = {} if ops is None else ops
    # Standard Fast-RCNN losses first (classification + bbox regression).
    ops = self._frc_losses(ops, suffix)
    rel_score = self.get_output('rel_score' + suffix)
    # Relation loss over predicate labels; background predicates ignored.
    ops['loss_rel' + suffix] = losses.sparse_softmax(
        rel_score, self.data['predicates'],
        name='rel_loss' + suffix, ignore_bg=True)
    return ops
def _frc_losses(self, ops=None, suffix=''):
    """Compute Fast-RCNN losses: classification plus optional bbox L1 loss.

    Args:
        ops: Optional dict accumulating loss tensors. Defaults to a fresh
            dict per call. (The original used a mutable default ``ops={}``,
            which is shared across calls and leaks entries between
            invocations — fixed by the ``None`` sentinel idiom.)
        suffix: Name suffix selecting which network outputs to read and
            how to name the produced losses.

    Returns:
        The ``ops`` dict extended with ``'loss_cls' + suffix`` and, when
        bbox regression is enabled in the config, ``'loss_box' + suffix``.
    """
    ops = {} if ops is None else ops
    # Classification loss over the RoI class scores.
    cls_score = self.get_output('cls_score' + suffix)
    ops['loss_cls' + suffix] = losses.sparse_softmax(
        cls_score, self.data['labels'], name='cls_loss' + suffix)
    # Bounding-box regression L1 loss, gated by the training config.
    if cfg.TRAIN.BBOX_REG:
        bbox_pred = self.get_output('bbox_pred' + suffix)
        ops['loss_box' + suffix] = losses.l1_loss(
            bbox_pred, self.data['bbox_targets'], 'reg_loss' + suffix,
            self.data['bbox_inside_weights'])
    else:
        print('NO BBOX REGRESSION!!!!!')
    return ops
# NOTE(review): this fragment references names defined elsewhere in the
# file (sess, FLAGS, init, next_batch_x, features_size, next_batch_endings_y,
# train_iterator, test_iterator, input_x, sentences) — this chunk is not
# self-contained.
with sess.as_default():
    # Fix TF's RNG for reproducibility when a seed flag is supplied.
    if FLAGS.random_seed is not None:
        tf.set_random_seed(FLAGS.random_seed)
    # Build execution graph
    network = BiDirectional_LSTM(sess, init, next_batch_x, features_size,
                                 FLAGS.attention, FLAGS.attention_size)
    eval_predictions, endings, train_logits = network.build_model()
    # Compare with next_batch_endings_y
    # Select the training loss by flag; any other value is a hard error.
    if FLAGS.loss_function == "SIGMOID":
        loss = losses.sigmoid(next_batch_endings_y, train_logits)
    elif FLAGS.loss_function == "SOFTMAX":
        loss = losses.sparse_softmax(next_batch_endings_y, endings)
    else:
        raise RuntimeError(
            f"Loss function {FLAGS.loss_function} not supported.")
    # Fraction of predictions matching the labels (mean of a 0/1 tensor).
    accuracy = tf.reduce_mean(
        tf.cast(tf.equal(eval_predictions, next_batch_endings_y),
                dtype=tf.float32))
    """Initialize iterators"""
    # String handles let a feedable iterator switch between datasets.
    train_handle = sess.run(train_iterator.string_handle())
    test_handle = sess.run(test_iterator.string_handle())
    if FLAGS.use_train_set:
        # Skip-thought vectors are precomputed, so no raw sentences are
        # fed; otherwise feed the sentence placeholder.
        sess.run(train_iterator.initializer,
                 feed_dict={} if FLAGS.use_skip_thoughts
                 else {input_x: sentences})