Example #1
def get_regression_loss(args, xlnet_config, features, is_training=False):
    """Loss for downstream regression tasks."""

    # XLNet consumes time-major inputs: move input_ids to [seq_len, batch, 1]
    # and input_mask to [seq_len, batch]; segment_ids is passed through as-is.
    inp = fluid.layers.transpose(features["input_ids"], [1, 0, 2])
    seg_id = features["segment_ids"]
    inp_mask = fluid.layers.transpose(features["input_mask"], [1, 0])
    label = features["label_ids"]

    xlnet_model = XLNetModel(input_ids=inp,
                             seg_ids=seg_id,
                             input_mask=inp_mask,
                             xlnet_config=xlnet_config,
                             args=args)

    # Pool the sequence output into one summary vector per example, then
    # compute the per-example regression loss and the raw predictions.
    summary = xlnet_model.get_pooled_out(args.summary_type, args.use_summ_proj)
    per_example_loss, logits = modeling.regression_loss(
        hidden=summary,
        labels=label,
        initializer=_get_initializer(args),
        name="model_regression_{}".format(args.task_name.lower()),
        return_logits=True)

    # Average over the batch to get the scalar training loss.
    total_loss = fluid.layers.reduce_mean(per_example_loss)

    return total_loss, per_example_loss, logits
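
For context, a minimal sketch of how this PaddlePaddle (fluid) variant could be wired into a program. The wrapper function, placeholder names, and shapes are assumptions for illustration only; just get_regression_loss itself comes from the example above.

import paddle.fluid as fluid

def build_regression_program(args, xlnet_config, max_seq_len=128):
    """Hypothetical wrapper: declare placeholders and build the loss graph."""
    train_prog, startup_prog = fluid.Program(), fluid.Program()
    with fluid.program_guard(train_prog, startup_prog):
        features = {
            # Shapes are illustrative: [batch, seq_len, 1] ids are transposed
            # to the time-major layout inside get_regression_loss.
            "input_ids": fluid.layers.data(
                name="input_ids", shape=[-1, max_seq_len, 1],
                dtype="int64", append_batch_size=False),
            "segment_ids": fluid.layers.data(
                name="segment_ids", shape=[-1, max_seq_len, 1],
                dtype="int64", append_batch_size=False),
            "input_mask": fluid.layers.data(
                name="input_mask", shape=[-1, max_seq_len],
                dtype="float32", append_batch_size=False),
            "label_ids": fluid.layers.data(
                name="label_ids", shape=[-1, 1],
                dtype="float32", append_batch_size=False),
        }
        total_loss, per_example_loss, logits = get_regression_loss(
            args, xlnet_config, features, is_training=True)
    return train_prog, startup_prog, total_loss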
Example #2
def get_regression_loss(FLAGS, features, is_training):
    """Loss for downstream regression tasks."""

    bsz_per_core = tf.shape(features["input_ids"])[0]

    # Transpose to the time-major [seq_len, batch] layout that XLNet expects
    # and flatten the labels to a [batch] vector.
    inp = tf.transpose(features["input_ids"], [1, 0])
    seg_id = tf.transpose(features["segment_ids"], [1, 0])
    inp_mask = tf.transpose(features["input_mask"], [1, 0])
    label = tf.reshape(features["label_ids"], [bsz_per_core])

    xlnet_config = xlnet.XLNetConfig(json_path=FLAGS.model_config_path)
    run_config = xlnet.create_run_config(is_training, True, FLAGS)

    xlnet_model = xlnet.XLNetModel(xlnet_config=xlnet_config,
                                   run_config=run_config,
                                   input_ids=inp,
                                   seg_ids=seg_id,
                                   input_mask=inp_mask)

    summary = xlnet_model.get_pooled_out(FLAGS.summary_type,
                                         FLAGS.use_summ_proj)

    # Build the regression head under the shared "model" scope; AUTO_REUSE lets
    # repeated graph builds (e.g. train and eval) reuse the same variables.
    with tf.variable_scope("model", reuse=tf.AUTO_REUSE):
        per_example_loss, logits = modeling.regression_loss(
            hidden=summary,
            labels=label,
            initializer=xlnet_model.get_initializer(),
            scope="regression_{}".format(FLAGS.task_name.lower()),
            return_logits=True)

        total_loss = tf.reduce_mean(per_example_loss)

        return total_loss, per_example_loss, logits
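
For context, a rough sketch of how this TensorFlow 1.x variant is typically called from an Estimator model_fn, in the spirit of the official XLNet run_classifier.py. The model_fn below is a simplified stand-in (plain gradient descent, a global FLAGS object), not the repo's actual training setup.

import tensorflow as tf  # TF 1.x graph-mode API

def model_fn(features, labels, mode, params):
    """Simplified model_fn: build the loss and wrap it in an EstimatorSpec."""
    is_training = (mode == tf.estimator.ModeKeys.TRAIN)
    total_loss, per_example_loss, logits = get_regression_loss(
        FLAGS, features, is_training)

    if mode == tf.estimator.ModeKeys.PREDICT:
        return tf.estimator.EstimatorSpec(
            mode=mode, predictions={"logits": logits})

    # Plain SGD keeps the sketch self-contained; the XLNet repo instead builds
    # an Adam optimizer with warmup and weight decay.
    train_op = tf.train.GradientDescentOptimizer(FLAGS.learning_rate).minimize(
        total_loss, global_step=tf.train.get_or_create_global_step())
    return tf.estimator.EstimatorSpec(
        mode=mode, loss=total_loss, train_op=train_op)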
Example #3
def get_regression_loss(FLAGS, features, n_class, is_training, mode):
    """Loss for downstream regression tasks (multi-target variant)."""

    bsz_per_core = tf.shape(features["input_ids"])[0]
    inp = tf.transpose(features["input_ids"], [1, 0])
    seg_id = tf.transpose(features["segment_ids"], [1, 0])
    inp_mask = tf.transpose(features["input_mask"], [1, 0])
    # Labels are reshaped to a fixed [train_batch_size, 31] block, i.e. 31
    # regression targets per example; this requires full, fixed-size batches.
    label = tf.reshape(features["label_ids"], [FLAGS.train_batch_size, 31])
    xlnet_config = xlnet.XLNetConfig(json_path=FLAGS.model_config_path)
    run_config = xlnet.create_run_config(is_training, True, FLAGS)

    xlnet_model = xlnet.XLNetModel(xlnet_config=xlnet_config,
                                   run_config=run_config,
                                   input_ids=inp,
                                   seg_ids=seg_id,
                                   input_mask=inp_mask)

    summary = xlnet_model.get_pooled_out(FLAGS.summary_type,
                                         FLAGS.use_summ_proj)

    #     if mode == tf.estimator.ModeKeys.TRAIN or mode == tf.estimator.ModeKeys.EVAL:
    with tf.variable_scope("model", reuse=tf.AUTO_REUSE):
        per_example_loss, logits = modeling.regression_loss(
            mode=mode,
            hidden=summary,
            labels=label,
            n_class=n_class,
            initializer=xlnet_model.get_initializer(),
            scope="regression_{}".format(FLAGS.task_name.lower()),
            return_logits=True)

        total_loss = tf.reduce_mean(per_example_loss)
        return total_loss, per_example_loss, logits
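
The third variant reshapes label_ids into a fixed [train_batch_size, 31] block and forwards mode and n_class to modeling.regression_loss. A minimal sketch of a tf.data input pipeline that yields features in that layout follows; the record format, key names, and the constant 31 are assumptions carried over from the example above, not taken from a specific repository.

import tensorflow as tf  # TF 1.x

def make_input_fn(record_file, seq_len, batch_size, n_targets=31):
    """Hypothetical input_fn yielding the feature dict used above."""
    name_to_features = {
        "input_ids": tf.FixedLenFeature([seq_len], tf.int64),
        "segment_ids": tf.FixedLenFeature([seq_len], tf.int64),
        "input_mask": tf.FixedLenFeature([seq_len], tf.float32),
        "label_ids": tf.FixedLenFeature([n_targets], tf.float32),
    }

    def input_fn(params):
        ds = tf.data.TFRecordDataset(record_file)
        ds = ds.map(lambda rec: tf.parse_single_example(rec, name_to_features))
        # drop_remainder keeps a static batch dimension, which the fixed
        # [train_batch_size, 31] reshape in get_regression_loss relies on.
        return ds.batch(batch_size, drop_remainder=True)

    return input_fn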