def _valid_step(model: tf.keras.Model, dataset: tf.data.Dataset, progress_bar: ProgressBar,
                loss_metric: tf.keras.metrics.Mean, max_train_steps: Any = -1) -> Dict:
    """ Validation step for the sentence-pair classifier.

    :param model: model being validated
    :param dataset: validation dataset yielding (first_queries, second_queries, labels)
    :param progress_bar: progress manager
    :param loss_metric: running-mean loss accumulator (reset at the start of the step)
    :param max_train_steps: number of validation batches; -1 consumes the whole dataset
    :return: dict with "valid_loss" and "valid_auc"
    """
    print("验证轮次")
    start_time = time.time()
    loss_metric.reset_states()
    # Build the loss object once instead of re-instantiating it every batch.
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy()
    result, targets = tf.convert_to_tensor([], dtype=tf.float32), tf.convert_to_tensor([], dtype=tf.int32)

    for (batch, (first_queries, second_queries, labels)) in enumerate(dataset.take(max_train_steps)):
        # training=False keeps dropout/batch-norm in inference mode during validation.
        outputs = model(inputs=[first_queries, second_queries], training=False)

        loss = loss_fn(labels, outputs)
        loss_metric(loss)

        # Accumulate positive-class probability and true labels for the AUC computation.
        result = tf.concat([result, outputs[:, 1]], axis=0)
        targets = tf.concat([targets, labels], axis=0)

        progress_bar(current=batch + 1, metrics=get_dict_string(data={"valid_loss": loss_metric.result()}))

    auc_score = roc_auc_score(y_true=targets, y_score=result)
    progress_bar(current=progress_bar.total, metrics=get_dict_string(
        data={"valid_loss": loss_metric.result(), "valid_auc": auc_score}
    ))

    progress_bar.done(step_time=time.time() - start_time)

    return {"valid_loss": loss_metric.result(), "valid_auc": auc_score}
# --- Ejemplo n.º 2 (snippet separator from scraping; stray vote count removed) ---
 def _validate_step(self,
                    inputs: tf.Tensor,
                    labels: tf.Tensor,
                    masks: tf.Tensor,
                    loss_tensor: tf.keras.metrics.Mean,
                    training: bool = False) -> float:
     """Run one forward pass, fold its loss into *loss_tensor*, and return it.

     :param inputs: batch fed to ``self.call``
     :param labels: ground-truth tensor handed to ``self.loss_fn``
     :param masks: mask tensor handed to ``self.loss_fn``
     :param loss_tensor: running-mean accumulator updated in place
     :param training: forwarded to ``self.call`` (defaults to inference mode)
     :return: the batch loss as computed by ``self.loss_fn``
     """
     # NOTE(review): loss_fn is called as (predictions, labels, masks) —
     # argument order differs from the usual Keras (y_true, y_pred); confirm intent.
     outputs = self.call(inputs, training=training)
     batch_loss = self.loss_fn(outputs, labels, masks)
     loss_tensor.update_state(batch_loss)
     return batch_loss
# --- Ejemplo n.º 3 (snippet separator from scraping; stray vote count removed) ---
def __validation_step(X: tf.Tensor, y: tf.Tensor, model: tf.keras.Model,
                      loss_fn: tf.keras.losses.Loss,
                      loss: tf.keras.metrics.Mean,
                      metrics: List[tf.keras.metrics.Metric]) -> None:
    """Evaluate one batch in inference mode, updating *loss* and *metrics* in place.

    :param X: input batch
    :param y: ground-truth batch
    :param model: model under evaluation
    :param loss_fn: loss callable applied as loss_fn(y, logits)
    :param loss: running-mean loss accumulator
    :param metrics: metric objects updated with (y, logits)
    """
    predictions = model(X, training=False)
    batch_loss = loss_fn(y, predictions)
    loss.update_state(batch_loss)
    for metric in metrics:
        metric.update_state(y, predictions)
# --- Ejemplo n.º 4 (snippet separator from scraping; stray vote count removed) ---
def _valid_step(model: tf.keras.Model,
                dataset: tf.data.Dataset,
                progress_bar: ProgressBar,
                batch_size: Any,
                loss_metric: tf.keras.metrics.Mean,
                max_train_steps: Any = -1) -> Dict:
    """ Validation step with padding-masked sequence loss and AUC.

    :param model: model being validated
    :param dataset: validation dataset yielding (queries, _, true_outputs, labels)
    :param progress_bar: progress manager
    :param batch_size: batch size used to normalize the masked loss sum
    :param loss_metric: running-mean loss accumulator (reset at the start of the step)
    :param max_train_steps: number of validation batches; -1 consumes the whole dataset
    :return: dict with "valid_loss" and "valid_auc"
    """
    print("验证轮次")
    start_time = time.time()
    loss_metric.reset_states()
    # Build the (unreduced) loss object once instead of once per batch.
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(
        reduction=tf.keras.losses.Reduction.NONE)
    result, targets = tf.convert_to_tensor(
        [], dtype=tf.float32), tf.convert_to_tensor([], dtype=tf.int32)

    for (batch, (queries, _, true_outputs,
                 labels)) in enumerate(dataset.take(max_train_steps)):
        # training=False keeps dropout/batch-norm in inference mode during validation.
        outputs = model(inputs=queries, training=False)
        loss = loss_fn(true_outputs, outputs)
        # Zero out padding positions (token id 0) before averaging the loss.
        mask = tf.cast(x=tf.math.not_equal(true_outputs, 0), dtype=tf.float32)
        batch_loss = tf.reduce_sum(mask * loss) / batch_size

        loss_metric(batch_loss)

        # Positive-class probability from logits 5:7 at sequence position 0
        # (presumably a CLS-style slot holding the two label logits — verify).
        result = tf.concat(
            [result,
             tf.nn.softmax(logits=outputs[:, 0, 5:7], axis=-1)[:, 1]],
            axis=0)
        targets = tf.concat([targets, labels], axis=0)

        progress_bar(
            current=batch + 1,
            metrics=get_dict_string(data={"valid_loss": loss_metric.result()}))

    auc_score = roc_auc_score(y_true=targets, y_score=result)
    progress_bar(current=progress_bar.total,
                 metrics=get_dict_string(data={
                     "valid_loss": loss_metric.result(),
                     "valid_auc": auc_score
                 }))

    progress_bar.done(step_time=time.time() - start_time)

    return {"valid_loss": loss_metric.result(), "valid_auc": auc_score}
# --- Ejemplo n.º 5 (snippet separator from scraping; stray vote count removed) ---
def _train_step(model: tf.keras.Model, optimizer: tf.keras.optimizers.Adam,
                loss_metric: tf.keras.metrics.Mean, train_enc: Any,
                train_dec: Any, month_enc: Any, month_dec: Any,
                labels: Any) -> Tuple:
    """Single training step for the encoder/decoder forecasting model.

    :param model: model being trained
    :param optimizer: optimizer applying the gradients
    :param loss_metric: running-mean loss accumulator
    :param train_enc: encoder input (leading unit axis is squeezed off)
    :param train_dec: decoder input (leading unit axis is squeezed off)
    :param month_enc: encoder month input
    :param month_dec: decoder month input
    :param labels: regression targets
    :return: ({"train_loss": ...}, predictions over the last 24 positions)
    """
    with tf.GradientTape() as tape:
        train_enc = tf.squeeze(train_enc, axis=0)
        train_dec = tf.squeeze(train_dec, axis=0)
        outputs = model(inputs=[train_enc, train_dec, month_enc, month_dec])
        # Keep only the final 24 positions and drop the trailing unit axis.
        treat_outputs = tf.squeeze(input=outputs[:, -24:, :], axis=-1)
        loss = tf.keras.losses.mean_squared_error(labels, treat_outputs)
    loss_metric(loss)
    trainable = model.trainable_variables
    grads = tape.gradient(target=loss, sources=trainable)
    optimizer.apply_gradients(zip(grads, trainable))

    return {"train_loss": loss_metric.result()}, treat_outputs
# --- Ejemplo n.º 6 (snippet separator from scraping; stray vote count removed) ---
def __train_step(X: tf.Tensor, y: tf.Tensor, model: tf.keras.Model,
                 optimizer: tf.keras.optimizers.Optimizer,
                 loss_fn: tf.keras.losses.Loss, loss: tf.keras.metrics.Mean,
                 metrics: List[tf.keras.metrics.Metric]) -> None:
    """Run one optimization step, updating *loss* and *metrics* in place.

    :param X: input batch
    :param y: ground-truth batch
    :param model: model being trained (called with training=True)
    :param optimizer: optimizer applying the gradients
    :param loss_fn: loss callable applied as loss_fn(y, logits)
    :param loss: running-mean loss accumulator
    :param metrics: metric objects updated with (y, logits)
    """
    with tf.GradientTape() as tape:
        logits = model(X, training=True)
        loss_value = loss_fn(y, logits)

    grads = tape.gradient(loss_value, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))

    loss.update_state(loss_value)
    for metric in metrics:
        metric.update_state(y, logits)
# --- Ejemplo n.º 7 (snippet separator from scraping; stray vote count removed) ---
def _train_step(model: tf.keras.Model, optimizer: tf.keras.optimizers.Adam,
                batch_size: Any, loss_metric: tf.keras.metrics.Mean,
                queries: Any, targets: Any) -> Tuple:
    """Single training step with a padding-masked sequence loss.

    :param model: model being trained
    :param optimizer: optimizer applying the gradients
    :param batch_size: batch size used to normalize the masked loss sum
    :param loss_metric: running-mean loss accumulator
    :param queries: input token ids
    :param targets: target token ids (0 is treated as padding)
    :return: ({"train_loss": ...}, softmax over logits 5:7 at position 0)
    """
    with tf.GradientTape() as tape:
        outputs = model(inputs=queries)
        per_token_loss = tf.keras.losses.SparseCategoricalCrossentropy(
            reduction=tf.keras.losses.Reduction.NONE)(targets, outputs)
        # Zero out padding positions (token id 0) before averaging the loss.
        padding_mask = tf.cast(x=tf.math.not_equal(targets, 0), dtype=tf.float32)
        batch_loss = tf.reduce_sum(padding_mask * per_token_loss) / batch_size
    loss_metric(batch_loss)
    trainable = model.trainable_variables
    grads = tape.gradient(target=batch_loss, sources=trainable)
    optimizer.apply_gradients(zip(grads, trainable))

    # Class probabilities from logits 5:7 at position 0 (CLS-style slot — verify).
    probabilities = tf.nn.softmax(logits=outputs[:, 0, 5:7], axis=-1)
    return {"train_loss": loss_metric.result()}, probabilities
# --- Ejemplo n.º 8 (snippet separator from scraping; stray vote count removed) ---
def _valid_step(model: tf.keras.Model,
                dataset: tf.data.Dataset,
                progress_bar: ProgressBar,
                loss_metric: tf.keras.metrics.Mean,
                max_train_steps: Any = -1) -> Dict:
    """ Validation step for the encoder/decoder forecasting model.

    :param model: model being validated
    :param dataset: validation dataset yielding
        (train_enc, train_dec, month_enc, month_dec, labels)
    :param progress_bar: progress manager
    :param loss_metric: running-mean loss accumulator (reset at the start of the step)
    :param max_train_steps: number of validation batches; -1 consumes the whole dataset
    :return: dict with "valid_loss"
    """
    print("验证轮次")
    start_time = time.time()
    loss_metric.reset_states()

    for (batch, (train_enc, train_dec, month_enc, month_dec,
                 labels)) in enumerate(dataset.take(max_train_steps)):
        train_enc = tf.squeeze(train_enc, axis=0)
        train_dec = tf.squeeze(train_dec, axis=0)
        # training=False keeps dropout/batch-norm in inference mode during validation.
        outputs = model(inputs=[train_enc, train_dec, month_enc, month_dec],
                        training=False)
        # Keep only the final 24 positions and drop the trailing unit axis.
        treat_outputs = tf.squeeze(input=outputs[:, -24:, :], axis=-1)
        loss = tf.keras.losses.MSE(labels, treat_outputs)

        loss_metric(loss)

        progress_bar(
            current=batch + 1,
            metrics=get_dict_string(data={"valid_loss": loss_metric.result()}))

    progress_bar(
        current=progress_bar.total,
        metrics=get_dict_string(data={"valid_loss": loss_metric.result()}))

    progress_bar.done(step_time=time.time() - start_time)

    return {"valid_loss": loss_metric.result()}
# --- Ejemplo n.º 9 (snippet separator from scraping; stray vote count removed) ---
def _train_step(model: tf.keras.Model, optimizer: Any, segments: Any,
                loss_metric: tf.keras.metrics.Mean, queries: Any, targets: Any) -> Any:
    """Single training step.

    :param model: model being trained
    :param optimizer: optimizer applying the gradients
    :param segments: segment ids fed to the model alongside the queries
    :param loss_metric: running-mean loss accumulator
    :param queries: input token ids
    :param targets: training targets passed to ``loss_function``
    :return: training metrics
    """
    with tf.GradientTape() as tape:
        outputs = model(inputs=[queries, segments])
        step_loss = loss_function(pred=outputs, targets=targets)
    loss_metric(step_loss)
    trainable = model.trainable_variables
    grads = tape.gradient(target=step_loss, sources=trainable)
    optimizer.apply_gradients(zip(grads, trainable))

    return {"train_loss": loss_metric.result()}
# --- Ejemplo n.º 10 (snippet separator from scraping; stray vote count removed) ---
def _train_step(model: tf.keras.Model, optimizer: tf.keras.optimizers.Adam,
                loss_metric: tf.keras.metrics.Mean, first_queries: Any, second_queries: Any, labels: Any) -> Tuple:
    """Single training step for the sentence-pair classifier.

    :param model: model being trained
    :param optimizer: optimizer applying the gradients
    :param loss_metric: running-mean loss accumulator
    :param first_queries: first query sentence batch
    :param second_queries: second query sentence batch
    :param labels: class labels
    :return: ({"train_loss": ...}, raw model outputs)
    """
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy()
    with tf.GradientTape() as tape:
        outputs = model(inputs=[first_queries, second_queries])
        step_loss = loss_fn(labels, outputs)
    loss_metric(step_loss)
    trainable = model.trainable_variables
    grads = tape.gradient(target=step_loss, sources=trainable)
    optimizer.apply_gradients(zip(grads, trainable))

    return {"train_loss": loss_metric.result()}, outputs