import os
import time
import logging

import keras
from keras.models import load_model


# Example 1
#
# backend_agnostic_compile is a project-local helper that is not defined in
# this snippet; a hedged sketch of it is provided after prepare_trainer.
def prepare_trainer(net,
                    optimizer_name,
                    momentum,
                    lr,
                    num_gpus,
                    state_file_path=None):

    optimizer_name = optimizer_name.lower()
    if (optimizer_name == "sgd") or (optimizer_name == "nag"):
        optimizer = keras.optimizers.SGD(lr=lr,
                                         momentum=momentum,
                                         nesterov=(optimizer_name == "nag"))
    else:
        raise ValueError("Usupported optimizer: {}".format(optimizer_name))

    backend_agnostic_compile(model=net,
                             loss="categorical_crossentropy",
                             optimizer=optimizer,
                             metrics=[
                                 keras.metrics.categorical_accuracy,
                                 keras.metrics.top_k_categorical_accuracy
                             ],
                             num_gpus=num_gpus)

    # Resume from a saved model file when a checkpoint path is given and exists.
    if state_file_path and os.path.exists(state_file_path):
        net = load_model(filepath=state_file_path)
    return net
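
# --- Assumed helper (sketch) --------------------------------------------------
# backend_agnostic_compile is called by the examples in this file but never
# defined here. Below is a minimal sketch of the single-device path, assuming a
# plain Keras 2.x model.compile(); num_gpus is accepted for interface
# compatibility only. The real helper presumably also handles multi-GPU and/or
# non-TensorFlow backends (hence its name), e.g. by replicating the model with
# keras.utils.multi_gpu_model before compiling.
def backend_agnostic_compile(model, loss, optimizer, metrics, num_gpus):
    model.compile(loss=loss, optimizer=optimizer, metrics=metrics)


# --- Usage sketch (hypothetical) ----------------------------------------------
# How prepare_trainer might be called. The model constructor and the
# hyperparameter values are illustrative assumptions, not part of the original
# snippet.
def _example_prepare_trainer():
    net = keras.applications.MobileNet(weights=None)  # any Keras Model works
    return prepare_trainer(net=net,
                           optimizer_name="nag",  # SGD with Nesterov momentum
                           momentum=0.9,
                           lr=0.1,
                           num_gpus=1,
                           state_file_path=None)  # no checkpoint to resume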

# Example 2
def test(net,
         val_gen,
         val_size,
         batch_size,
         num_gpus,
         calc_weight_count=False,
         extended_log=False):
    """
    Main test routine.

    Parameters
    ----------
    net : Model
        Model to evaluate.
    val_gen : generator
        Data loader.
    val_size : int
        Size of validation subset.
    batch_size : int
        Batch size.
    num_gpus : int
        Number of used GPUs.
    calc_weight_count : bool, default False
        Whether to calculate count of weights.
    extended_log : bool, default False
        Whether to log more precise accuracy values.
    """
    # Force inference mode (disables dropout/batch-norm training behaviour).
    keras.backend.set_learning_phase(0)

    backend_agnostic_compile(
        model=net,
        loss="categorical_crossentropy",
        optimizer=keras.optimizers.SGD(
            lr=0.01,
            momentum=0.0,
            decay=0.0,
            nesterov=False),
        metrics=[
            keras.metrics.categorical_accuracy,
            keras.metrics.top_k_categorical_accuracy],
        num_gpus=num_gpus)

    # net.summary()
    tic = time.time()
    score = net.evaluate_generator(
        generator=val_gen,
        steps=(val_size // batch_size),
        verbose=True)
    # score = [loss, top-1 accuracy, top-5 accuracy], per the metrics order above.
    err_top1_val = 1.0 - score[1]
    err_top5_val = 1.0 - score[2]

    if calc_weight_count:
        weight_count = keras.utils.layer_utils.count_params(net.trainable_weights)
        logging.info("Model: {} trainable parameters".format(weight_count))
    if extended_log:
        logging.info("Test: err-top1={top1:.4f} ({top1})\terr-top5={top5:.4f} ({top5})".format(
            top1=err_top1_val, top5=err_top5_val))
    else:
        logging.info("Test: err-top1={top1:.4f}\terr-top5={top5:.4f}".format(
            top1=err_top1_val, top5=err_top5_val))
    logging.info("Time cost: {:.4f} sec".format(
        time.time() - tic))
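
# --- Usage sketch (hypothetical) ----------------------------------------------
# Wiring a validation data iterator into test(). The directory path, image
# size, and batch size below are placeholder assumptions, not part of the
# original snippet.
def _example_test(net):
    batch_size = 32
    val_gen = keras.preprocessing.image.ImageDataGenerator(
        rescale=1.0 / 255).flow_from_directory(
            directory="path/to/val",  # hypothetical validation image folder
            target_size=(224, 224),
            batch_size=batch_size,
            class_mode="categorical")
    test(net=net,
         val_gen=val_gen,
         val_size=val_gen.samples,  # number of images found on disk
         batch_size=batch_size,
         num_gpus=1,
         calc_weight_count=True,
         extended_log=False)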