Example #1
0
def parse_args():
    """
    Parse command-line arguments for the evaluation script.

    The dataset-specific and evaluation-specific arguments are attached
    dynamically after a first partial parse, because the dataset meta-info
    object decides which extra options exist.

    Returns
    -------
    argparse.Namespace
        Parsed script arguments.
    """
    parser = argparse.ArgumentParser(
        description="Evaluate a model for image classification/segmentation (Chainer)",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "--dataset",
        type=str,
        default="ImageNet1K",
        help=("dataset name. options are ImageNet1K, CUB200_2011, CIFAR10, CIFAR100, SVHN, VOC2012, ADE20K, Cityscapes, "
              "COCO"))
    parser.add_argument(
        "--work-dir",
        type=str,
        default=os.path.join("..", "imgclsmob_data"),
        help="path to working directory only for dataset root path preset")

    # First pass: only --dataset/--work-dir are needed to pick the meta-info.
    known_args, _ = parser.parse_known_args()
    metainfo = get_dataset_metainfo(dataset_name=known_args.dataset)
    metainfo.add_dataset_parser_arguments(
        parser=parser,
        work_dir_path=known_args.work_dir)

    add_eval_parser_arguments(parser)

    # Second pass: full parse with all dynamically-added options present.
    return parser.parse_args()
Example #2
0
def test_model(args):
    """
    Main test routine.

    Parameters:
    ----------
    args : ArgumentParser
        Main script arguments.

    Returns
    -------
    float
        Main accuracy value.
    """
    metainfo = get_dataset_metainfo(dataset_name=args.dataset)
    metainfo.update(args=args)
    # Image segmentation requires batch size 1 and cuDNN autotune disabled.
    assert (metainfo.ml_type != "imgseg") or (args.batch_size == 1)
    assert (metainfo.ml_type != "imgseg") or args.disable_cudnn_autotune

    global_config.train = False
    devices = prepare_ch_context(args.num_gpus)

    net = prepare_model(
        model_name=args.model,
        use_pretrained=args.use_pretrained,
        pretrained_model_file_path=args.resume.strip(),
        use_gpus=devices,
        net_extra_kwargs=metainfo.test_net_extra_kwargs,
        num_classes=(None if metainfo.ml_type == "hpe" else args.num_classes),
        in_channels=args.in_channels)
    # HPE models are the only ones allowed to lack a `classes` attribute.
    assert (hasattr(net, "classes") or (metainfo.ml_type == "hpe"))
    assert (hasattr(net, "in_size"))

    on_val = (args.data_subset == "val")
    data_source_fn = get_val_data_source if on_val else get_test_data_source
    test_data = data_source_fn(
        ds_metainfo=metainfo,
        batch_size=args.batch_size,
        num_workers=args.num_workers)
    if on_val:
        metric_names = metainfo.val_metric_names
        metric_kwargs = metainfo.val_metric_extra_kwargs
    else:
        metric_names = metainfo.test_metric_names
        metric_kwargs = metainfo.test_metric_extra_kwargs
    test_metric = get_composite_metric(
        metric_names=metric_names,
        metric_extra_kwargs=metric_kwargs)

    # Weights must come from somewhere: pretrained zoo or a resume checkpoint.
    assert (args.use_pretrained or args.resume.strip())
    acc_values = calc_model_accuracy(
        net=net,
        test_data=test_data,
        metric=test_metric,
        calc_weight_count=True,
        calc_flops_only=args.calc_flops_only,
        extended_log=True)
    if not acc_values:
        return None
    return acc_values[metainfo.saver_acc_ind]
Example #3
0
def main():
    """
    Main body of script.
    """
    args = parse_args()

    # Must be set before any cuDNN-backed op runs (required for imgseg below).
    if args.disable_cudnn_autotune:
        os.environ["MXNET_CUDNN_AUTOTUNE_DEFAULT"] = "0"

    _, log_file_exist = initialize_logging(
        logging_dir_path=args.save_dir,
        logging_file_name=args.logging_file_name,
        script_args=args,
        log_packages=args.log_packages,
        log_pip_packages=args.log_pip_packages)

    metainfo = get_dataset_metainfo(dataset_name=args.dataset)
    metainfo.update(args=args)
    # Image segmentation requires batch size 1 and cuDNN autotune disabled.
    assert (metainfo.ml_type != "imgseg") or (args.batch_size == 1)
    assert (metainfo.ml_type != "imgseg") or args.disable_cudnn_autotune

    global_config.train = False
    devices = prepare_ch_context(args.num_gpus)

    net = prepare_model(
        model_name=args.model,
        use_pretrained=args.use_pretrained,
        pretrained_model_file_path=args.resume.strip(),
        use_gpus=devices,
        net_extra_kwargs=metainfo.net_extra_kwargs,
        num_classes=args.num_classes,
        in_channels=args.in_channels)
    assert (hasattr(net, "classes"))
    assert (hasattr(net, "in_size"))

    on_val = (args.data_subset == "val")
    if on_val:
        data_source_fn = get_val_data_source
        test_metric = get_composite_metric(
            metric_names=metainfo.val_metric_names,
            metric_extra_kwargs=metainfo.val_metric_extra_kwargs)
    else:
        data_source_fn = get_test_data_source
        test_metric = get_composite_metric(
            metric_names=metainfo.test_metric_names,
            metric_extra_kwargs=metainfo.test_metric_extra_kwargs)
    test_data = data_source_fn(
        ds_metainfo=metainfo,
        batch_size=args.batch_size,
        num_workers=args.num_workers)

    # Weights must come from somewhere: pretrained zoo or a resume checkpoint.
    assert (args.use_pretrained or args.resume.strip())
    test(
        net=net,
        test_data=test_data,
        metric=test_metric,
        calc_weight_count=True,
        extended_log=True)
Example #4
0
def main():
    """
    Main body of the training script: set up logging, data, model, and run
    the trainer.
    """
    args = parse_args()
    args.seed = init_rand(seed=args.seed)

    _, log_file_exist = initialize_logging(
        logging_dir_path=args.save_dir,
        logging_file_name=args.logging_file_name,
        script_args=args,
        log_packages=args.log_packages,
        log_pip_packages=args.log_pip_packages)

    metainfo = get_dataset_metainfo(dataset_name=args.dataset)
    metainfo.update(args=args)

    devices = prepare_ch_context(args.num_gpus)

    net = prepare_model(
        model_name=args.model,
        use_pretrained=args.use_pretrained,
        pretrained_model_file_path=args.resume.strip(),
        use_gpus=devices,
        num_classes=args.num_classes,
        in_channels=args.in_channels)
    assert (hasattr(net, "classes"))
    assert (hasattr(net, "in_size"))

    train_data = get_train_data_source(
        ds_metainfo=metainfo,
        batch_size=args.batch_size,
        num_workers=args.num_workers)
    val_data = get_val_data_source(
        ds_metainfo=metainfo,
        batch_size=args.batch_size,
        num_workers=args.num_workers)

    trainer = prepare_trainer(
        net=net,
        optimizer_name=args.optimizer_name,
        lr=args.lr,
        momentum=args.momentum,
        num_epochs=args.num_epochs,
        train_data=train_data,
        val_data=val_data,
        logging_dir_path=args.save_dir,
        use_gpus=devices)

    trainer.run()