Example no. 1
0
def get_model_data(name, timings, errors):
    """Get model data for a single model."""
    # Load the model's config into the global cfg before querying it
    reset_cfg()
    load_cfg(model_zoo.get_config_file(name))
    config_url, _, model_id, _, weight_url_full = model_zoo.get_model_info(name)
    # Complexity stats (flops, params, activations) of the built model
    complexity = net.complexity(builders.get_model())
    # Inference time is measured in ms with a reference batch_size and num_gpus
    ref_batch_size, ref_num_gpus = 64, 1
    scale = ref_batch_size / cfg.TEST.BATCH_SIZE * cfg.NUM_GPUS / ref_num_gpus
    infer_ms = timings[name]["test_fw_time"] * scale * 1000
    # Training time is measured in hours for 100 epochs over the ImageNet train set
    num_iters = 1281167 / cfg.TRAIN.BATCH_SIZE * 100
    train_hours = timings[name]["train_fw_bw_time"] * num_iters / 3600
    # Assemble the per-model summary record
    model_data = {
        "config_url": "configs/" + config_url,
        "flops": round(complexity["flops"] / 1e9, 1),
        "params": round(complexity["params"] / 1e6, 1),
        "acts": round(complexity["acts"] / 1e6, 1),
        "batch_size": cfg.TRAIN.BATCH_SIZE,
        "infer_time": round(infer_ms),
        "train_time": round(train_hours, 1),
        "error": round(errors[name]["top1_err"], 1),
        "model_id": model_id,
        "weight_url": weight_url_full,
    }
    return model_data
Example no. 2
0
def main():
    """Execute operation (train, test, time, etc.)."""
    args = parse_args()
    # Load and finalize the global config before dispatching on the mode
    config.load_cfg(args.cfg)
    cfg.merge_from_list(args.opts)
    config.assert_cfg()
    cfg.freeze()
    # Modes that simply launch a trainer entry point across cfg.NUM_GPUS procs
    runners = {
        "train": trainer.train_model,
        "test": trainer.test_model,
        "time": trainer.time_model,
    }
    if args.mode == "info":
        # NOTE(review): get_model()() calls the object returned by get_model —
        # presumably get_model returns the model class; confirm against builders.
        print(builders.get_model()())
        print("complexity:", net.complexity(builders.get_model()))
    elif args.mode in runners:
        dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=runners[args.mode])
    elif args.mode == "scale":
        cfg.defrost()
        cx_orig = net.complexity(builders.get_model())
        scaler.scale_model()
        cx_scaled = net.complexity(builders.get_model())
        cfg_file = config.dump_cfg()
        print("Scaled config dumped to:", cfg_file)
        print("Original model complexity:", cx_orig)
        print("Scaled model complexity:", cx_scaled)
Example no. 3
0
def test_timing(key):
    """Measure the timing of a single model."""
    # Start from a clean config, then load the model's zoo config
    reset_cfg()
    load_cfg(model_zoo.get_config_file(key))
    # Short warmup/measurement window keeps the timing run fast
    cfg.PREC_TIME.WARMUP_ITER = 5
    cfg.PREC_TIME.NUM_ITER = 50
    # Log into a throwaway directory so results can be parsed and discarded
    cfg.OUT_DIR = tempfile.mkdtemp()
    cfg.LOG_DEST = "file"
    dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.time_model)
    log_file = os.path.join(cfg.OUT_DIR, "stdout.log")
    timing_data = logging.sort_log_data(logging.load_log_data(log_file))["iter_times"]
    shutil.rmtree(cfg.OUT_DIR)
    return timing_data
Example no. 4
0
def test_error(key):
    """Measure the error of a single model."""
    # Start from a clean config, then load the model's zoo config and weights
    reset_cfg()
    load_cfg(model_zoo.get_config_file(key))
    cfg.TEST.WEIGHTS = model_zoo.get_weights_file(key)
    # Log into a throwaway directory so results can be parsed and discarded
    cfg.OUT_DIR = tempfile.mkdtemp()
    cfg.LOG_DEST = "file"
    dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.test_model)
    log_file = os.path.join(cfg.OUT_DIR, "stdout.log")
    epoch_data = logging.sort_log_data(logging.load_log_data(log_file))["test_epoch"]
    # Keep only the final-epoch top-1/top-5 errors
    errors = {
        "top1_err": epoch_data["top1_err"][-1],
        "top5_err": epoch_data["top5_err"][-1],
    }
    shutil.rmtree(cfg.OUT_DIR)
    return errors
Example no. 5
0
def build_model(name, pretrained=False, cfg_list=()):
    """Constructs a predefined model (note: loads global config as well)."""
    # Reset then load the named model's config, applying any extra overrides
    reset_cfg()
    load_cfg(get_config_file(name))
    cfg.merge_from_list(cfg_list)
    # Build the model from the now-loaded config
    model = builders.build_model()
    if pretrained:
        # Restore the published pretrained weights into the model
        cp.load_checkpoint(get_weights_file(name), model)
    return model
Example no. 6
0
# Which search space / dataset the sample-based configs belong to, and which
# task to convert from (cls) and to (pseudo-label with a 50000-image split).
SPACE = 'darts'
DATASET = 'cifar10'
SOURCE_TASK = 'cls'
TARGET_TASK = 'psd50000'

# Names of the config files to convert (stored as a pickled numpy object array).
file_list = np.load(
    f'configs/sample_based/{SPACE}/{DATASET}/selected_files.npy',
    allow_pickle=True)
input_dir = f'configs/sample_based/{SPACE}/{DATASET}/{SOURCE_TASK}/'
output_dir = f'configs/sample_based/{SPACE}/{DATASET}/{TARGET_TASK}/'

# exist_ok=True replaces the racy exists()/makedirs() check-then-act pattern.
os.makedirs(output_dir, exist_ok=True)
for f in file_list:
    source_config = f'{input_dir}/{f}'
    target_config = f'{output_dir}/{f}'
    # NOTE(review): load_cfg is given (dir, name) separately here even though
    # source_config already joins them — presumably load_cfg joins internally;
    # confirm against the project's config module.
    config.load_cfg(input_dir, f)
    config.assert_and_infer_cfg()
    # Override the source-task options with pseudo-label (psd) training
    # hyper-parameters, then switch the task itself to 'psd'.
    config.cfg.TRAIN.PSD_LABEL_SPLIT = 50000
    config.cfg.TRAIN.PSD_UNLABEL_BATCH_SIZE = 50
    config.cfg.TRAIN.PSD_LABEL_BATCH_SIZE = 32
    config.cfg.TRAIN.PSD_THRESHOLD = 0.95
    config.cfg.LOG_PERIOD = 100
    config.cfg.TASK = 'psd'
    config.cfg.OPTIM.MAX_EPOCH = 50
    config.cfg.OPTIM.BASE_LR = 0.1
    config.cfg.OPTIM.MOMENTUM = 0.9
    config.dump_cfg_to_file(target_config)
    print(source_config, target_config)
Example no. 7
0
def test_complexity(key):
    """Measure the complexity of a single model."""
    # Load the config identified by `key` (a path relative to the pycls root)
    reset_cfg()
    load_cfg(os.path.join(_PYCLS_DIR, key))
    # Report flops/params/acts of the model built from that config
    return net.complexity(builders.get_model())