def main(): """Execute operation (train, test, time, etc.).""" args = parse_args() mode = args.mode config.load_cfg(args.cfg) cfg.merge_from_list(args.opts) config.assert_cfg() cfg.freeze() if mode == "info": print(builders.get_model()()) print("complexity:", net.complexity(builders.get_model())) elif mode == "train": dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.train_model) elif mode == "test": dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.test_model) elif mode == "time": dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer.time_model) elif mode == "scale": cfg.defrost() cx_orig = net.complexity(builders.get_model()) scaler.scale_model() cx_scaled = net.complexity(builders.get_model()) cfg_file = config.dump_cfg() print("Scaled config dumped to:", cfg_file) print("Original model complexity:", cx_orig) print("Scaled model complexity:", cx_scaled)
def main():
    """Scale a model and report the complexity before and after."""
    config.load_cfg_fom_args("Scale a model.")
    config.assert_and_infer_cfg()
    cx_orig = net.complexity(builders.get_model())
    scaler.scale_model()
    cx_scaled = net.complexity(builders.get_model())
    cfg_file = config.dump_cfg()
    print("Scaled config dumped to:", cfg_file)
    print("Original model complexity:", cx_orig)
    print("Scaled model complexity:", cx_scaled)
def get_model_data(name, timings, errors):
    """Get model data for a single model."""
    # Load model config
    reset_cfg()
    cfg.merge_from_file(model_zoo.get_config_file(name))
    config_url, _, model_id, _, weight_url_full = model_zoo.get_model_info(name)
    # Get model complexity
    cx = net.complexity(builders.get_model())
    # Inference time is measured in ms with a reference batch_size and num_gpus
    batch_size, num_gpus = 64, 1
    reference = batch_size / cfg.TEST.BATCH_SIZE * cfg.NUM_GPUS / num_gpus
    infer_time = timings[name]["test_fw_time"] * reference * 1000
    # Training time is measured in hours for 100 epochs over the ImageNet train set
    iterations = 1281167 / cfg.TRAIN.BATCH_SIZE * 100
    train_time = timings[name]["train_fw_bw_time"] * iterations / 3600
    # Gather all data about the model
    return {
        "config_url": "configs/" + config_url,
        "flops": round(cx["flops"] / 1e9, 1),
        "params": round(cx["params"] / 1e6, 1),
        "acts": round(cx["acts"] / 1e6, 1),
        "batch_size": cfg.TRAIN.BATCH_SIZE,
        "infer_time": round(infer_time),
        "train_time": round(train_time, 1),
        "error": round(errors[name]["top1_err"], 1),
        "model_id": model_id,
        "weight_url": weight_url_full,
    }
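# Hypothetical call to get_model_data(); the timings/errors dicts here are
# illustrative placeholders keyed by model name, mirroring the keys read above
# (test_fw_time, train_fw_bw_time, top1_err). The numbers are assumptions,
# not measured values.
timings = {"RegNetX-400MF": {"test_fw_time": 0.012, "train_fw_bw_time": 0.040}}
errors = {"RegNetX-400MF": {"top1_err": 27.3}}
data = get_model_data("RegNetX-400MF", timings, errors)
print(data["flops"], "GFLOPs,", data["params"], "M params,", data["error"], "% top-1 err")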
def test_complexity(self, cfg_file, cx_expected):
    """Test complexity of a single model with the specified config."""
    cfg_init = cfg.clone()
    cfg.merge_from_file(cfg_file)
    cx = net.complexity(builders.get_model())
    cfg.merge_from_other_cfg(cfg_init)
    self.assertEqual(cx_expected, cx)
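# Per the other snippets, net.complexity() returns a dict with "flops",
# "params", and "acts", so a stored expectation for the test above would look
# like the placeholder below (the config path and numbers are illustrative only).
cx_expected = {"flops": 4.0e8, "params": 5.2e6, "acts": 3.1e6}
# self.test_complexity("configs/example.yaml", cx_expected)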
def check_complexity_constraints(constraints):
    """Checks complexity constraints."""
    cx, valid = None, True
    for p, v in constraints.CX.items():
        p, min_v, max_v = p.lower(), v[0], v[1]
        if min_v != 0 or max_v != 0:
            # Compute complexity lazily, only once some bound is active
            cx = cx if cx else net.complexity(builders.get_model())
            # A bound of 0 means "unconstrained" on that side
            min_v = cx[p] if min_v == 0 else min_v
            max_v = cx[p] if max_v == 0 else max_v
            valid = valid and (min_v <= cx[p] <= max_v)
    return valid
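# check_complexity_constraints() expects constraints.CX to map complexity keys
# (FLOPS, PARAMS, ACTS) to [min, max] pairs, with 0 meaning unbounded on that
# side. A minimal sketch assuming the config nodes are yacs CfgNode objects,
# which the cfg usage elsewhere suggests:
from yacs.config import CfgNode

constraints = CfgNode()
constraints.CX = CfgNode()
constraints.CX.FLOPS = [0, 4e9]    # at most 4 GFLOPs
constraints.CX.PARAMS = [1e6, 0]   # at least 1M parameters
constraints.CX.ACTS = [0, 0]       # unconstrained
print(check_complexity_constraints(constraints))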
def dump_complexity():
    """Measure the complexity of every model in the configs/ directory."""
    complexity = {"date-created": str(datetime.datetime.now())}
    cfg_files = [os.path.join(r, f) for r, _, fs in os.walk("configs/") for f in fs]
    cfg_files = sorted(f for f in cfg_files if ".yaml" in f)
    for cfg_file in cfg_files:
        cfg_init = cfg.clone()
        cfg.merge_from_file(cfg_file)
        complexity[cfg_file] = net.complexity(builders.get_model())
        cfg.merge_from_other_cfg(cfg_init)
    with open(_COMPLEXITY_FILE, "w") as file:
        json.dump(complexity, file, sort_keys=True, indent=4)
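# The dump above maps each config path to its complexity dict, plus a
# top-level "date-created" string. A quick way to inspect the result (file
# name taken from the module constant; per-entry keys per net.complexity()):
import json

with open(_COMPLEXITY_FILE) as f:
    complexity = json.load(f)
for cfg_file, cx in sorted(complexity.items()):
    if cfg_file != "date-created":
        print(cfg_file, cx["flops"], cx["params"], cx["acts"])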
def test_complexity(key):
    """Measure the complexity of a single model."""
    reset_cfg()
    cfg_file = os.path.join(_PYCLS_DIR, key)
    cfg.merge_from_file(cfg_file)
    return net.complexity(builders.get_model())
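# Example use of test_complexity(); the config key is illustrative, and any
# YAML path relative to _PYCLS_DIR should work.
cx = test_complexity("configs/dds_baselines/regnetx/RegNetX-400MF_dds_8gpu.yaml")
print(cx)  # e.g. {"flops": ..., "params": ..., "acts": ...}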