Example #1
def test_complexity(self, cfg_file, cx_expected):
    """Test complexity of a single model with the specified config."""
    cfg_init = cfg.clone()
    cfg.merge_from_file(cfg_file)
    cx = net.complexity(builders.get_model())
    cfg.merge_from_other_cfg(cfg_init)
    self.assertEqual(cx_expected, cx)
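A minimal sketch of how this test might be fed (cfg_file, cx_expected) pairs, assuming the third-party parameterized package and placeholder case data; the actual test harness may supply its cases differently.

import unittest

from parameterized import parameterized


class TestComplexity(unittest.TestCase):
    # Placeholder (cfg_file, cx_expected) case for illustration only
    @parameterized.expand([("configs/example.yaml", {"flops": 0, "params": 0, "acts": 0})])
    def test_complexity(self, cfg_file, cx_expected):
        ...  # body identical to Example #1 above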
Example #2
def dump_complexity():
    """Measure the complexity of every model in the configs/ directory."""
    complexity = {"date-created": str(datetime.datetime.now())}
    cfg_files = [os.path.join(r, f) for r, _, fs in os.walk("configs/") for f in fs]
    cfg_files = sorted(f for f in cfg_files if ".yaml" in f)
    for cfg_file in cfg_files:
        cfg_init = cfg.clone()
        cfg.merge_from_file(cfg_file)
        complexity[cfg_file] = net.complexity(builders.get_model())
        cfg.merge_from_other_cfg(cfg_init)
    with open(_COMPLEXITY_FILE, "w") as file:
        json.dump(complexity, file, sort_keys=True, indent=4)
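A small usage sketch, assuming _COMPLEXITY_FILE points at a writable JSON path as in the function above: regenerate the file, then read one entry back as a sanity check.

import json

dump_complexity()
with open(_COMPLEXITY_FILE, "r") as file:
    complexity = json.load(file)
print(complexity["date-created"])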
Example #3
def sample_cfgs(seed):
    """Samples chunk configs and return those that are unique and valid."""
    # Fix RNG seed (every call to this function should use a unique seed)
    np.random.seed(seed)
    setup_cfg = sweep_cfg.SETUP
    cfgs = {}
    for _ in range(setup_cfg.CHUNK_SIZE):
        # Sample parameters [key, val, ...] list based on the samplers
        params = samplers.sample_parameters(setup_cfg.SAMPLERS)
        # Check if config is unique, if not continue
        key = zip(params[0::2], params[1::2])
        key = " ".join(["{} {}".format(k, v) for k, v in key])
        if key in cfgs:
            continue
        # Generate config from parameters
        reset_cfg()
        cfg.merge_from_other_cfg(setup_cfg.BASE_CFG)
        cfg.merge_from_list(params)
        # Check if config is valid, if not continue
        is_valid = samplers.check_regnet_constraints(setup_cfg.CONSTRAINTS)
        if not is_valid:
            continue
        # Special logic for dealing with model scaling (side effect is to standardize cfg)
        if cfg.MODEL.TYPE in ["anynet", "effnet", "regnet"]:
            scaler.scale_model()
        # Check if config is valid, if not continue
        is_valid = samplers.check_complexity_constraints(setup_cfg.CONSTRAINTS)
        if not is_valid:
            continue
        # Set config description to key
        cfg.DESC = key
        # Store copy of config if unique and valid
        cfgs[key] = cfg.clone()
        # Stop sampling if already reached quota
        if len(cfgs) == setup_cfg.NUM_CONFIGS:
            break
    return cfgs
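A hypothetical single-process driver for sample_cfgs, assuming one distinct seed per chunk so no two calls reuse an RNG stream; the real sweep tooling may distribute chunks across processes instead.

def sample_all_cfgs():
    """Hypothetical driver: sample chunk by chunk until the quota is met."""
    setup_cfg = sweep_cfg.SETUP
    cfgs = {}
    n_chunks = setup_cfg.NUM_SAMPLES // setup_cfg.CHUNK_SIZE
    for chunk in range(n_chunks):
        # Offset the base seed so every chunk uses a unique seed
        cfgs.update(sample_cfgs(setup_cfg.RNG_SEED + chunk))
        if len(cfgs) >= setup_cfg.NUM_CONFIGS:
            break
    return cfgs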
Example #4
_C.SETUP = CfgNode()

# Max number of unique job configs to generate
_C.SETUP.NUM_CONFIGS = 0

# Max number of attempts for generating NUM_CONFIGS valid configs
_C.SETUP.NUM_SAMPLES = 1000000

# Specifies the chunk size to use per process while sampling configs
_C.SETUP.CHUNK_SIZE = 5000

# Random seed for generating job configs
_C.SETUP.RNG_SEED = 0

# Base config for all jobs; any valid config option in core.config is valid here
_C.SETUP.BASE_CFG = cfg.clone()

# Samplers to use for generating job configs (see SAMPLERS defined toward end of file)
# SETUP.SAMPLERS should consist of a dictionary of SAMPLERS
# Each dict key should be a valid parameter in the BASE_CFG (e.g. "MODEL.DEPTH")
# Each dict val should be a valid SAMPLER that defines how to sample (e.g. INT_SAMPLER)
# See the example sweep configs for more usage information
_C.SETUP.SAMPLERS = CfgNode(new_allowed=True)
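
# Illustrative sketch only (not part of the defaults): how a sampler entry could
# be attached programmatically. The sampler field names below (TYPE, RANGE,
# QUANTIZE) are assumptions; see the SAMPLERS defined toward the end of the file
# and the example sweep configs for the real schema.
_example_sampler = CfgNode(new_allowed=True)
_example_sampler.TYPE = "int_sampler"  # assumed sampler type (an INT_SAMPLER)
_example_sampler.RANGE = [12, 28]      # assumed [LOW, HIGH] range to sample from
_example_sampler.QUANTIZE = 1          # assumed quantization step
# The dict key names the BASE_CFG parameter being sampled, e.g. "MODEL.DEPTH"
_C.SETUP.SAMPLERS["MODEL.DEPTH"] = _example_sampler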

# Constraints on generated configs
_C.SETUP.CONSTRAINTS = CfgNode()

# Complexity constraints CX on models, specified as a [LOW, HIGH] range, e.g. [0, 1.0e+6]
# If LOW == HIGH == 0 for a given complexity constraint, that constraint is not applied
# For RegNets, if flops < F (B), setting params < 3 + 5.5F and acts < 6.5*sqrt(F) (M) works well
_C.SETUP.CONSTRAINTS.CX = CfgNode()
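
# Worked example of the rule of thumb above (illustration only; the 0.4 B flop
# budget is an assumed target, not a value from this config):
#   F = 0.4                        flop budget in billions (400 MF)
#   params < 3 + 5.5 * 0.4  = 5.2    million parameters
#   acts   < 6.5 * sqrt(0.4) ~= 4.11 million activations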