def resnext(name, pretrained=False, nc=1000):
    """Constructs a ResNeXt model.

    Args:
        name: Model key; must be present in both _RESNEXT_URLS and _RESNEXT_CFGS.
        pretrained: If True, download cached weights and load them into the model.
        nc: Number of output classes.

    Returns:
        An AnyNet instance configured as the requested ResNeXt variant.
    """
    # BUG FIX: the error message previously read "ResNet-{}" even though this
    # builder looks up ResNeXt models; also use direct `in dict` membership.
    is_valid = name in _RESNEXT_URLS and name in _RESNEXT_CFGS
    assert is_valid, "ResNeXt-{} not found in the model zoo.".format(name)
    # Construct the model from the fixed ResNeXt AnyNet template
    cfg = _RESNEXT_CFGS[name]
    kwargs = {
        "stem_type": "res_stem_in",
        "stem_w": 64,
        "block_type": "res_bottleneck_block",
        "ss": [1, 2, 2, 2],
        "bms": [0.5, 0.5, 0.5, 0.5],
        "se_r": None,
        "nc": nc,
        "ds": cfg["ds"],
        "ws": [256, 512, 1024, 2048],
        "gws": [4, 8, 16, 32],
    }
    model = AnyNet(**kwargs)
    # Download and load the weights
    if pretrained:
        url = os.path.join(_URL_PREFIX, _RESNEXT_URLS[name])
        ws_path = cache_url(url, _DOWNLOAD_CACHE)
        checkpoint.load_checkpoint(ws_path, model)
    return model
def regnety(name, pretrained=False, nc=1000):
    """Build a RegNetY model, optionally loading pretrained weights."""
    known = name in _REGNETY_URLS.keys() and name in _REGNETY_CFGS.keys()
    assert known, "RegNetY-{} not found in the model zoo.".format(name)
    # Assemble the AnyNet configuration for this RegNetY variant.
    spec = _REGNETY_CFGS[name]
    net = AnyNet(
        stem_type="simple_stem_in",
        stem_w=32,
        block_type="res_bottleneck_block",
        ss=[2, 2, 2, 2],
        bms=[1.0, 1.0, 1.0, 1.0],
        se_r=0.25,
        nc=nc,
        ds=spec["ds"],
        ws=spec["ws"],
        gws=[spec["g"]] * 4,
    )
    # Fetch cached weights and load them when requested.
    if pretrained:
        weights_url = os.path.join(_URL_PREFIX, _REGNETY_URLS[name])
        weights_path = cache_url(weights_url, _DOWNLOAD_CACHE)
        checkpoint.load_checkpoint(weights_path, net)
    return net
def get_weights_file(weights_file):
    """Download weights file if stored as a URL."""
    # Only the local main process performs the actual download.
    should_download = dist.is_main_proc(local=True)
    cached_path = cache_url(weights_file, cfg.DOWNLOAD_CACHE, download=should_download)
    # Remaining processes wait here until the download has completed.
    if cfg.NUM_GPUS > 1:
        torch.distributed.barrier()
    return cached_path
def effnet(name, pretrained=False, nc=1000):
    """Build an EfficientNet model, optionally loading pretrained weights."""
    known = name in _EN_URLS.keys() and name in _EN_CFGS.keys()
    assert known, "EfficientNet-{} not found in the model zoo.".format(name)
    # Translate the zoo config entry into EffNet constructor arguments.
    spec = _EN_CFGS[name]
    net = EffNet(
        exp_rs=[1, 6, 6, 6, 6, 6, 6],
        se_r=0.25,
        nc=nc,
        ss=[1, 2, 2, 2, 1, 2, 1],
        ks=[3, 3, 5, 3, 5, 5, 3],
        stem_w=spec["sw"],
        ds=spec["ds"],
        ws=spec["ws"],
        head_w=spec["hw"],
    )
    # Fetch cached weights and load them when requested.
    if pretrained:
        weights_url = os.path.join(_URL_PREFIX, _EN_URLS[name])
        weights_path = cache_url(weights_url, _DOWNLOAD_CACHE)
        checkpoint.load_checkpoint(weights_path, net)
    return net
def cache_cfg_urls():
    """Download URLs in config, cache them, and rewrite cfg to use cached file."""
    # Resolve each configured weights URL to its local cached path, then
    # write the path back into the config in place of the URL.
    cached_train = cache_url(_C.TRAIN.WEIGHTS, _C.DOWNLOAD_CACHE)
    _C.TRAIN.WEIGHTS = cached_train
    cached_test = cache_url(_C.TEST.WEIGHTS, _C.DOWNLOAD_CACHE)
    _C.TEST.WEIGHTS = cached_test
def get_weights_file(name):
    """Return the local file with model weights (downloads if necessary)."""
    msg = "Model {} not found in the model zoo.".format(name)
    assert name in _MODEL_ZOO_WEIGHTS, msg
    # Build the remote URL and hand it to the cache, which downloads on miss.
    url = os.path.join(_URL_WEIGHTS, _MODEL_ZOO_WEIGHTS[name])
    return cache_url(url, _DOWNLOAD_CACHE, _URL_WEIGHTS)
def get_config_file(name):
    """Return the local file with model config (downloads if necessary)."""
    msg = "Model {} not found in the model zoo.".format(name)
    assert name in _MODEL_ZOO_CONFIGS, msg
    # Build the remote URL and hand it to the cache, which downloads on miss.
    url = os.path.join(_URL_CONFIGS, _MODEL_ZOO_CONFIGS[name])
    return cache_url(url, _DOWNLOAD_CACHE, _URL_CONFIGS)