    def test_load(self):
        cfg = LazyConfig.load(self.root_filename)

        self.assertEqual(cfg.dir1a_dict.a, "modified")
        self.assertEqual(cfg.dir1b_dict.a, 1)
        self.assertEqual(cfg.lazyobj.x, "base_a_1")

        cfg.lazyobj.x = "new_x"
        # reload: a fresh load should not see the in-memory mutation above
        cfg = LazyConfig.load(self.root_filename)
        self.assertEqual(cfg.lazyobj.x, "base_a_1")

    def test_save_load(self):
        cfg = LazyConfig.load(self.root_filename)
        with tempfile.TemporaryDirectory(prefix="detectron2") as d:
            fname = os.path.join(d, "test_config.yaml")
            LazyConfig.save(cfg, fname)
            cfg2 = LazyConfig.load(fname)

        # saving serializes the callable target into its import string
        self.assertEqual(cfg2.lazyobj._target_, "itertools.count")
        self.assertEqual(cfg.lazyobj._target_, count)
        cfg2.lazyobj.pop("_target_")
        cfg.lazyobj.pop("_target_")
        # the rest are equal
        self.assertEqual(cfg, cfg2)

    def test_to_py(self):
        cfg = LazyConfig.load(self.root_filename)
        cfg.lazyobj.x = {
            "a": 1,
            "b": 2,
            "c": L(count)(x={
                "r": "a",
                "s": 2.4,
                "t": [1, 2, 3, "z"]
            })
        }
        cfg.list = ["a", 1, "b", 3.2]
        py_str = LazyConfig.to_py(cfg)
        expected = """cfg.dir1a_dict.a = "modified"
cfg.dir1a_dict.b = 2
cfg.dir1b_dict.a = 1
cfg.dir1b_dict.b = 2
cfg.lazyobj = itertools.count(
    x={
        "a": 1,
        "b": 2,
        "c": itertools.count(x={"r": "a", "s": 2.4, "t": [1, 2, 3, "z"]}),
    },
    y="base_a_1_from_b",
)
cfg.list = ["a", 1, "b", 3.2]
"""
        self.assertEqual(py_str, expected)
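These tests load a small tree of config files through ``self.root_filename``. For orientation only, a root config consistent with the assertions above might look roughly like the sketch below; the file layout, the ``dir1a_*``/``dir1b_*`` names, and the exact values are assumptions, not the actual detectron2 test fixtures.

# root_cfg.py -- hypothetical fixture, for illustration only
from itertools import count

from detectron2.config import LazyCall as L

# LazyConfig.load supports relative imports between config files;
# imported dicts become attribute-accessible DictConfig objects
from .dir1.dir1_a import dir1a_dict, dir1a_str  # e.g. dir1a_dict = {"a": 1, "b": 2}
from .dir1.dir1_b import dir1b_dict, dir1b_str  # e.g. dir1b_str = "base_a_1_from_b"

dir1a_dict.a = "modified"  # test_load asserts this value

# fields of a LazyCall object can be read and rewritten before instantiation
lazyobj = L(count)(x=dir1a_str, y=dir1b_str)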
Example 4
def get_config(config_path, trained: bool = False):
    """
    Returns a config object for a model in model zoo.

    Args:
        config_path (str): config file name relative to detectron2's "configs/"
            directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml"
        trained (bool): If True, will set ``MODEL.WEIGHTS`` to trained model zoo weights.
            If False, the checkpoint specified in the config file's ``MODEL.WEIGHTS`` is used
            instead; this will typically (though not always) initialize a subset of weights using
            an ImageNet pre-trained model, while randomly initializing the other weights.

    Returns:
        CfgNode or omegaconf.DictConfig: a config object
    """
    cfg_file = get_config_file(config_path)
    if cfg_file.endswith(".yaml"):
        cfg = get_cfg()
        cfg.merge_from_file(cfg_file)
        if trained:
            cfg.MODEL.WEIGHTS = get_checkpoint_url(config_path)
        return cfg
    elif cfg_file.endswith(".py"):
        cfg = LazyConfig.load(cfg_file)
        if trained:
            url = get_checkpoint_url(config_path)
            if "train" in cfg and "init_checkpoint" in cfg.train:
                cfg.train.init_checkpoint = url
            else:
                raise NotImplementedError
        return cfg
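A minimal usage sketch of the function above, reusing the example path from its docstring:

# illustrative usage; requires detectron2's configs/ directory to be available
cfg = get_config("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml", trained=True)
print(cfg.MODEL.WEIGHTS)  # set to the model zoo checkpoint URL when trained=True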
Example 5
def reload_lazy_config(cfg):
    """
    Save an object by LazyConfig.save and load it back.
    This is used to test that a config still works the same after
    serialization/deserialization.
    """
    with tempfile.TemporaryDirectory(prefix="detectron2") as d:
        fname = os.path.join(d, "d2_cfg_test.yaml")
        LazyConfig.save(cfg, fname)
        return LazyConfig.load(fname)
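A typical use in a test, sketched here with a model zoo config; the config path and the ``SGD`` field are assumptions based on detectron2's common configs:

from detectron2 import model_zoo

# round-trip a lazy config and check that a field survives serialization
cfg = model_zoo.get_config("common/optim.py")
cfg2 = reload_lazy_config(cfg)
assert cfg2.SGD.lr == cfg.SGD.lr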
Example 6
def setup(args):
    if args.config_file.endswith(".yaml"):
        cfg = get_cfg()
        cfg.merge_from_file(args.config_file)
        cfg.SOLVER.BASE_LR = 0.001  # Avoid NaNs. Not useful in this script anyway.
        cfg.merge_from_list(args.opts)
        cfg.freeze()
    else:
        cfg = LazyConfig.load(args.config_file)
        cfg = LazyConfig.apply_overrides(cfg, args.opts)
    setup_logger(distributed_rank=comm.get_rank())
    return cfg

def main(args):
    cfg = LazyConfig.load(args.config_file)
    cfg = LazyConfig.apply_overrides(cfg, args.opts)
    default_setup(cfg, args)

    if args.eval_only:
        model = instantiate(cfg.model)
        model = create_ddp_model(model)
        DetectionCheckpointer(model).load(cfg.train.init_checkpoint)
        print(do_test(cfg, model))
    else:
        do_train(args, cfg)
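Scripts like this one are usually dispatched through detectron2's launcher; a sketch of the common entry point, assuming the standard argument parser:

from detectron2.engine import default_argument_parser, launch

if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )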
Example 8
def setup(args):
    if args.config_file.endswith(".yaml"):
        cfg = get_cfg()
        cfg.merge_from_file(args.config_file)
        cfg.DATALOADER.NUM_WORKERS = 0
        cfg.merge_from_list(args.opts)
        cfg.freeze()
    else:
        cfg = LazyConfig.load(args.config_file)
        cfg = LazyConfig.apply_overrides(cfg, args.opts)
    setup_logger(name="fvcore")
    setup_logger()
    return cfg
Example 9
def create_lazy_cfg(lazy_path, max_iter=100):
    print(f"Creating {lazy_path}...\n")
    cfg = LazyConfig.load(model_zoo.get_config_file(lazy_path))
    default_setup(cfg, [])

    # cfg.model is mutable; the edits below modify it in place,
    # so no reassignment back to cfg.model is needed
    model = cfg.model
    model.backbone.bottom_up.stem.norm = "FrozenBN"
    model.backbone.bottom_up.stages.norm = "FrozenBN"
    model.backbone.norm = "FrozenBN"

    cfg.train.init_checkpoint = 'model_final_bb69de.pkl'
    cfg.model.roi_heads.batch_size_per_image = 64
    cfg.dataloader.train.total_batch_size = 8
    cfg.optimizer.lr = 0.003
    cfg.train.amp.enabled = True
    cfg.train.output_dir = '../logs/mask_rcnn'
    cfg.train.max_iter = max_iter
    cfg.model.roi_heads.num_classes = 13
    cfg.dataloader.train.dataset.names = 'modanet_instance_segmentation_train'
    cfg.dataloader.test.dataset.names = 'modanet_instance_segmentation_test'

    return cfg
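A hedged usage sketch; the base config path is illustrative, and the ModaNet datasets referenced above must already be registered with detectron2's DatasetCatalog:

from detectron2.config import instantiate

cfg = create_lazy_cfg("new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ.py", max_iter=500)
model = instantiate(cfg.model)  # builds the actual nn.Module from the lazy config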
    def test_invalid_overrides(self):
        cfg = LazyConfig.load(self.root_filename)
        with self.assertRaises(KeyError):
            LazyConfig.apply_overrides(cfg, ["lazyobj.x.xxx=123"])

    def test_overrides(self):
        cfg = LazyConfig.load(self.root_filename)
        LazyConfig.apply_overrides(cfg, ["lazyobj.x=123", 'dir1b_dict.a="123"'])
        self.assertEqual(cfg.dir1b_dict.a, "123")
        self.assertEqual(cfg.lazyobj.x, 123)
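Override strings are dotted keys with literal values, so quoting inside the string controls the resulting type. A small sketch reusing the fixtures assumed above:

cfg = LazyConfig.load(self.root_filename)
LazyConfig.apply_overrides(cfg, ["lazyobj.y=2.5"])
assert cfg.lazyobj.y == 2.5  # the unquoted literal parses as a float, not a str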