Example #1
0
 def test_pit_model(self):
     """Smoke-test the PiT backbone wrapper: build each registered PiT
     variant from its manifold config/checkpoint and run a forward pass
     over a grid of input resolutions (dilated mode enabled)."""
     cfg = CN()
     cfg.MODEL = CN()
     add_pit_backbone_config(cfg)
     wrapper = BACKBONE_REGISTRY.get("pit_d2go_model_wrapper")
     # model name -> the input resolution the checkpoint was trained at
     native_resolutions = {
         "pit_ti_ours": 160,
         "pit_ti": 224,
         "pit_s_ours_v1": 256,
         "pit_s": 224,
     }
     # model name -> manifold path of the pretrained checkpoint
     checkpoints = {
         "pit_ti_ours":
         "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210515/deit_[model]pit_scalable_distilled_[bs]128_[mcfg]pit_ti_ours_.HImkjNCpJI/checkpoint_best.pth",
         "pit_ti":
         "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210515/deit_[model]pit_scalable_distilled_[bs]128_[mcfg]pit_ti_.QJeFNUfYOD/checkpoint_best.pth",
         "pit_s_ours_v1":
         "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210515/deit_[model]pit_scalable_distilled_[bs]64_[mcfg]pit_s_ours_v1_.LXdwyBDaNY/checkpoint_best.pth",
         "pit_s":
         "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210515/deit_[model]pit_scalable_distilled_[bs]128_[mcfg]pit_s_.zReQLPOuJe/checkpoint_best.pth",
     }
     for name, native in native_resolutions.items():
         print("model_name", name)
         cfg.MODEL.PIT.MODEL_CONFIG = (
             "manifold://mobile_vision_workflows/tree/workflows/wbc/deit/"
             f"model_cfgs/{name}.json"
         )
         cfg.MODEL.PIT.WEIGHTS = checkpoints[name]
         cfg.MODEL.PIT.DILATED = True
         net = wrapper(cfg, None)
         net.eval()
         sizes = (native, 192, 224, 256, 320)
         for height in sizes:
             for width in sizes:
                 x = torch.rand(1, 3, height, width)
                 y = net(x)
                 print(f"x.shape: {x.shape}, y.shape: {y.shape}")
Example #2
0
    def test_modeling_hook_cfg(self):
        """Build a model whose cfg lists modeling hooks and verify the hooks
        are applied, discoverable, and removable via unapply_modeling_hooks."""
        cfg = CfgNode()
        cfg.MODEL = CfgNode()
        cfg.MODEL.DEVICE = "cpu"
        cfg.MODEL.META_ARCHITECTURE = "TestArch"
        cfg.MODEL.MODELING_HOOKS = ["PlusOneHook", "TimesTwoHook"]
        hooked = build_model(cfg)
        # Bare TestArch maps 2 -> 4 (asserted below); with both hooks
        # applied the same input yields 10.
        self.assertEqual(hooked(2), 10)

        for attr in ("_modeling_hooks", "unapply_modeling_hooks"):
            self.assertTrue(hasattr(hooked, attr))
        # Unapplying the hooks hands back the underlying architecture.
        bare = hooked.unapply_modeling_hooks()
        self.assertIsInstance(bare, TestArch)
        self.assertEqual(bare(2), 4)
Example #3
0
 def _get_default_config():
     """Build and return the default config: input-crop settings, the model's
     minimum dimension, and inference-SDK thresholds/tracking flags."""
     cfg = CfgNode()

     # Input cropping (disabled by default).
     crop = CfgNode()
     crop.ENABLED = False
     crop.SIZE = (0.9, 0.9)
     crop.TYPE = "relative_range"
     cfg.INPUT = CfgNode()
     cfg.INPUT.CROP = crop

     cfg.MODEL = CfgNode()
     cfg.MODEL.MIN_DIM_SIZE = 360

     # Inference-SDK settings: detection score cutoff, IoU-tracker
     # threshold, and identity tracking.
     sdk = CfgNode()
     sdk.MODEL = CfgNode()
     sdk.MODEL.SCORE_THRESHOLD = 0.8
     sdk.IOU_TRACKER = CfgNode()
     sdk.IOU_TRACKER.IOU_THRESHOLD = 0.15
     sdk.ENABLE_ID_TRACKING = True
     cfg.INFERENCE_SDK = sdk

     return cfg
Example #4
0
def create_cfg_from_cli_args(args, default_cfg):
    """
    Instead of loading from defaults.py, this binary only includes the
    necessary configs built from scratch, and overrides them from args.
    There are two levels of config:
        _C: the config system used by this binary, a sub-set of the training
            config, overridden by configurable_cfg. It can also be overridden
            by args.opts for convenience.
        configurable_cfg: common configs that the user should explicitly
            specify in the args.

    Args:
        args: parsed CLI namespace; must provide ``output_dir``,
            ``datasets``, ``min_size``, ``max_size`` and ``opts``.
        default_cfg: full config to copy baseline values from.

    Returns:
        A cloned config node with the CLI overrides merged in.

    Raises:
        ValueError: if ``default_cfg.MODEL.LOAD_PROPOSALS`` is enabled,
            which the exported caffe2 model doesn't support.
    """

    _C = CN()
    _C.INPUT = default_cfg.INPUT
    _C.DATASETS = default_cfg.DATASETS
    _C.DATALOADER = default_cfg.DATALOADER
    _C.TEST = default_cfg.TEST
    # These sections may be absent from the provided default config, so
    # copy them only when present.
    if hasattr(default_cfg, "D2GO_DATA"):
        _C.D2GO_DATA = default_cfg.D2GO_DATA
    if hasattr(default_cfg, "TENSORBOARD"):
        _C.TENSORBOARD = default_cfg.TENSORBOARD

    # NOTE configs below might not be necessary, but must add to make code work
    _C.MODEL = CN()
    _C.MODEL.META_ARCHITECTURE = default_cfg.MODEL.META_ARCHITECTURE
    _C.MODEL.MASK_ON = default_cfg.MODEL.MASK_ON
    _C.MODEL.KEYPOINT_ON = default_cfg.MODEL.KEYPOINT_ON
    _C.MODEL.LOAD_PROPOSALS = default_cfg.MODEL.LOAD_PROPOSALS
    # Explicit raise instead of `assert`: asserts are stripped when Python
    # runs with -O, which would silently skip this validation.
    if _C.MODEL.LOAD_PROPOSALS is not False:
        raise ValueError("caffe2 model doesn't support LOAD_PROPOSALS")

    _C.OUTPUT_DIR = args.output_dir

    # Flat [key, value, key, value, ...] list consumed by merge_from_list.
    configurable_cfg = [
        "DATASETS.TEST",
        args.datasets,
        "INPUT.MIN_SIZE_TEST",
        args.min_size,
        "INPUT.MAX_SIZE_TEST",
        args.max_size,
    ]

    cfg = _C.clone()
    cfg.merge_from_list(configurable_cfg)
    # args.opts is merged last so ad-hoc command-line overrides win.
    cfg.merge_from_list(args.opts)

    return cfg
Example #5
0
    def test_modeling_hook_copy(self):
        """A hooked model must survive deepcopy: the original and the copy
        can each unapply their modeling hooks independently."""
        cfg = CfgNode()
        cfg.MODEL = CfgNode()
        cfg.MODEL.DEVICE = "cpu"
        cfg.MODEL.META_ARCHITECTURE = "TestArch"
        cfg.MODEL.MODELING_HOOKS = ["PlusOneHook", "TimesTwoHook"]
        hooked = build_model(cfg)
        self.assertEqual(hooked(2), 10)

        duplicate = copy.deepcopy(hooked)

        # Stripping the hooks from the original restores the bare TestArch.
        bare = hooked.unapply_modeling_hooks()
        self.assertIsInstance(bare, TestArch)
        self.assertEqual(bare(2), 4)

        # The deep copy unapplies its own hooks independently.
        bare_copy = duplicate.unapply_modeling_hooks()
        self.assertEqual(bare_copy(2), 4)
Example #6
0
    def test_deit_model(self):
        """Smoke-test the DeiT backbone wrapper: build every registered DeiT
        variant from its manifold config/checkpoint and run forward passes
        over a grid of input resolutions."""
        cfg = CN()
        cfg.MODEL = CN()
        add_deit_backbone_config(cfg)
        wrapper = BACKBONE_REGISTRY.get("deit_d2go_model_wrapper")
        # model name -> the input resolution the checkpoint was trained at
        native_resolutions = {
            "8X-7-RM_4": 170,
            "DeiT-Tiny": 224,
            "DeiT-Small": 224,
            "32X-1-RM_2": 221,
            "8X-7": 160,
            "32X-1": 256,
        }
        # model name -> manifold path of the pretrained checkpoint
        checkpoints = {
            "8X-7-RM_4":
            "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210511/deit_[model]deit_scaling_distill_[bs]128_[mcfg]8X-7-RM_4_.OIXarYpbZw/checkpoint_best.pth",
            "DeiT-Tiny":
            "manifold://mobile_vision_workflows/tree/workflows/cl114/DeiT-official-ckpt/deit_tiny_distilled_patch16_224-b40b3cf7.pth",
            "DeiT-Small":
            "manifold://mobile_vision_workflows/tree/workflows/cl114/DeiT-official-ckpt/deit_small_distilled_patch16_224-649709d9.pth",
            "32X-1-RM_2":
            "manifold://mobile_vision_workflows/tree/workflows/kyungminkim/20210511/deit_[model]deit_scaling_distill_[bs]64_[mcfg]32X-1-RM_2_.xusuFyNMdD/checkpoint_best.pth",
            "8X-7":
            "manifold://mobile_vision_workflows/tree/workflows/cl114/scaled_best/8X-7.pth",
            "32X-1":
            "manifold://mobile_vision_workflows/tree/workflows/cl114/scaled_best/32X-1.pth",
        }

        for name, native in native_resolutions.items():
            print("model_name", name)
            cfg.MODEL.DEIT.MODEL_CONFIG = (
                "manifold://mobile_vision_workflows/tree/workflows/wbc/deit/"
                f"model_cfgs/{name}.json"
            )
            cfg.MODEL.DEIT.WEIGHTS = checkpoints[name]
            net = wrapper(cfg, None)
            net.eval()
            sizes = (native, 192, 224, 256, 320)
            for height in sizes:
                for width in sizes:
                    x = torch.rand(1, 3, height, width)
                    y = net(x)
                    print(f"x.shape: {x.shape}, y.shape: {y.shape}")