def __init__(self, data_source, pipeline1, pipeline2, prefetch=False):
    self.data_source = build_datasource(data_source)
    pipeline1 = [build_from_cfg(p, PIPELINES) for p in pipeline1]
    self.pipeline1 = Compose(pipeline1)
    pipeline2 = [build_from_cfg(p, PIPELINES) for p in pipeline2]
    self.pipeline2 = Compose(pipeline2)
    self.prefetch = prefetch
def build(cfg, registry, default_args=None):
    if isinstance(cfg, list):
        modules = [
            build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg
        ]
        return nn.Sequential(*modules)
    else:
        return build_from_cfg(cfg, registry, default_args)
def inference_model(model, img):
    """Inference a single image with the model.

    Args:
        model (nn.Module): The loaded model. It must carry its config as
            ``model.cfg`` so the test pipeline can be rebuilt from it.
        img (str): Path to the image file.

    Returns:
        The result of the model's test-mode forward pass.
    """
    cfg = model.cfg
    device = next(model.parameters()).device  # model device
    # build the data pipeline
    test_pipeline = cfg.data.test.pipeline
    test_pipeline = [build_from_cfg(p, PIPELINES) for p in test_pipeline]
    test_pipeline = Compose(test_pipeline)
    # prepare data
    img = Image.open(img)
    img = img.convert('RGB')
    data = test_pipeline(img)
    data = scatter(collate([data], samples_per_gpu=1), [device])[0]
    # forward the model
    with torch.no_grad():
        result = model(data, mode='test')
    return result
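# Hedged usage sketch for inference_model above. The builder and checkpoint
# loader named here are assumptions for illustration only; substitute whatever
# constructor/loading step this project actually provides.
#
#   model = build_model(cfg.model)          # assumed model builder
#   load_checkpoint(model, 'weights.pth')   # assumed checkpoint loader
#   model.cfg = cfg                         # required by inference_model
#   model.to('cuda:0').eval()
#   result = inference_model(model, 'demo/example.jpg')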
def __init__(self, data_source, pipelines):
    pipeline = pipelines[0]
    patch_pipeline = pipelines[1]
    super(MultiScaleContrastiveDataset, self).__init__(data_source, pipeline)
    patch_pipeline = [build_from_cfg(p, PIPELINES) for p in patch_pipeline]
    self.patch_pipeline = Compose(patch_pipeline)
    img_norm_cfg = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    post_pipeline = [
        dict(type='ToPILImage'),
        dict(type='RandomCrop', size=64),
        dict(type='ToTensor'),
        dict(type='Normalize', **img_norm_cfg)
    ]
    post_pipeline = [build_from_cfg(p, PIPELINES) for p in post_pipeline]
    self.post_pipeline = Compose(post_pipeline)
def build_dataset(cfg, default_args=None):
    if isinstance(cfg, (list, tuple)):
        dataset = ConcatDataset([build_dataset(c, default_args) for c in cfg])
    elif cfg['type'] == 'RepeatDataset':
        dataset = RepeatDataset(
            build_dataset(cfg['dataset'], default_args), cfg['times'])
    else:
        dataset = build_from_cfg(cfg, DATASETS, default_args)
    return dataset
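# Illustrative config for build_dataset above (not from the original source).
# Because the function recurses, a RepeatDataset wrapping a list of plain
# dataset configs yields a ConcatDataset repeated `times` times. The dataset
# type names and fields below are assumptions for illustration only.
example_dataset_cfg = dict(
    type='RepeatDataset',
    times=2,
    dataset=[
        dict(type='ClassificationDataset', data_source=dict(type='ImageNet')),
        dict(type='ClassificationDataset', data_source=dict(type='ImageNet')),
    ])
# dataset = build_dataset(example_dataset_cfg)  # requires the DATASETS registry entries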
def __init__(self, data_source, pipeline, prefetch=False,
             use_s2=True, use_s1=True, use_RGB=False):
    self.data_source = build_datasource(data_source)
    pipeline = [build_from_cfg(p, PIPELINES) for p in pipeline]
    self.pipeline = Compose(pipeline)
    self.prefetch = prefetch
    # keep the band-selection flags on the instance so they are not silently dropped
    self.use_s2 = use_s2
    self.use_s1 = use_s1
    self.use_RGB = use_RGB
def build(cfg, registry, default_args=None): """Build a module. Args: cfg (dict, list[dict]): The config of modules, it is either a dict or a list of configs. registry (:obj:`Registry`): A registry the module belongs to. default_args (dict, optional): Default arguments to build the module. Default: None. Returns: nn.Module: A built nn module. """ if isinstance(cfg, list): modules = [ build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg ] return nn.Sequential(*modules) else: return build_from_cfg(cfg, registry, default_args)
def build(cfg, registry, default_args=None): """Build a module. Args: cfg (dict, list[dict]): The config of modules, it is either a dict or a list of configs. registry (:obj:`Registry`): A registry the module belongs to. default_args (dict, optional): Default arguments to build the module. Default: None. Returns: nn.Module: A built nn module. """ # ugly hack to __automagically__ set the neck input if cfg.get("neck") and cfg.get("neck").get("auto_channels"): del cfg["neck"]["auto_channels"] # build the backbone to obtain the number of channels bbone = build_backbone(cfg['backbone']) x = randn(1, cfg['backbone']['in_channels'], 224, 224) # 224 doesn't really matter here outp = bbone(x) if isinstance(outp, tuple): outp = outp[0] outsize = outp.shape[1] del bbone, x cfg["neck"]["in_channels"] = outsize if cfg['neck']['type'].find('NonLinear') > -1: cfg["neck"]["hid_channels"] = outsize if isinstance(cfg, list): modules = [ build_from_cfg(cfg_, registry, default_args) for cfg_ in cfg ] return nn.Sequential(*modules) else: return build_from_cfg(cfg, registry, default_args)
def __init__(self, transforms, p=0.5):
    trans = [build_from_cfg(t, PIPELINES) for t in transforms]
    self.trans = _transforms.RandomApply(trans, p=p)
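# Illustrative pipeline entry for the RandomApply wrapper above; the wrapper's
# registry name and the inner transform names are assumptions about what is
# registered in PIPELINES, shown here only to make the config shape concrete.
example_random_apply_cfg = dict(
    type='RandomAppliedTrans',  # assumed registry name of this wrapper
    transforms=[
        dict(type='ColorJitter', brightness=0.4, contrast=0.4,
             saturation=0.4, hue=0.1),
    ],
    p=0.8)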
def __init__(self, data_source, pipeline):
    self.data_source = build_datasource(data_source)
    pipeline = [build_from_cfg(p, PIPELINES) for p in pipeline]
    self.pipeline = Compose(pipeline)
def __init__(self, data_source, pipeline, format_pipeline):
    super(RelativeLocDataset, self).__init__(data_source, pipeline)
    format_pipeline = [
        build_from_cfg(p, PIPELINES) for p in format_pipeline
    ]
    self.format_pipeline = Compose(format_pipeline)
def build_datasource(cfg):
    return build_from_cfg(cfg, DATASOURCES)
def build_hook(cfg, default_args=None):
    return build_from_cfg(cfg, HOOKS, default_args)