Example #1
 def _evaluate_single_model(self, record):
     try:
         worker_info = {
             "step_name": record.step_name,
             "worker_id": record.worker_id
         }
         _record = dict(worker_id=record.worker_id,
                        desc=record.desc,
                        step_name=record.step_name)
         _init_record = ReportRecord().load_dict(_record)
         Report().broadcast(_init_record)
         if EvaluatorConfig.gpu_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(ClassType.GPU_EVALUATOR,
                                                  "GpuEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
         if EvaluatorConfig.davinci_mobile_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(
                 ClassType.DAVINCI_MOBILE_EVALUATOR,
                 "DavinciMobileEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
     except Exception:
         logger.error(
             "Failed to evaluate model, worker info={}".format(worker_info))
         logger.error(traceback.format_exc())
         return
Example #2
 def from_desc(cls, desc):
     """Create Model from desc."""
     desc = deepcopy(desc)
     module_groups = desc.get('modules', [])
     module_type = desc.get('type', 'Sequential')
     loss = desc.get('loss')
     modules = OrderedDict()
     for group_name in module_groups:
         module_desc = deepcopy(desc.get(group_name))
         if 'modules' in module_desc:
             module = cls.from_desc(module_desc)
         else:
             cls_name = module_desc.get('type')
             if not ClassFactory.is_exists(ClassType.NETWORK, cls_name):
                 raise ValueError("Network {} not exists.".format(cls_name))
             module = ClassFactory.get_instance(ClassType.NETWORK,
                                                module_desc)
         modules[group_name] = module
     if not modules and module_type:
         model = ClassFactory.get_instance(ClassType.NETWORK, desc)
     else:
         if ClassFactory.is_exists(SearchSpaceType.CONNECTIONS,
                                   module_type):
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                module_type)
         else:
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                'Sequential')
         model = list(modules.values())[0] if len(
             modules) == 1 else connections(modules)
     if loss:
         model.add_loss(ClassFactory.get_cls(ClassType.LOSS, loss))
     return model
Example #3
 def from_desc(cls, desc):
     """Create Model from desc."""
     module_groups = desc.get('modules')
     module_type = desc.get('type', 'Sequential')
     loss = desc.get('loss')
     modules = OrderedDict()
     for group_name in module_groups:
         module_desc = deepcopy(desc.get(group_name))
         if 'modules' in module_desc:
             module = cls.from_desc(module_desc)
         else:
             cls_name = module_desc.get('type')
             if not ClassFactory.is_exists(ClassType.SEARCH_SPACE,
                                           cls_name):
                 return None
             module = ClassFactory.get_instance(ClassType.SEARCH_SPACE,
                                                module_desc)
         modules[group_name] = module
     if ClassFactory.is_exists(SearchSpaceType.CONNECTIONS, module_type):
         connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                            module_type)
     else:
         connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                            'Sequential')
     model = list(
         modules.values())[0] if len(modules) == 1 else connections(modules)
     if loss:
         model.add_loss(ClassFactory.get_cls(ClassType.LOSS, loss))
     return model
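Both from_desc variants above (Examples #2 and #3) consume a description dict whose 'modules' list names the module groups; each group entry either names a registered class via its 'type' key or nests another 'modules' block, which triggers the recursive call. A rough sketch of such a dict, with placeholder class names that would need to be registered with ClassFactory:

    # Illustrative description dict for from_desc(); the 'type' values below are
    # hypothetical and stand in for whatever networks are actually registered.
    desc = {
        "type": "Sequential",                 # connection type, defaults to Sequential
        "modules": ["backbone", "head"],      # group names resolved below
        "backbone": {"type": "ResNetBackbone", "depth": 18},
        "head": {                             # nested group, handled by the recursive call
            "type": "Sequential",
            "modules": ["fc"],
            "fc": {"type": "LinearHead", "num_classes": 10},
        },
        "loss": "CrossEntropyLoss",           # optional, resolved via ClassType.LOSS
    }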
Example #4
 def __init__(self, generator, discriminator, latent_dim, gen_bs):
     super(GAN, self).__init__()
     self.generator = ClassFactory.get_cls(
         ClassType.NETWORK, generator.pop('type'))(**generator)
     self.latent_dim = latent_dim
     self.gen_bs = gen_bs
     self.discriminator = ClassFactory.get_cls(
         ClassType.NETWORK, discriminator.pop('type'))(**discriminator)
Example #5
 def __new__(cls, *args, **kwargs):
     """Create search algorithm instance by ClassFactory."""
     if cls.__name__ != 'Codec':
         return super().__new__(cls)
     if kwargs.get('type'):
         t_cls = ClassFactory.get_cls(ClassType.CODEC, kwargs.pop('type'))
     else:
         t_cls = ClassFactory.get_cls(ClassType.CODEC)
     return super().__new__(t_cls)
Example #6
 def __new__(cls, *args, **kwargs):
     """Create a subclass instance of dataset."""
     if Dataset in cls.__bases__:
         return super().__new__(cls)
     if kwargs.get('type'):
         t_cls = ClassFactory.get_cls(ClassType.DATASET, kwargs.pop('type'))
     else:
         t_cls = ClassFactory.get_cls(ClassType.DATASET)
     return super().__new__(t_cls)
Example #7
    def __new__(cls, *args, **kwargs):
        """Create search algorithm instance by ClassFactory."""
        if cls.__name__ != 'SearchAlgorithm':
            return super().__new__(cls)
        if kwargs.get('type'):
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, kwargs.pop('type'))
        else:
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, PipeStepConfig.search_algorithm.type)

        return super().__new__(t_cls)
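Examples #5 to #7 share one dispatch idiom: when the base class itself is called, __new__ looks up a registered subclass by its 'type' and returns an instance of that subclass, while concrete subclasses (detected by class name, or by base class as in Example #6) are constructed directly. A minimal standalone sketch of the idiom, using a toy registry in place of ClassFactory:

    # Toy reproduction of the __new__ dispatch pattern; REGISTRY and the class
    # names below are illustrative and not part of the library.
    REGISTRY = {}

    def register(name):
        def deco(cls):
            REGISTRY[name] = cls
            return cls
        return deco

    class Codec:
        def __new__(cls, *args, **kwargs):
            if cls.__name__ != 'Codec':          # concrete subclass: construct directly
                return super().__new__(cls)
            t_cls = REGISTRY[kwargs['type']]     # base-class call: dispatch by 'type'
            return super().__new__(t_cls)

        def __init__(self, **desc):
            self.desc = desc                     # __init__ still receives the original kwargs

    @register('DartsCodec')
    class DartsCodec(Codec):
        pass

    assert type(Codec(type='DartsCodec')) is DartsCodec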
Example #8
 def __init__(self, type):
     if type not in ['restrict', 'target']:
         raise ValueError("Input type must be 'restrict' or 'target'.")
     self.filter_types, self.terminate_types = [], []
     self.filter_compares = dict()
     self.terminate_compares = dict()
     self.filters_to_params = dict()
     self._init_compare_types(type)
     for filter in self.filter_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, filter)
         self.filter_compares[filter] = t_cls()
     for terminate in self.terminate_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, terminate)
         self.terminate_compares[terminate] = t_cls()
     self.filter_rules = copy.deepcopy(General.quota.filter_rules)
Example #9
 def _init_dataloader(self, mode, loader=None):
     """Init dataloader."""
     if loader is not None:
         return loader
     if mode == "train" and self.hps is not None and self.hps.get("dataset") is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         dataset = dataset_cls(mode=mode, hps=self.hps.get("dataset"))
     else:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         dataset = dataset_cls(mode=mode)
     if self.distributed and mode == "train":
         dataset.set_distributed(self._world_size, self._rank_id)
     # adapt the dataset to specific backend
     dataloader = Adapter(dataset).loader
     return dataloader
Example #10
    def __init__(self,
                 num_classes,
                 backbone='SerialBackbone',
                 neck='TorchFPN',
                 network_name='torchvision_FasterRCNN',
                 weight_file=None,
                 **kwargs):
        """Create layers.

        :param num_classes: number of classes
        :type num_classes: int
        """
        super(FasterRCNN, self).__init__()
        self.weight_file = weight_file
        backbone_cls = self.define_props('backbone',
                                         backbone,
                                         dtype=ClassType.NETWORK)
        backbone_cls.freeze()
        if getattr(backbone_cls, 'out_channels') and 'in_channels' not in neck:
            neck_in_channel = backbone_cls.out_channels
            params = {"in_channels": neck_in_channel}
            neck_cls = self.define_props('neck',
                                         neck,
                                         dtype=ClassType.NETWORK,
                                         params=params)
        else:
            neck_cls = self.define_props('neck', neck, dtype=ClassType.NETWORK)
        backbone_neck = Sequential(backbone_cls, neck_cls)
        backbone_neck.freeze()
        self.model = ClassFactory.get_cls(ClassType.NETWORK,
                                          network_name)(backbone_neck,
                                                        num_classes, **kwargs)
Example #11
def get_module_class(cls_name):
    """Get Search Space by class name.

    :param cls_name: class name
    :return: Search Space cls
    """
    return ClassFactory.get_cls(ClassType.SEARCH_SPACE, cls_name)
Example #12
 def __new__(cls, *args, **kwargs):
     """Create optimizer or multi-optimizer class."""
     if isinstance(cls.config.to_dict(), list):
         t_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                      'MultiOptimizers')
         return super().__new__(t_cls)
     return super().__new__(cls)
Example #13
 def __init__(self, metric_cfg=None):
     """Init Metrics."""
     self.mdict = {}
     metric_config = self.config.to_dict() if not metric_cfg else deepcopy(metric_cfg)
     if not isinstance(metric_config, list):
         metric_config = [metric_config]
     for metric_item in metric_config:
         metric_name = metric_item.pop('type')
         metric_class = ClassFactory.get_cls(ClassType.METRIC, metric_name)
         if isfunction(metric_class):
             metric_class = partial(metric_class, **metric_item.get("params", {}))
         else:
             metric_class = metric_class(**metric_item.get("params", {}))
         self.mdict[metric_name] = metric_class
     self.mdict = Config(self.mdict)
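For reference, the metric configuration iterated over in the Metrics constructor above is a single dict or a list of dicts, each carrying a 'type' plus optional 'params'. A hypothetical shape, where the metric names and parameters are placeholders for whatever is registered under ClassType.METRIC:

    # Hypothetical metric_cfg passed as Metrics(metric_cfg=...).
    metric_cfg = [
        {"type": "accuracy", "params": {"topk": (1, 5)}},
        {"type": "psnr", "params": {}},
    ]
    # After __init__, self.mdict maps each metric name to the constructed metric
    # object, or to a functools.partial when the registered entry is a plain function.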
Example #14
 def __init__(self, aux_weight, loss_base):
     """Init MixAuxiliaryLoss."""
     super(MixAuxiliaryLoss, self).__init__()
     self.aux_weight = aux_weight
     loss_base_cp = loss_base.copy()
     loss_base_name = loss_base_cp.pop('type')
     self.loss_fn = ClassFactory.get_cls('trainer.loss', loss_base_name)(**loss_base_cp['params'])
Example #15
    def _init_transforms(self):
        """Initialize transforms method.

        :return: a list of object
        :rtype: list
        """
        if "transforms" in self.args.keys():
            transforms = list()
            if not isinstance(self.args.transforms, list):
                self.args.transforms = [self.args.transforms]
            for i in range(len(self.args.transforms)):
                transform_name = self.args.transforms[i].pop("type")
                kwargs = self.args.transforms[i]
                if ClassFactory.is_exists(ClassType.TRANSFORM, transform_name):
                    transforms.append(
                        ClassFactory.get_cls(ClassType.TRANSFORM,
                                             transform_name)(**kwargs))
                else:
                    transforms.append(
                        getattr(
                            importlib.import_module('torchvision.transforms'),
                            transform_name)(**kwargs))
            return transforms
        else:
            return list()
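Each transform entry handled above is a dict whose 'type' names either a class registered under ClassType.TRANSFORM or, failing that, a class in torchvision.transforms; the remaining keys are passed through as keyword arguments. An illustrative 'transforms' section (values chosen only for illustration):

    # Illustrative args.transforms value; these names fall back to
    # torchvision.transforms classes if no TRANSFORM of that name is registered.
    transforms_cfg = [
        {"type": "RandomCrop", "size": 32, "padding": 4},
        {"type": "ToTensor"},
        {"type": "Normalize", "mean": [0.485, 0.456, 0.406], "std": [0.229, 0.224, 0.225]},
    ]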
Example #16
    def decorator(cls):
        """Provide input param to decorator.

        :param cls: the decorated class
        :return: wrapper function
        """
        # TODO: need to import the package
        if isinstance(class_name, str):
            need_validate_cls = ClassFactory.get_cls(ClassType.CONFIG,
                                                     class_name)
        else:
            need_validate_cls = class_name

        @wraps(cls)
        def wrapper(*args, **kwargs):
            """Make function as a wrapper."""
            valid_attrs = {
                key: item
                for key, item in cls.__dict__.items()
                if not key.startswith('_')
            }
            for attr_name, rules in valid_attrs.items():
                attr_value = getattr(need_validate_cls, attr_name)
                if isinstance(rules, list) or isinstance(rules, tuple):
                    for _rule in rules:
                        _rule(attr_value)
                else:
                    rules(attr_value)

            return cls(*args, **kwargs)

        return wrapper
Example #17
 def _create_loss(self):
     """Create loss class."""
     if self.loss is None:
         return
     loss_cls = ClassFactory.get_cls(ClassType.LOSS, self.loss)
     desc = self.desc.get('loss')
     loss_obj = loss_cls(**desc) if desc is not None else loss_cls()
     self.add_loss(loss_obj)
Example #18
 def _init_dataloader(self, mode, loader=None):
     """Init dataloader."""
     if loader is not None:
         return loader
     dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
     dataset = dataset_cls(mode=mode)
     dataloader = Adapter(dataset).loader
     return dataloader
Example #19
 def __init__(self):
     super(LatencyFilter, self).__init__()
     self.max_latency = self.restrict_config.latency
     if self.max_latency is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         self.dataset = dataset_cls()
         from zeus.datasets import Adapter
         self.dataloader = Adapter(self.dataset).loader
Example #20
 def _dispatch_trainer(self, samples):
     for (id, desc, hps) in samples:
         cls_trainer = ClassFactory.get_cls(ClassType.TRAINER)
         TrainerConfig.from_dict(self.user_trainer_config)
         trainer = cls_trainer(id=id, model_desc=desc, hps=hps)
         evaluator = self._get_evaluator(trainer)
         logging.info("submit trainer, id={}".format(id))
         ReportServer.add_watched_var(General.step_name, trainer.worker_id)
         self.master.run(trainer, evaluator)
Example #21
 def _add_pretrained_hook(self):
     if self.pretrained_hook is None:
         return
     if isinstance(self.pretrained_hook, str):
         hook = ClassFactory.get_cls(ClassType.PRETRAINED_HOOK,
                                     self.pretrained_hook)
     else:
         hook = self.pretrained_hook
     self._register_load_state_dict_pre_hook(hook)
Example #22
 def __init__(self):
     """Initialize."""
     # register pytorch/tensorflow optim as default
     raw_config = self.config.to_json()
     raw_config.type = self.config.type
     map_dict = OptimMappingDict
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict, map_dict.params_mapping_dict).backend_mapping(raw_config)
     self.optim_cls = ClassFactory.get_cls(ClassType.OPTIMIZER, self.map_config.type)
Example #23
 def __init__(self):
     """Initialize."""
     # register pytorch loss as default
     raw_config = self.config.to_json()
     raw_config.type = self.config.type
     map_dict = LossMappingDict()
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict, map_dict.params_mapping_dict).backend_mapping(raw_config)
     self._cls = ClassFactory.get_cls(ClassType.LOSS, self.map_config.type)
Example #24
 def _get_evaluator(self, worker_id):
     if not PipeStepConfig.evaluator_enable:
         return None
     cls_evaluator = ClassFactory.get_cls('evaluator', "Evaluator")
     evaluator = cls_evaluator({
         "step_name": self.task.step_name,
         "worker_id": worker_id
     })
     return evaluator
Example #25
 def value(self):
     """Get values."""
     value = self._values.get(self.key)
     if self.d_type == ClassType.NETWORK:
         if isinstance(value, str):
             cls = ClassFactory.get_cls(ClassType.NETWORK, value)
             value = cls() if self.params is None else cls(**self.params)
         else:
             value = ClassFactory.get_instance(ClassType.NETWORK, value)
     return value
Example #26
 def do(self):
     """Do the main task in this pipe step."""
     logger.info("SpNasPipeStep started")
     while not self.generator.is_completed:
         id, spnas_sample = self.generator.search_alg.search()
         cls_trainer = ClassFactory.get_cls('trainer')
         trainer = cls_trainer(spnas_sample=spnas_sample, id=id)
         logging.info("submit trainer(id={})!".format(id))
         self.master.run(trainer)
     self.master.join()
Example #27
    def _use_evaluator(self):
        """Check if use evaluator and get the evaluators.

        :return: if we used evaluator, and Evaluator classes
        :rtype: bool, (Evaluator, HostEvaluator, DloopEvaluator)
        """
        use_evaluator = False
        cls_evaluator_set = []
        if EvaluatorConfig.host_evaluator_enable:
            cls_host_evaluator = ClassFactory.get_cls(ClassType.HOST_EVALUATOR, "HostEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_host_evaluator)
        if EvaluatorConfig.device_evaluator_enable:
            cls_device_evaluator = ClassFactory.get_cls(
                ClassType.DEVICE_EVALUATOR, "DeviceEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_device_evaluator)
        # TODO HAVA_D_EVALUATOR
        return use_evaluator, cls_evaluator_set
Example #28
def create_module(model):
    """Create search space from model or desc."""
    if isinstance(model, Module):
        return model.__class__.__name__, model
    elif isinstance(model, dict):
        module_type = model.get('type')
        module_param = deepcopy(model)
        module_param.pop('type')
        module = ClassFactory.get_cls(ClassType.SEARCH_SPACE, module_type)
        return module_type, module(**module_param)
Example #29
 def _dispatch_trainer(self, samples):
     for (id_ele, desc) in samples:
         hps = deepcopy(desc)
         cls_trainer = ClassFactory.get_cls(ClassType.TRAINER)
         trainer = cls_trainer(id=id_ele, model_desc=desc, hps=hps)
         evaluator = self._get_evaluator(trainer)
         logging.info("submit trainer, id={}".format(id_ele))
         self.master.run(trainer, evaluator)
     if isinstance(samples, list) and len(samples) > 1:
         self.master.join()
Example #30
    def _use_evaluator(self):
        """Check if use evaluator and get the evaluators.

        :return: if we used evaluator, and Evaluator classes
        :rtype: bool, (Evaluator, GpuEvaluator, DloopEvaluator)
        """
        use_evaluator = False
        cls_evaluator_set = []
        if EvaluatorConfig.gpu_evaluator_enable:
            cls_gpu_evaluator = ClassFactory.get_cls(ClassType.GPU_EVALUATOR,
                                                     "GpuEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_gpu_evaluator)
        if EvaluatorConfig.davinci_mobile_evaluator_enable:
            cls_davinci_mobile_evaluator = ClassFactory.get_cls(
                ClassType.DAVINCI_MOBILE_EVALUATOR, "DavinciMobileEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_davinci_mobile_evaluator)
        # TODO HAVA_D_EVALUATOR
        return use_evaluator, cls_evaluator_set