def _evaluate_single_model(self, record):
    """Run the enabled host/device evaluators on a single model record."""
    worker_info = {
        "step_name": record.step_name,
        "worker_id": record.worker_id
    }
    try:
        _record = dict(worker_id=record.worker_id,
                       desc=record.desc,
                       step_name=record.step_name)
        ReportClient().update(**_record)
        if EvaluatorConfig.host_evaluator_enable:
            cls_evaluator = ClassFactory.get_cls(ClassType.HOST_EVALUATOR,
                                                 "HostEvaluator")
            evaluator = cls_evaluator(worker_info=worker_info,
                                      model_desc=record.desc,
                                      weights_file=record.weights_file)
            self.master.run(evaluator)
        if EvaluatorConfig.device_evaluator_enable:
            cls_evaluator = ClassFactory.get_cls(
                ClassType.DEVICE_EVALUATOR, "DeviceEvaluator")
            evaluator = cls_evaluator(worker_info=worker_info,
                                      model_desc=record.desc,
                                      weights_file=record.weights_file)
            self.master.run(evaluator)
    except Exception:
        # worker_info is built before the try block, so it is always bound here.
        logger.error(
            "Failed to evaluate model, worker info={}".format(worker_info))
        logger.error(traceback.format_exc())
    def __init__(self, generator, discriminator, latent_dim, gen_bs):
        """Build the generator and discriminator networks from their descriptions."""
        super(GAN, self).__init__()
        # 'type' names the registered NETWORK class; the remaining keys are its kwargs.
        self.generator = ClassFactory.get_cls(
            ClassType.NETWORK, generator.pop('type'))(**generator)
        self.latent_dim = latent_dim
        self.gen_bs = gen_bs
        self.discriminator = ClassFactory.get_cls(
            ClassType.NETWORK, discriminator.pop('type'))(**discriminator)
Example #3
    def __new__(cls, *args, **kwargs):
        """Create a codec instance by ClassFactory."""
        if cls.__name__ != 'Codec':
            return super().__new__(cls)
        if kwargs.get('type'):
            t_cls = ClassFactory.get_cls(ClassType.CODEC, kwargs.pop('type'))
        else:
            t_cls = ClassFactory.get_cls(ClassType.CODEC)
        return super().__new__(t_cls)
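
A minimal usage sketch of this factory dispatch. Everything below is hypothetical: it assumes a codec subclass named 'PruneCodec' (as in Example #25) is registered under ClassType.CODEC and that a search_space object is at hand.

# Hypothetical usage; 'PruneCodec' registration and search_space are assumptions.
codec = Codec(type='PruneCodec', search_space=search_space)
# __new__ resolves 'PruneCodec' from the registry,
# so codec is a PruneCodec instance, not a bare Codec.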
Example #4
    def __new__(cls, *args, **kwargs):
        """Create search algorithm instance by ClassFactory."""
        if cls.__name__ != 'SearchAlgorithm':
            return super().__new__(cls)
        if kwargs.get('type'):
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, kwargs.pop('type'))
        else:
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, PipeStepConfig.search_algorithm.type)

        return super().__new__(t_cls)
Example #5
    def __init__(self, type):
        if type not in ['restrict', 'target']:
            raise ValueError("Input type must be 'restrict' or 'target'.")
        self.filter_types, self.terminate_types = [], []
        self.filter_compares = dict()
        self.terminate_compares = dict()
        self.filters_to_params = dict()
        self._init_compare_types(type)
        for filter_type in self.filter_types:
            t_cls = ClassFactory.get_cls(ClassType.QUOTA, filter_type)
            self.filter_compares[filter_type] = t_cls()
        for terminate_type in self.terminate_types:
            t_cls = ClassFactory.get_cls(ClassType.QUOTA, terminate_type)
            self.terminate_compares[terminate_type] = t_cls()
        self.filter_rules = copy.deepcopy(General.quota.filter_rules)
Example #6
def get_module_class(cls_name):
    """Get Search Space by class name.

    :param cls_name: class name
    :return: Search Space cls
    """
    return ClassFactory.get_cls(ClassType.NETWORK, cls_name)
Example #7
    def __new__(cls, *args, **kwargs):
        """Create optimizer or multi-optimizer class."""
        # to_dict() must be called; the bound method itself is never a list.
        if isinstance(cls.config.to_dict(), list):
            t_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                         'MultiOptimizers')
            return super().__new__(t_cls)
        return super().__new__(cls)
Example #8
    def __init__(self, backbone_name, layer_names=None, **kwargs):
        backbone = ClassFactory.get_cls(ClassType.NETWORK, backbone_name)
        backbone = backbone(**kwargs) if kwargs else backbone()
        if hasattr(backbone, "layers_name"):
            layer_names = backbone.layers_name()

        super(BackboneGetter, self).__init__(backbone, layer_names)
Example #9
    def decorator(cls):
        """Wrap a rules class and validate the target config against it.

        :param cls: class whose public attributes hold the validation rules
        :return: decorator wrapper
        """
        # TODO: the required package needs to be imported here
        if isinstance(class_name, str):
            need_validate_cls = ClassFactory.get_cls(ClassType.CONFIG, class_name)
        else:
            need_validate_cls = class_name

        @wraps(cls)
        def wrapper(*args, **kwargs):
            """Make function as a wrapper."""
            valid_attrs = {key: item for key, item in cls.__dict__.items() if not key.startswith('_')}
            for attr_name, rules in valid_attrs.items():
                attr_value = getattr(need_validate_cls, attr_name)
                if isinstance(rules, (list, tuple)):
                    for _rule in rules:
                        _rule(attr_value)
                else:
                    rules(attr_value)

            return cls(*args, **kwargs)

        return wrapper
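
A sketch of how this validation pattern might be used. The outer decorator name, the rule function, and the attribute names are all assumptions, not from the source; the wrapper only requires that each public attribute of the rules class be a callable (or list/tuple of callables) applied to the same-named attribute of the resolved config class.

# Hypothetical sketch; decorator name, rules, and config fields are assumptions.
def positive(value):
    if value <= 0:
        raise ValueError("value must be positive")

@valid_rule('TrainerConfig')   # 'TrainerConfig' resolved via ClassFactory
class TrainerRules:
    epochs = positive          # one rule for TrainerConfig.epochs
    lr = (positive,)           # a tuple applies every rule in it

TrainerRules()                 # instantiating triggers the checks in wrapper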
Example #10
    def from_module(cls, module):
        """Rebuild a module with its registered NETWORK class, if one exists."""
        name = module.__class__.__name__
        if ClassFactory.is_exists(ClassType.NETWORK, name):
            module_cls = ClassFactory.get_cls(ClassType.NETWORK, name)
            if hasattr(module_cls, "from_module"):
                return module_cls.from_module(module)
        return module
Example #11
    def _init_dataloader(self, mode, loader=None):
        """Init dataloader."""
        if loader is not None:
            return loader
        dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
        dataset = dataset_cls(mode=mode)
        dataloader = Adapter(dataset).loader
        return dataloader
Example #12
    def __init__(self, metric_cfg=None):
        """Init Metrics."""
        self.mdict = {}
        metric_config = self.config.to_dict()
        if not isinstance(metric_config, list):
            metric_config = [metric_config]
        for metric_item in metric_config:
            metric_name = metric_item.pop('type')
            metric_class = ClassFactory.get_cls(ClassType.METRIC, metric_name)
            if isfunction(metric_class):
                metric_class = partial(metric_class, **metric_item.get("params", {}))
            else:
                metric_class = metric_class(**metric_item.get("params", {}))
            self.mdict[metric_name] = metric_class
        self.mdict = Config(self.mdict)
        self.metric_results = dict()
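
An illustrative metric configuration that this constructor could parse: a single dict or a list of dicts, each naming a registered METRIC in 'type' with optional constructor arguments under 'params'. The metric names below are hypothetical.

# Hypothetical config; metric names and params are illustrative only.
metric_config = [
    {'type': 'accuracy'},
    {'type': 'SRMetric', 'params': {'scale': 2}},
]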
Example #13
    def __init__(self):
        super(LatencyFilter, self).__init__()
        self.max_latency = self.restrict_config.latency
        if self.max_latency is not None:
            dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
            self.dataset = dataset_cls()
            from vega.datasets import Adapter
            self.dataloader = Adapter(self.dataset).loader
Example #14
    def __init__(self, aux_weight, loss_base):
        """Init MixAuxiliaryLoss."""
        super(MixAuxiliaryLoss, self).__init__()
        self.aux_weight = aux_weight
        loss_base_cp = loss_base.copy()
        loss_base_name = loss_base_cp.pop('type')
        self.loss_fn = ClassFactory.get_cls(
            'trainer.loss', loss_base_name)(**loss_base_cp['params'])
Example #15
    def __init__(self, aux_weight, loss_base):
        """Init MixAuxiliaryLoss."""
        self.aux_weight = aux_weight
        loss_base_cp = loss_base.copy()
        loss_base_name = loss_base_cp.pop('type')
        if ClassFactory.is_exists('trainer.loss', loss_base_name):
            loss_class = ClassFactory.get_cls('trainer.loss', loss_base_name)
        else:
            # fall back to a TensorFlow built-in loss of the same name
            loss_class = getattr(importlib.import_module('tensorflow.losses'), loss_base_name)
        self.loss_fn = loss_class(**loss_base_cp['params'])
Example #16
def create_module(model):
    """Create search space from model or desc."""
    if isinstance(model, Module):
        return model.__class__.__name__, model
    elif isinstance(model, dict):
        module_type = model.get('type')
        module_param = deepcopy(model)
        module_param.pop('type')
        module = ClassFactory.get_cls(ClassType.NETWORK, module_type)
        return module_type, module(**module_param)
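
A minimal usage sketch, assuming a network class named 'ResNet' is registered under ClassType.NETWORK; the dict form carries the registered class name in 'type' and the constructor arguments alongside it.

# Hypothetical usage; 'ResNet' and its 'depth' argument are assumptions.
name, module = create_module({'type': 'ResNet', 'depth': 18})
# name == 'ResNet'; module == ResNet(depth=18)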
Example #17
    def __init__(self):
        """Initialize."""
        # register pytorch loss as default
        raw_config = self.config.to_dict()
        raw_config.type = self.config.type
        map_dict = LossMappingDict()
        self.map_config = ConfigBackendMapping(
            map_dict.type_mapping_dict,
            map_dict.params_mapping_dict).backend_mapping(raw_config)
        self._cls = ClassFactory.get_cls(ClassType.LOSS, self.map_config.type)
Example #18
    def _use_evaluator(self):
        """Check whether any evaluator is enabled and collect the evaluator classes.

        :return: whether an evaluator is used, and the enabled evaluator classes
        :rtype: bool, list
        """
        use_evaluator = False
        cls_evaluator_set = []
        if EvaluatorConfig.host_evaluator_enable:
            cls_host_evaluator = ClassFactory.get_cls(ClassType.HOST_EVALUATOR,
                                                      "HostEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_host_evaluator)
        if EvaluatorConfig.device_evaluator_enable:
            cls_device_evaluator = ClassFactory.get_cls(
                ClassType.DEVICE_EVALUATOR, "DeviceEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_device_evaluator)
        # TODO HAVA_D_EVALUATOR
        return use_evaluator, cls_evaluator_set
Example #19
    def __init__(self, optimizer, cfg):
        """Initialize."""
        self.cfg = cfg
        self.optimizer = optimizer
        for item in cfg['modules']:
            sub_optimizer_name = cfg[item]['optimizer']
            tem_lr_scheduler = ClassFactory.get_cls(ClassType.LR_SCHEDULER,
                                                    cfg[item].type)
            sub_optimizer = getattr(optimizer, sub_optimizer_name)
            params = cfg[item].get("params", {})
            setattr(self, item, tem_lr_scheduler(sub_optimizer, **params))
Example #20
    def _create_loss(self):
        """Create loss class."""
        if self.loss is None:
            return
        if isinstance(self.loss, str):
            loss_cls = ClassFactory.get_cls(ClassType.LOSS, self.loss)
            desc = self.desc.get('loss')
            loss_obj = loss_cls(**desc) if desc is not None else loss_cls()
        else:
            loss_obj = self.loss
        self.add_loss(loss_obj)
Example #21
    def from_desc(cls, desc):
        """Create Model from desc."""
        desc = deepcopy(desc)
        module_groups = desc.get('modules', [])
        module_type = desc.get('type', 'Sequential')
        loss = desc.get('loss')
        if '_arch_params' in desc:
            arch_params = desc.pop('_arch_params')
            arch_type = list(arch_params.keys())[0]
            ArchParams._arch_type = arch_type
            ArchParams.update(arch_params.get(arch_type))
        modules = OrderedDict()
        for group_name in module_groups:
            module_desc = deepcopy(desc.get(group_name))
            if not module_desc:
                continue
            if 'modules' in module_desc:
                module = cls.from_desc(module_desc)
            else:
                cls_name = module_desc.get('type')
                if not ClassFactory.is_exists(ClassType.NETWORK, cls_name):
                    raise ValueError("Network {} does not exist.".format(cls_name))
                module = ClassFactory.get_instance(ClassType.NETWORK,
                                                   module_desc)
            modules[group_name] = module
            module.name = str(group_name)
        if not module_groups and module_type:
            model = ClassFactory.get_instance(ClassType.NETWORK, desc)
        else:
            if ClassFactory.is_exists(SearchSpaceType.CONNECTIONS,
                                      module_type):
                connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                   module_type)
            else:
                connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                   'Sequential')
            if len(modules) == 1:
                model = list(modules.values())[0]
            else:
                model = connections(modules)
        if loss:
            model.add_loss(ClassFactory.get_cls(ClassType.LOSS, loss))
        return model
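
An illustrative description dict this method could consume. Group names, network types, and the loss name below are hypothetical; each group resolves through ClassFactory, and the groups are joined by the named CONNECTIONS class ('Sequential' by default).

# Hypothetical desc; module and loss names are illustrative only.
desc = {
    'type': 'Sequential',             # connection joining the groups
    'modules': ['backbone', 'head'],  # ordered group names
    'backbone': {'type': 'ResNetGeneral'},
    'head': {'type': 'LinearClassificationHead'},
    'loss': 'CrossEntropyLoss',       # optional registered LOSS
}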
Example #22
    def __init__(self, config=None):
        """Initialize."""
        self.is_multi_opt = False
        if config is not None:
            self.config = Config(config)
        raw_config = self.config.to_dict()
        raw_config.type = self.config.type
        map_dict = OptimMappingDict
        self.map_config = ConfigBackendMapping(
            map_dict.type_mapping_dict,
            map_dict.params_mapping_dict).backend_mapping(raw_config)
        self.optim_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                              self.map_config.type)
Example #23
    def append(self, *args, **kwargs):
        """Append a transform to the end of the list.

        :param *args: positional arguments
        :type *args: tuple
        :param **kwargs: keyword arguments
        :type **kwargs: dict
        """
        if isinstance(args[0], str):
            transform = ClassFactory.get_cls(ClassType.TRANSFORM, args[0])
            self.__transform__.append(transform(**kwargs))
        else:
            self.__transform__.append(args[0])
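
A usage sketch of the two call forms, assuming a transform named 'RandomCrop' is registered under ClassType.TRANSFORM; the transforms object and instance below are assumptions.

# Hypothetical usage; 'RandomCrop' and its 'size' kwarg are assumptions.
transforms.append('RandomCrop', size=32)   # resolved by name and instantiated
transforms.append(my_transform_instance)   # or append a ready-made instance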
Example #24
    def __init__(self):
        super(FlopsParamsFilter, self).__init__()
        self.flops_range = self.restrict_config.flops
        self.params_range = self.restrict_config.params
        if self.flops_range and not isinstance(self.flops_range, list):
            self.flops_range = [0., self.flops_range]
        if self.params_range and not isinstance(self.params_range, list):
            self.params_range = [0., self.params_range]
        if self.flops_range is not None or self.params_range is not None:
            dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
            self.dataset = dataset_cls()
            from vega.datasets import Adapter
            self.dataloader = Adapter(self.dataset).loader
Example #25
    def __init__(self, search_space, **kwargs):
        """Init PruneCodec."""
        super(PruneCodec, self).__init__(search_space, **kwargs)
        net_type = self.search_space.backbone.type
        base_depth = self.search_space.backbone.base_depth
        stage = self.search_space.backbone.stage
        net_cls = ClassFactory.get_cls(ClassType.NETWORK, net_type)
        stage_blocks = net_cls._default_blocks[base_depth][:stage]
        self.base_chn, self.base_chn_node = [], []
        channel = self.search_space.backbone.base_channel
        # num_blocks avoids shadowing the 'stage' count used above
        for num_blocks in stage_blocks:
            self.base_chn += [channel] * num_blocks
            self.base_chn_node.append(channel)
            channel *= 2
Example #26
    def value(self):
        """Get values."""
        value = self._values.get(self.key)
        if self.d_type == ClassType.NETWORK:
            if isinstance(value, str):
                cls = ClassFactory.get_cls(ClassType.NETWORK, value)
                value = cls() if self.params is None else cls(**self.params)
            else:
                if self.params:
                    value = ClassFactory.get_instance(ClassType.NETWORK, value,
                                                      **self.params)
                else:
                    value = ClassFactory.get_instance(ClassType.NETWORK, value)
        return value
Example #27
    def is_filtered(self, desc=None):
        """Filter function of latency."""
        if self.max_latency is None:
            return False
        model, count_input = self.get_model_input(desc)
        trainer = ClassFactory.get_cls(ClassType.TRAINER)(model_desc=desc)
        sess_config = trainer._init_session_config() if vega.is_tf_backend() else None
        latency = calc_forward_latency(model, count_input, sess_config)
        logging.info('Sampled model\'s latency: {}ms'.format(latency))
        if latency > self.max_latency:
            logging.info('The latency is out of range. Skip this network.')
            return True
        else:
            return False
Example #28
    def insert(self, index, *args, **kwargs):
        """Insert a transform into the list.

        :param index: insertion position
        :type index: int
        :param *args: positional arguments
        :type *args: tuple
        :param **kwargs: keyword arguments
        :type **kwargs: dict
        """
        if isinstance(args[0], str):
            transform = ClassFactory.get_cls(ClassType.TRANSFORM, args[0])
            self.__transform__.insert(index, transform(**kwargs))
        else:
            self.__transform__.insert(index, args[0])
Example #29
    def __init__(self,
                 optimizer=None,
                 warmup_type="linear",
                 warmup_iters=0,
                 warmup_ratio=0.01,
                 after_scheduler_config=None):
        super(WarmupScheduler, self).__init__()
        self.warmup_type = warmup_type
        self.warmup_iter = warmup_iters
        self.warmup_ratio = warmup_ratio
        self.after_scheduler_config = after_scheduler_config
        self.after_scheduler_cls = ClassFactory.get_cls(
            ClassType.LR_SCHEDULER, self.after_scheduler_config.get("type"))
        self.after_scheduler = self.after_scheduler_cls(
            optimizer=None, **(self.after_scheduler_config.get("params")))
Example #30
    def is_filtered(self, desc=None):
        """Filter function of latency."""
        try:
            if not self.dataloader:
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
                self.dataset = dataset_cls()
                from vega.datasets import Adapter
                self.dataloader = Adapter(self.dataset).loader

            model, count_input = self.get_model_input(desc)
            model(count_input)
            return False
        except Exception as e:
            encoding = desc['backbone']['encoding']
            logging.info(f"Invalid encoding: {encoding}, message: {str(e)}")
            return True