Example #1
 def _evaluate_single_model(self, record):
     # Define before the try block so the except handler can always log it.
     worker_info = None
     try:
         worker_info = {
             "step_name": record.step_name,
             "worker_id": record.worker_id
         }
         _record = dict(worker_id=record.worker_id,
                        desc=record.desc,
                        step_name=record.step_name)
         ReportClient().update(**_record)
         if EvaluatorConfig.host_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(ClassType.HOST_EVALUATOR,
                                                  "HostEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
         if EvaluatorConfig.device_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(
                 ClassType.DEVICE_EVALUATOR, "DeviceEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
     except Exception:
         logger.error(
             "Failed to evaluate model, worker info={}".format(worker_info))
         logger.error(traceback.format_exc())
         return
Example #2
 def from_module(cls, module):
     """From Model."""
     name = module.__class__.__name__
     if ClassFactory.is_exists(ClassType.NETWORK, name):
         module_cls = ClassFactory.get_cls(ClassType.NETWORK, name)
         if hasattr(module_cls, "from_module"):
             return module_cls.from_module(module)
     return module
Example #3
 def __init__(self, generator, discriminator, latent_dim, gen_bs):
     super(GAN, self).__init__()
     self.generator = ClassFactory.get_cls(
         ClassType.NETWORK, generator.pop('type'))(**generator)
     self.latent_dim = latent_dim
     self.gen_bs = gen_bs
     self.discriminator = ClassFactory.get_cls(
         ClassType.NETWORK, discriminator.pop('type'))(**discriminator)
Example #4
 def __new__(cls, *args, **kwargs):
     """Create search algorithm instance by ClassFactory."""
     if cls.__name__ != 'Codec':
         return super().__new__(cls)
     if kwargs.get('type'):
         t_cls = ClassFactory.get_cls(ClassType.CODEC, kwargs.pop('type'))
     else:
         t_cls = ClassFactory.get_cls(ClassType.CODEC)
     return super().__new__(t_cls)
Example #5
 def __init__(self, aux_weight, loss_base):
     """Init MixAuxiliaryLoss."""
     self.aux_weight = aux_weight
     loss_base_cp = loss_base.copy()
     loss_base_name = loss_base_cp.pop('type')
     if ClassFactory.is_exists('trainer.loss', loss_base_name):
         loss_class = ClassFactory.get_cls('trainer.loss', loss_base_name)
     else:
         loss_class = getattr(importlib.import_module('tensorflow.losses'), loss_base_name)
     self.loss_fn = loss_class(**loss_base_cp['params'])
Example #6
    def __new__(cls, *args, **kwargs):
        """Create search algorithm instance by ClassFactory."""
        if cls.__name__ != 'SearchAlgorithm':
            return super().__new__(cls)
        if kwargs.get('type'):
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, kwargs.pop('type'))
        else:
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, PipeStepConfig.search_algorithm.type)

        return super().__new__(t_cls)
Example #7
    def __init__(self, num_classes, backbone='ResNetBackbone', neck='FPN', **kwargs):
        """Create layers.

        :param num_classes: number of classes
        :type num_classes: int
        """
        backbone_cls = ClassFactory.get_instance(ClassType.NETWORK, backbone)
        neck_cls = ClassFactory.get_instance(ClassType.NETWORK, neck, in_channels=backbone_cls.out_channels)
        backbone_neck = Sequential()
        backbone_neck.append(backbone_cls, 'body')
        backbone_neck.append(neck_cls, 'fpn')
        super(FasterRCNN, self).__init__(backbone_neck, num_classes, **kwargs)
Example #8
    def __init__(self,
                 stem,
                 cells,
                 head,
                 init_channels,
                 num_classes,
                 auxiliary,
                 search,
                 aux_size=8,
                 auxiliary_layer=13,
                 drop_path_prob=0):
        """Create layers."""
        super(DartsNetwork, self).__init__()
        self.is_search = search
        self._auxiliary = auxiliary
        self.drop_path_prob = drop_path_prob
        if auxiliary:
            self._aux_size = aux_size
            self._auxiliary_layer = auxiliary_layer
        # Build stems part
        self.pre_stems = ClassFactory.get_instance(ClassType.NETWORK, stem)
        # Build cells part
        c_curr = self.pre_stems.output_channel
        self.cells_ = Cells(cells,
                            c_curr,
                            init_channels,
                            auxiliary=auxiliary,
                            auxiliary_layer=auxiliary_layer)
        # output params
        self.len_alpha = self.cells_.len_alpha
        self.num_ops = self.cells_.num_ops
        self.steps = self.cells_.steps
        c_prev, c_aux = self.cells_.output_channels()
        if not search and auxiliary:
            self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
        # head
        self.head = ClassFactory.get_instance(ClassType.NETWORK,
                                              head,
                                              base_channel=c_prev,
                                              num_classes=num_classes)

        # Initialize architecture parameters
        self.set_parameters(
            'alphas_normal',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))
        self.set_parameters(
            'alphas_reduce',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))

        self.cell_list = self.cells_.children()
        self.name_list = []
        for tmp_cell in self.cells_.children():
            self.name_list.append(tmp_cell.__class__.__name__)
Example #9
 def _register_models_from_current_module_scope(module):
     for _name in dir(module):
         if _name.startswith("_"):
             continue
         _cls = getattr(module, _name)
         if isinstance(_cls, ModuleType):
             continue
         if ClassFactory.is_exists(ClassType.NETWORK,
                                   'torchvision_' + _cls.__name__):
             continue
         ClassFactory.register_cls(_cls,
                                   ClassType.NETWORK,
                                   alias='torchvision_' + _cls.__name__)
Example #10
 def value(self):
     """Get values."""
     value = self._values.get(self.key)
     if self.d_type == ClassType.NETWORK:
         if isinstance(value, str):
             cls = ClassFactory.get_cls(ClassType.NETWORK, value)
             value = cls() if self.params is None else cls(**self.params)
         else:
             if self.params:
                 value = ClassFactory.get_instance(ClassType.NETWORK, value,
                                                   **self.params)
             else:
                 value = ClassFactory.get_instance(ClassType.NETWORK, value)
     return value
Example #11
 def __init__(self, type):
     if type not in ['restrict', 'target']:
          raise ValueError("Input type must be 'restrict' or 'target'.")
     self.filter_types, self.terminate_types = [], []
     self.filter_compares = dict()
     self.terminate_compares = dict()
     self.filters_to_params = dict()
     self._init_compare_types(type)
     for filter in self.filter_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, filter)
         self.filter_compares[filter] = t_cls()
     for terminate in self.terminate_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, terminate)
         self.terminate_compares[terminate] = t_cls()
     self.filter_rules = copy.deepcopy(General.quota.filter_rules)
Example #12
def get_module_class(cls_name):
    """Get Search Space by class name.

    :param cls_name: class name
    :return: Search Space cls
    """
    return ClassFactory.get_cls(ClassType.NETWORK, cls_name)
Example #13
    def __init__(self, backbone_name, layer_names=None, **kwargs):
        backbone = ClassFactory.get_cls(ClassType.NETWORK, backbone_name)
        backbone = backbone(**kwargs) if kwargs else backbone()
        if hasattr(backbone, "layers_name"):
            layer_names = backbone.layers_name()

        super(BackboneGetter, self).__init__(backbone, layer_names)
Example #14
 def __new__(cls, *args, **kwargs):
     """Create optimizer or multi-optimizer class."""
      if isinstance(cls.config.to_dict(), list):
         t_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                      'MultiOptimizers')
         return super().__new__(t_cls)
     return super().__new__(cls)
Example #15
    def decorator(cls):
        """Provide input param to decorator.

        :param func: wrapper function
        :return: decoratpr
        """
        # TODO: 需要导入包
        if isinstance(class_name, str):
            need_validate_cls = ClassFactory.get_cls(ClassType.CONFIG, class_name)
        else:
            need_validate_cls = class_name

        @wraps(cls)
        def wrapper(*args, **kwargs):
            """Make function as a wrapper."""
            valid_attrs = {key: item for key, item in cls.__dict__.items() if not key.startswith('_')}
            for attr_name, rules in valid_attrs.items():
                attr_value = getattr(need_validate_cls, attr_name)
                if isinstance(rules, list) or isinstance(rules, tuple):
                    for _rule in rules:
                        _rule(attr_value)
                else:
                    rules(attr_value)

            return cls(*args, **kwargs)

        return wrapper
Example #16
 def _init_dataloader(self, mode, loader=None):
     """Init dataloader."""
     if loader is not None:
         return loader
     dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
     dataset = dataset_cls(mode=mode)
     dataloader = Adapter(dataset).loader
     return dataloader
Example #17
 def __init__(self, aux_weight, loss_base):
     """Init MixAuxiliaryLoss."""
     super(MixAuxiliaryLoss, self).__init__()
     self.aux_weight = aux_weight
     loss_base_cp = loss_base.copy()
     loss_base_name = loss_base_cp.pop('type')
     self.loss_fn = ClassFactory.get_cls(
         'trainer.loss', loss_base_name)(**loss_base_cp['params'])
Example #18
 def __init__(self):
     super(LatencyFilter, self).__init__()
     self.max_latency = self.restrict_config.latency
     if self.max_latency is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         self.dataset = dataset_cls()
         from vega.datasets import Adapter
         self.dataloader = Adapter(self.dataset).loader
Example #19
 def __init__(self, metric_cfg=None):
     """Init Metrics."""
     self.mdict = {}
     metric_config = self.config.to_dict()
     if not isinstance(metric_config, list):
         metric_config = [metric_config]
     for metric_item in metric_config:
         metric_name = metric_item.pop('type')
         metric_class = ClassFactory.get_cls(ClassType.METRIC, metric_name)
         if isfunction(metric_class):
             metric_class = partial(metric_class, **metric_item.get("params", {}))
         else:
             metric_class = metric_class(**metric_item.get("params", {}))
         self.mdict[metric_name] = metric_class
     self.mdict = Config(self.mdict)
     self.metric_results = dict()
Example #20
def create_module(model):
    """Create search space from model or desc."""
    if isinstance(model, Module):
        return model.__class__.__name__, model
    elif isinstance(model, dict):
        module_type = model.get('type')
        module_param = deepcopy(model)
        module_param.pop('type')
        module = ClassFactory.get_cls(ClassType.NETWORK, module_type)
        return module_type, module(**module_param)
Example #21
File: loss.py  Project: huawei-noah/vega
 def __init__(self):
     """Initialize."""
     # register pytorch loss as default
     raw_config = self.config.to_dict()
     raw_config.type = self.config.type
     map_dict = LossMappingDict()
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict,
         map_dict.params_mapping_dict).backend_mapping(raw_config)
     self._cls = ClassFactory.get_cls(ClassType.LOSS, self.map_config.type)
Example #22
 def _create_loss(self):
     """Create loss class."""
     if self.loss is None:
         return
     if isinstance(self.loss, str):
         loss_cls = ClassFactory.get_cls(ClassType.LOSS, self.loss)
         desc = self.desc.get('loss')
         loss_obj = loss_cls(**desc) if desc is not None else loss_cls()
     else:
         loss_obj = self.loss
     self.add_loss(loss_obj)
Example #23
    def _use_evaluator(self):
        """Check if use evaluator and get the evaluators.

        :return: whether an evaluator is used, and the evaluator classes
        :rtype: bool, (Evaluator, HostEvaluator, DloopEvaluator)
        """
        use_evaluator = False
        cls_evaluator_set = []
        if EvaluatorConfig.host_evaluator_enable:
            cls_host_evaluator = ClassFactory.get_cls(ClassType.HOST_EVALUATOR,
                                                      "HostEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_host_evaluator)
        if EvaluatorConfig.device_evaluator_enable:
            cls_device_evaluator = ClassFactory.get_cls(
                ClassType.DEVICE_EVALUATOR, "DeviceEvaluator")
            use_evaluator = True
            cls_evaluator_set.append(cls_device_evaluator)
        # TODO HAVA_D_EVALUATOR
        return use_evaluator, cls_evaluator_set
Example #24
 def __init__(self, optimizer, cfg):
     """Initialize."""
     self.cfg = cfg
     self.optimizer = optimizer
     for item in cfg['modules']:
         sub_optimizer_name = cfg[item]['optimizer']
         tem_lr_scheduler = ClassFactory.get_cls(ClassType.LR_SCHEDULER,
                                                 cfg[item].type)
         sub_optimizer = getattr(optimizer, sub_optimizer_name)
         params = cfg[item].get("params", {})
         setattr(self, item, tem_lr_scheduler(sub_optimizer, **params))
Example #25
 def __init__(self,
              stem,
              cells,
              head,
              init_channels,
              num_classes,
              auxiliary,
              search,
              aux_size=8,
              auxiliary_layer=13,
              drop_path_prob=0):
     """Create layers."""
     super(DartsNetwork, self).__init__()
     self.is_search = search
     self._auxiliary = auxiliary
     self.drop_path_prob = drop_path_prob
     if auxiliary:
         self._aux_size = aux_size
         self._auxiliary_layer = auxiliary_layer
     # Build stems part
     self.pre_stems = ClassFactory.get_instance(ClassType.NETWORK, stem)
     # Build cells part
     c_curr = self.pre_stems.output_channel
     self.cells_ = Cells(cells,
                         c_curr,
                         init_channels,
                         auxiliary=auxiliary,
                         auxiliary_layer=auxiliary_layer)
     # output params
     self.len_alpha = self.cells_.len_alpha
     self.num_ops = self.cells_.num_ops
     self.steps = self.cells_.steps
     c_prev, c_aux = self.cells_.output_channels()
     if not search and auxiliary:
         self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
     # head
     self.head = ClassFactory.get_instance(ClassType.NETWORK,
                                           head,
                                           base_channel=c_prev,
                                           num_classes=num_classes)
     self.build()
Example #26
 def _init_loss(self):
     """Init loss."""
     if vega.is_torch_backend():
         loss_config = self.criterion.copy()
         loss_name = loss_config.pop('type')
         loss_class = getattr(importlib.import_module('torch.nn'),
                              loss_name)
         return loss_class(**loss_config)
     elif vega.is_tf_backend():
         from inspect import isclass
         loss_config = self.config.tf_criterion.copy()
         loss_name = loss_config.pop('type')
         if ClassFactory.is_exists('trainer.loss', loss_name):
             loss_class = ClassFactory.get_cls('trainer.loss', loss_name)
             if isclass(loss_class):
                 return loss_class(**loss_config)
             else:
                 return partial(loss_class, **loss_config)
         else:
             loss_class = getattr(
                 importlib.import_module('tensorflow.losses'), loss_name)
             return partial(loss_class, **loss_config)
Example #27
    def append(self, *args, **kwargs):
        """Append a transform to the end of the list.

        :param *args: positional arguments
        :type *args: tuple
        :param **kwargs: keyword arguments
        :type **kwargs: dict
        """
        if isinstance(args[0], str):
            transform = ClassFactory.get_cls(ClassType.TRANSFORM, args[0])
            self.__transform__.append(transform(**kwargs))
        else:
            self.__transform__.append(args[0])
Example #28
 def __init__(self, config=None):
     """Initialize."""
     self.is_multi_opt = False
     if config is not None:
         self.config = Config(config)
     raw_config = self.config.to_dict()
     raw_config.type = self.config.type
     map_dict = OptimMappingDict
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict,
         map_dict.params_mapping_dict).backend_mapping(raw_config)
     self.optim_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                           self.map_config.type)
Example #29
 def __init__(self):
     super(FlopsParamsFilter, self).__init__()
     self.flops_range = self.restrict_config.flops
     self.params_range = self.restrict_config.params
     if self.flops_range and not isinstance(self.flops_range, list):
         self.flops_range = [0., self.flops_range]
     if self.params_range and not isinstance(self.params_range, list):
         self.params_range = [0., self.params_range]
     if self.flops_range is not None or self.params_range is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         self.dataset = dataset_cls()
         from vega.datasets import Adapter
         self.dataloader = Adapter(self.dataset).loader
Example #30
 def __init__(self, search_space, **kwargs):
     """Init PruneCodec."""
     super(PruneCodec, self).__init__(search_space, **kwargs)
     net_type = self.search_space.backbone.type
     base_depth = self.search_space.backbone.base_depth
     stage = self.search_space.backbone.stage
     net_cls = ClassFactory.get_cls(ClassType.NETWORK, net_type)
     stage_blocks = net_cls._default_blocks[base_depth][:stage]
     self.base_chn, self.base_chn_node = [], []
     channel = self.search_space.backbone.base_channel
      for num_blocks in stage_blocks:
          self.base_chn += [channel] * num_blocks
         self.base_chn_node.append(channel)
         channel *= 2