Example #1
 def _evaluate_single_model(self, record):
     try:
         worker_info = {
             "step_name": record.step_name,
             "worker_id": record.worker_id
         }
         _record = dict(worker_id=record.worker_id,
                        desc=record.desc,
                        step_name=record.step_name)
         _init_record = ReportRecord().load_dict(_record)
         Report().broadcast(_init_record)
         if EvaluatorConfig.gpu_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(ClassType.GPU_EVALUATOR,
                                                  "GpuEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
         if EvaluatorConfig.davinci_mobile_evaluator_enable:
             cls_evaluator = ClassFactory.get_cls(
                 ClassType.DAVINCI_MOBILE_EVALUATOR,
                 "DavinciMobileEvaluator")
             evaluator = cls_evaluator(worker_info=worker_info,
                                       model_desc=record.desc,
                                       weights_file=record.weights_file)
             self.master.run(evaluator)
     except Exception:
         logger.error(
             "Failed to evaluate model, worker info={}".format(worker_info))
         logger.error(traceback.format_exc())
         return
Example #2
 def register(self, regclass, reg_path, reg_id):
     """Register a component class."""
     reg_id = self.get_reg_name(reg_id)
     ClassFactory.register_cls(regclass,
                               type_name=get_reg_type(reg_path),
                               alias=reg_id)
     self.logger.debug('registered: {}'.format(reg_id))
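
Examples like #2 register a class under a type name and an alias, and later examples fetch it back with get_cls or check it with is_exists. The following self-contained toy registry is only a sketch of that round trip, not the framework's actual ClassFactory; the class and alias names are made up.

# Toy registry illustrating the register_cls / is_exists / get_cls round trip
# used throughout these examples. This is a sketch, not Vega's ClassFactory.
class ToyClassFactory:
    _registry = {}  # {type_name: {alias: class}}

    @classmethod
    def register_cls(cls, regclass, type_name, alias=None):
        """Store a class under a type name, keyed by alias (default: class name)."""
        alias = alias or regclass.__name__
        cls._registry.setdefault(type_name, {})[alias] = regclass

    @classmethod
    def is_exists(cls, type_name, alias):
        """Return True if the alias was registered under this type."""
        return alias in cls._registry.get(type_name, {})

    @classmethod
    def get_cls(cls, type_name, alias):
        """Return the registered class, or raise if it was never registered."""
        try:
            return cls._registry[type_name][alias]
        except KeyError:
            raise ValueError("{} is not registered under {}".format(alias, type_name))


class MyHead:  # made-up example class
    def __init__(self, base_channel, num_classes):
        self.base_channel = base_channel
        self.num_classes = num_classes


ToyClassFactory.register_cls(MyHead, "network", alias="MyHead")
assert ToyClassFactory.is_exists("network", "MyHead")
head = ToyClassFactory.get_cls("network", "MyHead")(base_channel=256, num_classes=10)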
Example #3
    def _init_transforms(self):
        """Initialize transforms method.

        :return: a list of objects
        :rtype: list
        """
        if "transforms" in self.args.keys():
            transforms = list()
            if not isinstance(self.args.transforms, list):
                self.args.transforms = [self.args.transforms]
            for i in range(len(self.args.transforms)):
                transform_name = self.args.transforms[i].pop("type")
                kwargs = self.args.transforms[i]
                if ClassFactory.is_exists(ClassType.TRANSFORM, transform_name):
                    transforms.append(
                        ClassFactory.get_cls(ClassType.TRANSFORM,
                                             transform_name)(**kwargs))
                else:
                    transforms.append(
                        getattr(
                            importlib.import_module('torchvision.transforms'),
                            transform_name)(**kwargs))
            return transforms
        else:
            return list()
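
Example #3 (and, similarly, Example #10 for losses) looks a name up in the registry first and only falls back to the library module via importlib when it is not registered. Below is a condensed sketch of that fallback, assuming torchvision is installed; resolve_transform and the plain-dict registry are made-up stand-ins, not framework API.

# Registry-first lookup with a torchvision fallback, condensed from Example #3.
import importlib

def resolve_transform(name, registry, **kwargs):
    """Instantiate a transform from the registry, else from torchvision.transforms."""
    if name in registry:                  # stands in for ClassFactory.is_exists
        return registry[name](**kwargs)   # stands in for ClassFactory.get_cls
    module = importlib.import_module("torchvision.transforms")
    return getattr(module, name)(**kwargs)

# e.g. resolve_transform("RandomCrop", registry={}, size=32) builds torchvision's RandomCrop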
Example #4
 def __init__(self, generator, discriminator, latent_dim, gen_bs):
     super(GAN, self).__init__()
     self.generator = ClassFactory.get_cls(
         ClassType.NETWORK, generator.pop('type'))(**generator)
     self.latent_dim = latent_dim
     self.gen_bs = gen_bs
     self.discriminator = ClassFactory.get_cls(
         ClassType.NETWORK, discriminator.pop('type'))(**discriminator)
Example #5
 def __new__(cls, *args, **kwargs):
     """Create search algorithm instance by ClassFactory."""
     if cls.__name__ != 'Codec':
         return super().__new__(cls)
     if kwargs.get('type'):
         t_cls = ClassFactory.get_cls(ClassType.CODEC, kwargs.pop('type'))
     else:
         t_cls = ClassFactory.get_cls(ClassType.CODEC)
     return super().__new__(t_cls)
Example #6
 def __new__(cls, *args, **kwargs):
     """Create a subclass instance of dataset."""
     if Dataset in cls.__bases__:
         return super().__new__(cls)
     if kwargs.get('type'):
         t_cls = ClassFactory.get_cls(ClassType.DATASET, kwargs.pop('type'))
     else:
         t_cls = ClassFactory.get_cls(ClassType.DATASET)
     return super().__new__(t_cls)
Example #7
    def __new__(cls, *args, **kwargs):
        """Create search algorithm instance by ClassFactory."""
        if cls.__name__ != 'SearchAlgorithm':
            return super().__new__(cls)
        if kwargs.get('type'):
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, kwargs.pop('type'))
        else:
            t_cls = ClassFactory.get_cls(ClassType.SEARCH_ALGORITHM, PipeStepConfig.search_algorithm.type)

        return super().__new__(t_cls)
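
Examples #5 through #7 share one idiom: the base class's __new__ returns normally for subclasses, but when the base class itself is instantiated it asks the factory for the concrete class named by the 'type' keyword. Here is a self-contained sketch of that dispatch with a plain dict standing in for ClassFactory; the codec names are illustrative only.

# Sketch of the __new__ dispatch shared by Examples #5-#7.
_CODEC_REGISTRY = {}

class Codec:
    def __new__(cls, *args, **kwargs):
        # Subclasses construct normally; the base class dispatches on 'type'.
        if cls.__name__ != "Codec":
            return super().__new__(cls)
        t_cls = _CODEC_REGISTRY[kwargs["type"]]
        return super().__new__(t_cls)

    def __init__(self, *args, **kwargs):
        kwargs.pop("type", None)  # the dispatch key is not a real init argument

class DartsCodec(Codec):
    pass

_CODEC_REGISTRY["DartsCodec"] = DartsCodec
codec = Codec(type="DartsCodec")  # returns a DartsCodec instance
assert isinstance(codec, DartsCodec)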
Example #8
 def value(self):
     """Get values."""
     value = self._values.get(self.key)
     if self.d_type == ClassType.NETWORK:
         if isinstance(value, str):
             cls = ClassFactory.get_cls(ClassType.NETWORK, value)
             value = cls() if self.params is None else cls(**self.params)
         else:
             value = ClassFactory.get_instance(ClassType.NETWORK, value)
     return value
Example #9
 def _register_models_from_current_module_scope(module):
     for _name in dir(module):
         if _name.startswith("_"):
             continue
         _cls = getattr(module, _name)
         if isinstance(_cls, ModuleType):
             continue
         if ClassFactory.is_exists(ClassType.SEARCH_SPACE, 'torchvision_' + _cls.__name__):
             continue
         ClassFactory.register_cls(_cls, ClassType.SEARCH_SPACE, alias='torchvision_' + _cls.__name__)
Example #10
 def __init__(self, aux_weight, loss_base):
     """Init MixAuxiliaryLoss."""
     self.aux_weight = aux_weight
     loss_base_cp = loss_base.copy()
     loss_base_name = loss_base_cp.pop('type')
     if ClassFactory.is_exists('trainer.loss', loss_base_name):
         loss_class = ClassFactory.get_cls('trainer.loss', loss_base_name)
     else:
         loss_class = getattr(importlib.import_module('tensorflow.losses'),
                              loss_base_name)
     self.loss_fn = loss_class(**loss_base_cp['params'])
Example #11
    def __init__(self,
                 stem,
                 cells,
                 head,
                 init_channels,
                 num_classes,
                 auxiliary,
                 search,
                 aux_size=8,
                 auxiliary_layer=13,
                 drop_path_prob=0):
        """Create layers."""
        super(DartsNetwork, self).__init__()
        self.is_search = search
        self._auxiliary = auxiliary
        self.drop_path_prob = drop_path_prob
        if auxiliary:
            self._aux_size = aux_size
            self._auxiliary_layer = auxiliary_layer
        # Build stems part
        self.pre_stems = ClassFactory.get_instance(ClassType.NETWORK, stem)
        # Build cells part
        c_curr = self.pre_stems.output_channel
        self.cells_ = Cells(cells,
                            c_curr,
                            init_channels,
                            auxiliary=auxiliary,
                            auxiliary_layer=auxiliary_layer)
        # output params
        self.len_alpha = self.cells_.len_alpha
        self.num_ops = self.cells_.num_ops
        self.steps = self.cells_.steps
        c_prev, c_aux = self.cells_.output_channels()
        if not search and auxiliary:
            self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
        # head
        self.head = ClassFactory.get_instance(ClassType.NETWORK,
                                              head,
                                              base_channel=c_prev,
                                              num_classes=num_classes)

        # Initialize architecture parameters
        self.set_parameters(
            'alphas_normal',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))
        self.set_parameters(
            'alphas_reduce',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))

        self.cell_list = self.cells_.children()
        self.name_list = []
        for tmp_cell in self.cells_.children():
            self.name_list.append(tmp_cell.__class__.__name__)
Example #12
 def __init__(self, type):
     if type not in ['restrict', 'target']:
         raise ValueError('Input type must be restrict or target.')
     self.filter_types, self.terminate_types = [], []
     self.filter_compares = dict()
     self.terminate_compares = dict()
     self.filters_to_params = dict()
     self._init_compare_types(type)
     for filter in self.filter_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, filter)
         self.filter_compares[filter] = t_cls()
     for terminate in self.terminate_types:
         t_cls = ClassFactory.get_cls(ClassType.QUOTA, terminate)
         self.terminate_compares[terminate] = t_cls()
     self.filter_rules = copy.deepcopy(General.quota.filter_rules)
Example #13
 def _init_dataloader(self, mode, loader=None):
     """Init dataloader."""
     if loader is not None:
         return loader
     if mode == "train" and self.hps is not None and self.hps.get("dataset") is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         dataset = dataset_cls(mode=mode, hps=self.hps.get("dataset"))
     else:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         dataset = dataset_cls(mode=mode)
     if self.distributed and mode == "train":
         dataset.set_distributed(self._world_size, self._rank_id)
     # adapt the dataset to specific backend
     dataloader = Adapter(dataset).loader
     return dataloader
Example #14
    def decorator(cls):
        """Provide input param to decorator.

        :param cls: class whose attributes will be validated
        :return: decorator
        """
        # TODO: the required package still needs to be imported
        if isinstance(class_name, str):
            need_validate_cls = ClassFactory.get_cls(ClassType.CONFIG,
                                                     class_name)
        else:
            need_validate_cls = class_name

        @wraps(cls)
        def wrapper(*args, **kwargs):
            """Make function as a wrapper."""
            valid_attrs = {
                key: item
                for key, item in cls.__dict__.items()
                if not key.startswith('_')
            }
            for attr_name, rules in valid_attrs.items():
                attr_value = getattr(need_validate_cls, attr_name)
                if isinstance(rules, list) or isinstance(rules, tuple):
                    for _rule in rules:
                        _rule(attr_value)
                else:
                    rules(attr_value)

            return cls(*args, **kwargs)

        return wrapper
Example #15
    def __init__(self,
                 num_classes,
                 backbone='SerialBackbone',
                 neck='TorchFPN',
                 network_name='torchvision_FasterRCNN',
                 weight_file=None,
                 **kwargs):
        """Create layers.

        :param num_classes: number of classes
        :type num_classes: int
        """
        super(FasterRCNN, self).__init__()
        self.weight_file = weight_file
        backbone_cls = self.define_props('backbone',
                                         backbone,
                                         dtype=ClassType.NETWORK)
        backbone_cls.freeze()
        if getattr(backbone_cls, 'out_channels') and 'in_channels' not in neck:
            neck_in_channel = backbone_cls.out_channels
            params = {"in_channels": neck_in_channel}
            neck_cls = self.define_props('neck',
                                         neck,
                                         dtype=ClassType.NETWORK,
                                         params=params)
        else:
            neck_cls = self.define_props('neck', neck, dtype=ClassType.NETWORK)
        backbone_neck = Sequential(backbone_cls, neck_cls)
        backbone_neck.freeze()
        self.model = ClassFactory.get_cls(ClassType.NETWORK,
                                          network_name)(backbone_neck,
                                                        num_classes, **kwargs)
Example #16
def get_module_class(cls_name):
    """Get Search Space by class name.

    :param cls_name: class name
    :return: Search Space cls
    """
    return ClassFactory.get_cls(ClassType.SEARCH_SPACE, cls_name)
Example #17
 def __new__(cls, *args, **kwargs):
     """Create optimizer or multi-optimizer class."""
     if isinstance(cls.config.to_dict, list):
         t_cls = ClassFactory.get_cls(ClassType.OPTIMIZER,
                                      'MultiOptimizers')
         return super().__new__(t_cls)
     return super().__new__(cls)
Example #18
 def __init__(self, metric_cfg=None):
     """Init Metrics."""
     self.mdict = {}
     metric_config = self.config.to_dict() if not metric_cfg else deepcopy(metric_cfg)
     if not isinstance(metric_config, list):
         metric_config = [metric_config]
     for metric_item in metric_config:
         ClassFactory.get_cls(ClassType.METRIC, self.config.type)
         metric_name = metric_item.pop('type')
         metric_class = ClassFactory.get_cls(ClassType.METRIC, metric_name)
         if isfunction(metric_class):
             metric_class = partial(metric_class, **metric_item.get("params", {}))
         else:
             metric_class = metric_class(**metric_item.get("params", {}))
         self.mdict[metric_name] = metric_class
     self.mdict = Config(self.mdict)
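
Example #18 treats function metrics and class metrics differently: a plain function gets its params bound with functools.partial, while a class is instantiated with them. A tiny self-contained illustration of that branch follows; the metric names and the bind_metric helper are made up.

from functools import partial
from inspect import isfunction

def top_k_accuracy(output, target, k=1):  # function-style metric (made-up name)
    ...

class ConfusionMatrix:                     # class-style metric (made-up name)
    def __init__(self, num_classes):
        self.num_classes = num_classes

def bind_metric(metric_cls_or_fn, params):
    """Bind params now, call later with (output, target), as Example #18 does."""
    if isfunction(metric_cls_or_fn):
        return partial(metric_cls_or_fn, **params)
    return metric_cls_or_fn(**params)

acc_at_5 = bind_metric(top_k_accuracy, {"k": 5})                # a partial function
confusion = bind_metric(ConfusionMatrix, {"num_classes": 10})   # an instance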
Example #19
 def __init__(self, aux_weight, loss_base):
     """Init MixAuxiliaryLoss."""
     super(MixAuxiliaryLoss, self).__init__()
     self.aux_weight = aux_weight
     loss_base_cp = loss_base.copy()
     loss_base_name = loss_base_cp.pop('type')
     self.loss_fn = ClassFactory.get_cls('trainer.loss', loss_base_name)(**loss_base_cp['params'])
Example #20
 def _init_dataloader(self, mode, loader=None):
     """Init dataloader."""
     if loader is not None:
         return loader
     dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
     dataset = dataset_cls(mode=mode)
     dataloader = Adapter(dataset).loader
     return dataloader
Example #21
 def _create_loss(self):
     """Create loss class."""
     if self.loss is None:
         return
     loss_cls = ClassFactory.get_cls(ClassType.LOSS, self.loss)
     desc = self.desc.get('loss')
     loss_obj = loss_cls(**desc) if desc is not None else loss_cls()
     self.add_loss(loss_obj)
Example #22
 def __init__(self):
     super(LatencyFilter, self).__init__()
     self.max_latency = self.restrict_config.latency
     if self.max_latency is not None:
         dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
         self.dataset = dataset_cls()
         from zeus.datasets import Adapter
         self.dataloader = Adapter(self.dataset).loader
Example #23
 def from_desc(cls, desc):
     """Create Model from desc."""
     desc = deepcopy(desc)
     module_groups = desc.get('modules', [])
     module_type = desc.get('type', 'Sequential')
     loss = desc.get('loss')
     modules = OrderedDict()
     for group_name in module_groups:
         module_desc = deepcopy(desc.get(group_name))
         if 'modules' in module_desc:
             module = cls.from_desc(module_desc)
         else:
             cls_name = module_desc.get('type')
             if not ClassFactory.is_exists(ClassType.NETWORK, cls_name):
                 raise ValueError("Network {} not exists.".format(cls_name))
             module = ClassFactory.get_instance(ClassType.NETWORK,
                                                module_desc)
         modules[group_name] = module
     if not modules and module_type:
         model = ClassFactory.get_instance(ClassType.NETWORK, desc)
     else:
         if ClassFactory.is_exists(SearchSpaceType.CONNECTIONS,
                                   module_type):
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                module_type)
         else:
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                'Sequential')
         model = list(modules.values())[0] if len(
             modules) == 1 else connections(modules)
     if loss:
         model.add_loss(ClassFactory.get_cls(ClassType.LOSS, loss))
     return model
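
from_desc in Example #23 walks a nested description: a 'modules' list names the groups, each group key holds its own sub-description with a 'type', an optional top-level 'type' selects the connection, and 'loss' attaches a loss class. The dict below illustrates that shape; the concrete network and loss names are placeholders, not required values.

# Illustrative description a from_desc-style builder could consume. The keys
# mirror what Example #23 reads; the network/loss names are guesses.
desc = {
    "type": "Sequential",                 # connection used to chain the groups
    "modules": ["backbone", "head"],      # group names, resolved in order
    "backbone": {"type": "ResNetBackbone", "depth": 18},
    "head": {"type": "LinearClassificationHead", "num_classes": 10},
    "loss": "CrossEntropyLoss",           # added via model.add_loss(...)
}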
Example #24
 def __init__(self):
     """Initialize."""
     # register pytorch/tensorflow optim as default
     raw_config = self.config.to_json()
     raw_config.type = self.config.type
     map_dict = OptimMappingDict
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict, map_dict.params_mapping_dict).backend_mapping(raw_config)
     self.optim_cls = ClassFactory.get_cls(ClassType.OPTIMIZER, self.map_config.type)
Example #25
 def _get_evaluator(self, worker_id):
     if not PipeStepConfig.evaluator_enable:
         return None
     cls_evaluator = ClassFactory.get_cls('evaluator', "Evaluator")
     evaluator = cls_evaluator({
         "step_name": self.task.step_name,
         "worker_id": worker_id
     })
     return evaluator
Example #26
 def __init__(self):
     """Initialize."""
     # register pytorch loss as default
     raw_config = self.config.to_json()
     raw_config.type = self.config.type
     map_dict = LossMappingDict()
     self.map_config = ConfigBackendMapping(
         map_dict.type_mapping_dict, map_dict.params_mapping_dict).backend_mapping(raw_config)
     self._cls = ClassFactory.get_cls(ClassType.LOSS, self.map_config.type)
Example #27
 def _add_pretrained_hook(self):
     if self.pretrained_hook is None:
         return
     if isinstance(self.pretrained_hook, str):
         hook = ClassFactory.get_cls(ClassType.PRETRAINED_HOOK,
                                     self.pretrained_hook)
     else:
         hook = self.pretrained_hook
     self._register_load_state_dict_pre_hook(hook)
Example #28
 def _dispatch_trainer(self, samples):
     for (id, desc, hps) in samples:
         cls_trainer = ClassFactory.get_cls(ClassType.TRAINER)
         TrainerConfig.from_dict(self.user_trainer_config)
         trainer = cls_trainer(id=id, model_desc=desc, hps=hps)
         evaluator = self._get_evaluator(trainer)
         logging.info("submit trainer, id={}".format(id))
         ReportServer.add_watched_var(General.step_name, trainer.worker_id)
         self.master.run(trainer, evaluator)
Example #29
def create_module(model):
    """Create search space from model or desc."""
    if isinstance(model, Module):
        return model.__class__.__name__, model
    elif isinstance(model, dict):
        module_type = model.get('type')
        module_param = deepcopy(model)
        module_param.pop('type')
        module = ClassFactory.get_cls(ClassType.SEARCH_SPACE, module_type)
        return module_type, module(**module_param)
Example #30
 def __init__(self,
              stem,
              cells,
              head,
              init_channels,
              num_classes,
              auxiliary,
              search,
              aux_size=8,
              auxiliary_layer=13,
              drop_path_prob=0):
     """Create layers."""
     super(DartsNetwork, self).__init__()
     self.is_search = search
     self._auxiliary = auxiliary
     self.drop_path_prob = drop_path_prob
     self._cells = cells
     if auxiliary:
         self._aux_size = aux_size
         self._auxiliary_layer = auxiliary_layer
     # Build stems part
     self.pre_stems = ClassFactory.get_instance(ClassType.SEARCH_SPACE,
                                                stem)
     # Build cells part
     c_curr = self.pre_stems.output_channel
     self.cells_ = Cells(cells,
                         c_curr,
                         init_channels,
                         auxiliary=auxiliary,
                         auxiliary_layer=auxiliary_layer)
     # output params
     self.len_alpha = self.cells_.len_alpha
     self.num_ops = self.cells_.num_ops
     self.steps = self.cells_.steps
     c_prev, c_aux = self.cells_.output_channels()
     if not search and auxiliary:
         self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
     # head
     self.head = ClassFactory.get_instance(ClassType.SEARCH_SPACE,
                                           head,
                                           base_channel=c_prev,
                                           num_classes=num_classes)
     self.initializer()