Example 1
    def __init__(self, num_classes, backbone='ResNetBackbone', neck='FPN', **kwargs):
        """Create layers.

        :param num_class: number of class
        :type num_class: int
        """
        backbone_cls = ClassFactory.get_instance(ClassType.NETWORK, backbone)
        neck_cls = ClassFactory.get_instance(ClassType.NETWORK, neck, in_channels=backbone_cls.out_channels)
        backbone_neck = Sequential()
        backbone_neck.append(backbone_cls, 'body')
        backbone_neck.append(neck_cls, 'fpn')
        super(FasterRCNN, self).__init__(backbone_neck, num_classes, **kwargs)
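All of these snippets resolve network components by name through ClassFactory.get_instance. As a point of reference, the following is a minimal, self-contained sketch of that kind of name-based registry; SimpleFactory, register and create are hypothetical names used only to illustrate the idea and are not part of the framework shown above.

# Minimal sketch of a name-based class registry, for illustration only.
# SimpleFactory, register and create are hypothetical names.
class SimpleFactory:
    _registry = {}

    @classmethod
    def register(cls, name):
        """Class decorator that stores a class under the given name."""
        def wrapper(klass):
            cls._registry[name] = klass
            return klass
        return wrapper

    @classmethod
    def create(cls, name, **kwargs):
        """Instantiate a registered class by name with keyword arguments."""
        if name not in cls._registry:
            raise ValueError("Class {} is not registered.".format(name))
        return cls._registry[name](**kwargs)


@SimpleFactory.register('ResNetBackbone')
class ResNetBackbone:
    """Placeholder backbone used to exercise the sketch."""

    def __init__(self, depth=50):
        self.depth = depth
        self.out_channels = 2048


# Resolve the class through the same kind of string used in Example 1.
backbone = SimpleFactory.create('ResNetBackbone', depth=101)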
Example 2
    def __init__(self,
                 stem,
                 cells,
                 head,
                 init_channels,
                 num_classes,
                 auxiliary,
                 search,
                 aux_size=8,
                 auxiliary_layer=13,
                 drop_path_prob=0):
        """Create layers."""
        super(DartsNetwork, self).__init__()
        self.is_search = search
        self._auxiliary = auxiliary
        self.drop_path_prob = drop_path_prob
        if auxiliary:
            self._aux_size = aux_size
            self._auxiliary_layer = auxiliary_layer
        # Build the stem
        self.pre_stems = ClassFactory.get_instance(ClassType.NETWORK, stem)
        # Build the cells
        c_curr = self.pre_stems.output_channel
        self.cells_ = Cells(cells,
                            c_curr,
                            init_channels,
                            auxiliary=auxiliary,
                            auxiliary_layer=auxiliary_layer)
        # output params
        self.len_alpha = self.cells_.len_alpha
        self.num_ops = self.cells_.num_ops
        self.steps = self.cells_.steps
        c_prev, c_aux = self.cells_.output_channels()
        if not search and auxiliary:
            self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
        # head
        self.head = ClassFactory.get_instance(ClassType.NETWORK,
                                              head,
                                              base_channel=c_prev,
                                              num_classes=num_classes)

        # Initialize architecture parameters
        self.set_parameters(
            'alphas_normal',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))
        self.set_parameters(
            'alphas_reduce',
            1e-3 * ops.random_normal(self.len_alpha, self.num_ops))

        self.cell_list = self.cells_.children()
        self.name_list = []
        for tmp_cell in self.cells_.children():
            self.name_list.append(tmp_cell.__class__.__name__)
Example 3
 def value(self):
     """Get values."""
     value = self._values.get(self.key)
     if self.d_type == ClassType.NETWORK:
         if isinstance(value, str):
             cls = ClassFactory.get_cls(ClassType.NETWORK, value)
             value = cls() if self.params is None else cls(**self.params)
         else:
             if self.params:
                 value = ClassFactory.get_instance(ClassType.NETWORK, value,
                                                   **self.params)
             else:
                 value = ClassFactory.get_instance(ClassType.NETWORK, value)
     return value
Example 4
 def from_desc(cls, desc):
     """Create Model from desc."""
     desc = deepcopy(desc)
     module_groups = desc.get('modules', [])
     module_type = desc.get('type', 'Sequential')
     loss = desc.get('loss')
     if '_arch_params' in desc:
         arch_params = desc.pop('_arch_params')
         arch_type = list(arch_params.keys())[0]
         ArchParams._arch_type = arch_type
         ArchParams.update(arch_params.get(arch_type))
     modules = OrderedDict()
     for group_name in module_groups:
         module_desc = deepcopy(desc.get(group_name))
         if not module_desc:
             continue
         if 'modules' in module_desc:
             module = cls.from_desc(module_desc)
         else:
             cls_name = module_desc.get('type')
             if not ClassFactory.is_exists(ClassType.NETWORK, cls_name):
                 raise ValueError("Network {} not exists.".format(cls_name))
             module = ClassFactory.get_instance(ClassType.NETWORK,
                                                module_desc)
         modules[group_name] = module
         module.name = str(group_name)
     if not module_groups and module_type:
         model = ClassFactory.get_instance(ClassType.NETWORK, desc)
     else:
         if ClassFactory.is_exists(SearchSpaceType.CONNECTIONS,
                                   module_type):
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                module_type)
         else:
             connections = ClassFactory.get_cls(SearchSpaceType.CONNECTIONS,
                                                'Sequential')
         model = list(modules.values())[0] if len(
             modules) == 1 else connections(modules)
     if loss:
         model.add_loss(ClassFactory.get_cls(ClassType.LOSS, loss))
     return model
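For orientation, the description dict that from_desc walks over might look like the hypothetical one below; the group names and type strings are placeholders chosen for illustration and are not guaranteed to be registered in any real configuration.

# Hypothetical description dict of the shape from_desc expects: 'modules'
# lists the group names to build, and each group carries a sub-description
# whose 'type' names a registered network. All names are placeholders.
desc = {
    'type': 'Sequential',
    'modules': ['backbone', 'head'],
    'backbone': {'type': 'ResNetBackbone', 'depth': 50},
    'head': {'type': 'LinearClassifierHead', 'num_classes': 10},
    'loss': 'CrossEntropyLoss',
}
# model = Model.from_desc(desc)  # assuming from_desc is bound to a Model class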
Example 5
 def __init__(self,
              stem,
              cells,
              head,
              init_channels,
              num_classes,
              auxiliary,
              search,
              aux_size=8,
              auxiliary_layer=13,
              drop_path_prob=0):
     """Create layers."""
     super(DartsNetwork, self).__init__()
     self.is_search = search
     self._auxiliary = auxiliary
     self.drop_path_prob = drop_path_prob
     if auxiliary:
         self._aux_size = aux_size
         self._auxiliary_layer = auxiliary_layer
     # Build the stem
     self.pre_stems = ClassFactory.get_instance(ClassType.NETWORK, stem)
     # Build the cells
     c_curr = self.pre_stems.output_channel
     self.cells_ = Cells(cells,
                         c_curr,
                         init_channels,
                         auxiliary=auxiliary,
                         auxiliary_layer=auxiliary_layer)
     # output params
     self.len_alpha = self.cells_.len_alpha
     self.num_ops = self.cells_.num_ops
     self.steps = self.cells_.steps
     c_prev, c_aux = self.cells_.output_channels()
     if not search and auxiliary:
         self.auxiliary_head = AuxiliaryHead(c_aux, num_classes, aux_size)
     # head
     self.head = ClassFactory.get_instance(ClassType.NETWORK,
                                           head,
                                           base_channel=c_prev,
                                           num_classes=num_classes)
     self.build()
Example 6
 def __call__(self, model=None, distributed=False):
     """Call Optimizer class."""
     for config in self.config:
         name = config.get('model')
         sub_model = getattr(model, name)
         sub_opt = Optimizer(config)(sub_model, distributed)
         sub_lr = None
         sub_loss = None
         if config.get('lr_scheduler'):
             sub_lr = LrScheduler(
                 config=config.get('lr_scheduler'))(sub_opt)
         if config.get('loss'):
             sub_loss = ClassFactory.get_instance(ClassType.LOSS,
                                                  config.get('loss'))
         self._opts[name] = dict(opt=sub_opt,
                                 lr=sub_lr,
                                 loss=sub_loss,
                                 model=sub_model)
     return self
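The config list iterated over above presumably pairs each sub-model attribute with its own optimizer and, optionally, a learning-rate scheduler and loss. A hypothetical config of that shape, with placeholder names only:

# Hypothetical config of the shape __call__ iterates over: each entry names
# an attribute of the composite model plus optional lr_scheduler and loss.
# Every type name here is a placeholder, not a guaranteed registered class.
config = [
    {'model': 'generator', 'type': 'Adam',
     'lr_scheduler': {'type': 'StepLR'},
     'loss': 'L1Loss'},
    {'model': 'discriminator', 'type': 'SGD'},
]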
Example 7
 def _build(self, desc):
     """Build cell."""
     reduction_prev = self.C_curr == self.C_prev
     for idx, model_name in enumerate(desc.get('modules')):
         params = deepcopy(desc.get(model_name))
         if model_name == 'reduce':
             self.C_curr *= 2
             reduction = True
         else:
             reduction = False
         params['reduction_prev'] = reduction_prev
         params['C_prev_prev'] = self.C_prev_prev
         params['C_prev'] = self.C_prev
         params['C'] = self.C_curr
         reduction_prev = reduction
         model = ClassFactory.get_instance(ClassType.NETWORK, params)
         self.add_module(str(idx), model)
         concat_size = getattr(model, 'concat_size', 1)
         self.C_prev_prev, self.C_prev = self.C_prev, concat_size * self.C_curr
         if self.auxiliary and idx == self.auxiliary_layer:
             self.C_aux = self.C_prev
Example 8
 def from_desc(cls, desc):
     """Create Operator class by desc."""
     return ClassFactory.get_instance(ClassType.NETWORK, desc)
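Taken together, Example 1 passes a class name plus keyword arguments, while Examples 4, 7 and 8 pass a whole description dict whose 'type' key names the class. Building on the hypothetical SimpleFactory sketch after Example 1, a hedged illustration of how one entry point could serve both call styles (create_from_desc is an invented helper, not part of the framework):

# Hypothetical helper on top of the SimpleFactory sketch from Example 1:
# accept either a plain class name or a description dict whose 'type' key
# names the class and whose remaining keys become constructor arguments.
def create_from_desc(desc, **kwargs):
    if isinstance(desc, str):
        return SimpleFactory.create(desc, **kwargs)
    params = dict(desc)
    name = params.pop('type')
    params.update(kwargs)
    return SimpleFactory.create(name, **params)


operator = create_from_desc({'type': 'ResNetBackbone', 'depth': 34})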