Example #1
File: model.py Project: JSong-Jia/nni-1
    def __init__(self, key, prev_keys, hidden_units, choose_from_k,
                 cnn_keep_prob, lstm_keep_prob, att_keep_prob, att_mask):
        super(Layer, self).__init__(key)

        def conv_shortcut(kernel_size):
            return ConvBN(kernel_size, hidden_units, hidden_units,
                          cnn_keep_prob, False, True)

        self.n_candidates = len(prev_keys)
        if self.n_candidates:
            self.prec = mutables.InputChoice(
                choose_from=prev_keys[-choose_from_k:], n_chosen=1)
        else:
            # first layer, skip input choice
            self.prec = None
        self.op = mutables.LayerChoice([
            conv_shortcut(1),
            conv_shortcut(3),
            conv_shortcut(5),
            conv_shortcut(7),
            AvgPool(3, False, True),
            MaxPool(3, False, True),
            RNN(hidden_units, lstm_keep_prob),
            Attention(hidden_units, 4, att_keep_prob, att_mask)
        ])
        if self.n_candidates:
            self.skipconnect = mutables.InputChoice(choose_from=prev_keys)
        else:
            self.skipconnect = None
        self.bn = BatchNorm(hidden_units, False, True)
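The constructor above only declares the search space (candidate operations plus input choices); the snippet does not show how they are combined at run time. Below is a minimal sketch of what a forward pass for such a layer could look like, assuming `prev_layers` is the list of outputs of all preceding layers and that `InputChoice` exposes an `n_candidates` attribute; the method name, signature, and body are illustrative, not taken from the source.

    def forward(self, x, prev_layers):
        # x: output of the previous stage; prev_layers: outputs of all earlier layers.
        # Pick the main input: the incoming tensor for the first layer, otherwise one
        # of the last `choose_from_k` predecessors via the InputChoice.
        prec = x if self.prec is None else self.prec(prev_layers[-self.prec.n_candidates:])
        out = self.op(prec)  # apply whichever candidate operation was selected
        if self.skipconnect is not None:
            # Optional skip connections: the InputChoice combines a chosen subset of
            # earlier outputs (it may return None if nothing is chosen).
            skip = self.skipconnect(prev_layers)
            if skip is not None:
                out = out + skip
        return self.bn(out)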
Example #2
File: model.py Project: JSong-Jia/nni-1
 def __init__(self, node_id, num_prev_nodes, channels,
              num_downsample_connect):
     super().__init__()
     self.ops = nn.ModuleList()
     choice_keys = []
     for i in range(num_prev_nodes):
         stride = 2 if i < num_downsample_connect else 1
         choice_keys.append("{}_p{}".format(node_id, i))
         self.ops.append(
             mutables.LayerChoice([
                 ops.PoolBN('max', channels, 3, stride, 1, affine=False),
                 ops.PoolBN('avg', channels, 3, stride, 1, affine=False),
                 nn.Identity() if stride == 1 else ops.FactorizedReduce(
                     channels, channels, affine=False),
                 ops.SepConv(channels, channels, 3, stride, 1,
                             affine=False),
                 ops.SepConv(channels, channels, 5, stride, 2,
                             affine=False),
                 ops.DilConv(
                     channels, channels, 3, stride, 2, 2, affine=False),
                 ops.DilConv(
                     channels, channels, 5, stride, 4, 2, affine=False)
             ], key=choice_keys[-1]))
     self.drop_path = ops.DropPath()
     self.input_switch = mutables.InputChoice(
         choose_from=choice_keys,
         n_chosen=2,
         key="{}_switch".format(node_id))
Example #3
    def __init__(self, key, prev_labels, in_filters, out_filters):
        super().__init__(key)
        self.in_filters = in_filters
        self.out_filters = out_filters
        self.mutable = mutables.LayerChoice([
#             ConvBranch(in_filters, out_filters, kernel_size=3, stride=1, separable=False),
#             ConvBranch(in_filters, out_filters, kernel_size=3, stride=1, separable=True),
            ConvBranch(in_filters, out_filters, kernel_size=5, stride=1, separable=False),
#             ConvBranch(in_filters, out_filters, kernel_size=5, stride=1, separable=True),
#             ConvBranch(in_filters, out_filters, kernel_size=7, stride=1, separable=False),
#             ConvBranch(in_filters, out_filters, kernel_size=7, stride=1, separable=True),
#             ConvBranch(in_filters, out_filters, kernel_size=9, stride=1, separable=False),
            ConvBranch(in_filters, out_filters, kernel_size=41, stride=1, separable=True),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=3, stride=1, separable=False),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=3, stride=1, separable=True),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=5, stride=1, separable=False),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=5, stride=1, separable=True),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=7, stride=1, separable=False),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=7, stride=1, separable=True),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=9, stride=1, separable=False),
#             ResidualConvBranch(in_filters, out_filters, kernel_size=9, stride=1, separable=True),
#             PoolBranch('avg', in_filters, out_filters, kernel_size=3, stride=1),
#             PoolBranch('max', in_filters, out_filters, kernel_size=3, stride=1),
        ])
        if len(prev_labels) > 0:
            self.skipconnect = mutables.InputChoice(choose_from=prev_labels, n_chosen=None)
        else:
            self.skipconnect = None
        self.batch_norm = nn.BatchNorm1d(out_filters, affine=False)
Example #4
 def __init__(self, node_id, num_prev_nodes, channels, num_downsample_connect):
     super().__init__()
     self.ops = nn.ModuleList()
     choice_keys = []
     for i in range(num_prev_nodes):
         stride = 2 if i < num_downsample_connect else 1
         choice_keys.append("{}_p{}".format(node_id, i))
         self.ops.append(mutables.LayerChoice([ops.OPS[k](channels, stride, False) for k in ops.PRIMITIVES],
                                              key=choice_keys[-1]))
     self.drop_path = ops.DropPath()
     self.input_switch = mutables.InputChoice(choose_from=choice_keys, n_chosen=2, key="{}_switch".format(node_id))
Example #5
 def __init__(self, cell_name, prev_labels, channels):
     super().__init__()
     self.input_choice = mutables.InputChoice(choose_from=prev_labels, n_chosen=1, return_mask=True,
                                              key=cell_name + "_input")
     self.op_choice = mutables.LayerChoice([
         SepConvBN(channels, channels, 3, 1),
         SepConvBN(channels, channels, 5, 2),
         Pool("avg", 3, 1, 1),
         Pool("max", 3, 1, 1),
         nn.Identity()
     ], key=cell_name + "_op")
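Because `return_mask=True` is passed to the `InputChoice` here, it returns both the selected tensor and the selection mask. A minimal sketch of how such a cell could consume them in `forward` follows, assuming `prev_layers` is the list of candidate inputs named by `prev_labels`; the sketch is illustrative rather than taken from the source.

 def forward(self, prev_layers):
     # Choose one of the candidate inputs; with return_mask=True the InputChoice
     # returns the selected tensor together with the selection mask.
     chosen_input, chosen_mask = self.input_choice(prev_layers)
     # Apply whichever operation was selected for this cell to the chosen input.
     out = self.op_choice(chosen_input)
     # Returning the mask lets the surrounding network track which input was used.
     return out, chosen_mask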
Example #6
 def __init__(self, key, prev_labels, out_shape):
     super().__init__(key)
     self.mutable = mutables.LayerChoice([
         block1(out_shape),
         block2(out_shape),
         block3(out_shape),
         block4(out_shape),
         block5(out_shape),
     ])
     if len(prev_labels) > 0:
         self.skipconnect = mutables.InputChoice(choose_from=prev_labels,
                                                 n_chosen=None)
     else:
         self.skipconnect = None
     self.batch_norm = nn.BatchNorm1d(out_shape)
Example #7
 def __init__(self, node_id, num_prev_nodes, out_shape):
     super().__init__()
     self.ops = nn.ModuleList()
     choice_keys = []
     for i in range(num_prev_nodes):
         choice_keys.append("{}_p{}".format(node_id, i))
         self.ops.append(
             mutables.LayerChoice(OrderedDict([
                 ("block1", block1(out_shape)),
                 ("block2", block2(out_shape)),
                 ("block3", block3(out_shape)),
                 ("block4", block4(out_shape)),
                 ("block5", block5(out_shape))
             ]), key=choice_keys[-1]))
     self.input_switch = mutables.InputChoice(choose_from=choice_keys, n_chosen=2, key="{}_switch".format(node_id))
Example #8
def _setup_forward_callback_submodule(
    deepcv_module: 'deepcv.meta.base_module.DeepcvModule', subm_name: str,
    submodule_params: Dict[str, Any],
    forward_callback_module: 'deepcv.submodule_creators.ForwardCallbackSubmodule'
) -> Tuple[str, Optional[torch.nn.Module]]:
    """ Specfic model definition logic for submodules based on forward pass callbacks (`deepcv.meta.submodule_creators.ForwardCallbackSubmodule` submodule instances are handled sperately allowing output tensor (residual/dense) references and NNI NAS Mutable InputChoice support).
    Allows referencing other submodule(s) output tensor(s) (`yaml_tokens.FROM` usage) and NNI NAS Mutable InputChoice (`yaml_tokens.FROM_NAS_INPUT_CHOICE` usage).
    """
    deepcv_module._uses_forward_callback_submodules = True
    # yaml_tokens.FROM_NAS_INPUT_CHOICE occurrences in `submodule_params` are handled like yaml_tokens.FROM entries: an nni_mutables.InputChoice over the references, plus an optional 'n_chosen' parameter (None by default; should be an integer between 1 and the number of candidates)
    if yaml_tokens.FROM_NAS_INPUT_CHOICE in submodule_params:
        deepcv_module._uses_nni_nas_mutables = True
        n_chosen = submodule_params.get(yaml_tokens.FROM_NAS_INPUT_N_CHOSEN, None)
        n_candidates = len(submodule_params[yaml_tokens.FROM_NAS_INPUT_CHOICE])
        mask = submodule_params.get(yaml_tokens.NAS_MUTABLE_RETURN_MASK, False)
        forward_callback_module.mutable_input_choice = nni_mutables.InputChoice(
            n_candidates=n_candidates,
            n_chosen=n_chosen,
            return_mask=mask,
            key=subm_name,
            reduction='none')

        if yaml_tokens.FROM in submodule_params:
            raise ValueError(
                f'Error: Cannot specify both "{yaml_tokens.FROM}" and "{yaml_tokens.FROM_NAS_INPUT_CHOICE}" in the same submodule '
                '(choose either NNI NAS mutable InputChoice candidate reference(s) or regular tensor reference(s)).')
    elif yaml_tokens.NAS_MUTABLE_RETURN_MASK in submodule_params or yaml_tokens.FROM_NAS_INPUT_N_CHOSEN in submodule_params:
        raise ValueError(
            f'Error: Cannot specify "{yaml_tokens.NAS_MUTABLE_RETURN_MASK}" or "{yaml_tokens.FROM_NAS_INPUT_N_CHOSEN}" without "{yaml_tokens.FROM_NAS_INPUT_CHOICE}" '
            f'("{yaml_tokens.NAS_MUTABLE_RETURN_MASK}" and "{yaml_tokens.FROM_NAS_INPUT_N_CHOSEN}" are optional parameters reserved for NNI NAS InputChoice usage).')

    # Store any sub-module name/label references (the referenced submodules' output features are saved during the model's forward pass so they can be reused later in a forward callback, e.g. for residual links)
    if yaml_tokens.FROM in submodule_params or yaml_tokens.FROM_NAS_INPUT_CHOICE in submodule_params:
        # Allow multiple referenced sub-module(s) (`yaml_tokens.FROM` entry can either be a list/tuple of referenced sub-modules name/label or a single sub-module name/label)
        tensor_references = submodule_params[yaml_tokens.FROM] if yaml_tokens.FROM in submodule_params \
            else submodule_params[yaml_tokens.FROM_NAS_INPUT_CHOICE]
        forward_callback_module.referenced_submodules = [tensor_references] if isinstance(tensor_references, str) \
            else list(tensor_references)
Example #9
File: darts_cell.py Project: zctt00/nni
    def __init__(self, node_id, num_prev_nodes, channels,
                 num_downsample_connect):
        """
        builtin Darts Node structure

        Parameters
        ---
        node_id: str
        num_prev_nodes: int
            the number of previous nodes in this cell
        channels: int
            output channels
        num_downsample_connect: int
            downsample the input node if this cell is reduction cell
        """
        super().__init__()
        self.ops = nn.ModuleList()
        choice_keys = []
        for i in range(num_prev_nodes):
            stride = 2 if i < num_downsample_connect else 1
            choice_keys.append("{}_p{}".format(node_id, i))
            self.ops.append(
                mutables.LayerChoice(OrderedDict([
                    ("maxpool",
                     PoolBN('max', channels, 3, stride, 1, affine=False)),
                    ("avgpool",
                     PoolBN('avg', channels, 3, stride, 1, affine=False)),
                    ("skipconnect", nn.Identity() if stride == 1 else
                     FactorizedReduce(channels, channels, affine=False)),
                    ("sepconv3x3",
                     SepConv(channels, channels, 3, stride, 1, affine=False)),
                    ("sepconv5x5",
                     SepConv(channels, channels, 5, stride, 2, affine=False)),
                    ("dilconv3x3",
                     DilConv(channels, channels, 3, stride, 2, 2,
                             affine=False)),
                    ("dilconv5x5",
                     DilConv(channels, channels, 5, stride, 4, 2,
                             affine=False))
                ]), key=choice_keys[-1]))
        self.drop_path = DropPath()
        self.input_switch = mutables.InputChoice(
            choose_from=choice_keys,
            n_chosen=2,
            key="{}_switch".format(node_id))
Example #10
 def __init__(self, key, prev_labels, in_filters, out_filters):
     super().__init__(key)
     self.in_filters = in_filters
     self.out_filters = out_filters
     self.mutable = mutables.LayerChoice([
         ConvBranch(in_filters, out_filters, 3, 1, 1, separable=False),
         ConvBranch(in_filters, out_filters, 3, 1, 1, separable=True),
         ConvBranch(in_filters, out_filters, 5, 1, 2, separable=False),
         ConvBranch(in_filters, out_filters, 5, 1, 2, separable=True),
         PoolBranch('avg', in_filters, out_filters, 3, 1, 1),
         PoolBranch('max', in_filters, out_filters, 3, 1, 1)
     ])
     if len(prev_labels) > 0:
         self.skipconnect = mutables.InputChoice(choose_from=prev_labels, n_chosen=None)
     else:
         self.skipconnect = None
     self.batch_norm = nn.BatchNorm2d(out_filters, affine=False)
Example #11
    def __init__(self, node_id, num_prev_nodes, channels, num_downsample_connect):
        '''
        Node("{}_n{}".format("reduce" if reduction else "normal", depth),
             depth, channels, 2 if reduction else 0)
        num_prev_nodes: number of previous nodes
        '''

        super().__init__()
        self.ops = nn.ModuleList()
        choice_keys = []  # record the key of each node+edge combination

        for i in range(num_prev_nodes):  # enumerate the previous nodes
            stride = 2 if i < num_downsample_connect else 1
            # set the stride uniformly:
            # stride=2 for a reduction cell,
            # stride=1 for a normal cell
            choice_keys.append("{}_p{}".format(node_id, i))

            self.ops.append(
                mutables.LayerChoice(OrderedDict([
                    ("maxpool", ops.PoolBN('max', channels, 3, stride, 1, affine=False)),
                    ("avgpool", ops.PoolBN('avg', channels, 3, stride, 1, affine=False)),
                    ("skipconnect", nn.Identity() if stride == 1 else ops.FactorizedReduce(
                        channels, channels, affine=False)),
                    ("sepconv3x3", ops.SepConv(channels,
                                               channels, 3, stride, 1, affine=False)),
                    ("sepconv5x5", ops.SepConv(channels,
                                               channels, 5, stride, 2, affine=False)),
                    ("dilconv3x3", ops.DilConv(channels,
                                               channels, 3, stride, 2, 2, affine=False)),
                    ("dilconv5x5", ops.DilConv(channels,
                                               channels, 5, stride, 4, 2, affine=False))
                ]), key=choice_keys[-1]))

        self.drop_path = ops.DropPath()  # drop path with probability 0.2

        self.input_switch = mutables.InputChoice(  # controls which inputs are connected; choice_keys are kept for this purpose
            choose_from=choice_keys, n_chosen=2, key="{}_switch".format(node_id))