Example #1
    def __init__(self, args, classes, layers, nodes, channels, keep_prob,
                 drop_path_keep_prob, use_aux_head, steps, arch):
        super(NASNetworkCIFAR, self).__init__()
        self.args = args
        self.classes = classes
        self.layers = layers
        self.nodes = nodes
        self.channels = channels
        self.keep_prob = keep_prob
        self.drop_path_keep_prob = drop_path_keep_prob
        self.use_aux_head = use_aux_head
        self.steps = steps
        # arch may arrive as a string literal of [conv_arch, reduc_arch];
        # if it is already a list, eval() raises and we keep it as-is
        try:
            arch = eval(arch)
        except (NameError, SyntaxError, TypeError):
            pass
        # flatten the two cell encodings, then split them by node count
        arch = arch[0] + arch[1]
        self.conv_arch = arch[:4 * self.nodes]
        self.reduc_arch = arch[4 * self.nodes:]

        self.pool_layers = [self.layers, 2 * self.layers + 1]
        self.layers = self.layers * 3
        self.multi_adds = 0

        if self.use_aux_head:
            self.aux_head_index = self.pool_layers[-1]  # + 1
        stem_multiplier = 3
        channels = stem_multiplier * self.channels
        self.stem = nn.Sequential(
            nn.Conv2d(3, channels, 3, padding=1, bias=False),
            nn.BatchNorm2d(channels))
        outs = [[32, 32, channels], [32, 32, channels]]
        # multiply-adds of the 3x3 stem conv:
        # 3*3 kernel * 3 input channels * `channels` output channels * 32*32 positions
        self.multi_adds += 3 * 3 * 3 * channels * 32 * 32
        channels = self.channels
        self.cells = nn.ModuleList()
        for i in range(self.layers + 2):
            if i not in self.pool_layers:
                cell = Cell(self.conv_arch, outs, channels, False, i,
                            self.layers + 2, self.steps,
                            self.drop_path_keep_prob)
            else:
                channels *= 2
                cell = Cell(self.reduc_arch, outs, channels, True, i,
                            self.layers + 2, self.steps,
                            self.drop_path_keep_prob)
            self.multi_adds += cell.multi_adds
            self.cells.append(cell)
            outs = [outs[-1], cell.out_shape]

            if self.use_aux_head and i == self.aux_head_index:
                self.auxiliary_head = AuxHeadCIFAR(outs[-1][-1], classes)

        self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.dropout = nn.Dropout(1 - self.keep_prob)
        self.classifier = nn.Linear(outs[-1][-1], classes)

        self.init_parameters()
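
A minimal standalone sketch of the arch handling in Example #1, assuming a hypothetical 5-node encoding passed as a Python-literal string (the values are placeholders, not a real architecture):

nodes = 5
# hypothetical arch string: a literal holding [conv_arch, reduc_arch]
arch = str([[0] * (4 * nodes), [1] * (4 * nodes)])
try:
    arch = eval(arch)
except (NameError, SyntaxError, TypeError):
    pass
arch = arch[0] + arch[1]
conv_arch, reduc_arch = arch[:4 * nodes], arch[4 * nodes:]
print(len(conv_arch), len(reduc_arch))  # 20 20
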
Example #2
    def __init__(self, args, classes, layers, nodes, channels, keep_prob,
                 drop_path_keep_prob, use_aux_head, steps, arch):
        super(NASNetworkMNIST, self).__init__()
        self.args = args
        self.classes = classes
        self.layers = layers
        self.nodes = nodes
        self.channels = channels
        self.keep_prob = keep_prob
        self.drop_path_keep_prob = drop_path_keep_prob
        self.use_aux_head = use_aux_head
        self.steps = steps
        # arch may be given either as a flat space-separated string or as a
        # pair [conv_arch, reduc_arch] of lists
        if isinstance(arch, str):
            arch = list(map(int, arch.strip().split()))
        elif isinstance(arch, list) and len(arch) == 2:
            arch = arch[0] + arch[1]
        self.conv_arch = arch[:4 * self.nodes]
        self.reduc_arch = arch[4 * self.nodes:]

        self.pool_layers = [self.layers, 2 * self.layers + 1]
        self.layers = self.layers * 3

        if self.use_aux_head:
            self.aux_head_index = self.pool_layers[-1]  #+ 1
        stem_multiplier = 3
        channels = stem_multiplier * self.channels
        self.stem = nn.Sequential(
            nn.Conv2d(1, channels, 3, padding=1, bias=False),
            nn.BatchNorm2d(channels))
        outs = [[28, 28, channels], [28, 28, channels]]
        channels = self.channels
        self.cells = nn.ModuleList()
        for i in range(self.layers + 2):
            if i not in self.pool_layers:
                cell = Cell(self.conv_arch, outs, channels, False, i,
                            self.layers + 2, self.steps,
                            self.drop_path_keep_prob)
            else:
                channels *= 2
                cell = Cell(self.reduc_arch, outs, channels, True, i,
                            self.layers + 2, self.steps,
                            self.drop_path_keep_prob)
            self.cells.append(cell)
            outs = [outs[-1], cell.out_shape]

            if self.use_aux_head and i == self.aux_head_index:
                # note: the CIFAR auxiliary head is reused for the MNIST network
                self.auxiliary_head = AuxHeadCIFAR(outs[-1][-1], classes)

        self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.dropout = nn.Dropout(1 - self.keep_prob)
        self.classifier = nn.Linear(outs[-1][-1], classes)

        self.init_parameters()
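
For comparison, a standalone sketch of the safer string parsing used in Example #2, again with a hypothetical 5-node placeholder encoding:

nodes = 5
arch = " ".join(["0"] * (8 * nodes))      # hypothetical flat, space-separated encoding
if isinstance(arch, str):
    arch = list(map(int, arch.strip().split()))
elif isinstance(arch, list) and len(arch) == 2:
    arch = arch[0] + arch[1]
conv_arch, reduc_arch = arch[:4 * nodes], arch[4 * nodes:]
print(len(conv_arch), len(reduc_arch))    # 20 20
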
Example #3
    def __init__(self, args, classes, layers, nodes, channels, keep_prob,
                 drop_path_keep_prob, use_aux_head, steps):
        super(NASWSNetworkCIFAR, self).__init__()
        self.args = args
        self.search_space = args.search_space
        self.classes = classes
        self.layers = layers
        self.nodes = nodes
        self.channels = channels
        self.keep_prob = keep_prob
        self.drop_path_keep_prob = drop_path_keep_prob
        self.use_aux_head = use_aux_head
        self.steps = steps

        self.pool_layers = [self.layers, 2 * self.layers + 1]
        self.total_layers = self.layers * 3 + 2

        if self.use_aux_head:
            self.aux_head_index = self.pool_layers[-1]
        stem_multiplier = 3
        channels = stem_multiplier * self.channels
        self.stem = nn.Sequential(
            nn.Conv2d(3, channels, 3, padding=1, bias=False),
            nn.BatchNorm2d(channels))
        outs = [[32, 32, channels], [32, 32, channels]]
        channels = self.channels
        self.cells = nn.ModuleList()
        for i in range(self.total_layers):
            # normal cell
            if i not in self.pool_layers:
                cell = Cell(self.search_space, outs, self.nodes, channels,
                            False, i, self.total_layers, self.steps,
                            self.drop_path_keep_prob)
            # reduction cell
            else:
                channels *= 2
                cell = Cell(self.search_space, outs, self.nodes, channels,
                            True, i, self.total_layers, self.steps,
                            self.drop_path_keep_prob)
            self.cells.append(cell)
            outs = [outs[-1], cell.out_shape]

            if self.use_aux_head and i == self.aux_head_index:
                self.auxiliary_head = AuxHeadCIFAR(outs[-1][-1], classes)

        self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.dropout = nn.Dropout(1 - self.keep_prob)
        self.classifier = nn.Linear(outs[-1][-1], classes)

        self.init_parameters()
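
A small sketch of the cell-index arithmetic in Example #3, showing which of the total_layers cells become reduction cells; layers = 6 is an illustrative value, not taken from the original code:

layers = 6                              # hypothetical setting
pool_layers = [layers, 2 * layers + 1]  # [6, 13]
total_layers = layers * 3 + 2           # 20
kinds = ["reduction" if i in pool_layers else "normal" for i in range(total_layers)]
print(pool_layers, total_layers, kinds.count("reduction"))  # [6, 13] 20 2
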
Example #4
    def __init__(self, classes, reduce_distance, num_nodes, channels,
                 keep_prob, drop_path_keep_prob, use_aux_head, steps, arch):
        super(NASNetworkCIFAR, self).__init__()
        self.classes = classes
        self.reduce_distance = reduce_distance
        self.num_nodes = num_nodes
        self.channels = channels
        self.keep_prob = keep_prob
        self.drop_path_keep_prob = drop_path_keep_prob
        self.use_aux_head = use_aux_head
        self.steps = steps

        if isinstance(arch, str):
            arch = list(map(int, arch.strip().split()))
        elif isinstance(arch, list) and len(arch) == 2:
            arch = arch[0] + arch[1]

        self.normal_arch = arch[:4 * self.num_nodes]
        self.reduce_arch = arch[4 * self.num_nodes:]

        # Why two reduction cells here and not three?
        # With reduce_distance = 6: cells 0-5, 6, 7-12, 13, 14-19, 20 => [6, 13, 20]
        # Three reduction cells did not work well, so fall back to two, as in the paper:
        # the third index (3 * reduce_distance + 2) equals num_cells, so the build
        # loop below never reaches it and only two reduction cells are created.
        self.reduce_cells_idx = [
            self.reduce_distance, 2 * self.reduce_distance + 1,
            3 * reduce_distance + 2
        ]
        self.num_cells = self.reduce_distance * 3 + 2  # 3 * reduce_distance normal cells + 2 reduce cells

        if self.use_aux_head:
            # attach the auxiliary head after the last reduction cell that is
            # actually built (reduce_cells_idx[-1] lies outside the build loop)
            self.aux_head_index = self.reduce_cells_idx[1]

        # stem_multiplier = 3  # Why is a stem layer needed, and why multiply the channels rather than split them?
        # self.channels = self.channels * stem_multiplier
        self.stem = keras.Sequential([
            layers.Conv2D(filters=self.channels,
                          kernel_size=3,
                          padding='same',
                          use_bias=False),
            # in_channels=3, out_channels=self.channels
            layers.BatchNormalization(axis=-1)
        ])

        out_shapes = [[32, 32, self.channels],
                      [32, 32, self.channels]]  # after stem layer
        self.cells = []
        for i in range(self.num_cells):
            if i in self.reduce_cells_idx:
                self.channels *= 2
                cell = Cell(self.reduce_arch, out_shapes, self.channels, True,
                            i, self.num_cells, self.steps,
                            self.drop_path_keep_prob)
            else:
                cell = Cell(self.normal_arch, out_shapes, self.channels, False,
                            i, self.num_cells, self.steps,
                            self.drop_path_keep_prob)
            self.cells.append(cell)
            out_shapes = [out_shapes[-1], cell.out_shape]

            if self.use_aux_head and i == self.aux_head_index:
                self.auxiliary_head = AuxHeadCIFAR(out_shapes[-1][-1], classes)

        # self.global_pooling = nn.AdaptiveAvgPool2d(1)
        self.global_pooling = layers.AveragePooling2D(
            pool_size=(out_shapes[-1][-3], out_shapes[-1][-2]))
        self.dropout = layers.Dropout(1 - self.keep_prob)
        self.classifier = layers.Dense(
            classes)  # maps out_shapes[-1][-1] channels to `classes` logits
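
A standalone sketch of the reduction-cell bookkeeping in Example #4, using reduce_distance = 6 (the value implied by the comment above): the third index equals num_cells, so the build loop only ever creates two reduction cells.

reduce_distance = 6
reduce_cells_idx = [reduce_distance,
                    2 * reduce_distance + 1,
                    3 * reduce_distance + 2]  # [6, 13, 20]
num_cells = reduce_distance * 3 + 2           # 20
built = ["reduce" if i in reduce_cells_idx else "normal" for i in range(num_cells)]
print(built.count("reduce"))                  # 2 -- index 20 is never reached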