Example #1
class SearchedNet(Layer):
    def __init__(self,
                 gene,
                 in_channels,
                 init_node_c,
                 out_channels,
                 depth,
                 n_nodes,
                 drop_rate=0):
        '''
        gene: Genotype, the searched cell architecture.
        in_channels: Number of input image channels (e.g., 3 for RGB).
        init_node_c: Initial number of output channels per node.
        out_channels: Number of target classes.
        depth: Number of cells.
        n_nodes: Number of nodes in each cell.
        drop_rate: Dropout rate.
        '''
        super().__init__()
        stem_c = min(in_channels, n_nodes) * init_node_c  # stem out_channels
        self.stem = Sequential(
            Conv2D(in_channels,
                   stem_c,
                   3,
                   padding=1,
                   param_attr=ParamAttr(initializer=MSRAInitializer()),
                   bias_attr=False), BatchNorm(stem_c))
        c0 = c1 = stem_c
        node_c = init_node_c  # node out_channels
        self.cells = LayerList()
        reduction_prev = False
        reduce_layers = [depth // 3, 2 * depth // 3]
        for i in range(depth):
            if i in reduce_layers:
                node_c *= 2
                reduction = True
            else:
                reduction = False
            cell = SearchedCell(gene, n_nodes, c0, c1, node_c, reduction,
                                reduction_prev, drop_rate)
            reduction_prev = reduction
            self.cells.append(cell)
            c0, c1 = c1, cell.out_channels

        self.global_pooling = Pool2D(pool_type='avg', global_pooling=True)
        self.classifier = Linear(
            input_dim=c1,
            output_dim=out_channels,
            param_attr=ParamAttr(initializer=MSRAInitializer()),
            bias_attr=ParamAttr(initializer=MSRAInitializer()))

    def forward(self, x):
        x0 = x1 = self.stem(x)
        for i, cell in enumerate(self.cells):
            x0, x1 = x1, cell(x0, x1)
        out = self.global_pooling(x1)
        out = fluid.layers.squeeze(out, axes=[-1, -2])
        y = self.classifier(out)
        return y
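A minimal usage sketch for SearchedNet, assuming the PaddlePaddle 1.x dygraph API; `gene` must be a Genotype produced by a prior search run, and the CIFAR-10-style sizes below are illustrative assumptions, not values from the original:

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    # `gene` is assumed to come from the search phase (see SearchedCell below).
    model = SearchedNet(gene=gene, in_channels=3, init_node_c=16,
                        out_channels=10, depth=8, n_nodes=4, drop_rate=0.2)
    images = fluid.dygraph.to_variable(
        np.random.rand(2, 3, 32, 32).astype('float32'))
    logits = model(images)  # shape [2, 10]: one row of class scores per image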
Example #2
class KernelNet(Layer):
    def __init__(self, in_channels, init_node_c, out_channels, depth, n_nodes):
        '''
        in_channels: Number of input image channels (e.g., 3 for RGB).
        init_node_c: Initial number of output channels per node.
        out_channels: Number of target classes.
        depth: Number of cells.
        n_nodes: Number of nodes in each cell.
        '''
        super().__init__()
        stem_c = min(in_channels, n_nodes) * init_node_c # stem out_channels
        self.stem = Sequential(
          Conv2D(in_channels, stem_c, 3, padding=1, 
                 param_attr=ParamAttr(initializer=MSRAInitializer()), 
                 bias_attr=False),
          BatchNorm(stem_c)
        )
        c0 = c1 = stem_c
        node_c = init_node_c # node out_channels
        self.cells = LayerList()
        reduction_prev = False
        reduce_layers = [depth//3, 2*depth//3]
        for i in range(depth):
            if i in reduce_layers:
                node_c *= 2
                reduction = True
            else:
                reduction = False
            cell = Cell(n_nodes, c0, c1, node_c, reduction, reduction_prev)
            reduction_prev = reduction
            self.cells.append(cell)
            c0, c1 = c1, cell.out_channels

        self.global_pooling = Pool2D(pool_type='avg', global_pooling=True)
        self.classifier = Linear(input_dim=c1,
                                 output_dim=out_channels,
                                 param_attr=ParamAttr(initializer=MSRAInitializer()),
                                 bias_attr=ParamAttr(initializer=MSRAInitializer()))

    def forward(self, x, alphas_normal, alphas_reduce):
        x0 = x1 = self.stem(x)
        for i, cell in enumerate(self.cells):
            if cell.reduction:
                alphas = fluid.layers.softmax(alphas_reduce)
            else:
                alphas = fluid.layers.softmax(alphas_normal)
            x0, x1 = x1, cell(x0, x1, alphas)
        out = self.global_pooling(x1)
        out = fluid.layers.squeeze(out, axes=[-1,-2])
        y = self.classifier(out)
        return y
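A usage sketch for the search-phase supernet, assuming `PRIMITIVES` is importable from the same module; real training would register the alphas as learnable parameters rather than building them ad hoc as below:

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    model = KernelNet(in_channels=3, init_node_c=16, out_channels=10,
                      depth=8, n_nodes=4)
    # One alpha row per edge: a 4-node cell has 2+3+4+5 = 14 edges,
    # with one column per candidate op in PRIMITIVES.
    n_edges = sum(2 + i for i in range(4))
    alphas_normal = fluid.dygraph.to_variable(
        1e-3 * np.random.randn(n_edges, len(PRIMITIVES)).astype('float32'))
    alphas_reduce = fluid.dygraph.to_variable(
        1e-3 * np.random.randn(n_edges, len(PRIMITIVES)).astype('float32'))
    x = fluid.dygraph.to_variable(
        np.random.rand(2, 3, 32, 32).astype('float32'))
    logits = model(x, alphas_normal, alphas_reduce)  # shape [2, 10]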
Example #3
class Cell(Layer):
    def __init__(self, n_nodes, c0, c1, node_c, reduction, reduction_prev):
        '''
        n_nodes: Number of nodes in the cell.
        c0, c1: Input channels of the two cell inputs.
        node_c: Number of output channels per node.
        reduction: If True, this is a reduction cell; otherwise a normal cell.
        reduction_prev: If True, the preceding cell is a reduction cell.
        '''
        super().__init__()
        self.reduction = reduction

        if reduction_prev:
            self.preprocess0 = FactorizedReduce(c0, node_c, affine=False)
        else:
            self.preprocess0 = ReLUConvBN(c0, node_c, 1, 1, 0, affine=False)
        self.preprocess1 = ReLUConvBN(c1, node_c, 1, 1, 0, affine=False)

        self.n_nodes = n_nodes
        self.node_c = node_c

        self._ops = LayerList()
        for i in range(self.n_nodes):
            for j in range(2 + i):
                stride = 2 if reduction and j < 2 else 1
                op = MixedOp(node_c, stride)
                self._ops.append(op)

    @property
    def out_channels(self):
        return self.n_nodes * self.node_c

    def forward(self, x0, x1, alphas):
        '''
        x0, x1: Inputs to a cell
        alphas: alpha_reduce or alpha_normal
        '''
        x0 = self.preprocess0(x0)
        x1 = self.preprocess1(x1)
        xs = [x0, x1]
        i = 0
        for node in range(self.n_nodes):
            outputs = []
            for x in xs:
                outputs.append(self._ops[i](x, alphas[i]))
                i += 1
            xs.append(sum(outputs))
        return fluid.layers.concat(xs[-self.n_nodes:], axis=1)
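A standalone sketch of one searchable cell, again assuming `PRIMITIVES` from the same module; it mainly illustrates the edge count and the concatenated output width:

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    # 4 nodes -> 2+3+4+5 = 14 MixedOp edges, so alphas needs 14 rows.
    cell = Cell(n_nodes=4, c0=48, c1=48, node_c=16,
                reduction=False, reduction_prev=False)
    alphas = fluid.layers.softmax(fluid.dygraph.to_variable(
        np.random.randn(14, len(PRIMITIVES)).astype('float32')))
    x0 = fluid.dygraph.to_variable(
        np.random.rand(2, 48, 32, 32).astype('float32'))
    x1 = fluid.dygraph.to_variable(
        np.random.rand(2, 48, 32, 32).astype('float32'))
    out = cell(x0, x1, alphas)  # channel dim = cell.out_channels = 4 * 16 = 64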
Example #4
class MixedOp(Layer):
    def __init__(self, channels, stride):
        '''
        channels: in_channels == out_channels for MixedOp
        '''
        super().__init__()
        self._ops = LayerList()
        for prim_op in PRIMITIVES:
            op = OPS[prim_op](channels, stride, False)
            if 'pool' in prim_op:
                gamma, beta = bn_param_config()
                bn = BatchNorm(channels, param_attr=gamma, bias_attr=beta)
                op = Sequential(op, bn)
            self._ops.append(op)

    def forward(self, x, alphas):
        '''
        alphas: one row of alpha_reduce or alpha_normal
        '''
        return sum(alphas[i] * op(x) for i, op in enumerate(self._ops))
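A sketch of a single mixed op, with `PRIMITIVES` assumed to be importable; the forward pass is just the alpha-weighted sum of every candidate op applied to the same input:

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    op = MixedOp(channels=16, stride=1)
    # One softmaxed weight per candidate primitive.
    alphas = fluid.layers.softmax(fluid.dygraph.to_variable(
        np.random.randn(len(PRIMITIVES)).astype('float32')))
    x = fluid.dygraph.to_variable(
        np.random.rand(2, 16, 32, 32).astype('float32'))
    y = op(x, alphas)  # same shape as x, since stride == 1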
Example #5
class SearchedCell(Layer):
    def __init__(self,
                 gene,
                 n_nodes,
                 c0,
                 c1,
                 node_c,
                 reduction,
                 reduction_prev,
                 drop_rate=0):
        '''
        gene: Genotype, the searched cell architecture.
        n_nodes: Number of nodes in the cell.
        c0, c1: Input channels of the two cell inputs.
        node_c: Number of output channels per node.
        reduction: If True, this is a reduction cell; otherwise a normal cell.
        reduction_prev: If True, the preceding cell is a reduction cell.
        drop_rate: Dropout rate.
        '''
        super().__init__()
        self.n_nodes = n_nodes
        self.node_c = node_c
        self.drop_rate = drop_rate
        self.genolist = gene.reduce if reduction else gene.normal

        if reduction_prev:
            self.preprocess0 = FactorizedReduce(c0, node_c)
        else:
            self.preprocess0 = ReLUConvBN(c0, node_c, 1, 1, 0)
        self.preprocess1 = ReLUConvBN(c1, node_c, 1, 1, 0)

        self._ops = LayerList([
            OPS[i[0]](C=node_c,
                      stride=2 if reduction and i[1] < 2 else 1,
                      affine=True) for i in self.genolist
        ])

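A construction sketch with a hand-written toy genotype. Only gene.normal and gene.reduce are read by the class above; the namedtuple layout, the concat fields, and the op names are assumptions that must match the module's OPS registry:

from collections import namedtuple

import paddle.fluid as fluid

Genotype = namedtuple('Genotype', 'normal normal_concat reduce reduce_concat')

# Toy 4-node genotype: two (op_name, input_index) pairs per node.
gene = Genotype(
    normal=[('sep_conv_3x3', 0), ('skip_connect', 1)] * 4,
    normal_concat=list(range(2, 6)),
    reduce=[('max_pool_3x3', 0), ('skip_connect', 1)] * 4,
    reduce_concat=list(range(2, 6)))

with fluid.dygraph.guard():
    cell = SearchedCell(gene, n_nodes=4, c0=48, c1=48, node_c=16,
                        reduction=False, reduction_prev=False, drop_rate=0.2)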