Example 1
def create_new_layer(input_shape, n_dim):
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)

    elif layer_class == get_conv_class(n_dim):
        new_layer = layer_class(input_shape[-1],
                                input_shape[-1],
                                sample((1, 3, 5), 1)[0],
                                stride=1)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])

    else:
        new_layer = layer_class()

    return new_layer
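
A hedged usage sketch for the helper above. It assumes the function is defined next to the legacy AutoKeras (0.x) stub-layer helpers and Python's random.sample; the import paths shown in the comments are assumptions, not verified here.

# Assumed imports for the snippet above (legacy AutoKeras 0.x layout):
#   from random import sample
#   from autokeras.constant import Constant
#   from autokeras.nn.layers import (StubDense, StubReLU, get_batch_norm_class,
#                                     get_conv_class, get_dropout_class,
#                                     get_pooling_class)

# A 1-D input shape draws from the dense-side candidates
# (StubDense, dropout, or ReLU).
dense_layer = create_new_layer(input_shape=(128,), n_dim=2)

# A 3-D input shape (height, width, channels) draws from the conv-side
# candidates (conv, batch-norm, or ReLU).
conv_layer = create_new_layer(input_shape=(32, 32, 3), n_dim=2)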
Example 2
 def __init__(self, n_output_node, input_shape):
     super(MobileNetV2Generator, self).__init__(n_output_node, input_shape)
       
     """ configuration for complete net:
     self.cfg = [(1,  16, 1, 1),
        (6,  24, 2, 1) ,  # NOTE: change stride 2 -> 1 for CIFAR10
        (6,  32, 3, 2),
        (6,  64, 4, 2),
        (6,  96, 3, 1),
        (6, 160, 3, 2),
        (6, 320, 1, 1)]
     """
     
     # We try a smaller net configuration (so AutoKeras will be able to expand the net).
     # Each tuple is presumably (expansion, out_planes, num_blocks, stride).
     self.cfg = [(1,  16, 1, 1),
                 (6,  24, 2, 1)]  # NOTE: change stride 2 -> 1 for CIFAR10
     #           (6,  32, 3, 2),
     #           (6,  64, 4, 2),
     #           (6,  96, 3, 1),
     #           (6, 160, 3, 2),
     #           (6, 320, 1, 1)]
     
     self.in_planes = 32
     self.block_expansion = 1
     self.n_dim = len(self.input_shape) - 1
     
     if len(self.input_shape) > 4:
         raise ValueError('The input dimension is too high.')
     elif len(self.input_shape) < 2:
         raise ValueError('The input dimension is too low.')
     self.conv = get_conv_class(self.n_dim)
     self.dropout = get_dropout_class(self.n_dim)
     self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.adaptive_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.batch_norm = get_batch_norm_class(self.n_dim)
Example 3
 def __init__(self, n_output_node, input_shape):
     super(CnnGenerator, self).__init__(n_output_node, input_shape)
     self.n_dim = len(self.input_shape) - 1
     if len(self.input_shape) > 4:
         raise ValueError('The input dimension is too high.')
     if len(self.input_shape) < 2:
         raise ValueError('The input dimension is too low.')
     self.conv = get_conv_class(self.n_dim)
     self.dropout = get_dropout_class(self.n_dim)
     self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.pooling = get_pooling_class(self.n_dim)
     self.batch_norm = get_batch_norm_class(self.n_dim)
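
For context, a minimal instantiation sketch. It assumes the legacy AutoKeras 0.x API in which generators expose a generate() method returning a stub-layer graph; the import path and the generate(model_len, model_width) signature are assumptions, not verified here.

# Hedged sketch, assuming legacy AutoKeras 0.x:
from autokeras.nn.generator import CnnGenerator

generator = CnnGenerator(n_output_node=10, input_shape=(32, 32, 3))
graph = generator.generate(model_len=6, model_width=64)  # stub-layer graph to be trained/mutated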
Example 4
def create_new_layer(layer, n_dim):
    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [get_conv_class(n_dim), get_batch_norm_class(n_dim), StubReLU]
    if is_layer(layer, LayerType.RELU):
        conv_deeper_classes = [get_conv_class(n_dim), get_batch_norm_class(n_dim)]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, LayerType.DROPOUT):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, LayerType.BATCH_NORM):
        conv_deeper_classes = [get_conv_class(n_dim), StubReLU]

    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)

    elif layer_class == get_conv_class(n_dim):
        new_layer = layer_class(input_shape[-1], input_shape[-1], sample((1, 3, 5), 1)[0], stride=1)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])

    else:
        new_layer = layer_class()

    return new_layer
Example 5
 def __init__(self, n_output_node, input_shape):
     super(ResNetGenerator, self).__init__(n_output_node, input_shape)
     # self.layers = [2, 2, 2, 2]
     self.in_planes = 64
     self.block_expansion = 1
     self.n_dim = len(self.input_shape) - 1
     if len(self.input_shape) > 4:
         raise ValueError('The input dimension is too high.')
     elif len(self.input_shape) < 2:
         raise ValueError('The input dimension is too low.')
     self.conv = get_conv_class(self.n_dim)
     self.dropout = get_dropout_class(self.n_dim)
     self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.adaptive_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.batch_norm = get_batch_norm_class(self.n_dim)
Example 6
 def __init__(self, n_output_node, input_shape):
     super().__init__(n_output_node, input_shape)
     # DenseNet Constant
     self.num_init_features = 64
     self.growth_rate = 32
     self.block_config = (6, 12, 24, 16)
     self.bn_size = 4
     self.drop_rate = 0
     # Stub layers
     self.n_dim = len(self.input_shape) - 1
     self.conv = get_conv_class(self.n_dim)
     self.dropout = get_dropout_class(self.n_dim)
     self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.adaptive_avg_pooling = get_global_avg_pooling_class(self.n_dim)
     self.max_pooling = get_pooling_class(self.n_dim)
     self.avg_pooling = get_avg_pooling_class(self.n_dim)
     self.batch_norm = get_batch_norm_class(self.n_dim)
Example 7
    def __init__(self, n_output_node, input_shape):
        """Initialize the instance.

        Args:
            n_output_node: An integer. Number of output nodes in the network.
            input_shape: A tuple. Input shape of the network.
        """
        super(CnnGenerator, self).__init__(n_output_node, input_shape)
        self.n_dim = len(self.input_shape) - 1
        if len(self.input_shape) > 4:
            raise ValueError('The input dimension is too high.')
        if len(self.input_shape) < 2:
            raise ValueError('The input dimension is too low.')
        self.conv = get_conv_class(self.n_dim)
        self.dropout = get_dropout_class(self.n_dim)
        self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
        self.pooling = get_pooling_class(self.n_dim)
        self.batch_norm = get_batch_norm_class(self.n_dim)
Example 8
    def __init__(self, n_output_node, input_shape):
        """Initialize the instance.

        Args:
            n_output_node: An integer. Number of output nodes in the network.
            input_shape: A tuple. Input shape of the network.
        """
        super(CnnGenerator, self).__init__(n_output_node, input_shape)
        self.n_dim = len(self.input_shape) - 1
        if len(self.input_shape) > 4:
            raise ValueError('The input dimension is too high.')
        if len(self.input_shape) < 2:
            raise ValueError('The input dimension is too low.')
        self.conv = get_conv_class(self.n_dim)
        self.dropout = get_dropout_class(self.n_dim)
        self.global_avg_pooling = get_global_avg_pooling_class(self.n_dim)
        self.pooling = get_pooling_class(self.n_dim)
        self.batch_norm = get_batch_norm_class(self.n_dim)
def create_new_layer(layer, n_dim):
    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if is_layer(layer, LayerType.RELU):
        conv_deeper_classes = [
            get_conv_class(n_dim),
            get_batch_norm_class(n_dim)
        ]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, LayerType.DROPOUT):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, LayerType.BATCH_NORM):
        conv_deeper_classes = [get_conv_class(n_dim)]  #, StubReLU]

    new_layers = []
    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])
        new_layers.append(new_layer)

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)
        new_layers.append(new_layer)

    elif layer_class == get_conv_class(n_dim):
        # original variant: add a single conv layer
        # new_layer = layer_class(input_shape[-1], input_shape[-1], sample((1, 3, 5), 1)[0], stride=1)

        # add a MobileNet-style block instead:
        # pointwise expansion -> depthwise conv -> pointwise projection
        in_planes = input_shape[-1]
        expansion = sample((1, 6), 1)[0]
        stride = sample((1, 2), 1)[0]
        planes = expansion * in_planes

        # 1x1 pointwise conv: expand channels from in_planes to planes
        new_layer = layer_class(in_planes, planes, 1, stride=1, padding=0)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(planes)
        new_layers.append(new_layer)

        new_layer = StubReLU()
        new_layers.append(new_layer)

        # 3x3 depthwise conv (groups=planes), carrying the sampled stride
        new_layer = layer_class(planes,
                                planes,
                                3,
                                stride=stride,
                                padding=1,
                                groups=planes)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(planes)
        new_layers.append(new_layer)

        new_layer = StubReLU()
        new_layers.append(new_layer)

        # 1x1 pointwise conv: project back down to in_planes channels
        new_layer = layer_class(planes, in_planes, 1, stride=1, padding=0)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(in_planes)
        new_layers.append(new_layer)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])
        new_layers.append(new_layer)

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])
        new_layers.append(new_layer)

    else:
        new_layer = layer_class()
        new_layers.append(new_layer)

    return new_layers
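
For comparison, the conv branch above assembles the stub-layer equivalent of a MobileNetV2-style inverted-residual block: a 1x1 pointwise expansion, a 3x3 depthwise convolution, and a 1x1 pointwise projection, each followed by batch norm. Below is a standalone PyTorch sketch of the same sequence for the 2-D case; the helper name and the torch.nn usage are mine, not part of the snippet, and, like the snippet, it omits the residual add and uses plain ReLU.

import torch.nn as nn

def inverted_residual_sketch(in_planes, expansion=6, stride=1):
    planes = expansion * in_planes
    return nn.Sequential(
        nn.Conv2d(in_planes, planes, kernel_size=1, stride=1, padding=0),  # pointwise expand
        nn.BatchNorm2d(planes),
        nn.ReLU(),
        nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1,
                  groups=planes),                                          # depthwise
        nn.BatchNorm2d(planes),
        nn.ReLU(),
        nn.Conv2d(planes, in_planes, kernel_size=1, stride=1, padding=0),  # pointwise project
        nn.BatchNorm2d(in_planes),
    )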