Example #1
0
 def __init__(self, in_channels, out_channels):
     """Downsampling residual block: 1x1 shortcut plus a 3x3/3x3 main branch.

     Args:
         in_channels (int): channels of the incoming feature map.
         out_channels (int): channels produced by both branches.
     """
     super(ResidualBlockB, self).__init__()
     with self.init_scope():
         # Shortcut branch: 1x1 stride-2 conv so shapes match the main branch.
         # ``initial`` is a module-level weight initializer defined elsewhere.
         self.res_branch1 = Convolution2D(
             in_channels, out_channels, ksize=1, stride=2, initialW=initial)
         self.bn_branch1 = BatchNormalization(out_channels)
         # Main branch: 3x3 stride-2 conv followed by a 3x3 stride-1 conv,
         # each tracked by its own batch-norm link.
         self.res_branch2a = Convolution2D(
             in_channels, out_channels, ksize=3, stride=2, pad=1,
             initialW=initial)
         self.bn_branch2a = BatchNormalization(out_channels)
         self.res_branch2b = Convolution2D(
             out_channels, out_channels, ksize=3, pad=1, initialW=initial)
         self.bn_branch2b = BatchNormalization(out_channels)
Example #2
0
    def __init__(self, in_channels, out_channels, ksize,
                 stride=1, pad=0, dilate=1, nobias=False,
                 dw_initialW=None, pw_initialW=None,
                 dw_initial_bias=None, pw_initial_bias=None,
                 dw_activ=identity, pw_activ=relu, bn_kwargs=None):
        """Depthwise-separable convolution with per-stage BN and activations.

        Args:
            in_channels (int): input channels; also the depthwise output width.
            out_channels (int): channels produced by the pointwise 1x1 conv.
            ksize: kernel size of the depthwise convolution.
            stride, pad, dilate: spatial parameters of the depthwise conv.
            nobias (bool): omit conv biases (BN supplies the shift).
            dw_initialW, pw_initialW: weight initializers for each conv.
            dw_initial_bias, pw_initial_bias: accepted for interface
                compatibility; NOTE(review): currently unused below.
            dw_activ, pw_activ (callable or None): activations after each BN;
                ``None`` falls back to ``identity``.
            bn_kwargs (dict or None): extra BN keyword args; a ``'comm'`` key
                selects distributed multi-node BN.
        """
        # None sentinel instead of a shared mutable ``{}`` default.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs

        self.dw_activ = identity if dw_activ is None else dw_activ
        self.pw_activ = identity if pw_activ is None else pw_activ
        super(SeparableConv2DBNActiv, self).__init__()

        with self.init_scope():
            # groups == in_channels makes this a true depthwise convolution,
            # so its output has ``in_channels`` feature maps.
            self.depthwise = Convolution2D(
                in_channels, in_channels, ksize=ksize, stride=stride,
                pad=pad, dilate=dilate, groups=in_channels,
                nobias=nobias, initialW=dw_initialW)
            self.pointwise = Convolution2D(
                in_channels, out_channels, 1,
                nobias=nobias, initialW=pw_initialW)

            if 'comm' in bn_kwargs:
                # BUG FIX: the depthwise conv outputs ``in_channels`` maps,
                # so its BN must be sized ``in_channels`` (as the
                # non-distributed branch below already does); this branch
                # previously used ``out_channels``.
                self.dw_bn = MultiNodeBatchNormalization(
                    in_channels, **bn_kwargs)
                self.pw_bn = MultiNodeBatchNormalization(
                    out_channels, **bn_kwargs)
            else:
                self.dw_bn = BatchNormalization(in_channels, **bn_kwargs)
                self.pw_bn = BatchNormalization(out_channels, **bn_kwargs)
Example #3
0
 def __init__(self, channels, ksize):
     """Two identical conv -> batch-norm -> leaky-ReLU stages.

     Args:
         channels (int): channel count kept constant through both convs.
         ksize (int): convolution kernel size; also stored on the instance.
     """
     super().__init__()
     self.ksize = ksize
     with self.init_scope():
         # Stage 1.
         self.c1 = Convolution2D(channels, channels, ksize=ksize)
         self.b1 = BatchNormalization(channels)
         self.a1 = LeakyReluLink()
         # Stage 2 — structurally identical to stage 1.
         self.c2 = Convolution2D(channels, channels, ksize=ksize)
         self.b2 = BatchNormalization(channels)
         self.a2 = LeakyReluLink()
 def __init__(self, k, layer_num, f0, growth=4, dropout_ratio=0.5):
     """Dense-block layer: BN/1x1 bottleneck, then BN/masked k x k conv.

     Args:
         k (int): kernel size of the masked convolution.
         layer_num (int): 1-based index of this layer within the block.
         f0 (int): channel count entering the first layer of the block.
         growth (int): channels added by each layer (growth rate).
         dropout_ratio (float): stored for use in the forward pass.
     """
     super().__init__()
     # Channels accumulated by the preceding layers of the dense block.
     n_in = f0 + (layer_num - 1) * growth
     # Conventional DenseNet bottleneck width: 4 * growth rate.
     bottleneck = 4 * growth
     with self.init_scope():
         self.bn1 = BatchNormalization(size=n_in)
         self.bn2 = BatchNormalization(size=bottleneck)
         self.conv1 = Convolution2D(in_channels=n_in,
                                    out_channels=bottleneck,
                                    ksize=1)
         self.conv2 = MaskedConv2D(in_channels=bottleneck,
                                   out_channels=growth,
                                   ksize=k,
                                   pad=k // 2)
     self.dropout_ratio = dropout_ratio
    def __init__(self,
                 in_channels,
                 channel_multiplier,
                 ksize=None,
                 stride=1,
                 pad=0,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 activ=relu,
                 bn_kwargs=None):
        """Depthwise convolution -> batch normalization -> activation.

        Args:
            in_channels (int): number of input channels.
            channel_multiplier (int): depthwise multiplier; the conv outputs
                ``in_channels * channel_multiplier`` channels.
            ksize: kernel size forwarded to ``DepthwiseConvolution2D``.
            stride, pad: spatial parameters of the convolution.
            nobias (bool): omit the conv bias (BN supplies the shift).
            initialW, initial_bias: initializers for the conv link.
            activ (callable): activation applied after BN.
            bn_kwargs (dict or None): extra BN keyword args; a ``'comm'`` key
                selects distributed multi-node BN.
        """
        # None sentinel instead of a shared mutable ``{}`` default.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs
        super(DepthwiseConv2DBNActiv, self).__init__()
        self.activ = activ

        with self.init_scope():
            self.conv = L.DepthwiseConvolution2D(
                in_channels, channel_multiplier, ksize,
                stride, pad, nobias, initialW, initial_bias)
            # BN is sized to the depthwise output width.
            n_out = int(in_channels * channel_multiplier)
            if 'comm' in bn_kwargs:
                self.bn = MultiNodeBatchNormalization(n_out, **bn_kwargs)
            else:
                self.bn = BatchNormalization(n_out, **bn_kwargs)
Example #6
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 dilate=1,
                 groups=1,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 activ=relu,
                 bn_kwargs=None):
        """2D convolution -> batch normalization -> activation.

        Args:
            in_channels (int or None): input channels; ``None`` lets Chainer
                infer it from the first input.
            out_channels (int): output channels (also the BN size).
            ksize: kernel size; if omitted, the first two positional
                arguments are reinterpreted (see below).
            stride, pad, dilate, groups: spatial/grouping conv parameters.
            nobias (bool): omit the conv bias (BN supplies the shift).
            initialW, initial_bias: initializers for the conv link.
            activ (callable): activation applied after BN.
            bn_kwargs (dict or None): extra BN keyword args; a ``'comm'`` key
                selects distributed multi-node BN.
        """
        # None sentinel instead of a shared mutable ``{}`` default.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs
        # Two-argument form ``Conv2DBNActiv(out_channels, ksize)``: shift the
        # positionals and let the conv infer in_channels at runtime.
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.activ = activ
        super(Conv2DBNActiv, self).__init__()
        with self.init_scope():
            self.conv = Convolution2D(in_channels,
                                      out_channels,
                                      ksize,
                                      stride,
                                      pad,
                                      nobias,
                                      initialW,
                                      initial_bias,
                                      dilate=dilate,
                                      groups=groups)
            if 'comm' in bn_kwargs:
                self.bn = MultiNodeBatchNormalization(out_channels,
                                                      **bn_kwargs)
            else:
                self.bn = BatchNormalization(out_channels, **bn_kwargs)
Example #7
0
 def __init__(self, in_ch, wscale=0.02, getIntermFeat=True, conv = Convolution2D):
     """PatchGAN-style discriminator: five strided 4x4 convolutions.

     Args:
         in_ch (int): channels of the input image.
         wscale (float): std-dev of the Normal weight initializer.
         getIntermFeat (bool): stored flag; presumably read by the forward
             pass to expose intermediate features — confirm against callers.
         conv: convolution link class to instantiate.
     """
     super(Discriminator, self).__init__()
     self.getIntermFeat = getIntermFeat
     w = chainer.initializers.Normal(wscale)
     kw = 4
     pad = int(np.ceil((kw - 1.0) / 2))  # evaluates to 2 for a 4x4 kernel
     with self.init_scope():
         # Channel progression 64 -> 128 -> 256 -> 512 -> 1; the first three
         # stages downsample (stride 2), the last two keep resolution.
         self.c1 = conv(in_ch, 64, 4, 2, pad, initialW=w)
         self.c2 = conv(64, 128, 4, 2, pad, initialW=w)
         self.c3 = conv(128, 256, 4, 2, pad, initialW=w)
         self.c4 = conv(256, 512, 4, 1, pad, initialW=w)
         self.c5 = conv(512, 1, 4, 1, pad, initialW=w)
         # One BN per intermediate stage (none for the 1-channel output).
         self.bn0 = BatchNormalization(64)
         self.bn1 = BatchNormalization(128)
         self.bn2 = BatchNormalization(256)
         self.bn3 = BatchNormalization(512)
Example #8
0
 def __init__(self, in_channels, out_channels):
     """Stem block: a 7x7 stride-2 convolution tracked by batch norm.

     Args:
         in_channels (int): channels of the incoming image/feature map.
         out_channels (int): channels produced by the convolution.
     """
     super(ConvolutionBlock, self).__init__()
     with self.init_scope():
         # He-normal initialization for the conv weights.
         self.conv = Convolution2D(
             in_channels, out_channels, ksize=7, stride=2, pad=3,
             initialW=HeNormal())
         self.bn_conv = BatchNormalization(out_channels)
Example #9
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 scale=1,
                 dilate=1,
                 groups=1,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 activ=relu,
                 bn_kwargs=None,
                 aa_kwargs=None):
        """Conv (plain, Res2Net or attention-augmented) -> BN -> activation.

        Args:
            in_channels (int or None): input channels; ``None`` lets Chainer
                infer it from the first input.
            out_channels (int): output channels (also the BN size).
            ksize: kernel size; if omitted, the first two positional
                arguments are reinterpreted (see below).
            stride, pad, dilate, groups: spatial/grouping conv parameters.
            scale (int): when > 1, build a Res2Net multi-scale conv instead
                of a plain convolution.
            nobias (bool): omit the conv bias (BN supplies the shift).
            initialW, initial_bias: initializers for the conv link.
            activ (callable): activation applied after BN.
            bn_kwargs (dict or None): extra BN keyword args; a ``'comm'`` key
                selects distributed multi-node BN.
            aa_kwargs (dict or None): when non-empty, build an AugmentedConv
                from keys 'k', 'v', 'Nh' and 'relative' (takes precedence
                over ``scale``).
        """
        # None sentinels instead of shared mutable ``{}`` defaults.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs
        aa_kwargs = {} if aa_kwargs is None else aa_kwargs
        # Two-argument form ``Conv2DBNActiv(out_channels, ksize)``: shift the
        # positionals and let the conv infer in_channels at runtime.
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.activ = activ
        super(Conv2DBNActiv, self).__init__()
        with self.init_scope():
            if aa_kwargs:  # attention-augmented convolution
                self.conv = AugmentedConv(in_channels,
                                          out_channels,
                                          ksize,
                                          int(out_channels * aa_kwargs['k']),
                                          int(out_channels * aa_kwargs['v']),
                                          aa_kwargs['Nh'],
                                          aa_kwargs['relative'],
                                          initialW=initialW)
            elif scale > 1:  # Res2Net multi-scale convolution
                self.conv = Res2NetConv(in_channels,
                                        out_channels,
                                        ksize,
                                        stride,
                                        pad,
                                        scale,
                                        nobias,
                                        initialW,
                                        initial_bias,
                                        dilate=dilate,
                                        groups=groups)
            else:
                self.conv = Convolution2D(in_channels,
                                          out_channels,
                                          ksize,
                                          stride,
                                          pad,
                                          nobias,
                                          initialW,
                                          initial_bias,
                                          dilate=dilate,
                                          groups=groups)
            if 'comm' in bn_kwargs:
                self.bn = MultiNodeBatchNormalization(out_channels,
                                                      **bn_kwargs)
            else:
                self.bn = BatchNormalization(out_channels, **bn_kwargs)
Example #10
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 dilate=1,
                 groups=1,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 activ=relu,
                 use_bn=True,
                 bn_kwargs=None):
        """Convolution -> optional batch normalization -> activation.

        Args:
            in_channels (int or None): input channels; ``None`` lets Chainer
                infer it from the first input.
            out_channels (int): output channels (also the BN size).
            ksize: kernel size; if omitted, the first two positional
                arguments are reinterpreted (see below).
            stride, pad, dilate, groups: spatial/grouping conv parameters.
            nobias (bool): omit the conv bias.
            initialW, initial_bias: initializers for the conv link.
            activ (callable): activation applied after (optional) BN.
            use_bn (bool): when False, no BN link is created (``self.bn`` is
                ``None``).
            bn_kwargs (dict or None): extra BN keyword args; a ``'comm'`` key
                selects distributed multi-node BN.
        """
        super().__init__()

        # None sentinel instead of a shared mutable ``{}`` default.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs

        # Two-argument form ``Conv2DBNActiv(out_channels, ksize)``: shift the
        # positionals and let the conv infer in_channels at runtime.
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        # Record the full configuration on the instance.
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.ksize = ksize
        self.stride = stride
        self.pad = pad
        self.dilate = dilate
        self.groups = groups
        self.nobias = nobias
        self.initialW = initialW
        self.initial_bias = initial_bias
        self.use_bn = use_bn
        self.bn_kwargs = bn_kwargs

        self.activ = activ
        with self.init_scope():
            self.conv = Convolution2D(in_channels,
                                      out_channels,
                                      ksize=ksize,
                                      stride=stride,
                                      pad=pad,
                                      nobias=nobias,
                                      initialW=initialW,
                                      initial_bias=initial_bias,
                                      dilate=dilate,
                                      groups=groups)

            # TODO: allow passing customized BN
            if use_bn:
                if 'comm' in bn_kwargs:
                    self.bn = MultiNodeBatchNormalization(
                        out_channels, **bn_kwargs)
                else:
                    self.bn = BatchNormalization(out_channels, **bn_kwargs)
            else:
                self.bn = None
Example #11
0
    def __init__(self, n_joints):
        """VGG16-style backbone with a regression head for joint positions.

        Args:
            n_joints (int): number of joints; the final layer emits
                ``n_joints * 2`` values (an (x, y) pair per joint).
        """
        super(VGG16_conv3_3, self).__init__()

        with self.init_scope():
            # Stage 1: 3 -> 64.
            self.conv1_1 = Convolution2D(3, 64, ksize=3, stride=1, pad=1)
            self.conv1_2 = Convolution2D(64, 64, ksize=3, stride=1, pad=1)
            # Stage 2: 64 -> 128.
            self.conv2_1 = Convolution2D(64, 128, ksize=3, stride=1, pad=1)
            self.conv2_2 = Convolution2D(128, 128, ksize=3, stride=1, pad=1)
            # Stage 3: 128 -> 256.
            self.conv3_1 = Convolution2D(128, 256, ksize=3, stride=1, pad=1)
            self.conv3_2 = Convolution2D(256, 256, ksize=3, stride=1, pad=1)
            self.conv3_3 = Convolution2D(256, 256, ksize=3, stride=1, pad=1)
            # Stage 4: 256 -> 512.
            self.conv4_1 = Convolution2D(256, 512, ksize=3, stride=1, pad=1)
            self.conv4_2 = Convolution2D(512, 512, ksize=3, stride=1, pad=1)
            self.conv4_3 = Convolution2D(512, 512, ksize=3, stride=1, pad=1)
            # Stage 5: 512 -> 512.
            self.conv5_1 = Convolution2D(512, 512, ksize=3, stride=1, pad=1)
            self.conv5_2 = Convolution2D(512, 512, ksize=3, stride=1, pad=1)
            self.conv5_3 = Convolution2D(512, 512, ksize=3, stride=1, pad=1)
            # Fully connected head with BN between layers.
            self.bn1 = BatchNormalization(512)
            self.fc6 = Linear(None, 4096)
            self.bn2 = BatchNormalization(4096)
            self.fc7 = Linear(4096, 4096)
            self.bn3 = BatchNormalization(4096)
            self.fc8 = Linear(4096, n_joints * 2)
Example #12
0
 def __init__(self, in_ch=3, out_ch=3):
     """Hourglass encoder-decoder: 4 downsampling and 4 upsampling stages.

     Args:
         in_ch (int): channels of the input image.
         out_ch (int): channels of the reconstructed output (also stored
             on the instance).
     """
     super(Encoder, self).__init__()
     with self.init_scope():
         # Entry 7x7 conv keeps resolution (stride 1, pad 3).
         self.flat1 = Convolution2D(in_ch, 32, ksize=7, stride=1, pad=3)
         self.flat1_bn = BatchNormalization(32)
         # Downsampling path: each 3x3 stride-2 conv halves the resolution
         # and doubles the channel count (32 -> 512).
         self.down1 = Convolution2D(32, 64, ksize=3, stride=2, pad=1)
         self.down1_bn = BatchNormalization(64)
         self.down2 = Convolution2D(64, 128, ksize=3, stride=2, pad=1)
         self.down2_bn = BatchNormalization(128)
         self.down3 = Convolution2D(128, 256, ksize=3, stride=2, pad=1)
         self.down3_bn = BatchNormalization(256)
         self.down4 = Convolution2D(256, 512, ksize=3, stride=2, pad=1)
         self.down4_bn = BatchNormalization(512)
         # Upsampling path: mirror of the above with 4x4 stride-2 deconvs.
         self.up1 = Deconvolution2D(512, 256, ksize=4, stride=2, pad=1)
         self.up1_bn = BatchNormalization(256)
         self.up2 = Deconvolution2D(256, 128, ksize=4, stride=2, pad=1)
         self.up2_bn = BatchNormalization(128)
         self.up3 = Deconvolution2D(128, 64, ksize=4, stride=2, pad=1)
         self.up3_bn = BatchNormalization(64)
         self.up4 = Deconvolution2D(64, 32, ksize=4, stride=2, pad=1)
         self.up4_bn = BatchNormalization(32)
         # Exit 7x7 deconv produces the requested output channels.
         self.flat2 = Deconvolution2D(32, out_ch, ksize=7, stride=1, pad=3)
     self.out_ch = out_ch
Example #13
0
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 activ=relu,
                 bn_kwargs=None):
        """2D convolution -> batch normalization -> activation.

        Args:
            in_channels (int or None): input channels; ``None`` lets Chainer
                infer it from the first input.
            out_channels (int): output channels (also the BN size).
            ksize: kernel size; if omitted, the first two positional
                arguments are reinterpreted (see below).
            stride, pad: spatial conv parameters.
            nobias (bool): omit the conv bias (BN supplies the shift).
            initialW, initial_bias: initializers for the conv link.
            activ (callable): activation applied after BN.
            bn_kwargs (dict or None): extra keyword args for BN.
        """
        # None sentinel instead of a shared mutable ``dict()`` default.
        bn_kwargs = {} if bn_kwargs is None else bn_kwargs
        # Two-argument form ``Conv2DBNActiv(out_channels, ksize)``: shift the
        # positionals and let the conv infer in_channels at runtime.
        if ksize is None:
            out_channels, ksize, in_channels = in_channels, out_channels, None

        self.activ = activ
        super(Conv2DBNActiv, self).__init__()
        with self.init_scope():
            self.conv = Convolution2D(in_channels, out_channels, ksize, stride,
                                      pad, nobias, initialW, initial_bias)
            self.bn = BatchNormalization(out_channels, **bn_kwargs)
Example #14
0
    def __init__(self, graph_str, shapes, batchnorm=False):
        """Build the Einconv link: one parameter tensor per graph node,
        each with He-normal initialization, plus optional BN links.

        Args:
            graph_str: graph description consumed by ``GManager``.
            shapes: tensor shapes consumed by ``GManager``.
            batchnorm (bool): when True, add one BatchNormalization link per
                parameter tensor.
        """
        super(Einconv, self).__init__()

        self.add_link('gm', GManager(graph_str, shapes))
        logging.debug(self.gm.dims)
        logging.debug(self.gm.graph)

        # BN normalizes jointly over the batch, filter and image axes.
        bn_axis = ([self.gm.BATCH_IND]
                   + self.gm.get_filter_indices()
                   + self.gm.get_image_indices())
        # Tensors are numbered from 1 to num_tensors (inclusive).
        for idx in range(1, self.gm.num_tensors + 1):
            shape = self.gm.get_dims(idx, expanded=True)
            # He-normal scaling: std = sqrt(2 / fan_in).
            init = initializers.Normal(scale=sqrt(2 / self.gm.get_fan_in(idx)))
            self.add_param(self.param_name(idx),
                           shape=shape,
                           initializer=init)
            if batchnorm:
                self.add_link(self.bn_name(idx),
                              BatchNormalization(axis=tuple(bn_axis)))

        self.batchnorm = batchnorm
Example #15
0
 def __init__(self):
     """Minimal model: a single 1 -> 5 linear layer followed by batch norm."""
     super(SimpleAll, self).__init__()
     with self.init_scope():
         # One fully connected layer (1 input feature, 5 outputs) and a BN
         # link sized to its output.
         self.fc = Linear(1, 5)
         self.bn = BatchNormalization(5)
Example #16
0
 def reinitialize(self, link: L.BatchNormalization):
     """Re-run parameter initialization on ``link`` in place.

     The current gamma shape is reused, so the re-created parameters keep
     the link's channel dimensionality. Relies on the private Chainer API
     ``_initialize_params``.
     """
     shape = link.gamma.shape
     link._initialize_params(shape)