Example 1
    def __init__(self, inplanes, gpu):
        super(hourglass, self).__init__()
        self.gpu = gpu

        self.conv1 = Sequential(
            convbn_3d(inplanes, inplanes * 2, kernel_size=3, stride=2, pad=1),
            F.relu).to_gpu(self.gpu)

        self.conv2 = convbn_3d(inplanes * 2,
                               inplanes * 2,
                               kernel_size=3,
                               stride=1,
                               pad=1).to_gpu(self.gpu)

        self.conv3 = Sequential(
            convbn_3d(inplanes * 2,
                      inplanes * 2,
                      kernel_size=3,
                      stride=2,
                      pad=1), F.relu).to_gpu(self.gpu)

        self.conv4 = Sequential(
            convbn_3d(inplanes * 2,
                      inplanes * 2,
                      kernel_size=3,
                      stride=1,
                      pad=1), F.relu).to_gpu(self.gpu)

        self.conv5 = Sequential(
            L.DeconvolutionND(3,
                              inplanes * 2,
                              inplanes * 2,
                              ksize=4,
                              stride=2,
                              pad=1,
                              nobias=True,
                              initialW=ini.Normal(math.sqrt(2. / 32))),
            L.BatchNormalization(inplanes * 2,
                                 eps=1e-5,
                                 decay=0.95,
                                 initial_gamma=ini.One(),
                                 initial_beta=ini.Zero())).to_gpu(
                                     self.gpu)  # +conv2

        self.conv6 = Sequential(
            L.DeconvolutionND(3,
                              inplanes * 2,
                              inplanes,
                              ksize=4,
                              stride=2,
                              pad=1,
                              nobias=True),
            L.BatchNormalization(inplanes,
                                 eps=1e-5,
                                 decay=0.95,
                                 initial_gamma=ini.One(),
                                 initial_beta=ini.Zero())).to_gpu(
                                     self.gpu)  # +x
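
The "# +conv2" and "# +x" comments mark additive skip connections. A minimal sketch of the forward pass they imply, assuming F is chainer.functions (the method below is an illustration, not taken from the source):

    def forward(self, x):
        out = self.conv1(x)                        # stride 2: 1/2 resolution
        pre = self.conv2(out)                      # saved for the "+conv2" skip
        out = self.conv4(self.conv3(F.relu(pre)))  # stride 2: 1/4 resolution
        post = F.relu(self.conv5(out) + pre)       # deconv to 1/2, add conv2's output
        return self.conv6(post) + x                # deconv to full size, add the input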
Example 2
 def __init__(self,
              size,
              decay=0.9,
              eps=2e-5,
              dtype=numpy.float32,
              use_gamma=True,
              use_beta=True,
              initial_gamma=None,
              initial_beta=None):
     super(BatchNormalization, self).__init__()
     if use_gamma:
         self.add_param('gamma', size, dtype=dtype)
         if initial_gamma is None:
             initial_gamma = initializers.One()
         # Re-assign as a chainer.Parameter so the initializer fills the data.
         self.gamma = chainer.Parameter(initial_gamma, (size,))
     if use_beta:
         self.add_param('beta', size, dtype=dtype)
         if initial_beta is None:
             initial_beta = initializers.Zero()
         # Re-assign as a chainer.Parameter so the initializer fills the data.
         self.beta = chainer.Parameter(initial_beta, (size,))
     self.add_persistent('avg_mean', numpy.zeros(size, dtype=dtype))
     self.add_persistent('avg_var', numpy.zeros(size, dtype=dtype))
     self.add_persistent('N', 0)
     self.decay = decay
     self.eps = eps
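
With these defaults the affine part of the layer starts as an identity map: gamma is initialized to ones and beta to zeros, while avg_mean, avg_var, and the sample counter N are registered as persistent (untrained) running state.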
Example 3
def create_initializer(init_type, scale=None, fillvalue=None):
    if init_type == 'identity':
        return initializers.Identity() if scale is None else initializers.Identity(scale=scale)
    if init_type == 'constant':
        return initializers.Constant(fillvalue)
    if init_type == 'zero':
        return initializers.Zero()
    if init_type == 'one':
        return initializers.One()
    if init_type == 'normal':
        return initializers.Normal() if scale is None else initializers.Normal(scale)
    if init_type == 'glorotNormal':
        return initializers.GlorotNormal() if scale is None else initializers.GlorotNormal(scale)
    if init_type == 'heNormal':
        return initializers.HeNormal() if scale is None else initializers.HeNormal(scale)
    if init_type == 'orthogonal':
        return initializers.Orthogonal() if scale is None else initializers.Orthogonal(scale)
    if init_type == 'uniform':
        return initializers.Uniform() if scale is None else initializers.Uniform(scale)
    if init_type == 'leCunUniform':
        return initializers.LeCunUniform() if scale is None else initializers.LeCunUniform(scale)
    if init_type == 'glorotUniform':
        return initializers.GlorotUniform() if scale is None else initializers.GlorotUniform(scale)
    if init_type == 'heUniform':
        return initializers.HeUniform() if scale is None else initializers.HeUniform(scale)
    raise ValueError("Unknown initializer type: {0}".format(init_type))
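
A short usage sketch (the L.Linear call is illustrative and assumes import chainer.links as L):

# Fall back to the library default when no scale is given.
w_init = create_initializer('heNormal')
fc = L.Linear(128, 64, initialW=w_init)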
Example 4
 def __init__(self, channels):
     super(AffineChannel2D, self).__init__()
     with self.init_scope():
         self.W = chainer.variable.Parameter(initializers.One(),
                                             (channels, ))
         self.b = chainer.variable.Parameter(initializers.Zero(),
                                             (channels, ))
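
The link only declares a per-channel scale W (ones) and shift b (zeros). A plausible forward for NCHW input, assuming the usual affine-channel semantics and import chainer.functions as F (the method is a sketch, not from the source):

 def forward(self, x):
     # Broadcast the (C,) parameters over an (N, C, H, W) batch.
     W = F.reshape(self.W, (1, -1, 1, 1))
     b = F.reshape(self.b, (1, -1, 1, 1))
     return x * F.broadcast_to(W, x.shape) + F.broadcast_to(b, x.shape)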
Example 5
        def __init__(self, shape, glow_encoder):
            super().__init__()
            self.encoder = glow_encoder

            with self.init_scope():
                self.b = chainer.Parameter(initializers.Zero(), shape)
                self.m = chainer.Parameter(initializers.One(), (3, 8, 8))
Example 6
def convbn_3d(in_planes, out_planes, kernel_size, stride, pad):
    return Sequential(
        L.ConvolutionND(3, in_planes, out_planes,
                        ksize=kernel_size, stride=stride,
                        pad=pad, nobias=True,
                        initialW=ini.Normal(
                            math.sqrt(2. / (kernel_size ** 3 * out_planes)))),
        L.BatchNormalization(out_planes, eps=1e-5, decay=0.95,
                             initial_gamma=ini.One(), initial_beta=ini.Zero()),
    )
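
The weight standard deviation, sqrt(2 / (kernel_size**3 * out_planes)), is He-style initialization computed from the 3-D kernel's fan-out; the batch-normalization scale starts at one (ini.One()) and the shift at zero (ini.Zero()).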
Example 7
 def __init__(self, in_size, n_units=15):
     super(ConvolutionalModel, self).__init__()
     with self.init_scope():
         initial_w = initializers.One(dtype=np.float32)
         self.conv = L.ConvolutionND(ndim=1,
                                     in_channels=1,
                                     out_channels=n_units,
                                     ksize=in_size,
                                     stride=1,
                                     pad=0,
                                     initialW=initial_w)
         self.fc = L.Linear(n_units, 1)
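
Because the kernel spans the entire input (ksize=in_size, pad=0, stride=1) and every weight starts at one, each of the n_units channels initially outputs the plain sum of the input sequence.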
Example 8
 def __init__(self, n_in, n_out):
     super(QNetNout, self).__init__(
         lin=L.Linear(INPUT_LAYER_SIZE,
                      MID1_LAYER_SIZE,
                      initialW=initializers.Normal(scale=0.01)),
         lm1=L.Linear(MID1_LAYER_SIZE,
                      MID2_LAYER_SIZE,
                      initialW=initializers.Normal(scale=0.01)),
         lout=L.Linear(MID2_LAYER_SIZE,
                       NUM_SLOTS,
                       initialW=initializers.Normal(scale=0.01)),
         lq=L.Linear(2, 1, initialW=initializers.One()))
Example 9
 def __init__(self, eps=1e-6, initial_gamma=None, initial_beta=None):
     super(LayerNormalization, self).__init__()
     self.add_uninitialized_param('gamma')
     self.add_uninitialized_param('beta')
     if initial_gamma is None:
         initial_gamma = initializers.One()
     self._gamma_initializer = initial_gamma
     if initial_beta is None:
         initial_beta = initializers.Zero()
     self._beta_initializer = initial_beta
     self.eps = eps
     utils.experimental(
         'chainer.links.normalization.layer_normalization.py')
Example 10
    def __init__(self,
                 size=None,
                 eps=1e-6,
                 initial_gamma=None,
                 initial_beta=None):
        super(LayerNormalizationLink, self).__init__()
        self.add_uninitialized_param('gamma')
        self.add_uninitialized_param('beta')
        if initial_gamma is None:
            initial_gamma = initializers.One()
        self._gamma_initializer = initial_gamma
        if initial_beta is None:
            initial_beta = initializers.Zero()
        self._beta_initializer = initial_beta
        self.eps = eps

        if size is not None:
            self._initialize_params(size)
Example 11
    def __init__(self,
                 in_channels,
                 out_channels,
                 ksize=None,
                 stride=1,
                 pad=0,
                 dilate=1,
                 groups=1,
                 nobias=True,
                 initialW=None,
                 initial_bias=None,
                 use_scale=True,
                 activ=F.relu):
        """ CTOR. """
        super(FixupConv2D, self).__init__()

        if initialW is None:  # Fixup default: zero-initialize the weights
            initialW = I.Zero()

        with self.init_scope():
            self.conv = L.Convolution2D(in_channels,
                                        out_channels,
                                        ksize=ksize,
                                        stride=stride,
                                        pad=pad,
                                        nobias=nobias,
                                        initialW=initialW,
                                        initial_bias=initial_bias,
                                        dilate=dilate,
                                        groups=groups)
            # bias term for conv input and output
            self.bias_in = chainer.Parameter(initializer=I.Zero(), shape=1)
            self.bias_out = chainer.Parameter(initializer=I.Zero(), shape=1)

            # NOTE: activ controls whether to use scale as well
            if use_scale or activ is None:
                self.scale = chainer.Parameter(initializer=I.One(), shape=1)
            else:
                self.scale = None

            # activation
            self.activ = activ
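
A sketch of the Fixup-style forward these parameters suggest, assuming the usual ordering (input bias before the convolution, scale then output bias after it); the method is illustrative, not from the source:

    def forward(self, x):
        h = self.conv(x + self.bias_in)  # bias_in starts at zero
        if self.scale is not None:
            h = h * self.scale           # learnable multiplier, starts at one
        h = h + self.bias_out            # bias_out starts at zero
        return self.activ(h) if self.activ is not None else h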
Example 12
    def _make_layer(self, block, planes, blocks, stride, pad, dilation):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = Sequential(
                L.Convolution2D(self.inplanes, planes * block.expansion,
                                ksize=1, stride=stride, nobias=True,
                                initialW=ini.Normal(math.sqrt(2. / (planes * block.expansion)))),
                L.BatchNormalization(planes * block.expansion,
                                     eps=1e-5, decay=0.95,
                                     initial_gamma=ini.One(), initial_beta=ini.Zero()),
            )

        layers = Sequential()
        layers.append(block(self.inplanes, planes,
                            stride, downsample, pad, dilation))
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, 1, None, pad, dilation))

        return layers
Example 13
 def test_initialize_by_initializer(self):
     x = chainer.Parameter(initializers.One(), (3, ))
     np.testing.assert_array_equal(x.data, np.array([1., 1., 1.],
                                                    dtype='f'))
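
Passing an initializer together with a concrete shape makes chainer.Parameter allocate and fill the array immediately, which is why the test can compare x.data right after construction.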
Example 14
	def __init__(self, z_dim, ch_out):
		w = init.Normal(1.0)
		super(AdaIN, self).__init__()
		with self.init_scope():
			self.l1 = L.Linear(z_dim, ch_out, initialW=w, initial_bias=init.One())
			self.l2 = L.Linear(z_dim, ch_out, initialW=w)
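
A common way these two linears are consumed, assuming l1 predicts the per-channel scale (its initial_bias=init.One() makes the initial scale one) and l2 the shift over an instance-normalized input; the method below is a sketch, with F as chainer.functions:

	def forward(self, x, z):
		# Per-sample, per-channel instance statistics.
		mu = F.mean(x, axis=(2, 3), keepdims=True)
		var = F.mean((x - mu) ** 2, axis=(2, 3), keepdims=True)
		h = (x - mu) / F.sqrt(var + 1e-8)
		# Style-conditioned scale and shift.
		gamma = F.reshape(self.l1(z), (x.shape[0], -1, 1, 1))
		beta = F.reshape(self.l2(z), (x.shape[0], -1, 1, 1))
		return h * F.broadcast_to(gamma, x.shape) + F.broadcast_to(beta, x.shape)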