# Example 1
    def setUp(self):
        """Build the Conv2DActiv link under test plus random input/grad arrays.

        Reads ``self.activ``, ``self.args_style``, ``self.in_channels``,
        ``self.out_channels``, ``self.ksize``, ``self.stride`` and
        ``self.pad`` (set by the test parameterization).
        """
        # Select the activation under test.  Any other value of self.activ
        # leaves `activ` unbound, exactly like the original branch chain.
        if self.activ == 'relu':
            activ = relu
        elif self.activ == 'add_one':
            activ = _add_one

        self.x = np.random.uniform(
            -1, 1, (5, self.in_channels, 5, 5)).astype(np.float32)
        self.gy = np.random.uniform(
            -1, 1, (5, self.out_channels, 5, 5)).astype(np.float32)

        # A 3x3 kernel with a single centered 1 makes the convolution the
        # identity function, so the test can reason about outputs exactly.
        identity_kernel = np.array(
            [[0, 0, 0], [0, 1, 0], [0, 0, 0]],
            dtype=np.float32).reshape(1, 1, 3, 3)
        shared_kwargs = dict(
            initialW=identity_kernel, initial_bias=0, activ=activ)

        if self.args_style == 'explicit':
            self.l = Conv2DActiv(
                self.in_channels, self.out_channels, self.ksize,
                self.stride, self.pad, **shared_kwargs)
        elif self.args_style == 'None':
            self.l = Conv2DActiv(
                None, self.out_channels, self.ksize, self.stride, self.pad,
                **shared_kwargs)
        elif self.args_style == 'omit':
            self.l = Conv2DActiv(
                self.out_channels, self.ksize, stride=self.stride,
                pad=self.pad, **shared_kwargs)
# Example 2
    def __init__(self, n_class, scales):
        """Mask head: four 3x3 conv+activation layers, a stride-2
        deconvolution upsampler, and a per-class segmentation conv.

        Args:
            n_class: Number of output classes for the segmentation conv.
            scales: Stored as-is on the instance for later use.
        """
        super(MaskHead, self).__init__()
        self._n_class = n_class
        self._scales = scales

        he_init = HeNormal(1, fan_option='fan_out')
        with self.init_scope():
            # Four identical 256-channel 3x3 conv + activation stages.
            for idx in range(1, 5):
                setattr(self, 'conv%d' % idx,
                        Conv2DActiv(256, 3, pad=1, initialW=he_init))
            self.conv5 = L.Deconvolution2D(
                256, 2, pad=0, stride=2, initialW=he_init)
            self.seg = L.Convolution2D(n_class, 1, pad=0, initialW=he_init)
# Example 3
def create_fcn(out_chanels, init_options=None):
    """Build a fully convolutional stack of 3x3 Conv2DActiv layers.

    All layers but the last use ReLU; the final layer has no activation.
    (NOTE: the parameter name `out_chanels` is a historical typo kept for
    backward compatibility with existing callers.)

    Args:
        out_chanels: Sequence of output channel counts, one per layer.
        init_options: Optional sequence (same length) of per-layer kwarg
            dicts for initialization; a ``None`` entry falls back to
            ``Normal(0.01)`` for ``initialW``.

    Returns:
        A ``chainer.Sequential`` of ``Conv2DActiv`` links.
    """
    default_init = {'initialW': initializers.Normal(0.01)}
    if init_options is None:
        init_options = [None] * len(out_chanels)
    n_layers = len(out_chanels)
    fcn = []
    # Pair each channel count with its init option instead of indexing
    # both sequences by position (range(len(...)) anti-idiom).
    for i, (oc, init_option) in enumerate(zip(out_chanels, init_options)):
        # Only the last layer is left without an activation.
        activ = F.relu if i + 1 < n_layers else None
        init = default_init if init_option is None else init_option
        fcn.append(Conv2DActiv(None, oc, 3, 1, 1, activ=activ, **init))
    return chainer.Sequential(*fcn)
# Example 4
    def __init__(self):
        """Mask refinement network: three downsampling feature branches
        (v0-v2), three fixed-width refinement branches (h0-h2), a large
        deconvolution upsampler, and three post-processing convs."""
        super(MaskRefine, self).__init__()

        def two_convs(in_ch, mid_ch, out_ch):
            # A pair of 3x3 padded conv + activation layers.
            return chainer.Sequential(
                Conv2DActiv(in_ch, mid_ch, ksize=3, pad=1),
                Conv2DActiv(mid_ch, out_ch, ksize=3, pad=1),
            )

        with self.init_scope():
            # Feature branches reduce channels in two steps.
            self.v0 = two_convs(64, 16, 4)
            self.v1 = two_convs(256, 64, 16)
            self.v2 = two_convs(512, 128, 32)

            # Refinement branches keep their channel count fixed.
            self.h2 = two_convs(32, 32, 32)
            self.h1 = two_convs(16, 16, 16)
            self.h0 = two_convs(4, 4, 4)

            self.deconv = L.Deconvolution2D(256, 32, ksize=15, stride=15)
            self.post0 = L.Convolution2D(32, 16, ksize=3, pad=1)
            self.post1 = L.Convolution2D(16, 4, ksize=3, pad=1)
            self.post2 = L.Convolution2D(4, 1, ksize=3, pad=1)