Example #1
    def __call__(self, x, train):
        h1 = relu.relu(self.bn1(self.conv1(x), test=not train))
        h1 = relu.relu(self.bn2(self.conv2(h1), test=not train))
        h1 = self.bn3(self.conv3(h1), test=not train)
        h2 = self.bn4(self.conv4(x), test=not train)

        return relu.relu(h1 + h2)
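These excerpts show only the forward pass; the links they call are defined in the chain's constructor, which the snippets omit. As a minimal sketch of what Example #1 assumes, a Chainer v1-style bottleneck block with a projection shortcut could be set up as follows (the channel sizes and stride are illustrative assumptions, not values from the original project):

    import chainer
    import chainer.links as L

    class BottleneckA(chainer.Chain):
        # Illustrative sizes: in_size -> ch -> out_size, downsampled by `stride`.
        def __init__(self, in_size=256, ch=64, out_size=512, stride=2):
            super(BottleneckA, self).__init__(
                conv1=L.Convolution2D(in_size, ch, 1, stride, 0, nobias=True),
                bn1=L.BatchNormalization(ch),
                conv2=L.Convolution2D(ch, ch, 3, 1, 1, nobias=True),
                bn2=L.BatchNormalization(ch),
                conv3=L.Convolution2D(ch, out_size, 1, 1, 0, nobias=True),
                bn3=L.BatchNormalization(out_size),
                # Projection shortcut: matches the main path's channels and stride.
                conv4=L.Convolution2D(in_size, out_size, 1, stride, 0, nobias=True),
                bn4=L.BatchNormalization(out_size))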
Example #2
    def __call__(self, x):
        h = relu(self.bn1(self.deconv1(x)))
        h = relu(self.bn2(self.deconv2(h)))
        h = relu(self.bn3(self.deconv3(h)))
        h = F.sigmoid(self.deconv4(h))

        return h
Example #3
    def __call__(self, x):
        h = relu(self.bn1(self.conv1(x)))
        h = relu(self.bn2(self.conv2(h)))
        h = relu(self.bn3(self.conv3(h)))
        h = relu(self.bn4(self.conv4(h)))

        return h
Example #4
 def __call__(self, x):
     h0 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv1a(x)),
                                        3,
                                        stride=2)
     h1 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv1b(x)),
                                        3,
                                        stride=2)
     h0 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv2a(h0)),
                                        3,
                                        stride=2)
     h1 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv2b(h1)),
                                        3,
                                        stride=2)
     h2 = relu.relu(self.conv3a(h0) + self.conv3b(h1))
     h3 = relu.relu(self.conv3c(h0) + self.conv3d(h1))
     h2 = relu.relu(self.conv4a(h2))
     h3 = relu.relu(self.conv4b(h3))
     h2 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv5a(h2)),
                                        3,
                                        stride=2)
     h3 = max_pooling_2d.max_pooling_2d(relu.relu(self.conv5b(h3)),
                                        3,
                                        stride=2)
     h3 = self.fc6a(h2) + self.fc6b(h3)
     h3 = self.fc7(h3)
     return self.fc8(h3)
Example #5
 def __call__(self, x):
     h = relu(self.bn1(self.conv1(x)))
     self.cam_b1 = h
     h = relu(self.bn2(self.conv2(h)))
     self.cam_b2 = h
     h = self.bn3(self.conv3(h))
     self.cam_b3 = h
     return relu(h + x)
Example #6
 def __call__(self, x):
     out1 = self.f.conv1(x)
     out3 = self.f.conv3(relu.relu(self.f.proj3(x)))
     out5 = self.f.conv5(relu.relu(self.f.proj5(x)))
     pool = self.f.projp(
         max_pooling_2d.max_pooling_2d(x, 3, stride=1, pad=1))
     y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
     return y
Example #7
 def __call__(self, x):
     out1 = self.f.conv1(x)
     out3 = self.f.conv3(relu.relu(self.f.proj3(x)))
     out5 = self.f.conv5(relu.relu(self.f.proj5(x)))
     pool = self.f.projp(max_pooling_2d.max_pooling_2d(
         x, 3, stride=1, pad=1))
     y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
     return y
Example #8
    def __call__(self, x):
        h = relu(self.conv1(x))
        h = relu(self.bn2(self.conv2(h)))
        h = relu(self.bn3(self.conv3(h)))
        h = relu(self.bn4(self.conv4(h)))
        h = R._global_average_pooling_2d(h)
        h = self.fc(h)

        return h
Example #9
 def __call__(self, x):
     h1 = relu(self.bn1(self.conv1(x)))
     self.cam_a1 = h1
     h1 = relu(self.bn2(self.conv2(h1)))
     self.cam_a2 = h1
     h1 = self.bn3(self.conv3(h1))
     self.cam_a3 = h1
     h2 = self.bn4(self.conv4(x))
     self.cam_a4 = h2
     return relu(h1 + h2)
Example #10
    def __call__(self, x, train=False):
        """
        :param x: sensory input (ntrials x nchannels x ninput[0] x ninput[1])
        """
        if self.y is None:
            h1 = self.l1(x)
        else:
            fb = relu.relu(self.l3(self.y))
            h1 = self.l1(fb)

        self.y = self.l2(relu.relu(h1))

        return self.y
Example #11
 def __call__(self, x):
     h = max_pooling_2d.max_pooling_2d(relu.relu(self.conv1(x)),
                                       3,
                                       stride=2)
     h = max_pooling_2d.max_pooling_2d(relu.relu(self.conv2(h)),
                                       3,
                                       stride=2)
     h = relu.relu(self.conv3(h))
     h = relu.relu(self.conv4(h))
     h = max_pooling_2d.max_pooling_2d(relu.relu(self.conv5(h)),
                                       3,
                                       stride=2)
     h = self.fc6(h)
     h = self.fc7(h)
     return self.fc8(h)
Example #12
    def forward(self, x):
        """Computes the output of the Inception module.
        Args:
            x (~chainer.Variable): Input variable.
        Returns:
            Variable: Output variable. Its array has the same spatial size and
            the same minibatch size as the input array. The channel dimension
            has size ``out1 + out3 + out5 + proj_pool``.
        """
        out1 = self.conv1(x)
        out3 = self.conv3(relu.relu(self.proj3(x)))
        out5 = self.conv5(relu.relu(self.proj5(x)))
        pool = self.projp(max_pooling_2d.max_pooling_2d(x, 3, stride=1, pad=1))

        y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
        return y
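This forward pass is the core of chainer.links.Inception. A hedged usage sketch, borrowing the GoogLeNet inception(3a) channel sizes purely for illustration:

    import numpy as np
    import chainer.links as L

    # inception(3a): 192 in; out1=64, proj3=96, out3=128, proj5=16, out5=32,
    # proj_pool=32, so the output has 64 + 128 + 32 + 32 = 256 channels.
    inc = L.Inception(192, 64, 96, 128, 16, 32, 32)
    x = np.zeros((1, 192, 28, 28), dtype=np.float32)
    y = inc(x)
    assert y.shape == (1, 256, 28, 28)  # spatial size and batch size preserved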
Example #13
            def _one_directional_loop(di):
                # di=0, forward RNN
                # di=1, backward RNN
                xs_list = xs_next if di == 0 else reversed(xs_next)
                layer_idx = direction * layer + di
                h = hx[layer_idx]
                h_list = []
                for x in xs_list:
                    batch = x.shape[0]
                    if h.shape[0] > batch:
                        h, h_rest = split_axis.split_axis(h, [batch], axis=0)
                    else:
                        h_rest = None

                    if layer > 0:
                        x = dropout.dropout(x, ratio=dropout_ratio)

                    rnn_in = (linear.linear(x, xws[layer_idx],
                                            xbs[layer_idx]) +
                              linear.linear(h, hws[layer_idx], hbs[layer_idx]))
                    if activation == 'tanh':
                        h_bar = tanh.tanh(rnn_in)
                    elif activation == 'relu':
                        h_bar = relu.relu(rnn_in)

                    if h_rest is not None:
                        h = concat.concat([h_bar, h_rest], axis=0)
                    else:
                        h = h_bar
                    h_list.append(h_bar)
                return h, h_list
Example #14
            def _one_directional_loop(di):
                # di=0, forward RNN
                # di=1, backward RNN
                xs_list = xs_next if di == 0 else reversed(xs_next)
                layer_idx = direction * layer + di
                h = hx[layer_idx]
                h_list = []
                for x in xs_list:
                    batch = x.shape[0]
                    if h.shape[0] > batch:
                        h, h_rest = split_axis.split_axis(h, [batch], axis=0)
                    else:
                        h_rest = None

                    if layer > 0:
                        x = dropout.dropout(x, ratio=dropout_ratio)

                    rnn_in = (
                        linear.linear(x, xws[layer_idx], xbs[layer_idx]) +
                        linear.linear(h, hws[layer_idx], hbs[layer_idx]))
                    if activation == 'tanh':
                        h_bar = tanh.tanh(rnn_in)
                    elif activation == 'relu':
                        h_bar = relu.relu(rnn_in)

                    if h_rest is not None:
                        h = concat.concat([h_bar, h_rest], axis=0)
                    else:
                        h = h_bar
                    h_list.append(h_bar)
                return h, h_list
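Examples #13 and #14 are the per-direction inner loop from Chainer's n_step_rnn implementation rather than user-facing code. User code normally reaches the same computation through the N-step RNN links; a minimal sketch with illustrative sizes:

    import numpy as np
    import chainer.links as L

    n_layers, in_size, out_size = 2, 10, 20
    rnn = L.NStepRNNTanh(n_layers, in_size, out_size, dropout=0.5)
    # A variable-length minibatch: one (seq_len, in_size) array per sequence.
    xs = [np.zeros((length, in_size), dtype=np.float32) for length in (5, 3, 2)]
    hy, ys = rnn(None, xs)  # hx=None means a zero-initialized hidden state
    assert hy.shape == (n_layers, len(xs), out_size)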
Example #15
 def f(x, h, c, w, b):
     xw, hw = w
     xb, hb = b
     rnn_in = linear.linear(x, xw, xb) + linear.linear(h, hw, hb)
     if activation == 'tanh':
         return tanh.tanh(rnn_in), None
     elif activation == 'relu':
         return relu.relu(rnn_in), None
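This f is the single-timestep kernel driven by the loop in Examples #13 and #14: it computes the new hidden state as activation(linear(x, xw, xb) + linear(h, hw, hb)), and the None in the return value is the slot the LSTM variant uses for its cell state. The same step in plain NumPy, as a sketch:

    import numpy as np

    def rnn_step(x, h, xw, xb, hw, hb):
        # linear.linear(v, W, b) computes v.dot(W.T) + b; mirror that here.
        rnn_in = x.dot(xw.T) + xb + h.dot(hw.T) + hb
        return np.tanh(rnn_in)  # use np.maximum(rnn_in, 0) for the 'relu' variant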
Example #16
 def f(x, h, c, w, b):
     xw, hw = w
     xb, hb = b
     rnn_in = linear.linear(x, xw, xb) + linear.linear(h, hw, hb)
     if activation == 'tanh':
         return tanh.tanh(rnn_in), None
     elif activation == 'relu':
         return relu.relu(rnn_in), None
Example #17
 def __call__(self, x):
     x = relu.relu(self.conv1a(x))
     x = max_pooling_3d(x, (1, 2, 2), use_cudnn=self.use_cudnn)
     x = relu.relu(self.conv2a(x))
     x = max_pooling_3d(x, 2, use_cudnn=self.use_cudnn)
     x = relu.relu(self.conv3a(x))
     x = relu.relu(self.conv3b(x))
     x = max_pooling_3d(x, 2, use_cudnn=self.use_cudnn)
     x = relu.relu(self.conv4a(x))
     x = relu.relu(self.conv4b(x))
     x = max_pooling_3d(x, 2, use_cudnn=self.use_cudnn)
     x = relu.relu(self.conv5a(x))
     x = relu.relu(self.conv5b(x))
     x = max_pooling_3d(x, 2, use_cudnn=self.use_cudnn)
     x = relu.relu(self.fc6(x))
     x = relu.relu(self.fc7(x))
     return self.fc8(x)
Example #18
    def __call__(self, x):
        outs = []

        if self.out1 > 0:
            h1 = self.f.conv1(x)
            h1 = self.f.conv1n(h1)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.f.proj3n(self.f.proj3(x)))
        h3 = relu.relu(self.f.conv3n(self.f.conv3(h3)))
        outs.append(h3)

        h33 = relu.relu(self.f.proj33n(self.f.proj33(x)))
        h33 = relu.relu(self.f.conv33an(self.f.conv33a(h33)))
        h33 = relu.relu(self.f.conv33bn(self.f.conv33b(h33)))
        outs.append(h33)

        p = self.f.pool(x)
        if self.proj_pool is not None:
            p = relu.relu(self.f.poolpn(self.f.poolp(p)))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #19
    def forward(self, x):
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x)))
        h3 = relu.relu(self.conv3n(self.conv3(h3)))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x)))
        h33 = relu.relu(self.conv33an(self.conv33a(h33)))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33)))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x, 3, stride=self.stride, pad=1,
                                              cover_all=False)
        else:
            p = average_pooling_2d.average_pooling_2d(x, 3, stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p)))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #20
    def __call__(self, x):
        test = not self.train
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1, test=test)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x), test=test))
        h3 = relu.relu(self.conv3n(self.conv3(h3), test=test))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x), test=test))
        h33 = relu.relu(self.conv33an(self.conv33a(h33), test=test))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33), test=test))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x, 3, stride=self.stride, pad=1)
        else:
            p = average_pooling_2d.average_pooling_2d(x, 3, stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p), test=test))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #21
    def __call__(self, x):
        test = not self.train
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1, test=test)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x), test=test))
        h3 = relu.relu(self.conv3n(self.conv3(h3), test=test))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x), test=test))
        h33 = relu.relu(self.conv33an(self.conv33a(h33), test=test))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33), test=test))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x, 3, stride=self.stride, pad=1)
        else:
            p = average_pooling_2d.average_pooling_2d(x, 3, stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p), test=test))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #22
    def forward(self, x):
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x)))
        h3 = relu.relu(self.conv3n(self.conv3(h3)))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x)))
        h33 = relu.relu(self.conv33an(self.conv33a(h33)))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33)))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x, 3, stride=self.stride, pad=1,
                                              cover_all=False)
        else:
            p = average_pooling_2d.average_pooling_2d(x, 3, stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p)))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #23
    def __call__(self, x):
        """Computes the output of the Inception module.

        Args:
            x (~chainer.Variable): Input variable.

        Returns:
            Variable: Output variable. Its array has the same spatial size and
            the same minibatch size as the input array. The channel dimension
            has size ``out1 + out3 + out5 + proj_pool``.

        """
        out1 = self.conv1(x)
        out3 = self.conv3(relu.relu(self.proj3(x)))
        out5 = self.conv5(relu.relu(self.proj5(x)))
        pool = self.projp(max_pooling_2d.max_pooling_2d(
            x, 3, stride=1, pad=1))
        y = relu.relu(concat.concat((out1, out3, out5, pool), axis=1))
        return y
Example #24
 def __call__(self, x):
     h = relu.relu(self.conv1(x), self.use_cudnn)
     h = max_pooling_2d.max_pooling_2d(h, 2, stride=2, use_cudnn=self.use_cudnn)
     h = relu.relu(self.conv2(h), self.use_cudnn)
     h = max_pooling_2d.max_pooling_2d(h, 2, stride=2, use_cudnn=self.use_cudnn)
     h = relu.relu(self.conv3_1(h), self.use_cudnn)
     h = relu.relu(self.conv3_2(h), self.use_cudnn)
     h = max_pooling_2d.max_pooling_2d(h, 2, stride=2, use_cudnn=self.use_cudnn)
     h = relu.relu(self.conv4_1(h), self.use_cudnn)
     h = relu.relu(self.conv4_2(h), self.use_cudnn)
     h = max_pooling_2d.max_pooling_2d(h, 2, stride=2, use_cudnn=self.use_cudnn)
     h = relu.relu(self.conv5_1(h), self.use_cudnn)
     h = relu.relu(self.conv5_2(h), self.use_cudnn)
     h = max_pooling_2d.max_pooling_2d(h, 2, stride=2, use_cudnn=self.use_cudnn)
     h = self.fc6(h)
     h = self.fc7(h)
     return self.fc8(h)
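The explicit use_cudnn arguments here date from Chainer v1; Chainer v2 removed them in favor of a global configuration flag. A sketch of the modern equivalent, where model stands in for any chain such as the one above:

    import chainer

    with chainer.using_config('use_cudnn', 'never'):
        y = model(x)  # cuDNN is disabled for every function in this scope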
Example #25
 def __call__(self, x):
     h = self.bn1(self.conv1(x), test=not self.train)
     h = max_pooling_2d.max_pooling_2d(relu.relu(h),
                                       3,
                                       stride=2,
                                       use_cudnn=self.use_cudnn)
     h = self.res2(h, self.train)
     h = self.res3(h, self.train)
     h = self.res4(h, self.train)
     h = self.res5(h, self.train)
     h = average_pooling_2d.average_pooling_2d(h,
                                               7,
                                               stride=1,
                                               use_cudnn=self.use_cudnn)
     h = self.fc(h)
     return h
Example #26
    def __call__(self, x, test=None):
        """Computes the output of the InceptionBN module.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, batch normalization layers run in testing
                mode; if ``test`` is omitted, ``not self.train`` is used as
                ``test``.

        """
        if test is None:
            test = not self.train
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1, test=test)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x), test=test))
        h3 = relu.relu(self.conv3n(self.conv3(h3), test=test))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x), test=test))
        h33 = relu.relu(self.conv33an(self.conv33a(h33), test=test))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33), test=test))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x,
                                              3,
                                              stride=self.stride,
                                              pad=1,
                                              cover_all=False)
        else:
            p = average_pooling_2d.average_pooling_2d(x,
                                                      3,
                                                      stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p), test=test))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #27
    def __call__(self, x, test=None):
        """Computes the output of the InceptionBN module.

        Args:
            x (Variable): An input variable.
            test (bool): If ``True``, batch normalization layers run in testing
                mode; if ``test`` is omitted, ``not self.train`` is used as
                ``test``.

        """
        if test is None:
            test = not self.train
        outs = []

        if self.out1 > 0:
            h1 = self.conv1(x)
            h1 = self.conv1n(h1, test=test)
            h1 = relu.relu(h1)
            outs.append(h1)

        h3 = relu.relu(self.proj3n(self.proj3(x), test=test))
        h3 = relu.relu(self.conv3n(self.conv3(h3), test=test))
        outs.append(h3)

        h33 = relu.relu(self.proj33n(self.proj33(x), test=test))
        h33 = relu.relu(self.conv33an(self.conv33a(h33), test=test))
        h33 = relu.relu(self.conv33bn(self.conv33b(h33), test=test))
        outs.append(h33)

        if self.pooltype == 'max':
            p = max_pooling_2d.max_pooling_2d(x, 3, stride=self.stride, pad=1,
                                              cover_all=False)
        else:
            p = average_pooling_2d.average_pooling_2d(x, 3, stride=self.stride,
                                                      pad=1)
        if self.proj_pool is not None:
            p = relu.relu(self.poolpn(self.poolp(p), test=test))
        outs.append(p)

        y = concat.concat(outs, axis=1)
        return y
Example #28
    def __call__(self, x):
        z = self.W_z(x)
        h_bar = self.W(x)
        if self.h is not None:
            r = sigmoid.sigmoid(self.W_r(x) + self.U_r(self.h))
            z += self.U_z(self.h)
            h_bar += self.U(r * self.h)
        else:
            r = sigmoid.sigmoid(self.W_r(x) + self.U_r(self.H0(z)))
            z += self.U_z(self.H0(z))
            h_bar += self.U(r * self.H0(z))

        z = sigmoid.sigmoid(z)

        h_bar = relu.relu(h_bar)

        if self.h is not None:
            h_new = linear_interpolate.linear_interpolate(z, h_bar, self.h)
        else:
            h_new = z * h_bar

        self.h = h_new

        return self.h
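Spelled out, this is a GRU-style update with a ReLU candidate: r = sigmoid(W_r x + U_r h), z = sigmoid(W_z x + U_z h), h_bar = relu(W x + U (r * h)), and h_new = z * h_bar + (1 - z) * h, which is exactly what linear_interpolate(z, h_bar, h) computes. A NumPy sketch of the stateful branch, with all weight matrices assumed given:

    import numpy as np

    def sigmoid(a):
        return 1.0 / (1.0 + np.exp(-a))

    def gru_relu_step(x, h, W, U, W_r, U_r, W_z, U_z):
        r = sigmoid(x.dot(W_r.T) + h.dot(U_r.T))
        z = sigmoid(x.dot(W_z.T) + h.dot(U_z.T))
        h_bar = np.maximum(x.dot(W.T) + (r * h).dot(U.T), 0)  # ReLU candidate
        return z * h_bar + (1 - z) * h  # linear_interpolate(z, h_bar, h)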
Example #29
 def __call__(self, x, test=True):
     h1 = relu(self.bn1(self.conv1(x), test=test))
     h1 = relu(self.bn2(self.conv2(h1), test=test))
     h1 = self.bn3(self.conv3(h1), test=test)
     h2 = self.bn4(self.conv4(x), test=test)
     return relu(h1 + h2)
Example #30
 def __call__(self, x, test=True):
     h = relu(self.bn1(self.conv1(x), test=test))
     h = relu(self.bn2(self.conv2(h), test=test))
     h = self.bn3(self.conv3(h), test=test)
     return relu(h + x)
Example #31
 def forward(self, x):
     h1 = relu(self.bn1(self.conv1(x)))
     h1 = relu(self.bn2(self.conv2(h1)))
     h1 = self.bn3(self.conv3(h1))
     h2 = self.bn4(self.conv4(x))
     return relu(h1 + h2)
Example #32
 def forward(self, x):
     h = relu(self.bn1(self.conv1(x)))
     h = relu(self.bn2(self.conv2(h)))
     h = self.bn3(self.conv3(h))
     return relu(h + x)
Example #33
 def __call__(self, x, test=True):
     h1 = relu(self.bn1(self.conv1(x), test=test))
     h1 = relu(self.bn2(self.conv2(h1), test=test))
     h1 = self.bn3(self.conv3(h1), test=test)
     h2 = self.bn4(self.conv4(x), test=test)
     return relu(h1 + h2)
Example #34
 def __call__(self, x, test=True):
     h = relu(self.bn1(self.conv1(x), test=test))
     h = relu(self.bn2(self.conv2(h), test=test))
     h = self.bn3(self.conv3(h), test=test)
     return relu(h + x)
Example #35
    def __call__(self, x, train):
        h = relu.relu(self.bn1(self.conv1(x), test=not train))
        h = relu.relu(self.bn2(self.conv2(h), test=not train))
        h = self.bn3(self.conv3(h), test=not train)

        return relu.relu(h + x)
Example #36
 def __call__(self, x):
     h1 = relu(self.bn1(self.conv1(x)))
     h1 = relu(self.bn2(self.conv2(h1)))
     h1 = self.bn3(self.conv3(h1))
     h2 = self.bn4(self.conv4(x))
     return relu(h1 + h2)
Example #37
 def __call__(self, x):
     h = self.conv1(relu(self.bn1(x)))
     h = self.conv2(relu(self.bn2(h)))
     return h + x
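Unlike the post-activation blocks elsewhere on this page (conv -> BN -> ReLU, then relu(h + x)), this block uses the pre-activation ordering of He et al. (2016): BN -> ReLU -> conv, and the shortcut is added without a trailing nonlinearity so the identity path stays untouched. A sketch of the links such a block assumes (the channel count is an illustrative assumption):

    import chainer
    import chainer.links as L

    class PreActBlock(chainer.Chain):
        # Illustrative: equal in/out channels, no downsampling.
        def __init__(self, ch=64):
            super(PreActBlock, self).__init__(
                bn1=L.BatchNormalization(ch),
                conv1=L.Convolution2D(ch, ch, 3, 1, 1, nobias=True),
                bn2=L.BatchNormalization(ch),
                conv2=L.Convolution2D(ch, ch, 3, 1, 1, nobias=True))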
Example #38
 def __call__(self, x):
     out = relu(self.bn1(x))
     h1 = self.conv1(out)
     h1 = self.conv2(relu(self.bn2(h1)))
     h2 = self.conv3(out)
     return h1 + h2
Example #39
    def __call__(self, embeddings, labels):
        """
        Args:
            embeddings (:class:`~chainer.Variable` or :class:`numpy.ndarray` \
            or :class:`cupy.ndarray`): \
                predicted embedding vectors
                (batch size, max embedding dimensions, height, width)

            labels (:class:`numpy.ndarray` or :class:`cupy.ndarray`): \
                instance segmentation ground truth
                each unique value has to be denoting one instance
                (batch size, height, width)

        Returns:
            :class:`tuple` of :class:`chainer.Variable`:
            - *Variance loss*: Variance loss multiplied by ``alpha``
            - *Distance loss*: Distance loss multiplied by ``beta``
            - *Regularization loss*: Regularization loss multiplied by
              ``gamma``

        """
        assert self.max_embedding_dim == embeddings.shape[1]

        l_dist = 0.0
        count = 0
        xp = cuda.get_array_module(embeddings)

        emb = embeddings[None, :]
        emb = broadcast_to(emb, (emb.shape[1],
                                 emb.shape[1],
                                 emb.shape[2],
                                 emb.shape[3],
                                 emb.shape[4]))
        ms = []
        for c in range(self.max_embedding_dim):
            # Create mask for instance
            mask = xp.expand_dims(labels == c + 1, 1)
            ms.append(mask)
        if hasattr(xp, 'stack'):
            ms = xp.stack(ms, 0)
        else:
            # Old numpy does not have numpy.stack.
            ms = xp.concatenate([xp.expand_dims(x, 0) for x in ms], 0)
        mns = c_sum(emb * ms, axis=(3, 4))
        mns = mns / xp.maximum(xp.sum(ms, (2, 3, 4))[:, :, None], 1)
        mns_exp = mns[:, :, :, None, None]

        # Calculate regularization term
        l_reg = c_sum(self.norm(mns, (1, 2)))
        l_reg = l_reg / (self.max_embedding_dim * embeddings.shape[0])

        # Calculate variance term
        l_var = self.norm((mns_exp - emb) * ms, 2)
        l_var = relu(l_var - self.delta_v) ** 2
        l_var = c_sum(l_var, (1, 2, 3))
        l_var = l_var / xp.maximum(xp.sum(ms, (1, 2, 3, 4)), 1)
        l_var = c_sum(l_var) / self.max_embedding_dim

        # Calculate distance loss
        for c_a in range(len(mns)):
            for c_b in range(c_a + 1, len(mns)):
                m_a = mns[c_a]
                m_b = mns[c_b]
                dist = self.norm(m_a - m_b, 1)  # N
                l_dist += c_sum((relu(2 * self.delta_d - dist)) ** 2)
                count += 1
        l_dist /= max(count * embeddings.shape[0], 1)
        rtn = self.alpha * l_var, self.beta * l_dist, self.gamma * l_reg
        return rtn
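A hedged usage sketch for this discriminative embedding loss, with dummy shapes matching the docstring; the call itself is left commented out because the surrounding class definition is not shown here:

    import numpy as np

    batch, max_dim, H, W = 2, 4, 32, 32
    # Predicted embeddings: (batch, max_embedding_dim, height, width).
    embeddings = np.random.randn(batch, max_dim, H, W).astype(np.float32)
    # Instance labels: (batch, height, width); 0 is background and each value
    # 1..max_embedding_dim denotes one instance, matching the `labels == c + 1`
    # masks built above.
    labels = np.zeros((batch, H, W), dtype=np.int32)
    labels[:, 8:16, 8:16] = 1    # instance 1
    labels[:, 20:28, 20:28] = 2  # instance 2
    # l_var, l_dist, l_reg = loss(embeddings, labels)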
Example #40
 def forward(self, x):
     h1 = relu(self.bn1(self.conv1(x)))
     h1 = relu(self.bn2(self.conv2(h1)))
     h1 = self.bn3(self.conv3(h1))
     h2 = self.bn4(self.conv4(x))
     return relu(h1 + h2)
Example #41
 def forward(self, x):
     h = relu(self.bn1(self.conv1(x)))
     h = relu(self.bn2(self.conv2(h)))
     h = self.bn3(self.conv3(h))
     return relu(h + x)
Example #42
 def __call__(self, x):
     h1 = relu(self.bn1(self.conv1(x)))
     h1 = relu(self.bn2(self.conv2(h1)))
     h1 = self.bn3(self.conv3(h1))
     h2 = self.bn4(self.conv4(x))
     return relu(h1 + h2)
Example #43
 def __call__(self, x):
     h = relu(self.bn1(self.conv1(x)))
     h = relu(self.bn2(self.conv2(h)))
     h = self.bn3(self.conv3(h))
     return relu(h + x)