Example #1
def test_gpu_node_leaky_relu(a):
    set_cuda_active(True)

    g1 = Variable(a)

    # forward and backward on the GPU
    g3 = sum(rm.leaky_relu(g1))
    g = g3.grad()
    g_g1 = g.get(g1)
    g3.to_cpu()

    # repeat the same computation on the CPU
    set_cuda_active(False)
    c3 = sum(rm.leaky_relu(g1))
    c = c3.grad()
    c_g1 = c.get(g1)

    # GPU and CPU results must match
    close(g3, c3)
    close(c_g1, g_g1)
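In the test above, the input array `a` and the comparison helper `close` come from the surrounding test module; a minimal sketch of NumPy-based stand-ins (hypothetical names, shapes, and tolerances, not taken from the source):

import numpy as np

def close(x, y, rtol=1e-5, atol=1e-5):
    # hypothetical stand-in: assert that GPU and CPU results agree numerically
    assert np.allclose(np.asarray(x), np.asarray(y), rtol=rtol, atol=atol)

a = np.random.rand(2, 3).astype(np.float32)  # illustrative test input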
Example #2
 def forward(self, x, print_parameter=False):
     hidden = self.input(x)
     if print_parameter:
         print('{}'.format('-' * 20))
         print('check network')
         print(x.shape)
         print('{}'.format('-' * 20))
     if self.dropout:
         hidden = rm.dropout(hidden)
     hidden = rm.max_pool2d(hidden, stride=1, padding=1)
     if print_parameter:
         print(hidden.shape)
         print('{}'.format('-' * 20))
     layers = self.hidden._layers
     blocks = self.blocks if isinstance(self.blocks, int) else len(
         self.blocks)
     for i in range(blocks):
         offset = i * (self.depth * 2 + 1)
         for j in range(self.depth):
             sub = rm.leaky_relu(layers[offset + 2 * j](hidden))
             if print_parameter:
                 print('{}.{} b {}'.format(i, j, sub.shape))
             sub = layers[offset + 2 * j + 1](sub)
             if print_parameter:
                 print('{}.{} + {}'.format(i, j, sub.shape))
             if self.dropout:
                 sub = rm.dropout(sub)
             hidden = rm.concat(hidden, sub)
             if print_parameter:
                 print('{}.{} = {}'.format(i, j, hidden.shape))
         offset = (i + 1) * (self.depth * 2 + 1) - 1
         hidden = layers[offset](hidden)
         if print_parameter:
             print('{}.{} * {}'.format(i, j, hidden.shape))
         if self.dropout:
             if print_parameter:
                 print('dropout')
             hidden = rm.dropout(hidden)
         hidden = rm.average_pool2d(hidden,
                                    padding=1,
                                    stride=(1, 2) if self.keep_v else 2)
         if print_parameter:
             print('{}.{} @ {}'.format(i, j, hidden.shape))
             print('{}'.format('-' * 20))
     x = rm.flatten(hidden)
     if print_parameter:
         print('  >>>  {} parameters'.format(x.shape))
     return x
Example #3
 def forward(self, x):
     return rm.leaky_relu(self._bn(self._conv(x)), 0.1)
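The block above shows only the forward pass of a Conv → BatchNormalize → LeakyReLU unit. A minimal sketch of a matching constructor, assuming ReNom's rm.Model, rm.Conv2d, and rm.BatchNormalize layers and using illustrative hyper-parameters:

import renom as rm

class ConvBnLeaky(rm.Model):
    def __init__(self, channel=32):
        # channel and filter sizes are illustrative, not taken from the source
        self._conv = rm.Conv2d(channel=channel, filter=3, padding=1)
        self._bn = rm.BatchNormalize()

    def forward(self, x):
        return rm.leaky_relu(self._bn(self._conv(x)), 0.1)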
Example #4
    def forward(self, x):
        h = self.pool1(rm.leaky_relu(self.bn1(self.conv1(x)), slope=0.1))
        h = self.pool2(rm.leaky_relu(self.bn2(self.conv2(h)), slope=0.1))
        h = rm.leaky_relu(self.bn3(self.conv3(h)), slope=0.1)
        h = rm.leaky_relu(self.bn4(self.conv4(h)), slope=0.1)
        h = self.pool3(rm.leaky_relu(self.bn5(self.conv5(h)), slope=0.1))
        h = rm.leaky_relu(self.bn6(self.conv6(h)), slope=0.1)
        h = rm.leaky_relu(self.bn7(self.conv7(h)), slope=0.1)
        h = self.pool4(rm.leaky_relu(self.bn8(self.conv8(h)), slope=0.1))
        h = rm.leaky_relu(self.bn9(self.conv9(h)), slope=0.1)
        h = rm.leaky_relu(self.bn10(self.conv10(h)), slope=0.1)
        h = rm.leaky_relu(self.bn11(self.conv11(h)), slope=0.1)
        h = rm.leaky_relu(self.bn12(self.conv12(h)), slope=0.1)
        h = rm.leaky_relu(self.bn13(self.conv13(h)), slope=0.1)
        high_resolution_feature = reorg(h)  # reduce the high-resolution feature map with reorg and keep it for later use
        h = self.pool5(h)
        h = rm.leaky_relu(self.bn14(self.conv14(h)), slope=0.1)
        h = rm.leaky_relu(self.bn15(self.conv15(h)), slope=0.1)
        h = rm.leaky_relu(self.bn16(self.conv16(h)), slope=0.1)
        h = rm.leaky_relu(self.bn17(self.conv17(h)), slope=0.1)
        h = rm.leaky_relu(self.bn18(self.conv18(h)), slope=0.1)

        ##### detection layer
        h = rm.leaky_relu(self.bn19(self.conv19(h)), slope=0.1)
        h = rm.leaky_relu(self.bn20(self.conv20(h)), slope=0.1)
        h = rm.concat(high_resolution_feature, h)
        h = rm.leaky_relu(self.bn21(self.conv21(h)), slope=0.1)
        h = self.conv22(h)

        return h
Example #5
 def func(node):
     return sum(rm.leaky_relu(node))
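The helper above packages a scalar forward pass for a gradient check, mirroring the Variable/grad pattern of Example #1. A minimal sketch of driving it by hand (assumptions: `Variable` is importable from renom's top level, `sum` in these examples is ReNom's sum reduction, and the input shape is arbitrary):

import numpy as np
import renom as rm
from renom import Variable

node = Variable(np.random.rand(3, 4).astype(np.float32))
loss = rm.sum(rm.leaky_relu(node))  # same scalar value that func(node) computes
grad = loss.grad()                  # backward pass from the scalar loss
d_node = grad.get(node)             # gradient of the loss with respect to node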
Example #6
    def forward(self, x):
        h = self.pool1(rm.leaky_relu(self.bn1(self.conv1(x)), slope=0.1))
        h = self.pool2(rm.leaky_relu(self.bn2(self.conv2(h)), slope=0.1))
        h = rm.leaky_relu(self.bn3(self.conv3(h)), slope=0.1)
        h = rm.leaky_relu(self.bn4(self.conv4(h)), slope=0.1)
        h = self.pool3(rm.leaky_relu(self.bn5(self.conv5(h)), slope=0.1))
        h = rm.leaky_relu(self.bn6(self.conv6(h)), slope=0.1)
        h = rm.leaky_relu(self.bn7(self.conv7(h)), slope=0.1)
        h = self.pool4(rm.leaky_relu(self.bn8(self.conv8(h)), slope=0.1))
        h = rm.leaky_relu(self.bn9(self.conv9(h)), slope=0.1)
        h = rm.leaky_relu(self.bn10(self.conv10(h)), slope=0.1)
        h = rm.leaky_relu(self.bn11(self.conv11(h)), slope=0.1)
        h = rm.leaky_relu(self.bn12(self.conv12(h)), slope=0.1)
        h = rm.leaky_relu(self.bn13(self.conv13(h)), slope=0.1)
        h = self.pool5(h)
        h = rm.leaky_relu(self.bn14(self.conv14(h)), slope=0.1)
        h = rm.leaky_relu(self.bn15(self.conv15(h)), slope=0.1)
        h = rm.leaky_relu(self.bn16(self.conv16(h)), slope=0.1)
        h = rm.leaky_relu(self.bn17(self.conv17(h)), slope=0.1)
        h = rm.leaky_relu(self.bn18(self.conv18(h)), slope=0.1)

        ##### pretraining layer
        h = self.conv23(h)
        h = rm.average_pool2d(h,
                              filter=(h.shape[-1], h.shape[-1]),
                              stride=(1, 1),
                              padding=(0, 0))

        y = rm.reshape(h, (x.shape[0], -1))

        return y
Example #7
 def forward(self, inputs):
     self.inputs = inputs
     h1 = rm.leaky_relu(self.l_1(self.inputs))
     h2 = rm.leaky_relu(self.l_2(h1))
     h3 = self.l_3(h2)
     return h3