Example #1
    def forward(self, x):
        x = self.bn(self.conv(x))
        if self.act == "leaky_relu":
            x = F.leaky_relu(x)
        elif self.act == "hard_swish":
            x = F.hardswish(x)
        return x
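
For reference, hardswish(x) = x * relu6(x + 3) / 6, which is the operation applied in all of the examples on this page. A minimal standalone check of F.hardswish against that formula (not taken from any of the projects above):

import numpy as np
import paddle
import paddle.nn.functional as F

x = paddle.to_tensor(np.linspace(-6.0, 6.0, 25).astype("float32"))
# hardswish(x) = x * relu6(x + 3) / 6
manual = x * F.relu6(x + 3.0) / 6.0
print(np.allclose(F.hardswish(x).numpy(), manual.numpy()))  # expected: True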
Example #2
    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        if self.if_act:
            if self.act == "relu":
                x = F.relu(x)
            elif self.act == "hardswish":
                x = F.hardswish(x)
            else:
                print("The activation function is selected incorrectly.")
                exit()
        return x
Example #3
    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        if self.act is not None:
            if self.act == "relu":
                x = F.relu(x)
            elif self.act == "relu6":
                x = F.relu6(x)
            elif self.act == "hard_swish":
                x = F.hardswish(x)
            else:
                raise NotImplementedError(
                    "The activation function is selected incorrectly.")
        return x
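
The forward() methods in examples #1-#3 all come from conv + batch-norm + activation building blocks that keep the activation name in self.act. A minimal self-contained sketch of such a block (class name, arguments, and defaults are illustrative, not taken from any of the projects above):

import paddle.nn as nn
import paddle.nn.functional as F

class ConvBNLayer(nn.Layer):
    """Conv + BatchNorm + optional activation, in the style of the examples above."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, act=None):
        super().__init__()
        self.conv = nn.Conv2D(
            in_channels, out_channels, kernel_size,
            stride=stride, padding=kernel_size // 2, bias_attr=False)
        self.bn = nn.BatchNorm2D(out_channels)
        self.act = act

    def forward(self, x):
        x = self.bn(self.conv(x))
        if self.act == "relu":
            x = F.relu(x)
        elif self.act == "hard_swish":
            x = F.hardswish(x)
        return x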
Example #4
    def setUp(self):
        paddle.disable_static()

        # target device for the NPU test; the reference below is computed
        # on CPU and compared against the NPU run in the test method
        self.place = paddle.NPUPlace(0)
        self.dtype = np.float32

        self.x = np.random.uniform(-6, 10, [8, 15]).astype(self.dtype)

        paddle.set_device('cpu')

        # CPU reference: hardswish forward output and input gradient
        data = paddle.to_tensor(self.x, stop_gradient=False)
        y = F.hardswish(data)
        y.sum().backward()

        self.out_g = data.grad
        self.out_y = y
Example #5
    def forward(self, inputs, label=None):
        x = self.conv1(inputs)

        for block in self.block_list:
            x = block(x)

        x = self.last_second_conv(x)
        x = self.pool(x)

        x = self.last_conv(x)
        # hardswish here is presumably paddle.nn.functional.hardswish,
        # imported at module level in the original project
        x = hardswish(x)
        x = self.dropout(x)
        x = paddle.flatten(x, start_axis=1, stop_axis=-1)
        x = self.out(x)

        return x
Example #6
    def test_check_output_and_grad_npu(self):
        paddle.set_device('npu')

        # rerun the same forward/backward pass on the NPU and compare
        # against the CPU reference recorded in setUp
        data = paddle.to_tensor(self.x, stop_gradient=False)
        y = F.hardswish(data)
        y.sum().backward()

        self.assertTrue(
            np.allclose(self.out_y.numpy(), y.numpy()),
            "Output of NPU HardSwish forward has diff at " + str(self.place) +
            "\nExpect " + str(self.out_y) + "\nBut Got " + str(y) +
            " in class " + self.__class__.__name__ + ".")
        self.assertTrue(
            np.allclose(self.out_g.numpy(), data.grad.numpy()),
            "Output of NPU HardSwish backward has diff at " + str(self.place) +
            "\nExpect " + str(self.out_g) + "\nBut Got " + str(data.grad) +
            " in class " + self.__class__.__name__ + ".")
Example #7
    def act_func(self, x):
        if self.act == "leaky_relu":
            x = F.leaky_relu(x)
        elif self.act == "hard_swish":
            x = F.hardswish(x)
        return x
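
Besides the functional F.hardswish used throughout these examples, the same operation is also available as a layer, which is convenient inside nn.Sequential. A minimal sketch (assuming paddle.nn.Hardswish, the layer counterpart of the functional API):

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

x = paddle.randn([2, 8])
layer_out = nn.Hardswish()(x)       # layer form
func_out = F.hardswish(x)           # functional form
print(bool(paddle.allclose(layer_out, func_out)))  # expected: True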