Example #1
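These methods come from a larger test module, so they rely on module-level fixtures (`device_checker`, `gradient_checkers`) and a per-class `test_configs` list that the excerpt does not show. Below is a minimal sketch of that setup, assuming the old pycaffe2 module layout; the constructor signatures and tolerance values are illustrative assumptions, not verified API. Note also that the snippets use pycaffe2's curried `core.CreateOperator(op_type)(inputs, outputs, ...)` form; released Caffe2 later folded this into a single call, `core.CreateOperator(op_type, inputs, outputs, ...)`.

import numpy as np

from caffe2.proto import caffe2_pb2
from pycaffe2 import core, device_checker, gradient_checker  # assumed layout

# Assumed setup: run every check on CPU only, with illustrative tolerances.
cpu_device_option = caffe2_pb2.DeviceOption()  # device_type defaults to CPU

# The instance deliberately shadows the module name, matching how the test
# methods below refer to it.
device_checker = device_checker.DeviceChecker(0.01, [cpu_device_option])
gradient_checkers = [
    gradient_checker.GradientChecker(0.01, 0.05, cpu_device_option,
                                     "cpu checker"),  # stepsize, threshold
]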
 def testSigmoid(self):
     for input_size in self.test_configs:
         op = core.CreateOperator("Sigmoid")(["X"], ["Y"])
         X = np.random.rand(*input_size).astype(np.float32) - 0.5
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X], 0, [0])
             self.assertTrue(res)
 def testRelu(self):
     for input_size in self.test_configs:
         op = core.CreateOperator("Relu")(["X"], ["Y"])
         X = np.random.rand(*input_size).astype(np.float32)
         # Nudge inputs away from the origin: the kink of ReLU at zero makes
         # numerical gradient checks unreliable there.
         X += 0.01 * np.sign(X)
         X[X == 0] = 0.01
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X], 0, [0])
             self.assertTrue(res)
 def testLRN(self):
     for input_size, depth in self.test_configs:
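         # Cross-channel local response normalization (NHWC, so channels are
         # the last axis); size/alpha/beta/bias follow the AlexNet-style
         # parameterization.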
         op = core.CreateOperator("LRN")(["X"], ["Y", "Y_scale"],
                                         size=11,
                                         alpha=0.001,
                                         beta=0.5,
                                         bias=2.0,
                                         order="NHWC")
         X = np.random.rand(2, input_size, input_size,
                            depth).astype(np.float32)
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X], 0, [0])
             self.assertTrue(res)
 def testAveragePoolingLegacyPadding(self):
     for stride, kernel, legacy_pad, size, order in self.test_configs:
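         # Each config pairs a stride/kernel/input size with a legacy padding mode.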
         print('AveragePool', stride, kernel, legacy_pad, size, order)
         op = core.CreateOperator("AveragePool")(["X"], ["Y"],
                                                 stride=stride,
                                                 kernel=kernel,
                                                 legacy_pad=legacy_pad,
                                                 order=order)
         if order == "NHWC":
             X = np.random.rand(2, size, size, 3).astype(np.float32)
         else:
             X = np.random.rand(2, 3, size, size).astype(np.float32)
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X], 0, [0])
             self.assertTrue(res)
 def testMaxPoolingLegacyPadding(self):
     for stride, kernel, legacy_pad, size, order in self.test_configs:
         print('MaxPool', stride, kernel, legacy_pad, size, order)
         op = core.CreateOperator("MaxPool")(["X"], ["Y", "Y_maxid"],
                                             stride=stride,
                                             kernel=kernel,
                                             legacy_pad=legacy_pad,
                                             order=order)
         # Use a random permutation scaled by 0.01 so that all entries differ
         # by at least 0.01; exact ties in the max would make the gradient
         # ambiguous (a race condition between equal maxima in the backward pass).
         if order == "NHWC":
             X = np.random.permutation(1 * size * size * 3).reshape(
                 1, size, size, 3).astype(np.float32) * 0.01
         else:
             X = np.random.permutation(1 * size * size * 3).reshape(
                 1, 3, size, size).astype(np.float32) * 0.01
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X], 0, [0])
             self.assertTrue(res)
 def testDepthConcatNCHW(self):
     for input_size, d1, d2, d3, d4 in self.test_configs:
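         # Concatenate the four inputs along the channel axis (NCHW axis 1);
         # d1..d4 are the per-input depths.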
         op = core.CreateOperator("DepthConcat")(["X1", "X2", "X3", "X4"],
                                                 ["Y", "Y_dims"],
                                                 order="NCHW")
         Xs = [
             np.random.rand(2, d1, input_size,
                            input_size).astype(np.float32),
             np.random.rand(2, d2, input_size,
                            input_size).astype(np.float32),
             np.random.rand(2, d3, input_size,
                            input_size).astype(np.float32),
             np.random.rand(2, d4, input_size,
                            input_size).astype(np.float32)
         ]
         res = device_checker.CheckSimple(op, Xs, [0])
         self.assertTrue(res)
         # Check the gradient with respect to each of the four inputs in turn.
         for i in range(4):
             for checker in gradient_checkers:
                 res, grad, grad_estimated = checker.CheckSimple(
                     op, Xs, i, [0])
                 self.assertTrue(res)
 def testConvolutionLegacyPadding(self):
     for stride, kernel, legacy_pad, size, order in self.test_configs:
         print('Conv', stride, kernel, legacy_pad, size, order)
         op = core.CreateOperator("Conv")(["X", "w", "b"], ["Y"],
                                          stride=stride,
                                          kernel=kernel,
                                          legacy_pad=legacy_pad,
                                          order=order)
         if order == "NHWC":
             X = np.random.rand(2, size, size, 3).astype(np.float32) - 0.5
             w = np.random.rand(4, kernel, kernel, 3).astype(
                 np.float32) - 0.5
         else:
             X = np.random.rand(2, 3, size, size).astype(np.float32) - 0.5
             w = np.random.rand(4, 3, kernel, kernel).astype(
                 np.float32) - 0.5
         b = np.random.rand(4).astype(np.float32) - 0.5
         res = device_checker.CheckSimple(op, [X, w, b], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
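             # Check the gradient w.r.t. X (input 0), w (input 1) and b (input 2).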
             for i in range(3):
                 res, grad, grad_estimated = checker.CheckSimple(
                     op, [X, w, b], i, [0])
                 self.assertTrue(res)
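Finally, a sketch of how the methods above would be wrapped and executed; the class name and the `test_configs` values are hypothetical, and each method expects tuples of the shape it unpacks (e.g. `(stride, kernel, legacy_pad, size, order)` for the pooling and convolution tests):

import unittest

class TestOperatorGradients(unittest.TestCase):  # hypothetical name
    # Illustrative shapes for testSigmoid/testRelu only; the pooling, LRN,
    # concat and conv tests each need tuples matching what they unpack.
    test_configs = [(1, 1), (2, 3, 4)]

    # ... paste the test methods shown above here ...

if __name__ == '__main__':
    unittest.main()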