Example #1
 def testConvolutionPadding(self):
     for stride, kernel, pad, size, order, engine in self.test_configs:
         print('conv {} {} {} {} {} {}'.format(
             stride, kernel, pad, size, order, engine)
         )
         op = core.CreateOperator("Conv",
             ["X", "w", "b"],
             ["Y"],
             stride=stride,
             kernel=kernel,
             pad=pad,
             order=order,
             engine=engine,
         )
         if order == "NHWC":
             X = np.random.rand(2, size, size, 3).astype(np.float32) - 0.5
             w = np.random.rand(4, kernel, kernel,
                                3).astype(np.float32) - 0.5
         else:
             X = np.random.rand(2, 3, size, size).astype(np.float32) - 0.5
             w = np.random.rand(4, 3, kernel,
                                kernel).astype(np.float32) - 0.5
         b = np.random.rand(4).astype(np.float32) - 0.5
         res = device_checker.CheckSimple(op, [X, w, b], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             for i in range(3):
                 res, grad, grad_estimated = checker.CheckSimple(
                     op, [X, w, b], i, [0]
                 )
                 self.assertTrue(res)
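
Note: each snippet reads its parameters from a self.test_configs attribute that the page does not show. A minimal sketch of what the convolution test's setUp might provide (the tuple layout follows the loop above; the concrete values are assumptions, not taken from the source):

 def setUp(self):
     # Hypothetical (stride, kernel, pad, size, order, engine) tuples.
     self.test_configs = [
         (1, 3, 0, 5, "NCHW", ""),
         (2, 3, 1, 8, "NHWC", ""),
     ]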
Example #2
 def testMaxPoolingLegacyPadding(self):
     for stride, kernel, legacy_pad, size, order in self.test_configs:
         print('MaxPool {} {} {} {} {}'.format(stride, kernel, legacy_pad,
                                               size, order))
         op = core.CreateOperator("MaxPool",
             ["X"],
             ["Y"],
             stride=stride,
             kernel=kernel,
             legacy_pad=legacy_pad,
             order=order
         )
         # Use a random permutation so that the values are at least 0.01
         # apart: ties in the max would make the pooling gradient ambiguous
         # and the check flaky.
         if order == "NHWC":
             X = np.random.permutation(1 * size * size * 3).reshape(
                 1, size, size, 3).astype(np.float32) * 0.01
         else:
             X = np.random.permutation(1 * size * size * 3).reshape(
                 1, 3, size, size).astype(np.float32) * 0.01
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
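
The legacy_pad argument takes a value from Caffe2's LegacyPadding enum rather than a pixel count. A hedged sketch of a matching config list, assuming the caffe2.proto.caffe2_legacy_pb2 module and illustrative sizes:

 from caffe2.proto import caffe2_legacy_pb2

 # Hypothetical (stride, kernel, legacy_pad, size, order) tuples.
 test_configs = [
     (1, 2, caffe2_legacy_pb2.VALID, 4, "NCHW"),
     (2, 3, caffe2_legacy_pb2.SAME, 7, "NHWC"),
 ]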
Example #3
 def testFlatten(self):
     op = core.CreateOperator("Flatten", ["X"], ["Y"])
     X = np.random.rand(2, 3, 4, 5).astype(np.float32)
     res = device_checker.CheckSimple(op, [X], [0])
     self.assertTrue(res)
     for checker in gradient_checkers:
         res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
         self.assertTrue(res)
Example #4
 def testSigmoid(self):
     for input_size in self.test_configs:
         op = core.CreateOperator("Sigmoid", ["X"], ["Y"])
         X = np.random.rand(*input_size).astype(np.float32) - 0.5
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
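
Outside the checkers, any of these operators can also be exercised directly through the Caffe2 workspace API; for example, a quick manual run of the Sigmoid op (blob names are arbitrary):

 from caffe2.python import core, workspace
 import numpy as np

 workspace.FeedBlob("X", np.random.rand(2, 3).astype(np.float32) - 0.5)
 workspace.RunOperatorOnce(core.CreateOperator("Sigmoid", ["X"], ["Y"]))
 print(workspace.FetchBlob("Y"))  # elementwise 1 / (1 + exp(-x))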
Example #5
 def testLRN(self):
     for input_size, depth in self.test_configs:
         op = core.CreateOperator("LRN",
             ["X"],
             ["Y", "Y_scale"],
             size=11,
             alpha=0.001,
             beta=0.5,
             bias=2.0,
             order="NHWC"
         )
         X = np.random.rand(2, input_size, input_size,
                            depth).astype(np.float32)
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
Example #6
 def testMakeTwoClass(self):
     for input_size in self.test_configs:
         op = core.CreateOperator("MakeTwoClass", ["X"], ["Y"])
         X = np.random.rand(*input_size).astype(np.float32)
         # Nudge the values away from the 0/1 boundaries, where the
         # numerical gradient check is unreliable.
         X[X < 0.01] += 0.01
         X[X > 0.99] -= 0.01
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
Example #7
 def testAbs(self):
     for input_size in self.test_configs:
         op = core.CreateOperator("Abs", ["X"], ["Y"])
         X = np.random.rand(*input_size).astype(np.float32)
         # Move the values away from zero, where abs() has a kink and the
         # gradient is undefined.
         X += 0.01 * np.sign(X)
         X[X == 0] = 0.01
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
Example #8
 def testSum(self):
     for (input_size, in_place) in self.test_configs:
         op = core.CreateOperator("Sum", ["X1", "X2"],
                                  ["Y" if not in_place else "X1"])
         X1 = np.random.rand(*input_size).astype(np.float32) - 0.5
         X2 = np.random.rand(*input_size).astype(np.float32) - 0.5
         res = device_checker.CheckSimple(op, [X1, X2], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(
                 op, [X1, X2], 0, [0])
             self.assertTrue(res)
Example #9
 def testDepthConcatNCHW(self):
     for input_size, d1, d2, d3, d4 in self.test_configs:
         op = core.CreateOperator("DepthConcat",
             ["X1", "X2", "X3", "X4"],
             ["Y", "Y_dims"],
             order="NCHW"
         )
         Xs = [
             np.random.rand(2, d, input_size, input_size).astype(np.float32)
             for d in (d1, d2, d3, d4)
         ]
         res = device_checker.CheckSimple(op, Xs, [0])
         self.assertTrue(res)
         # Check the gradient with respect to each of the four inputs.
         for i in range(4):
             for checker in gradient_checkers:
                 res, grad, grad_estimated = checker.CheckSimple(
                     op, Xs, i, [0]
                 )
                 self.assertTrue(res)
Example #10
 def testAveragePoolingLegacyPadding(self):
     for stride, kernel, legacy_pad, size, order in self.test_configs:
         print('AveragePool {} {} {} {} {}'.format(stride, kernel, legacy_pad,
                                                   size, order))
         op = core.CreateOperator("AveragePool",
             ["X"],
             ["Y"],
             stride=stride,
             kernel=kernel,
             legacy_pad=legacy_pad,
             order=order
         )
         if order == "NHWC":
             X = np.random.rand(2, size, size, 3).astype(np.float32)
         else:
             X = np.random.rand(2, 3, size, size).astype(np.float32)
         res = device_checker.CheckSimple(op, [X], [0])
         self.assertTrue(res)
         for checker in gradient_checkers:
             res, grad, grad_estimated = checker.CheckSimple(op, [X], 0, [0])
             self.assertTrue(res)
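
All of the snippets above rely on module-level device_checker and gradient_checkers objects built from Caffe2's checker utilities. A minimal sketch of that shared harness, assuming CPU-only checking (the thresholds and step size are illustrative, not taken from the source):

 from caffe2.proto import caffe2_pb2
 from caffe2.python import core, device_checker, gradient_checker
 import numpy as np

 cpu_device_option = caffe2_pb2.DeviceOption()  # defaults to CPU

 # Note: rebinds the device_checker module name to a checker instance,
 # matching how the snippets above call device_checker.CheckSimple(...).
 device_checker = device_checker.DeviceChecker(0.01, [cpu_device_option])
 gradient_checkers = [
     gradient_checker.GradientChecker(
         0.005, 0.05, cpu_device_option, "gradient_check"
     ),
 ]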