Example #1
    def test_get_exception_message(self):
        exception_message = "test_message"
        self.assertRaises(AssertionError, cpt.get_exception_message, None)
        if six.PY2:
            self.assertRaises(AttributeError, cpt.get_exception_message,
                              exception_message)
            try:
                raise RuntimeError(exception_message)
            except Exception as e:
                self.assertEqual(exception_message,
                                 cpt.get_exception_message(e))
                self.assertIsNotNone(e)

            try:
                raise Exception(exception_message)
            except Exception as e:
                self.assertEqual(exception_message,
                                 cpt.get_exception_message(e))
                self.assertIsNotNone(e)

        if six.PY3:
            try:
                raise RuntimeError(exception_message)
            except Exception as e:
                self.assertEqual(exception_message,
                                 cpt.get_exception_message(e))
                self.assertIsNotNone(e)

            try:
                raise Exception(exception_message)
            except Exception as e:
                self.assertEqual(exception_message,
                                 cpt.get_exception_message(e))
                self.assertIsNotNone(e)
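
Taken together, the two branches above also document what cpt.get_exception_message is expected to do: it rejects None, on Python 2 it reads the exception's message attribute (which is why passing a bare string raises AttributeError), and on Python 3 it falls back to str(e). Below is a minimal sketch with exactly that behaviour, assuming only what the test asserts; it is not the actual Paddle implementation:

import six

def get_exception_message_sketch(exc):
    # Reject None, matching the AssertionError checked at the top of the test.
    assert exc is not None, "exc should not be None"
    if six.PY2:
        # Python 2 exceptions expose a `message` attribute; a bare str does not,
        # which is why the test expects AttributeError for a string argument.
        return exc.message
    # On Python 3 the message is recovered via str().
    return str(exc)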
Example #2
    def test_multi_process_with_get_timeout(self):
        def slow_batch_generator_creator(batch_size, batch_num):
            def __reader__():
                for _ in range(batch_num):
                    time.sleep(80)
                    batch_image, batch_label = get_random_images_and_labels(
                        [batch_size, 784], [batch_size, 1])
                    yield batch_image, batch_label

            return __reader__

        with fluid.dygraph.guard():
            loader = fluid.io.DataLoader.from_generator(capacity=self.capacity,
                                                        use_multiprocess=True)
            loader.set_batch_generator(slow_batch_generator_creator(
                self.batch_size, self.batch_num),
                                       places=fluid.CPUPlace())
            exception = None
            try:
                for _ in range(self.epoch_num):
                    for image, _ in loader():
                        fluid.layers.relu(image)
            except core.EnforceNotMet as ex:
                self.assertIn("Blocking queue is killed",
                              cpt.get_exception_message(ex))
                exception = ex
            self.assertIsNotNone(exception)
Example #3
    def test_error_type(self):
        block = main_program._create_block()
        try:
            block.append_op()
            self.fail()
        except ValueError as v_err:
            self.assertEqual(
                cpt.get_exception_message(v_err),
                "`type` to initialized an Operator can not be None.")
        try:
            block.append_op(type="no_such_op")
            self.fail()
        except ValueError as a_err:
            self.assertEqual(
                cpt.get_exception_message(a_err),
                "Operator \"no_such_op\" has not been registered.")
Example #4
    def test_fetch_handler(self):
        """
        Test Dataset With Fetch Handler. TestCases.
        """
        slots_vars, out = self.net()
        files = ["test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"]
        dataset = self.get_dataset(slots_vars, files)

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        fh = fluid.executor.FetchHandler(out.name)
        fh.help()

        try:
            exe.train_from_dataset(
                program=fluid.default_main_program(),
                dataset=dataset,
                fetch_handler=fh)
        except ImportError as e:
            print("warning: we skip trainer_desc_pb2 import problem in windows")
        except RuntimeError as e:
            error_msg = "dataset is need and should be initialized"
            self.assertEqual(error_msg, cpt.get_exception_message(e))
        except Exception as e:
            self.assertTrue(False)
Example #5
    def test_shape_errors(self):
        with fluid.dygraph.guard():
            try:
                shape = [-1, 5]
                out = paddle.zeros(shape)
            except Exception as e:
                error_msg = cpt.get_exception_message(e)
                assert error_msg.find("expected to be no less than 0") > 0
Example #6
    def func_test_not_callable_func(self):
        exception = None
        try:
            CleanupFuncRegistrar.register(5)
        except TypeError as ex:
            self.assertIn("is not callable", cpt.get_exception_message(ex))
            exception = ex
        self.assertIsNotNone(exception)
Example #7
    def test_exception(self):
        exception = None
        try:
            core.__unittest_throw_exception__()
        except core.EnforceNotMet as ex:
            self.assertIn("test exception", cpt.get_exception_message(ex))
            exception = ex

        self.assertIsNotNone(exception)
Example #8
    def test_exception(self):
        exception = None
        try:
            core.__unittest_throw_exception__()
        except RuntimeError as ex:
            self.assertIn("This is a test of exception",
                          cpt.get_exception_message(ex))
            exception = ex

        self.assertIsNotNone(exception)
Example #9
    def test_allow_unused_false(self):
        def func(x, y):
            return paddle.sum(paddle.matmul(x, x))

        try:
            self.x.stop_gradient = False
            self.y.stop_gradient = False
            hessian = paddle.autograd.hessian(func, [self.x, self.y])
        except ValueError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("allow_unused") > 0
Example #10
    def test_allow_unused_false(self):
        def func(x, y):
            return x * x

        try:
            self.x.stop_gradient = False
            self.y.stop_gradient = False
            jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y])
        except ValueError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("allow_unused") > 0
Example #11
    def test_load(self):
        mul_out, b1_out, b2_out, mean_out = self.net()
        sgd_optimizer = optimizer.SGD(learning_rate=1.0)
        recompute_optimizer = optimizer.RecomputeOptimizer(sgd_optimizer)
        recompute_optimizer._set_checkpoints([b1_out])
        try:
            stat_dict = {}
            recompute_optimizer.load(stat_dict)
        except NotImplementedError as e:
            self.assertEqual(
                "load function is not supported by Recompute Optimizer for now",
                cpt.get_exception_message(e))
Example #12
    def test_create_graph_false(self):
        def func(x):
            return paddle.matmul(x * x, self.weight)[:, 0:1]

        numerical_hessian = _compute_numerical_batch_hessian(
            func, self.x, self.numerical_delta, self.np_dtype)
        self.x.stop_gradient = False
        hessian = paddle.autograd.batch_hessian(func, self.x)
        assert hessian.stop_gradient == True
        assert np.allclose(hessian.numpy(), numerical_hessian, self.rtol,
                           self.atol)
        try:
            paddle.grad(hessian, self.x)
        except RuntimeError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("has no gradient") > 0
Example #13
    def test_prune_target_none(self):
        program = framework.Program()
        startup_program = framework.Program()
        block = program.global_block()
        with fluid.program_guard(program, startup_program):
            (x, y, label, loss) = self.net()
        self.assertEqual(len(block.ops), 5)
        self.assertEqual(
            [op.type for op in block.ops],
            ["mul", "elementwise_add", "softmax", "cross_entropy2", "mean"])
        try:
            pruned_program = program._prune(targets=None)
        except ValueError as e:
            self.assertEqual(
                "All targets of prune() can only be Variable or Operator.",
                cpt.get_exception_message(e))
Example #14
    def test_child_process_exit_will_error(self):
        def __test_process__():
            core._set_process_signal_handler()
            sys.exit(1)

        exception = None
        try:
            test_process = multiprocessing.Process(target=__test_process__)
            test_process.start()

            set_child_signal_handler(id(self), test_process.pid)
            time.sleep(1)
        except core.EnforceNotMet as ex:
            self.assertIn("FatalError", cpt.get_exception_message(ex))
            exception = ex

        self.assertIsNotNone(exception)
Example #15
    def test_child_process_killed_by_sigsegv(self):
        def __test_process__():
            core._set_process_signal_handler()
            os.kill(os.getpid(), signal.SIGSEGV)

        exception = None
        try:
            test_process = multiprocessing.Process(target=__test_process__)
            test_process.start()

            set_child_signal_handler(id(self), test_process.pid)
            time.sleep(1)
        except core.EnforceNotMet as ex:
            self.assertIn("FatalError", cpt.get_exception_message(ex))
            exception = ex

        self.assertIsNotNone(exception)
Example #16
    def test_create_graph_false(self):
        def func(x, y):
            return x * y

        numerical_jacobian = _compute_numerical_batch_jacobian(
            func, [self.x, self.y], self.numerical_delta, self.np_dtype)
        self.x.stop_gradient = False
        self.y.stop_gradient = False
        jacobian = paddle.autograd.batch_jacobian(func, [self.x, self.y])
        for j in range(len(jacobian)):
            assert jacobian[j].stop_gradient == True
            assert np.allclose(jacobian[j].numpy(), numerical_jacobian[0][j],
                               self.rtol, self.atol)
        try:
            paddle.grad(jacobian[0], [self.x, self.y])
        except RuntimeError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("has no gradient") > 0
Example #17
    def func_simple_example_eager_grad_not_allow_unused(self):
        np.random.seed(2021)
        paddle.set_device('cpu')
        np_x = np.random.random((3, 3))
        np_y = np.random.random((3, 1))
        np_z = np.random.random((3, 1))
        x = paddle.to_tensor(np_x, dtype="float64", stop_gradient=False)
        y = paddle.to_tensor(np_y, dtype="float64", stop_gradient=False)
        z = paddle.to_tensor(np_z, dtype="float64", stop_gradient=False)
        out_z = paddle.nn.functional.sigmoid(z)
        out = paddle.matmul(x, y)

        try:
            # allow_unused is False by default
            dx = fluid.dygraph.grad(out, [x, z])
        except ValueError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("allow_unused") > 0
Example #18
    def func_simple_example_eager_grad_duplicate_output(self):
        np.random.seed(2021)
        paddle.set_device('cpu')
        np_x = np.random.random((3, 3))
        np_y = np.random.random((3, 1))
        np_z = np.random.random((3, 1))
        x = paddle.to_tensor(np_x, dtype="float64", stop_gradient=False)
        y = paddle.to_tensor(np_y, dtype="float64", stop_gradient=False)
        z = paddle.to_tensor(np_z, dtype="float64", stop_gradient=False)
        out_z = paddle.nn.functional.sigmoid(z)
        out = paddle.matmul(x, y)

        try:
            # duplicate outputs will raise a RuntimeError
            dx = fluid.dygraph.grad([out, out], [x])
        except RuntimeError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("duplicate") > 0
Example #19
    def test_create_graph_false(self):
        def func(x):
            return paddle.sum(F.sigmoid(x))

        numerical_func_output = func(self.x).numpy()
        numerical_vhp = _compute_numerical_vhp(func, self.x, self.vx,
                                               self.numerical_delta,
                                               self.np_dtype)

        self.x.stop_gradient = False
        func_output, vhp = paddle.autograd.vhp(func, self.x, self.vx)
        assert np.allclose(func_output.numpy(), numerical_func_output,
                           self.rtol, self.atol)
        assert vhp[0].stop_gradient == True
        assert np.allclose(vhp[0].numpy(), numerical_vhp[0], self.rtol,
                           self.atol)
        try:
            paddle.grad(vhp, self.x)
        except RuntimeError as e:
            error_msg = cpt.get_exception_message(e)
            assert error_msg.find("has no gradient") > 0
Example #20
    def test_dataset_none(self):
        slots_vars, out = self.net()
        files = [
            "test_queue_dataset_run_a.txt", "test_queue_dataset_run_b.txt"
        ]
        dataset = self.get_dataset(slots_vars, files)

        exe = fluid.Executor(fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        # test dataset->None
        try:
            exe.train_from_dataset(fluid.default_main_program(), None)
        except ImportError as e:
            print(
                "warning: we skip the trainer_desc_pb2 import problem on Windows")
        except RuntimeError as e:
            error_msg = "dataset is need and should be initialized"
            self.assertEqual(error_msg, cpt.get_exception_message(e))
        except Exception as e:
            self.assertTrue(False)
Example #21
    def test_multi_process_with_thread_expection(self):
        def error_sample_genarator(batch_num):
            def __reader__():
                for _ in range(batch_num):
                    yield [[[1, 2], [1]]]

            return __reader__

        with fluid.dygraph.guard():
            loader = fluid.io.DataLoader.from_generator(capacity=self.capacity,
                                                        use_multiprocess=True)
            loader.set_batch_generator(error_sample_genarator(self.batch_num),
                                       places=fluid.CPUPlace())
            exception = None
            try:
                for _ in loader():
                    print("test_multi_process_with_thread_expection")
            except core.EnforceNotMet as ex:
                self.assertIn("Blocking queue is killed",
                              cpt.get_exception_message(ex))
                exception = ex
            self.assertIsNotNone(exception)
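
Most of the examples above follow the same try/except pattern: provoke the error, check the text returned by cpt.get_exception_message, and, in the stricter variants, assert afterwards that an exception was actually raised. A hypothetical helper condensing that pattern (assert_raises_with_message is not part of the Paddle test utilities, and cpt is assumed to be the compat module imported as in the examples):

def assert_raises_with_message(test_case, exc_type, substring, func, *args, **kwargs):
    # Call `func`, expect `exc_type`, and check that `substring` occurs in the
    # message returned by cpt.get_exception_message; fail if nothing was raised.
    caught = None
    try:
        func(*args, **kwargs)
    except exc_type as ex:
        test_case.assertIn(substring, cpt.get_exception_message(ex))
        caught = ex
    test_case.assertIsNotNone(caught)

With such a helper, the test in Example #7 would reduce to a single call: assert_raises_with_message(self, core.EnforceNotMet, "test exception", core.__unittest_throw_exception__).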