Example #1
    def test_param_type(self):
        program_translator = ProgramTranslator()
        program_translator.enable(True)
        x_data = np.random.random((20, 20)).astype('float32')

        with fluid.dygraph.guard(fluid.CPUPlace()):
            net = SimpleFcLayer(20)
            x = fluid.dygraph.to_variable(x_data)
            out = net(x)

            program_cache = net.forward.program_cache
            _, (concrete_program, _) = program_cache.last()

            params = concrete_program.parameters

            concrete_program.parameters = params[0]
            # TypeError: Type of self._params should be list or tuple,
            # but received <class 'paddle.fluid.framework.ParamBase'>.
            with self.assertRaises(TypeError):
                partial_program_from(concrete_program)

            params[0] = "linear.w.0"
            concrete_program.parameters = params
            # TypeError: Type of self._params[0] should be framework.ParamBase,
            # but received <type 'str'>.
            with self.assertRaises(TypeError):
                partial_program_from(concrete_program)
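The SimpleFcLayer exercised above is defined elsewhere in the test file. A minimal sketch of what it plausibly looks like (a single fully connected layer whose forward is decorated with declarative so that program_cache gets populated; the exact definition may differ):

import paddle.fluid as fluid
from paddle.fluid.dygraph import Linear, declarative

class SimpleFcLayer(fluid.dygraph.Layer):
    def __init__(self, fc_size):
        super(SimpleFcLayer, self).__init__()
        self._linear = Linear(fc_size, fc_size)

    @declarative
    def forward(self, x):
        # Reduce to a scalar so the cached program has a single output.
        y = self._linear(x)
        return fluid.layers.mean(y)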
Example #2
    def train(self, to_static=False):
        program_translator = ProgramTranslator()
        program_translator.enable(to_static)

        with fluid.dygraph.guard():
            dy_layer = self.dygraph_class()
            x = fluid.dygraph.to_variable(self.data)
            prediction = dy_layer(x)
            if isinstance(prediction, (list, tuple)):
                prediction = prediction[0]

            return prediction.numpy()
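A companion test method (name and assertion illustrative, mirroring the pattern used by the other examples on this page) would then compare the two modes:

    def test_ast_to_func(self):
        static_res = self.train(to_static=True)
        dygraph_res = self.train(to_static=False)
        self.assertTrue(np.allclose(dygraph_res, static_res))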
Example #3
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        loss_data = []
        with fluid.dygraph.guard(self.place):
            fluid.default_main_program().random_seed = SEED
            fluid.default_startup_program().random_seed = SEED
            mnist = MNIST()
            adam = AdamOptimizer(learning_rate=0.001,
                                 parameter_list=mnist.parameters())

            for epoch in range(self.epoch_num):
                start = time()
                for batch_id, data in enumerate(self.train_reader()):
                    dy_x_data = np.array([
                        x[0].reshape(1, 28, 28) for x in data
                    ]).astype('float32')
                    y_data = np.array([x[1] for x in data
                                       ]).astype('int64').reshape(-1, 1)

                    img = to_variable(dy_x_data)
                    label = to_variable(y_data)

                    label.stop_gradient = True
                    prediction, acc, avg_loss = mnist(img, label=label)
                    avg_loss.backward()

                    adam.minimize(avg_loss)
                    loss_data.append(avg_loss.numpy()[0])
                    # clear gradients before the next batch
                    mnist.clear_gradients()
                    if batch_id % 10 == 0:
                        print(
                            "Loss at epoch {} step {}: loss: {:}, acc: {}, cost: {}"
                            .format(epoch, batch_id, avg_loss.numpy(),
                                    acc.numpy(),
                                    time() - start))
                        start = time()
                    if batch_id == 50:
                        mnist.eval()
                        prediction, acc, avg_loss = mnist(img, label)
                        loss_data.append(avg_loss.numpy()[0])
                        # check the jit save/load path on the trained model
                        self.check_jit_save_load(mnist, [dy_x_data], [img],
                                                 to_static, prediction)
                        break
        return loss_data
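The train_reader consumed by the inner loop is set up elsewhere; a plausible setUp, assuming the standard paddle.dataset MNIST pipeline of this Paddle generation (the batch size, epoch count, and place are illustrative):

    def setUp(self):
        self.epoch_num = 1
        self.place = fluid.CPUPlace()
        self.train_reader = paddle.batch(
            paddle.dataset.mnist.train(), batch_size=64, drop_last=True)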
Example #4
class TestErrorWithInitFromStaticMode(unittest.TestCase):
    def setUp(self):
        self.program_translator = ProgramTranslator()
        self.x = np.random.randn(10, 32).astype('float32')

    def test_raise_error(self):
        # switch to static graph mode (imperative/dygraph disabled)
        paddle.enable_static()
        net = Net()

        self.program_translator.enable(True)
        with self.assertRaisesRegex(RuntimeError,
                                    "only available in dynamic mode"):
            self.program_translator.get_output(net.forward, self.x)

        with self.assertRaisesRegex(RuntimeError,
                                    "only available in dynamic mode"):
            self.program_translator.get_program(net.forward, self.x)
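Net is defined elsewhere in the test file; for this check any dygraph Layer will do. An illustrative stand-in (the input dimension 32 matches self.x above; the output size is arbitrary):

import paddle.fluid as fluid

class Net(fluid.dygraph.Layer):
    def __init__(self):
        super(Net, self).__init__()
        self.fc = fluid.dygraph.Linear(32, 64)

    def forward(self, x):
        return self.fc(x)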
Example #5
    def test_save_load_same_result(self):
        program_translator = ProgramTranslator()
        x_data = np.random.randn(30, 10, 32).astype('float32')
        batch_num = 3

        with fluid.dygraph.guard(place):
            program_translator.enable(True)
            x = fluid.dygraph.to_variable(x_data)
            net = Linear(32, 64)
            adam = AdamOptimizer(learning_rate=0.1,
                                 parameter_list=net.parameters())

            for i in range(batch_num):
                static_out, static_loss = net(x)
                # Update parameters
                static_loss.backward()
                adam.minimize(static_loss)
                net.clear_gradients()
            # Save parameters
            fluid.save_dygraph(net.state_dict(), self.model_path)
            # minimize() has updated the parameters; switch into eval mode
            # and call net() again to get the final output and loss.
            net.eval()
            static_out, static_loss = net(x)

        # load parameters into dygraph
        with fluid.dygraph.guard(place):
            dygraph_net = Linear(32, 64)

            # Load parameters
            model_dict, _ = fluid.load_dygraph(self.model_path)
            dygraph_net.set_dict(model_dict)
            # Switch into eval mode.
            dygraph_net.eval()

            x = fluid.dygraph.to_variable(x_data)
            # predict output
            program_translator.enable(False)
            dygraph_out, dygraph_loss = dygraph_net(x)

        self.assertTrue(np.allclose(dygraph_out.numpy(), static_out.numpy()))
        self.assertTrue(np.allclose(dygraph_loss.numpy(), static_loss.numpy()))
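Note that Linear here is the test file's own layer (its forward returns both an output and a loss), not fluid.dygraph.Linear. A minimal sketch of what it plausibly looks like:

import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative

class Linear(fluid.dygraph.Layer):
    def __init__(self, in_size, out_size):
        super(Linear, self).__init__()
        self._linear = fluid.dygraph.Linear(in_size, out_size)

    @declarative
    def forward(self, x):
        pre = self._linear(x)
        loss = fluid.layers.mean(pre)
        return pre, loss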
Example #6
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        with fluid.dygraph.guard(fluid.CPUPlace()):
            dygraph_net = self.dygraph_class()
            adam = fluid.optimizer.AdamOptimizer(
                learning_rate=0.001, parameter_list=dygraph_net.parameters())
            loss_data = []
            for batch_id in range(self.batch_num):
                x = fluid.dygraph.to_variable(self.data)
                pred, avg_loss = dygraph_net(x)

                loss_data.append(avg_loss.numpy())
                avg_loss.backward()
                adam.minimize(avg_loss)
                dygraph_net.clear_gradients()

        return loss_data
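As in Example #2, a companion method (illustrative name) typically asserts that the two modes produce the same losses:

    def test_dygraph_static_same_loss(self):
        static_loss = self.train(to_static=True)
        dygraph_loss = self.train(to_static=False)
        self.assertTrue(np.allclose(dygraph_loss, static_loss))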
Example #7
class TestEnableDeclarative(unittest.TestCase):
    def setUp(self):
        self.x = np.random.randn(30, 10, 32).astype('float32')
        self.weight = np.random.randn(32, 64).astype('float32')
        self.program_translator = ProgramTranslator()

    def test_raise_error(self):
        with fluid.dygraph.guard():
            self.program_translator.enable(True)
            net = NetWithError()
            with self.assertRaises(ValueError):
                net(fluid.dygraph.to_variable(self.x))

    def test_enable_disable_get_output(self):
        self.program_translator.enable(True)
        with fluid.dygraph.guard():
            static_output = self.program_translator.get_output(
                simple_func, self.x, self.weight)

        self.program_translator.enable(False)
        with fluid.dygraph.guard():
            dygraph_output = self.program_translator.get_output(
                simple_func, self.x, self.weight)
            self.assertTrue(
                np.allclose(static_output.numpy(),
                            dygraph_output.numpy(),
                            atol=1e-4))

    def test_enable_disable_get_func(self):
        self.program_translator.enable(True)
        with fluid.dygraph.guard():
            static_func = self.program_translator.get_func(simple_func)
            self.assertTrue(callable(static_func))
            static_output = static_func(self.x, self.weight)
            self.assertTrue(isinstance(static_output, fluid.Variable))

        self.program_translator.enable(False)
        with fluid.dygraph.guard():
            dygraph_func = self.program_translator.get_func(simple_func)
            self.assertTrue(callable(dygraph_func))
            dygraph_output = dygraph_func(self.x, self.weight)
            self.assertTrue(isinstance(dygraph_output, fluid.core.VarBase))

    def test_enable_disable_get_program(self):
        self.program_translator.enable(True)
        static_output = self.program_translator.get_program(
            simple_func, self.x, self.weight)
        self.assertTrue(isinstance(static_output, tuple))
        self.assertEqual(len(static_output), 4)
        self.assertTrue(isinstance(static_output[0], fluid.Program))
        self.assertTrue(isinstance(static_output[1], fluid.Program))
        # Check all inputs and outputs are Variable
        for var in static_output[2]:
            self.assertTrue(isinstance(var, fluid.Variable))

        for var in static_output[3]:
            self.assertTrue(isinstance(var, fluid.Variable))

        self.program_translator.enable(False)
        with fluid.dygraph.guard():
            dygraph_output = self.program_translator.get_program(
                simple_func, self.x, self.weight)
            self.assertTrue(isinstance(dygraph_output, fluid.core.VarBase))

    def test_enable_disable_declarative(self):
        self.program_translator.enable(True)
        with fluid.dygraph.guard():
            static_output = decorated_simple_func(self.x, self.weight)

        self.program_translator.enable(False)
        with fluid.dygraph.guard():
            dygraph_output = decorated_simple_func(self.x, self.weight)
            self.assertTrue(
                np.allclose(static_output.numpy(),
                            dygraph_output.numpy(),
                            atol=1e-4))
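The helpers simple_func and decorated_simple_func are defined elsewhere in the test file; a hedged sketch of plausible definitions (the real bodies may differ, but the decorated variant must wrap the plain one):

import paddle.fluid as fluid
from paddle.fluid.dygraph import declarative

def simple_func(x, weight_numpy):
    x = fluid.dygraph.to_variable(x)
    w = fluid.dygraph.to_variable(weight_numpy)
    y = fluid.layers.matmul(x, w)
    return fluid.layers.mean(y)

@declarative
def decorated_simple_func(x, weight_numpy):
    return simple_func(x, weight_numpy)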