Example 1
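Verifies that building a partial program from a ConcreteProgram raises TypeError when its parameters attribute is not a list or tuple, or when its elements are not ParamBase instances (partial_program_from is a helper defined in the test module).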
    def test_param_type(self):
        program_translator = ProgramTranslator()
        program_translator.enable(True)
        x_data = np.random.random((20, 20)).astype('float32')

        with fluid.dygraph.guard(fluid.CPUPlace()):
            net = SimpleFcLayer(20)
            x = fluid.dygraph.to_variable(x_data)
            out = net(x)

            program_cache = net.forward.program_cache
            _, (concrete_program, _) = program_cache.last()

            params = concrete_program.parameters

            concrete_program.parameters = params[0]
            # TypeError: Type of self._params should be list or tuple,
            # but received <class 'paddle.fluid.framework.ParamBase'>.
            with self.assertRaises(TypeError):
                partial_program_from(concrete_program)

            params[0] = "linear.w.0"
            concrete_program.parameters = params
            # TypeError: Type of self._params[0] should be framework.ParamBase,
            # but received <type 'str'>.
            with self.assertRaises(TypeError):
                partial_program_from(concrete_program)
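The SimpleFcLayer model used here and in Example 2 is defined elsewhere in the test file. Below is a minimal sketch of what such a layer could look like, assuming a single Linear sublayer whose forward is marked with the declarative decorator; the names and structure are illustrative, not the test suite's exact definition:

import paddle.fluid as fluid
from paddle.fluid.dygraph.jit import declarative


class SimpleFcLayer(fluid.dygraph.Layer):
    # Illustrative stand-in for the SimpleFcLayer these tests import.
    def __init__(self, fc_size):
        super(SimpleFcLayer, self).__init__()
        # One fully connected sublayer; its weight and bias are the
        # persistable variables Example 2 expects to find on disk.
        self._linear = fluid.dygraph.Linear(fc_size, fc_size)

    @declarative  # marks forward() for dygraph-to-static translation
    def forward(self, x):
        y = self._linear(x)
        return fluid.layers.mean(y)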
Example 2
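Trains a SimpleFcLayer for a few steps through the translator, saves an inference model, and checks that exactly the layer's persistable parameters (the linear weight and bias) are written alongside the __model__ file.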
    def test_save_inference_model(self):
        fc_size = 20

        x = np.random.random((fc_size, fc_size)).astype('float32')
        layer = SimpleFcLayer(fc_size)

        program_translator = ProgramTranslator.get_instance()
        program_cache = program_translator.get_program_cache()
        sgd = fluid.optimizer.SGD(learning_rate=0.001)
        program_translator.set_optimizer(sgd, 'mean')

        for i in range(5):
            out = layer(x)

        main_program = ProgramTranslator.get_instance().main_program
        expected_persistable_vars = set(
            [layer._linear.weight.name, layer._linear.bias.name])

        infer_model_dir = "./test_dy2stat_save_inference_model"
        ProgramTranslator.get_instance().save_inference_model(infer_model_dir)
        saved_var_names = set([
            filename for filename in os.listdir(infer_model_dir)
            if filename != '__model__'
        ])
        self.assertEqual(saved_var_names, expected_persistable_vars)
Example 3
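Helper that toggles the translator via enable(to_static) and runs func(x) inside a dygraph guard, asserting that an exception is raised when one is expected.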
    def _run(self, func, x, with_exception, to_static):
        ProgramTranslator().enable(to_static)
        if with_exception:
            with self.assertRaises(BaseException):
                with fluid.dygraph.guard():
                    func(x)
        else:
            with fluid.dygraph.guard():
                func(x)
Example 4
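Runs a single forward pass of a dygraph layer with the translator toggled by to_static and returns the prediction as a numpy array.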
    def train(self, to_static=False):
        program_translator = ProgramTranslator()
        program_translator.enable(to_static)

        with fluid.dygraph.guard():
            dy_layer = self.dygraph_class()
            x = fluid.dygraph.to_variable(self.data)
            prediction = dy_layer(x)
            if isinstance(prediction, (list, tuple)):
                prediction = prediction[0]

            return prediction.numpy()
Example 5
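Full MNIST training loop under the translator: collects per-batch losses, prints progress every 10 batches, and exercises jit save/load at batch 50.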
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        loss_data = []
        with fluid.dygraph.guard(self.place):
            fluid.default_main_program().random_seed = SEED
            fluid.default_startup_program().random_seed = SEED
            mnist = MNIST()
            adam = AdamOptimizer(learning_rate=0.001,
                                 parameter_list=mnist.parameters())

            for epoch in range(self.epoch_num):
                start = time()
                for batch_id, data in enumerate(self.train_reader()):
                    dy_x_data = np.array([
                        x[0].reshape(1, 28, 28) for x in data
                    ]).astype('float32')
                    y_data = np.array([x[1] for x in data
                                       ]).astype('int64').reshape(-1, 1)

                    img = to_variable(dy_x_data)
                    label = to_variable(y_data)

                    label.stop_gradient = True
                    prediction, acc, avg_loss = mnist(img, label=label)
                    avg_loss.backward()

                    adam.minimize(avg_loss)
                    loss_data.append(avg_loss.numpy()[0])
                    # clear gradients before the next batch
                    mnist.clear_gradients()
                    if batch_id % 10 == 0:
                        print(
                            "Loss at epoch {} step {}: loss: {:}, acc: {}, cost: {}"
                            .format(epoch, batch_id, avg_loss.numpy(),
                                    acc.numpy(),
                                    time() - start))
                        start = time()
                    if batch_id == 50:
                        mnist.eval()
                        prediction, acc, avg_loss = mnist(img, label)
                        loss_data.append(avg_loss.numpy()[0])
                        # new save load check
                        self.check_jit_save_load(mnist, [dy_x_data], [img],
                                                 to_static, prediction)
                        break
        return loss_data
Example 6
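Static-graph training counterpart: builds the network inside a program_guard and attaches the optimizer through ProgramTranslator.set_optimizer.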
    def train_static(self):
        main_program = fluid.Program()
        loss_data = []
        with fluid.program_guard(main_program):
            static_net = self.dygraph_class()
            adam = fluid.optimizer.AdamOptimizer(learning_rate=0.001)
            # set optimizer
            # TODO: Need a better interface to set the optimizer.
            program_translator = ProgramTranslator()
            program_translator.set_optimizer(adam, 'avg_loss')

            for batch_id in range(self.batch_num):
                pred, avg_loss = static_net(self.data)
                loss_data.append(np.array(avg_loss))

        return loss_data
Example 7
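Trains a Linear net with the translator enabled, saves its parameters with save_dygraph, reloads them into a fresh net with the translator disabled, and checks that outputs and losses match.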
    def test_save_load_same_result(self):
        program_translator = ProgramTranslator()
        x_data = np.random.randn(30, 10, 32).astype('float32')
        batch_num = 3

        with fluid.dygraph.guard(place):

            program_translator.enable(True)
            x = fluid.dygraph.to_variable(x_data)
            net = Linear(32, 64)
            adam = AdamOptimizer(learning_rate=0.1,
                                 parameter_list=net.parameters())

            for i in range(batch_num):
                static_out, static_loss = net(x)
                # Update parameters
                static_loss.backward()
                adam.minimize(static_loss)
                net.clear_gradients()
            # Save parameters
            fluid.save_dygraph(net.state_dict(), self.model_path)
            # minimize() has updated the parameters, so call net() again to
            # get the output and avg_loss computed with the updated weights.
            # Switch into eval mode.
            net.eval()
            static_out, static_loss = net(x)

        # load parameters into dygraph
        with fluid.dygraph.guard(place):
            dygraph_net = Linear(32, 64)

            # Load parameters
            model_dict, _ = fluid.load_dygraph(self.model_path)
            dygraph_net.set_dict(model_dict)
            # Switch into eval mode.
            dygraph_net.eval()

            x = fluid.dygraph.to_variable(x_data)
            # predict output
            program_translator.enable(False)
            dygraph_out, dygraph_loss = dygraph_net(x)

        self.assertTrue(np.allclose(dygraph_out.numpy(), static_out.numpy()))
        self.assertTrue(np.allclose(dygraph_loss.numpy(), static_loss.numpy()))
Example 8
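Dygraph training loop with the translator toggled by to_static, returning the collected per-batch losses.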
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        with fluid.dygraph.guard(fluid.CPUPlace()):
            dygraph_net = self.dygraph_class()
            adam = fluid.optimizer.AdamOptimizer(
                learning_rate=0.001, parameter_list=dygraph_net.parameters())
            loss_data = []
            for batch_id in range(self.batch_num):
                input_var = fluid.dygraph.to_variable(self.data)
                pred, avg_loss = dygraph_net(input_var)

                loss_data.append(avg_loss.numpy())
                avg_loss.backward()
                adam.minimize(avg_loss)
                dygraph_net.clear_gradients()

        return loss_data
Example 9
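Saves an inference model for a trained model, checks that the persisted variable names match the model's parameters, then reloads the model and runs inference to compare against gt_out.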
    def check_save_inference_model(self,
                                   model,
                                   inputs,
                                   gt_out,
                                   feed=None,
                                   fetch=None):
        program_translator = ProgramTranslator()
        expected_persistable_vars = set([p.name for p in model.parameters()])

        infer_model_dir = "./test_dy2stat_save_inference_model"
        program_translator.save_inference_model(infer_model_dir,
                                                feed=feed,
                                                fetch=fetch)
        saved_var_names = set([
            filename for filename in os.listdir(infer_model_dir)
            if filename != '__model__'
        ])
        self.assertEqual(saved_var_names, expected_persistable_vars)
        # Check the correctness of the inference
        infer_out = self.load_and_run_inference(infer_model_dir, inputs)
        self.assertTrue(np.allclose(gt_out, infer_out))
Example 10
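Compares the code produced by ProgramTranslator.get_code for an if/else function against the expected transformed source, in which the Python if/else has been rewritten as fluid.layers.cond.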
    def test_program_translator(self):
        answer = "\
def dyfunc_with_if_else(x_v, label=None):\n\
\n\
    def true_fn_1(x_v):\n\
        x_v = x_v - 1\n\
        return x_v\n\
\n\
    def false_fn_1(x_v):\n\
        x_v = x_v + 1\n\
        return x_v\n\
    x_v = fluid.layers.cond(fluid.layers.mean(x_v)[0] > 5, lambda :\n\
        true_fn_1(x_v), lambda : false_fn_1(x_v))\n\
    if label is not None:\n\
        loss = fluid.layers.cross_entropy(x_v, label)\n\
        return loss\n\
    return x_v\n"

        program_translator = ProgramTranslator()
        code = program_translator.get_code(dyfunc_with_if_else)
        self.assertEqual(answer, code)
Example 11
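Module-level setup of a BERT dygraph-to-static test: imports, device selection, seeds and save paths, and the beginning of the train function.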

import time
import unittest

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
from paddle.fluid.dygraph.io import VARIABLE_FILENAME

from bert_dygraph_model import PretrainModelLayer
from bert_utils import get_bert_config, get_feed_data_reader

from predictor_utils import PredictorTools

program_translator = ProgramTranslator()
place = (fluid.CUDAPlace(0)
         if fluid.is_compiled_with_cuda() else fluid.CPUPlace())
SEED = 2020
STEP_NUM = 10
PRINT_STEP = 2
MODEL_SAVE_PATH = "./bert.inference.model"
DY_STATE_DICT_SAVE_PATH = "./bert.dygraph"


def train(bert_config, data_reader, to_static):
    with fluid.dygraph.guard(place):
        fluid.default_main_program().random_seed = SEED
        fluid.default_startup_program().random_seed = SEED

        data_loader = fluid.io.DataLoader.from_generator(capacity=50,
                                                         iterable=True)
Example 12
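Test fixture that prepares random input and weight arrays plus a ProgramTranslator instance.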
    def setUp(self):
        self.x = np.random.randn(30, 10, 32).astype('float32')
        self.weight = np.random.randn(32, 64).astype('float32')
        self.program_translator = ProgramTranslator()
Example 13
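Checks that the get_code output matches the source of a hand-written static translation (StaticCode2.dyfunc_with_if_else).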
    def test_program_translator(self):
        answer = get_source_code(StaticCode2.dyfunc_with_if_else)
        program_translator = ProgramTranslator()
        code = program_translator.get_code(dyfunc_with_if_else)
        self.assertEqual(answer, code)
Example 14
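The same check against a second hand-written translation (StaticCode1.dyfunc_with_if_else).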
    def test_decorator(self):
        x_v = None
        program_translator = ProgramTranslator()
        code = program_translator.get_code(dyfunc_with_if_else)
        answer = get_source_code(StaticCode1.dyfunc_with_if_else)
        self.assertEqual(answer, code)
Example 15
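Checks get_code for an assignment function against the hand-written StaticCode.dyfunc_assign.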
    def test_code(self):
        answer = get_source_code(StaticCode.dyfunc_assign)
        program_translator = ProgramTranslator()
        code = program_translator.get_code(dyfunc_assign)
        self.assertEqual(answer, code)
Example 16
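Fixture that creates the ProgramTranslator singleton and a random input array.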
    def setUp(self):
        self.program_translator = ProgramTranslator()
        self.x = np.random.randn(10, 32).astype('float32')