Example #1
    def test_test_batch(self):
        dim = 20
        data = np.random.random(size=(4, dim)).astype(np.float32)

        def get_expect():
            fluid.enable_dygraph(fluid.CPUPlace())
            self.set_seed()
            m = MyModel()
            m.eval()
            output = m(to_tensor(data))
            fluid.disable_dygraph()
            return output.numpy()

        ref = get_expect()
        for dynamic in [True, False]:
            device = paddle.set_device('cpu')
            if dynamic:
                fluid.enable_dygraph(device)
            self.set_seed()
            net = MyModel()
            inputs = [InputSpec([None, dim], 'float32', 'x')]
            model = Model(net, inputs)
            model.prepare()
            out, = model.predict_batch([data])

            np.testing.assert_allclose(out, ref, rtol=1e-6)
            if dynamic:
                fluid.disable_dygraph()
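
Stripped of the seeding and the dygraph/static toggling, the pattern this test exercises is: wrap a layer in Model with an InputSpec, call prepare(), and feed a numpy batch to predict_batch, which returns one numpy array per model output. A minimal sketch, using a plain paddle.nn.Linear layer as a stand-in for the MyModel class that is not shown in this excerpt:

import numpy as np
import paddle
from paddle.static import InputSpec

# Stand-in network; MyModel from the test above is not part of this excerpt.
net = paddle.nn.Linear(20, 10)
model = paddle.Model(net, inputs=[InputSpec([None, 20], 'float32', 'x')])
model.prepare()  # no optimizer or loss is needed for pure inference

data = np.random.random((4, 20)).astype('float32')
out, = model.predict_batch([data])  # list with one numpy array per output
print(out.shape)  # (4, 10)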
Example #2
    def test_dygraph_export_deploy_model_about_inputs(self):
        self.set_seed()
        np.random.seed(201)
        mnist_data = MnistDataset(mode='train')
        paddle.disable_static()
        # without inputs
        save_dir = os.path.join(tempfile.mkdtemp(),
                                '.cache_test_dygraph_export_deploy')
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        for initial in ["fit", "train_batch", "eval_batch", "predict_batch"]:
            net = LeNet()
            model = Model(net)
            optim = fluid.optimizer.Adam(
                learning_rate=0.001, parameter_list=model.parameters())
            model.prepare(
                optimizer=optim, loss=CrossEntropyLoss(reduction="sum"))
            if initial == "fit":
                model.fit(mnist_data, batch_size=64, verbose=0)
            else:
                img = np.array(
                    np.random.random((1, 1, 28, 28)), dtype=np.float32)
                label = np.array(np.random.rand(1, 1), dtype=np.int64)
                if initial == "train_batch":
                    model.train_batch([img], [label])
                elif initial == "eval_batch":
                    model.eval_batch([img], [label])
                else:
                    model.predict_batch([img])

            model.save(save_dir, training=False)
        shutil.rmtree(save_dir)
        # with inputs passed as an InputSpec
        save_dir = os.path.join(tempfile.mkdtemp(),
                                '.cache_test_dygraph_export_deploy_2')
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        net = LeNet()
        inputs = InputSpec([None, 1, 28, 28], 'float32', 'x')
        model = Model(net, inputs)
        optim = fluid.optimizer.Adam(
            learning_rate=0.001, parameter_list=model.parameters())
        model.prepare(optimizer=optim, loss=CrossEntropyLoss(reduction="sum"))
        model.save(save_dir, training=False)
        shutil.rmtree(save_dir)
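
The first loop above verifies that a Model built without inputs only needs one call to fit, train_batch, eval_batch, or predict_batch before save(training=False) can infer the input shape, while passing an InputSpec removes even that requirement. A condensed sketch of both export paths, assuming paddle.vision.models.LeNet as a stand-in for the test's LeNet:

import os
import tempfile
import numpy as np
import paddle
from paddle.static import InputSpec
from paddle.vision.models import LeNet  # assumed stand-in for the test's LeNet

save_dir = os.path.join(tempfile.mkdtemp(), 'lenet_infer')

# Without inputs: run at least one batch (here predict_batch) so the input
# shape is known before exporting for inference.
model = paddle.Model(LeNet())
model.prepare()
img = np.random.random((1, 1, 28, 28)).astype('float32')
model.predict_batch([img])
model.save(save_dir, training=False)

# With inputs: an InputSpec declares the shape up front, so no forward pass
# is required before export.
model = paddle.Model(LeNet(),
                     inputs=InputSpec([None, 1, 28, 28], 'float32', 'x'))
model.prepare()
model.save(save_dir, training=False)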
Example #3
    def _calc_output(self, place, mode="test", dygraph=True):
        if dygraph:
            fluid.enable_dygraph(place)
        else:
            fluid.disable_dygraph()
        gen = paddle.seed(self._random_seed)
        paddle.framework.random._manual_program_seed(self._random_seed)
        scope = fluid.core.Scope()
        with fluid.scope_guard(scope):
            layer = self.model_cls(**self.attrs) if isinstance(
                self.attrs, dict) else self.model_cls(*self.attrs)
            model = Model(layer, inputs=self.make_inputs())
            model.prepare()
            if self.param_states:
                model.load(self.param_states, optim_state=None)
            return model.predict_batch(self.inputs)
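
The helper loads saved parameter states into the Model before predicting. A minimal sketch of that load-then-predict step, assuming a hypothetical checkpoint prefix 'lenet_ckpt' written earlier by model.save('lenet_ckpt') and paddle.vision.models.LeNet standing in for self.model_cls:

import numpy as np
import paddle
from paddle.static import InputSpec
from paddle.vision.models import LeNet  # assumed stand-in for self.model_cls

model = paddle.Model(LeNet(),
                     inputs=InputSpec([None, 1, 28, 28], 'float32', 'x'))
model.prepare()
# 'lenet_ckpt' is a hypothetical prefix saved earlier with model.save();
# only parameters are restored here, since no optimizer was configured.
model.load('lenet_ckpt')

img = np.random.random((1, 1, 28, 28)).astype('float32')
out, = model.predict_batch([img])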
Example #4
    def test_export_deploy_model(self):
        self.set_seed()
        np.random.seed(201)

        save_dir = os.path.join(tempfile.mkdtemp(),
                                '.cache_test_export_deploy_model')
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)

        for dynamic in [True, False]:
            if dynamic:
                paddle.disable_static()
            prog_translator = ProgramTranslator()
            if not dynamic:
                prog_translator.enable(False)
            net = LeNet()
            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
            model = Model(net, inputs)
            model.prepare()

            tensor_img = np.array(np.random.random((1, 1, 28, 28)),
                                  dtype=np.float32)

            model.save(save_dir, training=False)
            ori_results = model.predict_batch(tensor_img)
            if dynamic:
                fluid.disable_dygraph()

            place = (fluid.CUDAPlace(0)
                     if fluid.is_compiled_with_cuda() else fluid.CPUPlace())
            new_scope = fluid.Scope()
            with fluid.scope_guard(new_scope):
                exe = fluid.Executor(place)
                [inference_program, feed_target_names,
                 fetch_targets] = (paddle.static.io.load_inference_model(
                     path_prefix=save_dir, executor=exe))
                results = exe.run(inference_program,
                                  feed={feed_target_names[0]: tensor_img},
                                  fetch_list=fetch_targets)
                np.testing.assert_allclose(results,
                                           ori_results,
                                           rtol=1e-5,
                                           atol=1e-6)

            paddle.enable_static()

        shutil.rmtree(save_dir)
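
The second half of the loop reloads the exported program with the static-graph executor and checks it against the dygraph predict_batch output. The reload step, condensed, assuming save_dir points at artifacts already written by model.save(save_dir, training=False):

import numpy as np
import paddle

paddle.enable_static()
place = (paddle.CUDAPlace(0)
         if paddle.is_compiled_with_cuda() else paddle.CPUPlace())
exe = paddle.static.Executor(place)

save_dir = 'lenet_infer'  # assumed prefix from a previous model.save(..., training=False)
[program, feed_names, fetch_targets] = paddle.static.load_inference_model(
    path_prefix=save_dir, executor=exe)

img = np.random.random((1, 1, 28, 28)).astype('float32')
results = exe.run(program,
                  feed={feed_names[0]: img},
                  fetch_list=fetch_targets)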