Example #1
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)  # toggle dynamic-to-static conversion
        with fluid.dygraph.guard(PLACE):
            net = NetWithDictPop()
            ret = net(z=0, x=self.x, y=True)
            return ret.numpy()
Example #2
    def _run(self, to_static):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        result = self.dygraph_func(self.input)

        return result.numpy()
Example #3
    def train(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)  # toggle dynamic-to-static conversion
        with fluid.dygraph.guard(PLACE):
            net = MainNetWithDict(batch_size=self.batch_size)
            ret = net(self.x)
            return ret.numpy()
Example #4
    def _run(self, mode, to_static):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        net = self.Net(mode)
        ret = net(self.x, self.y)
        return ret.numpy()
Example #5
    def test_export_deploy_model(self):
        for dynamic in [True, False]:
            if dynamic:
                fluid.enable_dygraph()  # equivalently: paddle.disable_static()
            prog_translator = ProgramTranslator()
            if not dynamic:
                prog_translator.enable(False)
            net = LeNetDeclarative()
            inputs = [InputSpec([None, 1, 28, 28], 'float32', 'x')]
            model = Model(net, inputs)
            model.prepare()
            save_dir = tempfile.mkdtemp()
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
            tensor_img = np.array(np.random.random((1, 1, 28, 28)),
                                  dtype=np.float32)
            ori_results = model.test_batch(tensor_img)
            model.save(save_dir, training=False)
            if dynamic:
                fluid.disable_dygraph()

            place = (fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda()
                     else fluid.CPUPlace())
            new_scope = fluid.Scope()
            with fluid.scope_guard(new_scope):
                exe = fluid.Executor(place)
                inference_program, feed_target_names, fetch_targets = \
                    fluid.io.load_inference_model(dirname=save_dir, executor=exe)
                results = exe.run(inference_program,
                                  feed={feed_target_names[0]: tensor_img},
                                  fetch_list=fetch_targets)
                np.testing.assert_allclose(results,
                                           ori_results,
                                           rtol=1e-5,
                                           atol=1e-7)
                shutil.rmtree(save_dir)
Example #6
    def _run(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)

        with fluid.dygraph.guard(place):
            net = self.Net()
            x_v = fluid.dygraph.to_variable(self.x)
            ret = net(x_v)
            return ret.numpy()
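
All of the examples above share one pattern: the same forward pass is run once in plain dygraph mode and once with ProgramTranslator enabled, and the two outputs are compared. The sketch below shows that pattern end to end. The TestToStaticConsistency harness is hypothetical; it assumes the fluid-era import path used in the examples (paddle.fluid.dygraph.ProgramTranslator), and the net passed in is expected to have a @declarative-decorated forward so the translator has something to convert.

import numpy as np
import paddle.fluid as fluid
from paddle.fluid.dygraph import ProgramTranslator


class TestToStaticConsistency:
    def __init__(self, net_class, x):
        # net_class: any dygraph Layer whose forward is decorated with
        # @declarative, following the style of the examples above.
        self.Net = net_class
        self.x = x

    def _run(self, to_static=False):
        prog_trans = ProgramTranslator()
        prog_trans.enable(to_static)  # toggle dynamic-to-static conversion
        with fluid.dygraph.guard():
            net = self.Net()
            ret = net(fluid.dygraph.to_variable(self.x))
            return ret.numpy()

    def check_outputs_match(self):
        # Run the same forward pass twice and require numerically close results.
        dygraph_out = self._run(to_static=False)
        static_out = self._run(to_static=True)
        np.testing.assert_allclose(static_out, dygraph_out, rtol=1e-5)

Example #5 applies the same idea across the save/load boundary: the output of model.test_batch in dygraph mode is compared against what the exported inference program produces when run through an Executor.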