Example #1
def test_train_eval(self):
    with _test_eager_guard():
        self.func_train_eval()
    self.func_train_eval()
Example #2
def test_useless_feeded_var_names(self):
    with _test_eager_guard():
        self.func_test_useless_feeded_var_names()
    self.func_test_useless_feeded_var_names()
Example #3
def test_tensor_fill_true(self):
    with _test_eager_guard():
        self.func_test_tensor_fill_true()
    self.func_test_tensor_fill_true()
Example #4
def test_get_valid_program_error(self):
    with _test_eager_guard():
        self.func_test_get_valid_program_error()
    self.func_test_get_valid_program_error()
Example #5
def test_load_vars_error(self):
    with _test_eager_guard():
        self.func_test_load_vars_error()
    self.func_test_load_vars_error()
Example #6
def test_is_split_into_words(self):
    with _test_eager_guard():
        self.run_is_split_into_words()
    self.run_is_split_into_words()
Example #7
def test_when_train_with_no_grad(self):
    with _test_eager_guard():
        self.func_test_when_train_with_no_grad()
    self.func_test_when_train_with_no_grad()
Example #8
    for i in range(len(dp_params)):
        np.testing.assert_allclose(dp_params[i].numpy(),
                                   stage2_params[i].numpy(),
                                   rtol=1e-6)

    # save/load model
    output_dir = tempfile.mkdtemp()
    model_file = os.path.join(output_dir, "model.pdmodel")
    optimizer_file = os.path.join(output_dir, "model.pdopt")
    model_stage2, optimizer_stage2 = train_mlp(mlp6,
                                               sharding_stage=2,
                                               use_pure_fp16=False,
                                               opt_group=False,
                                               save_model=True)
    paddle.save(model_stage2.state_dict(), model_file)
    paddle.save(optimizer_stage2.state_dict(), optimizer_file)
    m_state_dict = paddle.load(model_file)
    opt_state_dict = paddle.load(optimizer_file)
    model_stage2.set_state_dict(m_state_dict)
    optimizer_stage2.set_state_dict(opt_state_dict)
    shutil.rmtree(output_dir)

    # check optimizer.minimize() error
    train_mlp(mlp7, sharding_stage=2, test_minimize=True)
    return


if __name__ == '__main__':
    with _test_eager_guard():
        test_dp_stage2()
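Example #8 is only a fragment: the enclosing test function, its imports, and the train_mlp helper are not shown. The save/load round trip it performs does not depend on the sharding setup; below is a minimal sketch of just that part, using a plain paddle.nn.Linear model and paddle.optimizer.AdamW as hypothetical stand-ins for the stage-2 model and optimizer.

import os
import shutil
import tempfile

import paddle

# Hypothetical stand-ins for the model/optimizer returned by train_mlp().
model = paddle.nn.Linear(4, 4)
optimizer = paddle.optimizer.AdamW(parameters=model.parameters())

# One training step so the optimizer has accumulator state worth saving.
loss = model(paddle.randn([2, 4])).mean()
loss.backward()
optimizer.step()
optimizer.clear_grad()

# Save both state dicts, then load them back and restore them.
output_dir = tempfile.mkdtemp()
model_file = os.path.join(output_dir, "model.pdmodel")
optimizer_file = os.path.join(output_dir, "model.pdopt")
paddle.save(model.state_dict(), model_file)
paddle.save(optimizer.state_dict(), optimizer_file)
model.set_state_dict(paddle.load(model_file))
optimizer.set_state_dict(paddle.load(optimizer_file))
shutil.rmtree(output_dir)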
Example #9
def test_no_padding(self):
    with _test_eager_guard():
        self.run_no_padding()
    self.run_no_padding()
Example #10
def test_api_eager_dygraph(self):
    with _test_eager_guard():
        self.test_check_output()
Example #11
def test_dygraph_final_state_api(self):
    with _test_eager_guard():
        self.test_error_api()
Example #12
def test_paramter_list(self):
    with _test_eager_guard():
        self.paramter_list(False)
        self.paramter_list(True)
    self.paramter_list(False)
    self.paramter_list(True)
Example #13
def test_prune_graph(self):
    # test eager
    with _test_eager_guard():
        self.func_prune_graph()
    self.func_prune_graph()
Example #14
def test_with_tensor(self):
    with _test_eager_guard():
        self.func_with_tensor()
    self.func_with_tensor()
Example #15
def test_dynamic(self):
    with _test_eager_guard():
        self.func_dynamic()
    self.func_dynamic()
Example #16
def test_async_write_success(self):
    with _test_eager_guard():
        self.func_setUp()
        self.func_test_async_write_success()
    self.func_setUp()
    self.func_test_async_write_success()
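All of the examples above share one pattern: the test body runs once inside _test_eager_guard() (eager mode) and, in most cases, once more outside the guard (legacy mode). The following is a minimal, self-contained sketch of that pattern, assuming the guard is importable from paddle.fluid.framework as in these tests; the paddle.add check used as the test body is only an illustration and does not come from the examples above.

import unittest

import paddle
from paddle.fluid.framework import _test_eager_guard  # assumed import path


class TestEagerGuardPattern(unittest.TestCase):
    # Hypothetical test body used only to illustrate the pattern.
    def func_add(self):
        x = paddle.to_tensor([1.0, 2.0])
        y = paddle.to_tensor([3.0, 4.0])
        out = paddle.add(x, y)
        self.assertEqual(out.numpy().tolist(), [4.0, 6.0])

    def test_add(self):
        # Run the same body once under the eager guard, then once outside it.
        with _test_eager_guard():
            self.func_add()
        self.func_add()


if __name__ == '__main__':
    unittest.main()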