def test_load_with_param_filename(self):
    self.save_dirname = "static_mnist.load_state_dict.param_filename"
    self.model_filename = None
    self.params_filename = "static_mnist.params"
    orig_param_dict = self.train_and_save_model()

    configs = paddle.SaveLoadConfig()
    configs.params_filename = self.params_filename
    load_param_dict, _ = paddle.load(self.save_dirname, configs)
    self.check_load_state_dict(orig_param_dict, load_param_dict)
def test_load_default(self):
    self.save_dirname = "static_mnist.load_state_dict.default"
    self.model_filename = None
    self.params_filename = None
    orig_param_dict = self.train_and_save_model()

    configs = paddle.SaveLoadConfig()
    configs.separate_params = True
    load_param_dict, _ = paddle.load(self.save_dirname, configs)
    self.check_load_state_dict(orig_param_dict, load_param_dict)
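import numpy as np

# A minimal sketch of the check_load_state_dict helper the tests above rely
# on. This is an assumption: the actual helper is defined elsewhere in this
# test class. The idea is simply that every variable in the original state
# dict must round-trip through save/load to an identical array.
def check_load_state_dict(self, orig_dict, load_dict):
    for var_name, value in orig_dict.items():
        self.assertTrue(np.array_equal(value, load_dict[var_name]))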
    shuffle=True, drop_last=True, num_workers=2)

# train model
for data in loader():
    exe.run(fluid.default_main_program(),
            feed=data, fetch_list=[avg_loss])

# save with params_filename
model_path = "fc.example.model.with_params_filename"
fluid.io.save_inference_model(
    model_path, ["image"], [pred], exe, params_filename="__params__")

# enable dygraph mode
paddle.disable_static(place)

# load
config = paddle.SaveLoadConfig()
config.params_filename = "__params__"
fc = paddle.jit.load(model_path, config=config)

# inference
fc.eval()
x = paddle.randn([1, IMAGE_SIZE], 'float32')
pred = fc(x)

# load the raw parameter dict with the same params_filename setting
config = paddle.SaveLoadConfig()
config.params_filename = "__params__"
load_param_dict, _ = paddle.load(model_path, config)
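# A short follow-up sketch: paddle.load here returns a dict of numpy arrays
# (see the state-dict example below), so the raw parameters can be inspected
# by name. The variable names are whatever the saved program used.
for name, value in load_param_dict.items():
    print(name, value.shape, value.dtype)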
import paddle

paddle.disable_static()

linear = paddle.nn.Linear(5, 1)
state_dict = linear.state_dict()
paddle.save(state_dict, "paddle_dy")

configs = paddle.SaveLoadConfig()
configs.keep_name_table = True
para_state_dict, _ = paddle.load("paddle_dy", configs)

print(para_state_dict)
# the name_table is 'StructuredToParameterName@@'
# {'bias': array([0.], dtype=float32),
#  'StructuredToParameterName@@':
#      {'bias': u'linear_0.b_0', 'weight': u'linear_0.w_0'},
#  'weight': array([[ 0.04230034],
#         [-0.1222527 ],
#         [ 0.7392676 ],
#         [-0.8136974 ],
#         [ 0.01211023]], dtype=float32)}
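# For comparison, a sketch of the default behavior. This is an assumption
# based on the keep_name_table flag above: when it is left at its default,
# the 'StructuredToParameterName@@' entry is stripped from the loaded dict.
para_state_dict_default, _ = paddle.load("paddle_dy")
print('StructuredToParameterName@@' in para_state_dict_default)  # expect: False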