Code example #1
    def test_while_no_params_filename(self):
        # Phase 1. run and save static model
        self.train_and_save_model()

        # Phase 2. load model & train dygraph
        with unique_name.guard():
            dy_out, dy_param_init_value, dy_param_value = \
                self.load_and_train_dygraph()

        with unique_name.guard():
            static_out, static_param_init_value, static_param_value = \
                self.load_and_train_static()

        # Phase 3. compare
        with unique_name.guard():
            dict_old_new_init = rename_var_with_generator(
                static_param_init_value.keys())
        for key, value in six.iteritems(static_param_init_value):
            key = dict_old_new_init[key]
            self.assertTrue(np.array_equal(value, dy_param_init_value[key]))

        self.assertTrue(np.allclose(static_out, dy_out))

        for key, value in six.iteritems(static_param_value):
            key += LOADED_VAR_SUFFIX
            self.assertTrue(np.allclose(value, dy_param_value[key], atol=1e-5))
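
Note: the test above leans on `unique_name.guard()` resetting Paddle's global name generator, so the dygraph run and the static run derive parameter names from the same starting counter. A minimal sketch of that behavior (the `fc` prefix is illustrative):

    from paddle.utils import unique_name

    with unique_name.guard():
        print(unique_name.generate("fc"))  # fc_0
        print(unique_name.generate("fc"))  # fc_1 -- the counter advances inside the guard
    with unique_name.guard():
        print(unique_name.generate("fc"))  # fc_0 again -- each guard starts a fresh generator
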
Code example #2
    def test_save_load_save_without_running(self):
        model_path = "test_jit_save_load_save_without_running/model"
        IMAGE_SIZE = 224
        inps0 = paddle.randn([1, IMAGE_SIZE])
        inps1 = paddle.randn([2, IMAGE_SIZE])
        # Use new namespace
        with unique_name.guard():
            layer_save = LayerSaved(IMAGE_SIZE, IMAGE_SIZE)
        # save
        paddle.jit.save(layer_save,
                        model_path,
                        input_spec=[
                            paddle.static.InputSpec(shape=[None, IMAGE_SIZE],
                                                    dtype='float32')
                        ])
        result_00 = layer_save(inps0)
        result_01 = layer_save(inps1)
        # load and save without running
        with unique_name.guard():
            layer_load = paddle.jit.load(model_path)
            paddle.jit.save(layer_load,
                            model_path,
                            input_spec=[
                                paddle.static.InputSpec(
                                    shape=[None, IMAGE_SIZE], dtype='float32')
                            ])
        # reload
        layer_reload = paddle.jit.load(model_path)
        result_10 = layer_reload(inps0)
        result_11 = layer_reload(inps1)

        self.assertTrue(float((result_00 - result_10).abs().max()) < 1e-5)
        self.assertTrue(float((result_01 - result_11).abs().max()) < 1e-5)
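
Note: because the loaded layer is re-saved without ever being run, `paddle.jit.save` cannot infer the input signature from a prior forward pass, so `input_spec` must supply it. A minimal sketch of such a spec (the `name` argument is illustrative):

    import paddle

    # shape=[None, 224] leaves the batch dimension unspecified,
    # so the saved model accepts any batch size
    spec = paddle.static.InputSpec(shape=[None, 224], dtype='float32', name='x')
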
Code example #3
    def test_save_load_finetune_load(self):
        model_path = "test_jit_save_load_finetune_load/model"
        IMAGE_SIZE = 224
        inps0 = paddle.randn([1, IMAGE_SIZE])
        inps1 = paddle.randn([2, IMAGE_SIZE])
        # Use new namespace
        with unique_name.guard():
            layer_save = LayerSaved(IMAGE_SIZE, IMAGE_SIZE)
        layer_save(inps0)
        # save
        paddle.jit.save(layer_save, model_path)
        # load
        with unique_name.guard():
            layer_load = LayerLoadFinetune(IMAGE_SIZE, IMAGE_SIZE, model_path)
        # train
        train(layer_load, input_size=IMAGE_SIZE)
        result_00 = layer_load(inps0)
        result_01 = layer_load(inps1)
        # save
        paddle.jit.save(layer_load, model_path)
        # load
        layer_finetune = paddle.jit.load(model_path)
        result_10 = layer_finetune(inps0)
        result_11 = layer_finetune(inps1)

        self.assertTrue(float((result_00 - result_10).abs().max()) < 1e-5)
        self.assertTrue(float((result_01 - result_11).abs().max()) < 1e-5)
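
Note: the `train` helper is assumed by this example and not shown. A minimal dygraph sketch of what it could look like (the signature, step count, and learning rate are assumptions):

    import paddle

    def train(layer, input_size=224, steps=4, lr=0.01):
        opt = paddle.optimizer.SGD(learning_rate=lr,
                                   parameters=layer.parameters())
        for _ in range(steps):
            x = paddle.randn([2, input_size])
            loss = layer(x).mean()  # toy objective, just to update the weights
            loss.backward()
            opt.step()
            opt.clear_grad()
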
Code example #4
    def __init__(self, programs, persistable_vars):
        super(TranslatedLayer, self).__init__()

        if not isinstance(programs, dict):
            raise TypeError(
                "TranslatedLayer needs to be initialized with a dict of _ProgramHolder."
            )
        if not isinstance(persistable_vars, dict):
            raise TypeError(
                "TranslatedLayer needs to be initialized with a dict of persistable variables."
            )

        self._program_holder_dict = programs

        # NOTE(chenweihang): [ why not use var name directly? ]
        # When adding a parameter or buffer to a Layer via the following APIs,
        # the variable name can't contain `.`, because that may cause an
        # AttributeError when accessing the newly added parameter or buffer
        # in the form `self.**.**`, but a ParamBase or VarBase name may
        # originally contain `.`, such as `linear_0.w_0`, so here we
        # need to generate a new var name for each var.
        self._persistable_var_name_dict = dict()
        # each TranslatedLayer object keeps its own var name counter, starting from 0
        with unique_name.guard():
            for name, var in persistable_vars.items():
                if isinstance(var, framework.ParamBase):
                    dy_name = _generate_unique_var_name(PARAMETER_NAME_PREFIX)
                    self._persistable_var_name_dict[name] = dy_name
                    self.add_parameter(dy_name, var)
                elif isinstance(var, core.VarBase):
                    dy_name = _generate_unique_var_name(BUFFER_NAME_PREFIX)
                    self._persistable_var_name_dict[name] = dy_name
                    self.register_buffer(dy_name, var)
                else:
                    raise TypeError(
                        "Adding persistent variable which  to layer is not supported now"
                    )

        self._is_test = True
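
Note: `_generate_unique_var_name` and the two name prefixes are Paddle internals. A minimal sketch of how such a helper could behave, assuming it builds on `unique_name.generate` (the actual implementation may differ):

    from paddle.utils import unique_name

    PARAMETER_NAME_PREFIX = 'param'
    BUFFER_NAME_PREFIX = 'buffer'

    def _generate_unique_var_name(prefix):
        # inside `unique_name.guard()` the counter restarts, so each
        # TranslatedLayer instance gets names starting at param_0 / buffer_0
        return unique_name.generate(prefix)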