Example #1
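Checks F.pad in circular mode on a 5-D input in static-graph mode, under both the NCDHW and NDHWC layouts, comparing the fetched results against a NumPy reference computed by _get_numpy_out, a helper defined elsewhere in the test class (a sketch follows the code).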
    def check_static_result_4(self, place):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            input_shape = (2, 3, 4, 5, 6)
            pad = [1, 2, 1, 1, 3, 4]
            mode = "circular"
            input_data = np.random.rand(*input_shape).astype(np.float32)
            x = paddle.fluid.data(name="x", shape=input_shape)
            result1 = F.pad(x=x, pad=pad, mode=mode, data_format="NCDHW")
            result2 = F.pad(x=x, pad=pad, mode=mode, data_format="NDHWC")
            exe = Executor(place)
            fetches = exe.run(default_main_program(),
                              feed={"x": input_data},
                              fetch_list=[result1, result2])

            np_out1 = self._get_numpy_out(input_data,
                                          pad,
                                          mode,
                                          data_format="NCDHW")
            np_out2 = self._get_numpy_out(input_data,
                                          pad,
                                          mode,
                                          data_format="NDHWC")
            self.assertTrue(np.allclose(fetches[0], np_out1))
            self.assertTrue(np.allclose(fetches[1], np_out2))
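_get_numpy_out is not shown in these excerpts; a plausible sketch for the pad tests, assuming Paddle's (left, right, top, bottom, front, back) pad convention for 5-D inputs and writing the class method as a free function:

import numpy as np

def _get_numpy_out(input_data, pad, mode, value=0.0, data_format="NCDHW"):
    # Paddle's 5-D pad list is (left, right, top, bottom, front, back);
    # np.pad expects one (before, after) pair per axis, outermost first.
    if data_format == "NCDHW":
        pad_width = [(0, 0), (0, 0),
                     (pad[4], pad[5]), (pad[2], pad[3]), (pad[0], pad[1])]
    else:  # NDHWC
        pad_width = [(0, 0),
                     (pad[4], pad[5]), (pad[2], pad[3]), (pad[0], pad[1]),
                     (0, 0)]
    if mode == "constant":
        return np.pad(input_data, pad_width, mode="constant",
                      constant_values=value)
    if mode == "reflect":
        return np.pad(input_data, pad_width, mode="reflect")
    if mode == "replicate":
        return np.pad(input_data, pad_width, mode="edge")
    if mode == "circular":
        return np.pad(input_data, pad_width, mode="wrap")
    raise ValueError("unsupported mode: {}".format(mode))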
Example #2
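Builds a reflect-mode pad whose front/back padding (2, 3) reaches the size of the depth dimension (3). Reflect padding requires each pad to be smaller than the corresponding input dimension, so this helper evidently exercises an error path; in the full suite it would typically be invoked under assertRaises. Note that value is ignored in reflect mode.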
def test_reflect_3():
    input_shape = (1, 2, 3, 4, 5)
    data = np.random.rand(*input_shape).astype(np.float32)
    x = paddle.fluid.data(name="x", shape=input_shape)
    y = F.pad(x, pad=[1, 1, 1, 1, 2, 3], value=1, mode='reflect')
    place = paddle.CPUPlace()
    exe = Executor(place)
    outputs = exe.run(feed={'x': data}, fetch_list=[y.name])
Example #3
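Checks constant-mode F.pad with a fill value of 100 in the NCDHW layout, again comparing against the class's NumPy reference.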
    def check_static_result_1(self, place):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            input_shape = (1, 2, 3, 4, 5)
            pad = [1, 2, 1, 1, 3, 4]
            mode = "constant"
            value = 100
            input_data = np.random.rand(*input_shape).astype(np.float32)
            x = paddle.fluid.data(name="x", shape=input_shape)
            result = F.pad(x=x,
                           pad=pad,
                           value=value,
                           mode=mode,
                           data_format="NCDHW")
            exe = Executor(place)
            fetches = exe.run(default_main_program(),
                              feed={"x": input_data},
                              fetch_list=[result])

            np_out = self._get_numpy_out(input_data, pad, mode, value)
            self.assertTrue(np.allclose(fetches[0], np_out))
Example #4
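A static-graph check of F.cosine_similarity along axis 1 against a NumPy reference; a sketch of the reference follows the code.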
    def check_static_result(self, place):
        paddle.enable_static()

        with program_guard(Program(), Program()):
            shape = [10, 15]
            axis = 1
            eps = 1e-8
            np.random.seed(0)
            np_x1 = np.random.rand(*shape).astype(np.float32)
            np_x2 = np.random.rand(*shape).astype(np.float32)

            x1 = paddle.fluid.data(name="x1", shape=shape)
            x2 = paddle.fluid.data(name="x2", shape=shape)
            result = F.cosine_similarity(x1, x2, axis=axis, eps=eps)
            exe = Executor(place)
            fetches = exe.run(default_main_program(),
                              feed={"x1": np_x1,
                                    "x2": np_x2},
                              fetch_list=[result])

            np_out = self._get_numpy_out(np_x1, np_x2, axis=axis, eps=eps)
            self.assertTrue(np.allclose(fetches[0], np_out))
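The reference is again defined elsewhere in the test class; a plausible NumPy implementation (an assumption, written as a free function):

import numpy as np

def _get_numpy_out(x1, x2, axis=1, eps=1e-8):
    # Cosine similarity along the given axis; the product of the squared
    # norms is clamped at eps**2 to avoid division by zero.
    w12 = np.sum(x1 * x2, axis=axis)
    w1 = np.sum(x1 * x1, axis=axis)
    w2 = np.sum(x2 * x2, axis=axis)
    return w12 / np.sqrt(np.clip(w1 * w2, eps * eps, None))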
Example #5
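The same constant-mode pad check, run on an NPU when Paddle is compiled with NPU support and on the CPU otherwise; both data layouts are exercised.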
    def test_static(self):
        paddle.enable_static()
        self.place = (fluid.NPUPlace(0)
                      if fluid.core.is_compiled_with_npu() else fluid.CPUPlace())
        with program_guard(Program(), Program()):
            input_shape = (1, 2, 3, 4, 5)
            pad = [1, 2, 1, 1, 3, 4]
            mode = "constant"
            value = 0
            input_data = np.random.rand(*input_shape).astype(np.float32)
            x = paddle.fluid.data(name="x", shape=input_shape)
            result1 = F.pad(x=x,
                            pad=pad,
                            value=value,
                            mode=mode,
                            data_format="NCDHW")
            result2 = F.pad(x=x,
                            pad=pad,
                            value=value,
                            mode=mode,
                            data_format="NDHWC")
            exe = Executor(self.place)
            fetches = exe.run(default_main_program(),
                              feed={"x": input_data},
                              fetch_list=[result1, result2])

            np_out1 = self._get_numpy_out(input_data,
                                          pad,
                                          mode,
                                          value,
                                          data_format="NCDHW")
            np_out2 = self._get_numpy_out(input_data,
                                          pad,
                                          mode,
                                          value,
                                          data_format="NDHWC")
            self.assertTrue(np.allclose(fetches[0], np_out1))
            self.assertTrue(np.allclose(fetches[1], np_out2))
Example #6
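save_model from Paddle Serving's client IO module. It normalizes the inference program, saves the server-side model and parameters (optionally AES-encrypted), builds a GeneralModelConfig proto describing every feed and fetch variable (name, alias, type code, LoD flag, shape), and writes matching client- and server-side prototxt configs.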
def save_model(server_model_folder,
               client_config_folder,
               feed_var_dict,
               fetch_var_dict,
               main_program=None,
               encryption=False,
               key_len=128,
               encrypt_conf=None,
               model_filename=None,
               params_filename=None,
               show_proto=False,
               feed_alias_names=None,
               fetch_alias_names=None):
    executor = Executor(place=CPUPlace())

    feed_var_names = [feed_var_dict[x].name for x in feed_var_dict]
    feed_vars = [feed_var_dict[x] for x in feed_var_dict]
    target_vars = []
    target_var_names = []
    for key in sorted(fetch_var_dict.keys()):
        target_vars.append(fetch_var_dict[key])
        target_var_names.append(key)

    main_program = normalize_program(main_program, feed_vars, target_vars)
    if not encryption and not show_proto:
        if not os.path.exists(server_model_folder):
            os.makedirs(server_model_folder)
        if not model_filename:
            model_filename = "model.pdmodel"
        if not params_filename:
            params_filename = "params.pdiparams"

        new_model_path = os.path.join(server_model_folder, model_filename)
        new_params_path = os.path.join(server_model_folder, params_filename)

        with open(new_model_path, "wb") as new_model_file:
            new_model_file.write(
                main_program._remove_training_info(
                    False).desc.serialize_to_string())

        paddle.static.save_vars(executor=executor,
                                dirname=server_model_folder,
                                main_program=main_program,
                                vars=None,
                                predicate=paddle.static.io.is_persistable,
                                filename=params_filename)
    elif not show_proto:
        if not os.path.exists(server_model_folder):
            os.makedirs(server_model_folder)
        if encrypt_conf is None:
            aes_cipher = CipherFactory.create_cipher()
        else:
            # TODO: support more encryption algorithms; for now fall back to
            # the default cipher so that aes_cipher is always defined.
            aes_cipher = CipherFactory.create_cipher()
        key = CipherUtils.gen_key_to_file(key_len, "key")
        params = fluid.io.save_persistables(executor=executor,
                                            dirname=None,
                                            main_program=main_program)
        model = main_program._remove_training_info(
            False).desc.serialize_to_string()
        os.chdir(server_model_folder)
        aes_cipher.encrypt_to_file(params, key, "encrypt_params")
        aes_cipher.encrypt_to_file(model, key, "encrypt_model")
        os.chdir("..")

    config = model_conf.GeneralModelConfig()
    if feed_alias_names is None:
        feed_alias = list(feed_var_dict.keys())
    else:
        feed_alias = feed_alias_names.split(',')
    if fetch_alias_names is None:
        fetch_alias = target_var_names
    else:
        fetch_alias = fetch_alias_names.split(',')
    if (len(feed_alias) != len(feed_var_dict)
            or len(fetch_alias) != len(target_var_names)):
        raise ValueError(
            "--feed_alias_names and --fetch_alias_names must have the same "
            "number of entries as feed_vars and fetch_vars, respectively.")
    for i, key in enumerate(feed_var_dict):
        feed_var = model_conf.FeedVar()
        feed_var.alias_name = feed_alias[i]
        feed_var.name = feed_var_dict[key].name
        feed_var.feed_type = var_type_conversion(feed_var_dict[key].dtype)

        lod_level = feed_var_dict[key].lod_level
        feed_var.is_lod_tensor = (lod_level >= 1
                                  if lod_level is not None else False)
        if feed_var.is_lod_tensor:
            feed_var.shape.extend([-1])
        else:
            tmp_shape = []
            for v in feed_var_dict[key].shape:
                if v >= 0:
                    tmp_shape.append(v)
            feed_var.shape.extend(tmp_shape)
        config.feed_var.extend([feed_var])

    for i, key in enumerate(target_var_names):
        fetch_var = model_conf.FetchVar()
        fetch_var.alias_name = fetch_alias[i]
        fetch_var.name = fetch_var_dict[key].name
        fetch_var.fetch_type = var_type_conversion(fetch_var_dict[key].dtype)

        fetch_var.is_lod_tensor = fetch_var_dict[key].lod_level >= 1
        if fetch_var.is_lod_tensor:
            fetch_var.shape.extend([-1])
        else:
            tmp_shape = []
            for v in fetch_var_dict[key].shape:
                if v >= 0:
                    tmp_shape.append(v)
            fetch_var.shape.extend(tmp_shape)
        config.fetch_var.extend([fetch_var])

    if show_proto:
        print(str(config))
        return
    try:
        save_dirname = os.path.normpath(client_config_folder)
        os.makedirs(save_dirname)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    with open("{}/serving_client_conf.prototxt".format(client_config_folder),
              "w") as fout:
        fout.write(str(config))
    with open("{}/serving_server_conf.prototxt".format(server_model_folder),
              "w") as fout:
        fout.write(str(config))
    with open(
            "{}/serving_client_conf.stream.prototxt".format(
                client_config_folder), "wb") as fout:
        fout.write(config.SerializeToString())
    with open(
            "{}/serving_server_conf.stream.prototxt".format(
                server_model_folder), "wb") as fout:
        fout.write(config.SerializeToString())
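A minimal usage sketch; the toy network, variable names, and output folders below are illustrative assumptions, not part of the function above:

import paddle

paddle.enable_static()
main_prog = paddle.static.Program()
with paddle.static.program_guard(main_prog):
    x = paddle.static.data(name="x", shape=[-1, 13], dtype="float32")
    y = paddle.static.nn.fc(x, size=1)

save_model("serving_server", "serving_client",
           feed_var_dict={"words": x},
           fetch_var_dict={"price": y},
           main_program=main_prog)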
Example #7
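An earlier, simpler variant of save_model: it delegates persistence to save_inference_model and hard-codes the dtype-to-type-code mapping that Example #6 factors out into var_type_conversion.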
def save_model(server_model_folder,
               client_config_folder,
               feed_var_dict,
               fetch_var_dict,
               main_program=None):
    executor = Executor(place=CPUPlace())

    feed_var_names = [feed_var_dict[x].name for x in feed_var_dict]
    target_vars = []
    target_var_names = []
    for key in sorted(fetch_var_dict.keys()):
        target_vars.append(fetch_var_dict[key])
        target_var_names.append(key)

    save_inference_model(
        server_model_folder,
        feed_var_names,
        target_vars,
        executor,
        main_program=main_program)

    config = model_conf.GeneralModelConfig()

    # feed_type / fetch_type codes: int64 = 0; float32 = 1; int32 = 2
    for key in feed_var_dict:
        feed_var = model_conf.FeedVar()
        feed_var.alias_name = key
        feed_var.name = feed_var_dict[key].name
        feed_var.is_lod_tensor = feed_var_dict[key].lod_level >= 1
        if feed_var_dict[key].dtype == core.VarDesc.VarType.INT64:
            feed_var.feed_type = 0
        if feed_var_dict[key].dtype == core.VarDesc.VarType.FP32:
            feed_var.feed_type = 1
        if feed_var_dict[key].dtype == core.VarDesc.VarType.INT32:
            feed_var.feed_type = 2
        if feed_var.is_lod_tensor:
            feed_var.shape.extend([-1])
        else:
            tmp_shape = []
            for v in feed_var_dict[key].shape:
                if v >= 0:
                    tmp_shape.append(v)
            feed_var.shape.extend(tmp_shape)
        config.feed_var.extend([feed_var])

    for key in target_var_names:
        fetch_var = model_conf.FetchVar()
        fetch_var.alias_name = key
        fetch_var.name = fetch_var_dict[key].name
        fetch_var.is_lod_tensor = fetch_var_dict[key].lod_level >= 1
        if fetch_var_dict[key].dtype == core.VarDesc.VarType.INT64:
            fetch_var.fetch_type = 0
        if fetch_var_dict[key].dtype == core.VarDesc.VarType.FP32:
            fetch_var.fetch_type = 1
        if fetch_var_dict[key].dtype == core.VarDesc.VarType.INT32:
            fetch_var.fetch_type = 2
        if fetch_var.is_lod_tensor:
            fetch_var.shape.extend([-1])
        else:
            tmp_shape = []
            for v in fetch_var_dict[key].shape:
                if v >= 0:
                    tmp_shape.append(v)
            fetch_var.shape.extend(tmp_shape)
        config.fetch_var.extend([fetch_var])

    cmd = "mkdir -p {}".format(client_config_folder)

    os.system(cmd)
    with open("{}/serving_client_conf.prototxt".format(client_config_folder),
              "w") as fout:
        fout.write(str(config))
    with open("{}/serving_server_conf.prototxt".format(server_model_folder),
              "w") as fout:
        fout.write(str(config))
    with open("{}/serving_client_conf.stream.prototxt".format(
            client_config_folder), "wb") as fout:
        fout.write(config.SerializeToString())
    with open("{}/serving_server_conf.stream.prototxt".format(
            server_model_folder), "wb") as fout:
        fout.write(config.SerializeToString())
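var_type_conversion, used in Example #6 but not shown, presumably generalizes the mapping above; a minimal sketch under that assumption (the real helper in paddle_serving_client covers more dtypes):

from paddle.fluid import core

def var_type_conversion(dtype):
    # Map a Paddle VarDesc dtype to the integer type codes used in the
    # serving proto config: int64 = 0, float32 = 1, int32 = 2.
    if dtype == core.VarDesc.VarType.INT64:
        return 0
    if dtype == core.VarDesc.VarType.FP32:
        return 1
    if dtype == core.VarDesc.VarType.INT32:
        return 2
    raise ValueError("unsupported dtype: {}".format(dtype))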