Code Example #1
    def to_static_inputs_with_spec(self, input_with_spec, main_program):
        """
        Constructs feed layer by inputs with InputSpec information for main program.

        Args:
            input_with_spec(tuple): input arguments by replacing argument with InputSpec.
            main_program(Program): main program for inserting feed layer.
        """
        flat_input_spec = flatten(input_with_spec)

        inputs = []
        block = main_program.global_block()
        for i, var_spec in enumerate(flat_input_spec):
            if isinstance(var_spec, paddle.static.InputSpec):
                feed_layer = block.create_var(
                    # TODO(Aurelius84): consider a more elegant way to name this
                    name=var_spec.name or "feed_%s" % i,
                    shape=var_spec.shape,
                    dtype=var_spec.dtype,
                    is_data=True,
                    need_check_feed=False)
            else:
                feed_layer = var_spec
            inputs.append(feed_layer)

        return pack_sequence_as(input_with_spec, inputs)
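Code Example #1 follows a flatten, transform, repack pattern: flatten the nested input_with_spec, build one feed variable per InputSpec leaf, then rebuild the original nesting with pack_sequence_as. The sketch below shows that round-trip in plain Python; flatten_nest and pack_as are simplified stand-ins (lists and tuples only) for Paddle's flatten and pack_sequence_as, not the real utilities.

def flatten_nest(nest):
    # recursively collect the leaves of a list/tuple nest, left to right
    if isinstance(nest, (list, tuple)):
        flat = []
        for item in nest:
            flat.extend(flatten_nest(item))
        return flat
    return [nest]

def pack_as(structure, flat_values):
    # rebuild `structure`, consuming `flat_values` leaf by leaf
    it = iter(flat_values)
    def _pack(node):
        if isinstance(node, (list, tuple)):
            return type(node)(_pack(item) for item in node)
        return next(it)
    return _pack(structure)

nested = (1, (2, 3), [4])
doubled = [x * 2 for x in flatten_nest(nested)]  # transform every leaf
print(pack_as(nested, doubled))                  # (2, (4, 6), [8])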
Code Example #2
            def _convert_input(self,
                               input,
                               input_name,
                               input_idx,
                               is_default=False):
                def _to_variable(x, x_desc=None, x_name=None, x_idx=None):
                    if isinstance(x, np.ndarray):
                        out = fluid.data(name=x_name if x_idx is None else
                                         (x_name + "_" + str(x_idx)),
                                         shape=([None] + list(x.shape[1:]))
                                         if x_desc is None else x_desc.shape,
                                         dtype=x.dtype)
                        # record how to fetch this input's data, so it can be
                        # extracted from args and kwargs when running __call__
                        if is_default:  # for defaults
                            if x_idx is None:  # if input is plain
                                data_extracter = lambda args, kwargs: input
                            else:  # if input is a nested structure
                                data_extracter = lambda args, kwargs: flatten(
                                    input)[x_idx]
                        elif input_idx is None:  # for named arg
                            if x_idx is None:  # if input is plain
                                data_extracter = lambda args, kwargs: kwargs[
                                    input_name]
                            else:  # if input is a nested structure
                                data_extracter = lambda args, kwargs: flatten(
                                    kwargs[input_name])[x_idx]
                        else:  # for positional arg
                            if x_idx is None:  # if input is plain
                                data_extracter = lambda args, kwargs: args[
                                    input_idx]
                            else:  # if input is a nested structure
                                data_extracter = lambda args, kwargs: flatten(
                                    args[input_idx])[x_idx]
                        self._inputs[out.name] = data_extracter
                    else:
                        out = x
                    return out

                input_desc = model_self._data_descs.get(input_name, None)
                if not utils.is_sequence(input):
                    return _to_variable(input, input_desc, input_name)
                flat_output = []
                if input_desc is None:
                    for i, x in enumerate(flatten(input)):
                        out = _to_variable(x, x_name=input_name, x_idx=i)
                        flat_output.append(out)
                else:
                    for i, x in enumerate(
                            zip(flatten(input), flatten(input_desc))):
                        out = _to_variable(*x, x_name=input_name, x_idx=i)
                        flat_output.append(out)
                output = pack_sequence_as(input, flat_output)
                return output
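The heart of Code Example #2 is the "data extractor" trick: for each placeholder variable it creates, the method stores a closure keyed by the variable name that knows how to pull the matching value out of (args, kwargs) at call time. A minimal sketch of that idea, with illustrative names (extractors, register, feed_dict_for) that are not Paddle APIs:

extractors = {}

def register(placeholder_name, input_idx=None, input_name=None):
    # each call creates a fresh scope, so the lambdas capture the right values
    if input_name is not None:    # keyword argument
        extractors[placeholder_name] = lambda args, kwargs: kwargs[input_name]
    else:                         # positional argument
        extractors[placeholder_name] = lambda args, kwargs: args[input_idx]

def feed_dict_for(args, kwargs):
    # evaluate every stored closure against the actual call arguments
    return {name: get(args, kwargs) for name, get in extractors.items()}

register("image", input_idx=0)
register("label", input_name="label")
print(feed_dict_for(("img_batch",), {"label": "label_batch"}))
# {'image': 'img_batch', 'label': 'label_batch'}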
Code Example #3
    def _replace_value_with_input_spec(self, args):
        args_with_spec = []
        for idx, input_var in enumerate(flatten(args)):
            if isinstance(input_var, np.ndarray):
                input_var = paddle.static.InputSpec.from_numpy(input_var)
            elif isinstance(input_var, core.VarBase):
                input_var = paddle.static.InputSpec.from_tensor(input_var)

            args_with_spec.append(input_var)

        args_with_spec = pack_sequence_as(args, args_with_spec)
        return args_with_spec
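A hedged usage sketch for the conversion performed above, assuming a Paddle 2.x environment where paddle.static.InputSpec.from_numpy is available; non-tensor leaves would simply pass through unchanged:

import numpy as np
import paddle

x = np.ones([4, 8], dtype="float32")
spec = paddle.static.InputSpec.from_numpy(x)
print(spec.shape, spec.dtype)  # e.g. (4, 8) and a float32 dtype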
Code Example #4
    def args_to_input_spec(self, args, kwargs):
        """
        Converts input arguments into InputSpec.
        
        1. If specific input_spec, use them to construct feed layers.
        2. If input_spec is None, consider all Tensor and Numpy.ndarray as feed layers

        Args:
            args(tuple): tuple of input arguments value of function containing default kwargs value.
            kwargs(dict): kwargs arguments received by **kwargs.

        Return:
            Same nest structure with args by replacing value with InputSpec.
        """
        input_with_spec = []

        if self._input_spec is not None:
            # Note: Because the value types and length of `kwargs` are uncertain,
            # we currently don't support this case when `input_spec` is specified.
            if kwargs:
                raise ValueError(
                    "{} got unexpected keyword arguments: {}. Cannot trace the function when `input_spec` is specificed."
                    .format(self._dygraph_function.__name__, kwargs))

            # Note: `args` can be longer than `input_spec`, because `args` may
            # contain non-tensor values merged from `kwargs` after
            # `unified_args_and_kwargs`.
            if len(args) < len(self._input_spec):
                raise ValueError(
                    "Requires len(arguments) >= len(input_spec), but received len(args):{} < len(InputSpec): {}"
                    .format(len(args), len(self._input_spec)))

            # replace argument with corresponding InputSpec.
            input_with_spec = convert_to_input_spec(args, self._input_spec)
        else:
            for idx, input_var in enumerate(flatten(args)):
                if isinstance(input_var, np.ndarray):
                    input_var = paddle.static.InputSpec.from_numpy(input_var)
                elif isinstance(input_var, core.VarBase):
                    input_var = paddle.static.InputSpec.from_tensor(input_var)

                input_with_spec.append(input_var)

            input_with_spec = pack_sequence_as(args, input_with_spec)

        # If no name is specified in input_spec, add a default name
        # derived from the decorated function's argument names.
        input_with_spec = replace_spec_empty_name(self._arg_names,
                                                  input_with_spec)

        return input_with_spec
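The length rule enforced above allows input_spec to be shorter than args, because trailing non-tensor arguments keep their concrete values. A sketch of that alignment under illustrative names (align is not Paddle's convert_to_input_spec):

def align(args, specs):
    if len(args) < len(specs):
        raise ValueError(
            "Requires len(args) >= len(input_spec), but received "
            "len(args): %d < len(InputSpec): %d" % (len(args), len(specs)))
    # leading arguments are replaced by their specs, the rest pass through
    return tuple(specs) + tuple(args[len(specs):])

print(align(("x_tensor", "y_tensor", 0.5), ["x_spec", "y_spec"]))
# ('x_spec', 'y_spec', 0.5)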
Code Example #5
File: function_spec.py Project: sandyhouse/Paddle
    def _replace_value_with_input_spec(self, args):
        args_with_spec = []
        for idx, input_var in enumerate(flatten(args)):
            if isinstance(input_var, np.ndarray):
                input_var = paddle.static.InputSpec.from_numpy(input_var)
                _set_spec_stop_gradient(input_var, True)
            elif isinstance(input_var, (core.VarBase, core.eager.Tensor)):
                stop_gradient = input_var.stop_gradient
                input_var = paddle.static.InputSpec.from_tensor(input_var)
                _set_spec_stop_gradient(input_var, stop_gradient)

            args_with_spec.append(input_var)

        args_with_spec = pack_sequence_as(args, args_with_spec)
        return args_with_spec
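This newer variant differs from Code Example #3 in two ways: it also accepts eager-mode core.eager.Tensor inputs, and it copies the source tensor's stop_gradient flag onto the resulting spec. Since _set_spec_stop_gradient is a Paddle-internal helper, the sketch below only illustrates the intent with a stand-in spec object:

class SpecStub:
    # stand-in for InputSpec; real specs receive the flag via
    # _set_spec_stop_gradient
    def __init__(self):
        self.stop_gradient = False

source_stop_gradient = True                 # read from the dygraph tensor
spec = SpecStub()
spec.stop_gradient = source_stop_gradient   # preserve gradient intent on the spec
print(spec.stop_gradient)                   # True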
Code Example #6
File: partial_program.py Project: sandyhouse/Paddle
    def restore(self, value_list):
        """
        Restores the nested sequence from the flat value list.
        """
        assert len(self.__input_list) == len(value_list)
        return pack_sequence_as(self.__raw_input, value_list)
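A hedged round-trip sketch of restore(), reusing the flatten_nest and pack_as helpers sketched after Code Example #1; NestCache is an illustrative name, not Paddle's class:

class NestCache:
    def __init__(self, raw_input):
        # keep the original nesting and its flat leaf list
        self.__raw_input = raw_input
        self.__input_list = flatten_nest(raw_input)

    def restore(self, value_list):
        # the flat value list must match the cached structure leaf-for-leaf
        assert len(self.__input_list) == len(value_list)
        return pack_as(self.__raw_input, value_list)

cache = NestCache((1, (2, 3)))
print(cache.restore([10, 20, 30]))  # (10, (20, 30))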
Code Example #7
File: dygraph_helper.py Project: heliqi/Paddle2ONNX
def get_program(layer, input_spec, output_spec, **configs):
    paddle.jit.set_verbosity(0)
    prog_translator = ProgramTranslator()
    if not prog_translator.enable_to_static:
        raise RuntimeError(
            "The Paddle2onnx doesn't work when setting ProgramTranslator.enable to False."
        )

    if not isinstance(layer, Layer):
        raise TypeError(
            "The input of paddle2onnx should be 'Layer', but received input type is %s."
            % type(layer))

    if isinstance(layer, paddle.DataParallel):
        inner_layer = layer._layers
    else:
        inner_layer = layer

    # avoid changing the user-given input_spec
    inner_input_spec = None
    if input_spec is not None:
        for attr_func in dir(inner_layer):
            static_func = getattr(inner_layer, attr_func, None)
            if isinstance(static_func,
                          StaticFunction) and 'forward' != attr_func:
                raise ValueError(
                    "If there are static functions other than 'forward' that need to be saved, the input 'input_spec' should be None, but received the type of 'input_spec' is %s."
                    % type(input_spec))

        if not isinstance(input_spec, (list, tuple)):
            raise TypeError(
                "The input input_spec should be 'list', but received input_spec's type is %s."
                % type(input_spec))
        inner_input_spec = []
        for var in flatten(input_spec):
            if isinstance(var, paddle.static.InputSpec):
                inner_input_spec.append(var)
            elif isinstance(var, (core.VarBase, core.eager.Tensor, Variable)):
                inner_input_spec.append(
                    paddle.static.InputSpec.from_tensor(var))
            else:
                # NOTE(Aurelius84): Support non-Tensor type in `input_spec`.
                inner_input_spec.append(var)

    extra_var_info = dict()
    functions = dir(inner_layer)
    for attr_func in functions:
        static_func = getattr(inner_layer, attr_func, None)
        if isinstance(static_func, StaticFunction):
            concrete_program = static_func.concrete_program_specify_input_spec(
                inner_input_spec)
        elif 'forward' == attr_func:
            # As in jit.save: if input_spec is incomplete, declarative will raise an error.
            # inner_input_spec is a list[InputSpec]; it should be packed into the same
            # structure as the original input_spec here.
            if inner_input_spec:
                inner_input_spec = pack_sequence_as(input_spec,
                                                    inner_input_spec)
            static_forward = declarative(inner_layer.forward,
                                         input_spec=inner_input_spec)
            concrete_program = static_forward.concrete_program
            # input_spec has already been consumed by declarative, which is
            # equivalent to @declarative with input_spec plus jit.save without
            # input_spec; clear it to avoid a needless warning.
            inner_input_spec = None
        else:
            continue

        input_var_names = _get_input_var_names(concrete_program.inputs,
                                               inner_input_spec)

        # NOTE(chenweihang): [ Get output variables ]
        # The rule mirrors [ Get input variable names ]. For output vars,
        # we only support VarBase specs; in fact, we only need the output
        # var names, and using output_spec is not recommended.
        output_vars = _get_output_vars(concrete_program.outputs, output_spec)

    feeded_var_names = input_var_names
    target_vars = output_vars
    main_program = concrete_program.main_program.clone()
    export_for_deployment = True

    if isinstance(feeded_var_names, six.string_types):
        feeded_var_names = [feeded_var_names]
    elif export_for_deployment:
        if len(feeded_var_names) > 0:
            # TODO(paddle-dev): polish these code blocks
            if not (bool(feeded_var_names) and all(
                    isinstance(name, six.string_types)
                    for name in feeded_var_names)):
                raise ValueError("'feed_var_names' should be a list of str.")

    if isinstance(target_vars, Variable):
        target_vars = [target_vars]
    elif export_for_deployment:
        if not (bool(target_vars)
                and all(isinstance(var, Variable) for var in target_vars)):
            raise ValueError("'target_vars' should be a list of Variable.")

    main_program = _get_valid_program(main_program)

    # remind user to set auc_states to zeros if the program contains auc op
    all_ops = main_program.global_block().ops
    for op in all_ops:
        # clear device of Op
        device_attr_name = core.op_proto_and_checker_maker.kOpDeviceAttrName()
        op._set_attr(device_attr_name, "")
        if op.type == 'auc':
            warnings.warn(
                "please ensure that you have set the auc states to zeros before saving inference model"
            )
            break

    with program_guard(main_program):
        uniq_target_vars = []
        for i, var in enumerate(target_vars):
            uniq_target_vars.append(var)
        target_vars = uniq_target_vars
    target_var_name_list = [var.name for var in target_vars]

    origin_program = main_program.clone()

    main_program = main_program.clone()
    global_block = main_program.global_block()
    need_to_remove_op_index = []
    for i, op in enumerate(global_block.ops):
        op.desc.set_is_target(False)
        if op.type == "feed" or op.type == "fetch":
            need_to_remove_op_index.append(i)

    for index in need_to_remove_op_index[::-1]:
        global_block._remove_op(index)

    main_program.desc.flush()

    main_program = main_program._prune_with_input(
        feeded_var_names=feeded_var_names, targets=target_vars)
    main_program = main_program._inference_optimize(prune_read_op=True)
    fetch_var_names = [v.name for v in target_vars]

    for target_v in target_vars:
        if not main_program.global_block().has_var(target_v.name):
            main_program.global_block().create_var(
                name=target_v.name,
                shape=target_v.shape,
                dtype=target_v.dtype,
                persistable=target_v.persistable)

    prepend_feed_ops(main_program, feeded_var_names)
    append_fetch_ops(main_program, fetch_var_names)

    main_program.desc._set_version()
    paddle.fluid.core.save_op_version_info(main_program.desc)

    main_program._copy_dist_param_info_from(origin_program)

    return main_program, feeded_var_names, target_vars
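A hedged usage sketch for get_program, assuming a Paddle 2.x environment with dynamic-to-static translation enabled; paddle.nn.Linear and paddle.static.InputSpec are real Paddle APIs, while the exact feed names depend on the names carried by the specs:

import paddle

layer = paddle.nn.Linear(4, 2)
spec = [paddle.static.InputSpec(shape=[None, 4], dtype="float32", name="x")]
main_program, feed_names, fetch_vars = get_program(layer, spec, output_spec=None)
print(feed_names)       # expected to contain 'x', taken from the InputSpec name
print(len(fetch_vars))  # one fetch target: the Linear layer's output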