Example #1
# imports needed by this excerpt (Paddle 1.x fluid API)
import paddle.fluid.unique_name as unique_name
from paddle.fluid.param_attr import ParamAttr
def update_attr_name(name, default_name, attr, is_bias):
    """
    Update the name in an attribute
    1. If the user provides a name, then generate the candidate name using the
       provided name;
    2. else generate the candidate name using the default name (which should be
       the name of the layer wrapper).
    3. After obtaining the candidate name, if the attr is False, then we return False;
    4. if the attr is None or attr.name is None, then we set the attr's name as the candidate name;
    5. else we ignore the candidate name and do nothing.
    """
    def check_or_replace_name(name, attr):
        # this parameter is not used: skip creating it
        if attr is False:
            return False

        if attr is None:
            return ParamAttr(name=name)

        if attr.name is None:
            attr.name = name
        return attr

    attr = ParamAttr._to_attr(attr)
    name = (default_name if name is None else name)
    suffix = "b" if is_bias else "w"
    new_name = unique_name.generate(name + "." + suffix)
    return check_or_replace_name(new_name, attr)
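A minimal usage sketch (assuming the fluid imports above; the layer name "fc" is made up for illustration):

weight_attr = update_attr_name(None, "fc", None, is_bias=False)
print(weight_attr.name)  # e.g. "fc.w_0": unique_name.generate appends a counter

bias_attr = update_attr_name(None, "fc", False, is_bias=True)
print(bias_attr)  # False: the bias parameter is skipped entirely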
Example #2
# imports needed by this excerpt (Paddle 1.x fluid API)
import copy

from paddle.fluid.param_attr import ParamAttr
def _convert_param_attr_to_list(param_attr, n):
    """
    If `param_attr` is a list or tuple, convert every element in it to a
    ParamAttr instance. Otherwise, repeat `param_attr` `n` times to
    construct a list, and rename every one by appending a increasing index
    suffix to avoid having same names when `param_attr` contains a name.
    Parameters:
        param_attr (list|tuple|ParamAttr): A list, tuple or something can be
            converted to a ParamAttr instance by `ParamAttr._to_attr`.
        n (int): The times to repeat to construct a list when `param_attr`
            is not a list or tuple.
    Returns:
        list: A list composed of each including cell's `param_attr`.
    """
    if isinstance(param_attr, (list, tuple)):
        assert len(param_attr) == n, (
            "length of param_attr should be %d when it is a list/tuple" % n)
        param_attrs = []
        for attr in param_attr:
            if isinstance(attr, bool):
                if attr:
                    param_attrs.append(ParamAttr._to_attr(None))
                else:
                    param_attrs.append(False)
            else:
                param_attrs.append(ParamAttr._to_attr(attr))
    elif isinstance(param_attr, bool):
        if param_attr:
            param_attrs = [ParamAttr._to_attr(None) for _ in range(n)]
        else:
            param_attrs = [False] * n
    else:
        param_attrs = []
        attr = ParamAttr._to_attr(param_attr)
        for i in range(n):
            attr_i = copy.deepcopy(attr)
            if attr.name:
                attr_i.name = attr_i.name + "_" + str(i)
            param_attrs.append(attr_i)
    return param_attrs
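A minimal usage sketch (again assuming the imports above; the name "lstm_w" is made up):

attrs = _convert_param_attr_to_list(ParamAttr(name="lstm_w"), 3)
print([a.name for a in attrs])  # ['lstm_w_0', 'lstm_w_1', 'lstm_w_2']
print(_convert_param_attr_to_list(False, 2))  # [False, False]: no parameters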
Example #3
This excerpt is a method of a PaddleFL layer-helper class. It assumes the
Paddle 1.x fluid imports (copy, ParamAttr, WeightNormParamAttr,
XavierInitializer, unique_name, core) and a module-level create_mpc_parameter
function from the surrounding project.
    def create_mpc_parameter(self,
                             attr,
                             shape,
                             dtype,
                             is_bias=False,
                             default_initializer=None,
                             stop_gradient=False,
                             type=core.VarDesc.VarType.LOD_TENSOR):
        """
        Create mpc parameters for this layers.
        Refer to LayerHelper.create_parameter in Paddle 1.7.
        :param attr:
        :param shape:
        :param dtype:
        :param is_bias:
        :param default_initializer:
        :param stop_gradient:
        :param type:
        :return:
        """
        # Deepcopy the attr so that parameters can be shared in program
        attr = copy.deepcopy(attr)
        attr = ParamAttr._to_attr(attr)
        if not attr:
            return None
        assert isinstance(attr, ParamAttr)
        suffix = 'b' if is_bias else 'w'
        if attr.name is None:
            attr.name = unique_name.generate(".".join([self.name, suffix]))

        if default_initializer is None and attr.initializer is None:
            if isinstance(dtype, core.VarDesc.VarType):
                if dtype != core.VarDesc.VarType.INT64:
                    raise TypeError(
                        "Can not create mpc parameter with default initializer "
                        "when dtype is not int64 type. Set default_initializer "
                        "to fit the parameter dtype!")
            else:
                if dtype != "int64":
                    raise TypeError(
                        "Can not create mpc parameter with default initializer when "
                        "dtype is not int64 type. Set default_initializer to "
                        "fit the parameter dtype!")
            if is_bias:
                attr._set_default_bias_initializer()
            else:
                attr._set_default_initializer(XavierInitializer(seed=65536))
        else:
            attr._set_default_initializer(default_initializer)

        # TODO(xukun07): WeightNormParamAttr is not supported in this first
        # version.
        # Paddle 1.7: if weight normalization is set, insert extra parameters
        # and ops. Refer to https://arxiv.org/pdf/1602.07868.pdf
        if isinstance(attr, WeightNormParamAttr):
            # param = self._create_weight_normalize(attr, shape, dtype)
            # WeightNormParamAttr.params_with_weight_norm.append(param)
            # return param
            raise NotImplementedError(
                "The WeightNormParamAttr for attr is not "
                "supported in this version")

        startup_program_global_block = self.startup_program.global_block()
        create_mpc_parameter(
            block=startup_program_global_block,
            dtype=dtype,
            shape=shape,
            type=type,
            **attr._to_kwargs(with_initializer=True))
        main_program_global_block = self.main_program.global_block()
        return create_mpc_parameter(
            block=main_program_global_block,
            dtype=dtype,
            shape=shape,
            type=type,
            **attr._to_kwargs())
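A hypothetical call sketch (assumes `helper` is an instance of the layer-helper class this method belongs to; the shape is purely illustrative):

w = helper.create_mpc_parameter(
    attr=ParamAttr(),   # unnamed: a unique "<helper.name>.w_<N>" is generated
    shape=[16, 16],
    dtype="int64",      # the default-initializer path requires int64
    is_bias=False)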