Example no. 1
0
def init_method(method, shape, name, max_val=1.0):
    """
    Create a Parameter initialized with the requested method.

    Args:
        method (str): One of 'uniform', 'one', 'zero', 'normal'.
        shape (list): Shape of the parameter.
        name (str): Name of the parameter.
        max_val (float): Bound used by the 'uniform' initializer.

    Returns:
        Parameter: the initialized parameter.

    Raises:
        ValueError: if `method` is not a supported initializer name.
    """
    if method == 'uniform':
        params = Parameter(initializer(Uniform(max_val), shape, ms_type),
                           name=name)
    elif method == "one":
        params = Parameter(initializer("ones", shape, ms_type), name=name)
    elif method == 'zero':
        params = Parameter(initializer("zeros", shape, ms_type), name=name)
    elif method == "normal":
        # Draw with NumPy (not a mindspore initializer) to keep the
        # original fixed scale=0.01 behavior.
        params = Parameter(Tensor(
            np.random.normal(loc=0.0, scale=0.01,
                             size=shape).astype(dtype=np_type)),
                           name=name)
    else:
        # Previously an unknown method fell through and the trailing
        # `return params` raised UnboundLocalError; fail explicitly.
        raise ValueError("Unsupported init method: %s" % method)
    return params
Example no. 2
0
def init_method(method, shape, name, max_val=1.0):
    '''
    Parameter init method.

    Args:
        method (str): One of 'uniform', 'one', 'zero', 'normal'.
        shape (list): Shape of the parameter.
        name (str): Name of the parameter.
        max_val (float): Bound used by the 'uniform' initializer.

    Returns:
        Parameter.

    Raises:
        ValueError: if `method` is not a supported initializer name.
    '''
    if method == 'uniform':
        params = Parameter(initializer(
            Uniform(max_val), shape, ms_type), name=name)
    elif method == "one":
        params = Parameter(initializer("ones", shape, ms_type), name=name)
    elif method == 'zero':
        params = Parameter(initializer("zeros", shape, ms_type), name=name)
    elif method == "normal":
        params = Parameter(initializer("normal", shape, ms_type), name=name)
    else:
        # Previously an unknown method fell through and the trailing
        # `return params` raised UnboundLocalError; fail explicitly.
        raise ValueError("Unsupported init method: %s" % method)
    return params
Example no. 3
0
def init_var_dict(init_args, var_list):
    """
    Init vars with different methods.

    Args:
        init_args (tuple): (min_val, max_val); only max_val is used here.
        var_list (list): Triples of (key, shape, method).

    Returns:
        dict: Parameter objects keyed by name. The first occurrence of a
        key wins; unknown methods are silently skipped (original behavior).
    """
    var_map = {}
    _, max_val = init_args
    # Unpack the triples directly; the original enumerate index was only
    # used to re-index var_list.
    for key, shape, method in var_list:
        if key in var_map:
            continue
        if method in ('random', 'uniform'):
            var_map[key] = Parameter(initializer(Uniform(max_val), shape, ms_type), name=key)
        elif method == "one":
            var_map[key] = Parameter(initializer("ones", shape, ms_type), name=key)
        elif method == "zero":
            var_map[key] = Parameter(initializer("zeros", shape, ms_type), name=key)
        elif method == 'normal':
            # NumPy draw with fixed scale=0.01, as in the original.
            var_map[key] = Parameter(Tensor(np.random.normal(loc=0.0, scale=0.01, size=shape).
                                            astype(dtype=np_type)), name=key)
    return var_map
Example no. 4
0
def _initialize_weight_goog(shape=None, layer_type='conv', bias=False):
    if layer_type not in ('conv', 'bn', 'fc'):
        raise ValueError('The layer type is not known, the supported are conv, bn and fc')
    if bias:
        return Zero()
    if layer_type == 'conv':
        assert isinstance(shape, (tuple, list)) and len(
            shape) == 3, 'The shape must be 3 scalars, and are in_chs, ks, out_chs respectively'
        n = shape[1] * shape[1] * shape[2]
        return Normal(math.sqrt(2.0 / n))
    if layer_type == 'bn':
        return One()
    assert isinstance(shape, (tuple, list)) and len(
        shape) == 2, 'The shape must be 2 scalars, and are in_chs, out_chs respectively'
    n = shape[1]
    init_range = 1.0 / math.sqrt(n)
    return Uniform(init_range)
Example no. 5
0
def init_var_dict(init_args, in_vars):
    '''
    Var init function.

    Args:
        init_args (tuple): (min_val, max_val); only max_val is used here.
        in_vars (list): Triples of (key, shape, method).

    Returns:
        dict of Parameter keyed by name. First occurrence of a key wins;
        unknown methods are silently skipped (original behavior).
    '''
    var_map = {}
    _, _max_val = init_args
    # Iterate the triples directly; the enumerate index was unused.
    for key, shape, method in in_vars:
        if key in var_map:
            continue
        if method in ('random', 'uniform'):
            var_map[key] = Parameter(initializer(
                Uniform(_max_val), shape, ms_type), name=key)
        elif method == "one":
            var_map[key] = Parameter(initializer(
                "ones", shape, ms_type), name=key)
        elif method == "zero":
            var_map[key] = Parameter(initializer(
                "zeros", shape, ms_type), name=key)
        elif method == 'normal':
            var_map[key] = Parameter(initializer(
                "normal", shape, ms_type), name=key)
    return var_map
Example no. 6
0
def init_method(method, shape, name, max_val=1.0):
    """
    The method of init parameters.

    Args:
        method (str): The method uses to initialize parameter; one of
            'uniform', 'one', 'zero', 'normal'.
        shape (list): The shape of parameter.
        name (str): The name of parameter.
        max_val (float): Max value in parameter when uses 'random' or 'uniform' to initialize parameter.

    Returns:
        Parameter.

    Raises:
        ValueError: if `method` is not a supported initializer name.
    """
    if method == 'uniform':
        params = Parameter(initializer(Uniform(max_val), shape, ms_type), name=name)
    elif method == "one":
        params = Parameter(initializer("ones", shape, ms_type), name=name)
    elif method == 'zero':
        params = Parameter(initializer("zeros", shape, ms_type), name=name)
    elif method == "normal":
        # NumPy draw with fixed scale=0.01, as in the original.
        params = Parameter(Tensor(np.random.normal(loc=0.0, scale=0.01, size=shape).astype(dtype=np_type)), name=name)
    else:
        # Previously an unknown method fell through and the trailing
        # `return params` raised UnboundLocalError; fail explicitly.
        raise ValueError("Unsupported init method: %s" % method)
    return params
Example no. 7
0
def init_method(method, shape, name, max_val=0.01):
    """
    The method of init parameters.

    Args:
        method (str): The method uses to initialize parameter; one of
            'random', 'uniform', 'one', 'zero', 'normal'.
        shape (list): The shape of parameter.
        name (str): The name of parameter.
        max_val (float): Max value in parameter when uses 'random' or 'uniform' to initialize parameter.

    Returns:
        Parameter.

    Raises:
        ValueError: if `method` is not a supported initializer name.
    """
    if method in ('random', 'uniform'):
        params = Parameter(initializer(Uniform(max_val), shape, ms_type), name=name)
    elif method == "one":
        params = Parameter(initializer("ones", shape, ms_type), name=name)
    elif method == 'zero':
        params = Parameter(initializer("zeros", shape, ms_type), name=name)
    elif method == "normal":
        # max_val doubles as sigma for the normal initializer here.
        params = Parameter(initializer(Normal(max_val), shape, ms_type), name=name)
    else:
        # Previously an unknown method fell through and the trailing
        # `return params` raised UnboundLocalError; fail explicitly.
        raise ValueError("Unsupported init method: %s" % method)
    return params
Example no. 8
0
 def __init__(
     self,
     in_channels: int,
     out_channels: int,
     kernel_size: Union[int, Tuple[int, ...]],
     stride: Union[int, Tuple[int, ...]] = 1,
     padding: Union[str, int, Tuple[int, ...]] = 0,
     dilation: Union[int, Tuple[int, ...]] = 1,
     groups: int = 1,
     has_bias: bool = True,
     padding_mode: str = 'zeros',
 ):
     # Build weight/bias initializers (PyTorch-style Kaiming defaults)
     # before delegating construction to the parent conv class.
     #
     # NOTE(review): self.weight is read here BEFORE super().__init__()
     # runs; unless the parent class (not visible in this file) sets
     # self.weight earlier via some other mechanism, this raises
     # AttributeError — verify the ordering against the base class.
     fan_in, _ = _calculate_fan_in_and_fan_out(self.weight.shape)
     # Bias bound 1/sqrt(fan_in), matching the PyTorch Conv default.
     bound = 1 / math.sqrt(fan_in)
     weight_init = HeUniform(math.sqrt(5))
     bias_init = Uniform(bound)
     super().__init__(in_channels, out_channels, kernel_size, stride,
                      padding, dilation, groups, has_bias, padding_mode,
                      weight_init, bias_init)
     self.conv = ops.Conv2D(
         out_channels,
         kernel_size,
     )
Example no. 9
0
def init_var_dict(init_args, values):
    """
    Init parameter.

    Args:
        init_args (list): Define max and min value of parameters; only the
            max value is used here.
        values (list): Define name, shape and init method of parameters.

    Returns:
        dict, a dict of Parameter. First occurrence of a key wins; unknown
        init methods are silently skipped (original behavior).
    """
    var_map = {}
    _, _max_val = init_args
    for key, shape, init_flag in values:
        # Membership test on the dict itself, not on .keys().
        if key in var_map:
            continue
        if init_flag in ('random', 'uniform'):
            var_map[key] = Parameter(initializer(Uniform(_max_val), shape, ms_type), name=key)
        elif init_flag == "one":
            var_map[key] = Parameter(initializer("ones", shape, ms_type), name=key)
        elif init_flag == "zero":
            var_map[key] = Parameter(initializer("zeros", shape, ms_type), name=key)
        elif init_flag == 'normal':
            # _max_val doubles as sigma for the normal initializer here.
            var_map[key] = Parameter(initializer(Normal(_max_val), shape, ms_type), name=key)
    return var_map
Example no. 10
0
 def reset_parameters(self):
     """Re-initialize every parameter uniformly in [-1/sqrt(H), 1/sqrt(H)]."""
     bound = 1 / math.sqrt(self.hidden_size)
     for param in self.get_parameters():
         param.set_data(initializer(Uniform(bound), param.shape))
Example no. 11
0
 def reset_parameters(self):
     """Reset weight with He-uniform; reset bias (if any) with Uniform(1/sqrt(fan_in))."""
     kaiming = HeUniform(math.sqrt(5))
     self.weight = Parameter(initializer(kaiming, self.weight.shape), name='weight')
     if not self.has_bias:
         return
     fan_in, _ = _calculate_fan_in_and_fan_out(self.weight.shape)
     limit = 1 / math.sqrt(fan_in)
     self.bias = Parameter(initializer(Uniform(limit), [self.out_channels]), name='bias')
Example no. 12
0
def init_wt_unif(wt):
    """Re-initialize parameter *wt* in place with Uniform(0.02)."""
    fresh = initializer(Uniform(0.02), wt.shape)
    wt.set_data(fresh)
Example no. 13
0
def Conv2d(in_channels: int,
           out_channels: int,
           kernel_size: Union[int, Tuple[int, int]],
           stride: Union[int, Tuple[int, int]] = 1,
           padding: Union[str, int, Tuple[int, int]] = 'same',
           groups: int = 1,
           dilation: int = 1,
           bias: Optional[bool] = None,
           norm: Optional[str] = None,
           act: Optional[str] = None):
    """
    Build a 2-D convolution, optionally followed by normalization and
    activation, wrapped in a SequentialCell when more than one layer results.

    Args:
        in_channels (int): Input channel count.
        out_channels (int): Output channel count.
        kernel_size: Scalar or (kh, kw).
        stride: Scalar or (sh, sw).
        padding: 'same', a scalar, or an explicit tuple.
        groups (int): Convolution groups.
        dilation (int): Dilation factor.
        bias (bool | None): If None, bias is used exactly when no norm layer
            follows (norm layers subsume the bias).
        norm (str | None): Normalization type passed to Norm().
        act (str | None): Activation type passed to Act().

    Returns:
        nn.Cell: the conv layer alone, or an nn.SequentialCell.

    Raises:
        ValueError: on an unsupported string padding or init type.
    """
    # Normalize scalar arguments to 2-tuples.
    if isinstance(kernel_size, int):
        kernel_size = (kernel_size, kernel_size)
    if isinstance(stride, int):
        stride = (stride, stride)
    if isinstance(dilation, int):
        dilation = (dilation, dilation)
    if isinstance(padding, int):
        padding = (padding, padding)
    if isinstance(padding, str) and padding != 'same':
        # `assert` is stripped under -O; validate with an explicit raise.
        raise ValueError("Unsupported string padding: %r (only 'same')" % (padding,))
    if padding == 'same':
        padding = calc_same_padding(kernel_size, dilation)

    # Weight init from the project-wide config (only 'msra' is supported).
    init_cfg = DEFAULTS['init']
    if init_cfg['type'] == 'msra':
        mode = init_cfg['mode']
        distribution = init_cfg['distribution']
        if 'uniform' in distribution:
            weight_init = HeUniform(mode=mode)
        else:
            weight_init = HeNormal(mode=mode)
    else:
        raise ValueError("Unsupported init type: %s" % init_cfg['type'])

    # Bias bound 1/sqrt(fan_in), with fan_in = kh * kw * in_chs/groups.
    scale = math.sqrt(1 / (kernel_size[0] * kernel_size[1] *
                           (in_channels // groups)))
    bias_init = Uniform(scale)

    # Bias defaults to "no norm layer follows".
    if bias is None:
        use_bias = norm is None
    else:
        use_bias = bias

    conv = nn.Conv2d(in_channels,
                     out_channels,
                     kernel_size=kernel_size,
                     stride=stride,
                     padding=padding,
                     pad_mode='pad',
                     has_bias=use_bias,
                     dilation=dilation,
                     group=groups,
                     weight_init=weight_init,
                     bias_init=bias_init)

    layers = [conv]

    if norm:
        layers.append(Norm(out_channels, norm))
    if act:
        layers.append(Act(act))

    if len(layers) == 1:
        return layers[0]
    else:
        return nn.SequentialCell(layers)
Example no. 14
0
 def __init__(self, hidden_dim):
     """Build additive-attention sublayers for a model of width *hidden_dim*."""
     super().__init__()
     # Learnable bias added inside the score computation.
     self.bias = Parameter(initializer(Uniform(0.1), hidden_dim), 'bias')
     # Scoring head maps the combined representation to a scalar energy.
     self.score_proj = Dense(hidden_dim, 1)
     # Bias-free linear projections for queries and keys.
     self.query_proj = Dense(hidden_dim, hidden_dim, has_bias=False)
     self.key_proj = Dense(hidden_dim, hidden_dim, has_bias=False)