Example #1
def truncated_normal_initializer(
        mean: float = 0.0,
        stddev: float = 1.0) -> initializer_conf_util.InitializerConf:
    r"""Initializer that generates a truncated normal distribution.

    Args:
        mean (float, optional): A scalar (float). Defaults to 0.0.
        stddev (float, optional): A scalar (float). Defaults to 1.0.

    Returns:
        initializer_conf_util.InitializerConf: Initial configuration

    For example: 

    Example 1: 

    .. code-block:: python 

        import oneflow as flow
        import oneflow.typing as tp


        def watch_handler(y: tp.Numpy):
            print("out", y)


        @flow.global_function()
        def truncated_normal_Job() -> None:
            init = flow.truncated_normal_initializer(mean=1, stddev=1)
            blob = flow.get_variable(
                "blob-weight",
                shape=(3, ),
                initializer=init,
                trainable=True
            )
            flow.watch(blob, watch_handler)


        checkpoint = flow.train.CheckPoint()
        checkpoint.init()
        truncated_normal_Job()

        # out [1.8303236  0.09787154 0.83049864]

    Example 2: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def conv2d_truncated_normal_Job(x: tp.Numpy.Placeholder((1, 256, 32, 32))
        ) -> tp.Numpy:
            initializer = flow.truncated_normal_initializer(mean=0, stddev=1)

            conv2d = flow.layers.conv2d(
                x,
                filters=128,
                kernel_size=3,
                strides=1,
                padding='SAME',
                kernel_initializer=initializer, 
                name="Conv2d"
            )
            return conv2d


        x = np.random.randn(1, 256, 32, 32).astype(np.float32)
        out = conv2d_truncated_normal_Job(x)

        # out.shape (1, 128, 32, 32)

    """
    initializer = initializer_conf_util.InitializerConf()
    setattr(initializer.truncated_normal_conf, "mean", float(mean))
    setattr(initializer.truncated_normal_conf, "std", float(stddev))
    return initializer
Example #2
def variance_scaling_initializer(
    scale: float = 1.0,
    mode: str = "fan_in",
    distribution: str = "truncated_normal",
    data_format: str = "",
) -> initializer_conf_util.InitializerConf:
    r"""Initializer that generates a truncated normal distribution or a random normal distribution or a random uniform distribution with a scale adapting to it.

    When the distribution is "truncated_normal", the equation is:

    .. math:: 

        W \sim N\left(0, \sqrt{\frac{scale}{n}}\right)

    If mode is "fan_in", the "n" is the number of input units in the weight Blob. 

    If mode is "fan_out", the "n" is the number of output units in the weight Blob. 

    if mode is "fan_avg", the "n" is the average of the number of input and output units in the weight Blob
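
    For example, with scale=1.0 and mode="fan_in", a weight Blob with 100 input units is sampled from :math:`N(0, \sqrt{1.0/100}) = N(0, 0.1)`.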

    Args:
        scale (float, optional): Scaling factor (positive float). Defaults to 1.0.
        mode (str, optional): One of "fan_in", "fan_out", "fan_avg". Defaults to "fan_in".
        distribution (str, optional): Random distribution to use, one of "truncated_normal", "random_normal", "random_uniform". Defaults to "truncated_normal".
        data_format (str, optional): A string, one of "N...C" or "NC...". Defaults to "".

    Returns:
        initializer_conf_util.InitializerConf: Initial configuration

    For example: 

    Example 1: 

    .. code-block:: python 

        import oneflow as flow
        import oneflow.typing as tp


        def watch_handler(y: tp.Numpy):
            print("out", y)


        @flow.global_function()
        def variance_scale_Job() -> None:
            init = flow.variance_scaling_initializer(scale=2.0, mode="fan_avg")
            blob = flow.get_variable(
                "blob-weight",
                shape=(3, 3),
                initializer=init,
                trainable=True
            )
            flow.watch(blob, watch_handler)


        checkpoint = flow.train.CheckPoint()
        checkpoint.init()
        variance_scale_Job()

        # out [[-0.13931477  0.12266728 -0.9434968 ]
        #      [-0.49665168  0.10231158 -0.19194333]
        #      [-0.7902896  -1.7034698  -0.38695997]]

    Example 2: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def conv2d_variance_scaling_Job(x: tp.Numpy.Placeholder((1, 256, 32, 32))
        ) -> tp.Numpy:
            initializer = flow.variance_scaling_initializer(mode="fan_out")
            conv2d = flow.layers.conv2d(
                x,
                filters=128,
                kernel_size=3,
                strides=1,
                padding='SAME',
                kernel_initializer=initializer, 
                name="Conv2d"
            )
            return conv2d


        x = np.random.randn(1, 256, 32, 32).astype(np.float32)
        out = conv2d_variance_scaling_Job(x)

        # out.shape (1, 128, 32, 32)

    """
    initializer = initializer_conf_util.InitializerConf()
    setattr(initializer.variance_scaling_conf, "scale", float(scale))
    setattr(
        initializer.variance_scaling_conf,
        "variance_norm",
        _get_variance_norm(mode),
    )
    setattr(
        initializer.variance_scaling_conf,
        "distribution",
        _get_random_distribution(distribution),
    )
    setattr(
        initializer.variance_scaling_conf,
        "data_format",
        _get_data_format(data_format),
    )
    return initializer
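
The helpers _get_variance_norm, _get_random_distribution, and _get_data_format are not shown in this snippet. As a rough illustration of where the "n" in the formula above comes from, here is a minimal sketch that derives the fans from a weight shape; the function name and the assumed kernel layout are illustrative, not OneFlow internals:

import math

def _compute_fan_sketch(shape, mode):
    # Illustrative only, not OneFlow's implementation. Assumes a kernel
    # layout of (*receptive_field, in_units, out_units).
    receptive_field_size = 1
    for dim in shape[:-2]:
        receptive_field_size *= dim
    fan_in = shape[-2] * receptive_field_size
    fan_out = shape[-1] * receptive_field_size
    if mode == "fan_in":
        return fan_in
    if mode == "fan_out":
        return fan_out
    if mode == "fan_avg":
        return (fan_in + fan_out) / 2.0
    raise ValueError("mode must be one of fan_in, fan_out, fan_avg")

# stddev for a 3x3 kernel with 256 inputs and 128 outputs, scale=2.0:
stddev = math.sqrt(2.0 / _compute_fan_sketch((3, 3, 256, 128), "fan_avg"))
# fan_in = 2304, fan_out = 1152, fan_avg = 1728 -> stddev ~= 0.034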
Example #3
def random_normal_initializer(
    mean: float = 0.0,
    stddev: float = 1.0,
    seed: Optional[int] = None,
    dtype: Optional[flow.dtype] = None,
) -> initializer_conf_util.InitializerConf:
    r"""Initializer that generates blob with a normal distribution.

    Args:
        mean (float, optional): A Python scalar. Mean of the random values to generate. Defaults to 0.0.
        stddev (float, optional): A Python scalar. Standard deviation of the random values to generate. Defaults to 1.0.
        seed (Optional[int], optional): Not supported yet; must be None. Defaults to None.
        dtype (Optional[flow.dtype], optional): Not supported yet; must be None. Defaults to None.

    Returns:
        initializer_conf_util.InitializerConf: Initial configuration

    For example: 

    Example 1: 

    .. code-block:: python 

        import oneflow as flow
        import oneflow.typing as tp


        def watch_handler(y: tp.Numpy):
            print("out", y)


        @flow.global_function()
        def random_normal_Job() -> None:
            init = flow.random_normal_initializer(mean=1, stddev=1)
            blob = flow.get_variable(
                "blob-weight",
                shape=(3, ),
                initializer=init,
                trainable=True
            )
            flow.watch(blob, watch_handler)


        checkpoint = flow.train.CheckPoint()
        checkpoint.init()
        random_normal_Job()

        # out [1.4190257 2.7663114 1.7114428]

    Example 2: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def conv2d_random_normal_Job(x: tp.Numpy.Placeholder((1, 256, 32, 32))
        ) -> tp.Numpy:
            initializer = flow.random_normal_initializer(mean=0, stddev=1)

            conv2d = flow.layers.conv2d(
                x,
                filters=128,
                kernel_size=3,
                strides=1,
                padding='SAME',
                kernel_initializer=initializer, 
                name="Conv2d"
            )
            return conv2d


        x = np.random.randn(1, 256, 32, 32).astype(np.float32)
        out = conv2d_random_normal_Job(x)

        # out.shape (1, 128, 32, 32)

    """
    assert seed is None, "seed is not supported yet"
    assert dtype is None, "dtype is not supported yet"
    initializer = initializer_conf_util.InitializerConf()
    setattr(initializer.random_normal_conf, "mean", float(mean))
    setattr(initializer.random_normal_conf, "std", float(stddev))

    return initializer
Example #4
def constant_initializer(
        value: float = 0,
        dtype: flow.dtype = flow.float
) -> initializer_conf_util.InitializerConf:
    r"""Initializer that generates blob with constant values.

    Args:
        value (float, optional): A Python scalar. All elements of the initialized variable are set to this value. Defaults to 0.
        dtype (flow.dtype, optional): Default data type. Defaults to flow.float.

    Raises:
        NotImplementedError: Raised when the data type is not supported.

    Returns:
        initializer_conf_util.InitializerConf:  An InitializerConf object.
    
    For example: 

    Example 1:

    .. code-block:: python 

        import oneflow as flow
        import oneflow.typing as tp


        def watch_handler(y: tp.Numpy):
            print("out", y)


        @flow.global_function()
        def constant_Job() -> None:
            init = flow.constant_initializer(2.5)
            blob = flow.get_variable(
                "blob-weight",
                shape=(3, ),
                initializer=init,
                trainable=True
            )
            flow.watch(blob, watch_handler)


        checkpoint = flow.train.CheckPoint()
        checkpoint.init()
        constant_Job()

        # out [2.5 2.5 2.5]

    Example 2:

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def conv2d_constant_Job(x: tp.Numpy.Placeholder((1, 256, 32, 32))
        ) -> tp.Numpy:
            initializer = flow.constant_initializer(0.01)
            conv2d = flow.layers.conv2d(
                x,
                filters=128,
                kernel_size=3,
                strides=1,
                padding='SAME',
                kernel_initializer=initializer, 
                name="Conv2d"
            )
            return conv2d


        x = np.random.randn(1, 256, 32, 32).astype(np.float32)
        out = conv2d_constant_Job(x)

        # out.shape (1, 128, 32, 32)

    """
    initializer = initializer_conf_util.InitializerConf()
    if dtype in [flow.float, flow.double]:
        setattr(initializer.constant_conf, "value", float(value))
    elif dtype in [
            flow.int8,
            flow.int32,
            flow.int64,
    ]:
        setattr(initializer.constant_int_conf, "value", int(value))
    else:
        raise NotImplementedError("Do not support such data type")

    return initializer
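
The integer branch above can be exercised directly; for example:

# Integer dtypes are routed through constant_int_conf, floating-point
# dtypes through constant_conf, as in the dispatch above.
int_init = flow.constant_initializer(2, dtype=flow.int32)
float_init = flow.constant_initializer(2.5, dtype=flow.float)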
Example #5
def empty_initializer(
    dtype: flow.dtype = flow.float,
) -> initializer_conf_util.InitializerConf:
    # Builds an "empty" initializer conf: no initial values are generated,
    # so the variable is expected to be filled elsewhere. The dtype
    # argument is currently unused.
    initializer = initializer_conf_util.InitializerConf()
    empty_conf = initializer_conf_util.EmptyInitializerConf()
    initializer.empty_conf.CopyFrom(empty_conf)
    return initializer
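
A minimal usage sketch; the export of empty_initializer on the flow namespace is assumed here (matching the other initializers in this listing), and the variable is expected to be filled later, e.g. by loading a checkpoint:

# Hypothetical usage: no initial values are generated, so something else
# (such as a checkpoint load) must write the variable before it is read.
blob = flow.get_variable(
    "blob-weight",
    shape=(3,),
    initializer=flow.empty_initializer(),
    trainable=True,
)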
Example #6
def random_uniform_initializer(minval: float = 0,
                               maxval: float = 1,
                               dtype: flow.dtype = flow.float
                               ) -> initializer_conf_util.InitializerConf:
    r"""Initializer that generates blobs with a uniform distribution. 

    Args:
        minval (float, optional): A python scalar. Lower bound of the range of random values to generate. Defaults to 0.
        maxval (float, optional): A python scalar. Upper bound of the range of random values to generate. Defaults to 1.
        dtype (flow.dtype, optional): Default data type. Defaults to flow.float.

    Raises:
        NotImplementedError: Raised when the data type is not supported.

    Returns:
        initializer_conf_util.InitializerConf:  Initial configuration

    For example: 

    Example 1: 

    .. code-block:: python 

        import oneflow as flow
        import oneflow.typing as tp


        def watch_handler(y: tp.Numpy):
            print("out", y)


        @flow.global_function()
        def random_uniform_Job() -> None:
            init = flow.random_uniform_initializer(minval=0, maxval=0.5)
            blob = flow.get_variable(
                "blob-weight",
                shape=(3, ),
                initializer=init,
                trainable=True
            )
            flow.watch(blob, watch_handler)


        checkpoint = flow.train.CheckPoint()
        checkpoint.init()
        random_uniform_Job()

        # out [0.07557311 0.3943565  0.31875622]

    Example 2: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def conv2d_random_uniform_Job(x: tp.Numpy.Placeholder((1, 256, 32, 32))
        ) -> tp.Numpy:
            initializer = flow.random_uniform_initializer(minval=0, maxval=0.5)

            conv2d = flow.layers.conv2d(
                x,
                filters=128,
                kernel_size=3,
                strides=1,
                padding='SAME',
                kernel_initializer=initializer, 
                name="Conv2d"
            )
            return conv2d


        x = np.random.randn(1, 256, 32, 32).astype(np.float32)
        out = conv2d_random_uniform_Job(x)
        
        # out.shape (1, 128, 32, 32)

    """
    assert minval <= maxval
    initializer = initializer_conf_util.InitializerConf()
    if dtype in [flow.float, flow.double]:
        setattr(initializer.random_uniform_conf, "min", float(minval))
        setattr(initializer.random_uniform_conf, "max", float(maxval))
    elif dtype in [
            flow.int8,
            flow.int32,
            flow.int64,
    ]:
        setattr(initializer.random_uniform_int_conf, "min", int(minval))
        setattr(initializer.random_uniform_int_conf, "max", int(maxval))
    else:
        raise NotImplementedError("Do not support such data type")

    return initializer
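
As with constant_initializer, integer dtypes take the *_int_conf branch; for example:

# Routed through random_uniform_int_conf because of the integer dtype.
int_init = flow.random_uniform_initializer(minval=0, maxval=10, dtype=flow.int32)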
Example #7
def _get_initializer():
    kernel_initializer = initializer_conf_util.InitializerConf()
    # 0.816496580927726 == sqrt(2/3)
    kernel_initializer.truncated_normal_conf.std = 0.816496580927726
    return kernel_initializer
Example #8
def _get_bias_initializer():
    # Biases start at zero via a constant initializer.
    bias_initializer = initializer_conf_util.InitializerConf()
    bias_initializer.constant_conf.value = 0.0
    return bias_initializer
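
A sketch of how these two helpers could feed flow.get_variable, following the pattern of the docstring examples above; the shapes and variable names are illustrative:

# Illustrative wiring of the kernel/bias helpers into variables.
weight = flow.get_variable(
    "conv-weight",
    shape=(128, 256, 3, 3),
    initializer=_get_initializer(),
    trainable=True,
)
bias = flow.get_variable(
    "conv-bias",
    shape=(128,),
    initializer=_get_bias_initializer(),
    trainable=True,
)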
Example #9
def truncated_normal_initializer(
    stddev: float = 1.0,
) -> initializer_conf_util.InitializerConf:
    # Simplified variant of Example #1: only the standard deviation is set;
    # the mean field is left at its proto default.
    initializer = initializer_conf_util.InitializerConf()
    setattr(initializer.truncated_normal_conf, "std", float(stddev))
    return initializer