Example #1
def dense(
    inputs: remote_blob_util.BlobDef,
    units: int,
    activation: Optional[Callable[[remote_blob_util.BlobDef, str],
                                  remote_blob_util.BlobDef]] = None,
    use_bias: bool = True,
    kernel_initializer: Optional[op_conf_util.InitializerConf] = None,
    bias_initializer: Optional[op_conf_util.InitializerConf] = None,
    kernel_regularizer: Optional[op_conf_util.RegularizerConf] = None,
    bias_regularizer: Optional[op_conf_util.RegularizerConf] = None,
    trainable: bool = True,
    name: str = "Dense",
    model_distribute: distribute_util.Distribute = distribute_util.broadcast(),
) -> remote_blob_util.BlobDef:
    r"""Analogous to `tf.keras.layers.Dense <https://www.tensorflow.org/api_docs/python/tf/keras/layers/Dense>`_

    Args:
        inputs (remote_blob_util.BlobDef): An input `Blob` with at least 2 axes.
        units (int): A positive integer for the dimensionality of the output space.
        activation (Optional[Callable[[remote_blob_util.BlobDef, str], remote_blob_util.BlobDef]], optional): Activation function applied to the output. Defaults to None.
        use_bias (bool, optional): A boolean specifying whether the layer uses a bias vector. Defaults to True.
        kernel_initializer (Optional[op_conf_util.InitializerConf], optional): Initializer for the kernel weights matrix. Defaults to None.
        bias_initializer (Optional[op_conf_util.InitializerConf], optional): Initializer for the bias vector. Defaults to None.
        kernel_regularizer (Optional[op_conf_util.RegularizerConf], optional): Regularizer for the kernel weights matrix. Defaults to None.
        bias_regularizer (Optional[op_conf_util.RegularizerConf], optional): Regularizer for the bias vector. Defaults to None.
        trainable (bool, optional): A boolean specifying whether the variables are trainable. Defaults to True.
        name (str, optional): This layer's name. Defaults to "Dense".
        model_distribute (distribute_util.Distribute, optional): Defines the way to distribute the model. Defaults to distribute_util.broadcast().

    Returns:
        remote_blob_util.BlobDef: An N-D `Blob` with the same leading axes as `inputs` and a last axis of size `units`.

    Raises:
        ValueError: The input `Blob` must have at least 2 axes.
        ValueError: `model_distribute` must be one of auto, broadcast, or split.
        ValueError: The input must be a 2D `Blob` when `model_distribute` is split.
    """
    in_shape = inputs.shape
    in_num_axes = len(in_shape)
    assert in_num_axes >= 2  # the input must have at least 2 axes

    # auto(), broadcast(), and split(i) are assumed to return cached
    # singleton objects, so identity comparison works here
    assert (model_distribute is distribute_util.auto()
            or model_distribute is distribute_util.broadcast()
            or model_distribute is distribute_util.split(0))

    if model_distribute is distribute_util.split(0):
        # a blob split along dim 1 cannot be reshaped back, so a
        # model-split dense layer only accepts 2D input
        assert in_num_axes == 2

    if in_num_axes > 2:
        # flatten all leading axes so the matmul sees a 2D blob
        inputs = flow.reshape(inputs, (-1, in_shape[-1]))

    with flow.scope.namespace(name):
        if kernel_initializer is None:
            kernel_initializer = flow.constant_initializer(0)

        weight = flow.get_variable(
            name="weight",
            shape=(units, inputs.shape[1]),
            dtype=inputs.dtype,
            initializer=kernel_initializer,
            regularizer=kernel_regularizer,
            trainable=trainable,
            model_name="weight",
            distribute=model_distribute,
            reuse=False,
        )
        weight = weight.with_distribute(model_distribute)

        # the weight is stored as (units, in_features), hence transpose_b=True
        out = flow.matmul(a=inputs, b=weight, transpose_b=True, name="matmul")

        if use_bias:
            if bias_initializer is None:
                bias_initializer = flow.constant_initializer(0)

            bias = flow.get_variable(
                name="bias",
                shape=(units, ),
                dtype=inputs.dtype,
                initializer=bias_initializer,
                regularizer=bias_regularizer,
                trainable=trainable,
                model_name="bias",
                distribute=model_distribute,
                reuse=False,
            )
            bias = bias.with_distribute(model_distribute)
            out = flow.nn.bias_add(out, bias, name="bias_add")

        if callable(activation):
            out = activation(out, name="activation")

    if in_num_axes > 2:
        # restore the original leading axes
        out = flow.reshape(out, in_shape[:-1] + (units, ))

    return out
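
A minimal usage sketch, assuming this function is exported as flow.layers.dense in a pre-1.0 OneFlow release where jobs are declared with @flow.global_function and typed via oneflow.typing (the job name and shapes below are illustrative):

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def dense_job(x: tp.Numpy.Placeholder((8, 32), dtype=flow.float32)) -> tp.Numpy:
    # project 32-dim features to 16 dims; flow.math.relu matches the
    # expected activation signature (blob, name)
    return flow.layers.dense(x, units=16, activation=flow.math.relu, name="dense1")

out = dense_job(np.random.rand(8, 32).astype(np.float32))
print(out.shape)  # (8, 16)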
Example #2
def with_split_distribute(self, axis):
    # shorthand for with_distribute(distribute_util.split(axis))
    return self.with_distribute(distribute_util.split(axis))
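
At a call site this is just sugar over with_distribute; a brief sketch (the weight blob here is hypothetical, e.g. one returned by flow.get_variable):

# two equivalent ways to tag a blob as model-split along axis 0
weight = weight.with_distribute(distribute_util.split(0))
weight = weight.with_split_distribute(0)  # shorthand for the line above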
Example #3
def check_x_distribute(x, axis):
    # return True if x is model-split along any of the given axes;
    # split(i) is assumed to return a cached object per axis, so the
    # identity comparison is meaningful
    for i in axis:
        if x.distribute is distribute_util.split(i):
            return True
    return False
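
A short illustration of the intended call, assuming x is any blob that carries a distribute attribute (the names are hypothetical):

# True if x is model-split along axis 0 or axis 1;
# a broadcast- or auto-distributed x yields False
is_split = check_x_distribute(x, axis=[0, 1])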