Example #1
def _dense_layer(
    inputs,
    units,
    activation=None,
    use_bias=True,
    kernel_initializer=None,
    bias_initializer=None,
    trainable=True,
    name=None,
):
    in_shape = inputs.shape
    in_num_axes = len(in_shape)
    assert in_num_axes >= 2

    # `id_util.UniqueStr` generates a unique name prefix (helper module from the
    # enclosing project).
    name_prefix = name if name is not None else id_util.UniqueStr("Dense_")
    # Flatten any leading dimensions down to a 2-D (batch, features) matrix.
    if in_num_axes > 2:
        inputs = flow.reshape(inputs, (-1, in_shape[-1]))

    weight = flow.get_variable(
        name="{}-weight".format(name_prefix),
        shape=(units, inputs.shape[1]),
        dtype=inputs.dtype,
        initializer=(kernel_initializer if kernel_initializer is not None else
                     flow.constant_initializer(0)),
        trainable=trainable,
        model_name="weight",
    )
    # Fetch the variable once and repeat it so every piece of the split batch
    # sees the same weight; `args` (carrying `num_piece_in_batch`) is assumed to
    # be available from the enclosing module.
    weight = flow.identity(weight)
    weight = flow.repeat(weight, args.num_piece_in_batch)

    out = flow.matmul(
        a=inputs,
        b=weight,
        transpose_b=True,
        name="{}_matmul".format(name_prefix),
    )
    if use_bias:
        bias = flow.get_variable(
            name="{}-bias".format(name_prefix),
            shape=(units, ),
            dtype=inputs.dtype,
            initializer=(bias_initializer if bias_initializer is not None else
                         flow.constant_initializer(0)),
            trainable=trainable,
            model_name="bias",
        )

        # Same identity/repeat treatment for the bias.
        bias = flow.identity(bias)
        bias = flow.repeat(bias, args.num_piece_in_batch)

        out = flow.nn.bias_add(out,
                               bias,
                               name="{}_bias_add".format(name_prefix))
    if activation is not None:
        out = activation(out, name="{}_activation".format(name_prefix))
    if in_num_axes > 2:
        out = flow.reshape(out, in_shape[:-1] + (units,))

    return out
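For orientation, the computation this helper performs, leaving aside the identity/repeat plumbing, is an ordinary affine transform. A minimal NumPy sketch of that math, assuming 2-D float32 inputs and a ReLU-style activation (the names below are illustrative, not part of the OneFlow API):

import numpy as np

def dense_reference(inputs, weight, bias=None, activation=None):
    # weight has shape (units, in_features), matching transpose_b=True above.
    out = inputs @ weight.T
    if bias is not None:
        out = out + bias
    return activation(out) if activation is not None else out

x = np.random.rand(8, 16).astype(np.float32)   # (batch, in_features)
w = np.random.rand(32, 16).astype(np.float32)  # (units, in_features)
b = np.zeros(32, dtype=np.float32)
y = dense_reference(x, w, b, activation=lambda t: np.maximum(t, 0.0))
assert y.shape == (8, 32)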
Example #2
# `shape`, `dtype`, and `acc_num` come from the enclosing test scope;
# `oft` is presumably `oneflow.typing`.
def RepeatAccJob(a: oft.Numpy.Placeholder(shape)):
    if dtype == "float16":
        # Round-trip through float16: cast, repeat/acc, cast back to float32.
        return flow.cast(
            flow.acc(flow.repeat(flow.cast(a, flow.float16), acc_num), acc_num),
            flow.float,
        )
    else:
        return flow.acc(flow.repeat(a, acc_num), acc_num)
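A rough NumPy model of the semantics this job relies on, under the assumption that flow.repeat(a, n) replays the same tensor for n micro-batch pieces and flow.acc(x, n) sums n consecutive pieces back into a single output:

import numpy as np

def repeat(a, n):
    # Replay the same piece n times (one entry per micro-batch).
    return [a] * n

def acc(pieces, n):
    # Accumulate (sum) n consecutive pieces into a single output.
    assert len(pieces) == n
    return sum(pieces)

a = np.arange(12, dtype=np.float32).reshape(3, 4)
acc_num = 3
out = acc(repeat(a, acc_num), acc_num)
assert np.allclose(out, a * acc_num)  # holds under the summing assumption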
Example #3
def _conv2d_layer(
        args,
        name,
        input,
        filters,
        kernel_size=3,
        strides=1,
        padding="SAME",
        data_format="NCHW",
        dilation_rate=1,
        activation=op_conf_util.kRelu,
        use_bias=False,
        weight_initializer=flow.random_uniform_initializer(),
        bias_initializer=flow.random_uniform_initializer(),
):
    weight_shape = (filters, input.shape[1], kernel_size, kernel_size)
    weight = flow.get_variable(
        name + "-weight",
        shape=weight_shape,
        dtype=input.dtype,
        initializer=weight_initializer,
    )
    # Fetch the weight once and repeat it `args.num_piece_in_batch` times so it
    # lines up with the repeated input pieces.
    weight = flow.identity(weight)
    weight = flow.repeat(weight, args.num_piece_in_batch)
    output = flow.nn.conv2d(input,
                            weight,
                            strides,
                            padding,
                            data_format,
                            dilation_rate,
                            name=name)
    if use_bias:
        bias = flow.get_variable(
            name + "-bias",
            shape=(filters, ),
            dtype=input.dtype,
            initializer=bias_initializer,
        )
        bias = flow.identity(bias)
        bias = flow.repeat(bias, args.num_piece_in_batch)
        output = flow.nn.bias_add(output, bias, data_format)

    if activation is not None:
        if activation == op_conf_util.kRelu:
            output = flow.math.relu(output)
        else:
            raise NotImplementedError

    return output
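As a shape sanity check for the layout used above: with data_format="NCHW" the weight is (filters, in_channels, kh, kw), and with padding="SAME" the spatial size only shrinks by the stride. A small shape-only sketch, assuming the usual ceil(dim / stride) SAME convention (illustrative helper, not OneFlow code):

def conv2d_same_output_shape(input_shape, filters, strides=1):
    # NCHW input; SAME padding keeps ceil(h / stride) x ceil(w / stride).
    n, c, h, w = input_shape
    return (n, filters, -(-h // strides), -(-w // strides))

# Weight for filters=16 on a 3-channel input with kernel_size=3 is (16, 3, 3, 3).
assert conv2d_same_output_shape((8, 3, 224, 224), filters=16) == (8, 16, 224, 224)
assert conv2d_same_output_shape((8, 3, 224, 224), filters=16, strides=2) == (8, 16, 112, 112)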
Example #4
# Fixed-shape variant: repeat three times, then accumulate the three pieces.
def RepeatAccJob(a: oft.Numpy.Placeholder((3, 4))):
    return flow.acc(flow.repeat(a, 3), 3)