Example #1
def _convert_batch_normalization_function(
    converter: ChainerConverter, c_op:
    "chainer.functions.normalization.batch_normalization.BatchNormalizationFunction"
):
    x = converter.get_variable(c_op.inputs[0])
    unify(x.order.axes[0], Axis.N)
    unify(x.order.axes[1], Axis.C)

    gamma = converter.get_variable(c_op.inputs[1])
    unify_order(gamma.order, OrderC)

    beta = converter.get_variable(c_op.inputs[2])
    unify_order(beta.order, OrderC)

    if len(c_op.inputs) == 5:
        mean = converter.get_variable(c_op.inputs[3])
        unify_order(mean.order, OrderC)

        variance = converter.get_variable(c_op.inputs[4])
        unify_order(variance.order, OrderC)

    elif len(c_op.inputs) == 3:
        mean = 0 if c_op.running_mean is None else ConstantVariable(
            c_op.running_mean, OrderC)
        variance = 1 if c_op.running_var is None else ConstantVariable(
            c_op.running_var, OrderC)

    else:
        raise ValueError(
            "The number of inputs to BatchNormalizationFunction must be 3 or 5.")

    y = (x - mean) / ((variance + c_op.eps)**0.5) * gamma + beta
    converter.set_variable(c_op.outputs[0](), y)
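The y expression above is the standard inference-mode batch normalization formula, y = (x - mean) / sqrt(variance + eps) * gamma + beta, built from WebDNN's overloaded elementwise operators. A minimal NumPy sketch of the same formula, independent of the converter:

import numpy as np

x = np.random.randn(100, 3).astype(np.float32)  # OrderNC: batch of 100, 3 channels
gamma, beta = np.ones(3, np.float32), np.zeros(3, np.float32)
mean, variance, eps = x.mean(axis=0), x.var(axis=0), 2e-5

y = (x - mean) / ((variance + eps) ** 0.5) * gamma + beta
assert np.allclose(y.mean(axis=0), 0, atol=1e-5)  # per-channel zero mean
assert np.allclose(y.std(axis=0), 1, atol=1e-2)   # per-channel unit variance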
Example #2
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])  # type: ConstantVariable

    x2, = Reshape(None,
                  in_order=x.order,
                  out_order=OrderNC,
                  out_shape=[x.shape[0], mul(x.shape[1:])])(x)
    w2, = ReinterpretAxis(None, in_order=w.order, out_order=OrderNC)(w)
    w2, = Transpose(None)(w2)
    w2.change_order(OrderCN)

    y, = Linear(None)(x2, w2)
    y, = ReinterpretAxis(None,
                         in_order=y.order,
                         out_order=Order([x.order.axes[0],
                                          w.order.axes[0]]))(y)

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        check_broadcast_constraints(y, b)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
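The net effect of this handler: x is flattened to 2-D and multiplied by the transposed weight, which is exactly what chainer's LinearFunction (y = xWᵀ) computes. A plain NumPy sketch of the same data movement; the shapes here are illustrative assumptions:

import numpy as np

x = np.random.randn(8, 3, 4, 4).astype(np.float32)     # e.g. an NCHW input
w = np.random.randn(10, 3 * 4 * 4).astype(np.float32)  # chainer stores W as (out_features, in_features)

x2 = x.reshape(x.shape[0], -1)  # Reshape(..., out_order=OrderNC)
y = x2 @ w.T                    # Linear after transposing W to OrderCN
assert y.shape == (8, 10)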
Example #3
def _convert_logsumexp(converter: ChainerConverter,
                       c_op: "chainer.functions.LogSumExp"):
    x = converter.get_variable(c_op.inputs[0])

    if c_op.axis is None:
        axes = list(x.order.axes)
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    # TODO: Conversion result is wrong when x.shape[category_axis] is a placeholder.
    if any(not Placeholder.check_resolved(x.shape_dict[axis])
           for axis in axes):
        raise NotImplementedError(
            "[ChainerConverter] \"LogSumExp\" for dynamic number of categories is not supported"
        )

    max_x = x
    for axis in axes:
        max_x, = Max(None, axis=axis)(max_x)
    exp_delta_x, = Exp(None)(x - max_x)

    sum_exp_delta_x = exp_delta_x
    for axis in axes:
        sum_exp_delta_x, = Sum(None, axis=axis)(sum_exp_delta_x)

    y = Log(None)(sum_exp_delta_x)[0] + max_x
    converter.set_variable(c_op.outputs[0](), y)
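The max subtraction is the usual numerically stable log-sum-exp identity, log sum_i exp(x_i) = log sum_i exp(x_i - m) + m with m = max(x); it keeps the intermediate exponentials from overflowing. A small NumPy sketch of why the shift matters:

import numpy as np

x = np.array([1000.0, 1000.0])
naive = np.log(np.sum(np.exp(x)))           # exp(1000) overflows -> inf
m = np.max(x)
stable = np.log(np.sum(np.exp(x - m))) + m  # = 1000 + log(2)
assert np.isinf(naive)
assert np.isclose(stable, 1000 + np.log(2))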
Example #4
def _convert_reshape(converter: ChainerConverter,
                     c_op: "chainer.functions.Reshape"):
    assert len(c_op.inputs) == 1, \
        f"For 'Reshape' operator in chainer, expected number of inputs is 1, but actual is {len(c_op.inputs)}"

    x = converter.get_variable(c_op.inputs[0])

    out_shape = list(c_op.shape)  # c_op.shape is a tuple
    if len(out_shape) == 1:
        out_order = OrderC
    elif len(out_shape) == 2:
        out_order = OrderNC
    elif len(out_shape) == 4:
        out_order = OrderNCHW
    else:
        raise NotImplementedError(
            "Reshaping into a shape whose number of dimensions is not 1, 2, or 4 is not supported.")
    assert mul(out_shape) == x.size

    y, = Reshape(None,
                 in_order=x.order,
                 out_order=out_order,
                 out_shape=out_shape)(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #5
def _convert_mat_mul_var_var(
        converter: ChainerConverter,
        c_op: "chainer.functions.math.basic_math.MatMulVarVar"):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])
    y, = Tensordot(None, axes=[x1.order.axes[1], x2.order.axes[0]])(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
Example #6
def _convert_get_item(converter: ChainerConverter, c_op: "chainer.functions.GetItem"):
    x = converter.get_variable(c_op.inputs[0])
    if any(not Placeholder.check_resolved(v) for v in x.shape):
        raise NotImplementedError("[ChainerConverter] \"GetItem\" for a dynamic shape variable is not supported")

    y = x[c_op.slices]
    converter.set_variable(c_op.outputs[0](), y)
Example #7
def _convert_dilated_convolution_2d(
    converter: ChainerConverter, c_op:
    "chainer.functions.connection.dilated_convolution_2d.DilatedConvolution2DFunction"
):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])

    x.order.unify(OrderNCHW)
    w.order.unify(OrderNCHW)

    # When the dilation rate is 1, this reduces to an ordinary convolution.
    conv_opr = Convolution2D(None,
                             ksize=(w.shape_dict[Axis.H],
                                    w.shape_dict[Axis.W]),
                             stride=(c_op.sy, c_op.sx),
                             padding=(c_op.ph, c_op.pw),
                             dilation_rate=(c_op.dy, c_op.dx))  # (vertical, horizontal), matching stride/padding

    y, = conv_opr(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        bias = converter.get_variable(c_op.inputs[2])
        bias.order.unify(OrderC)
        y = y + bias

    converter.set_variable(c_op.outputs[0](), y)
Example #8
def _convert_sub(converter: ChainerConverter,
                 c_op: "chainer.functions.math.basic_math.Sub"):
    x0 = converter.get_variable(c_op.inputs[0])
    x1 = converter.get_variable(c_op.inputs[1])
    check_broadcast_constraints(x0, x1)
    y = x0 - x1
    converter.set_variable(c_op.outputs[0](), y)
Example #9
def _convert_crelu(converter: ChainerConverter,
                   c_op: "chainer.functions.CReLU"):
    x = converter.get_variable(c_op.inputs[0])
    y1, = Relu(None)(x)
    y2, = Relu(None)(-x)
    y, = Concat(None, axis=x.order.axes[c_op.axis])(y1, y2)
    converter.set_variable(c_op.outputs[0](), y)
Example #10
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    linear_opr = Linear(None)

    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])
    if x.ndim == 4 and w.ndim == 2:
        # Expand w to 4 dimensions (NC -> NCHW)
        x_shape_dict = x.shape_dict
        w_shape_dict = w.shape_dict
        assert (x_shape_dict[Axis.C] * x_shape_dict[Axis.H] *
                x_shape_dict[Axis.W] == w_shape_dict[Axis.C])
        assert w.order is OrderNC
        w.order = OrderNCHW
        w_new_shape = [
            w_shape_dict[Axis.N], x_shape_dict[Axis.C], x_shape_dict[Axis.H],
            x_shape_dict[Axis.W]
        ]
        w.shape = w_new_shape
        w.data = w.data.reshape(w_new_shape)

    y, = linear_opr(x, w)
    if len(c_op.inputs) == 3:
        # with bias
        bias_opr = AxiswiseBias(None, axis=Axis.C)
        bias = converter.get_variable(c_op.inputs[2])
        y, = bias_opr(y, bias)

    converter.set_variable(c_op.outputs[0](), y)
Example #11
def _convert_dropout(converter: ChainerConverter,
                     c_op: "chainer.functions.Dropout"):
    console.warning("Dropout is omitted")

    x = converter.get_variable(c_op.inputs[0])

    converter.set_variable(c_op.outputs[0](), x)
Example #12
def _convert_transpose(converter: ChainerConverter,
                       c_op: "chainer.functions.Transpose"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Transpose(None)(x)
    y.change_order(Order([x.order.axes[axis] for axis in c_op.axes]))

    converter.set_variable(c_op.outputs[0](), y)
Example #13
def _convert_broadcast_to(converter: ChainerConverter, c_op: "chainer.functions.BroadcastTo"):
    x = converter.get_variable(c_op.inputs[0])
    if any(not Placeholder.check_resolved(v) for v in x.shape):
        raise NotImplementedError("[ChainerConverter] \"BroadcastTo\" for a dynamic shape variable is not supported")

    # noinspection PyProtectedMember
    y, = Broadcast(None, out_shape=c_op._shape, out_order=x.order)(x)
    converter.set_variable(c_op.outputs[0](), y)
Example #14
def _convert_swapaxes(converter: ChainerConverter, c_op: "chainer.functions.Swapaxes"):
    x = converter.get_variable(c_op.inputs[0])
    index = list(range(x.ndim))
    index[c_op.axis1] = c_op.axis2
    index[c_op.axis2] = c_op.axis1
    y = x.transpose(Order([x.order.axes[i] for i in index]))

    converter.set_variable(c_op.outputs[0](), y)
Example #15
def _convert_log1p(converter: ChainerConverter,
                   c_op: "chainer.functions.Log1p"):
    console.warning(
        "[ChainerConverter] In WebDNN, \"Log1p(x)\" is converted into \"Log(1+x)\", which is not as accurate as Log1p "
        "when x is so small that \"1 + x == 1\" in floating point accuracy.")
    x = converter.get_variable(c_op.inputs[0])
    y, = Log(None)(x + 1)
    converter.set_variable(c_op.outputs[0](), y)
Example #16
def _convert_concat(converter: ChainerConverter, c_op: "chainer.functions.Concat"):
    xs = [converter.get_variable(x) for x in c_op.inputs]

    for x1, x2 in combinations(xs, 2):
        x1.order.unify(x2.order)

    y, = Concat(None, axis=xs[0].order.axes[c_op.axis])(*xs)
    converter.set_variable(c_op.outputs[0](), y)
Example #17
def _convert_expm1(converter: ChainerConverter,
                   c_op: "chainer.functions.Expm1"):
    console.warning(
        "[ChainerConverter] In WebDNN, \"Expm1(x)\" is converted into \"Exp(x)-1\", which is not as accurate as Expm1 "
        "when x is so small that \"Exp(x) == 1\" in floating point accuracy.")
    x = converter.get_variable(c_op.inputs[0])
    y = Exp(None)(x)[0] - 1
    converter.set_variable(c_op.outputs[0](), y)
Example #18
def _convert_split_axis(converter: ChainerConverter, c_op: "chainer.functions.SplitAxis"):
    x = converter.get_variable(c_op.inputs[0])

    if isinstance(c_op.indices_or_sections, int):
        raise NotImplementedError("[ChainerConverter] SplitAxis with an integer number of sections is not supported.")

    ys = SplitAxis(None, sections=c_op.indices_or_sections, axis=x.order.axes[c_op.axis])(x)
    for i, y in enumerate(ys):
        converter.set_variable(c_op.outputs[i](), y)
Example #19
def _convert_local_response_normalization(converter: ChainerConverter,
                                          c_op: "chainer.functions.normalization.local_response_normalization.LocalResponseNormalization"):
    x = converter.get_variable(c_op.inputs[0])

    n_opr = LocalResponseNormalization(None, n=c_op.n, k=c_op.k, alpha=c_op.alpha, beta=c_op.beta)

    y, = n_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #20
def _convert_flip_ud(converter: ChainerConverter,
                     c_op: "chainer.functions.FlipUD"):
    x = converter.get_variable(c_op.inputs[0])
    if any(not Placeholder.check_resolved(v) for v in x.shape):
        raise NotImplementedError(
            "[ChainerConverter] \"FlipUD\" for dynamic shape variable is not supported "
        )

    converter.set_variable(c_op.outputs[0](), x[::-1, :])
Example #21
def _convert_mul(converter: ChainerConverter, c_op: "chainer.functions.math.basic_math.Mul"):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])

    x1 = x1.reinterpret_axes(OrderNC)
    x2 = x2.reinterpret_axes(OrderCN)

    y, = Linear(None)(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
Example #22
def _convert_normalize_l2(converter: ChainerConverter,
                          c_op: "chainer.functions.NormalizeL2"):
    x = converter.get_variable(c_op.inputs[0])

    if len(c_op.axis) > 1:  # c_op.axis: tuple
        raise ValueError("The number of axis for NormalizeL2 must be 1.")
    y, = Normalize(None, axis=x.order.axes[c_op.axis[0]], eps=c_op.eps)(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #23
def _convert_mul(converter: ChainerConverter, c_op: "chainer.functions.math.basic_math.Mul"):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])

    x1, = ReinterpretAxis(None, x1.order, OrderNC)(x1)
    x2, = ReinterpretAxis(None, x2.order, OrderCN)(x2)

    y, = Linear(None)(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
Example #24
def _convert_flatten(converter: ChainerConverter,
                     c_op: "chainer.functions.Flatten"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Reshape(None, in_order=x.order, out_shape=[x.size], out_order=OrderC)(x)
    converter.set_variable(c_op.outputs[0](), y)

    console.warning(
        "[ChainerConverter] In chainer.functions.Flatten, output data order is parsed as OrderC. To "
        "customize this, please overwrite chainer.functions.Flatten converter handler."
    )
Example #25
def _convert_maximum(converter: ChainerConverter,
                     c_op: "chainer.functions.Maximum"):
    x = converter.get_variable(c_op.inputs[0])
    y = converter.get_variable(c_op.inputs[1])

    check_broadcast_constraints(x, y)

    tmp, = Greater(None)(x, y)
    z = x * tmp + y * (1 - tmp)
    converter.set_variable(c_op.outputs[0](), z)
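Greater yields a 0/1 mask, so x * tmp + y * (1 - tmp) selects x wherever x > y and y elsewhere, i.e. the elementwise maximum. A quick NumPy check of the identity:

import numpy as np

x, y = np.random.randn(5), np.random.randn(5)
tmp = (x > y).astype(x.dtype)  # 0/1 mask, analogous to Greater
z = x * tmp + y * (1 - tmp)
assert np.array_equal(z, np.maximum(x, y))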
Example #26
def _convert_softmax(converter: ChainerConverter,
                     c_op: "chainer.functions.Softmax"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Softmax(None, axis=x.order.axes[c_op.axis])(x)

    if flags.AGGRESSIVE_ORDER_INFERENCE:
        # In most cases, softmax is performed along Axis.C
        unify(y.order.axes[c_op.axis], Axis.C)

    converter.set_variable(c_op.outputs[0](), y)
Example #27
def _convert_minimum(converter: ChainerConverter,
                     c_op: "chainer.functions.Minimum"):
    x = converter.get_variable(c_op.inputs[0])
    y = converter.get_variable(c_op.inputs[1])

    check_broadcast_constraints(x, y)

    tmp = x > y
    z = x * (1 - tmp) + y * tmp
    converter.set_variable(c_op.outputs[0](), z)
Example #28
def _convert_squeeze(converter: ChainerConverter, c_op: "chainer.functions.Squeeze"):
    x = converter.get_variable(c_op.inputs[0])
    if c_op.axis is None:
        axes = [a for a in x.order.axes if x.shape_dict[a] == 1]
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    for axis in axes:
        x = x.squeeze(axis)

    converter.set_variable(c_op.outputs[0](), x)
Example #29
    def handler(converter: ChainerConverter, c_op: "chainer.Function"):
        a = converter.get_variable(c_op.inputs[0])
        b = converter.get_variable(c_op.inputs[1])

        # Broadcasting
        check_broadcast_constraints(a, b)

        c, = OperatorClass(None)(a, b)

        # Each chainer function holds output variables as weak reference
        converter.set_variable(c_op.outputs[0](), c)
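Since the body is generic over OperatorClass, a handler like this is presumably built inside a factory and registered once per binary elementwise function. A hedged sketch of such a factory; the factory name and the register_handler call are assumptions modeled on the surrounding handlers:

def _register_elementwise_binary(chainer_function_name: str, OperatorClass):
    # Hypothetical factory: builds and registers the generic handler above
    # for one chainer function / WebDNN operator pair.
    def handler(converter: ChainerConverter, c_op: "chainer.Function"):
        a = converter.get_variable(c_op.inputs[0])
        b = converter.get_variable(c_op.inputs[1])
        check_broadcast_constraints(a, b)
        c, = OperatorClass(None)(a, b)
        converter.set_variable(c_op.outputs[0](), c)

    ChainerConverter.register_handler(chainer_function_name)(handler)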
Example #30
def _convert_min(converter: ChainerConverter, c_op: "chainer.functions.Min"):
    x = converter.get_variable(c_op.inputs[0])
    if c_op.axis is None:
        axes = list(x.order.axes)
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    for axis in axes:
        x, = Min(None, axis=axis)(x)

        if not c_op.keepdims and x.ndim > 1:
            x = x.squeeze(axis)

    converter.set_variable(c_op.outputs[0](), x)
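WebDNN reduces over symbolic Axis objects, so the iteration order does not matter here; a plain NumPy equivalent must instead reduce integer axes from the highest index down, since each reduction shifts the indices of later axes. A sketch of the NumPy counterpart (illustration only, not converter code):

import numpy as np

x = np.random.randn(2, 3, 4)
for axis in sorted((0, 2), reverse=True):  # reduce from the highest axis index
    x = x.min(axis=axis)                   # NumPy drops the reduced axis
assert x.shape == (3,)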