Example #1
def _convert_unpooling2d(converter: ChainerConverter,
                         c_op: "chainer.functions.Unpooling2D"):
    x = converter.get_variable(c_op.inputs[0])
    unify_order(x.order, OrderNCHW)
    pool_opr = Unpooling2D(None,
                           ksize=(c_op.kh, c_op.kw),
                           stride=(c_op.sy, c_op.sx),
                           padding=(c_op.ph, c_op.pw),
                           outsize=(c_op.outh, c_op.outw))

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #2
def _convert_split_axis(converter: ChainerConverter,
                        c_op: "chainer.functions.SplitAxis"):
    x = converter.get_variable(c_op.inputs[0])

    if isinstance(c_op.indices_or_sections, int):
        raise NotImplementedError(
            "[ChainerConverter] SplitAxis with sections is not supported.")

    ys = SplitAxis(None,
                   sections=c_op.indices_or_sections,
                   axis=x.order.axes[c_op.axis])(x)
    for wref_c_y, w_y in zip(c_op.outputs, ys):
        converter.set_variable(wref_c_y(), w_y)
Example #3
def _convert_reshape(converter: ChainerConverter,
                     c_op: "chainer.functions.Reshape"):
    x = converter.get_variable(c_op.inputs[0])

    out_shape = c_op.shape
    out_order = Order([None] * len(out_shape))
    assert mul(out_shape) == x.size, \
        f"[ChainerConverter] Shape mismatch: mul(out_shape)={mul(out_shape)}, x.size={x.size}"

    y = x.reshape(out_shape, out_order)

    converter.set_variable(c_op.outputs[0](), y)
Example #4
def _convert_fixed_batch_normalization(
        converter: ChainerConverter,
        c_op: "chainer.functions.normalization.batch_normalization.FixedBatchNormalization"):
    x = converter.get_variable(c_op.inputs[0])
    x.order.axes[0].unify(Axis.N)
    x.order.axes[1].unify(Axis.C)

    gamma = converter.get_variable(c_op.inputs[1])
    gamma.order.unify(OrderC)

    beta = converter.get_variable(c_op.inputs[2])
    beta.order.unify(OrderC)

    mean = converter.get_variable(c_op.inputs[3])
    mean.order.unify(OrderC)

    variance = converter.get_variable(c_op.inputs[4])
    variance.order.unify(OrderC)

    y = (x - mean) / ((variance + c_op.eps)**0.5) * gamma + beta
    converter.set_variable(c_op.outputs[0](), y)
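The expression above inlines the inference-time batch-normalization formula y = gamma * (x - mean) / sqrt(variance + eps) + beta, broadcast per channel. A minimal NumPy sketch of the same computation (hypothetical shapes, not WebDNN code):

import numpy as np

def fixed_batch_norm(x, gamma, beta, mean, variance, eps=2e-5):
    # x: (N, C, H, W); per-channel statistics broadcast over N, H and W.
    c = (1, -1, 1, 1)
    return ((x - mean.reshape(c)) / np.sqrt(variance.reshape(c) + eps)
            * gamma.reshape(c) + beta.reshape(c))

x = np.random.randn(2, 3, 4, 4).astype(np.float32)
gamma, beta = np.ones(3, np.float32), np.zeros(3, np.float32)
y = fixed_batch_norm(x, gamma, beta, x.mean(axis=(0, 2, 3)), x.var(axis=(0, 2, 3)))
assert np.allclose(y.mean(axis=(0, 2, 3)), 0, atol=1e-4)  # per-channel mean removed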
Example #5
def _convert_tile(converter: ChainerConverter, c_op: "chainer.functions.Tile"):
    x = converter.get_variable(c_op.inputs[0])
    reps = c_op.reps

    if x.ndim > len(reps):
        reps = (1, ) * (x.ndim - len(reps)) + reps

    else:
        while x.ndim < len(c_op.reps):
            x = x.expand_dims(Axis(), 0)

    y, = Tile(None, AxisKeyDict(x.order.axes, reps))(x)
    converter.set_variable(c_op.outputs[0](), y)
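The two branches reproduce numpy.tile's rank alignment: a short reps tuple is left-padded with 1s, while a low-rank x gains leading length-1 axes. A NumPy illustration of both cases (not WebDNN code):

import numpy as np

x = np.arange(6).reshape(2, 3)
# reps shorter than x.ndim: (2,) is treated as (1, 2).
assert np.tile(x, (2,)).shape == (2, 6)
# reps longer than x.ndim: x is promoted to shape (1, 2, 3).
assert np.tile(x, (2, 1, 1)).shape == (2, 2, 3)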
Example #6
def _convert_deconvolution_2d(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.deconvolution_2d.Deconvolution2DFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])

    deconv_opr = Deconvolution2D(None,
                                 ksize=(w.shape_dict[Axis.H],
                                        w.shape_dict[Axis.W]),
                                 stride=(c_op.sy, c_op.sx),
                                 padding=(c_op.ph, c_op.pw))

    y, = deconv_opr(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        bias_opr = AxiswiseBias(None, axis=Axis.C)
        bias = converter.get_variable(c_op.inputs[2])
        y, = bias_opr(y, bias)

    converter.set_variable(c_op.outputs[0](), y)
Example #7
def _convert_softmax(converter: ChainerConverter,
                     c_op: "chainer.functions.Softmax"):
    x = converter.get_variable(c_op.inputs[0])

    # chainer.functions.softmax supports the "axis" parameter since v1.24.
    # Compare parsed version components; a plain string comparison would
    # order "1.9" after "1.24".
    if tuple(map(int, chainer.__version__.split(".")[:2])) < (1, 24):
        axis = 1
    else:
        axis = c_op.axis

    y, = Softmax(None, axis=x.order.axes[axis])(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #8
def _convert_dilated_convolution_2d(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.dilated_convolution_2d.DilatedConvolution2DFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])

    # When the dilation rate is 1, this is an ordinary convolution.
    conv_opr = Convolution2D(None,
                             ksize=(w.shape_dict[Axis.H],
                                    w.shape_dict[Axis.W]),
                             stride=(c_op.sy, c_op.sx),
                             padding=(c_op.ph, c_op.pw),
                             # height-first, consistent with ksize/stride/padding
                             dilation_rate=(c_op.dy, c_op.dx))

    y, = conv_opr(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        bias_opr = AxiswiseBias(None, axis=Axis.C)
        bias = converter.get_variable(c_op.inputs[2])
        y, = bias_opr(y, bias)

    converter.set_variable(c_op.outputs[0](), y)
Example #9
def _convert_elu(converter: ChainerConverter, c_op: "chainer.functions.ELU"):
    x = converter.get_variable(c_op.inputs[0])
    if c_op.alpha == 0:
        y, = Relu(None)(x)

    elif c_op.alpha == 1:
        y, = Elu(None)(x)

    else:
        # elu_alpha(x) == alpha * elu(x) + (1 - alpha) * relu(x)
        y1, = Elu(None)(x)
        y2, = Relu(None)(x)
        y = (y1 * c_op.alpha) + y2 * (1 - c_op.alpha)

    converter.set_variable(c_op.outputs[0](), y)
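The general-alpha branch relies on the identity elu_alpha(x) = alpha * elu(x) + (1 - alpha) * relu(x): both terms equal x for x >= 0, and only the ELU term is nonzero for x < 0. A quick NumPy check of the identity (not WebDNN code):

import numpy as np

def elu(x, alpha=1.0):
    return np.where(x >= 0, x, alpha * (np.exp(x) - 1))

x = np.linspace(-3.0, 3.0, 13)
alpha = 0.3
assert np.allclose(elu(x, alpha),
                   alpha * elu(x) + (1 - alpha) * np.maximum(x, 0))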
Example #10
def _convert_local_response_normalization(
        converter: ChainerConverter,
        c_op: "chainer.functions.normalization.local_response_normalization.LocalResponseNormalization"):
    x = converter.get_variable(c_op.inputs[0])

    n_opr = LocalResponseNormalization(None,
                                       n=c_op.n,
                                       k=c_op.k,
                                       alpha=c_op.alpha,
                                       beta=c_op.beta)

    y, = n_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #11
def _convert_mat_mul(converter: ChainerConverter,
                     c_op: "chainer.functions.MatMul"):
    x0 = converter.get_variable(c_op.inputs[0])
    x1 = converter.get_variable(c_op.inputs[1])
    if x0.order.axes[1 if c_op.transa else 0] == x1.order.axes[0 if c_op.transb else 1]:
        # The non-contracted axes coincide; give x1 fresh anonymous axes so the
        # output does not contain a duplicated axis.
        x1 = x1.reinterpret_axes(Order([None, None]))

    y, = Tensordot(None,
                   axes=[
                       x0.order.axes[0 if c_op.transa else 1],
                       x1.order.axes[1 if c_op.transb else 0]
                   ])(x0, x1)
    converter.set_variable(c_op.outputs[0](), y)
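The Tensordot axes select the contracted dimension of each operand, so transa/transb are handled without materializing a transpose. A plain NumPy analogue of the same axis choice (illustrative only):

import numpy as np

def matmul(a, b, transa=False, transb=False):
    # Contract a's axis 0 if transposed (else 1) with b's axis 1 if
    # transposed (else 0), mirroring the axis selection above.
    return np.tensordot(a, b, axes=(0 if transa else 1, 1 if transb else 0))

a, b = np.random.randn(2, 3), np.random.randn(3, 4)
assert np.allclose(matmul(a, b), a @ b)
assert np.allclose(matmul(a.T, b, transa=True), a @ b)
assert np.allclose(matmul(a, b.T, transb=True), a @ b)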
Example #12
def _convert_average_pooling2d(converter: ChainerConverter,
                               c_op: "chainer.functions.AveragePooling2D"):
    x = converter.get_variable(c_op.inputs[0])
    x.order.unify(OrderNCHW)

    pool_opr = AveragePooling2D(None,
                                ksize=(c_op.kh, c_op.kw),
                                stride=(c_op.sy, c_op.sx),
                                padding=(c_op.ph, c_op.pw),
                                cover_all=c_op.cover_all)

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #13
def _convert_linear_function(converter: ChainerConverter, c_op: "chainer.functions.connection.linear.LinearFunction"):
    linear_opr = Linear(None)

    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])
    if x.ndim == 4 and w.ndim == 2:
        # Expand w to 4 dimensions (NC -> NCHW) to match x
        x_shape_dict = x.shape_dict
        w_shape_dict = w.shape_dict
        assert x_shape_dict[Axis.C] * x_shape_dict[Axis.H] * x_shape_dict[Axis.W] == w_shape_dict[Axis.C]
        assert w.order is OrderNC
        w.order = OrderNCHW
        w_new_shape = [w_shape_dict[Axis.N], x_shape_dict[Axis.C], x_shape_dict[Axis.H],
                       x_shape_dict[Axis.W]]
        w.shape = w_new_shape
        w.data = w.data.reshape(w_new_shape)

    y, = linear_opr(x, w)
    if len(c_op.inputs) == 3:
        # with bias
        bias = converter.get_variable(c_op.inputs[2])
        y = y + bias

    converter.set_variable(c_op.outputs[0](), y)
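Reshaping the NC weight into NCHW lets Linear consume the 4-d activation directly; numerically it is the same as flattening x first, since both flatten the CHW block in the same C-order. A NumPy analogue (not WebDNN code):

import numpy as np

x = np.random.randn(2, 3, 4, 4)      # (N, C, H, W)
w = np.random.randn(10, 3 * 4 * 4)   # (N_out, C*H*W)
y_flat = x.reshape(2, -1) @ w.T
y_4d = np.tensordot(x, w.reshape(10, 3, 4, 4), axes=([1, 2, 3], [1, 2, 3]))
assert np.allclose(y_flat, y_4d)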
Example #14
def _convert_sum(converter: ChainerConverter, c_op: "chainer.functions.Sum"):
    x = converter.get_variable(c_op.inputs[0])

    if c_op.axis is None:
        axes = list(x.order.axes)
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    # chainer.functions.sum supports the "keepdims" parameter since v1.24.
    # Compare parsed version components; a plain string comparison would
    # order "1.9" after "1.24".
    keepdims = (tuple(map(int, chainer.__version__.split(".")[:2])) >= (1, 24)
                and c_op.keepdims)

    for axis in axes:
        x, = Sum(None, axis=axis)(x)
        if not keepdims or x.ndim == 1:
            x = x.squeeze(axis)

    converter.set_variable(c_op.outputs[0](), x)
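Reducing one axis per Sum call is equivalent to a multi-axis sum; because WebDNN identifies axes by Axis objects rather than by position, no index shifting is needed between iterations. A NumPy illustration of the positional shift that the Axis objects avoid (not WebDNN code):

import numpy as np

x = np.random.randn(2, 3, 4)
# After summing axis 0, the original axis 2 sits at index 1.
assert np.allclose(x.sum(axis=0).sum(axis=1), x.sum(axis=(0, 2)))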
Example #15
def _convert_max_pooling2d(converter: ChainerConverter, c_op: "chainer.functions.MaxPooling2D"):
    if not c_op.cover_all:
        raise NotImplementedError("'cover_all=False' property in 'MaxPooling2D' is not supported.")

    x = converter.get_variable(c_op.inputs[0])
    unify_order(x.order, OrderNCHW)

    pool_opr = MaxPooling2D(None,
                            ksize=(c_op.kh, c_op.kw),
                            stride=(c_op.sy, c_op.sx),
                            padding=(c_op.ph, c_op.pw))

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #16
def _convert_dilated_convolution_2d(converter: ChainerConverter,
                                    c_op: "chainer.functions.connection.dilated_convolution_2d.DilatedConvolution2DFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])

    unify_order(x.order, OrderNCHW)
    unify_order(w.order, OrderNCHW)

    # When the dilation rate is 1, this is an ordinary convolution.
    conv_opr = Convolution2D(None,
                             ksize=(w.shape_dict[Axis.H], w.shape_dict[Axis.W]),
                             stride=(c_op.sy, c_op.sx),
                             padding=(c_op.ph, c_op.pw),
                             # height-first, consistent with ksize/stride/padding
                             dilation_rate=(c_op.dy, c_op.dx))

    y, = conv_opr(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        bias = converter.get_variable(c_op.inputs[2])
        unify_order(bias.order, OrderC)
        y = y + bias

    converter.set_variable(c_op.outputs[0](), y)
Example #17
def _convert_deconvolution_2d(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.deconvolution_2d.Deconvolution2DFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])

    x.order.unify(OrderNCHW)
    # Chainer's deconvolution weight is laid out as (C_in, C_out, kH, kW).
    w.order.unify(OrderCNHW)

    deconv_opr = Deconvolution2D(None,
                                 ksize=(w.shape_dict[Axis.H],
                                        w.shape_dict[Axis.W]),
                                 stride=(c_op.sy, c_op.sx),
                                 padding=(c_op.ph, c_op.pw))

    y, = deconv_opr(x, w)

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        b.order.unify(OrderC)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
Example #18
def _convert_max(converter: ChainerConverter, c_op: "chainer.functions.Max"):
    x = converter.get_variable(c_op.inputs[0])

    remove_axes = []

    if c_op.axis is None:
        axes = list(x.order.axes)
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    for axis in axes:
        x, = Max(None, axis=axis)(x)
        if not c_op.keepdims and x.ndim > 1:
            remove_axes.append(axis)

    if not c_op.keepdims and x.ndim > 1:
        x = x.squeeze(remove_axes)

    converter.set_variable(c_op.outputs[0](), x)
Example #19
def _convert_im2col(converter: ChainerConverter, c_op: "chainer.functions.Im2Col"):
    x = converter.get_variable(c_op.inputs[0])
    if any(not Placeholder.check_resolved(v) for v in x.shape):
        raise NotImplementedError("[ChainerConverter] \"Im2Col\" for dynamic shape variable is not supported")

    x.order.unify(OrderNCHW)
    if c_op.cover_all:
        raise NotImplementedError("[ChainerConverter] \"Im2Col\" function with \"cover_all=True\" is not supported")

    y, = Im2Col(None,
                ksize=(c_op.kh, c_op.kw),
                stride=(c_op.sy, c_op.sx),
                padding=(c_op.ph, c_op.pw),
                dilation_rate=(c_op.dy, c_op.dx))(x)

    y = y.combine_axes([Axis.C, Axis.KH, Axis.KW], Axis.C).change_order(OrderNCHW)
    converter.set_variable(c_op.outputs[0](), y)
Example #20
def _convert_reshape(converter: ChainerConverter, c_op: "chainer.functions.Reshape"):
    x = converter.get_variable(c_op.inputs[0])
    if any(not Placeholder.check_resolved(v) for v in x.shape):
        raise NotImplementedError("[ChainerConverter] \"Reshape\" for dynamic shape variable is not supported ")

    out_shape = list(c_op.shape)
    out_order = Order([None] * len(out_shape))
    if -1 in out_shape:
        i = out_shape.index(-1)
        out_shape.pop(i)
        out_shape.insert(i, x.size // mul(out_shape))

    assert mul(out_shape) == x.size, f"[ChainerConverter] Shape mismatch: mul(out_shape)={mul(out_shape)}, x.size={x.size}"

    y = x.reshape(out_shape, out_order)

    converter.set_variable(c_op.outputs[0](), y)
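The -1 handling mirrors NumPy's reshape convention: the single free dimension is inferred so that the element count is preserved. A worked example in plain Python, with math.prod standing in for the listing's mul helper:

from math import prod

out_shape = [2, -1, 3]
size = 24
i = out_shape.index(-1)
out_shape.pop(i)
out_shape.insert(i, size // prod(out_shape))  # 24 // (2 * 3) == 4
assert out_shape == [2, 4, 3]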
Example #21
def _convert_reshape(converter: ChainerConverter,
                     c_op: "chainer.functions.Reshape"):
    x = converter.get_variable(c_op.inputs[0])

    out_shape = c_op.shape
    # noinspection PyTypeChecker
    out_order = Order([AxisVar() for _ in out_shape])
    assert mul(out_shape) == x.size, \
        f"[ChainerConverter] Shape mismatch: mul(out_shape)={mul(out_shape)}, x.size={x.size}"

    y, = Reshape(None,
                 in_order=x.order,
                 out_order=out_order,
                 out_shape=out_shape)(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #22
def _convert_log_softmax(converter: ChainerConverter,
                         c_op: "chainer.functions.LogSoftmax"):
    x = converter.get_variable(c_op.inputs[0])
    axis = x.order.axes[1]

    # TODO: Conversion result is wrong in case x.shape[1] is placeholder.
    if not Placeholder.check_resolved(x.shape[1]):
        raise NotImplementedError(
            "[ChainerConverter] \"LogSoftmax\" for dynamic number of categories is not supported"
        )

    max_x, = Max(None, axis=axis)(x)
    exp_delta_x, = Exp(None)(x - max_x)
    sum_exp_delta_x, = Sum(None, axis=axis)(exp_delta_x)
    log_sum_delta_exp, = Log(None)(sum_exp_delta_x)

    y = x - (log_sum_delta_exp + max_x)
    converter.set_variable(c_op.outputs[0](), y)
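Subtracting max_x is the standard numerical-stability shift: log_softmax(x) = x - (log(sum(exp(x - m))) + m) holds for any constant m, conventionally m = max(x), so the exponential never overflows. A NumPy check of the identity (illustrative only):

import numpy as np

x = np.random.randn(4, 5)
m = x.max(axis=1, keepdims=True)
stable = x - (np.log(np.exp(x - m).sum(axis=1, keepdims=True)) + m)
naive = np.log(np.exp(x) / np.exp(x).sum(axis=1, keepdims=True))
assert np.allclose(stable, naive)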
Example #23
def _convert_average_pooling2d(converter: ChainerConverter, c_op: "chainer.functions.AveragePooling2D"):
    x = converter.get_variable(c_op.inputs[0])
    x.order.unify(OrderNCHW)

    pool_opr = AveragePooling2D(None,
                                ksize=(c_op.kh, c_op.kw),
                                stride=(c_op.sy, c_op.sx),
                                padding=(c_op.ph, c_op.pw))

    y, = pool_opr(x)

    # cover_all changes the output size only when the pooling windows do not
    # tile the input exactly, so test the input shape, not the output.
    if ((x.shape_dict[Axis.H] + c_op.ph * 2 - c_op.kh) % c_op.sy != 0) or ((x.shape_dict[Axis.W] + c_op.pw * 2 - c_op.kw) % c_op.sx != 0):
        console.warning(
            "[AveragePooling2D] AveragePooling2D in chainer is performed in cover_all=False mode. "
            "However, AveragePooling2D in WebDNN is always calculated in cover_all=True mode. "
            "Therefore the result may differ from chainer's output.")

    converter.set_variable(c_op.outputs[0](), y)
Example #24
def _convert_reshape(converter: ChainerConverter,
                     c_op: "chainer.functions.Reshape"):
    x = converter.get_variable(c_op.inputs[0])

    out_shape = list(c_op.shape)
    out_order = Order([None] * len(out_shape))
    if -1 in out_shape:
        i = out_shape.index(-1)
        out_shape.pop(i)
        out_shape.insert(i, x.size // mul(out_shape))

    assert mul(out_shape) == x.size, \
        f"[ChainerConverter] Shape mismatch: mul(out_shape)={mul(out_shape)}, x.size={x.size}"

    y = x.reshape(out_shape, out_order)

    converter.set_variable(c_op.outputs[0](), y)
Example #25
def _convert_unpooling2d(converter: ChainerConverter,
                         c_op: "chainer.functions.Unpooling2D"):
    x = converter.get_variable(c_op.inputs[0])
    x.order.unify(OrderNCHW)
    if not Placeholder.check_resolved(x.shape[2]) or not Placeholder.check_resolved(x.shape[3]):
        raise NotImplementedError(
            "[ChainerConverter] \"Unpooling2D\" with dynamic spatial size is not supported"
        )

    pool_opr = Unpooling2D(None,
                           ksize=(c_op.kh, c_op.kw),
                           stride=(c_op.sy, c_op.sx),
                           padding=(c_op.ph, c_op.pw),
                           outsize=(c_op.outh, c_op.outw))

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #26
def _convert_max_pooling2d(converter: ChainerConverter, c_op: "chainer.functions.MaxPooling2D"):
    if not c_op.cover_all:
        raise NotImplementedError("'cover_all=False' property in 'MaxPooling2D' is not supported.")

    x = converter.get_variable(c_op.inputs[0])
    x.order.unify(OrderNCHW)

    pool_opr = MaxPooling2D(None,
                            ksize=(c_op.kh, c_op.kw),
                            stride=(c_op.sy, c_op.sx),
                            padding=(c_op.ph, c_op.pw))

    y, = pool_opr(x)

    converter.set_variable(c_op.outputs[0](), y)
Example #27
def _convert_logsumexp(converter: ChainerConverter,
                       c_op: "chainer.functions.LogSumExp"):
    x = converter.get_variable(c_op.inputs[0])

    if c_op.axis is None:
        axes = list(x.order.axes)
    else:
        axes = [x.order.axes[i] for i in c_op.axis]

    max_x = x
    for axis in axes:
        max_x, = Max(None, axis=axis)(max_x)
    exp_delta_x, = Exp(None)(x - max_x)

    sum_exp_delta_x = exp_delta_x
    for axis in axes:
        sum_exp_delta_x, = Sum(None, axis=axis)(sum_exp_delta_x)

    y = Log(None)(sum_exp_delta_x)[0] + max_x
    converter.set_variable(c_op.outputs[0](), y)
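This is the same max-shift trick as in the LogSoftmax converter: logsumexp(x) = log(sum(exp(x - m))) + m, applied one reduction axis at a time. A NumPy check (illustrative only):

import numpy as np

x = np.random.randn(3, 4)
m = x.max()
assert np.allclose(np.log(np.exp(x - m).sum()) + m, np.log(np.exp(x).sum()))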
Example #28
def _convert_split_axis(converter: ChainerConverter,
                        c_op: "chainer.functions.SplitAxis"):
    x = converter.get_variable(c_op.inputs[0])

    VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH = semver(chainer.__version__)
    if VERSION_MAJOR >= 4:
        # Internal data structure changed
        # https://github.com/chainer/chainer/commit/906a8e9b0837cd9a4e5ee6f1dbda26431a1e12d1#diff-9e610d281c820d44c4a0cbf0ca6263fd
        if c_op.indices is None:
            raise NotImplementedError(
                "[ChainerConverter] SplitAxis with sections is not supported."
            )
        indices = c_op.indices
    else:
        if isinstance(c_op.indices_or_sections, int):
            raise NotImplementedError(
                "[ChainerConverter] SplitAxis with sections is not supported."
            )
        indices = c_op.indices_or_sections

    ys = SplitAxis(None, sections=indices, axis=x.order.axes[c_op.axis])(x)
    for i, y in enumerate(ys):
        converter.set_variable(c_op.outputs[i](), y)
Example #29
def _convert_concat(converter: ChainerConverter,
                    c_op: "chainer.functions.Concat"):
    xs = [converter.get_variable(x) for x in c_op.inputs]
    y, = Concat(None, axis=xs[0].order.axes[c_op.axis])(*xs)
    converter.set_variable(c_op.outputs[0](), y)
Example #30
def _convert_sigmoid(converter: ChainerConverter,
                     c_op: "chainer.functions.Sigmoid"):
    x = converter.get_variable(c_op.inputs[0])
    y, = Sigmoid(None)(x)
    converter.set_variable(c_op.outputs[0](), y)