Example #1
def _convert_repeat_vector(converter: KerasConverter, k_op: "keras.layers.RepeatVector"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])

    assert x.order == OrderNC, f"[KerasConverter] Currently only OrderNC is supported for input variable order of " \
                               f"keras.layers.RepeatVector: x.order={x.order}"

    N = x.shape_dict[Axis.N]
    n = k_op.n
    C = x.shape_dict[Axis.C]

    # TODO: Implement more efficient version
    # ex) x.shape=(N=2, C=3), n=2
    #
    #  x(N, C)  *      w(C, n*C)     =      y(N, n*C)     =       y(N, n, C)
    # -----------------------------------------------------------------------------
    # [1, 2, 3]   [1, 0, 0, 1, 0, 0]   [1, 2, 3, 1, 2, 3]   [[1, 2, 3], [1, 2, 3]]
    # [4, 5, 6] * [0, 1, 0, 0, 1, 0] = [4, 5, 6, 4, 5, 6] = [[4, 5, 6], [4, 5, 6]]
    #             [0, 0, 1, 0, 0, 1]
    #

    w = ConstantVariable(np.tile(np.eye(C), (1, n)), OrderCN)

    y, = Linear(None)(x, w)
    y = y.reshape([N, n, C], OrderNTC)
    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
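The tiled-identity trick in the comment above is easy to check outside WebDNN; a minimal NumPy sketch, using none of the converter machinery:

import numpy as np

# x @ tile(eye(C), (1, n)), reshaped to (N, n, C), repeats each row of x
# n times -- exactly the RepeatVector semantics.
N, n, C = 2, 2, 3
x = np.array([[1, 2, 3],
              [4, 5, 6]], dtype=np.float32)
w = np.tile(np.eye(C, dtype=np.float32), (1, n))  # shape (C, n*C)
y = (x @ w).reshape(N, n, C)

assert np.array_equal(y, np.repeat(x[:, None, :], n, axis=1))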
Example #2
    def __call__(self, inputs: List[Variable]) -> Tuple[Variable]:
        # noinspection PyUnresolvedReferences
        linear_opr = Linear(generate_unique_name(self.cfunc.label))
        x = inputs[0]
        w = inputs[1]
        if x.ndim == 4 and w.ndim == 2:
            # Expand w to 4 dimensions (NC -> NCHW)
            x_shape_dict = x.shape_dict
            w_shape_dict = w.shape_dict
            assert x_shape_dict[Axis.C] * x_shape_dict[Axis.H] * x_shape_dict[Axis.W] == w_shape_dict[Axis.C]
            assert w.order is OrderNC
            w.order = OrderNCHW
            w_new_shape = [w_shape_dict[Axis.N], x_shape_dict[Axis.C], x_shape_dict[Axis.H],
                           x_shape_dict[Axis.W]]
            w.shape = w_new_shape
            w.data = w.data.reshape(w_new_shape)

        opr_out, = linear_opr(inputs[0], inputs[1])
        if len(inputs) == 3:
            # with bias
            # noinspection PyUnresolvedReferences
            bias_opr = AxiswiseBias(generate_unique_name(self.cfunc.label), axis=Axis.C)
            self.hidden_vars.append(opr_out)
            opr_out, = bias_opr(opr_out, inputs[2])
        return opr_out,
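What the NC -> NCHW branch does, in plain NumPy (a sketch with illustrative shapes, not tied to the snippet's variables):

import numpy as np

# x: 4-D activation (N, C, H, W); w: 2-D weight (N_out, C*H*W).
# Reshaping w to (N_out, C, H, W) lets the Linear operator contract
# over the C, H, W axes of x.
N, C, H, W, N_out = 2, 3, 4, 5, 7
x = np.random.rand(N, C, H, W)
w = np.random.rand(N_out, C * H * W)

w4 = w.reshape(N_out, C, H, W)
y = np.tensordot(x, w4, axes=((1, 2, 3), (1, 2, 3)))  # shape (N, N_out)

# Same result as flattening x instead of reshaping w:
assert np.allclose(y, x.reshape(N, -1) @ w.T)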
Example #3
    def convert_layer_dense(self, layer_config: Dict[str, object], inputs: List[Variable]) -> List[Variable]:
        assert len(inputs) == 1
        input = inputs[0]
        name: str = layer_config["name"]
        weight_array = self.weights[f"{name}/{name}/kernel:0"].value
        weight_var = ConstantVariable(weight_array, OrderCN)  # shape: (in, out)
        linear_opr = Linear(name)
        y, = linear_opr(input, weight_var)

        if layer_config["use_bias"]:
            bias_array = self.weights[f"{name}/{name}/bias:0"].value
            bias_var = ConstantVariable(bias_array, OrderC)
            bias_opr = AxiswiseBias(name + "_bias", Axis.C)
            y, = bias_opr(y, bias_var)

        act_opr: Operator = None
        activation_type: str = layer_config["activation"]
        if activation_type == "relu":
            act_opr = Relu(name + "_activation")
        elif activation_type == "softmax":
            warn("omitting softmax activation")
        else:
            raise NotImplementedError(f"Unknown activation {activation_type}")

        if act_opr is not None:
            y, = act_opr(y)

        return [y]
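The graph assembled here computes an ordinary dense layer; in NumPy terms (a sketch, with the kernel stored input-major as the OrderCN comment indicates):

import numpy as np

batch, n_in, n_out = 2, 4, 3
x = np.random.rand(batch, n_in)
w = np.random.rand(n_in, n_out)   # kernel shape (in, out), i.e. OrderCN
b = np.random.rand(n_out)

y = np.maximum(x @ w + b, 0.0)    # Linear + AxiswiseBias + Relu
assert y.shape == (batch, n_out)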
Example #4
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])  # type: ConstantVariable

    x2, = Reshape(None,
                  in_order=x.order,
                  out_order=OrderNC,
                  out_shape=[x.shape[0], mul(x.shape[1:])])(x)
    w2, = ReinterpretAxis(None, in_order=w.order, out_order=OrderNC)(w)
    w2, = Transpose(None)(w2)
    w2.change_order(OrderCN)

    y, = Linear(None)(x2, w2)
    y, = ReinterpretAxis(None,
                         in_order=y.order,
                         out_order=Order([x.order.axes[0],
                                          w.order.axes[0]]))(y)

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        check_broadcast_constraints(y, b)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
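Chainer's F.linear stores its weight as (out_features, in_features), which is why the converter transposes w before Linear; the equivalent NumPy computation (a sketch):

import numpy as np

N, out_f = 2, 5
x = np.random.rand(N, 3, 2, 2)    # trailing axes flatten to 3*2*2 = 12
w = np.random.rand(out_f, 12)     # chainer layout: (out, in)
b = np.random.rand(out_f)

y = x.reshape(N, -1) @ w.T + b    # flatten, transpose to (in, out), matmul, add bias
assert y.shape == (N, out_f)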
Example #5
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    linear_opr = Linear(None)

    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])
    if x.ndim == 4 and w.ndim == 2:
        # Expand w to 4 dimensions (NC -> NCHW)
        x_shape_dict = x.shape_dict
        w_shape_dict = w.shape_dict
        assert x_shape_dict[Axis.C] * x_shape_dict[Axis.H] * x_shape_dict[
            Axis.W] == w_shape_dict[Axis.C]
        assert w.order is OrderNC
        w.order = OrderNCHW
        w_new_shape = [
            w_shape_dict[Axis.N], x_shape_dict[Axis.C], x_shape_dict[Axis.H],
            x_shape_dict[Axis.W]
        ]
        w.shape = w_new_shape
        w.data = w.data.reshape(w_new_shape)

    y, = linear_opr(x, w)
    if len(c_op.inputs) == 3:
        # with bias
        bias_opr = AxiswiseBias(None, axis=Axis.C)
        bias = converter.get_variable(c_op.inputs[2])
        y, = bias_opr(y, bias)

    converter.set_variable(c_op.outputs[0](), y)
Example #6
def _convert_mul(converter: ChainerConverter, c_op: "chainer.functions.math.basic_math.Mul"):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])

    x1 = x1.reinterpret_axes(OrderNC)
    x2 = x2.reinterpret_axes(OrderCN)

    y, = Linear(None)(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
Example #7
def _convert_mul(converter: ChainerConverter, c_op: chainer.functions.math.basic_math.Mul):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])

    x1, = ReinterpretAxis(None, x1.order, OrderNC)(x1)
    x2, = ReinterpretAxis(None, x2.order, OrderCN)(x2)

    y, = Linear(None)(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
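Both Mul handlers reduce a matrix product to the Linear operator by reinterpreting one operand as OrderNC and the other as OrderCN; numerically this is a plain matmul over the shared C axis (a NumPy sketch):

import numpy as np

x1 = np.random.rand(3, 4)   # treated as OrderNC: N=3, C=4
x2 = np.random.rand(4, 5)   # treated as OrderCN: C=4, N=5
y = x1 @ x2                 # contract the shared C axis -> (3, 5)
assert y.shape == (3, 5)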
Example #8
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])  # type: ConstantVariable

    x2 = x.reshape([x.shape[0], mul(x.shape[1:])], OrderNC)
    w2 = w.reinterpret_axes(OrderNC)

    y, = Linear(None)(x2, w2)
    y = y.reinterpret_axes(Order([x.order.axes[0], w.order.axes[0]]))

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        check_broadcast_constraints(y, b)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
Example #9
def _convert_dense(converter: KerasConverter, k_op: "keras.layers.Dense"):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])
    w = converter.convert_to_constant_variable(k_op.kernel, OrderCN)
    y, = Linear(None)(x, w)

    if k_op.use_bias:
        b = converter.convert_to_constant_variable(k_op.bias, OrderC)
        y = y + b

    y = do_activation(k_op.activation, y)
    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
Example #10
def test_internal_softmax():
    linear1 = Linear('linear')
    softmax1 = Softmax('softmax', axis=Axis.C)
    linear2 = Linear('linear')
    softmax2 = Softmax('softmax', axis=Axis.C)

    x = Variable([4, 5], OrderNC)
    w1 = Variable([4, 5], OrderNC)
    w2 = Variable([3, 4], OrderNC)
    h, = linear1(x, w1)
    h, = softmax1(h)
    h, = linear2(h, w2)
    y, = softmax2(h)

    graph = Graph([x], [y])

    graph, _ = RemoveLastSoftmax().optimize(graph)

    ops = listup_operators(graph)
    assert len(ops) == 3 and isinstance(ops[0], Linear) and isinstance(ops[1], Softmax) and isinstance(ops[2], Linear)
    assert len(graph.outputs) == 1 and ops[2].outputs["y"] == graph.outputs[0]
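RemoveLastSoftmax is presumably safe whenever the output is only used for ranking: softmax is strictly increasing per row, so dropping it preserves argmax and ordering. A quick NumPy check (a sketch):

import numpy as np

logits = np.random.rand(4, 5)
e = np.exp(logits - logits.max(axis=1, keepdims=True))
probs = e / e.sum(axis=1, keepdims=True)

# The predicted class is unchanged when the final softmax is removed.
assert np.array_equal(np.argmax(logits, axis=1), np.argmax(probs, axis=1))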
Example #11
def matmul_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    a = converter.get_variable(tf_op.inputs[0])
    b = converter.get_variable(tf_op.inputs[1])
    transposed_a = tf_op.get_attr("transpose_a")
    transposed_b = tf_op.get_attr("transpose_b")

    if a.ndim > 2 or b.ndim > 2:
        raise NotImplementedError(
            "[TensorFlowConverter] Currently, MatMul is supported only 2D * 2D case."
        )

    c_axes = []
    if transposed_a:
        c_axes.append(a.order.axes[-1])

        if a.order != OrderCN:
            a = a.reinterpret_axes(OrderCN)

    else:
        c_axes.append(a.order.axes[-2])

        if a.order != OrderNC:
            a = a.reinterpret_axes(OrderNC)

    if transposed_b:

        c_axes.append(Axis())
        if b.order != OrderNC:
            b = b.reinterpret_axes(OrderNC)

    else:
        c_axes.append(Axis())
        if b.order != OrderCN:
            b = b.reinterpret_axes(OrderCN)

    c_normalized, = Linear(None)(a, b)
    c = c_normalized.reinterpret_axes(Order(c_axes))

    converter.set_variable(tf_op.outputs[0], c)
Example #12
def matmul_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    a = converter.get_variable(tf_op.inputs[0])
    b = converter.get_variable(tf_op.inputs[1])
    transposed_a = tf_op.get_attr("transpose_a")
    transposed_b = tf_op.get_attr("transpose_b")

    if a.ndim > 2 or b.ndim > 2:
        raise NotImplementedError(
            "[TensorFlowConverter] Currently, MatMul is supported only 2D * 2D case."
        )

    c_axes = []
    if transposed_a:
        c_axes.append(a.order.axes[-1])
        a_axis_K = a.order.axes[-2]

        if a.order != OrderCN:
            a, = ReinterpretAxis(None, in_order=a.order, out_order=OrderCN)(a)

    else:
        c_axes.append(a.order.axes[-2])
        a_axis_K = a.order.axes[-1]

        if a.order != OrderNC:
            a, = ReinterpretAxis(None, in_order=a.order, out_order=OrderNC)(a)

    if transposed_b:
        b_axis_K = b.order.axes[-1]

        c_axes.append(AxisVar())
        if b.order != OrderNC:
            b, = ReinterpretAxis(None, in_order=b.order, out_order=OrderNC)(b)

    else:
        c_axes.append(AxisVar())
        if b.order != OrderCN:
            b, = ReinterpretAxis(None, in_order=b.order, out_order=OrderCN)(b)

        b_axis_K = b.order.axes[-2]

    if flags.AGGRESSIVE_ORDER_INFERENCE:
        # Assumption: 2 inner multiplied axes are same.
        unify(a_axis_K, b_axis_K)

    c_normalized, = Linear(None)(a, b)
    c, = ReinterpretAxis(None,
                         in_order=c_normalized.order,
                         out_order=Order(c_axes))(c_normalized)

    converter.set_variable(tf_op.outputs[0], c)
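The transpose_a / transpose_b handling normalizes both operands so that Linear always contracts the shared axis; for reference, tf.matmul's flag semantics for 2-D inputs come down to this (a NumPy sketch; matmul_ref is an illustrative helper, not a library function):

import numpy as np

def matmul_ref(a, b, transpose_a=False, transpose_b=False):
    # 2-D reference semantics of tf.matmul's transpose flags.
    if transpose_a:
        a = a.T
    if transpose_b:
        b = b.T
    return a @ b

a = np.random.rand(4, 3)
b = np.random.rand(5, 4)
c = matmul_ref(a, b, transpose_a=True, transpose_b=True)  # (3, 4) @ (4, 5)
assert c.shape == (3, 5)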
Example #13
def generate_graph_model2(caption_net, hidden_num):
    # inputs
    var_input_img = Variable([1, 1, hidden_num], OrderNTC)
    var_input_word = Variable([1, 1], OrderNT)
    var_switch_img = Variable([1, 1, hidden_num], OrderNTC)
    var_switch_word = Variable([1, 1, hidden_num], OrderNTC)
    var_last_h = Variable([1, hidden_num], OrderNC)
    var_last_c = Variable([1, hidden_num], OrderNC)

    # prepare for lstm
    var_emb_word, = Embedding(None)(var_input_word,
                                    ConstantVariable(
                                        caption_net.word_vec.W.data,
                                        OrderCN))  # OrderNTC
    var_lstm_input = (var_emb_word * var_switch_word) + \
        (var_input_img * var_switch_img)

    # lstm
    lstm_opr = LSTM(None,
                    use_bias=True,
                    return_sequences=False,
                    activation="tanh",
                    recurrent_activation="sigmoid",
                    use_initial_h=True,
                    use_initial_c=True)
    w_input = _convert_lstm_to_webdnn_order(caption_net.lstm.upward.W.data.T)
    w_hidden = _convert_lstm_to_webdnn_order(caption_net.lstm.lateral.W.data.T)
    b = _convert_lstm_to_webdnn_order(
        caption_net.lstm.upward.b.data[None, :])[0]
    var_lstm_h, var_lstm_c = lstm_opr(
        x=var_lstm_input,
        w_input=ConstantVariable(w_input, OrderCN),
        w_hidden=ConstantVariable(w_hidden, OrderCN),
        b=ConstantVariable(b, OrderC),
        initial_h=var_last_h,
        initial_c=var_last_c)

    # word probability
    var_word_score, = Linear(None)(var_lstm_h,
                                   ConstantVariable(
                                       caption_net.out_word.W.data.T, OrderCN))
    var_word_score_biased, = AxiswiseBias(None, axis=Axis.C)(
        var_word_score, ConstantVariable(caption_net.out_word.b.data, OrderC))
    var_word_prob, = Softmax(None, axis=Axis.C)(var_word_score_biased)

    return Graph([
        var_input_img, var_input_word, var_switch_img, var_switch_word,
        var_last_h, var_last_c
    ], [var_word_prob, var_lstm_h, var_lstm_c])
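The switch inputs act as an elementwise selector between the word embedding and the image feature at each step; the blend is plain arithmetic (a sketch with a hypothetical hidden size):

import numpy as np

hidden = 8
emb_word = np.random.rand(1, 1, hidden)
input_img = np.random.rand(1, 1, hidden)
switch_word = np.ones((1, 1, hidden))    # 1 -> feed the word embedding
switch_img = np.zeros((1, 1, hidden))    # 0 -> ignore the image feature

lstm_input = emb_word * switch_word + input_img * switch_img
assert np.array_equal(lstm_input, emb_word)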
Example #14
def test_NHWC_HWCN():
    vx = np.random.rand(2, 3, 4, 5)
    vw = np.random.rand(3, 4, 5, 2)
    vy = np.tensordot(vx, vw, ((1, 2, 3), (0, 1, 2)))

    x = Variable(vx.shape, order=OrderNHWC)
    w = ConstantVariable(vw, order=OrderHWCN)
    y, = Linear(None)(x, w)

    generate_kernel_test_case(description="Linear: NHWC*HWCN",
                              backend=["fallback", "webassembly", "webgpu"],
                              graph=Graph([x], [y]),
                              inputs={x: vx},
                              expected={y: vy},
                              raise_skip=False)
Example #15
def test_NC_CN():
    vx = np.random.rand(3, 4)
    vw = np.random.rand(4, 5)
    vy = np.dot(vx, vw)

    x = Variable(vx.shape, order=OrderNC)
    w = ConstantVariable(vw, order=OrderCN)
    y, = Linear(None)(x, w)

    generate_kernel_test_case(description="Linear: NC*CN",
                              backend=["fallback", "webassembly", "webgpu"],
                              graph=Graph([x], [y]),
                              inputs={x: vx},
                              expected={y: vy},
                              raise_skip=False)
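Both tests derive their expected outputs with NumPy first; the pattern generalizes, since Linear contracts every non-N axis of x against the matching axes of w (a sketch):

import numpy as np

# NC * CN: a plain matrix product.
vx = np.random.rand(3, 4)
vw = np.random.rand(4, 5)
assert np.allclose(np.dot(vx, vw), np.tensordot(vx, vw, ((1,), (0,))))

# NHWC * HWCN: contract H, W, C together, leaving (N_x, N_w).
vx = np.random.rand(2, 3, 4, 5)
vw = np.random.rand(3, 4, 5, 2)
assert np.tensordot(vx, vw, ((1, 2, 3), (0, 1, 2))).shape == (2, 2)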
Example #16
def test_single_softmax():
    linear = Linear('linear')
    softmax = Softmax('softmax', axis=Axis.C)

    x = Variable([4, 5], OrderNC)
    w = Variable([4, 5], OrderNC)
    h, = linear(x, w)
    y, = softmax(h)

    graph = Graph([x], [y])

    graph, _ = RemoveLastSoftmax().optimize(graph)

    ops = listup_operators(graph)
    assert len(ops) == 1 and isinstance(ops[0], Linear)
    assert len(graph.outputs) == 1 and ops[0].outputs["y"] == graph.outputs[0]
Example #17
def test_every_order():
    orders = [OrderNC, OrderCN, OrderNHWC, OrderHWNC, OrderHWCN, OrderNCHW, OrderCNHW, OrderCHWN]

    for order1, order2 in itertools.product(orders, orders):  # type: Order, Order
        if not order1.check_same_axes(order2):
            continue

        default_order = {
            2: OrderNC,
            4: OrderNHWC
        }

        op = Linear("op")
        x1 = Variable(np.arange(order1.ndim) + 1, default_order[order1.ndim])
        x2 = Variable(np.arange(order2.ndim) + 1, default_order[order2.ndim])

        x1.change_order(order1)
        x2.change_order(order2)

        y, = op(x1, x2)
        assert y.shape_dict[Axis.N] == x1.shape_dict[Axis.N]
        assert y.shape_dict[Axis.C] == x2.shape_dict[Axis.N]
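The two assertions encode Linear's shape rule: the output keeps x1's N axis, and its C size comes from x2's N axis, i.e. x2's N plays the role of output channels. A NumPy analogue (a sketch):

import numpy as np

x1 = np.random.rand(2, 6)   # OrderNC: N=2, C=6
x2 = np.random.rand(3, 6)   # OrderNC: N=3 (output channels), C=6
y = x1 @ x2.T               # contract the shared C axis -> (2, 3)
assert y.shape == (x1.shape[0], x2.shape[0])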