Example #1
    def __call__(self, x: Variable, w: Variable):
        assert x.order.axes == OrderNHWC.axes, \
            "Input variable of Deconvolution2D must have N, C, H, and W axes: " \
            f"x.order.axes={x.order.axes}"

        assert w.order.axes == OrderNHWC.axes, \
            "Kernel variable of Deconvolution2D must have N, C, H, and W axes: " \
            f"w.order.axes={w.order.axes}"

        if Placeholder.check_resolved(w.shape_dict[Axis.H]) and \
                Placeholder.check_resolved(w.shape_dict[Axis.W]):
            assert (w.shape_dict[Axis.H], w.shape_dict[Axis.W]) == self.ksize, \
                "Kernel variable of Deconvolution2D must have the same spatial size as the ksize parameter: " \
                f"w.shape_dict[Axis.H]={w.shape_dict[Axis.H]}, " \
                f"w.shape_dict[Axis.W]={w.shape_dict[Axis.W]}, " \
                f"self.ksize={self.ksize}"

        if Placeholder.check_resolved(w.shape_dict[Axis.C]) and \
                Placeholder.check_resolved(x.shape_dict[Axis.C]):
            assert w.shape_dict[Axis.C] == x.shape_dict[Axis.C], \
                "Input and kernel variables of Deconvolution2D must have the same channel size: " \
                f"x.shape_dict[Axis.C]={x.shape_dict[Axis.C]}, " \
                f"w.shape_dict[Axis.C]={w.shape_dict[Axis.C]}"

        N = x.shape_dict[Axis.N]
        H2 = (x.shape_dict[Axis.H] - 1) * self.SH - 2 * self.PH + self.KH
        W2 = (x.shape_dict[Axis.W] - 1) * self.SW - 2 * self.PW + self.KW
        C2 = w.shape_dict[Axis.N]

        y = Variable([N, H2, W2, C2], OrderNHWC)
        # match the input's order to preserve the semantics of any following reshape
        y.change_order(x.order)

        self.append_input("x", x)
        self.append_input("w", w)
        self.append_output("y", y)
        return y,
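
The H2 / W2 computation above is the standard transposed-convolution output-size formula: out = (in - 1) * stride - 2 * pad + ksize. A minimal standalone sketch of that arithmetic (the helper name is ours, not part of WebDNN):

def deconv2d_output_size(in_size: int, ksize: int, stride: int, pad: int) -> int:
    # One spatial axis of a transposed convolution:
    # out = (in - 1) * stride - 2 * pad + ksize
    return (in_size - 1) * stride - 2 * pad + ksize

# e.g. a 4-pixel axis, 3x3 kernel, stride 2, padding 1 -> 7 pixels
assert deconv2d_output_size(4, ksize=3, stride=2, pad=1) == 7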
Example #2
def test_conv_scale_bias():
    for order_x, order_w in itertools.product(orders4, orders4):
        conv = Convolution2D(None, ksize=3, stride=1, padding=1)
        scale = AxiswiseScale(None, axis=Axis.C)
        bias = AxiswiseBias(None, axis=Axis.C)

        x = Variable([8, 7, 6, 5], OrderNHWC)
        x.change_order(order_x)

        w_shape = [4, 3, 3, 5]
        w = ConstantVariable(arange_shaped(w_shape), OrderNHWC)
        w.change_order(order_w)
        w_data = w.data.copy()
        h, = conv(x, w)

        s_shape = [h.shape_dict[Axis.C]]
        s = ConstantVariable(arange_shaped(s_shape), OrderC)
        s_data = s.data.copy()
        h, = scale(h, s)

        b_shape = [h.shape_dict[Axis.C]]
        b = ConstantVariable(arange_shaped(b_shape), OrderC)
        b_data = b.data.copy()
        y, = bias(h, b)

        graph = Graph([x], [y])

        graph, _ = ConcatAffine().optimize(graph)

        # noinspection PyTypeChecker
        expander = (None,) * order_w.axes_dict[Axis.N] + (Ellipsis,) + \
                   (None,) * (3 - order_w.axes_dict[Axis.N])
        w_data_expected = w_data * s_data[expander]
        b_data_expected = b_data

        ops = listup_operators(graph)
        assert len(ops) == 2
        assert isinstance(ops[0], Convolution2D) and isinstance(ops[1], AxiswiseBias)
        assert np.all(np.equal(ops[0].inputs["w"].data, w_data_expected))
        assert np.all(np.equal(ops[1].inputs["b"].data, b_data_expected))
Example #3
def test_mix_order():
    vx1 = np.random.rand(2, 3, 4, 5)
    vx2 = np.random.rand(2, 3, 4, 5)
    vx3 = np.random.rand(2, 3, 4, 5)
    vx4 = np.random.rand(2, 3, 4, 5)
    vy = np.concatenate((vx1, vx2, vx3, vx4), 1)

    x1 = Variable(vx1.shape, order=OrderNHWC)
    x2 = Variable(vx2.shape, order=OrderNHWC)
    x3 = Variable(vx3.shape, order=OrderNHWC)
    x4 = Variable(vx4.shape, order=OrderNHWC)

    x2.change_order(OrderCNHW)
    vx2 = np.rollaxis(vx2, 3, 0)

    x3.change_order(OrderCHWN)
    vx3 = np.rollaxis(np.rollaxis(vx3, 3, 0), 1, 4)

    x4.change_order(OrderNCHW)
    vx4 = np.rollaxis(vx4, 3, 1)

    y, = Concat(None, axis=Axis.H)(x1, x2, x3, x4)
    y.change_order(OrderNHWC)

    generate_kernel_test_case(description="concat_mix_order",
                              backend=["fallback", "webassembly", "webgpu"],
                              graph=Graph([x1, x2, x3, x4], [y]),
                              inputs={
                                  x1: vx1,
                                  x2: vx2,
                                  x3: vx3,
                                  x4: vx4
                              },
                              expected={y: vy})
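
For reference, each np.rollaxis call above realigns the reference array to the order passed to change_order. The same data movement written as explicit np.transpose permutations (our annotation, nothing WebDNN-specific):

import numpy as np

vx = np.random.rand(2, 3, 4, 5)  # OrderNHWC: N=0, H=1, W=2, C=3

# NHWC -> CNHW: move C (axis 3) to the front
assert np.array_equal(np.rollaxis(vx, 3, 0), np.transpose(vx, (3, 0, 1, 2)))
# NHWC -> CHWN: move C to the front, then move N (now axis 1) to the end
assert np.array_equal(np.rollaxis(np.rollaxis(vx, 3, 0), 1, 4),
                      np.transpose(vx, (3, 1, 2, 0)))
# NHWC -> NCHW: move C (axis 3) in front of H
assert np.array_equal(np.rollaxis(vx, 3, 1), np.transpose(vx, (0, 3, 1, 2)))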
Example #4
def test_general():
    for condition_custom in [
        {},
        {"slope": 0},
        {"slope": 1},
        {"x_order": OrderNCHW},
    ]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx = np.random.rand(2, 3, 4, 5) - 0.5
        vy = np.maximum(vx, vx * condition["slope"])

        x = Variable(vx.shape, order=OrderNHWC)
        y, = LeakyRelu(None, slope=condition["slope"])(x)

        x.change_order(condition["x_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description=f"ScalarAffine: " +
            (", ".join([f"{k}={v}" for k, v in condition_custom.items()])),
            backend=condition["backend"],
            graph=Graph([x], [y]),
            inputs={
                x: ConstantVariable(vx, OrderNHWC).change_order(x.order).data
            },
            expected={
                y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data
            },
            raise_skip=False)

    raise SkipTest
Example #5
    def __call__(self, x: Variable):
        assert x.order.check_same_axes(OrderNHWC), \
            "Input variable of Depth2Space must have N, C, H, and W axes: " \
            f"x.order.axes={x.order.axes}"
        assert x.shape_dict[Axis.C] % (self.parameters["r"] * self.parameters["r"]) == 0, \
            "C axis of the input variable must be divisible by r*r: " \
            f'r*r={self.parameters["r"] * self.parameters["r"]}, ' \
            f"x.shape_dict[Axis.C]={x.shape_dict[Axis.C]}"

        N = x.shape_dict[Axis.N]
        C = x.shape_dict[Axis.C] // self.parameters["r"] // self.parameters["r"]
        H = x.shape_dict[Axis.H]
        W = x.shape_dict[Axis.W]
        y = Variable([N, H * self.parameters["r"], W * self.parameters["r"], C],
                     OrderNHWC)
        # match the input's order to preserve the semantics of any following reshape
        y.change_order(x.order)
        self.append_input("x", x)
        self.append_output("y", y)
        return y,
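
As a shape reference for the Depth2Space call above: depth-to-space on an NHWC array is a reshape-transpose-reshape. This sketch assumes TensorFlow-style sub-pixel ordering; WebDNN's kernel may pack the sub-pixels differently, so treat it as illustrative only:

import numpy as np

def depth_to_space_nhwc(x: np.ndarray, r: int) -> np.ndarray:
    # C must be divisible by r*r; C shrinks by r*r while H and W grow by r
    n, h, w, c = x.shape
    assert c % (r * r) == 0
    x = x.reshape(n, h, w, r, r, c // (r * r))
    x = x.transpose(0, 1, 3, 2, 4, 5)  # N, H, r, W, r, C'
    return x.reshape(n, h * r, w * r, c // (r * r))

assert depth_to_space_nhwc(np.zeros((2, 3, 4, 8)), r=2).shape == (2, 6, 8, 2)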
Example #6
def test_every_order():
    orders = [
        OrderC, OrderNC, OrderCN, OrderNHWC, OrderHWNC, OrderHWCN, OrderNCHW,
        OrderCNHW, OrderCHWN
    ]

    for order1, order2 in itertools.product(orders, orders):
        if set(order1.axes) != set(order2.axes):
            continue

        default_order = {1: OrderC, 2: OrderNC, 4: OrderNHWC}

        op = ElementwiseSum("op")
        x1 = Variable(np.arange(order1.ndim) + 1, default_order[order1.ndim])
        x2 = Variable(np.arange(order2.ndim) + 1, default_order[order2.ndim])

        x1.change_order(order1)
        x2.change_order(order2)

        y, = op(x1, x2)
        for axis in order1.axes:
            assert y.shape_dict[axis] == x1.shape_dict[axis]
Example #7
    def exec(self):
        x = self.inputs["x"]
        assert x.order.check_same_axes(OrderNHWC), \
            "Input variable of Space2Depth must have N, C, H, and W axes: " \
            f"x.order.axes={x.order.axes}"
        assert x.shape_dict[Axis.H] % self.parameters["r"] == 0, \
            "H axis of the input variable must be divisible by r: " \
            f'r={self.parameters["r"]}, ' \
            f"x.shape_dict[Axis.H]={x.shape_dict[Axis.H]}"

        assert x.shape_dict[Axis.W] % self.parameters["r"] == 0, \
            "W axis of the input variable must be divisible by r: " \
            f'r={self.parameters["r"]}, ' \
            f"x.shape_dict[Axis.W]={x.shape_dict[Axis.W]}"

        N = x.shape_dict[Axis.N]
        C = x.shape_dict[Axis.C] * self.parameters["r"] * self.parameters["r"]
        H = x.shape_dict[Axis.H] // self.parameters["r"]
        W = x.shape_dict[Axis.W] // self.parameters["r"]
        y = Variable([N, H, W, C], OrderNHWC)
        # match the input's order to preserve the semantics of any following reshape
        y.change_order(x.order)
        self.append_output("y", y)
        return y,
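
Example #7 is the inverse rearrangement (space-to-depth: C grows by r*r while H and W shrink by r). The matching numpy sketch, with the same caveat about sub-pixel ordering; composed with the depth-to-space sketch above, it round-trips to the original array:

import numpy as np

def space_to_depth_nhwc(x: np.ndarray, r: int) -> np.ndarray:
    # H and W must be divisible by r
    n, h, w, c = x.shape
    assert h % r == 0 and w % r == 0
    x = x.reshape(n, h // r, r, w // r, r, c)
    x = x.transpose(0, 1, 3, 2, 4, 5)  # N, H', W', r, r, C
    return x.reshape(n, h // r, w // r, c * r * r)

assert space_to_depth_nhwc(np.zeros((2, 4, 6, 3)), r=2).shape == (2, 2, 3, 12)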
Example #8
def test_every_order():
    orders = [OrderNC, OrderCN, OrderNHWC, OrderHWNC, OrderHWCN, OrderNCHW, OrderCNHW, OrderCHWN]

    for order1, order2 in itertools.product(orders, orders):  # type: Order, Order
        if not order1.check_same_axes(order2):
            continue

        default_order = {
            2: OrderNC,
            4: OrderNHWC
        }

        op = Linear("op")
        x1 = Variable(np.arange(order1.ndim) + 1, default_order[order1.ndim])
        x2 = Variable(np.arange(order2.ndim) + 1, default_order[order2.ndim])

        x1.change_order(order1)
        x2.change_order(order2)

        y, = op(x1, x2)
        assert y.shape_dict[Axis.N] == x1.shape_dict[Axis.N]
        assert y.shape_dict[Axis.C] == x2.shape_dict[Axis.N]
Example #9
def test_no_change1():
    """
    test_no_change1

    c[OrderNCHW] -+
                  +-{Add}- y
    v[OrderNHWC] -+
    """

    c = ConstantVariable(np.random.rand(2, 3, 4, 5), OrderNCHW)
    v = Variable(c.shape, c.order)

    v.change_order(OrderNHWC)

    y = c + v
    op = y.output_from

    assert not op.has_attribute(Inplace)

    UpdateInplaceAttribute().optimize(Graph([v], [y]))

    assert not op.has_attribute(Inplace)
Example #10
def test_value_1_webgl():
    # NOTE: WebGL computes x^1 as abs(x), so if x contains a negative element the result is wrong.
    vx = np.random.rand(2, 3, 4, 5) + 0.5
    vy = vx.copy()

    x = Variable(vx.shape, order=OrderNHWC)
    y = x**1  # type: Variable

    x.change_order(OrderNHWC)
    y.change_order(OrderNHWC)

    generate_kernel_test_case(
        description=f"ScalarPow value=1",
        graph=Graph([x], [y]),
        backend=["webgl"],
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
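
The +0.5 shift keeps every element positive, so the abs-based pow is harmless. A plain numpy model of the failure mode the NOTE describes (abs(x)**1 here stands in for the WebGL behavior; it is our model of the comment, not an actual WebGL call):

import numpy as np

vx = np.random.rand(2, 3, 4, 5) - 0.5      # contains negative elements
assert (vx < 0).any()
webgl_like = np.abs(vx) ** 1               # models pow(x, 1) computed via abs(x)
assert not np.allclose(webgl_like, vx)     # wrong wherever vx < 0

vx_pos = np.random.rand(2, 3, 4, 5) + 0.5  # the test's workaround: all positive
assert np.allclose(np.abs(vx_pos) ** 1, vx_pos)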
Example #11
def template(x_order=OrderNHWC,
             y_order=OrderNHWC,
             slope=0.5,
             description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = np.maximum(vx, vx * slope)

    x = Variable(vx.shape, order=OrderNHWC)
    y, = LeakyRelu(None, slope=slope)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"LeakyRelu {description}",
        graph=Graph([x], [y]),
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #12
def template(x_order=OrderNHWC,
             y_order=OrderNHWC,
             value=4,
             description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx.copy() + value

    x = Variable(vx.shape, order=OrderNHWC)
    y = x + value  # type: Variable

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"ScalarAdd {description}",
        graph=Graph([x], [y]),
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #13
def template(x_order=OrderNHWC,
             y_order=OrderNHWC,
             beta=1.0,
             description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = np.log(np.exp(vx * beta) + 1.0) / beta

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Softplus(None, beta=beta)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Softplus {description}",
        graph=Graph([x], [y]),
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #14
def template(x_order: Order = OrderNHWC,
             y_order: Order = OrderNHWC,
             description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx.copy()

    x = Variable(vx.shape, order=OrderNHWC)
    y, = ConvertRGBAtoR(None)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"ConvertRGBAtoR {description}",
        graph=Graph([x], [y]),
        backend=["webgl"],
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #15
    def __call__(self, x: Variable):
        x_shape_dict = x.shape_dict
        N = x_shape_dict[Axis.N]
        H2 = (x_shape_dict[Axis.H] + 2 * self.PH - self.KH +
              (self.SH - 1 if self.cover_all else 0)) // self.SH + 1
        W2 = (x_shape_dict[Axis.W] + 2 * self.PW - self.KW +
              (self.SW - 1 if self.cover_all else 0)) // self.SW + 1
        C2 = x_shape_dict[Axis.C]

        y = Variable([N, H2, W2, C2], OrderNHWC)
        # match the input's order to preserve the semantics of any following reshape
        y.change_order(x.order)

        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(axis))

        self.append_input("x", x)
        self.append_output("y", y)
        return y,
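
The H2 / W2 arithmetic above is Chainer-style pooling output size: cover_all adds stride - 1 to the numerator so that every input pixel falls inside some window (ceil instead of floor division). A standalone sketch (helper name is ours):

def pool2d_output_size(in_size: int, ksize: int, stride: int, pad: int,
                       cover_all: bool = False) -> int:
    # out = floor((in + 2*pad - ksize [+ stride-1 if cover_all]) / stride) + 1
    extra = stride - 1 if cover_all else 0
    return (in_size + 2 * pad - ksize + extra) // stride + 1

# 6-pixel axis, 3x3 window, stride 2, no padding:
assert pool2d_output_size(6, 3, 2, 0) == 2                  # floor mode
assert pool2d_output_size(6, 3, 2, 0, cover_all=True) == 3  # ceil mode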
Example #16
def test_conv_bias_bias():
    for order_x, order_w in itertools.product(orders4, orders4):
        conv = Convolution2D(None, ksize=3, stride=1, padding=1)
        bias1 = AxiswiseBias(None, axis=Axis.C)
        bias2 = AxiswiseBias(None, axis=Axis.C)

        x = Variable([8, 7, 6, 5], OrderNHWC)
        x.change_order(order_x)

        w_shape = [4, 3, 3, 5]
        w = ConstantVariable(arange_shaped(w_shape).copy(), OrderNHWC)
        w.change_order(order_w)
        w_data = w.data.copy()
        h, = conv(x, w)

        b1_shape = [h.shape_dict[Axis.C]]
        b1 = ConstantVariable(arange_shaped(b1_shape), OrderC)
        b1_data = b1.data.copy()
        h, = bias1(h, b1)

        b2_shape = [h.shape_dict[Axis.C]]
        b2 = ConstantVariable(arange_shaped(b2_shape), OrderC)
        b2_data = b2.data.copy()
        y, = bias2(h, b2)

        graph = Graph([x], [y])

        graph, _ = ConcatAffine().optimize(graph)

        w_data_expected = w_data
        b_data_expected = b1_data + b2_data

        ops = listup_operators(graph)
        assert len(ops) == 2
        assert isinstance(ops[0], Convolution2D) and isinstance(ops[1], AxiswiseBias)
        assert np.all(np.equal(ops[0].inputs["w"].data, w_data_expected))
        assert np.all(np.equal(ops[1].inputs["b"].data, b_data_expected))
Example #17
def test_general():
    for condition_custom in [{}, {"x1_order": OrderNCHW, "x2_order": OrderHWCN}]:
        condition = dict(condition_default)
        condition.update(condition_custom)

        vx1 = np.random.rand(2, 3, 4, 5)
        vx2 = np.random.rand(2, 3, 4, 5)
        vy = vx1 / vx2

        x1 = Variable(vx1.shape, order=OrderNHWC)
        x2 = Variable(vx2.shape, order=OrderNHWC)
        y = x1 / x2

        x1.change_order(condition["x1_order"])
        x2.change_order(condition["x2_order"])
        y.change_order(condition["y_order"])

        generate_kernel_test_case(
            description=f"ElementwiseDiv: " +
            (", ".join([f"{k}={v}" for k, v in condition_custom.items()])),
            backend=condition["backend"],
            graph=Graph([x1, x2], [y]),
            inputs={
                x1: ConstantVariable(vx1, OrderNHWC).change_order(x1.order).data,
                x2: ConstantVariable(vx2, OrderNHWC).change_order(x2.order).data
            },
            expected={
                y: ConstantVariable(vy, OrderNHWC).change_order(y.order).data
            },
            raise_skip=False)

    raise SkipTest
Example #18
def test_conv_scale_scale():
    for order_x, order_w in itertools.product(orders4, orders4):
        conv = Convolution2D(None, ksize=3, stride=1, padding=1)
        scale1 = AxiswiseScale(None, axis=Axis.C)
        scale2 = AxiswiseScale(None, axis=Axis.C)

        x = Variable([8, 7, 6, 5], OrderNHWC)
        x.change_order(order_x)

        w_shape = [4, 3, 3, 5]
        w = ConstantVariable(arange_shaped(w_shape), OrderNHWC)
        w.change_order(order_w)
        w_data = w.data.copy()
        h, = conv(x, w)

        s1_shape = [h.shape_dict[Axis.C]]
        s1 = ConstantVariable(arange_shaped(s1_shape), OrderC)
        s1_data = s1.data.copy()
        h, = scale1(h, s1)

        s2_shape = [h.shape_dict[Axis.C]]
        s2 = ConstantVariable(arange_shaped(s2_shape), OrderC)
        s2_data = s2.data.copy()
        y, = scale2(h, s2)

        graph = Graph([x], [y])

        graph, _ = ConcatAffine().optimize(graph)

        # noinspection PyTypeChecker
        expander = (None,) * order_w.axes_dict[Axis.N] + (Ellipsis,) + \
                   (None,) * (3 - order_w.axes_dict[Axis.N])
        w_data_expected = w_data * s1_data[expander] * s2_data[expander]

        ops = listup_operators(graph)
        assert len(ops) == 1 and isinstance(ops[0], Convolution2D)
        assert np.all(np.equal(ops[0].inputs["w"].data, w_data_expected))
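
The folding the test expects from ConcatAffine is ordinary broadcasting: scaling the convolution output channel-wise is the same as scaling the kernel along its output-channel (N) axis. A numpy check of the expander expression, with the kernel fixed in OrderNHWC so its N axis sits at position 0:

import numpy as np

w = np.random.rand(4, 3, 3, 5)  # kernel OrderNHWC: N=out-channels, H, W, C=in-channels
s1 = np.random.rand(4)          # per-output-channel scales
s2 = np.random.rand(4)

w_folded = w * s1[:, None, None, None] * s2[:, None, None, None]

# with Axis.N at position 0, the generic expander reduces to (Ellipsis, None, None, None)
expander = (None,) * 0 + (Ellipsis,) + (None,) * 3
assert np.array_equal(w_folded, w * s1[expander] * s2[expander])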
Example #19
def template(x_order=OrderNHWC,
             y_order=OrderNHWC,
             scale=4,
             bias=5,
             description: str = ""):
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = vx.copy() * scale + bias

    x = Variable(vx.shape, order=OrderNHWC)
    y, = ScalarAffine(None, scale=scale, bias=bias)(x)  # type: Variable

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"ScalarAffine {description}",
        graph=Graph([x], [y]),
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHWC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #20
def test_every_order():
    orders_x = [OrderNHWC, OrderHWNC, OrderHWCN, OrderNCHW, OrderCNHW, OrderCHWN]
    axes = [Axis.C]

    default_order = {
        1: OrderC,
        2: OrderNC,
        4: OrderNHWC,
        Axis.C: OrderC
    }

    for order_x, axis in itertools.product(orders_x, axes):
        if axis not in order_x.axes:
            continue

        op = AxiswiseBias(None, axis=axis)
        x = Variable(np.arange(order_x.ndim) + 1, default_order[order_x.ndim])
        x.change_order(order_x)
        w = Variable((x.shape_dict[axis],), default_order[axis])

        y, = op(x, w)

        for a in y.order.axes:
            assert y.shape_dict[a] == x.shape_dict[a]
Example #21
def template(x_order=OrderNHWC,
             y_order=OrderNHW,
             axis=Axis.C,
             description: str = ""):
    vx = np.arange(120).reshape(2, 3, 4, 5)
    vy = np.sum(vx, axis=OrderNHWC.axes_dict[axis])

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Sum(None, axis=axis)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Sum {description}",
        graph=Graph([x], [y]),
        backend=["webgl"],
        inputs={
            x: np.transpose(vx, [OrderNHWC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNHW.axes_dict[a] for a in y.order.axes])
        },
    )
Example #22
def template(x_order=OrderNC,
             y_order=OrderNC,
             axis=Axis.C,
             description: str = ""):
    vx = np.random.rand(2, 3) - 0.5
    vy = np.exp(vx) / np.sum(
        np.exp(vx), axis=OrderNC.axes_dict[axis], keepdims=True)

    x = Variable(vx.shape, order=OrderNC)
    y, = Softmax(None, axis=axis)(x)

    x.change_order(x_order)
    y.change_order(y_order)

    generate_kernel_test_case(
        description=f"Softmax {description}",
        graph=Graph([x], [y]),
        inputs={
            x: np.transpose(vx, [OrderNC.axes_dict[a] for a in x.order.axes])
        },
        expected={
            y: np.transpose(vy, [OrderNC.axes_dict[a] for a in y.order.axes])
        },
    )
Example #23
File: lstm.py  Project: xczhanjun/webdnn
    def __call__(self,
                 x: Variable,
                 w_input: Variable,
                 w_hidden: Variable,
                 b: Optional[Variable] = None,
                 initial_c: Optional[Variable] = None,
                 initial_h: Optional[Variable] = None):
        """
        Args:
            x (:class:`~webdnn.graph.variable.Variable`): Input (sequence OrderNTC)
            w_input (:class:`~webdnn.graph.variable.Variable`): Weight for input
            w_hidden (:class:`~webdnn.graph.variable.Variable`): Weight for hidden state
            b (:class:`~webdnn.graph.variable.Variable`): Bias
            initial_c (:class:`~webdnn.graph.variable.Variable`): Initial cell state
            initial_h (:class:`~webdnn.graph.variable.Variable`): Initial hidden state

        Returns:
            y (:class:`~webdnn.graph.variable.Variable`): Output (OrderNTC if return_sequences else OrderNC)
            final_c (:class:`~webdnn.graph.variable.Variable`): Last cell state (OrderNC)
        """
        assert self.parameters["use_bias"] == (b is not None)
        assert self.parameters["use_initial_c"] == (initial_c is not None)
        assert self.parameters["use_initial_h"] == (initial_h is not None)

        self.append_input("x", x)
        self.append_input("w_input", w_input)
        self.append_input("w_hidden", w_hidden)

        if b is not None:
            self.append_input("b", b)

        # TODO: this condition is too strict; it should be handled in the optimization phase, not here.
        if x.order != OrderNTC:
            raise NotImplementedError(
                "Currently, LSTM supports only OrderNTC for the input sequence variable."
            )

        x_shape_dict = x.shape_dict
        w_input_shape_dict = w_input.shape_dict
        w_hidden_shape_dict = w_hidden.shape_dict

        assert set(x.order.axes) == {Axis.N, Axis.T, Axis.C}
        assert set(w_input.order.axes) == {Axis.N, Axis.C}
        assert set(w_hidden.order.axes) == {Axis.N, Axis.C}
        assert b is None or b.order == OrderC

        batch_size = x_shape_dict[Axis.N]
        sequence_len = x_shape_dict[Axis.T]
        input_dim = x_shape_dict[Axis.C]
        hidden_dim = w_hidden_shape_dict[Axis.C]

        assert x_shape_dict[Axis.N] == batch_size
        assert x_shape_dict[Axis.C] == w_input_shape_dict[Axis.C] == input_dim
        assert w_input_shape_dict[Axis.N] == w_hidden_shape_dict[Axis.N] == hidden_dim * 4

        if initial_c is not None:
            self.append_input("initial_c", initial_c)
            initial_c_shape_dict = initial_c.shape_dict

            assert set(initial_c.order.axes) == {Axis.N, Axis.C}
            assert initial_c_shape_dict[Axis.N] == batch_size
            assert initial_c_shape_dict[Axis.C] == hidden_dim

        if initial_h is not None:
            self.append_input("initial_h", initial_h)
            initial_h_shape_dict = initial_h.shape_dict

            assert set(initial_h.order.axes) == {Axis.N, Axis.C}
            assert initial_h_shape_dict[Axis.N] == batch_size
            assert initial_h_shape_dict[Axis.C] == hidden_dim

        if self.parameters["return_sequences"]:
            y = Variable([batch_size, sequence_len, hidden_dim], OrderNTC)
            # match the input's order to preserve the semantics of any following reshape
            y.change_order(x.order)
        else:
            y = Variable([batch_size, hidden_dim], OrderNC)

        final_c = Variable([batch_size, hidden_dim], OrderNC)

        self.append_output("y", y)
        self.append_output("final_c", final_c)

        return y, final_c
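
The assertions above pin down one shape contract: x is (N, T, C) = (batch, sequence, input_dim), and both weight matrices stack the four LSTM gates along their N axis, so N = 4 * hidden_dim. A tiny illustrative checker (the helper and its gate-packing summary are ours, not WebDNN API):

def check_lstm_shapes(batch, seq_len, input_dim, hidden_dim,
                      x_shape, w_input_shape, w_hidden_shape):
    # x: OrderNTC; weights: OrderNC with the four gates stacked along N
    assert x_shape == (batch, seq_len, input_dim)
    assert w_input_shape == (4 * hidden_dim, input_dim)
    assert w_hidden_shape == (4 * hidden_dim, hidden_dim)

# e.g. batch 8, 10 time steps, 32 input features, 64 hidden units
check_lstm_shapes(8, 10, 32, 64,
                  x_shape=(8, 10, 32),
                  w_input_shape=(256, 32),
                  w_hidden_shape=(256, 64))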
Example #24
    def exec(self):
        x = self.inputs["x"]
        w_input = self.inputs["w_input"]
        w_hidden = self.inputs["w_hidden"]
        b = self.inputs["b"] if "b" in self.inputs else None
        initial_c = self.inputs[
            "initial_c"] if "initial_c" in self.inputs else None
        initial_h = self.inputs[
            "initial_h"] if "initial_h" in self.inputs else None

        assert self.parameters["use_bias"] == (b is not None)
        assert self.parameters["use_initial_c"] == (initial_c is not None)
        assert self.parameters["use_initial_h"] == (initial_h is not None)

        # TODO: this condition is too strict; it should be handled in the optimization phase, not here.
        if x.order != OrderNTC:
            raise NotImplementedError(
                "Currently, LSTM supports only OrderNTC for the input sequence variable."
            )

        x_shape_dict = x.shape_dict
        w_input_shape_dict = w_input.shape_dict
        w_hidden_shape_dict = w_hidden.shape_dict

        assert x.order.check_same_axes(OrderNTC)
        assert w_input.order.check_same_axes(OrderNC)
        assert w_hidden.order.check_same_axes(OrderNC)
        assert b is None or b.order == OrderC

        batch_size = x_shape_dict[Axis.N]
        sequence_len = x_shape_dict[Axis.T]
        input_dim = x_shape_dict[Axis.C]
        hidden_dim = w_hidden_shape_dict[Axis.C]

        assert x_shape_dict[Axis.N] == batch_size
        assert x_shape_dict[Axis.C] == w_input_shape_dict[Axis.C] == input_dim
        assert w_input_shape_dict[Axis.N] == w_hidden_shape_dict[Axis.N] == hidden_dim * 4

        if initial_c is not None:
            initial_c_shape_dict = initial_c.shape_dict

            assert initial_c.order.check_same_axes(OrderNC)
            assert initial_c_shape_dict[Axis.N] == batch_size
            assert initial_c_shape_dict[Axis.C] == hidden_dim

        if initial_h is not None:
            initial_h_shape_dict = initial_h.shape_dict

            assert initial_h.order.check_same_axes(OrderNC)
            assert initial_h_shape_dict[Axis.N] == batch_size
            assert initial_h_shape_dict[Axis.C] == hidden_dim

        if self.parameters["return_sequences"]:
            y = Variable([batch_size, sequence_len, hidden_dim], OrderNTC)
            # match the input's order to preserve the semantics of any following reshape
            y.change_order(x.order)
        else:
            y = Variable([batch_size, hidden_dim], OrderNC)

        final_c = Variable([batch_size, hidden_dim], OrderNC)

        self.append_output("y", y)
        self.append_output("final_c", final_c)

        return y, final_c
Example #25
    def __call__(self,
                 x: Variable,
                 w_input: Variable,
                 w_hidden: Variable,
                 b: Optional[Variable] = None,
                 initial_c: Optional[Variable] = None,
                 initial_h: Optional[Variable] = None):
        """
        Args:
            x (:class:`~webdnn.graph.variable.Variable`): Input (sequence OrderNTC)
            w_input (:class:`~webdnn.graph.variable.Variable`): Weight for input
            w_hidden (:class:`~webdnn.graph.variable.Variable`): Weight for hidden state
            b (:class:`~webdnn.graph.variable.Variable`): Bias
            initial_c (:class:`~webdnn.graph.variable.Variable`): Initial cell state
            initial_h (:class:`~webdnn.graph.variable.Variable`): Initial hidden state

        Returns:
            y (:class:`~webdnn.graph.variable.Variable`): Output (OrderNTC if return_sequences else OrderNC)
            final_c (:class:`~webdnn.graph.variable.Variable`): Last cell state (OrderNC)
        """
        assert self.parameters["use_bias"] == (b is not None)
        assert self.parameters["use_initial_c"] == (initial_c is not None)
        assert self.parameters["use_initial_h"] == (initial_h is not None)

        x_shape_dict = x.shape_dict
        w_input_shape_dict = w_input.shape_dict
        w_hidden_shape_dict = w_hidden.shape_dict

        assert x.order.check_same_axes(OrderNTC)
        assert w_input.order.check_same_axes(OrderNC)
        assert w_hidden.order.check_same_axes(OrderNC)
        assert b is None or b.order == OrderC

        batch_size = x_shape_dict[Axis.N]
        sequence_len = x_shape_dict[Axis.T]
        input_dim = x_shape_dict[Axis.C]
        hidden_dim = w_hidden_shape_dict[Axis.C]

        assert x_shape_dict[Axis.N] == batch_size
        assert x_shape_dict[Axis.C] == w_input_shape_dict[Axis.C] == input_dim
        assert w_input_shape_dict[Axis.N] == w_hidden_shape_dict[Axis.N] == hidden_dim * 4

        if initial_c is not None:
            initial_c_shape_dict = initial_c.shape_dict

            assert initial_c.order.check_same_axes(OrderNC)
            assert initial_c_shape_dict[Axis.N] == batch_size
            assert initial_c_shape_dict[Axis.C] == hidden_dim

        if initial_h is not None:
            initial_h_shape_dict = initial_h.shape_dict

            assert initial_h.order.check_same_axes(OrderNC)
            assert initial_h_shape_dict[Axis.N] == batch_size
            assert initial_h_shape_dict[Axis.C] == hidden_dim

        if self.parameters["return_sequences"]:
            y = Variable([batch_size, sequence_len, hidden_dim], OrderNTC)
            # match the input's order to preserve the semantics of any following reshape
            y.change_order(x.order)
        else:
            y = Variable([batch_size, hidden_dim], OrderNC)

        final_c = Variable([batch_size, hidden_dim], OrderNC)

        self.append_input("x", x)
        self.append_input("w_input", w_input)
        self.append_input("w_hidden", w_hidden)

        if b is not None:
            self.append_input("b", b)

        if initial_c is not None:
            self.append_input("initial_c", initial_c)

        if initial_h is not None:
            self.append_input("initial_h", initial_h)

        self.append_output("y", y)
        self.append_output("final_c", final_c)
        return y, final_c
Example #26
    def exec(self):
        x = self.inputs["x"]
        w_input = self.inputs["w_input"]
        w_hidden = self.inputs["w_hidden"]
        b = self.inputs["b"] if "b" in self.inputs else None
        initial_c = self.inputs[
            "initial_c"] if "initial_c" in self.inputs else None
        initial_h = self.inputs[
            "initial_h"] if "initial_h" in self.inputs else None

        assert self.parameters["use_bias"] == (b is not None)
        assert self.parameters["use_initial_c"] == (initial_c is not None)
        assert self.parameters["use_initial_h"] == (initial_h is not None)

        x_shape_dict = x.shape_dict
        w_input_shape_dict = w_input.shape_dict
        w_hidden_shape_dict = w_hidden.shape_dict

        assert x.order.check_same_axes(OrderNTC)
        assert w_input.order.check_same_axes(OrderNC)
        assert w_hidden.order.check_same_axes(OrderNC)
        assert b is None or b.order == OrderC

        batch_size = x_shape_dict[Axis.N]
        sequence_len = x_shape_dict[Axis.T]
        input_dim = x_shape_dict[Axis.C]
        hidden_dim = w_hidden_shape_dict[Axis.C]

        assert x_shape_dict[Axis.N] == batch_size
        assert x_shape_dict[Axis.C] == w_input_shape_dict[Axis.C] == input_dim
        assert w_input_shape_dict[Axis.N] == w_hidden_shape_dict[Axis.N] == hidden_dim * 4

        if initial_c is not None:
            initial_c_shape_dict = initial_c.shape_dict

            assert initial_c.order.check_same_axes(OrderNC)
            assert initial_c_shape_dict[Axis.N] == batch_size
            assert initial_c_shape_dict[Axis.C] == hidden_dim

        if initial_h is not None:
            initial_h_shape_dict = initial_h.shape_dict

            assert initial_h.order.check_same_axes(OrderNC)
            assert initial_h_shape_dict[Axis.N] == batch_size
            assert initial_h_shape_dict[Axis.C] == hidden_dim

        if self.parameters["return_sequences"]:
            y = Variable([batch_size, sequence_len, hidden_dim], OrderNTC)
            # match the input's order to preserve the semantics of any following reshape
            y.change_order(x.order)
        else:
            y = Variable([batch_size, hidden_dim], OrderNC)

        final_c = Variable([batch_size, hidden_dim], OrderNC)

        self.append_output("y", y)
        self.append_output("final_c", final_c)

        return y, final_c
Example #27
def test_change_order():
    v = Variable([1, 2, 3, 4], OrderNHWC)
    v.change_order(OrderHWCN)

    assert v.order == OrderHWCN
    assert v.shape == (2, 3, 4, 1)
Example #28
def test_change_order_with_expansion():
    v = Variable([3, 4], OrderNC)
    v.change_order(OrderCHWN)

    assert v.order == OrderCHWN
    assert v.shape == (4, 1, 1, 3)
Example #29
def test_change_order_with_compression():
    v = Variable([3, 1, 1, 4], OrderNHWC)
    v.change_order(OrderCN)

    assert v.order == OrderCN
    assert v.shape == (4, 3)
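
Examples #27 through #29 exercise the three behaviors of change_order; in plain numpy they correspond to a transpose, an insertion of size-1 axes, and a squeeze of size-1 axes (our translation of the tests, not WebDNN internals):

import numpy as np

# permutation (Example #27): NHWC -> HWCN is a transpose
v = np.empty((1, 2, 3, 4))  # N, H, W, C
assert v.transpose(1, 2, 3, 0).shape == (2, 3, 4, 1)

# expansion (Example #28): NC -> CHWN inserts size-1 H and W axes
v = np.empty((3, 4))        # N, C
assert v.T[:, None, None, :].shape == (4, 1, 1, 3)

# compression (Example #29): NHWC -> CN is valid only when H and W have size 1
v = np.empty((3, 1, 1, 4))
assert v.squeeze(axis=(1, 2)).T.shape == (4, 3)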
Example #30
def test_change_order_with_invalid_compression():
    v = Variable([3, 2, 2, 4], OrderNHWC)
    v.change_order(OrderCN)