def template(x_shape=(2, 5, 6, 8), slope=0.5, description: str = ""):
    vx = chainer.Variable(np.random.rand(*x_shape).astype(np.float32))
    vy = chainer.functions.leaky_relu(vx, slope)

    graph = ChainerConverter().convert([vx], [vy])

    generate_kernel_test_case(
        description=f"[chainer] F.leaky_relu {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx.data},
        expected={graph.outputs[0]: vy.data},
    )
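# Hypothetical invocation of the template above (illustrative only, not
# necessarily a case from the original suite): a non-default negative slope.
def test_nondefault_slope():
    template(slope=0.2, description="slope=0.2")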
def test_no_reorder():
    # H = W = 1, so flattening (H, W, C) into C needs no data reordering
    vx = np.random.rand(2, 1, 1, 5)
    vy = vx.copy()

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Flatten(None, in_axes=[Axis.H, Axis.W, Axis.C], out_axis=Axis.C)(x)

    generate_kernel_test_case(
        description="Flatten: H=W=1, no-reorder",
        backend=["webgpu", "webassembly", "fallback"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def template(units=16, return_sequences=False, return_state=False, go_backwards=False, stateful=False,
             activation="tanh", recurrent_activation="hard_sigmoid", use_bias=True, description: str = ""):
    x = keras.layers.Input((14, 15))
    vx = np.random.rand(2, 14, 15).astype(np.float32)
    outputs = keras.layers.LSTM(units=units, return_sequences=return_sequences, return_state=return_state,
                                go_backwards=go_backwards, stateful=stateful, activation=activation,
                                recurrent_activation=recurrent_activation, use_bias=use_bias)(x)

    if return_state:
        # with return_state=True, LSTM returns (output, hidden state, cell state)
        y, _, c = outputs
        model = keras.models.Model([x], [y, c])
        graph = KerasConverter(batch_size=2, use_tensorflow_converter=False).convert(model)

        vy, vc = model.predict(vx, batch_size=2)
        expected = {
            graph.outputs[0]: vy,
            graph.outputs[1]: vc,
        }

    else:
        y = outputs
        model = keras.models.Model([x], [y])
        graph = KerasConverter(batch_size=2, use_tensorflow_converter=False).convert(model)

        vy = model.predict(vx, batch_size=2)
        expected = {
            graph.outputs[0]: vy,
        }

    generate_kernel_test_case(
        description=f"[keras] LSTM {description}",
        graph=graph,
        backend=["webgpu", "webassembly"],
        inputs={graph.inputs[0]: vx},
        expected=expected,
        EPS=1e-2
    )
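# Hypothetical invocation of the LSTM template above; the flag combination is
# illustrative, not taken from the original suite.
def test_return_state_example():
    template(return_state=True, description="return_state=True")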
def template(vx, vy, description: str = ""):
    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.MatMul{{Var, Constant}} {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
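# Hypothetical invocation of the template above: F.matmul of a variable with a
# constant ndarray (shapes are illustrative).
def test_matmul_with_constant_example():
    vx = chainer.Variable(np.random.rand(8, 6).astype(np.float32))
    vw = np.random.rand(6, 4).astype(np.float32)
    vy = chainer.functions.matmul(vx, vw)
    template(vx, vy, description="x @ constant")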
def test_wide_stride_NHWC():
    v_im, v_col = generate_data_212()

    col = Variable(v_col.shape, order=OrderNHWC)
    im, = Col2Im(None, ksize=2, padding=1, stride=2)(col)

    generate_kernel_test_case(
        description="Col2Im output=NHWC stride=2",
        backend=["webgpu", "webgl", "webassembly"],
        graph=Graph([col], [im]),
        inputs={col: v_col},
        expected={im: v_im}
    )
def test_general():
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = np.tanh(vx)

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Tanh(None)(x)

    generate_kernel_test_case(
        description="Tanh",
        backend=["webgpu", "webassembly"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def test_general():
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    vy = np.exp(vx) / np.sum(np.exp(vx), axis=3, keepdims=True)

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Softmax(None, axis=Axis.C)(x)

    generate_kernel_test_case(
        description="Softmax",
        backend=["webgpu", "webassembly", "fallback"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def test_NHWC():
    v_im, v_col = generate_data_311()

    im = Variable(v_im.shape, order=OrderNHWC)
    col, = Im2Col(None, ksize=3, padding=1, stride=1, dilation_rate=1)(im)
    col.change_order(OrderNHWC)

    generate_kernel_test_case(
        description="Im2Col output=NHWC",
        backend=["webgpu", "webgl", "webassembly"],
        graph=Graph([im], [col]),
        inputs={im: v_im},
        expected={col: v_col}
    )
def test_general():
    vx = np.random.rand(2, 3, 4, 5) - 0.5
    # ELU: exp(x) - 1 for negative inputs, identity elsewhere
    vy = vx.copy()
    vy[vx < 0] = np.exp(vy[vx < 0]) - 1

    x = Variable(vx.shape, order=OrderNHWC)
    y, = Elu(None)(x)

    generate_kernel_test_case(
        description="Elu",
        backend=["webgpu", "webassembly"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def template(x_shape, axis, description=""):
    vx = chainer.Variable(np.random.rand(*x_shape).astype(np.float32))
    vy = chainer.functions.squeeze(vx, axis=axis)

    graph = ChainerConverter().convert([vx], [vy])
    assert list(graph.outputs[0].shape) == list(vy.shape)

    generate_kernel_test_case(
        description=f"[chainer] F.squeeze {description}",
        graph=graph,
        inputs={graph.inputs[0]: vx.data},
        expected={graph.outputs[0]: vy.data},
    )
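# Hypothetical invocation of the template above: squeeze a single unit axis.
def test_squeeze_axis1_example():
    template(x_shape=(2, 1, 4, 5), axis=1, description="axis=1")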
def test():
    x = keras.layers.Input((3, 4, 5))
    y = keras.layers.Flatten()(x)
    model = keras.models.Model([x], [y])

    vx = np.random.rand(2, 3, 4, 5)
    vy = model.predict(vx, batch_size=2)

    graph = KerasConverter(batch_size=2).convert(model)

    generate_kernel_test_case(
        description="[keras] Flatten",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy}
    )
def test():
    x = keras.layers.Input((14,))
    y = keras.layers.RepeatVector(n=5)(x)
    model = keras.models.Model([x], [y])

    vx = np.random.rand(2, 14)
    vy = model.predict(vx, batch_size=2)

    graph = KerasConverter(batch_size=2).convert(model)

    generate_kernel_test_case(
        description="[keras] RepeatVector",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy}
    )
def test_scalar_affine():
    vx = np.random.rand(2, 3)
    vy = vx * 4 + 5

    x = Variable(vx.shape, order=OrderNC)
    y, = ScalarAffine(None, scale=4, bias=5)(x)

    generate_kernel_test_case(
        description="ScalarAffine",
        backend="webgpu",
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def test_NHWC():
    vx = np.random.rand(10, 6, 4, 8)
    vs = np.random.rand(8)
    vy = vx * vs[None, None, None, :]

    x = Variable(vx.shape, order=OrderNHWC)
    s = ConstantVariable(vs, order=OrderC)
    y, = AxiswiseScale(None, axis=Axis.C)(x, s)

    generate_kernel_test_case(
        description="AxiswiseScale for input OrderNHWC",
        backend=["webgpu", "webassembly", "fallback"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def test_major_axis():
    # in OrderCNHW, C is the outermost (major) axis, so the bias broadcasts
    # over the first dimension
    vx = np.random.rand(10, 6, 4, 8)
    vb = np.random.rand(10)
    vy = vx + vb[:, None, None, None]

    x = Variable(vx.shape, order=OrderCNHW)
    b = ConstantVariable(vb, order=OrderC)
    y, = AxiswiseBias(None, axis=Axis.C)(x, b)

    generate_kernel_test_case(
        description="AxiswiseBias for major axis",
        backend=["webgpu", "fallback"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def test():
    x = keras.layers.Input((14, 15))
    y = keras.layers.GlobalAveragePooling1D()(x)
    model = keras.models.Model([x], [y])

    vx = np.random.rand(2, 14, 15)
    vy = model.predict(vx, batch_size=2)

    graph = KerasConverter(batch_size=2).convert(model)

    generate_kernel_test_case(
        description="[keras] GlobalAveragePooling1D",
        graph=graph,
        inputs={graph.inputs[0]: vx},
        expected={graph.outputs[0]: vy}
    )
def template(x_order=OrderNHWC, y_order=OrderNCHW, description: str = ""):
    vx = np.random.rand(2, 3, 4, 5)
    vy = np.transpose(vx, [x_order.axes_dict[a] for a in y_order.axes])

    x = Variable(vx.shape, order=x_order)
    y = x.transpose(y_order)

    generate_kernel_test_case(
        description=f"Transpose {description}",
        backend=["webgpu", "webgl", "webassembly"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy},
    )
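# Hypothetical invocation of the template above: the reverse conversion of the
# default case.
def test_nchw_to_nhwc_example():
    template(x_order=OrderNCHW, y_order=OrderNHWC, description="NCHW to NHWC")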
def test_general():
    # each id in x selects the corresponding row of the embedding matrix w
    vx = np.array([[2, 4, 3]])
    vw = np.arange(15).reshape(5, 3)
    vy = vw[vx]

    x = Variable(vx.shape, order=OrderNT)
    w = ConstantVariable(vw, order=OrderCN)
    y, = Embedding(None)(x, w)

    generate_kernel_test_case(
        description="Embedding",
        backend=["webgpu"],
        graph=Graph([x], [y]),
        inputs={x: vx},
        expected={y: vy}
    )
def template(x_shape, description=""):
    vx = chainer.Variable(np.random.rand(*x_shape).astype(np.float32))
    vy = chainer.functions.fliplr(vx)

    graph = ChainerConverter().convert([vx], [vy])
    assert list(graph.outputs[0].shape) == list(vy.shape)

    generate_kernel_test_case(
        description=f"[chainer] F.fliplr {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={graph.inputs[0]: vx.data},
        expected={graph.outputs[0]: vy.data},
    )
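# Hypothetical invocation of the template above with a rank-4 input.
def test_fliplr_example():
    template(x_shape=(2, 3, 4, 5), description="4D input")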
def template(z=0.5, description=""):
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = chainer.functions.clipped_relu(vx, z=z)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.clipped_relu {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data}
    )
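# Hypothetical invocation of the template above: a non-default clipping level.
def test_clipped_relu_example():
    template(z=0.7, description="z=0.7")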
def template(description: str = ""):
    vx = chainer.Variable(np.random.rand(2, 5, 6, 8).astype(np.float32))
    vy = chainer.functions.cosh(vx)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description=f"[chainer] F.cosh {description}",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
        EPS=1e-2
    )
def test():
    for kwargs in [
        {"pool_size": 3, "strides": 2},
        # {"pool_size": 3, "strides": 2, "data_format": "channels_first"},  # FIXME: Not Supported Yet
        {"pool_size": 3, "strides": 2, "data_format": "channels_last"},
        {"pool_size": (3, 4), "strides": (2, 1)},
        {"pool_size": 3, "strides": 2, "padding": "valid"},
        {"pool_size": 3, "strides": 2, "padding": "same"},
    ]:
        channels_first = ("data_format" in kwargs) and (kwargs["data_format"] == "channels_first")

        x = keras.layers.Input((14, 15, 16))
        y = keras.layers.MaxPooling2D(**kwargs)(x)
        model = keras.models.Model([x], [y])

        vx = np.random.rand(2, 14, 15, 16)
        vy = model.predict(vx, batch_size=2)

        graph = KerasConverter(batch_size=2).convert(model, input_orders=[OrderNCHW if channels_first else OrderNHWC])

        generate_kernel_test_case(
            description="[keras] MaxPooling2D " + ", ".join(f"{k}={v}" for k, v in kwargs.items()),
            graph=graph,
            inputs={graph.inputs[0]: vx},
            expected={graph.outputs[0]: vy},
            raise_skip=False
        )

    # raise_skip=False keeps the loop running; SkipTest is raised once at the
    # end so the whole parametrized test is reported as skipped (the kernel
    # test cases are generated here and executed separately)
    raise SkipTest
def test_itself():
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = vx - vx

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description="[chainer] F.Sub itself",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
def test_itself():
    vx = chainer.Variable(np.random.rand(8, 8))
    vy = chainer.functions.matmul(vx, vx, False, False)

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description="[chainer] F.MatMul itself",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
def test():
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    # constant - variable dispatches to SubFromConstant
    # (variable - constant would be converted into an AddConstant instead)
    vy = 1 - vx

    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description="[chainer] F.SubFromConstant",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
def template(description=""): vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32) - 0.5) vy = abs(vx) graph = ChainerConverter().convert([vx], [vy]) x = graph.inputs[0] y = graph.outputs[0] generate_kernel_test_case( description=f"[chainer] F.Abs {description}", graph=graph, inputs={x: vx.data}, expected={y: vy.data}, )
def template(vx, vy, description: str = ""):
    graph = ChainerConverter().convert([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]
    assert list(vy.shape) == list(y.shape), f"vy.shape: {vy.shape}, y.shape: {y.shape}"

    generate_kernel_test_case(
        description=f"[chainer] F.get_item {description}",
        graph=graph,
        backend=["webgpu", "webgl", "webassembly"],
        inputs={x: vx.data},
        expected={y: vy.data}
    )
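# Hypothetical invocation of the template above: Variable.__getitem__ (which
# dispatches to F.get_item) with a basic slice.
def test_get_item_slice_example():
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = vx[:, 1:3]
    template(vx, vy, description="x[:, 1:3]")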
def test_add_itself():
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = vx + vx

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    # reorder the NCHW test data into whatever order the converter assigned
    generate_kernel_test_case(
        description="[chainer] F.Add (y=x+x)",
        graph=graph,
        inputs={x: ConstantVariable(vx.data, OrderNCHW).change_order(x.order).data},
        expected={y: ConstantVariable(vy.data, OrderNCHW).change_order(y.order).data}
    )
def test_itself():
    vx = chainer.Variable(np.random.rand(8, 8))
    vy = vx @ vx

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description="[chainer] F.MatMul itself",
        graph=graph,
        inputs={x: np.transpose(vx.data, [OrderNC.axes_dict[a] for a in x.order.axes])},
        expected={y: np.transpose(vy.data, [OrderNC.axes_dict[a] for a in y.order.axes])},
    )
def test():
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = chainer.functions.local_response_normalization(vx)

    graph = ChainerConverter().convert_from_inout_vars([vx], [vy])

    x = graph.inputs[0]
    y = graph.outputs[0]

    generate_kernel_test_case(
        description="[chainer] F.local_response_normalization",
        graph=graph,
        inputs={x: ConstantVariable(vx.data, OrderNCHW).change_order(x.order).data},
        expected={y: ConstantVariable(vy.data, OrderNCHW).change_order(y.order).data}
    )