Example No. 1
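All examples on this page exercise NNVM's expand_dims operator and assume the symbolic API import, import nnvm.symbol as sym (frontend code aliases it as _sym).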
def test_expand_dims():
    x = sym.Variable("x", shape=(10, 20))
    y = sym.expand_dims(x, axis=1, name="y")
    sdict = infer_shape(y)
    assert(sdict["y"][0] == [10, 1, 20])
    y = sym.expand_dims(x, axis=-1, name="y", num_newaxis=2)
    sdict = infer_shape(y)
    assert(sdict["y"][0] == [10, 20, 1, 1])
Example No. 2
def before(x, conv_weight, conv_bias, in_scale, out_scale, channels):
    x = x * sym.expand_dims(in_scale, axis=1, num_newaxis=2)
    y = sym.conv2d(x, conv_weight, conv_bias,
                   channels=channels,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   name="conv")
    y = sym.relu(y)
    y = y * sym.expand_dims(out_scale, axis=1, num_newaxis=2)
    return y
Example No. 3
def simple_bn(x, gamma, beta, moving_mean, moving_var,
              axis=1, epsilon=1e-5, shape=None):
    # expect = (x - moving_mean) / sym.sqrt(moving_var + eps) * gamma + beta
    scale = sym.elemwise_mul(1 / sym.sqrt(moving_var + epsilon), gamma)
    shift = sym.elemwise_add(
        sym.elemwise_mul(sym.negative(moving_mean), scale), beta)
    # append trailing axes so the per-channel scale/shift broadcast
    # against the remaining (e.g. spatial) dimensions of x
    num_newaxis = len(shape) - axis - 1
    if num_newaxis:
        scale = sym.expand_dims(scale, axis=1, num_newaxis=num_newaxis)
        shift = sym.expand_dims(shift, axis=1, num_newaxis=num_newaxis)
    return x * scale + shift
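As a sanity check (not part of the original snippet), the folded scale/shift algebra can be verified against the direct batch-norm formula with NumPy:

import numpy as np

x = np.random.randn(2, 4, 8, 8).astype("float32")
mean = np.random.randn(4).astype("float32")
var = np.random.uniform(0.5, 1.5, 4).astype("float32")
gamma = np.random.randn(4).astype("float32")
beta = np.random.randn(4).astype("float32")
eps = 1e-5

def rs(a):  # reshape (C,) -> (C, 1, 1) so it broadcasts over N, H, W
    return a.reshape(-1, 1, 1)

# direct inference-mode batch norm over the channel axis
expect = (x - rs(mean)) / np.sqrt(rs(var) + eps) * rs(gamma) + rs(beta)

# folded scale/shift form used by simple_bn
scale = (1.0 / np.sqrt(var + eps)) * gamma
shift = -mean * scale + beta
out = x * rs(scale) + rs(shift)

np.testing.assert_allclose(out, expect, rtol=1e-5, atol=1e-5)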
Example No. 4
def expected(x, conv_weight, conv_bias, in_scale, out_scale, channels):
    # fold the output scale into the OIHW weight: out_scale has length
    # `channels`, so (O,) -> (O, 1, 1, 1) broadcasts across the weight
    conv_weight = conv_weight * sym.expand_dims(out_scale, axis=1, num_newaxis=3)
    # fold the input scale: (I,) -> (I, 1, 1) right-aligns to (1, I, 1, 1)
    conv_weight = conv_weight * sym.expand_dims(in_scale, axis=1, num_newaxis=2)
    conv_bias = conv_bias * out_scale
    y = sym.conv2d(x,
                   conv_weight,
                   conv_bias,
                   channels=channels,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   name="conv")
    y = sym.relu(y)
    return y
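The before graph in Example No. 2 and the expected graph above form a before/after pair for NNVM's FoldScaleAxis pass, which folds the input and output scaling into the convolution weights and bias. A minimal driver sketch for comparing the two, modeled on NNVM's fold-axis tests (check_fold and the concrete shapes are illustrative assumptions):

import nnvm
import nnvm.symbol as sym
from nnvm.compiler import graph_attr, graph_util

def check_fold(shape, channels):
    x = sym.Variable("x")
    weight = sym.Variable("weight")
    bias = sym.Variable("bias")
    in_scale = sym.Variable("in_scale", shape=(shape[1],))
    out_scale = sym.Variable("out_scale", shape=(channels,))
    y1 = before(x, weight, bias, in_scale, out_scale, channels)
    y2 = expected(x, weight, bias, in_scale, out_scale, channels)
    g1 = nnvm.graph.create(y1)
    g2 = nnvm.graph.create(y2)
    graph_attr.set_shape_inputs(g1, {"x": shape})
    # fold the scales, then check the rewritten graph matches `expected`
    g1 = g1.apply("InferShape").apply("FoldScaleAxis")
    graph_util.check_graph_equal(g1, g2)

check_fold((2, 4, 10, 10), 8)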
Example No. 5
def before(x, scale, channels):
    y = sym.conv2d(x,
                   channels=channels,
                   kernel_size=(3, 3),
                   padding=(1, 1),
                   name="conv")
    y = y * sym.expand_dims(scale, axis=1, num_newaxis=1)
    return y
Example No. 6
import numpy as np
import tvm
from tvm.contrib import graph_runtime
import nnvm
import nnvm.compiler
import nnvm.symbol as sym


def test1():

    in_shape = [3, 3, 3]
    # each input gains one leading axis, and the two (1, 3, 3, 3) tensors
    # are concatenated along it, giving a (2, 3, 3, 3) result
    out_shape = [2, 3, 3, 3]
    data = {
        "x": np.arange(np.prod(in_shape), dtype=np.float32).reshape(in_shape),
        "y": np.arange(np.prod(in_shape), dtype=np.float32).reshape(in_shape)
    }

    axis = -4  # counted from the back: position 0 of the 4-D result
    x = sym.Variable("x")
    y = sym.Variable("y")

    x = sym.expand_dims(x, axis=axis, num_newaxis=1)
    y = sym.expand_dims(y, axis=axis, num_newaxis=1)
    z = sym.concatenate(x, y, axis=-4)

    nnvm_graph = nnvm.graph.create(z)
    print('Got NNVM graph')
    print(nnvm_graph.json())

    in_shapes_dict = {n: list(np.shape(v)) for n, v in data.items()}
    tvm_graph, lib, params = nnvm.compiler.build(nnvm_graph, 'llvm',
                                                 in_shapes_dict)
    print('Got TVM graph')

    ctx = tvm.cpu(0)
    graph_module = graph_runtime.create(tvm_graph, lib, ctx)
    print('Got graph module')

    for name, value in data.items():
        graph_module.set_input(name, value)

    graph_module.run()

    out_value = graph_module.get_output(0, tvm.nd.empty(out_shape, 'float32'))
    print('Output value:', type(out_value), '\nShape:', out_value.shape,
          '\nO:', out_value)
Example No. 7
def test_expand_dims():
    x = sym.Variable("x", shape=(10, 20))
    y = sym.expand_dims(x, axis=1, name="y")
    g, ldict = correct_layout(y, "HW")
    assert(ldict["x"][0] == "HW")
    assert(ldict["y"][0] == "__undef__")
    # second pass will insert layout transform
    _, ldict = correct_layout(g, "HW16w")
    assert(ldict["x"][0] == "HW16w")
    assert(ldict["x_HW"][0] == "HW")
    assert(ldict["y"][0] == "__undef__")
Example No. 8
def nnvm_distribute(c, v, shp):
    """Implementation of distribute."""
    nv = c.ref(v)
    assert shp.is_constant()
    shp = shp.value
    vshp = ashape(v)
    if len(shp) != len(vshp):
        # pad with leading length-1 axes so the ranks match
        nv = sym.expand_dims(nv, axis=0, num_newaxis=len(shp) - len(vshp))
    if shp == vshp:
        return nv
    return sym.broadcast_to(nv, shape=shp)
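In NumPy terms, the same rank-pad-then-broadcast logic looks like this (illustrative only; nnvm_distribute emits symbolic expand_dims and broadcast_to nodes instead of operating on arrays):

import numpy as np

v = np.arange(3.0)        # value with shape (3,)
shp = (2, 3)              # target shape of higher rank
if len(shp) != v.ndim:
    # leading length-1 axes play the role of expand_dims(axis=0, num_newaxis=...)
    v = v.reshape((1,) * (len(shp) - v.ndim) + v.shape)
out = np.broadcast_to(v, shp)   # array([[0., 1., 2.], [0., 1., 2.]])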
Example No. 9
def weight_prepack_conv2d(attrs, inputs, tinfos):
    import ast
    data_sym = inputs[0]
    data = tinfos[0]
    kernel = tinfos[1]
    padding = ast.literal_eval(attrs['padding'])
    stride = ast.literal_eval(attrs['strides'])
    wkl = _get_workload(data, kernel, stride, padding, 'float32')
    sch = _get_schedule_conv(wkl)
    is_kernel_1x1 = isinstance(sch, AVX512Conv1x1Fwd)

    ic_bn, oc_bn = sch.ic_bn, sch.oc_bn
    # TODO: hack checking input layer
    if ic_bn == 3:
        data_sym = sym.expand_dims(data_sym, axis=4)

    new_attrs = {k: attrs[k] for k in attrs.keys()}
    new_attrs['layout'] = 'NCHWc'
    new_attrs['ic_bn'] = ic_bn
    new_attrs['oc_bn'] = oc_bn

    kernel_sym = inputs[1]
    reorder_attrs = {
        'ic_bn': ic_bn,
        'oc_bn': oc_bn,
        'kernel_1x1': is_kernel_1x1
    }
    trans_kernel = sym.reorder(kernel_sym, **reorder_attrs)

    if attrs.get_bool('use_bias'):
        bias = inputs[2]
        bias = sym.bn_reorder(bias, bn=oc_bn)
        return sym.conv2d_nopack(data_sym, trans_kernel, bias, **new_attrs)
    else:
        return sym.conv2d_nopack(data_sym, trans_kernel, **new_attrs)
Example No. 10
def test_expand_dims():
    x = sym.Variable('x')
    y = sym.expand_dims(x, axis=1, num_newaxis=2)
    assert y.list_input_names() == ['x']
Example No. 11
    def _impl(cls, inputs, args, params):
        assert len(inputs) == 3
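        # append two trailing axes, presumably (N, C) -> (N, C, 1, 1), so
        # mean/std broadcast over the spatial dimensions of the input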
        mean = _sym.expand_dims(inputs[1], axis=2, num_newaxis=2)
        std = _sym.expand_dims(inputs[2], axis=2, num_newaxis=2)

        return _sym.broadcast_div(_sym.broadcast_sub(inputs[0], mean), std)
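In NumPy terms, assuming a (N, C) mean and std against an NCHW input (shapes here are illustrative, not taken from the converter), the graph computes:

import numpy as np

x = np.random.randn(1, 3, 4, 4).astype("float32")       # NCHW input
mean = np.random.randn(1, 3).astype("float32")
std = np.random.uniform(0.5, 1.5, (1, 3)).astype("float32")

# expand_dims(axis=2, num_newaxis=2) corresponds to the two None indices
out = (x - mean[:, :, None, None]) / std[:, :, None, None]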