Example 1
def test_pad_invalid_paddings_length(N):
    """
    pad should raise an exception if the paddings length is not the same as the
    input dimensionality.
    """
    x = ng.variable([N])
    with pytest.raises(ValueError):
        ng.pad(x, [1, 0])
Example 2
def test_pad_0(N):
    """
    pad with length 0 should be a nop
    """
    x = ng.variable([N])

    assert ng.pad(x, [0]).axes == x.axes
Example 3
def test_pad_0():
    """
    pad with length 0 should be a nop
    """

    N = ng.make_axis(1)

    x = ng.variable([N])

    assert ng.pad(x, [0]).axes == x.axes
Example 4
def test_pad_mixed():
    """
    mix 0 padding with non-0 padding
    """
    input_axes = ng.make_axes([ng.make_axis(1), ng.make_axis(1)])
    x = ng.variable(input_axes)

    pad = ng.pad(x, [0, 1])

    assert pad.axes[0] == x.axes[0]
    assert pad.axes[1] != x.axes[1]
Example 5
def test_pad_mixed():
    """
    mix 0 padding with non-0 padding
    """

    N = ng.make_axis(1)
    M = ng.make_axis(1)

    x = ng.variable([N, M])

    pad = ng.pad(x, [0, 1])

    assert pad.axes[0] == x.axes[0]
    assert pad.axes[1] != x.axes[1]
Example 6
def Pad(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
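    """Add padding to the input tensor."""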
    pads = onnx_node.get_attribute_value('pads')
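    # ONNX 'pads' is a flat list [x1_begin, x2_begin, ..., x1_end, x2_end];
    # split_pads_into_pairs (below) regroups it into per-axis pairs.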
    constant = 'constant'
    mode = onnx_node.get_attribute_value(
        'mode', constant)  # 'constant', 'reflect' or 'edge'
    value = onnx_node.get_attribute_value('value', 0)

    if mode != constant or value != 0:
        raise NotImplementedError(
            'Pad node (%s): only constant padding with value=0 '
            'is supported.' % onnx_node.name)

    # Split paddings into pairs for each axis
    pads = list(split_pads_into_pairs(pads))
    return cast_to_pos_axes(ng.pad(ng_inputs[0], pads))
Example 7
def test_pad_edge():
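    """Pad with mode='edge' replicates the border values of the input."""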
    input_data = np.arange(1, 13).reshape([3, 4])
    pads_begin = np.array([0, 1], dtype=np.int32)
    pads_end = np.array([2, 3], dtype=np.int32)

    input_param = ng.parameter(input_data.shape, name='input', dtype=np.int32)
    model = ng.pad(input_param, pads_begin, pads_end, 'edge')

    runtime = get_runtime()
    computation = runtime.computation(model, input_param)
    result = computation(input_data)

    expected = np.array([[1, 1, 2, 3, 4, 4, 4, 4], [5, 5, 6, 7, 8, 8, 8, 8],
                         [9, 9, 10, 11, 12, 12, 12, 12],
                         [9, 9, 10, 11, 12, 12, 12, 12],
                         [9, 9, 10, 11, 12, 12, 12, 12]])
    assert np.allclose(result, expected)
Example 8
def Pad(onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Add padding to the input tensor."""
    data = ng_inputs[0]
    # Operator set version 1
    paddings = onnx_node.get_attribute_value('paddings')
    # Operator set version >= 2
    pads = onnx_node.get_attribute_value('pads')

    pads = pads if pads is not None else paddings
    if pads is None:
        raise ValueError('Pad node (%s): pads attribute is required.' %
                         onnx_node.name)

    constant = 'constant'
    mode = onnx_node.get_attribute_value(
        'mode', constant)  # 'constant', 'reflect' or 'edge'
    value = onnx_node.get_attribute_value('value', 0.)

    if len(pads) != 2 * len(data.shape):
        raise ValueError(
            'Pad node (%s): \'pads\' rank (%d) should be double the input '
            'tensor rank (%d).' % (onnx_node.name, len(pads), len(data.shape)))

    # Operator set version 1 accepts only positive values, while operator set
    # version 2 uses negative values to remove padded elements. Here we check
    # only for the latter case.
    if any(pad < 0 for pad in pads):
        raise NotImplementedError(
            'Pad node (%s): removing padding elements is not supported yet.' %
            onnx_node.name)
    if mode != constant:
        raise NotImplementedError(
            'Pad node (%s): only constant padding is supported.' %
            onnx_node.name)

    # Split paddings into (below, above) pairs for each axis
    padding_below, padding_above = split_pads_into_pairs(pads)
    return ng.pad(data,
                  ng.constant(value, dtype=get_dtype(data.get_element_type())),
                  padding_below, padding_above)
Example 9
def test_pad_constant():
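    """Pad with mode='constant' fills the new elements with arg_pad_value (100)."""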
    input_data = np.arange(1, 13).reshape([3, 4])
    pads_begin = np.array([0, 1], dtype=np.int32)
    pads_end = np.array([2, 3], dtype=np.int32)

    input_param = ng.parameter(input_data.shape, name="input", dtype=np.int32)
    model = ng.pad(input_param, pads_begin, pads_end, "constant", arg_pad_value=np.array(100, dtype=np.int32))

    runtime = get_runtime()
    computation = runtime.computation(model, input_param)
    result = computation(input_data)

    expected = np.array(
        [
            [100, 1, 2, 3, 4, 100, 100, 100],
            [100, 5, 6, 7, 8, 100, 100, 100],
            [100, 9, 10, 11, 12, 100, 100, 100],
            [100, 100, 100, 100, 100, 100, 100, 100],
            [100, 100, 100, 100, 100, 100, 100, 100],
        ]
    )
    assert np.allclose(result, expected)
Example 10
def test_padding(transformer_factory):
    """TODO."""
    C = ng.make_axis()
    D = ng.make_axis()
    M = ng.make_axis()
    N = ng.make_axis()

    tests = [{
        'tensor': [[1, 3], [2, 5]],
        'tensor_axes': (C, D),
        'padding': [(0, 1), (1, 0)],
        'padded_axes': (M, N),
        'axes_lengths': {
            C: 2,
            D: 2,
            M: 3,
            N: 3
        }
    }, {
        'tensor': [[1, 4, 5], [1, 4, 6]],
        'tensor_axes': (C, D),
        'padding': [(0, 1), 1],
        'padded_axes': None,
        'axes_lengths': {
            C: 2,
            D: 3
        }
    }]

    for test in tests:
        with ExecutorFactory() as ex:
            for axis, length in test['axes_lengths'].items():
                axis.length = length
            tensor_axes = test['tensor_axes']
            tensor_np = np.array(test['tensor'], dtype='float32')
            tensor = ng.placeholder(tensor_axes)
            padding = test['padding']
            padded_axes = test['padded_axes']
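            # ng.pad takes per-axis padding as (before, after) pairs; a bare
            # int means symmetric padding, mirroring to_tuple() below.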
            padded = ng.pad(tensor, padding, padded_axes)
            computed_val_fun = ex.executor(padded, tensor)

            # Test backpropagation
            numeric_deriv_fun = ex.numeric_derivative(padded, tensor, delta)
            sym_deriv_fun = ex.derivative(padded, tensor)

            def to_tuple(p):
                """
                Normalize a padding entry: an int p becomes the symmetric
                pair (p, p); a (before, after) tuple is returned unchanged.
                """
                return (p, p) if isinstance(p, int) else p

            np_padding = tuple(to_tuple(p) for p in padding)
            expected_val = np.pad(tensor_np, np_padding, mode='constant')

            computed_val = computed_val_fun(tensor_np)
            assert np.array_equal(expected_val, computed_val)

            numeric_deriv = numeric_deriv_fun(tensor_np)
            sym_deriv = sym_deriv_fun(tensor_np)

            assert ng.testing.allclose(numeric_deriv,
                                       sym_deriv,
                                       rtol=rtol,
                                       atol=atol)