Example 1
def as_elementwise_compatible_nodes(*input_values):  # type: (*NodeInput) -> List[Node]
    """Return all input values as ngraph Nodes with the same shape and element type.

    Scalar values will be converted to ngraph Constant Nodes.
    """
    input_nodes = [node for node in input_values
                   if issubclass(type(node), Node)]  # type: List[Node]

    if not input_nodes:
        raise NotImplementedError('Operations on scalars only are not supported.')

    shapes = {tuple(node.shape) for node in input_nodes}
    if len(shapes) > 1:
        log.warning('More than one different shape in input nodes %s.', input_nodes)

    types = {node.get_element_type() for node in input_nodes}
    if len(types) > 1:
        log.warning('More than one different data type in input nodes %s.', input_nodes)

    sorted_shapes = sorted(shapes, key=len)
    broadcast_shape = sorted_shapes.pop()
    broadcast_dtype = get_dtype(types.pop())

    output_nodes = []
    for input_value in input_values:
        if issubclass(type(input_value), Node):
            input_value = ng.broadcast(input_value, broadcast_shape)
            output_nodes.append(input_value)
        else:
            input_value = make_constant_node(input_value, dtype=broadcast_dtype)
            output_nodes.append(ng.broadcast(input_value, broadcast_shape))

    return output_nodes
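
As a cross-check of the helper's intent, here is a minimal NumPy sketch (illustrative only, not the ngraph API) of what "elementwise compatible" means: every value materialised with one common shape and dtype. Note NumPy computes the full broadcast shape, while the helper above simply takes the longest input shape.

import numpy as np

def as_compatible_arrays(*values):
    """Rough NumPy analogue: bring all inputs to one shape and dtype."""
    arrays = [np.asarray(v) for v in values]
    shape = np.broadcast_shapes(*(a.shape for a in arrays))
    dtype = np.result_type(*arrays)
    return [np.broadcast_to(a.astype(dtype), shape) for a in arrays]

a, b = as_compatible_arrays(np.ones((2, 3)), 5)
assert a.shape == b.shape == (2, 3)
assert a.dtype == b.dtype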
Example 2
    def Times(self, cntk_op, inputs):
        """
        Returns input[0] x input[1] (matrix multiplication).

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            A ngraph Op.
        """
        cast_0, cast_1 = inputs

        if len(cast_0.axes) == 1 and len(cast_1.axes) == 1:
            pass
        elif len(cast_0.axes) == 1:
            temp = next((x for x in cast_1.axes if x.length == 1), None)
            if temp is None:
                temp = ng.make_axis(1)
            cast_0 = ng.broadcast(cast_0, [temp, cast_0.axes])
        elif len(cast_1.axes) == 1:
            temp = next((x for x in cast_0.axes if x.length == 1), None)
            if temp is None:
                temp = ng.make_axis(1)
            cast_1 = ng.broadcast(cast_1, [temp, cast_1.axes])

        cast_0 = ng.cast_axes(cast_0, [cast_0.axes[0], cast_1.axes[0]])
        return ng.dot(cast_0, cast_1).named(cntk_op.uid)
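
The 1-D promotion above mirrors what NumPy's dot does automatically: a vector operand is extended with a dummy length-1 axis, multiplied, and the dummy axis dropped. A short NumPy illustration of that equivalence:

import numpy as np

m = np.arange(6, dtype=float).reshape(2, 3)
v = np.array([1., 2., 3.])

# Explicit promotion: treat v as a (1, 3) row, multiply, drop the dummy axis.
explicit = (v[np.newaxis, :] @ m.T).squeeze(0)

# np.dot / @ performs the same promotion automatically for 1-D operands.
assert np.allclose(explicit, m @ v)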
Example 3
def BatchNormalization(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Carry out batch normalization."""
    x, scale, bias, mean, var = ng_inputs

    is_test = onnx_node.get_attribute_value('is_test', 1)
    spatial = onnx_node.get_attribute_value('spatial', 1)
    epsilon = onnx_node.get_attribute_value('epsilon', 1e-3)

    # @TODO: Implement learning mode support
    # momentum = onnx_node.get_attribute_value('momentum', 0.99)

    if not is_test:
        raise NotImplementedError('BatchNormalization node (%s): only `is_test` mode is '
                                  'currently supported.' % onnx_node.name)
    if not spatial:
        raise NotImplementedError('BatchNormalization node (%s): only `spatial` mode is '
                                  'currently supported.' % onnx_node.name)

    mean = ng.broadcast(mean, x.shape, axis=1)
    scale = ng.broadcast(scale, x.shape, axis=1)
    var = ng.broadcast(var, x.shape, axis=1)
    bias = ng.broadcast(bias, x.shape, axis=1)
    epsilon = ng.broadcast(ng.constant(epsilon, dtype=get_dtype(x.get_element_type())),
                           x.shape, axis=1)
    return (scale * ((x - mean) * (1 / (ng.sqrt(var + epsilon)))) + bias)
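
The returned expression is standard inference-mode batch normalization, y = scale * (x - mean) / sqrt(var + epsilon) + bias, applied per channel (axis 1). A quick NumPy check of the formula (shapes assumed, not the ngraph runtime):

import numpy as np

rng = np.random.default_rng(0)
x = rng.standard_normal((2, 3, 4, 4)).astype(np.float32)   # NCHW
scale = np.ones(3, np.float32)
bias = np.zeros(3, np.float32)
mean = x.mean(axis=(0, 2, 3))
var = x.var(axis=(0, 2, 3))
eps = 1e-3

def per_channel(v):
    # counterpart of ng.broadcast(v, x.shape, axis=1): (C,) -> (1, C, 1, 1)
    return v[np.newaxis, :, np.newaxis, np.newaxis]

y = (per_channel(scale) * (x - per_channel(mean))
     / np.sqrt(per_channel(var) + eps) + per_channel(bias))

# With scale=1 and bias=0, each channel is (approximately) standardized.
assert np.allclose(y.mean(axis=(0, 2, 3)), 0, atol=1e-3)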
Example 4
def as_elementwise_compatible_nodes(*input_values):  # type: (*NodeInput) -> List[Node]
    """Return all input values as ngraph Nodes with the same shape and element type.

    Scalar values will be converted to ngraph Constant Nodes.
    """
    input_nodes = [node for node in input_values
                   if issubclass(type(node), Node)]  # type: List[Node]

    if not input_nodes:
        raise NotImplementedError('Operations on scalars only are not supported.')

    shapes = {tuple(node.shape) for node in input_nodes}
    if len(shapes) > 1:
        log.warning('More than one different shape in input nodes %s.', input_nodes)

    types = [node.get_element_type() for node in input_nodes]
    unique_types = {repr(t) for t in types}
    if len(unique_types) > 1:
        log.warning('More than one different data type in input nodes %s.', input_nodes)

    sorted_shapes = sorted(shapes, key=len)
    broadcast_shape = sorted_shapes.pop()
    broadcast_dtype = get_dtype(types.pop())

    output_nodes = []
    for input_value in input_values:
        if issubclass(type(input_value), Node):
            input_value = ng.broadcast(input_value, broadcast_shape)
            output_nodes.append(input_value)
        else:
            input_value = make_constant_node(input_value, dtype=broadcast_dtype)
            output_nodes.append(ng.broadcast(input_value, broadcast_shape))

    return output_nodes
Example 5
    def cast_axes_for_compound_op(self, inputs):
        left, right = inputs

        left_dim = len(left.axes)
        right_dim = len(right.axes)

        # pad left and right axis to be the same length, align right
        result_dim = max(left_dim, right_dim)
        left_axes_pad = [
            ng.make_axis(length=1) for _ in range(result_dim - left_dim)
        ] + list(left.axes)
        right_axes_pad = [
            ng.make_axis(length=1) for _ in range(result_dim - right_dim)
        ] + list(right.axes)
        result_axes = [
            ng.make_axis(length=max(l.length, r.length))
            for l, r in zip(left_axes_pad, right_axes_pad)
        ]

        # broadcast left / right, introducing dummy length 1 axes
        left = ng.broadcast(left, left_axes_pad)
        right = ng.broadcast(right, right_axes_pad)

        # make two-way map of lr matching axes and map for result axes
        lr_axes_map = dict()
        result_axes_map = dict()
        for l, r, re in zip(left.axes, right.axes, result_axes):
            lr_axes_map[l] = r
            lr_axes_map[r] = l
            result_axes_map[l] = re
            result_axes_map[r] = re

        # get left / right slice
        left_slice = []
        right_slice = []
        for l, r in zip(left.axes, right.axes):
            if l.length == 1 and r.length != 1:
                left_slice.append(0)
            else:
                left_slice.append(slice(None))
            if r.length == 1 and l.length != 1:
                right_slice.append(0)
            else:
                right_slice.append(slice(None))

        # perform slicing
        left_sliced = ng.tensor_slice(left, left_slice)
        right_sliced = ng.tensor_slice(right, right_slice)

        # now cast the right_sliced to left_sliced from the axis map
        right_casted_axes = []
        for r in right_sliced.axes:
            if r in lr_axes_map and lr_axes_map[r] in left_sliced.axes:
                right_casted_axes.append(lr_axes_map[r])
            else:
                right_casted_axes.append(r)
        right_sliced_casted = ng.cast_axes(right_sliced, right_casted_axes)

        return left_sliced, right_sliced_casted
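
All of the padding, broadcasting and slicing above implements NumPy's broadcasting rule on named axes. The shape arithmetic alone is short in plain Python; a sketch of the same right-aligned rule:

import numpy as np

def numpy_style_result_shape(left_shape, right_shape):
    """Right-align the two shapes, pad with 1s, take the elementwise max."""
    ndim = max(len(left_shape), len(right_shape))
    l = (1,) * (ndim - len(left_shape)) + tuple(left_shape)
    r = (1,) * (ndim - len(right_shape)) + tuple(right_shape)
    if any(a != b and 1 not in (a, b) for a, b in zip(l, r)):
        raise ValueError('incompatible shapes')
    return tuple(max(a, b) for a, b in zip(l, r))

assert numpy_style_result_shape((5, 4), (1,)) == (5, 4)
assert numpy_style_result_shape((256, 256, 3), (3,)) == (256, 256, 3)
assert numpy_style_result_shape((8, 1, 6, 1), (7, 1, 5)) == (8, 7, 6, 5)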
Example 6
def reorder_axes(input_tensor, input_template, output_template):
    # type: (TensorOp, str, str) -> TensorOp
    """
    Reorder input_tensor axes based on a template defined by two strings.

    Each letter of the template string denotes an axis. If a letter is not present in the
    input_axes string, but is present in output_axes, a new axis with length=1 will be added.

    E.g. `reorder_axes(input_tensor, 'NCHW', 'CDHWN')` will add an axis named D with length=1

    :param input_tensor: ngraph op, with a set of axes matching input_template
    :param input_template: string with one letter for each axis e.g. 'NCHW'
    :param output_template: string with one letter for each axis in a different order e.g. 'CHWN'
    :return: broadcast Op which reorders the axes of the input tensor
    """
    if not len(set(input_template)) == len(input_template):
        raise ValueError('Input axes names cannot repeat.')

    if not len(set(output_template)) == len(output_template):
        raise ValueError('Output axes names cannot repeat.')

    output_axes = []
    for output_axis_name in output_template:
        if output_axis_name in input_template:
            output_axes.append(input_tensor.axes[input_template.index(output_axis_name)])
        else:
            output_axes.append(ng.make_axis(name=output_axis_name, length=1))

    return ng.broadcast(input_tensor, axes=output_axes)
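
In unnamed-axis terms, reorder_axes is a transpose plus insertion of length-1 axes for letters present only in the output template. A hypothetical NumPy counterpart (reorder_axes_np is illustrative, not part of any library):

import numpy as np

def reorder_axes_np(array, input_template, output_template):
    """Transpose + expand_dims according to two axis-letter templates."""
    out = array
    template = input_template
    # Append missing axes at the end, each with length 1.
    for letter in output_template:
        if letter not in template:
            out = np.expand_dims(out, axis=-1)
            template += letter
    order = [template.index(letter) for letter in output_template]
    return np.transpose(out, order)

x = np.zeros((2, 3, 4, 5))                     # NCHW
y = reorder_axes_np(x, 'NCHW', 'CDHWN')
assert y.shape == (3, 1, 4, 5, 2)              # D added with length 1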
Example 7
def broadcast_for_binary_operation(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """
    Cast shape of the right operand to make ops compatible for an element-wise binary operation.

    Casting is based on `broadcast` and `axis` attributes of an ONNX node.

    :param onnx_node: wrapped ONNX node
    :param ng_inputs: left and right operand
    :return: left and right operand after broadcasting
    """
    left = ng_inputs[0]
    right = ng_inputs[1]

    dimensions_identical = left.shape == right.shape
    if dimensions_identical:
        return left, right

    broadcast = onnx_node.get_attribute_value('broadcast', 0)
    if not broadcast:
        logger.warning(
            '%s node (%s): operands have different dimensions, and "broadcast"'
            ' attribute is not set.', onnx_node.op_type, onnx_node.name)
        return left, right

    start_axis = onnx_node.get_attribute_value(
        'axis')  # start of mutually equal shape
    right = ng.broadcast(right, left.shape, start_axis)
    return left, right
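
Unlike NumPy's right-aligned rule, this legacy ONNX broadcast aligns the right operand at position `axis` of the left operand's shape. A NumPy sketch of that alignment (onnx_axis_broadcast is illustrative):

import numpy as np

def onnx_axis_broadcast(left, right, start_axis):
    """Pad `right` with length-1 axes so it starts at `start_axis` of `left`."""
    shape = [1] * left.ndim
    shape[start_axis:start_axis + right.ndim] = right.shape
    return np.broadcast_to(right.reshape(shape), left.shape)

left = np.zeros((2, 3, 4, 5))
right = np.ones((3, 4))
out = onnx_axis_broadcast(left, right, start_axis=1)
assert out.shape == (2, 3, 4, 5)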
Example 8
    def Reshape(self, cntk_op, inputs):
        """
        Returns input having reinterpreted tensor dimensions.

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            A ngraph Op.
        """
        assert len(inputs) == 1

        in_axes = list(inputs[0].axes)
        out_axes = []
        for dim in cntk_op.shape:
            found = False
            for axis in in_axes:
                if axis.length == dim:
                    found = True
                    out_axes.append(axis)
                    in_axes.remove(axis)
                    break
            if not found:
                out_axes.append(ng.make_axis(dim))

        out_axes += in_axes
        return ng.broadcast(inputs[0], out_axes).named(cntk_op.uid)
Example 9
    def _expand_filters_axes(self, filters, C):
        """
        Expand and cast 1D or 2D filter into 3D filter.

        Arguments:
            axes: Convolution filter's axes.

        Returns:
            Expanded list of filter's axes.
        """
        axes = filters.axes
        dim = len(axes)
        if dim == 5:
            O, _, T, M1, M2 = axes
            filters = ng.cast_axes(filters, [O, C, T, M1, M2])
        elif dim == 4:
            O, _, M1, M2 = axes
            filters = ng.cast_axes(filters, [O, C, M1, M2])
            T = ng.make_axis(1)
        elif dim == 3:
            O, M1, M2 = axes
            T = ng.make_axis(1)
        elif dim == 2:
            O, M1 = axes
            T = ng.make_axis(1)
            M2 = ng.make_axis(1)
        elif dim == 1:
            O, = axes
            T = ng.make_axis(1)
            M1 = ng.make_axis(1)
            M2 = ng.make_axis(1)
        else:
            raise ValueError("Convolution filter must have 1 to 5 axes.")

        return ng.broadcast(filters, [C, T, M1, M2, O])
Example 10
    def ExpandDims(self, tf_node, inputs):
        """
        Inserts a dimension of 1 into a tensor's shape.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            input, dim, name
        """
        # get input
        tensor, dim = inputs[0], int(inputs[1].const)

        # check `-1-input.dims() <= dim <= input.dims()`
        input_ndims = len(tensor.axes.lengths)
        assert -1 - input_ndims <= dim <= input_ndims

        # deal with negative number
        if dim < 0:
            dim = input_ndims + 1 + dim

        # create new axis
        one_axis = ng.make_axis(length=1)

        # get output axis
        pre_axis = [axis for axis in tensor.axes[:dim]]  # avoid FlattenedAxis
        pos_axis = [axis for axis in tensor.axes[dim:]]  # avoid FlattenedAxis
        out_axis = ng.make_axes(pre_axis + [one_axis] + pos_axis)

        # broadcast
        return ng.broadcast(tensor, out_axis)
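
The dim bookkeeping matches np.expand_dims, including the accepted range for negative values; a quick NumPy cross-check:

import numpy as np

x = np.zeros((2, 3))

# dim may range over [-1 - ndim, ndim]; negative values count from the end.
assert np.expand_dims(x, 0).shape == (1, 2, 3)
assert np.expand_dims(x, 2).shape == (2, 3, 1)
assert np.expand_dims(x, -1).shape == (2, 3, 1)   # -1 -> ndim + 1 + (-1) = 2
assert np.expand_dims(x, -3).shape == (1, 2, 3)   # -3 -> ndim + 1 + (-3) = 0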
Example 11
def ReduceMean(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute the mean value of the input tensor's elements along the provided axes."""
    input_shape = list(ng_inputs[0].shape)
    sum_node = make_reduction_op(ng.sum, onnx_node, ng_inputs[0])
    reduction_axes = get_reduction_axes(onnx_node, ng_inputs[0])
    avg_elem_count = np.prod([input_shape[x] for x in reduction_axes])
    const_node = ng.broadcast(ng.constant(avg_elem_count, get_dtype(sum_node.get_element_type())),
                              sum_node.shape)
    return ng.divide(sum_node, const_node)
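
So the mean is computed as a sum divided by the element count of the reduced axes, with the count broadcast to the sum's shape. The same arithmetic verified in NumPy (reduction axes assumed):

import numpy as np

x = np.arange(24, dtype=np.float32).reshape(2, 3, 4)
reduction_axes = (0, 2)

count = np.prod([x.shape[i] for i in reduction_axes])   # 2 * 4 = 8
manual = x.sum(axis=reduction_axes) / count
assert np.allclose(manual, x.mean(axis=reduction_axes))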
Example 12
def test_idempotent_axes_c():
    """
    Test axes transformations with autodiff, case c, with broadcast,
    slice, cast and dim-shuffle.
    """
    with ExecutorFactory() as ex:
        axes = ng.make_axes([ng.make_axis(3), ng.make_axis(1)])
        result_axes = [ng.make_axis(length=axis.length) for axis in axes]

        # variable
        w = ng.variable(axes, initial_value=np.ones((3, 1)))

        # broadcast l / r, introducing dummy length 1 axes
        l = ng.broadcast(w, axes)
        r = ng.broadcast(w, axes)

        # slice
        axes_slice = [slice(None, None, None), slice(None, None, None)]
        l_sliced = ng.tensor_slice(l, axes_slice)
        r_sliced = ng.tensor_slice(r, axes_slice)

        # cast r
        r_sliced_casted = ng.cast_axes(r_sliced, axes)

        # perform add
        result = ng.add(l_sliced, r_sliced_casted)

        # cast / dimshuffle
        result = ng.cast_axes(result, result_axes)
        result = ng.axes_with_order(result, result_axes)

        # cost and grad
        cost = ng.sum(result, reduction_axes=result.axes)
        grad = ng.deriv(cost, w)

        grad_comp = ex.executor(grad)
        cost_comp = ex.executor(cost)

        cost_comp_ng = cost_comp()
        grad_comp_ng = grad_comp()
        grad_comp_np = np.ones((3, 1)) * 2.
        assert cost_comp_ng == 6.0
        assert np.array_equal(grad_comp_ng, grad_comp_np)
Example 13
def test_broadcast_numpy():
    data_shape = [16, 1, 1]
    target_shape_shape = [4]

    data_parameter = ng.parameter(data_shape, name="Data", dtype=np.float32)
    target_shape_parameter = ng.parameter(target_shape_shape, name="Target_shape", dtype=np.int64)

    node = ng.broadcast(data_parameter, target_shape_parameter)

    assert node.get_type_name() == "Broadcast"
    assert node.get_output_size() == 1
Example 14
def test_broadcast_bidirectional():
    data_shape = [16, 1, 1]
    target_shape_shape = [4]

    data_parameter = ng.parameter(data_shape, name="Data", dtype=np.float32)
    target_shape_parameter = ng.parameter(target_shape_shape, name="Target_shape", dtype=np.int64)

    node = ng.broadcast(data_parameter, target_shape_parameter, "BIDIRECTIONAL")

    assert node.get_type_name() == "Broadcast"
    assert node.get_output_size() == 1
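
In bidirectional mode the shapes are right-aligned and each output dimension is the maximum of the pair, i.e. both operands may contribute to the result shape. A NumPy illustration (the concrete target-shape values below are assumed, since in the test they are a runtime parameter):

import numpy as np

data_shape = (16, 1, 1)
target_shape = (1, 16, 50, 50)   # hypothetical runtime value of Target_shape

# Bidirectional broadcast: right-align and take the per-dimension maximum.
assert np.broadcast_shapes(data_shape, target_shape) == (1, 16, 50, 50)

# The data's 16 survives where the target has 1, and vice versa.
assert np.broadcast_shapes((16, 1, 1), (1, 1, 50, 1)) == (1, 16, 50, 1)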
Example 15
def test_scalar_broadcast():
    """
    Test broadcasting a scalar into a tensor
    """
    with ExecutorFactory() as ex:
        x_axes = ng.make_axes()
        broadcast_axes = ng.make_axes([ng.make_axis(2), ng.make_axis(3)])
        x = ng.constant(1., axes=x_axes)
        z = ng.broadcast(x, axes=broadcast_axes)
        z_comp = ex.executor(z)
        assert np.array_equal(z_comp(), np.ones(broadcast_axes.lengths))
Example 16
def test_broadcast_deriv_reorder(transformer_factory):
    H = ng.make_axis(2)
    W = ng.make_axis(3)

    x = ng.constant(np.random.rand(2, 3), axes=[H, W])
    x_broadcast = ng.broadcast(x, [W, H])
    x_sum = ng.sum(x_broadcast, out_axes=())
    dx = ng.deriv(x_sum, x)

    with ExecutorFactory() as ex:
        dx_fun = ex.executor(dx)
        ng.testing.assert_allclose(dx_fun(), np.ones((2, 3)))
Example 17
def test_idempotent_axes_b():
    """
    Test axes transformations with autodiff, case b, with broadcast applied
    to the same tensor.
    """
    with ExecutorFactory() as ex:
        axes = ng.make_axes([ng.make_axis(3), ng.make_axis(1)])

        w = ng.variable(axes, initial_value=np.ones((3, 1)))
        l = ng.broadcast(w, axes)
        r = ng.broadcast(w, axes)
        result = ng.add(l, r)

        result = ng.cast_axes(result, axes)
        cost = ng.sum(result, reduction_axes=axes)
        grad = ng.deriv(cost, w)

        grad_comp = ex.executor(grad)
        cost_comp = ex.executor(cost)

        assert cost_comp() == 6.0
        assert np.array_equal(grad_comp(), np.ones((3, 1)) * 2.)
Example 18
def numpy_style_broadcast_for_binary_operation(onnx_node, ng_inputs):
    # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """
    Cast shape of two nodes to make them compatible for an element-wise binary operation.

    :param onnx_node: a wrapped ONNX node
    :param ng_inputs: left and right node (inputs of the binary op)
    :return: left and right node after broadcasting
    """
    left = ng_inputs[0]
    right = ng_inputs[1]

    dimensions_identical = list(left.shape) == list(right.shape)
    if dimensions_identical:
        return left, right

    try:
        output_shape, left_full_shape, right_full_shape = numpy_style_broadcast_output_shape(
            left.shape, right.shape)
    except UserInputError:
        raise UserInputError(
            '%s node (%s): Unable to broadcast shapes %s and %s.' %
            (onnx_node.op_type, onnx_node.name, left.shape, right.shape))

    if list(right.shape) != output_shape:
        one_pos = [i for i, dim in enumerate(right_full_shape) if dim == 1]
        right = ng.reshape(right,
                           [dim for dim in right.shape if dim != 1])  # Squeeze
        right = ng.broadcast(right, output_shape, broadcast_axes=one_pos)

    if list(left.shape) != output_shape:
        one_pos = [i for i, dim in enumerate(left_full_shape) if dim == 1]
        left = ng.reshape(left, [dim for dim in left.shape if dim != 1])
        left = ng.broadcast(left, output_shape, broadcast_axes=one_pos)

    return left, right
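
The squeeze-then-regrow trick works because this broadcast variant only adds new axes at the positions given by broadcast_axes; length-1 axes must first be removed and then reintroduced at the recorded positions. The same two steps in NumPy:

import numpy as np

right = np.arange(3, dtype=np.float32).reshape(1, 3, 1)
output_shape = (2, 3, 4)

one_pos = [i for i, dim in enumerate(right.shape) if dim == 1]     # [0, 2]
squeezed = right.reshape([d for d in right.shape if d != 1])       # (3,)
regrown = np.expand_dims(squeezed, axis=tuple(one_pos))            # (1, 3, 1)
broadcast = np.broadcast_to(regrown, output_shape)
assert broadcast.shape == (2, 3, 4)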
Example 19
def test_broadcast():

    element_type = Type.f32
    A = Parameter(element_type, Shape([3]))
    parameter_list = [A]
    function = Function([ng.broadcast(A, [3, 3])], parameter_list, "test")

    runtime = get_runtime()
    computation = runtime.computation(function, *parameter_list)
    result = computation(np.array([1, 2, 3], dtype=np.float32))[0]

    a_arr = np.array([[0], [0], [0]], dtype=np.float32)
    b_arr = np.array([[1, 2, 3]], dtype=np.float32)
    expected = np.add(a_arr, b_arr)
    assert np.allclose(result, expected)
Example 20
    def _expand_input_axes(self, inputs):
        """
        Expand 1D or 2D input into 3D input.

        Arguments:
            axes: Convolution input's axes.

        Returns:
            Expanded list of input's axes.
        """
        axes = inputs.axes
        dim = len(axes)
        batch = axes.batch_axis()

        if dim == 5:
            C, D, H, W, N = axes
        elif dim == 4:
            if batch:
                C, H, W, N = axes
                D = ng.make_axis(1)
            else:
                C, D, H, W = axes
                N = ng.make_axis(1, 'N')
        elif dim == 3:
            if batch:
                H, W, N = axes
                C = ng.make_axis(1)
                D = ng.make_axis(1)
            else:
                C, H, W = axes
                D = ng.make_axis(1)
                N = ng.make_axis(1, 'N')
        elif dim == 2:
            if batch:
                H, N = axes
                C = ng.make_axis(1)
                D = ng.make_axis(1)
                W = ng.make_axis(1)
            else:
                H, W = axes
                C = ng.make_axis(1)
                D = ng.make_axis(1)
                N = ng.make_axis(1, 'N')
        else:
            raise ValueError("Convolution input must have 2 to 5 axes.")

        return ng.broadcast(inputs, [C, D, H, W, N])
Example 21
def test_prod_constant(transformer_factory):
    """
    Test reduce product of constants
    """
    A0 = ng.make_axis(length=2)
    A1 = ng.make_axis(length=3)
    A2 = ng.make_axis(length=4)

    # ngraph ops
    const_3d = ng.broadcast(ng.constant(2., axes=[]), axes=[A0, A1, A2])
    prod_0 = ng.prod(const_3d, reduction_axes=[A0])
    prod_1 = ng.prod(const_3d, reduction_axes=[A1])
    prod_2 = ng.prod(const_3d, reduction_axes=[A2])
    prod_0_1 = ng.prod(const_3d, reduction_axes=[A0, A1])
    prod_0_2 = ng.prod(const_3d, reduction_axes=[A0, A2])
    prod_1_2 = ng.prod(const_3d, reduction_axes=[A1, A2])
    prod_0_1_2 = ng.prod(const_3d, reduction_axes=[A0, A1, A2])

    # numpy results
    np_const_3d = np.ones((2, 3, 4)) * 2.
    res_0_np = np.prod(np_const_3d, axis=(0))
    res_1_np = np.prod(np_const_3d, axis=(1))
    res_2_np = np.prod(np_const_3d, axis=(2))
    res_0_1_np = np.prod(np_const_3d, axis=(0, 1))
    res_0_2_np = np.prod(np_const_3d, axis=(0, 2))
    res_1_2_np = np.prod(np_const_3d, axis=(1, 2))
    res_0_1_2_np = np.prod(np_const_3d, axis=(0, 1, 2))

    # define comp
    with ExecutorFactory() as ex:
        comps = ex.executor(
            [prod_0, prod_1, prod_2, prod_0_1, prod_0_2, prod_1_2, prod_0_1_2])

        (res_0_ng, res_1_ng, res_2_ng, res_0_1_ng,
         res_0_2_ng, res_1_2_ng, res_0_1_2_ng) = comps()

    np.testing.assert_allclose(res_0_np, res_0_ng)
    np.testing.assert_allclose(res_1_np, res_1_ng)
    np.testing.assert_allclose(res_2_np, res_2_ng)
    np.testing.assert_allclose(res_0_1_np, res_0_1_ng)
    np.testing.assert_allclose(res_0_2_np, res_0_2_ng)
    np.testing.assert_allclose(res_1_2_np, res_1_2_ng)
    np.testing.assert_allclose(res_0_1_2_np, res_0_1_2_ng)
Example 22
def test_prod_constant(prod_constant):
    """
    Test reduce product of constants
    """
    np_axis, ng_axis, axes_values = prod_constant

    # ngraph op
    const_3d = ng.broadcast(ng.constant(2., axes=[]), axes=axes_values)
    prod = ng.prod(const_3d, reduction_axes=ng_axis)

    # numpy results
    np_const_3d = np.ones((2, 3, 4)) * 2.

    res_np = np.prod(np_const_3d, axis=np_axis)

    # define comp
    with ExecutorFactory() as ex:
        comps = ex.executor(prod)
        res_ng = comps()

    np.testing.assert_allclose(res_np, res_ng)
Example 23
def cast_axes_for_binary_broadcast(onnx_node, ng_inputs):
    # type: (NodeWrapper, List[TensorOp]) -> Tuple[TensorOp, TensorOp]
    """
    Cast axes of the right operand to make ops compatible for an element-wise binary operation.

    Casting is based on `broadcast` and `axis` attributes of an ONNX node.

    :param onnx_node: wrapped ONNX node
    :param ng_inputs: left and right operand
    :return: left and right operand after broadcasting
    """
    left = ng_inputs[0]
    right = ng_inputs[1]

    dimensions_identical = left.axes.lengths == right.axes.lengths
    if dimensions_identical:
        return left, right

    broadcast = onnx_node.get_attribute_value('broadcast', 0)
    if not broadcast:
        logger.warning(
            '%s node (%s): operands have different dimensions, and "broadcast"'
            ' attribute is not set.', onnx_node.op_type, onnx_node.name)
        return left, right

    start_axis = onnx_node.get_attribute_value(
        'axis')  # start of mutually equal shape
    if start_axis is not None:
        # Rename axes in the right operand to match corresponding names in the left operand
        renamed_axes = [
            ng.make_axis(length=axis.length,
                         name='POS_' +
                         str(len(left.axes) - 1 - start_axis - i))
            for i, axis in enumerate(right.axes)
        ]
        right = ng.cast_axes(right, ng.make_axes(axes=renamed_axes))

    right = ng.broadcast(right, axes=left.axes)
    return left, right
Example 24
def test_broadcast():

    element_type = Type.f32
    A = Parameter(element_type, Shape([3]))
    parameter_list = [A]
    function = Function([ng.broadcast(A, [3, 3])], parameter_list, "test")
    backend = Backend.create(test.BACKEND_NAME)

    a = backend.create_tensor(element_type, Shape([3]))
    result = backend.create_tensor(element_type, Shape([3, 3]))

    a.write(util.numpy_to_c(np.array([1, 2, 3], dtype=np.float32)), 12)

    result_arr = np.zeros((3, 3), dtype=np.float32)
    result.write(util.numpy_to_c(result_arr), 36)
    handle = backend.compile(function)
    handle.call([result], [a])
    result.read(util.numpy_to_c(result_arr), 36)

    a_arr = np.array([[0], [0], [0]], dtype=np.float32)
    b_arr = np.array([[1, 2, 3]], dtype=np.float32)
    result_arr_ref = np.add(a_arr, b_arr)

    assert np.allclose(result_arr, result_arr_ref)
Example 25
    def Pool(self, c2_op, inputs):
        """
        Performs max or average pooling on the input.

        Arguments:
            c2_op: NodeDef object, the caffe2 node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the c2_op node.

        Inputs to c2_op:
            input
        """
        supported_pooling = {'MaxPool': 'max', 'AveragePool': 'avg'}

        image = inputs[0]

        # TODO: we assume NCHW, make some assert here?

        # set input axes shape
        ax_N = ng.make_axis(name='N')
        ax_C = ng.make_axis()
        ax_D = ng.make_axis(length=1)
        ax_H = ng.make_axis()
        ax_W = ng.make_axis()
        ng.make_axes([ax_N, ax_C, ax_H, ax_W]).set_shape(image.axes.lengths)

        # create placeholders for output axes
        oC = ng.make_axis(name='C')
        oD = ng.make_axis(length=1, name='D')
        oH = ng.make_axis(name='H')
        oW = ng.make_axis(name='W')

        # spatial kernel size
        kernel_size = [int(val.i) for val in c2_op.arg if val.name == "kernel"]
        if len(kernel_size) != 1:
            raise ValueError("Kernel size must be scalar value")
        # kernel is square
        kernel_h = kernel_w = kernel_size[0]
        kernel_d = kernel_c = 1

        # strides params
        stride_size = [int(val.i) for val in c2_op.arg if val.name == "stride"]
        if len(stride_size) != 1:
            raise ValueError("Stride size must be scalar value")
        stride_h = stride_w = stride_size[0]

        # padding params
        pad_t, pad_b, pad_l, pad_r = \
            _c2_padding(c2_op,
                        in_NHWC=[ax_N.length, ax_H.length, ax_W.length, ax_C.length],
                        kernel_HWIO=[kernel_h, kernel_w, ax_C.length, ax_C.length],
                        stride_NHWC=[1, stride_h, stride_w, 1])
        if pad_t != pad_b or pad_l != pad_r:
            raise NotImplementedError("Requires symmetric padding in ngraph:"
                                      "pad_t(%s) == pad_b(%s) and"
                                      "pad_l(%s) == pad_r(%s)" %
                                      (pad_t, pad_b, pad_l, pad_r))

        # pooling params
        params = dict(op=supported_pooling[c2_op.type],
                      pad_d=0,
                      pad_h=pad_t,
                      pad_w=pad_l,
                      pad_c=0,
                      str_d=1,
                      str_h=stride_h,
                      str_w=stride_w,
                      str_c=1,
                      J=kernel_c,
                      T=kernel_d,
                      R=kernel_h,
                      S=kernel_w)

        # i, o axes
        oC.length = output_dim(ax_C.length, kernel_c, params['pad_c'],
                               params['str_c'])
        oD.length = output_dim(ax_D.length, kernel_d, params['pad_d'],
                               params['str_d'])
        oH.length = output_dim(ax_H.length, kernel_h, params['pad_h'],
                               params['str_h'])
        oW.length = output_dim(ax_W.length, kernel_w, params['pad_w'],
                               params['str_w'])
        ax_i = ng.make_axes([ax_C, ax_D, ax_H, ax_W, ax_N])
        ax_o = ng.make_axes([oC, oD, oH, oW, ax_N])

        # broadcast input / filter axes
        image = ng.cast_axes(image, ng.make_axes([ax_N, ax_C, ax_H, ax_W]))
        image = ng.expand_dims(image, ax_D, 1)  # NCHW -> NDCHW
        image = ng.axes_with_order(image, axes=ax_i)  # NDCHW -> CDHWN

        # pooling
        output = ng.pooling(params, image, axes=ax_o)

        # cast back to NDCHW
        output = ng.broadcast(output, ng.make_axes([ax_N, oD, oC, oH, oW]))

        # slice away the oD
        out_slicing = [slice(None), 0, slice(None), slice(None), slice(None)]
        output = ng.tensor_slice(output, out_slicing)

        return output
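
The output lengths above come from the usual pooling/convolution size arithmetic; output_dim presumably computes floor((input + 2*pad - kernel) / stride) + 1 (an assumption about the helper, but consistent with how it is called). A minimal sketch:

def output_dim(input_len, kernel_len, pad, stride):
    """Standard output-size formula for pooling/convolution (assumed)."""
    return (input_len + 2 * pad - kernel_len) // stride + 1

# 224x224 image, 3x3 kernel, padding 1, stride 2 -> 112x112
assert output_dim(224, 3, 1, 2) == 112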
Example 26
    def Conv(self, c2_op, inputs):
        """
        Computes a 2-D convolution given 4D input and filter tensors.

        Arguments:
            c2_op: NodeDef object, the caffe2 node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the caffe2 node.

        Inputs to c2_op:
            input, weights, bias

        Supports both caffe2 layouts, NHWC and NCHW.
        """
        X, W, bias = inputs

        order = [val.s for val in c2_op.arg if val.name == "order"]
        if len(order) != 1:
            raise ValueError("Convolution must have exactly one order value")
        order = order[0]

        if order not in ("NHWC", "NCHW"):
            raise NotImplementedError("Unsupported order in convolution: {}",
                                      order)

        # set input axes shape
        ax_N = ng.make_axis(name='N')
        ax_C = ng.make_axis()
        ax_D = ng.make_axis(length=1)
        ax_H = ng.make_axis()
        ax_W = ng.make_axis()

        # set kernel axes shape
        ax_kernel_D = ng.make_axis(length=1)
        ax_kernel_H = ng.make_axis()
        ax_kernel_W = ng.make_axis()
        ax_kernel_ofm = ng.make_axis()

        # create placeholders for output axes
        oC = ng.make_axis(name='C')
        oD = ng.make_axis(name='D', length=1)
        oH = ng.make_axis(name='H')
        oW = ng.make_axis(name='W')

        axes_order = {
            'NCHW': {
                'X': [ax_N, ax_C, ax_H, ax_W],
                'W': [ax_kernel_ofm, ax_C, ax_kernel_H, ax_kernel_W]
            },
            'NHWC': {
                'X': [ax_N, ax_H, ax_W, ax_C],
                'W': [ax_kernel_ofm, ax_kernel_H, ax_kernel_W, ax_C]
            },
        }

        ng.make_axes(axes_order[order]['X']).set_shape(X.axes.lengths)
        ng.make_axes(axes_order[order]['W']).set_shape(W.axes.lengths)

        if len(bias.axes) != 1:
            raise ValueError("Bias must be 1D.")
        if ax_kernel_ofm.length != bias.axes.lengths[0]:
            raise ValueError(
                "Bias length must equal the number of output feature maps.")

        # strides params
        stride_size = [int(val.i) for val in c2_op.arg if val.name == "stride"]
        if len(stride_size) != 1:
            raise ValueError("Stride size must be scalar value")
        str_h = str_w = stride_size[0]

        # padding params
        pad_t, pad_b, pad_l, pad_r = \
            _c2_padding(c2_op,
                        in_NHWC=[ax_N.length, ax_H.length, ax_W.length, ax_C.length],
                        kernel_HWIO=[ax_kernel_H.length, ax_kernel_W.length,
                                     ax_C.length, ax_kernel_ofm.length],
                        stride_NHWC=[1, str_h, str_w, 1])

        if pad_t != pad_b or pad_l != pad_r:
            raise NotImplementedError("Requires symmetric padding in ngraph:"
                                      "pad_t(%s) == pad_b(%s) and"
                                      "pad_l(%s) == pad_r(%s)" %
                                      (pad_t, pad_b, pad_l, pad_r))

        # conv params
        params = dict(pad_d=0,
                      pad_h=pad_t,
                      pad_w=pad_l,
                      str_d=1,
                      str_h=str_h,
                      str_w=str_w,
                      dil_d=1,
                      dil_h=1,
                      dil_w=1)

        # input, weight, output axes
        internal_ax_dict = {
            'X':
            ng.make_axes([ax_C, ax_D, ax_H, ax_W, ax_N]),
            'W':
            ng.make_axes(
                [ax_C, ax_kernel_D, ax_kernel_H, ax_kernel_W, ax_kernel_ofm])
        }

        oC.length = ax_kernel_ofm.length
        oH.length = output_dim(ax_H.length, ax_kernel_H.length,
                               params['pad_h'], params['str_h'])
        oW.length = output_dim(ax_W.length, ax_kernel_W.length,
                               params['pad_w'], params['str_w'])
        internal_ax_dict['Y'] = ng.make_axes([oC, oD, oH, oW, ax_N])

        # broadcast input / filter axes
        # flow for NHWC order:                   |  flow for NCHW order:
        # input:                                 |  input:
        #   expand dims: NHWC -> NDHWC           |    expand dims: NCHW -> NDCHW
        #   reorder:     NDHWC -> CDHWN          |    reorder:     NDCHW -> CDHWN
        # weights:                               |  weights:
        #   expand dims: (ofm)HWC -> D(ofm)HWC   |    expand dims: (ofm)CHW -> D(ofm)CHW
        #   reorder:     D(ofm)HWC -> CDHW(ofm)  |    reorder:     D(ofm)CHW -> CDHW(ofm)

        X = ng.cast_axes(X, ng.make_axes(axes_order[order]['X']))
        X = ng.expand_dims(X, ax_D, 1)
        X = ng.axes_with_order(X, axes=internal_ax_dict['X'])
        W = ng.cast_axes(W, ng.make_axes(axes_order[order]['W']))
        W = ng.expand_dims(W, ax_kernel_D, 0)
        W = ng.axes_with_order(W, axes=internal_ax_dict['W'])

        # convolution
        Y = ng.convolution(params, X, W, axes=internal_ax_dict['Y'])

        # cast back to proper format
        Y = ng.broadcast(Y, ng.make_axes([ax_N, oD, oH, oW, oC])) if "NHWC" == order \
            else ng.broadcast(Y, ng.make_axes([ax_N, oD, oC, oH, oW]))  # NCHW

        # slice away the oD
        out_slicing = [slice(None), 0, slice(None), slice(None), slice(None)]
        Y = ng.tensor_slice(Y, out_slicing)

        def _conv_bias_add(c2_op, inputs):
            X, bias = inputs
            bias = ng.cast_axes(bias,
                                axes=ng.make_axes(
                                    [X.axes[1 if 'NCHW' == order else 3]]))
            Y = ng.Add(X, bias)
            return Y

        return _conv_bias_add(c2_op, [Y, bias])
Example 27
    def Conv2D(self, tf_node, inputs):
        """
        Computes a 2-D convolution given 4D input and filter tensors.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            input, filter

        TODO: assume default tensorflow layout NHWC, RSCK,
              need to support NCHW as well
              need to clean up / merge with maxpool

        Notes on output shape:
            https://www.tensorflow.org/api_docs/python/nn.html#convolution
        """
        image, weight = inputs

        # TODO: currently NHWC only
        assert tf_node.attr['data_format'].s.decode("ascii") == "NHWC"

        # set axes shape
        ax_N = ng.make_axis(batch=True)
        ax_C = ng.make_axis(roles=[ar.Channel])
        ax_D = ng.make_axis(roles=[ar.Depth])
        ax_H = ng.make_axis(roles=[ar.Height])
        ax_W = ng.make_axis(roles=[ar.Width])

        ax_T = ng.make_axis(roles=[ar.Depth])
        ax_R = ng.make_axis(roles=[ar.Height])
        ax_S = ng.make_axis(roles=[ar.Width])
        ax_K = ng.make_axis(roles=[ar.Channelout])

        ng.make_axes([ax_N, ax_H, ax_W, ax_C]).set_shape(image.axes.lengths)
        ng.make_axes([ax_R, ax_S, ax_C, ax_K]).set_shape(weight.axes.lengths)
        ax_D.length = 1
        ax_T.length = 1

        # strides params
        tf_strides = [int(s) for s in list(tf_node.attr['strides'].list.i)]
        if len(tf_strides) != 4:
            raise ValueError("Length of strides my be 4.")
        if tf_strides[0] != 1:
            raise NotImplementedError('Strides on batch axis (N) must be 1.')
        if tf_strides[3] != 1:
            raise NotImplementedError('Strides on channel axis (C) must be 1.')
        str_h, str_w = tf_strides[1], tf_strides[2]

        # padding params
        padding = tf_node.attr['padding'].s.decode("ascii")
        pad_t, pad_b, pad_l, pad_r = tf_conv2d_pool_padding(
            image.axes.lengths, weight.axes.lengths, tf_strides, padding)
        if pad_t != pad_b or pad_l != pad_r:
            raise NotImplementedError("Requires symmetric padding in ngraph:"
                                      "pad_t(%s) == pad_b(%s) and"
                                      "pad_l(%s) == pad_r(%s)" %
                                      (pad_t, pad_b, pad_l, pad_r))

        # conv params
        params = dict(pad_d=0,
                      pad_h=pad_t,
                      pad_w=pad_l,
                      str_d=1,
                      str_h=str_h,
                      str_w=str_w)

        # i, f, o axes
        ax_i = ng.make_axes([ax_C, ax_D, ax_H, ax_W, ax_N])
        ax_f = ng.make_axes([ax_C, ax_T, ax_R, ax_S, ax_K])
        ax_o = ng.make_axes([
            ng.make_axis(ax_K.length, name='C', roles=[ar.Channel]),
            spatial_axis(ax_i, ax_f, params['pad_d'], params['str_d'],
                         ar.Depth),
            spatial_axis(ax_i, ax_f, params['pad_h'], params['str_h'],
                         ar.Height),
            spatial_axis(ax_i, ax_f, params['pad_w'], params['str_w'],
                         ar.Width), ax_N
        ])

        # broadcast input / filter axes
        image = ng.cast_axes(image, ng.make_axes([ax_N, ax_H, ax_W, ax_C]))
        image = ng.expand_dims(image, ax_D, 1)  # NHWC -> NDHWC
        image = ng.axes_with_order(image, axes=ax_i)  # NDHWC -> CDHWN
        weight = ng.cast_axes(weight, ng.make_axes([ax_R, ax_S, ax_C, ax_K]))
        weight = ng.expand_dims(weight, ax_T, 0)  # RSCK -> TRSCK
        weight = ng.axes_with_order(weight, axes=ax_f)  # TRSCK -> CTRSK

        # convolution
        output = ng.convolution(params, image, weight, axes=ax_o)

        # cast back to NHWC
        oC, oD, oH, oW, oN = output.axes
        output = ng.broadcast(output, ng.make_axes([oN, oD, oH, oW, oC]))

        # slice away the oD
        out_slicing = [slice(None), 0, slice(None), slice(None), slice(None)]
        output = ng.Slice(output, out_slicing)

        return output
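
tf_conv2d_pool_padding resolves TensorFlow's 'SAME'/'VALID' padding; for 'SAME', the total padding along one spatial axis is max((ceil(in / stride) - 1) * stride + kernel - in, 0), split between the two sides, which is why the importer must reject the asymmetric cases. A hedged sketch of that rule (same_padding_1d is illustrative):

import math

def same_padding_1d(input_len, kernel_len, stride):
    """TensorFlow 'SAME' padding along one axis: (pad_before, pad_after)."""
    out_len = math.ceil(input_len / stride)
    pad_total = max((out_len - 1) * stride + kernel_len - input_len, 0)
    return pad_total // 2, pad_total - pad_total // 2

# Symmetric case accepted by the importer above:
assert same_padding_1d(28, 3, 1) == (1, 1)
# Asymmetric case that would trigger the NotImplementedError:
assert same_padding_1d(28, 2, 1) == (0, 1)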
Example 28
    def MaxPool(self, tf_node, inputs):
        """
        Performs the max pooling on the input.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            input

        TODO: assume default tensorflow layout NHWC, RSCK,
              need to support NCHW as well
              need to clean up / merge with conv2d

        Notes on output shape:
            https://www.tensorflow.org/api_docs/python/nn.html#convolution
        """
        image = inputs[0]

        # TODO: currently NHWC only
        assert tf_node.attr['data_format'].s.decode("ascii") == "NHWC"

        # set axes shape
        ax_N = ng.make_axis(batch=True)
        ax_C = ng.make_axis(roles=[ar.Channel])
        ax_D = ng.make_axis(roles=[ar.Depth])
        ax_H = ng.make_axis(roles=[ar.Height])
        ax_W = ng.make_axis(roles=[ar.Width])
        ng.make_axes([ax_N, ax_H, ax_W, ax_C]).set_shape(image.axes.lengths)
        ax_D.length = 1

        # ksize params
        tf_ksize = [int(s) for s in list(tf_node.attr['ksize'].list.i)]
        if len(tf_ksize) != 4:
            raise ValueError("Length of ksize my be 4.")
        if tf_ksize[0] != 1:
            raise NotImplementedError('Ksize on batch axis (N) must be 1.')
        if tf_ksize[3] != 1:
            raise NotImplementedError('Ksize on channel axis (C) must be 1. '
                                      'Cross-map pooling is not yet implemented.')
        R, S = tf_ksize[1:3]
        T = J = 1

        # strides params
        tf_strides = [int(s) for s in list(tf_node.attr['strides'].list.i)]
        if len(tf_strides) != 4:
            raise ValueError("Length of strides my be 4.")
        if tf_strides[0] != 1:
            raise NotImplementedError('Strides on batch axis (N) must be 1.')
        if tf_strides[3] != 1:
            raise NotImplementedError('Strides on channel axis (C) must be 1.')
        str_h, str_w = tf_strides[1], tf_strides[2]

        # padding params
        padding = tf_node.attr['padding'].s.decode("ascii")
        pad_t, pad_b, pad_l, pad_r = tf_conv2d_pool_padding(
            image.axes.lengths, (R, S, ax_C.length, ax_C.length), tf_strides,
            padding)
        if pad_t != pad_b or pad_l != pad_r:
            raise NotImplementedError("Requires symmetric padding in ngraph:"
                                      "pad_t(%s) == pad_b(%s) and"
                                      "pad_l(%s) == pad_r(%s)" %
                                      (pad_t, pad_b, pad_l, pad_r))

        # pooling params
        params = dict(op='max',
                      pad_d=0,
                      pad_h=pad_t,
                      pad_w=pad_l,
                      pad_c=0,
                      str_d=1,
                      str_h=str_h,
                      str_w=str_w,
                      str_c=1,
                      J=J,
                      T=T,
                      R=R,
                      S=S)

        # i, f, o axes
        ax_i = ng.make_axes([ax_C, ax_D, ax_H, ax_W, ax_N])
        ax_o = ng.make_axes([
            spatial_axis(ax_i, J, params['pad_c'], params['str_c'],
                         ar.Channel),
            spatial_axis(ax_i, T, params['pad_d'], params['str_d'], ar.Depth),
            spatial_axis(ax_i, R, params['pad_h'], params['str_h'], ar.Height),
            spatial_axis(ax_i, S, params['pad_w'], params['str_w'], ar.Width),
            ax_N
        ])

        # broadcast input / filter axes
        image = ng.cast_axes(image, ng.make_axes([ax_N, ax_H, ax_W, ax_C]))
        image = ng.expand_dims(image, ax_D, 1)  # NHWC -> NDHWC
        image = ng.axes_with_order(image, axes=ax_i)  # NDHWC -> CDHWN

        # pooling
        output = ng.pooling(params, image, axes=ax_o)

        # cast back to NHWC
        oC, oD, oH, oW, oN = output.axes
        output = ng.broadcast(output, ng.make_axes([oN, oD, oH, oW, oC]))

        # slice away the oD
        out_slicing = [slice(None), 0, slice(None), slice(None), slice(None)]
        output = ng.Slice(output, out_slicing)

        return output
Example 29
def PRelu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    x, slope = ng_inputs
    x = ng.broadcast(x, x.axes + slope.axes)
    slope = ng.broadcast(slope, axes=x.axes)
    return ng.maximum(slope * x, x)
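
Note that maximum(slope * x, x) coincides with the usual PReLU definition (x for x >= 0, slope * x otherwise) only when slope <= 1, which holds for typical learned slopes. A NumPy check of the identity under that assumption:

import numpy as np

x = np.linspace(-3, 3, 7)
slope = 0.25   # assumed slope <= 1

prelu_def = np.where(x >= 0, x, slope * x)
prelu_max = np.maximum(slope * x, x)
assert np.allclose(prelu_def, prelu_max)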
Example 30
    def _element_wise_binary(self, ng_op, inputs, name=None):
        """
        Element-wise binary operation with broadcast.
        Args:
            ng_op: ngraph Op to be applied.
            inputs: List of ngraph Ops as inputs to this node.
            name: name of the ngraph op
        Returns:
            A ngraph Op corresponding to the element-wise binary op
        """
        # get inputs
        left, right = inputs

        # check shape compatibility
        left_shape = left.axes.lengths
        right_shape = right.axes.lengths
        assert is_compatible_numpy_shape(left_shape, right_shape)

        if left_shape and right_shape and left_shape != right_shape:
            """
            Cast axes in numpy broadcast mapping rule
            1. introduce dummy length 1 axes to match left / right length
            2. keep maps for matching left / right / result axes
            3. slice left / right to remove length 1 axes if not both of them
               are length 1
            4. cast right to left by matching axes
            5. perform binary op
            6. cast and broadcast result
            """

            left_dim = len(left.axes)
            right_dim = len(right.axes)

            # pad left and right axis to be the same length, align right
            result_dim = max(left_dim, right_dim)
            left_axes_pad = [
                ng.make_axis(length=1) for _ in range(result_dim - left_dim)
            ] + list(left.axes)
            right_axes_pad = [
                ng.make_axis(length=1) for _ in range(result_dim - right_dim)
            ] + list(right.axes)
            result_axes = [
                ng.make_axis(length=max(l.length, r.length))
                for l, r in zip(left_axes_pad, right_axes_pad)
            ]

            # broadcast left / right, introducing dummy length 1 axes
            left = ng.broadcast(left, left_axes_pad)
            right = ng.broadcast(right, right_axes_pad)

            # make two-way map of lr matching axes and map for result axes
            lr_axes_map = dict()
            result_axes_map = dict()
            for l, r, re in zip(left.axes, right.axes, result_axes):
                lr_axes_map[l] = r
                lr_axes_map[r] = l
                result_axes_map[l] = re
                result_axes_map[r] = re

            # get left / right slice
            left_slice = []
            right_slice = []
            for l, r in zip(left.axes, right.axes):
                if l.length == 1 and r.length != 1:
                    left_slice.append(0)
                else:
                    left_slice.append(slice(None))
                if r.length == 1 and l.length != 1:
                    right_slice.append(0)
                else:
                    right_slice.append(slice(None))

            # perform slicing
            left_sliced = ng.tensor_slice(left, left_slice)
            right_sliced = ng.tensor_slice(right, right_slice)

            # now cast the right_sliced to left_sliced from the axis map
            right_casted_axes = []
            for r in right_sliced.axes:
                if r in lr_axes_map and lr_axes_map[r] in left_sliced.axes:
                    right_casted_axes.append(lr_axes_map[r])
                else:
                    right_casted_axes.append(r)
            right_sliced_casted = ng.cast_axes(right_sliced, right_casted_axes)

            # perform binary op
            result_op = ng_op(left_sliced, right_sliced_casted)

            # cast result axis and broadcast to full result axes
            trimmed_result_axes = [
                result_axes_map[re] for re in result_op.axes
            ]
            result_op = ng.cast_axes(result_op, trimmed_result_axes)
            result_op = ng.axes_with_order(result_op, axes=result_axes)

        elif left_shape == right_shape:
            # cast right axes to be the same as left
            right = ng.cast_axes(right, left.axes)
            result_op = ng_op(left, right).named(name)

        else:
            # no need for casting
            result_op = ng_op(left, right).named(name)

        # return op
        return result_op