Example #1
def as_elementwise_compatible_nodes(*input_values):  # type: (*NodeInput) -> List[Node]
    """Return all input values as ngraph Nodes with the same shape and element type.

    Scalar values will be converted to ngraph Constant Nodes.
    """
    input_nodes = [node for node in input_values
                   if issubclass(type(node), Node)]  # type: List[Node]

    if not input_nodes:
        raise NotImplementedError('Operations on scalars only are not supported.')

    shapes = {tuple(node.shape) for node in input_nodes}
    if len(shapes) > 1:
        log.warning('More than one different shape in input nodes %s.', input_nodes)

    types = [node.get_element_type() for node in input_nodes]
    unique_types = {repr(t) for t in types}
    if len(unique_types) > 1:
        log.warning('More than one different data type in input nodes %s.', input_nodes)

    sorted_shapes = sorted(shapes, key=len)
    broadcast_shape = sorted_shapes.pop()
    broadcast_dtype = get_dtype(types.pop())

    output_nodes = []
    for input_value in input_values:
        if issubclass(type(input_value), Node):
            input_value = ng.broadcast_to(input_value, broadcast_shape)
            output_nodes.append(input_value)
        else:
            input_value = make_constant_node(input_value, dtype=broadcast_dtype)
            output_nodes.append(ng.broadcast_to(input_value, broadcast_shape))

    return output_nodes
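A minimal usage sketch for the helper above, assuming the usual `import ngraph as ng` / `import numpy as np` from these examples; the parameter node and scalar are made up for illustration:

# Hypothetical usage: mix an ngraph node with a Python scalar.
a = ng.parameter([2, 2], dtype=np.float32, name='a')  # assumed parameter factory
a_node, half_node = as_elementwise_compatible_nodes(a, 0.5)
# Both results now have shape (2, 2); the scalar 0.5 became a broadcast Constant,
# so an element-wise op such as a_node + half_node is shape- and type-compatible.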
Example #2
def PRelu(onnx_node,
          ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply the Parametric Relu function to the input tensor elementwise.

    f(x) = slope * x for x < 0, f(x) = x for x >= 0
    The slope parameter is passed to the node as its second input.
    """
    x, slope = ng_inputs
    if len(slope.shape) == 0:
        return ng.maximum(slope * x, x)
    elif slope.shape[0] == 1:
        slope = ng.broadcast_to(slope, [x.shape[0], 1])
        slope = ng.reshape(slope, [x.shape[0]])
        return ng.maximum(ng.broadcast_to(slope, x.shape, 0) * x, x)
    else:
        return ng.maximum(ng.broadcast_to(slope, x.shape, 1) * x, x)
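For a quick numeric check of the formula in the docstring, here is the same PRelu behaviour in plain NumPy (this reference helper is not part of the converter):

import numpy as np

def prelu_reference(x, slope):
    # f(x) = slope * x for x < 0, f(x) = x for x >= 0
    return np.where(x < 0, slope * x, x)

print(prelu_reference(np.array([-2.0, -0.5, 0.0, 3.0]), 0.1))
# [-0.2  -0.05  0.    3.  ]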
Example #3
def broadcast_for_binary_operation(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> Tuple[NgraphNode, NgraphNode]
    """
    Cast shape of the right operand to make ops compatible for an element-wise binary operation.

    Casting is based on `broadcast` and `axis` attributes of an ONNX node.

    :param onnx_node: wrapped ONNX node
    :param ng_inputs: left and right operand
    :return: left and right operand after broadcasting
    """
    left = ng_inputs[0]
    right = ng_inputs[1]

    dimensions_identical = list(left.shape) == list(right.shape)
    if dimensions_identical:
        return left, right

    broadcast = onnx_node.get_attribute_value('broadcast', 0)
    if not broadcast:
        logger.warning(
            '%s node (%s): operands have different dimensions, and "broadcast"'
            ' attribute is not set.', onnx_node.op_type, onnx_node.name)
        return left, right

    start_axis = onnx_node.get_attribute_value(
        'axis')  # start of mutually equal shape
    right = ng.broadcast_to(right, left.shape, start_axis)
    return left, right
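A NumPy illustration of what the `axis`-based broadcast achieves; the shapes below are invented for the example and no ngraph calls are involved:

import numpy as np

left = np.zeros((2, 3, 4, 5))
right = np.ones((3, 4))
axis = 1  # right's dimensions line up with left's starting at this axis

# Pad right's shape with singleton dimensions so ordinary NumPy broadcasting
# reproduces the ONNX-style broadcast: (3, 4) -> (1, 3, 4, 1).
expanded = right.reshape((1,) * axis + right.shape
                         + (1,) * (left.ndim - axis - right.ndim))
print((left + expanded).shape)  # (2, 3, 4, 5)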
Example #4
def ConvTranspose(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Calculate convolution transpose."""
    if len(ng_inputs) == 3:
        data, weights, bias = ng_inputs
    elif len(ng_inputs) == 2:
        data, weights = ng_inputs
        bias = ng.constant(0, dtype=get_dtype(data.get_element_type()))
    else:
        raise ValueError(
            'ConvTranspose node (%s): unexpected number of input values: %d.'
            % (onnx_node.name, len(ng_inputs)))

    strides = get_strides(onnx_node)
    dilation = get_dilations(onnx_node)
    padding_below, padding_above = get_pads(onnx_node)

    output_padding = onnx_node.get_attribute_value('output_padding')
    if output_padding is None:
        raise ValueError(
            'ConvTranspose node (%s): output_padding attribute is required.'
            % onnx_node.name)

    data_shape = list(data.shape)
    weights_shape = list(weights.shape)

    num_spatial_dims = len(data.shape) - 2
    data_dilation_strides = [1] * num_spatial_dims

    data_batch_shape = [1] * (num_spatial_dims + 2)
    data_batch_shape[0] = data_shape[0]
    data_batch_shape[1] = weights_shape[1]

    for i in range(num_spatial_dims):
        # Calculating spatial dims of data output shape for ngraph conv backprop op
        # | pb + s(ds-1) + op - d(ws-1)+1 |
        # | ----------------------------- | + 1
        # |_            dds              _|
        #
        # d   - dilation
        # ds  - data shape
        # dds - data dilation strides
        # op  - output padding
        # pb  - padding below
        # s   - strides
        # ws  - weights shape
        data_batch_shape[i + 2] = (
            (padding_below[i] +
             ((data_shape[i + 2] - 1) * strides[i] + 1) + output_padding[i]) -
            ((weights_shape[i + 2] - 1) * dilation[i] + 1) +
            1) // data_dilation_strides[i] + 1

    transconv = ng.convolution_backprop_data(data_batch_shape, weights, data,
                                             strides, dilation, padding_below,
                                             padding_above,
                                             data_dilation_strides)
    if len(bias.shape) > 0:
        return transconv + ng.broadcast_to(bias, transconv.shape, 1)
    else:
        return transconv
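Plugging sample numbers into the spatial-shape formula from the comment above (the values are arbitrary, chosen only to make the arithmetic easy to follow):

# pb=1, ds=4, s=2, op=0, ws=3, d=1, dds=1 for a single spatial dimension.
pb, ds, s, op, ws, d, dds = 1, 4, 2, 0, 3, 1, 1
spatial = (pb + ((ds - 1) * s + 1) + op - ((ws - 1) * d + 1) + 1) // dds + 1
print(spatial)  # 7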
Example #5
def ReduceMean(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute the mean value of the input tensor's elements along the provided axes."""
    input_shape = list(ng_inputs[0].shape)
    sum_node = make_reduction_op(ng.sum, onnx_node, ng_inputs[0])
    reduction_axes = get_reduction_axes(onnx_node, ng_inputs[0])
    avg_elem_count = np.prod([input_shape[x] for x in reduction_axes])
    const_node = ng.broadcast_to(
        ng.constant(avg_elem_count, get_dtype(sum_node.get_element_type())),
        sum_node.shape)
    return ng.divide(sum_node, const_node)
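The same sum-then-divide computation expressed in NumPy as a sanity check (shape and axes are illustrative):

import numpy as np

data = np.arange(24, dtype=np.float32).reshape(2, 3, 4)
axes = (1, 2)  # reduction axes
elem_count = np.prod([data.shape[a] for a in axes])  # 12 elements per slice
assert np.allclose(data.sum(axis=axes) / elem_count, data.mean(axis=axes))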
Example #6
def make_convolution_op(onnx_node, ng_inputs):
    # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """
    Create an ngraph convolution Op based on an ONNX node.

    :param onnx_node: wrapped ONNX node for a Conv or ConvTranspose op
    :param ng_inputs: ngraph TensorOp input tensors
    :return: ngraph Op for convolution or deconvolution
    """
    if len(ng_inputs) == 3:
        data, weights, bias = ng_inputs
    elif len(ng_inputs) == 2:
        data, weights = ng_inputs
        bias = ng.constant(0, dtype=get_dtype(data.get_element_type()))
    else:
        raise ValueError(
            'Conv node (%s): unexpected number of input values: %d.'
            % (onnx_node.name, len(ng_inputs)))

    groups = onnx_node.get_attribute_value('group', 1)

    strides = get_strides(onnx_node)
    dilation = get_dilations(onnx_node)
    padding_below, padding_above = get_pads(onnx_node)
    if groups != 1:
        # Split the convolution into N ops, where N is the number of groups, then concatenate the results.
        # reference: https://github.com/NervanaSystems/ngraph-mxnet/blob/fdd692/src/ngraph/ngraph_emitter.cc#L822-L856
        data_shape = list(data.shape)
        weights_shape = list(weights.shape)
        convolutions_nodes = []

        # initial bounds for slice
        data_lower_part = len(data_shape) * [0]
        data_upper_part = copy(data_shape)

        weights_lower_part = len(weights_shape) * [0]
        weights_upper_part = copy(weights_shape)

        for group in range(groups):
            # update bounds for slice
            data_lower_part[1] = group * (data_shape[1] // groups)
            data_upper_part[1] = (group + 1) * (data_shape[1] // groups)

            sliced_data = ng.slice(data, data_lower_part, data_upper_part)

            # update bounds for slice
            weights_lower_part[0] = group * (weights_shape[0] // groups)
            weights_upper_part[0] = max(
                (group + 1) * (weights_shape[0] // groups), 1)

            sliced_weights = ng.slice(weights, weights_lower_part,
                                      weights_upper_part)
            convolutions_nodes.append(
                ng.convolution(sliced_data, sliced_weights, strides, dilation,
                               padding_below, padding_above))
        conv = ng.concat(convolutions_nodes, axis=1)
    else:
        conv = ng.convolution(data, weights, strides, dilation, padding_below,
                              padding_above)
    if len(bias.shape) > 0:
        return conv + ng.broadcast_to(bias, conv.shape, 1)
    else:
        return conv
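To make the grouped-convolution slicing concrete, here is how the per-group bounds work out for an assumed data shape of (1, 4, 8, 8), weights shape (4, 2, 3, 3) and groups=2 (plain Python, no ngraph calls):

data_shape = [1, 4, 8, 8]     # N, C_in, H, W (assumed)
weights_shape = [4, 2, 3, 3]  # C_out, C_in / groups, kH, kW (assumed)
groups = 2

for group in range(groups):
    data_lo = [0, group * (data_shape[1] // groups), 0, 0]
    data_hi = [data_shape[0], (group + 1) * (data_shape[1] // groups),
               data_shape[2], data_shape[3]]
    weights_lo = [group * (weights_shape[0] // groups), 0, 0, 0]
    weights_hi = [(group + 1) * (weights_shape[0] // groups)] + weights_shape[1:]
    print(group, data_lo, data_hi, weights_lo, weights_hi)
# Group 0 convolves data channels 0:2 with filters 0:2; group 1 uses 2:4 of each.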