Example #2
def generate_nodes(data, axis=-1, depth=4, on_value=1., off_value=0.):
    return {
        'indices': {'op': 'Parameter', 'value': data, 'shape': int64_array(data.shape)},
        'indices_d': {'kind': 'data', 'value': data, 'shape': int64_array(data.shape)},
        **valued_const_with_data('depth', int64_array(depth)),
        **valued_const_with_data('on_value', float_array(on_value)),
        **valued_const_with_data('off_value', float_array(off_value)),
        **regular_op_with_shaped_data('one_hot', None, {'type': 'OneHot', 'axis': axis, 'op': 'OneHot'})
    }
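
# Usage sketch (illustrative): generate_nodes(int64_array([0, 2])) returns the
# attribute dictionaries for the indices Parameter, the depth/on_value/off_value
# constants and the OneHot op, ready to be passed to build_graph in a test.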
Example #3
    def test_leaky_relu_not_applicable_non_scalar_const(self):
        # const value is not a scalar or 1D tensor with 1 element so the transformation is not applicable
        graph = build_graph_with_edge_attrs(nodes, edges, {})
        Node(graph, 'const')['value'] = float_array([0.5, 0.7])
        Node(graph, 'const_d')['value'] = float_array([0.5, 0.7])
        graph_ref = graph.copy()

        LeakyReLUFusion().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #4
    def test_hsigmoid_with_clamp_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'const_0': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithClamp().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #5
    def test_leaky_relu_mul_multiple_consumers(self):
        # multiple consumers of Mul operation
        graph = build_graph_with_edge_attrs(nodes, edges, {})
        additional_result = Result(graph, {'name': 'result_2'}).create_node()
        Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

        ref_nodes = {
            **regular_op_with_shaped_data('input', shape, {
                'type': 'Parameter',
                'op': 'Parameter'
            }),
            **regular_op_with_shaped_data('mul', shape, {
                'type': 'Multiply',
                'name': 'mul'
            }),
            **regular_op_with_shaped_data('max', shape, {
                'type': 'Maximum',
                'name': 'final_max'
            }),
            **valued_const_with_data('const', float_array([0.5])),
            **regular_op_with_shaped_data('leaky_relu', shape, {
                'type': 'LeakyReLU',
                'name': 'max_final',
                'negative_slope': None
            }),
            **result('result'),
            **result('result_2')
        }
        ref_edges = [
            *connect('input:0', '0:mul'), *connect('const', '1:mul'),
            *connect('max:0', 'result'), *connect('mul:0', 'result_2'),
            *connect_data('input', 'leaky_relu'),
            *connect('leaky_relu', 'result')
        ]
        graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

        LeakyReLUFusion().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
        self.assertTrue(flag, resp)
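
# The pattern LeakyReLUFusion matches is max(x, a * x) for a scalar (or
# 1-element) constant a; a minimal NumPy sketch of the identity it relies on
# (illustrative, assuming 0 <= a <= 1):
import numpy as np

def leaky_relu_reference(x, a=0.5):
    # max(x, a * x) == x for x >= 0 and == a * x for x < 0,
    # i.e. LeakyReLU with negative_slope = a
    return np.maximum(x, a * x)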
Example #6
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='ThresholdedRelu'):
            name = node.soft_get('name', node.id)

            greater = create_op_with_const_inputs(
                graph, Greater, {1: float_array([node.alpha])})
            greater.in_port(0).connect(node.in_port(0).get_source())
            float_greater = Cast(
                graph, {
                    'dst_type':
                    data_type_str_to_np(graph.graph['cmd_params'].data_type)
                }).create_node()
            greater.out_port(0).connect(float_greater.in_port(0))

            mul = Mul(graph, {}).create_node()
            node.out_port(0).get_connection().set_source(mul.out_port(0))
            mul.in_port(0).connect(node.in_port(0).get_source())
            mul.in_port(1).connect(float_greater.out_port(0))

            rename_nodes([(node, name + '/TBR'), (mul, name)])
            graph.remove_node(node.id)
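
# The decomposition above rewrites ThresholdedRelu(x) as x * (x > alpha): the
# Greater node builds a boolean mask, Cast turns it into the model's float
# type, and Mul zeroes out values at or below alpha. A minimal NumPy sketch of
# the same identity (illustrative):
import numpy as np

def thresholded_relu_reference(x, alpha):
    return x * (x > alpha).astype(x.dtype)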
Example #7
class HSigmoidWithReluMulTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [('input', 'add', {
        'in': 0,
        'out': 0
    }), ('add_const', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'relu', {
        'in': 0,
        'out': 0
    }), ('relu', 'min', {
        'in': 0,
        'out': 0
    }), ('min_const', 'min', {
        'in': 1,
        'out': 0
    }), ('min', 'mul', {
        'in': 0,
        'out': 0
    }), ('mul_const', 'mul', {
        'in': 1,
        'out': 0
    }), ('mul', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_hsigmoid_with_relu_mul(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
        self.assertTrue(
            graph.get_op_nodes(
                name='final_mul')[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'add_const': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
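
# Every positive test above feeds the same expression in different graph forms:
# HSigmoid(x) = min(max(x + 3, 0), 6) / 6, with ReLU standing in for the
# max-with-zero in the HSigmoidWithReluMul variant. A NumPy sketch of the
# matched expression (illustrative):
import numpy as np

def hsigmoid_reference(x):
    return np.minimum(np.maximum(x + 3.0, 0.0), 6.0) * (1.0 / 6.0)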
Example #8
    'placeholder': {
        'shape': None,
        'type': 'Parameter',
        'kind': 'op',
        'op': 'Parameter'
    },
    'tfpad': {
        'type': None,
        'kind': 'op',
        'op': 'TFPad',
        'mode': 'constant',
        'name': 'tfpad_name'
    },
    **const('paddings',
            int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2])),
    **const('fill', float_array(5.75)),
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },

    # new Pad layer and sub-graph
    'pad': {
        'type': 'Pad',
        'kind': 'op',
        'op': 'Pad',
        'mode': 'constant'
    },
    'transpose': {
Example #9
        'type': 'Greater',
        'kind': 'op',
        'op': 'Greater'
    },
    'mul': {
        'type': 'Multiply',
        'kind': 'op',
        'op': 'Mul',
        'name': 'my_trelu'
    },
    'squeeze2': {
        'type': 'Squeeze',
        'kind': 'op',
        'op': 'Squeeze'
    },
    **const('alpha', float_array([0.75])),
}


class ThresholdedReluDecompositionTest(unittest.TestCase):
    def test_trelu(self):
        graph = build_graph(nodes_attributes, [
            ('parameter', 'trelu', {
                'in': 0,
                'out': 0
            }),
            ('trelu', 'result', {
                'in': 0,
                'out': 0
            }),
        ],
Example #10
def replace_resize(graph: Graph, resize: Node):
    log.debug("Converting of ONNX Resize-10 to Interpolate-4 "
              "is triggered for node {}.".format(
                  resize.soft_get('name', resize.id)))

    resize_name = resize.soft_get('name', resize.id)

    rank_node = Rank(graph, {'name': resize_name + '/max_axes'}).create_node()
    range_node = create_op_with_const_inputs(graph, Range, {
        0: int64_array(2),
        2: int64_array(1)
    }, {'name': resize_name + '/axes'})

    sizes_ss = create_op_with_const_inputs(graph, StridedSlice, {
        1: int64_array([2]),
        2: int64_array([0]),
        3: int64_array([1])
    }, {
        'name': resize_name + '/sizes_ss',
        'begin_mask': int64_array([1]),
        'end_mask': int64_array([0]),
        'new_axis_mask': int64_array([0]),
        'shrink_axis_mask': int64_array([0]),
        'ellipsis_mask': int64_array([0])
    })
    scales_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([2]),
            2: int64_array([0]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/scales_ss',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([0]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })

    rank_node.out_port(0).connect(range_node.in_port(1))

    interpolate_node = Interpolate(
        graph, {
            'version': 'opset4',
            'mode': 'linear_onnx' if resize.mode == 'linear' else 'nearest',
            'coordinate_transformation_mode': 'asymmetric',
            'cube_coeff': -0.75,
            'nearest_mode': 'simple',
            'pads_begin': int64_array([0]),
            'pads_end': int64_array([0]),
            'antialias': 0,
            'shape_calculation_mode': 'scales',
            'in_ports_count': 4
        }).create_node()

    range_node.out_port(0).connect(interpolate_node.in_port(3))
    shape_of = Shape(graph, {'name': resize_name + '/ShapeOf'}).create_node()

    # When we calculate 'sizes' input as floor(input_shape * scales), we can get incorrect 'sizes' if, e.g.,
    # scales = [1.0, 1.0, 1.33333, 2.0], input_shape = [1, 3, 30, 200], because
    # input_shape * scales = [1, 3, 39.9999, 400], and floor(input_shape * scales)[2] == 39, not 40.
    # Maybe we need to calculate 'sizes' input as floor(input_shape * scales + eps), where eps is some small
    # floating point number, e.g. 1.0e-5. But, in this case, if scales = [1.0, 1.0, 1.33333, 2.0],
    # input_shape = [1, 3, 30, 200], floor(input_shape * scales + eps)[2] = 39, not 40, because
    # input_shape[2] * scales[2] + 1.0e-5 = 39.99991.
    # Hence, we need to calculate 'sizes' as floor(input_shape * (scales + eps)).
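    # Worked check of the numbers above: with input_shape[2] = 30 and
    # scales[2] = 1.33333, floor(30 * 1.33333) = floor(39.9999) = 39 and
    # floor(30 * 1.33333 + 1.0e-5) = floor(39.99991) = 39, while
    # floor(30 * (1.33333 + 1.0e-5)) = floor(40.0002) = 40, the intended size.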
    add_node = create_op_with_const_inputs(graph, Add,
                                           {1: float_array([1.0e-5])},
                                           {'name': resize_name + '/Add'})

    dst_dtype = np.float32  # even if data_type=FP16 use float32 for shape values

    cast_shape_to_float = Cast(graph, {'dst_type': dst_dtype}).create_node()

    shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
    mul_node = Mul(graph, {
        'name': resize_name + '/Mul'
    }).create_node([cast_shape_to_float, add_node])
    floor_node = Floor(graph, {
        'name': resize_name + '/Floor'
    }).create_node([mul_node])
    cast_mul_result_to_int = Cast(graph, {
        'dst_type': np.int64
    }).create_node([floor_node])
    cast_mul_result_to_int.out_port(0).connect(sizes_ss.in_port(0))
    sizes_ss.out_port(0).connect(interpolate_node.in_port(1))

    scales_ss.out_port(0).connect(interpolate_node.in_port(2))

    connection_of_resize_input = resize.in_port(0).get_connection()
    connection_of_resize_input.set_destination(interpolate_node.in_port(0))

    connection_of_scales = resize.in_port(1).get_connection()
    connection_of_scales.set_destination(scales_ss.in_port(0))

    connection_of_resize_input.get_source().connect(shape_of.in_port(0))
    connection_of_resize_input.get_source().connect(rank_node.in_port(0))
    connection_of_scales.get_source().connect(add_node.in_port(0))

    rename_nodes([(resize, resize_name + '/delete'),
                  (interpolate_node, resize_name)])
    resize.out_port(0).get_connection().set_source(
        interpolate_node.out_port(0))
Example #11
def parse_specifier(string, graph, layer_node_map):
    pos = string.find(b'(')
    if pos == -1:
        # node name
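        # str() on a bytes object yields "b'...'"; the chained strip/replace
        # calls below remove those repr artifacts to recover the plain name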
        input_name = str(string.split(b' ')[0]).strip('b').replace(
            "\'", '').replace('\\n', '')

        if input_name not in layer_node_map:
            node_name = graph.unique_id(prefix=input_name)
            graph.add_node(node_name, parameters=[], op="", kind='op')
            layer_node_map[input_name] = node_name
        else:
            node_name = layer_node_map[input_name]
        return node_name

    spec = string[:pos]
    args = get_args_for_specifier(string[pos:])
    if spec == b'Append':
        nodes = []
        for i in range(len(args)):
            nodes.append(parse_specifier(args[i], graph, layer_node_map))
        layer_name = 'Append_'
        for node in nodes:
            layer_name = layer_name + node + "_"

        if layer_name not in layer_node_map:
            concat_name = graph.unique_id(prefix=layer_name)
            graph.add_node(concat_name,
                           parameters=None,
                           op='concat',
                           kind='op')
            layer_node_map[layer_name] = concat_name
            i = 0
            Node(graph,
                 concat_name).add_sequence_of_ports('in', range(len(nodes)))
            for node in nodes:
                out_port = len(Node(graph, node).out_nodes())
                Node(graph, node).add_output_port(out_port)
                graph.create_edge(
                    Node(graph, node), Node(graph, concat_name), out_port, i,
                    create_edge_attrs(node, concat_name, node, i, out_port))
                i = i + 1
        else:
            concat_name = layer_node_map[layer_name]
        return concat_name
    elif spec == b'Offset':
        node = parse_specifier(args[0], graph, layer_node_map)
        t = int(args[1])
        if len(args) > 2:
            raise Error("ModelOptimizer supports only 2 arguments for Offset")
        layer_name = 'Offset_' + node + '_'
        if t < 0:
            layer_name = layer_name + '_' + str(-t)
        else:
            layer_name = layer_name + str(t)

        if layer_name not in layer_node_map:
            memory_name = graph.unique_id(prefix=layer_name)
            layer_node_map[layer_name] = memory_name
            memory_name_2 = memory_name + '_out'
            graph.add_node(memory_name,
                           parameters=dict(t=t,
                                           pair_name=memory_name_2,
                                           has_default=False),
                           op='MemoryOffset',
                           kind='op')
            out_port = len(Node(graph, node).out_nodes())
            in_port = len(Node(graph, memory_name).in_nodes())
            Node(graph, memory_name).add_input_port(in_port)
            Node(graph, node).add_output_port(out_port, skip_if_exist=True)
            graph.create_edge(
                Node(graph, node), Node(graph, memory_name), out_port, in_port,
                create_edge_attrs(node, memory_name, node, in_port, out_port))
        else:
            memory_name = layer_node_map[layer_name]
        return memory_name
    elif spec == b'Sum':
        nodes = []
        for i in range(len(args)):
            nodes.append(parse_specifier(args[i], graph, layer_node_map))

        layer_name = 'Sum_'
        for node in nodes:
            layer_name = layer_name + node + "_"

        if layer_name not in layer_node_map:
            sum_name = graph.unique_id(prefix=layer_name)
            graph.add_node(sum_name, parameters=None, op='Add', kind='op')
            layer_node_map[layer_name] = sum_name
        else:
            sum_name = layer_node_map[layer_name]

        for i, node in enumerate(nodes):
            out_port = len(Node(graph, node).out_nodes())
            Node(graph, node).add_output_port(out_port, skip_if_exist=True)
            Node(graph, sum_name).add_input_port(i)
            graph.add_edge(node, sum_name,
                           **create_edge_attrs(node, sum_name, node, i))

        return sum_name
    elif spec == b'IfDefined':
        node_id = parse_specifier(args[0], graph, layer_node_map)
        node = Node(graph, node_id)
        if node.op == 'MemoryOffset':
            node['parameters']['has_default'] = True
        return node_id
    elif spec == b'ReplaceIndex':
        node = parse_specifier(args[0], graph, layer_node_map)
        return node
    elif spec == b'Scale':
        node_name = parse_specifier(args[1], graph, layer_node_map)
        scale_value = float(args[0])
        layer_name = '{}/Mul/{}'.format(node_name, scale_value)

        if layer_name not in layer_node_map:
            scale_name = graph.unique_id(prefix=layer_name)
            scale_node = Mul(graph, {'name': scale_name}).create_node()

            layer_node_map[layer_name] = scale_name

            scale_const_name = 'Const_{}'.format(scale_value)
            const_node = Const(graph, {
                'name': scale_const_name,
                'value': float_array([scale_value])
            }).create_node()

            node = Node(graph, node_name)
            graph.create_edge(
                const_node, scale_node, 0, 0,
                create_edge_attrs(const_node.id, scale_node.id, const_node.id))
            out_port = len(node.out_nodes())
            graph.create_edge(
                node, scale_node, out_port, 1,
                create_edge_attrs(node_name, scale_node.id, node_name, 1,
                                  out_port))
        else:
            scale_name = layer_node_map[layer_name]

        return scale_name
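
# The parser resolves Kaldi descriptor specifiers recursively: a bare name maps
# to a graph node, Append becomes a concat node, Offset a MemoryOffset node,
# Sum an Add node, and Scale a Mul fed by a Const; IfDefined and ReplaceIndex
# return their inner node (IfDefined additionally marks a MemoryOffset as
# having a default value). For example (hypothetical input),
# b'Sum(Offset(input, -1), input)' yields an Add node fed by a t=-1
# MemoryOffset of 'input' and by 'input' itself.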
Example #12
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import unittest

from openvino.tools.mo.middle.GroupNorm import GroupNormToMVN
from openvino.tools.mo.front.common.partial_infer.utils import float_array, int64_array
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, result, connect, \
    regular_op_with_shaped_data, valued_const_with_data

shape = int64_array([1, 3, 5, 2])
nodes = {**regular_op_with_shaped_data('input', shape, {'type': 'Parameter', 'op': 'Parameter'}),
         **valued_const_with_data('gamma', float_array([0.5])),
         **valued_const_with_data('beta', float_array([0.5])),
         **regular_op_with_shaped_data('group_norm', shape,
                                       {'op': 'GroupNorm', 'name': 'group_norm', 'num_groups': 3, 'eps': 1e-9}),
         **result('result')
         }

edges = [*connect('input:0', '0:group_norm'),
         *connect('gamma', '1:group_norm'),
         *connect('beta', '2:group_norm'),
         *connect('group_norm:0', 'result'),
         ]

ref_nodes = {**regular_op_with_shaped_data('input', shape, {'type': 'Parameter', 'op': 'Parameter'}),
             **regular_op_with_shaped_data('shape1', int64_array([4]), {'op': 'ShapeOf'}),
             **regular_op_with_shaped_data('shape2', int64_array([4]), {'op': 'ShapeOf'}),
             **regular_op_with_shaped_data('shape3', int64_array([1]), {'op': 'ShapeOf'}),
             **regular_op_with_shaped_data('hcast1', int64_array([4]), {'op': 'Cast'}),
Example #13
shape = int64_array([1, 3, 5, 2])
nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
    }),
    **regular_op_with_shaped_data('mul', shape, {
        'type': 'Multiply',
        'name': 'mul'
    }),
    **regular_op_with_shaped_data('max', shape, {
        'type': 'Maximum',
        'name': 'final_max'
    }),
    **valued_const_with_data('const', float_array([0.5])),
    **result('result')
}

edges = [
    *connect('input:0', '0:mul'),
    *connect('const', '1:mul'),
    *connect_data('input', '0:max'),
    *connect('mul:0', '1:max'),
    *connect('max:0', 'result'),
]

ref_nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
Example #14
def replace_resize(graph: Graph, resize: Node):
    log.debug("Conversion of ONNX Resize-11 to Interpolate-4 "
              "is triggered for node {}.".format(
                  resize.soft_get('name', resize.id)))

    input_shape = resize.in_port(0).data.get_shape()
    input_rank = len(input_shape)
    resize_name = resize.soft_get('name', resize.id)
    if input_rank not in {4, 5}:
        log.warning(
            'The input shape is not 4D or 5D for op with name {}'.format(
                resize_name))
        return

    assert (resize.is_in_port_connected(0) and (resize.is_in_port_connected(2) or resize.is_in_port_connected(3))), \
        "Scales or sizes inputs must be connected to Node {} with op {}.".format(resize.soft_get("name", resize.id),
                                                                                 resize.op)

    assert resize.soft_get('coordinate_transformation_mode') != 'tf_crop_and_resize', \
        'Mode tf_crop_and_resize is not supported for op {} with name {}'.format(resize.op,
                                                                                 resize.soft_get("name", resize.id))

    layout = graph.graph['layout']

    if input_rank == 4:
        begin_dim = get_height_dim(layout, input_rank)
        end_dim = get_width_dim(layout, input_rank) + 1
    else:
        begin_dim = get_depth_dim(layout, input_rank)
        end_dim = get_width_dim(layout, input_rank) + 1
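    # e.g. for a 4D NCHW input, begin_dim = 2 and end_dim = 4, so the two
    # StridedSlice nodes below keep only the spatial (H, W) entries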

    sizes_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([begin_dim]),
            2: int64_array([end_dim]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/StridedSlice_sizes',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([1]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })
    scales_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([begin_dim]),
            2: int64_array([end_dim]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/StridedSlice_scales',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([1]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })
    axes_node = Const(
        graph, {
            'name': resize_name + '/axis',
            'value': int64_array(np.arange(begin_dim, end_dim))
        }).create_node()

    shape_calculation_mode = 'sizes' if resize.is_in_port_connected(
        3) else 'scales'

    interpolate_node = Interpolate(
        graph, {
            'version': 'opset4',
            'mode': convert_mode(resize.mode),
            'coordinate_transformation_mode':
            resize.coordinate_transformation_mode,
            'cube_coeff': resize.cube_coeff,
            'nearest_mode': resize.nearest_mode,
            'pads_begin': int64_array([0]),
            'pads_end': int64_array([0]),
            'antialias': 0,
            'shape_calculation_mode': shape_calculation_mode,
            'in_ports_count': 4
        }).create_node()

    axes_node.out_port(0).connect(interpolate_node.in_port(3))
    shape_of = Shape(graph, {'name': resize_name + '/ShapeOf'}).create_node()

    add_node = create_op_with_const_inputs(graph, Add,
                                           {1: float_array([1.0e-5])},
                                           {'name': resize_name + '/Add'})

    dst_dtype = np.float32  # even if data_type=FP16 use float32 for shape values

    if not resize.is_in_port_connected(3):
        cast_shape_to_float = Cast(graph, {
            'dst_type': dst_dtype
        }).create_node()
        mul_node = Mul(graph, {'name': resize_name + '/Mul'}).create_node()
        shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
        cast_shape_to_float.out_port(0).connect(mul_node.in_port(0))
        cast_add_result_to_int = Cast(graph, {
            'dst_type': np.int64
        }).create_node()
        floor_node = Floor(graph, {
            'name': resize_name + '/Floor'
        }).create_node()
        mul_node.out_port(0).connect(add_node.in_port(0))
        add_node.out_port(0).connect(floor_node.in_port(0))
        floor_node.out_port(0).connect(cast_add_result_to_int.in_port(0))
        cast_add_result_to_int.out_port(0).connect(sizes_ss.in_port(0))
        sizes_ss.out_port(0).connect(interpolate_node.in_port(1))
        scales_ss.out_port(0).connect(interpolate_node.in_port(2))

        connection_of_resize_input = resize.in_port(0).get_connection()
        connection_of_resize_input.set_destination(interpolate_node.in_port(0))

        connection_of_scales = resize.in_port(2).get_connection()
        connection_of_scales.set_destination(scales_ss.in_port(0))

        connection_of_resize_input.get_source().connect(shape_of.in_port(0))
        connection_of_scales.get_source().connect(mul_node.in_port(1))
    else:
        cast_shape_to_float = Cast(graph, {
            'dst_type': dst_dtype
        }).create_node()
        cast_sizes_to_float = Cast(graph, {
            'dst_type': dst_dtype
        }).create_node()
        div_node = Div(graph, {'name': resize_name + '/Div'}).create_node()
        cast_sizes_to_float.out_port(0).connect(div_node.in_port(0))
        cast_shape_to_float.out_port(0).connect(div_node.in_port(1))
        shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
        div_node.out_port(0).connect(add_node.in_port(0))
        add_node.out_port(0).connect(scales_ss.in_port(0))
        scales_ss.out_port(0).connect(interpolate_node.in_port(2))
        sizes_ss.out_port(0).connect(interpolate_node.in_port(1))

        connection_of_resize_input = resize.in_port(0).get_connection()
        connection_of_resize_input.set_destination(interpolate_node.in_port(0))

        connection_of_sizes = resize.in_port(3).get_connection()
        connection_of_sizes.set_destination(sizes_ss.in_port(0))

        connection_of_resize_input.get_source().connect(shape_of.in_port(0))
        connection_of_sizes.get_source().connect(
            cast_sizes_to_float.in_port(0))

    rename_nodes([(resize, resize_name + '/delete'),
                  (interpolate_node, resize_name)])
    resize.out_port(0).get_connection().set_source(
        interpolate_node.out_port(0))
Example #15
    def find_and_replace_pattern(self, graph: Graph):
        for quantize_node in graph.get_op_nodes(op='QuantizeLinear'):
            node_name = quantize_node.soft_get('name', quantize_node.id)
            axis = quantize_node.soft_get('axis', None)
            scale_y_shape = quantize_node.in_port(1).data.get_shape()

            if quantize_node.is_in_port_connected(2):
                zerop = quantize_node.in_port(2).get_source().node
            else:
                zerop = Const(
                    graph, {
                        'value': mo_array(0, dtype=np.uint8),
                        'name': node_name + '/ZeroPoint'
                    }).create_node()

            assert zerop.soft_get(
                'type'
            ) == 'Const', 'only constant for zero_point is supported for QuantizeLinear'
            zero_point_type = zerop.value.dtype
            # data type affects range of output values: [-128..127] or [0..255]
            if zero_point_type == np.int8:
                output_low_value = -128.0
                output_high_value = 127.0
            elif zero_point_type == np.uint8:
                output_low_value = 0.0
                output_high_value = 255.0
            else:
                raise Error(
                    'Unexpected type {} for zero point value in node {}'.
                    format(zero_point_type, zerop.soft_get('name')))

            fake_quantize = create_op_with_const_inputs(
                graph, FakeQuantize, {
                    3: float_array(output_low_value),
                    4: float_array(output_high_value)
                }, {
                    'levels': 256,
                    'name': node_name + '/FakeQuantize'
                })
            quantize_node.in_port(0).get_connection().set_destination(
                fake_quantize.in_port(0))

            # Calculate input_low value
            mul_low = create_op_with_const_inputs(
                graph, Mul, {1: float_array(output_low_value - zerop.value)},
                {'name': node_name + '/Mul/Low'})
            quantize_node.in_port(1).get_connection().set_destination(
                mul_low.in_port(0))
            mul_low.out_port(0).connect(fake_quantize.in_port(1))

            # Calculate input_high value
            mul_high = create_op_with_const_inputs(
                graph, Mul, {1: float_array(output_high_value - zerop.value)},
                {'name': node_name + '/Mul/High'})
            mul_low.in_port(0).get_connection().add_destination(
                mul_high.in_port(0))
            mul_high.out_port(0).connect(fake_quantize.in_port(2))

            cast = Cast(graph, {
                'dst_type': zero_point_type,
                'name': node_name + '/Cast'
            }).create_node()
            fake_quantize.out_port(0).connect(cast.in_port(0))
            quantize_node.out_port(0).get_connection().set_source(
                cast.out_port(0))
            rename_nodes([(quantize_node, node_name + '/TBD'),
                          (cast, node_name)])

            assert scale_y_shape is not None
            if axis is not None and len(
                    scale_y_shape) > 0 and scale_y_shape[0] > 1:
                input_shape = fake_quantize.in_port(0).data.get_shape()
                target_shape = np.ones(len(input_shape), np.int64)
                target_shape[axis] = input_shape[axis]
                mul_low_reshape = create_op_with_const_inputs(
                    graph, Reshape, {1: int64_array(target_shape)},
                    {'name': node_name + '/Reshape/Mul/Low'})
                mul_high_reshape = create_op_with_const_inputs(
                    graph, Reshape, {1: int64_array(target_shape)},
                    {'name': node_name + '/Reshape/Mul/High'})

                fake_quantize.in_port(1).get_connection().set_destination(
                    mul_low_reshape.in_port(0))
                fake_quantize.in_port(2).get_connection().set_destination(
                    mul_high_reshape.in_port(0))

                mul_low_reshape.out_port(0).connect(fake_quantize.in_port(1))
                mul_high_reshape.out_port(0).connect(fake_quantize.in_port(2))
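
# The FakeQuantize bounds above encode input_low = scale * (output_low - zero_point)
# and input_high = scale * (output_high - zero_point), reproducing ONNX
# QuantizeLinear semantics. A scalar NumPy reference of those semantics
# (illustrative sketch):
import numpy as np

def quantize_linear_reference(x, scale, zero_point):
    # output range follows the zero point's dtype: uint8 -> [0, 255], int8 -> [-128, 127]
    low, high = (0.0, 255.0) if zero_point.dtype == np.uint8 else (-128.0, 127.0)
    return np.clip(np.round(x / scale) + zero_point, low, high).astype(zero_point.dtype)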
Example #16
class GeLUMergerErfTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {
            'op': 'Parameter',
            'type': 'Parameter'
        }),
        **regular_op('mul', {'op': 'Mul'}),
        **regular_op('mul0', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **regular_op('div', {'op': 'Div'}),
        **regular_op('erf', {'op': 'Erf'}),
        **regular_op('add', {'op': 'Add'}),
        **const('mul_param', float_array([0.5])),
        **const('div_param', float_array([sqrt(2.)])),
        **const('add_param', int64_array([1])),
        **result('result'),
    }

    def test_gelu_p1(self):
        edges = [('input', 'mul'), ('mul', 'mul0'), ('input', 'div'),
                 ('div', 'erf'), ('erf', 'add'), ('add', 'mul0'),
                 ('mul_param', 'mul'), ('div_param', 'div'),
                 ('add_param', 'add'), ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation_mode == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p2(self):
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul0'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation_mode == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p3(self):
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation_mode == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')
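
# All three tests above are edge-order permutations of the same expression,
# Gelu(x) = 0.5 * x * (1 + erf(x / sqrt(2))), which GeLUMergerErf fuses into a
# single Gelu node with approximation_mode == 'erf'. A scalar reference of the
# formula (illustrative):
import math

def gelu_erf_reference(x):
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))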