def replace_pattern(graph: Graph, match: dict):
        node = match['conv']
        node_name = node.soft_get('name', node.id)

        # create Reshape before convolution
        # shape = [in_shape[0], in_shape[1]/patch_stride, 1, patch_stride]
        i_shape = Shape(graph, {'name': node_name + '/Shape'}).create_node()
        shape = Cast(graph, {
            'name': node_name + '/to_float',
            'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)
        }).create_node()
        i_shape.in_port(0).connect(node.in_port(0).get_source())
        shape.in_port(0).connect(i_shape.out_port(0))

        N = node_to_get_shape_value_of_indices(shape, [0])
        H = node_to_get_shape_value_of_indices(shape, [1])

        div = create_op_with_const_inputs(
            graph, Div, {1: float_array([node.patch_stride])},
            {'name': node_name + '/div_stride_h'})
        div.in_port(0).connect(H.out_port(0))

        concat = create_op_with_const_inputs(
            graph, Concat, {
                2: float_array([1]),
                3: float_array([node.patch_stride])
            }, {
                'name': node_name + '/concat_all_dims',
                'in_ports_count': 4,
                'axis': 0
            })
        concat.in_port(0).connect(N.out_port(0))
        concat.in_port(1).connect(div.out_port(0))

        reshape_pattern = Cast(graph, {
            'name': node_name + '/to_int',
            'dst_type': np.int64
        }).create_node()
        concat.out_port(0).connect(reshape_pattern.in_port(0))

        reshape_in = Reshape(graph, {
            'name': node_name + '/reshape_in'
        }).create_node()
        reshape_in.in_port(1).connect(reshape_pattern.out_port(0))

        # create Reshape after Convolution
        reshape_out = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            {'name': node_name + '/reshape_out'})

        # connect input_reshape_node
        source = node.in_port(0).get_source()
        node.in_port(0).get_connection().set_source(reshape_in.out_port(0))
        reshape_in.in_port(0).connect(source)
        # connect output_reshape_node
        node.out_port(0).get_connection().set_source(reshape_out.out_port(0))
        node.out_port(0).connect(reshape_out.in_port(0))
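
A quick numpy sketch (not MO code) of the shape arithmetic described in the comment above; the input shape and patch_stride are assumed values for illustration:

import numpy as np

in_shape = np.array([1, 40], dtype=np.int64)  # assumed [N, H] input
patch_stride = 8                              # assumed node.patch_stride

# shape = [in_shape[0], in_shape[1] / patch_stride, 1, patch_stride]
new_shape = np.array([in_shape[0], in_shape[1] // patch_stride, 1, patch_stride], dtype=np.int64)
print(new_shape)  # [1 5 1 8] -- element count is preserved: 1 * 40 == 1 * 5 * 1 * 8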
Example #2
    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #3
def generate_nodes(data, axis=-1, depth=4, on_value=1., off_value=0.):
    return {
        'indices': {'Op': 'Parameter', 'value': data, 'shape': int64_array(data.shape)},
        'indices_d': {'kind': 'data', 'value': data, 'shape': int64_array(data.shape)},
        **valued_const_with_data('depth', int64_array(depth)),
        **valued_const_with_data('on_value', float_array(on_value)),
        **valued_const_with_data('off_value', float_array(off_value)),
        **regular_op_with_shaped_data('one_hot', None, {'type': 'OneHot', 'axis': axis, 'Op': 'OneHot'})
    }
Example #4
    def test_leaky_relu_not_applicable_non_scalar_const(self):
        # const value is not a scalar or 1D tensor with 1 element so the transformation is not applicable
        graph = build_graph_with_edge_attrs(nodes, edges, {})
        Node(graph, 'const')['value'] = float_array([0.5, 0.7])
        Node(graph, 'const_d')['value'] = float_array([0.5, 0.7])
        graph_ref = graph.copy()

        LeakyReLUFusion().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #5
    def calc_convolution(input_spatial_shape, stride_spatial_shape,
                         pad_spatial_shape, kernel_extent):
        ''' Calculates output shape for Convolution.
            Verified to be applicable for both Caffe and ONNX.
        '''
        spatial_val_wo_stride = input_spatial_shape + pad_spatial_shape - kernel_extent
        float_spatial_val_wo_stride = float_array(spatial_val_wo_stride)
        return float_spatial_val_wo_stride / stride_spatial_shape + 1
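
A worked example of the formula above (plain numpy, values chosen for illustration): an input size of 7 with total padding 2, kernel extent 3 and stride 2 gives (7 + 2 - 3) / 2 + 1 = 4:

import numpy as np

input_spatial_shape = np.array([7])
pad_spatial_shape = np.array([2])     # summed begin + end padding
kernel_extent = np.array([3])
stride_spatial_shape = np.array([2])

out = (input_spatial_shape + pad_spatial_shape - kernel_extent) / stride_spatial_shape + 1
print(out)  # [4.]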
Example #6
class SoftplusFusionTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('exp', {'op': 'Exp'}),
        **const('const_1', float_array([1.0])),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('ln', {
            'op': 'Log',
            'name': 'final_log'
        }),
        **result('result'),
    }

    edges = [('input', 'exp', {
        'in': 0,
        'out': 0
    }), ('const_1', 'add', {
        'in': 0,
        'out': 0
    }), ('exp', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'ln', {
        'in': 0,
        'out': 0
    }), ('ln', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_softplus_fusion_test(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_log')) == 1
            and graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')

    def test_softplus_fusion_test_wrong_const(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'const_1': {
                'value': float_array([0.9999])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #7
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)

        if node.is_in_port_connected(2):
            zerop = node.in_port(2).get_source().node
        else:
            zerop = Const(graph, {'value': np.array(0, dtype=np.uint8), 'name': node_name + '/ZeroPoint'}).create_node()

        assert zerop.soft_get('type') == 'Const', 'only constant for zero_point is supported for QuantizeLinear'
        zero_point_type = zerop.value.dtype
        # data type affects range of output values: [-128..127] or [0..255]
        if zero_point_type == np.int8:
            output_low_value = -128.0
            output_high_value = 127.0
        elif zero_point_type == np.uint8:
            output_low_value = 0.0
            output_high_value = 255.0
        else:
            raise Error('Not expected type {} for zero point value in node {}'.format(
                zero_point_type, zerop.soft_get('name')))
        fake_quantize = create_op_with_const_inputs(graph, FakeQuantize, {3: float_array(output_low_value),
                                                                          4: float_array(output_high_value)},
                                                    {'levels': 256, 'name': node_name + '/FakeQuantize'})
        node.in_port(0).get_connection().set_destination(fake_quantize.in_port(0))

        # Calculate input_low value
        mul_low = create_op_with_const_inputs(graph, Mul, {1: float_array(output_low_value - zerop.value)},
                                              {'name': node_name + '/Mul/Low'})
        node.in_port(1).get_connection().set_destination(mul_low.in_port(0))
        mul_low.out_port(0).connect(fake_quantize.in_port(1))

        # Calculate input_high value
        mul_high = create_op_with_const_inputs(graph, Mul, {1: float_array(output_high_value - zerop.value)},
                                              {'name': node_name + '/Mul/High'})
        mul_low.in_port(0).get_connection().add_destination(mul_high.in_port(0))
        mul_high.out_port(0).connect(fake_quantize.in_port(2))

        cast = Cast(graph, {'dst_type': zero_point_type, 'name': node_name + '/Cast'}).create_node()
        rename_nodes([(node, node_name + '/TBD'), (cast, node_name)])
        fake_quantize.out_port(0).connect(cast.in_port(0))

        return [cast.id]
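
The two Mul nodes compute input_low = scale * (output_low - zero_point) and input_high = scale * (output_high - zero_point) for the FakeQuantize; a minimal numpy check with assumed scale and zero point values:

import numpy as np

scale = 0.5                  # assumed quantization scale (input 1 of the node)
zero_point = np.uint8(128)   # assumed zero point -> uint8 output range [0..255]
output_low, output_high = 0.0, 255.0

input_low = scale * (output_low - float(zero_point))    # -64.0
input_high = scale * (output_high - float(zero_point))  # 63.5
# FakeQuantize with levels=256 then maps [input_low, input_high]
# onto the 256 levels between output_low and output_high.
print(input_low, input_high)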
Example #8
    def calc_convolution(input_spatial_shape, stride_spatial_shape, pad_spatial_shape, kernel_extent):
        ''' Calculates output shape for Convolution.
            Verified to be applicable for both Caffe and ONNX.
        '''
        spatial_val_wo_stride = input_spatial_shape + pad_spatial_shape - kernel_extent

        if np.any(spatial_val_wo_stride < 0):
            raise Error("Data after padding has dimension less than window size. " +
                        "Possible reason of error is incorrectly specified model input shape(s).")

        float_spatial_val_wo_stride = float_array(spatial_val_wo_stride)
        return float_spatial_val_wo_stride / stride_spatial_shape + 1
Example #9
    def test_hsigmoid_with_clamp_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'const_0': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithClamp().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #10
    def test_softplus_fusion_test_wrong_const(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'const_1': {
                'value': float_array([0.9999])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #11
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='ThresholdedRelu'):
            name = node.soft_get('name', node.id)

            greater = create_op_with_const_inputs(graph, Greater, {1: float_array([node.alpha])})
            greater.in_port(0).connect(node.in_port(0).get_source())
            float_greater = Cast(graph,
                                 {'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)}).create_node()
            greater.out_port(0).connect(float_greater.in_port(0))

            mul = Mul(graph, {}).create_node()
            node.out_port(0).get_connection().set_source(mul.out_port(0))
            mul.in_port(0).connect(node.in_port(0).get_source())
            mul.in_port(1).connect(float_greater.out_port(0))

            rename_nodes([(node, name + '/TBR'), (mul, name)])
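
The replacement computes x * float(x > alpha), which matches ThresholdedRelu semantics; a minimal numpy check with an assumed alpha:

import numpy as np

alpha = 1.0  # assumed node.alpha
x = np.array([-2.0, 0.5, 1.0, 3.0], dtype=np.float32)

out = x * (x > alpha).astype(np.float32)
print(out)  # [0. 0. 0. 3.] -- values <= alpha are zeroed, since Greater is strict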
Example #12
    def replace_sub_graph(self, graph: Graph, match: dict):
        softplus = match['op']

        name = softplus.soft_get('name', softplus.id)
        exp_node = Exp(graph, {'name': name + '/Exp'}).create_node()
        add_node = create_op_node_with_second_input(graph, Add,
                                                    float_array([1.0]),
                                                    {'name': name + '/Add'})
        log_node = Log(graph, {'name': name + '/Log'}).create_node()
        rename_nodes([(softplus, name + '/Log'), (log_node, name)])

        softplus.in_port(0).get_connection().set_destination(
            exp_node.in_port(0))
        add_node.in_port(0).connect(exp_node.out_port(0))
        log_node.in_port(0).connect(add_node.out_port(0))
        softplus.out_port(0).get_connection().set_source(log_node.out_port(0))
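
The Exp -> Add(1) -> Log chain built above is the textbook softplus(x) = ln(1 + e^x); verifying numerically with numpy:

import numpy as np

x = np.array([-2.0, 0.0, 2.0])
print(np.log(1.0 + np.exp(x)))  # approx. [0.1269 0.6931 2.1269]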
Example #13
    def test_leaky_relu_mul_multiple_consumers(self):
        # multiple consumers of Mul operation
        graph = build_graph_with_edge_attrs(nodes, edges, {})
        additional_result = Result(graph, {'name': 'result_2'}).create_node()
        Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

        ref_nodes = {
            **regular_op_with_shaped_data('input', shape, {
                'type': 'Parameter',
                'op': 'Parameter'
            }),
            **regular_op_with_shaped_data('mul', shape, {
                'type': 'Multiply',
                'name': 'mul'
            }),
            **regular_op_with_shaped_data('max', shape, {
                'type': 'Maximum',
                'name': 'final_max'
            }),
            **valued_const_with_data('const', float_array([0.5])),
            **regular_op_with_shaped_data('leaky_relu', shape, {
                'type': 'LeakyReLU',
                'name': 'max_final',
                'negative_slope': None
            }),
            **result('result'),
            **result('result_2')
        }
        ref_edges = [
            *connect('input:0', '0:mul'), *connect('const', '1:mul'),
            *connect('max:0', 'result'), *connect('mul:0', 'result_2'),
            *connect_data('input', 'leaky_relu'),
            *connect('leaky_relu', 'result')
        ]
        graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

        LeakyReLUFusion().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
        self.assertTrue(flag, resp)
Example #14
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)

        rename_node(node, node_name + '/TBR')
        sqr_node = Mul(graph, {}).create_node()
        reduce_sum_node = ReduceSum(
            graph, {
                'keep_dims': node.soft_get('keep_dims', 0),
                'axis': node.soft_get('axis', None)
            }).create_node()
        sqrt_node = create_op_with_const_inputs(graph, Pow,
                                                {1: float_array(0.5)})
        rename_node(sqrt_node, node_name)

        # Connect nodes
        node.in_port(0).get_connection().set_destination(sqr_node.in_port(0))
        sqr_node.in_port(0).get_connection().add_destination(
            sqr_node.in_port(1))
        sqr_node.out_port(0).connect(reduce_sum_node.in_port(0))
        reduce_sum_node.out_port(0).connect(sqrt_node.in_port(0))

        return [sqrt_node.id]
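
The Mul(x, x) -> ReduceSum -> Pow(0.5) sub-graph computes an L2 (Euclidean) norm along the given axis; a numpy equivalent with assumed axis and keep_dims values:

import numpy as np

x = np.array([[3.0, 4.0], [6.0, 8.0]])
axis, keep_dims = 1, False  # assumed node attributes

print(np.power(np.sum(x * x, axis=axis, keepdims=keep_dims), 0.5))  # [ 5. 10.]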
Example #15
class HSigmoidWithReluMulTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [('input', 'add', {
        'in': 0,
        'out': 0
    }), ('add_const', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'relu', {
        'in': 0,
        'out': 0
    }), ('relu', 'min', {
        'in': 0,
        'out': 0
    }), ('min_const', 'min', {
        'in': 1,
        'out': 0
    }), ('min', 'mul', {
        'in': 0,
        'out': 0
    }), ('mul_const', 'mul', {
        'in': 1,
        'out': 0
    }), ('mul', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_hsigmoid_with_relu_mul(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
        self.assertTrue(
            graph.get_op_nodes(
                name='final_mul')[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'add_const': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
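
The sub-graph matched in the tests above is the hard-sigmoid formula hsigmoid(x) = min(ReLU(x + 3), 6) * 1/6; a minimal numpy check:

import numpy as np

x = np.array([-4.0, -1.0, 0.0, 2.0, 5.0])
print(np.minimum(np.maximum(x + 3.0, 0.0), 6.0) * (1.0 / 6.0))
# approx. [0. 0.3333 0.5 0.8333 1.] -- saturates at 0 and 1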
Example #16
    def replace_pattern(self, graph: Graph, match: dict):

        merge = match['merge']
        power = Pow(graph, {
            'name': merge.name + '/reciprocal_',
            'type': 'PNORM'
        }).create_node()
        const1 = Const(graph, {
            'value': -1.0,
            'name': merge.name + '/negate_const'
        }).create_node()
        merge.in_port(0).get_connection().set_destination(power.in_port(0))
        const1.out_port(0).connect(power.in_port(1))

        concat_node = Concat(
            graph, {
                'axis': 0,
                'name': merge.name + '/Concat_',
                'override_output_shape': True
            }).create_node()
        const3 = Const(graph, {
            'name': merge.name + '/const_reduce',
            'value': 0
        }).create_node()

        for ii, idx in enumerate(
                range(merge.significant, merge.to_significant + 1, 1)):
            const_node = Const(
                graph, {
                    'value': float_array(math.pow(10.0, idx)),
                    'name': merge.name + '/Const_' + ii.__str__()
                }).create_node()

            mul_node = Mul(graph, {
                'name': merge.name + '/Mul_' + ii.__str__()
            }).create_node()
            const_node.out_port(0).connect(mul_node.in_port(0))

            power.out_port(0).connect(
                mul_node.in_port(1))  # connect to the graph node
            mul_node2 = Mul(graph, {
                'name': merge.name + '/Mul_Div_' + ii.__str__()
            }).create_node()

            const_node2 = Const(
                graph, {
                    'value': float_array(math.pow(10.0, -1 * idx)),
                    'name': merge.name + '/Const_Pow_' + ii.__str__()
                }).create_node()
            cast_node = Cast(
                graph, {
                    'name': merge.name + '/Cast_' + idx.__str__(),
                    'dst_type': np.float32
                }).create_node()

            mul_node.out_port(0).connect(cast_node.in_port(0))
            const_node2.out_port(0).connect(mul_node2.in_port(1))
            cast_node.out_port(0).connect(mul_node2.in_port(0))
            concat_node.add_input_port(ii, skip_if_exist=True)
            concat_node.in_port(ii).get_connection().set_source(
                mul_node2.out_port(0))

        reducesum_node = ReduceMean(
            graph, {
                'name': merge.id + '/_pnorm_reduced_sum',
                'keep_dims': False,
                'in_ports_count': 2,
                'need_shape_inference': None,
                'infer': reduce_infer
            }).create_node()

        const3.out_port(0).connect(reducesum_node.in_port(1))
        reducesum_node.in_port(0).get_connection().set_source(
            concat_node.out_port(0))

        reshape = Reshape(graph, {
            'name': merge.name + '/Reshape_Node'
        }).create_node()
        reshape_dim = Const(graph, {
            'value': np.array([1, 5]),
            'name': merge.id + '/Reshape_Dim'
        }).create_node()
        reducesum_node.out_port(0).connect(reshape.in_port(0))
        reshape.in_port(1).connect(reshape_dim.out_port(0))
        merge.out_port(0).get_connection().set_source(reshape.out_port(0))
Example #17
def parse_specifier(string, graph, layer_node_map):
    pos = string.find(b'(')
    if pos == -1:
        # node name
        input_name = str(string.split(b' ')[0]).strip('b').replace(
            "\'", '').replace('\\n', '')

        if input_name not in layer_node_map:
            node_name = graph.unique_id(prefix=input_name)
            graph.add_node(node_name, parameters=[], op="", kind='op')
            layer_node_map[input_name] = node_name
        else:
            node_name = layer_node_map[input_name]
        return node_name

    spec = string[:pos]
    args = get_args_for_specifier(string[pos:])
    if spec == b'Append':
        nodes = []
        for i in range(len(args)):
            nodes.append(parse_specifier(args[i], graph, layer_node_map))
        layer_name = 'Append_'
        for node in nodes:
            layer_name = layer_name + node + "_"

        if layer_name not in layer_node_map:
            concat_name = graph.unique_id(prefix=layer_name)
            graph.add_node(concat_name,
                           parameters=None,
                           op='concat',
                           kind='op')
            layer_node_map[layer_name] = concat_name
            i = 0
            Node(graph,
                 concat_name).add_sequence_of_ports('in', range(len(nodes)))
            for node in nodes:
                out_port = len(Node(graph, node).out_nodes())
                Node(graph, node).add_output_port(out_port)
                graph.create_edge(
                    Node(graph, node), Node(graph, concat_name), out_port, i,
                    create_edge_attrs(node, concat_name, node, i, out_port))
                i = i + 1
        else:
            concat_name = layer_node_map[layer_name]
        return concat_name
    elif spec == b'Offset':
        node = parse_specifier(args[0], graph, layer_node_map)
        t = int(args[1])
        if len(args) > 2:
            raise Error("ModelOptimizer supports only 2 arguments for Offset")
        layer_name = 'Offset_' + node + '_'
        if t < 0:
            layer_name = layer_name + '_' + str(-t)
        else:
            layer_name = layer_name + str(t)

        if layer_name not in layer_node_map:
            memory_name = graph.unique_id(prefix=layer_name)
            layer_node_map[layer_name] = memory_name
            memory_name_2 = memory_name + '_out'
            graph.add_node(memory_name,
                           parameters=dict(t=t,
                                           pair_name=memory_name_2,
                                           has_default=False),
                           op='MemoryOffset',
                           kind='op')
            out_port = len(Node(graph, node).out_nodes())
            in_port = len(Node(graph, memory_name).in_nodes())
            Node(graph, memory_name).add_input_port(in_port)
            Node(graph, node).add_output_port(out_port, skip_if_exist=True)
            graph.create_edge(
                Node(graph, node), Node(graph, memory_name), out_port, in_port,
                create_edge_attrs(node, memory_name, node, in_port, out_port))
        else:
            memory_name = layer_node_map[layer_name]
        return memory_name
    elif spec == b'Sum':
        nodes = []
        for i in range(len(args)):
            nodes.append(parse_specifier(args[i], graph, layer_node_map))

        layer_name = 'Sum_'
        for node in nodes:
            layer_name = layer_name + node + "_"

        if layer_name not in layer_node_map:
            sum_name = graph.unique_id(prefix=layer_name)
            graph.add_node(sum_name, parameters=None, op='Add', kind='op')
            layer_node_map[layer_name] = sum_name
        else:
            sum_name = layer_node_map[layer_name]

        for i, node in enumerate(nodes):
            out_port = len(Node(graph, node).out_nodes())
            Node(graph, node).add_output_port(out_port, skip_if_exist=True)
            Node(graph, sum_name).add_input_port(i)
            graph.add_edge(node, sum_name,
                           **create_edge_attrs(node, sum_name, node, i))

        return sum_name
    elif spec == b'IfDefined':
        node_id = parse_specifier(args[0], graph, layer_node_map)
        node = Node(graph, node_id)
        if node.op == 'MemoryOffset':
            node['parameters']['has_default'] = True
        return node_id
    elif spec == b'ReplaceIndex':
        node = parse_specifier(args[0], graph, layer_node_map)
        return node
    elif spec == b'Scale':
        node_name = parse_specifier(args[1], graph, layer_node_map)
        scale_value = float(args[0])
        layer_name = '{}/Mul/{}'.format(node_name, scale_value)

        if layer_name not in layer_node_map:
            scale_name = graph.unique_id(prefix=layer_name)
            scale_node = Mul(graph, {'name': scale_name}).create_node()

            layer_node_map[layer_name] = scale_name

            scale_const_name = 'Const_{}'.format(scale_value)
            const_node = Const(graph, {
                'name': scale_const_name,
                'value': float_array([scale_value])
            }).create_node()

            node = Node(graph, node_name)
            graph.create_edge(
                const_node, scale_node, 0, 0,
                create_edge_attrs(const_node.id, scale_node.id, const_node.id))
            out_port = len(node.out_nodes())
            graph.create_edge(
                node, scale_node, out_port, 1,
                create_edge_attrs(node_name, scale_node.id, node_name, 1,
                                  out_port))
        else:
            scale_name = layer_node_map[layer_name]

        return scale_name
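
For context, the byte strings this function receives follow Kaldi nnet3 descriptor syntax; the layer names below are hypothetical, and graph/layer_node_map would come from the surrounding Kaldi loader:

descriptors = [
    b'Append(Offset(input, -1), input, Offset(input, 1))',  # -> concat node
    b'Sum(tdnn1.affine, tdnn2.affine)',                     # -> Add node
    b'Offset(lstm1, -3)',                                   # -> MemoryOffset node
    b'IfDefined(Offset(lstm1, -1))',                        # -> MemoryOffset with has_default=True
    b'Scale(2.0, lstm1)',                                   # -> Mul with a Const of 2.0 (args[0] is the scale)
]
for d in descriptors:
    print(d)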
Example #18
    def replace_pattern(self, graph: Graph, match: dict):

        const = 0.99
        merge = match['merge']
        digits = significant_digits()
        pnorm = Power(
            graph, {
                'name': merge.name + '/reciprocal_',
                'type': 'PNORM',
                'significant': digits[0],
                'to_significant': digits[1],
                'scale': 1,
                'shift': 0,
                'power': get_power_attr()
            }).create_node()
        merge.in_port(0).get_connection().set_destination(pnorm.in_port(0))

        in_shape = pnorm.in_port(0).data.get_shape()
        in_shape = list(in_shape)
        in_shape.insert(0, 1)

        reshape1 = Reshape(graph, {
            'name': merge.name + '/Reshape_Node1'
        }).create_node()
        reshape_dim1 = Const(graph, {
            'value': np.array(in_shape),
            'name': merge.id + '/Reshape_Dim1'
        }).create_node()
        pnorm.out_port(0).connect(reshape1.in_port(0))
        reshape1.in_port(1).connect(reshape_dim1.out_port(0))

        concat_node = Concat(
            graph, {
                'axis': 0,
                'name': merge.name + '/Concat_',
                'override_output_shape': True
            }).create_node()
        const3 = Const(graph, {
            'name': merge.name + '/const_reduce',
            'value': 0
        }).create_node()

        for ii, idx in enumerate(
                range(pnorm.significant, pnorm.to_significant + 1, 1)):
            const_node = Const(
                graph, {
                    'value': float_array(math.pow(const, idx)),
                    'name': merge.name + '/Const_' + ii.__str__()
                }).create_node()

            mul_node = Mul(graph, {
                'name': merge.name + '/Mul_' + ii.__str__()
            }).create_node()
            const_node.out_port(0).connect(mul_node.in_port(0))

            reshape1.out_port(0).connect(
                mul_node.in_port(1))  # connect to the graph node
            mul_node2 = Mul(graph, {
                'name': merge.name + '/Mul_Div_' + ii.__str__()
            }).create_node()

            const_node2 = Const(
                graph, {
                    'value': float_array(math.pow(const, -1 * idx)),
                    'name': merge.name + '/Const_Pow_' + ii.__str__()
                }).create_node()
            cast_node = ExpOp(graph, {
                'name': merge.name + '/Exp_' + idx.__str__()
            }).create_node()

            mul_node.out_port(0).connect(cast_node.in_port(0))
            const_node2.out_port(0).connect(mul_node2.in_port(1))
            cast_node.out_port(0).connect(mul_node2.in_port(0))
            concat_node.add_input_port(ii, skip_if_exist=True)
            concat_node.in_port(ii).get_connection().set_source(
                mul_node2.out_port(0))

        in_shape = pnorm.in_port(0).data.get_shape()
        in_shape = list(in_shape)

        reducesum_node = ReduceMean(
            graph, {
                'name': merge.id + '/_pnorm_reduced_sum',
                'keep_dims': True,
                'in_ports_count': 2,
                'shape': in_shape,
                'axis': 0,
                'need_shape_inference': None,
                'infer': reduce_infer
            }).create_node()

        const3.out_port(0).connect(reducesum_node.in_port(1))
        reducesum_node.in_port(0).get_connection().set_source(
            concat_node.out_port(0))

        reshape = Reshape(graph, {
            'name': merge.name + '/Reshape_Node'
        }).create_node()
        reshape_dim = Const(graph, {
            'value': np.array(in_shape),
            'name': merge.id + '/Reshape_Dim'
        }).create_node()
        reducesum_node.out_port(0).connect(reshape.in_port(0))
        reshape.in_port(1).connect(reshape_dim.out_port(0))
        merge.out_port(0).get_connection().set_source(reshape.out_port(0))
Example #19
    'placeholder': {
        'shape': None,
        'type': 'Parameter',
        'kind': 'op',
        'op': 'Parameter'
    },
    'tfpad': {
        'type': None,
        'kind': 'op',
        'op': 'TFPad',
        'mode': 'constant',
        'name': 'tfpad_name'
    },
    **const('paddings',
            int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2])),
    **const('fill', float_array(5.75)),
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },

    # new Pad layer and sub-graph
    'pad': {
        'type': 'Pad',
        'kind': 'op',
        'op': 'Pad',
        'mode': 'constant'
    },
    'transpose': {
        'type': 'Greater',
        'kind': 'op',
        'op': 'Greater'
    },
    'mul': {
        'type': 'Multiply',
        'kind': 'op',
        'op': 'Mul',
        'name': 'my_trelu'
    },
    'squeeze2': {
        'type': 'Squeeze',
        'kind': 'op',
        'op': 'Squeeze'
    },
    **const('alpha', float_array([0.75])),
}


class ThresholdedReluDecompositionTest(unittest.TestCase):
    def test_trelu(self):
        graph = build_graph(nodes_attributes, [
            ('parameter', 'trelu', {
                'in': 0,
                'out': 0
            }),
            ('trelu', 'result', {
                'in': 0,
                'out': 0
            }),
        ],
Example #21
def replace_resize(graph: Graph, resize: Node):
    log.debug("Converting of ONNX Resize-10 to Interpolate-4 "
              "is triggered for node {}.".format(
                  resize.soft_get('name', resize.id)))

    resize_name = resize.soft_get('name', resize.id)

    rank_node = Rank(graph, {'name': resize_name + '/max_axes'}).create_node()
    range_node = create_op_with_const_inputs(graph, Range, {
        0: int64_array(2),
        2: int64_array(1)
    }, {'name': resize_name + '/axes'})

    sizes_ss = create_op_with_const_inputs(graph, StridedSlice, {
        1: int64_array([2]),
        2: int64_array([0]),
        3: int64_array([1])
    }, {
        'name': resize_name + '/sizes_ss',
        'begin_mask': int64_array([1]),
        'end_mask': int64_array([0]),
        'new_axis_mask': int64_array([0]),
        'shrink_axis_mask': int64_array([0]),
        'ellipsis_mask': int64_array([0])
    })
    scales_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([2]),
            2: int64_array([0]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/scales_ss',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([0]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })

    rank_node.out_port(0).connect(range_node.in_port(1))

    interpolate_node = Interpolate(
        graph, {
            'version': 'opset4',
            'mode': 'linear_onnx' if resize.mode == 'linear' else 'nearest',
            'coordinate_transformation_mode': 'asymmetric',
            'cube_coeff': -0.75,
            'nearest_mode': 'simple',
            'pads_begin': int64_array([0]),
            'pads_end': int64_array([0]),
            'antialias': 0,
            'shape_calculation_mode': 'scales',
            'in_ports_count': 4
        }).create_node()

    range_node.out_port(0).connect(interpolate_node.in_port(3))
    shape_of = Shape(graph, {'name': resize_name + '/ShapeOf'}).create_node()

    # When we calculate 'sizes' input as floor(input_shape * scales), we can get incorrect 'sizes' if, e.g.,
    # scales = [1.0, 1.0, 1.33333, 2.0], input_shape = [1, 3, 30, 200], because
    # input_shape * scales = [1, 3, 39.9999, 400], and floor(input_shape * scales)[2] == 39, not 40.
    # Maybe we need to calculate 'sizes' input as floor(input_shape * scales + eps), where eps is some small
    # floating point number, e.g. 1.0e-5. But, in this case, if scales = [1.0, 1.0, 1.333333, 2.0],
    # input_shape = [1, 3, 30, 200], floor(input_shape * scales + eps) = 39, not 40, because
    # input_shape[2] * scales[2] + 1.0e-5 =  39.99991.
    # Hence, we need to calculate 'sizes' as floor(input_shape * (scales + eps)).
    add_node = create_op_with_const_inputs(graph, Add,
                                           {1: float_array([1.0e-5])},
                                           {'name': resize_name + '/Add'})

    input_data_type = data_type_str_to_np(graph.graph['cmd_params'].data_type)

    cast_shape_to_float = Cast(graph, {
        'dst_type': input_data_type
    }).create_node()

    shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
    mul_node = Mul(graph, {
        'name': resize_name + '/Mul'
    }).create_node([cast_shape_to_float, add_node])
    floor_node = Floor(graph, {
        'name': resize_name + '/Floor'
    }).create_node([mul_node])
    cast_mul_result_to_int = Cast(graph, {
        'dst_type': np.int64
    }).create_node([floor_node])
    cast_mul_result_to_int.out_port(0).connect(sizes_ss.in_port(0))
    sizes_ss.out_port(0).connect(interpolate_node.in_port(1))

    scales_ss.out_port(0).connect(interpolate_node.in_port(2))

    connection_of_resize_input = resize.in_port(0).get_connection()
    connection_of_resize_input.set_destination(interpolate_node.in_port(0))

    connection_of_scales = resize.in_port(1).get_connection()
    connection_of_scales.set_destination(scales_ss.in_port(0))

    connection_of_resize_input.get_source().connect(shape_of.in_port(0))
    connection_of_resize_input.get_source().connect(rank_node.in_port(0))
    connection_of_scales.get_source().connect(add_node.in_port(0))

    rename_nodes([(resize, resize_name + '/delete'),
                  (interpolate_node, resize_name)])
    resize.out_port(0).get_connection().set_source(
        interpolate_node.out_port(0))
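
The epsilon reasoning in the comment above is easy to reproduce numerically; only floor(input_shape * (scales + eps)) recovers the intended size of 40:

import numpy as np

input_shape = np.array([1, 3, 30, 200], dtype=np.int64)
scales = np.array([1.0, 1.0, 1.33333, 2.0])
eps = 1.0e-5

print(np.floor(input_shape * scales).astype(np.int64))          # [  1   3  39 400]
print(np.floor(input_shape * scales + eps).astype(np.int64))    # [  1   3  39 400] -- still 39
print(np.floor(input_shape * (scales + eps)).astype(np.int64))  # [  1   3  40 400]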
Example #22
shape = int64_array([1, 3, 5, 2])
nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
    }),
    **regular_op_with_shaped_data('mul', shape, {
        'type': 'Multiply',
        'name': 'mul'
    }),
    **regular_op_with_shaped_data('max', shape, {
        'type': 'Maximum',
        'name': 'final_max'
    }),
    **valued_const_with_data('const', float_array([0.5])),
    **result('result')
}

edges = [
    *connect('input:0', '0:mul'),
    *connect('const', '1:mul'),
    *connect_data('input', '0:max'),
    *connect('mul:0', '1:max'),
    *connect('max:0', 'result'),
]
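
The nodes and edges above describe max(x, x * 0.5), which LeakyReLUFusion collapses into a single LeakyReLU with negative_slope 0.5; numerically:

import numpy as np

x = np.array([-2.0, -0.5, 0.0, 3.0])
print(np.maximum(x, x * 0.5))  # [-1. -0.25 0. 3.] == LeakyReLU(x, negative_slope=0.5)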

ref_nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
Example #23
class GeLUMergerErfTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {
            'op': 'Parameter',
            'type': 'Parameter'
        }),
        **regular_op('mul', {'op': 'Mul'}),
        **regular_op('mul0', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **regular_op('div', {'op': 'Div'}),
        **regular_op('erf', {'op': 'Erf'}),
        **regular_op('add', {'op': 'Add'}),
        **const('mul_param', float_array([0.5])),
        **const('div_param', float_array([sqrt(2.)])),
        **const('add_param', int64_array([1])),
        **result('result'),
    }

    def test_gelu_p1(self):
        edges = [('input', 'mul'), ('mul', 'mul0'), ('input', 'div'),
                 ('div', 'erf'), ('erf', 'add'), ('add', 'mul0'),
                 ('mul_param', 'mul'), ('div_param', 'div'),
                 ('add_param', 'add'), ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p2(self):
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul0'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p3(self):
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')
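
All three edge layouts in the tests above encode the same erf-based GeLU, gelu(x) = 0.5 * x * (1 + erf(x / sqrt(2))); a quick check using math.erf:

import math

def gelu_erf(x):
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))

print([round(gelu_erf(v), 4) for v in (-1.0, 0.0, 1.0)])  # [-0.1587, 0.0, 0.8413]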
Example #24
import unittest

from extensions.middle.GroupNorm import GroupNormToMVN
from mo.front.common.partial_infer.utils import float_array, int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, result, build_graph_with_edge_attrs, connect, \
    regular_op_with_shaped_data, valued_const_with_data, connect_data

shape = int64_array([1, 3, 5, 2])
nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
    }),
    **valued_const_with_data('gamma', float_array([0.5])),
    **valued_const_with_data('beta', float_array([0.5])),
    **regular_op_with_shaped_data('group_norm', shape, {
        'op': 'GroupNorm',
        'name': 'group_norm',
        'num_groups': 3,
        'eps': 1e-9
    }),
    **result('result')
}

edges = [
    *connect('input:0', '0:group_norm'),
    *connect('gamma', '1:group_norm'),
    *connect('beta', '2:group_norm'),
    *connect('group_norm:0', 'result'),
]
Example #25
    def pool_infer(node: Node):
        input_shape = node.in_node(0).shape
        if input_shape is None:
            return

        if not node.has_valid('spatial_dims'):
            node['spatial_dims'] = np.delete(
                [x for x in range(len(input_shape))],
                [node.batch_dims[0], node.channel_dims[0]])

        input_spatial_shape = input_shape[node.spatial_dims]

        # Setting default pad and stride attrs in case if None specified
        if not node.has_valid('pad'):
            node['pad'] = int64_array([[0, 0]
                                       for x in range(len(input_shape))])
        if not node.has_valid('pad_spatial_shape'):
            node['pad_spatial_shape'] = node.pad[node.spatial_dims]

        if not node.has_valid('stride'):
            node['stride'] = int64_array([1 for x in range(len(input_shape))])

        if node.has_and_set('global_pool'):
            node['window'] = np.zeros(len(input_shape), dtype=np.int64)
            node.window[node.spatial_dims] = input_spatial_shape

        window_spatial_shape = node.window[node.spatial_dims]
        stride_spatial = node.stride[node.spatial_dims]
        assert any(stride_spatial), 'Stride can not be zero in node {}'.format(
            node.id)

        if node.has_valid('auto_pad') and node.auto_pad != 'explicit':
            node.pad_spatial_shape, node.output_spatial_shape = tf_window_op_pad_infer(
                input_spatial_shape, window_spatial_shape, stride_spatial,
                node.auto_pad)
            pad = np.zeros((len(input_shape), 2), dtype=np.int64)
            pad[node.spatial_dims] = node.pad_spatial_shape
            node.pad = pad
        else:

            pad_spatial_shape = np.add.reduce(node.pad_spatial_shape, axis=1)

            rounding = np.floor
            if node.soft_get('pooling_convention') == 'full' or node.soft_get(
                    'rounding_type') == 'ceil':
                rounding = np.ceil

            padded_spatial_shape = input_spatial_shape + pad_spatial_shape - window_spatial_shape
            if np.any(padded_spatial_shape < 0):
                raise Error("Data after padding has dimension less than window size. "
                            "Possible reason of error is incorrectly specified model input shape(s).")

            output_spatial_shape = int64_array(
                rounding(
                    float_array(padded_spatial_shape) / stride_spatial)) + 1

            original_pads = np.array([i[1] for i in node.pad_spatial_shape])

            for i in range(len(input_spatial_shape)):
                if original_pads[i] and (output_spatial_shape[i] - 1) * stride_spatial[i] >= \
                        input_spatial_shape[i] + original_pads[i]:
                    output_spatial_shape[i] -= 1

            node['output_spatial_shape'] = output_spatial_shape

        output_shape = input_shape.copy()
        output_shape[node.spatial_dims] = node.output_spatial_shape
        node.out_node().shape = output_shape

        # Add permute_attrs
        PermuteAttrs.create_permute_attrs(node,
                                          attrs=[('pad', 'input:0'),
                                                 ('stride', 'input:0'),
                                                 ('window', 'input:0'),
                                                 ('spatial_dims', 'input:0')])
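
In the explicit-padding branch above, output_spatial_shape = rounding((input + pad - window) / stride) + 1; a worked example with assumed values where floor and ceil rounding disagree:

import numpy as np

input_spatial_shape = np.array([12])
pad_spatial_shape = np.array([2])    # summed begin + end padding
window_spatial_shape = np.array([3])
stride_spatial = np.array([2])

padded = input_spatial_shape + pad_spatial_shape - window_spatial_shape  # [11]
print((np.floor(padded / stride_spatial) + 1).astype(np.int64))  # [6] -- default rounding
print((np.ceil(padded / stride_spatial) + 1).astype(np.int64))   # [7] -- 'full' convention / 'ceil' rounding_type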
Example #26
def replace_resize(graph: Graph, resize: Node):
    log.debug("Converting of ONNX Resize-11 to Interpolate-4 "
              "is triggered for node {}.".format(
                  resize.soft_get('name', resize.id)))

    input_shape = resize.in_port(0).data.get_shape()
    input_rank = len(input_shape)
    resize_name = resize.soft_get('name', resize.id)
    if input_rank not in {4, 5}:
        log.warning(
            'The input shape is not 4D or 5D for op with name {}'.format(
                resize_name))
        return

    num_of_inputs = len([
        port for port in resize.in_ports().values() if not port.disconnected()
    ])
    assert num_of_inputs in {3, 4}, \
        "Number of inputs of ONNXResize (with name {}) should be equal to 3 or 4".format(resize_name)

    assert resize.soft_get('coordinate_transformation_mode') != 'tf_crop_and_resize', \
        'Mode tf_crop_and_resize is not supported for op {} with name {}'.format(resize.op, resize_name)

    layout = graph.graph['layout']

    if input_rank == 4:
        begin_dim = get_height_dim(layout, input_rank)
        end_dim = get_width_dim(layout, input_rank) + 1
    else:
        begin_dim = get_depth_dim(layout, input_rank)
        end_dim = get_width_dim(layout, input_rank) + 1

    sizes_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([begin_dim]),
            2: int64_array([end_dim]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/StridedSlice_sizes',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([1]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })
    scales_ss = create_op_with_const_inputs(
        graph, StridedSlice, {
            1: int64_array([begin_dim]),
            2: int64_array([end_dim]),
            3: int64_array([1])
        }, {
            'name': resize_name + '/StridedSlice_scales',
            'begin_mask': int64_array([1]),
            'end_mask': int64_array([1]),
            'new_axis_mask': int64_array([0]),
            'shrink_axis_mask': int64_array([0]),
            'ellipsis_mask': int64_array([0])
        })
    axes_node = Const(
        graph, {
            'name': resize_name + '/axis',
            'value': int64_array(np.arange(begin_dim, end_dim))
        }).create_node()

    shape_calculation_mode = 'scales' if num_of_inputs == 3 else 'sizes'

    interpolate_node = Interpolate(
        graph, {
            'version': 'opset4',
            'mode': convert_mode(resize.mode),
            'coordinate_transformation_mode': resize.coordinate_transformation_mode,
            'cube_coeff': resize.cube_coeff,
            'nearest_mode': resize.nearest_mode,
            'pads_begin': int64_array([0]),
            'pads_end': int64_array([0]),
            'antialias': 0,
            'shape_calculation_mode': shape_calculation_mode,
            'in_ports_count': 4
        }).create_node()

    axes_node.out_port(0).connect(interpolate_node.in_port(3))
    shape_of = Shape(graph, {'name': resize_name + '/ShapeOf'}).create_node()

    add_node = create_op_with_const_inputs(graph, Add,
                                           {1: float_array([1.0e-5])},
                                           {'name': resize_name + '/Add'})

    input_data_type = data_type_str_to_np(graph.graph['cmd_params'].data_type)

    if num_of_inputs == 3:
        cast_shape_to_float = Cast(graph, {
            'dst_type': input_data_type
        }).create_node()
        mul_node = Mul(graph, {'name': resize_name + '/Mul'}).create_node()
        shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
        cast_shape_to_float.out_port(0).connect(mul_node.in_port(0))
        cast_add_result_to_int = Cast(graph, {
            'dst_type': np.int64
        }).create_node()
        floor_node = Floor(graph, {
            'name': resize_name + '/Floor'
        }).create_node()
        mul_node.out_port(0).connect(add_node.in_port(0))
        add_node.out_port(0).connect(floor_node.in_port(0))
        floor_node.out_port(0).connect(cast_add_result_to_int.in_port(0))
        cast_add_result_to_int.out_port(0).connect(sizes_ss.in_port(0))
        sizes_ss.out_port(0).connect(interpolate_node.in_port(1))
        scales_ss.out_port(0).connect(interpolate_node.in_port(2))

        connection_of_resize_input = resize.in_port(0).get_connection()
        connection_of_resize_input.set_destination(interpolate_node.in_port(0))

        connection_of_scales = resize.in_port(2).get_connection()
        connection_of_scales.set_destination(scales_ss.in_port(0))

        connection_of_resize_input.get_source().connect(shape_of.in_port(0))
        connection_of_scales.get_source().connect(mul_node.in_port(1))
    else:
        cast_shape_to_float = Cast(graph, {
            'dst_type': input_data_type
        }).create_node()
        cast_sizes_to_float = Cast(graph, {
            'dst_type': input_data_type
        }).create_node()
        div_node = Div(graph, {'name': resize_name + '/Div'}).create_node()
        cast_sizes_to_float.out_port(0).connect(div_node.in_port(0))
        cast_shape_to_float.out_port(0).connect(div_node.in_port(1))
        shape_of.out_port(0).connect(cast_shape_to_float.in_port(0))
        div_node.out_port(0).connect(add_node.in_port(0))
        add_node.out_port(0).connect(scales_ss.in_port(0))
        scales_ss.out_port(0).connect(interpolate_node.in_port(2))
        sizes_ss.out_port(0).connect(interpolate_node.in_port(1))

        connection_of_resize_input = resize.in_port(0).get_connection()
        connection_of_resize_input.set_destination(interpolate_node.in_port(0))

        connection_of_sizes = resize.in_port(3).get_connection()
        connection_of_sizes.set_destination(sizes_ss.in_port(0))

        connection_of_resize_input.get_source().connect(shape_of.in_port(0))
        connection_of_sizes.get_source().connect(
            cast_sizes_to_float.in_port(0))

    rename_nodes([(resize, resize_name + '/delete'),
                  (interpolate_node, resize_name)])
    resize.out_port(0).get_connection().set_source(
        interpolate_node.out_port(0))
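
In the sizes-driven branch above (four connected inputs), scales are derived as sizes / input_shape + eps before feeding Interpolate; a numpy sketch with assumed values:

import numpy as np

input_shape = np.array([1, 3, 30, 200], dtype=np.int64)
sizes = np.array([1, 3, 40, 400], dtype=np.int64)
eps = 1.0e-5

scales = sizes / input_shape + eps
print(scales)  # approx. [1.00001 1.00001 1.33334 2.00001]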