Example #1
def convert_const_node_value_type(const_node: Node, np_data_type):
    assert const_node.type == 'Const'
    log.warning('Converting type of Const node "{}" to "{}"'.format(
        const_node.name, np_data_type))
    const_node.value = const_node.value.astype(np_data_type)
    const_node.data_type = np_data_type
    const_node.infer(const_node)
    const_node.type_infer(const_node)

    # if the Const node has an input data node, we need to update it as well
    if len(const_node.in_nodes()) == 1:
        input_data = const_node.in_node(0)
        assert input_data.kind == 'data'
        input_data.value = input_data.value.astype(const_node.data_type)
        input_data.data_type = const_node.data_type
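
A note on this cast: astype silently overflows to inf when a value exceeds the FP16 range, which is why the conversion pass in Example #3 validates values before casting. A minimal standalone illustration (plain numpy, independent of the mo package):

import numpy as np

value = np.array([1.0, 7.0e4], dtype=np.float32)
casted = value.astype(np.float16)
print(casted)  # the 7.0e4 element overflows to inf (FP16 max is 65504)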
Example #2
    def infer(node: Node):
        axes_1_value = node.in_port(1).data.get_value()
        axes_2_value = node.in_port(2).data.get_value()
        if axes_1_value is None or axes_2_value is None:
            log.warning('Reduction indices for mean and variance for MVN node {} are not constants'.format(node.name))
            return

        if not np.array_equal(axes_1_value, axes_2_value):
            log.warning('Reduction indices for mean {} and variance {} do not match'.format(
                axes_1_value,
                axes_2_value
            ))
            return

        power_value = node.in_port(3).data.get_value()
        eps_value = node.in_port(4).data.get_value()
        if power_value is None or eps_value is None:
            log.warning('Power or/and epsilon values for MVN node {} are not constants'.format(node.name))
            return

        if power_value != 0.5:
            log.warning('Power for MVN node {} ({}) is not equal to 0.5'.format(node.name, power_value))
            return

        node['eps'] = eps_value

        for i in range(2, 5):
            node.in_port(i).disconnect()
        node.old_infer(node)
        node.infer = node.old_infer
        del node['old_infer']
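
The key mechanism in this snippet is the self-restoring infer wrapper: the original infer callable is stashed as old_infer, the wrapper performs its one-time validation and port rewiring, then reinstalls the original so later inference calls skip that work. A minimal standalone sketch of the pattern, using a hypothetical dict-based node stand-in (no mo dependency):

def original_infer(node):
    print('real inference for', node['name'])

def one_time_infer(node):
    # ... one-time validation / port rewiring would happen here ...
    node['old_infer'](node)            # run the real inference once
    node['infer'] = node['old_infer']  # reinstall the original
    del node['old_infer']

node = {'name': 'mvn_0', 'old_infer': original_infer, 'infer': one_time_infer}
node['infer'](node)  # validates once, then defers to original_infer
node['infer'](node)  # subsequent calls hit original_infer directly

Example #3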
def assert_that_is_castable_to_fp16(node: Node):
    op_name = node.soft_get('op')
    node_name = node.soft_get('name', node.id)

    for i in operations_with_data_type_attributes[op_name][
            'in_ports_to_check']:
        val = node.in_port(i).data.get_value()
        if val is None:
            return

        if np.any(val > np.finfo(np.float16).max) or np.any(
                val < np.finfo(np.float16).min):
            raise Error(
                "Try to convert with --data_type=FP32 argument. "
                "This model can not be converted to FP16 precision, since "
                "'{}' node value {} exceeds FP16 allowed limits: [{}, {}]".
                format(node_name, val,
                       np.finfo(np.float16).min,
                       np.finfo(np.float16).max))
        # these input values will be rewritten later since force_shape_inference=True
        node.in_port(i).data.set_value(val.astype(np.float16))

    original_output = node.out_port(0).data.get_value()
    node.infer(node)
    casted_output = node.out_port(0).data.get_value()
    original_output_len = len(original_output) if hasattr(
        original_output, '__len__') else None
    casted_output_len = len(casted_output) if hasattr(casted_output,
                                                      '__len__') else None

    if original_output_len != casted_output_len:
        raise Error(
            "Try to convert with --data_type=FP32 argument. "
            "This model can not be converted to FP16 precision, since "
            "after conversion of '{}' node to FP16 output shape {} differs from the original {}."
            .format(node_name, casted_output_len, original_output_len))

    diff_count = np.count_nonzero(
        np.subtract(original_output, casted_output) > 1.e-4)
    if diff_count > 0:
        log.warning(
            "{} elements of {} of Range node '{}' output differ from the original values while "
            "converting network to FP16 precision".format(
                diff_count, len(original_output), node_name))
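
The FP16 bounds test above is plain numpy and easy to reproduce in isolation. A small self-contained sketch of the same limit check (the fits_into_fp16 helper is hypothetical, for illustration only):

import numpy as np

FP16 = np.finfo(np.float16)

def fits_into_fp16(values) -> bool:
    # True when every element lies inside the representable FP16 range
    values = np.asarray(values)
    return not (np.any(values > FP16.max) or np.any(values < FP16.min))

assert fits_into_fp16([0.0, 65504.0])   # 65504 is exactly the FP16 maximum
assert not fits_into_fp16([70000.0])    # would overflow to inf after astype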
Example #4
    def test_value_propagation(self, a_shape, a_value, b_shape, b_value,
                               elem_type):
        graph = build_graph(nodes_attrs=graph_nodes_attrs,
                            edges=graph_edges,
                            update_attributes={
                                'A': {
                                    'shape': int64_array(a_shape),
                                    'value': a_value.astype(elem_type)
                                },
                                'A_data': {
                                    'shape': int64_array(a_shape),
                                    'value': a_value.astype(elem_type)
                                },
                                'B': {
                                    'shape': int64_array(b_shape),
                                    'value': b_value.astype(elem_type)
                                },
                                'B_data': {
                                    'shape': int64_array(b_shape),
                                    'value': b_value.astype(elem_type)
                                },
                            })
        node = Node(graph, 'div')
        node['infer'] = Div(graph, node.attrs()).create_node().infer
        node.infer(node)
        node_data = node.out_port(0).get_destination().data.get_value()

        def func_for_ref():
            if np.issubdtype(elem_type, np.integer):
                return lambda a, b: a // b
            else:
                return lambda a, b: a / b

        ref_data = func_for_ref()(a_value, b_value)
        node_data_shape = node_data.shape
        ref_data_shape = ref_data.shape
        msg = "Value propagation for 'div' node is not correct."
        self.assertTrue(
            node_data_shape == ref_data_shape
            and np.all(node_data == ref_data), msg)
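
The reference function in this test encodes the value-propagation semantics checked for Div: floor division for integer element types, true division otherwise. That dispatch can be verified without building a graph; a plain-numpy mirror of func_for_ref:

import numpy as np

def ref_div(a: np.ndarray, b: np.ndarray) -> np.ndarray:
    # '//' for integer dtypes, '/' for floating dtypes, as in func_for_ref
    return a // b if np.issubdtype(a.dtype, np.integer) else a / b

print(ref_div(np.array([7, -7]), np.array([2, 2])))          # [ 3 -4]
print(ref_div(np.array([7.0, -7.0]), np.array([2.0, 2.0])))  # [ 3.5 -3.5]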
Example #5
    def infer(node: Node):
        assert len(node.in_nodes()) == len(__class__.inputs) + len(__class__.extra_inputs)

        for axis in ['concat_axis', 'split_axis']:
            axis_node = __class__.extra_inputs.index(axis) + len(__class__.inputs)
            assert node.in_node(axis_node).has_valid('value')
            assert node.in_node(axis_node).value == 1

        shift_const = node.in_node(__class__.extra_inputs.index('shift_const') + len(__class__.inputs))
        assert shift_const.has_valid('value')
        shift_const = shift_const.value
        assert shift_const.ndim == 0  # expect scalar value
        node['shift_const'] = shift_const.copy()

        weights_node = node.in_node(__class__.inputs.index('weights'))
        biases_node = node.in_node(__class__.inputs.index('biases'))

        assert weights_node.has_valid('value')
        assert biases_node.has_valid('value')

        # Restore original infer function (to avoid calling previous code twice) and call it
        node.infer = node.old_infer
        node.infer(node)
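
Note how the snippet resolves ports by name (__class__.inputs.index('weights')) rather than by hard-coded numbers, so the checks survive a reordering of the port lists. A minimal standalone illustration of that lookup, with hypothetical inputs/extra_inputs lists:

# Hypothetical port layout mirroring the pattern above: named inputs
# come first, extra inputs are appended after them.
inputs = ['data', 'weights', 'biases']
extra_inputs = ['shift_const', 'concat_axis', 'split_axis']

def port_of(name: str) -> int:
    if name in inputs:
        return inputs.index(name)
    return len(inputs) + extra_inputs.index(name)

assert port_of('weights') == 1
assert port_of('split_axis') == 5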
Example #6
    def infer(node: Node):
        axes_1_value = node.in_port(1).data.get_value()
        axes_2_value = node.in_port(2).data.get_value()
        if axes_1_value is None or axes_2_value is None:
            log.warning(
                'Reduction indices for mean and variance for MVN node {} are not constants'
                .format(node.name))
            return

        if not np.array_equal(axes_1_value, axes_2_value):
            log.warning(
                'Reduction indices for mean {} and variance {} do not match'.
                format(axes_1_value, axes_2_value))
            return

        node.in_port(2).disconnect()
        node.old_infer(node)
        node.infer = node.old_infer
        del node['old_infer']
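
As in Example #2, the two axes inputs must agree before the redundant one is disconnected. np.array_equal is the robust way to express that comparison, since it also returns False for mismatched shapes instead of raising. A plain standalone check:

import numpy as np

axes_1 = np.array([1, 2, 3])
assert np.array_equal(axes_1, np.array([1, 2, 3]))   # identical contents
assert not np.array_equal(axes_1, np.array([1, 2]))  # shape mismatch -> False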
Example #7
    def test_switch_infer_with_condition(self):
        nodes = [
            ('tensor', {
                'value': np.zeros((3, 3)),
                'kind': 'data',
                'executable': True,
                'shape': np.array([3, 3])
            }),
            ('pred_id', {
                'value': True,
                'kind': 'data',
                'executable': True
            }),
            ('switch', {
                'type': 'Switch',
                'kind': 'op',
                'op': 'Switch',
                'infer': Switch.infer
            }),
            ('switch_data_0', {
                'value': None,
                'kind': 'data',
                'executable': True,
                'shape': None
            }),
            ('switch_data_1', {
                'value': None,
                'kind': 'data',
                'executable': True,
                'shape': None
            }),
            ('result_0', {
                'value': None,
                'kind': 'op',
                'executable': True,
                'type': 'Result',
                'op': 'Result'
            }),
            ('result_1', {
                'value': None,
                'kind': 'op',
                'executable': True,
                'type': 'Result',
                'op': 'Result'
            }),
        ]
        edges = [
            ('tensor', 'switch', {
                'in': 0
            }),
            ('pred_id', 'switch', {
                'in': 1
            }),
            ('switch', 'switch_data_0', {
                'out': 0
            }),
            ('switch', 'switch_data_1', {
                'out': 1
            }),
            ('switch_data_0', 'result_0'),
            ('switch_data_1', 'result_1'),
        ]
        graph = build_graph_with_attrs(nodes_with_attrs=nodes,
                                       edges_with_attrs=edges)

        # We should propagate shapes and values
        graph_ref = build_graph_with_attrs(nodes_with_attrs=nodes,
                                           edges_with_attrs=edges,
                                           update_nodes_attributes=[
                                               ('switch_data_0', {
                                                   'shape': np.array([3, 3]),
                                                   'value': np.zeros((3, 3))
                                               }),
                                               ('switch_data_1', {
                                                   'shape': np.array([3, 3]),
                                                   'value': np.zeros((3, 3))
                                               })
                                           ])

        node = Node(graph, 'switch')
        node.infer(node)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'switch_data_0',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
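
The test follows the usual Model Optimizer pattern: build an input graph, build a reference graph that differs only in the attributes the pass should produce, run infer, then diff the two with compare_graphs. Semantically, Switch shape inference copies the input tensor's shape and known value to both output data nodes; the boolean pred_id only affects executability downstream. A tiny standalone model of that propagation (plain dicts, no mo dependency):

import numpy as np

def switch_like_infer(tensor, outputs):
    # Both outputs receive the input's shape and value; control-flow
    # inference later decides executability from the predicate.
    for out in outputs:
        out['shape'] = np.array(tensor['value'].shape)
        out['value'] = tensor['value']

tensor = {'value': np.zeros((3, 3))}
outs = [{'shape': None, 'value': None} for _ in range(2)]
switch_like_infer(tensor, outs)
assert all(np.array_equal(o['shape'], [3, 3]) for o in outs)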
Example #8
def partial_infer(graph: Graph, start_node: str = None):
    """
    Tries to execute constant parts of the graph and deduce as much as possible
    information following the data flow, e.g. calculate and propagate shapes and
    constant values. Partially or completely defined values are stored in data
    nodes (kind='data').
    """
    # We have to turn off strict mode because below we add and remove edges without attributes, which strict mode prohibits
    graph.strict_mode = False
    cycle_nodes = graph.get_nodes_with_attributes(is_cyclic=True)
    cycle_nodes = [Node(graph, node).out_node().id for node in cycle_nodes]
    ebunch_cyclic = list(
        graph.out_edges(nbunch=cycle_nodes, data=True, keys=True))
    ebunch_reconnected = exit_bound_edges(graph,
                                          sources=cycle_nodes,
                                          end_node_attrs={'op': 'Exit'})
    graph.remove_edges_from(ebunch_cyclic)
    graph.add_edges_from(ebunch_reconnected)

    try:
        nodes = list(nx.topological_sort(graph))
    except nx.NetworkXUnfeasible:
        raise Error('Graph contains a cycle. Can not proceed. ' +
                    refer_to_faq_msg(97))

    graph.remove_edges_from(ebunch_reconnected)
    graph.add_edges_from(ebunch_cyclic)
    graph.strict_mode = True

    # Mark all nodes as not inferred yet
    if start_node is not None:
        start_index = nodes.index(start_node)
        nx.set_node_attributes(G=graph.subgraph(nodes[start_index:]),
                               name='is_partial_inferred',
                               values=False)
    else:
        nx.set_node_attributes(G=graph,
                               name='is_partial_inferred',
                               values=False)

    nx.set_node_attributes(
        G=graph,
        name='executable',
        values={n: True
                for n in graph.get_nodes_with_attributes(kind='data')})

    # First we infer constant sub-graphs so that reverse infer can use the resulting constant values. For example,
    # convolution weights may be reshuffled by some operation in the graph and thus are not directly consumed by the
    # convolution node.
    infer_nodes(graph, nodes, True)

    # we may need to deduce shape for Parameter node(s) if it is not defined
    need_reverse_infer = False
    for parameter in graph.get_op_nodes(op='Parameter'):
        if parameter.soft_get('shape', None) is None:
            need_reverse_infer = True

    if need_reverse_infer:
        reverse_infer(graph, nodes)

    infer_nodes(graph, nodes, False)

    not_fully_inferred = graph.get_nodes_with_attributes(
        is_not_fully_inferred=True)
    for n in not_fully_inferred:
        node = Node(graph, n)
        if node.has_and_set('infer'):
            node.infer(node)

    return graph
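
Cycle handling in partial_infer follows a break-sort-restore pattern: the back edges of cyclic nodes are removed, the now-acyclic graph is topologically sorted, and the edges are put back before inference runs in the computed order. A minimal standalone sketch of the same pattern with networkx (hypothetical three-node graph):

import networkx as nx

g = nx.MultiDiGraph()
g.add_edges_from([('a', 'b'), ('b', 'c'), ('c', 'b')])  # b -> c -> b cycles

back_edges = [('c', 'b', 0)]           # the edge (with key 0) closing the loop
g.remove_edges_from(back_edges)        # break the cycle temporarily
order = list(nx.topological_sort(g))   # a valid order now exists
g.add_edges_from([('c', 'b')])         # restore the original topology
print(order)                           # ['a', 'b', 'c']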
Example #9
def infer_nodes(graph: Graph,
                nodes: List[Node],
                constant_subgraph_only: bool = False):
    """
    Run "infer" function of the specified nodes.

    :param graph: graph with nodes
    :param nodes: list of node ids in the topological order
    :param constant_subgraph_only: flag which specifies whether only inference of constant sub-graphs should be done
    """
    debug_logger = log.getLogger().isEnabledFor(log.DEBUG)
    for n in nodes:
        # Data Flow Infer
        node = Node(graph, n)
        node_name = node.soft_get('name', node.id)
        try:
            if node.has(
                    'is_partial_inferred') and not node.is_partial_inferred:
                if node.has('infer') and node.infer is not None:
                    # we consider that an operation will produce a value only if all of its
                    # inputs are constants, or it is a 'ShapeOf' operation
                    if constant_subgraph_only:
                        in_values = [
                            port.data.get_value()
                            for port in node.in_ports().values()
                        ]
                        if node.soft_get('op') == 'Parameter' or any(value is None for value in in_values) or \
                                (node.soft_get('op') == 'ShapeOf' and node.in_port(0).data.get_shape() is None):
                            # if any new ShapeOf-like operation appears, the condition above should be updated
                            continue

                    if debug_logger:
                        log.debug('-' * 20)
                        log.debug('Partial infer for {}'.format(
                            node.soft_get('name')))
                        log.debug('Op: {}'.format(node.soft_get('op')))
                        log.debug('Inputs:')
                        log_debug_dict(node.in_nodes(), 'input')

                    node.infer(node)
                    out_nodes = node.out_nodes()

                    # propagate nchw_layout attributes to data nodes
                    if node.has('nchw_layout'):
                        for out_node in out_nodes.values():
                            out_node['nchw_layout'] = node.nchw_layout

                    # In debug print current node attributes, input shapes/values and output shape/values
                    if debug_logger:
                        log.debug('Outputs:')
                        log_debug_dict(node.out_nodes(), 'output')

                    if not constant_subgraph_only:
                        not_all_output_shapes = False

                        for out_port, out_node in out_nodes.items():
                            if not out_node.has_valid('shape'):
                                log.error(
                                    'Shape is not defined for output {} of "{}".'
                                    .format(out_port, node_name))
                                not_all_output_shapes = True

                        if not_all_output_shapes:
                            raise Error(
                                'Not all output shapes were inferred or fully defined for node "{}". '
                                + refer_to_faq_msg(40), node_name)
                elif node.kind != 'data':
                    raise Error(
                        'There is no registered "infer" function for node "{}" with op = "{}". '
                        +
                        'Please implement this function in the extensions. ' +
                        refer_to_faq_msg(37), node_name, node.soft_get('op'))
                node.is_partial_inferred = True
        except Exception as err:
            log.error('Cannot infer shapes or values for node "{}".'.format(
                node.soft_get('name')))
            log.error(str(err))
            log.error('')
            log.error(
                'It can happen due to a bug in the custom shape infer function {}.'
                .format(node.soft_get('infer')))
            log.error(
                'Or because the node inputs have incorrect values/shapes.')
            log.error(
                'Or because input shapes are incorrect (embedded to the model or passed via --input_shape).'
            )
            debug_messages = '\n'.join([
                'Layer "' + node_name + '": ' + node_attrs['debug_message']
                for node_name, node_attrs in graph.nodes(data=True)
                if 'debug_message' in node_attrs
            ])
            if debug_messages != "":
                log.error('')
                log.error('Other possible failure reasons are listed below:')
                log.error(debug_messages)
            if not debug_logger:
                log.error(
                    'Run Model Optimizer with --log_level=DEBUG for more information.'
                )
            else:
                log.debug('Node "{}" attributes: {}'.format(
                    node.soft_get('name'), node.graph.node[node.id]))
            raise Error('Stopped shape/value propagation at "{}" node. '.
                        format(node.soft_get('name')) +
                        refer_to_faq_msg(38)) from err
        control_flow_infer(graph, n)
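
The constant_subgraph_only gate in the loop above is the heart of the first inference pass: a node is folded only when its op can produce a value from what is already known. A standalone mirror of that skip condition (hypothetical op/in_values/in_shape arguments):

def skip_in_constant_pass(op, in_values, in_shape):
    # Mirrors the condition above: Parameter nodes never fold, any unknown
    # input value blocks folding, and ShapeOf also needs a known input shape.
    return (op == 'Parameter'
            or any(v is None for v in in_values)
            or (op == 'ShapeOf' and in_shape is None))

assert skip_in_constant_pass('Add', [1.0, None], in_shape=(2,))
assert not skip_in_constant_pass('Mul', [2.0, 3.0], in_shape=(1,))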