Example #1
 def test_two_nodes_one_bin(self):
     """Test case for two output nodes, one with 'bin' parameter, other without."""
     shape = np.array([2, 3, 4])
     data = np.zeros(shape)
     graph = build_graph_with_attrs(
         nodes_with_attrs=self.nodes + [('next_node_2', {
             'kind': 'op'
         })],
         edges_with_attrs=self.edges + [('data_node', 'next_node_2')],
         update_nodes_attributes=[('data_node', {
             'shape': shape,
             'value': data
         })],
         update_edge_attrs={('data_node', 'next_node', 0): {'bin': 0}},
     )
     graph_ref = build_graph_with_attrs(
         nodes_with_attrs=self.nodes + self.new_nodes + [('next_node_2', {
             'kind': 'op'
         })],
         edges_with_attrs=self.edges + self.new_edges +
         [('data_node', 'next_node_2')],
         update_nodes_attributes=[('data_node', {
             'shape': shape,
             'value': data
         }), ('const_data', {
             'shape': shape,
             'value': data
         })])
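     # One consumer edge lacks the 'bin' attribute, so the pass is expected to
     # insert the Const subgraph described by self.new_nodes/self.new_edges.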
     tested_pattern = CreateConstNodesReplacement()
     tested_pattern.find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph, graph_ref, last_node='next_node')
     self.assertTrue(flag, resp)
Example #2
 def test_two_nodes_with_bin(self):
     """Test case for data node with 2 consumers with bin edge attr.
     Nothing should happened."""
     shape = np.array([2, 3, 4])
     data = np.zeros(shape)
     graph = build_graph_with_attrs(
         nodes_with_attrs=self.nodes + [('next_node_2', {
             'kind': 'op'
         })],
         edges_with_attrs=self.edges + [('data_node', 'next_node_2')],
         update_nodes_attributes=[('data_node', {
             'shape': shape,
             'value': data
         })],
         update_edge_attrs={
             ('data_node', 'next_node', 0): {'bin': 0},
             ('data_node', 'next_node_2', 0): {'bin': 0}
         },
     )
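     # Both consumer edges carry the 'bin' attribute, so the pass should leave
     # the graph unchanged; it is therefore compared against itself below.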
     tested_pattern = CreateConstNodesReplacement()
     tested_pattern.find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph, graph, last_node='next_node')
     self.assertTrue(flag, resp)
Example #3
 def test_one_bin_node(self):
     """Nothing should happen."""
     shape = np.array([2, 3, 4])
     data = np.zeros(shape)
     graph = build_graph_with_attrs(
         nodes_with_attrs=self.nodes,
         edges_with_attrs=self.edges,
         update_nodes_attributes=[('data_node', {'shape': shape, 'value': data})],
         update_edge_attrs={('data_node', 'next_node', 0): {'bin': 0}},
     )
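     # The only consumer edge carries the 'bin' attribute, so no Const node
     # should be created and the graph should stay unchanged.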
     tested_pattern = CreateConstNodesReplacement()
     tested_pattern.find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph, graph, last_node='next_node')
     self.assertTrue(flag, resp)
Example #4
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(
        graph=graph,
        data_type=graph.graph['cmd_params'].data_type,
        output_dir=argv.output_dir,
        output_model_name=argv.model_name,
        mean_data=graph.graph['mf'] if 'mf' in graph.graph else None,
        input_names=graph.graph['input_names']
        if 'input_names' in graph.graph else [],
        meta_info=get_meta_info(argv))

    if not (argv.framework == 'tf'
            and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        print('\n[ SUCCESS ] Generated IR version {} model.'.format(
            get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(
            os.path.join(output_dir, argv.model_name)))
        print('[ SUCCESS ] BIN file: {}.bin'.format(
            os.path.join(output_dir, argv.model_name)))

    return 0
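
Several of these examples apply passes with for_graph_and_each_sub_graph_recursively. As a rough illustration of that helper, here is a minimal self-contained sketch; plain dicts stand in for MO Graph objects, and the 'sub_graphs' key mirrors the attribute MO uses for TensorIterator/Loop bodies. This is illustrative only, not MO's actual implementation.

def apply_recursively(graph, func):
    """Apply func to the graph, then recurse into every nested sub-graph."""
    func(graph)
    for node in graph.get('nodes', []):
        for sub_graph in node.get('sub_graphs', []):
            apply_recursively(sub_graph, func)

# Hypothetical usage: the function should touch both the outer and the nested graph.
inner = {'nodes': []}
outer = {'nodes': [{'sub_graphs': [inner]}]}
apply_recursively(outer, lambda g: g.setdefault('visited', True))
assert outer.get('visited') and inner.get('visited')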
Example #5
def save_restored_graph(graph: Graph, path: str, meta_data, name=None):
    """
    Apply all necessary transformations from the back stage to prepare and save the restored graph and metadata.
    :param graph: Graph to save
    :param path: Path to saved IR
    :param meta_data: Namespace with converting parameters restored from IR
    :param name: Name for saved IR
    :return:
    """

    if name is None:
        name = graph.name

    precision = data_type_str_to_precision(graph.graph['cmd_params'].data_type)
    assert precision in ['FP16', 'FP32'], 'Cannot define precision for restored model!'

    # The order of the list items matters; do not change it.
    transformation_list = [
        ConvolutionWithGroupsResolver,
        StridedSliceMasksNormalizer,
        PackBinaryWeights,
        BlobNormalizer,
        ConvolutionNormalizer,
        KaldiRemoveMemoryOutputBackReplacementPattern,
    ]

    # We need to run some specific passes from the MO back stage.
    apply_replacements_list(graph, transformation_list)

    # Transformations with enabled=False should be run manually.
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(graph, precision, path, name, meta_info=meta_data)
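
For context, a hedged sketch of how save_restored_graph is typically reached: MO's IR reader first restores a Graph from an existing IR and then hands it here for re-serialization. The import path and the restore_graph_from_ir call below are assumptions based on MO's ir_reader module, not something shown in this example.

# Hypothetical usage (module path and signature assumed):
from mo.utils.ir_reader.restore_graph import restore_graph_from_ir, save_restored_graph

graph, meta_data = restore_graph_from_ir('model.xml', 'model.bin')
save_restored_graph(graph, path='output_dir', meta_data=meta_data, name='model_restored')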
Example #6
def save_restored_graph(graph: Graph, path: str, meta_data, name=None):
    """
    Apply all necessary transformations from the back stage to prepare and save the restored graph and metadata.
    :param graph: Graph to save
    :param path: Path to saved IR
    :param meta_data: Namespace with converting parameters restored from IR
    :param name: Name for saved IR
    :return:
    """

    if name is None:
        name = graph.name

    if 'data_type' not in meta_data:
        log.debug(
            'Provided `meta_data` does not contain the `data_type` parameter. Setting'
            ' `data_type` to `FP32`.')
        # Set data_type to FP32. All restored constants will be saved in the provided data type.
        data_type = 'FP32'

        # We need to specify this attribute so that the graph transformations pass. This information will not be saved into the IR.
        # All constants and placeholders will be saved with the same types as restored from the IR.
        graph.graph['cmd_params'].data_type = data_type
    else:
        data_type = data_type_str_to_precision(
            graph.graph['cmd_params'].data_type)

    assert data_type in ['FP16', 'FP32'], '`data_type` value {} is not supported by MO,' \
                                          ' cannot save graph'.format(data_type)

    # The order of the list items matters; do not change it.
    transformation_list = [
        ConvolutionWithGroupsResolver,
        StridedSliceMasksNormalizer,
        PackBinaryWeights,
        BlobNormalizer,
        ConvolutionNormalizer,
        MarkNodesWithShapeValues,
    ]

    # We need to run some specific passes from the MO back stage.
    apply_replacements_list(graph, transformation_list)

    # Transformations with enabled=False should be run manually.
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(graph,
                    data_type,
                    path,
                    name,
                    meta_info=meta_data,
                    used_by_ir_reader=True)
Example #7
 def test_one_node(self):
     """We should add Const node and data node."""
     shape = np.array([2, 3, 4])
     data = np.zeros(shape)
     graph = build_graph_with_attrs(
         nodes_with_attrs=self.nodes,
         edges_with_attrs=self.edges,
         update_nodes_attributes=[('data_node', {'shape': shape, 'value': data})]
     )
     graph_ref = build_graph_with_attrs(
         nodes_with_attrs=self.nodes + self.new_nodes,
         edges_with_attrs=self.edges + self.new_edges,
         update_nodes_attributes=[('data_node', {'shape': shape, 'value': data}),
                                  ('const_data', {'shape': shape, 'value': data})]
     )
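     # No 'bin' edge attributes here, so the pass should insert the Const and
     # const_data nodes that graph_ref describes.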
     tested_pattern = CreateConstNodesReplacement()
     tested_pattern.find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph, graph_ref, last_node='next_node')
     self.assertTrue(flag, resp)
Example #8
    def test_force_precision_parameter(self):
        precision = 'FP16'
        shape = np.array([2, 3, 4])
        data = np.zeros(shape)
        graph = build_graph_with_attrs(
            nodes_with_attrs=self.nodes,
            edges_with_attrs=self.edges,
            update_nodes_attributes=[('data_node', {
                'shape': shape,
                'value': data,
                'force_precision': precision
            })])
        graph_ref = build_graph_with_attrs(
            nodes_with_attrs=self.nodes + self.new_nodes,
            edges_with_attrs=self.edges + self.new_edges,
            update_nodes_attributes=[('data_node', {
                'shape': shape,
                'value': data
            }), ('const_data', {
                'shape': shape,
                'value': data,
                'force_precision': precision
            }), ('const', {
                'force_precision': precision
            })])
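        # The pass should propagate 'force_precision' from the original data
        # node to the newly created Const op and its output data node.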
        tested_pattern = CreateConstNodesReplacement()
        tested_pattern.find_and_replace_pattern(graph)
        (flag, resp) = compare_graphs(graph, graph_ref, last_node='next_node')
        self.assertTrue(flag, resp)

        # Check that force_precision was added to the data and Const nodes
        force_precision_const_node = graph.nodes['data_node_const'][
            'force_precision']
        force_precision_new_data = graph.nodes['data_node_copy_'][
            'force_precision']
        self.assertEqual(force_precision_const_node, precision)
        self.assertEqual(force_precision_new_data, precision)
Example #9
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(
        graph=graph,
        data_type=graph.graph['cmd_params'].data_type,
        output_dir=argv.output_dir,
        output_model_name=argv.model_name,
        mean_data=graph.graph['mf'] if 'mf' in graph.graph else None,
        input_names=graph.graph['input_names']
        if 'input_names' in graph.graph else [],
        meta_info=get_meta_info(argv))

    if not (argv.framework == 'tf'
            and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        orig_model_name = os.path.normpath(
            os.path.join(output_dir, argv.model_name))

        # This try-except is additional insurance that the IE
        # dependency search does not break the MO pipeline
        try:
            if find_ie_version(silent=True):
                path_to_offline_transformations = os.path.join(
                    os.path.realpath(os.path.dirname(__file__)), 'back',
                    'offline_transformations.py')
                status = subprocess.run([
                    sys.executable, path_to_offline_transformations,
                    orig_model_name
                ],
                                        env=os.environ,
                                        timeout=100)
                if status.returncode != 0 and not argv.silent:
                    print("[ WARNING ] offline_transformations return code {}".
                          format(status.returncode))
        except Exception:
            # TODO: send error message
            pass

        print('[ SUCCESS ] Generated IR version {} model.'.format(
            get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
Example #10
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    # Remove the temporary feManager key from argv so it does not end up in the IR
    if 'feManager' in argv:
        del argv.feManager

    mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
    input_names = deepcopy(
        graph.graph['input_names']) if 'input_names' in graph.graph else []

    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=mean_data,
                    input_names=input_names,
                    meta_info=get_meta_info(argv),
                    use_temporary_path=True)

    # This graph cleanup is required to avoid double memory consumption
    graph.clear()

    if not (argv.framework == 'tf'
            and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        orig_model_name = os.path.normpath(
            os.path.join(output_dir, argv.model_name))

        return_code = "not executed"
        # This try-except is additional insurance that the IE
        # dependency search does not break the MO pipeline
        try:
            if not argv.legacy_ir_generation:
                path_to_offline_transformations = os.path.join(
                    os.path.realpath(os.path.dirname(__file__)), 'back',
                    'offline_transformations.py')
                cmd = [
                    sys.executable, path_to_offline_transformations,
                    "--input_model", orig_model_name, "--framework",
                    argv.framework, "--transform", argv.transform
                ]
                if "compress_fp16" in argv and argv.compress_fp16:
                    cmd += ["--compress_fp16"]
                    # restore data_type cmd parameter
                    argv.data_type = 'FP16'
                status = subprocess.run(cmd, env=os.environ)
                return_code = status.returncode
        except Exception as e:
            return_code = "failed"
            log.error(e)

        message = str(
            dict({
                "platform": platform.system(),
                "mo_version": get_simplified_mo_version(),
                "ie_version": get_simplified_ie_version(env=os.environ),
                "python_version": sys.version,
                "return_code": return_code
            }))
        t = tm.Telemetry()
        t.send_event('mo', 'offline_transformations_status', message)

        if return_code != 0:
            raise Error("offline transformations step has failed.")

        for suf in [".xml", ".bin", ".mapping"]:
            # remove existing files
            path_to_file = orig_model_name + "_tmp" + suf
            if os.path.exists(path_to_file):
                os.remove(path_to_file)

        # add meta information to IR
        append_ir_info(file=orig_model_name,
                       meta_info=get_meta_info(argv),
                       mean_data=mean_data,
                       input_names=input_names)

        print('[ SUCCESS ] Generated IR version {} model.'.format(
            get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
Example #11
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
    input_names = deepcopy(
        graph.graph['input_names']) if 'input_names' in graph.graph else []

    # Remove the temporary ie_is_available key from argv so it does not end up in the IR
    ie_is_available = argv.ie_is_available
    del argv.ie_is_available

    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=mean_data,
                    input_names=input_names,
                    meta_info=get_meta_info(argv),
                    use_temporary_path=True)

    # This graph cleanup is required to avoid double memory consumption
    graph.clear()

    if not (argv.framework == 'tf'
            and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        orig_model_name = os.path.normpath(
            os.path.join(output_dir, argv.model_name))

        return_code = "not executed"
        # This try-except is additional insurance that the IE
        # dependency search does not break the MO pipeline
        try:
            if not argv.legacy_ir_generation and ie_is_available:
                path_to_offline_transformations = os.path.join(
                    os.path.realpath(os.path.dirname(__file__)), 'back',
                    'offline_transformations.py')
                status = subprocess.run([
                    sys.executable, path_to_offline_transformations,
                    "--input_model", orig_model_name, "--framework",
                    argv.framework, "--transform", argv.transform
                ],
                                        env=os.environ)
                return_code = status.returncode
        except Exception as e:
            return_code = "failed"
            log.error(e, extra={'is_warning': True})

        message = str(
            dict({
                "platform": platform.system(),
                "mo_version": get_simplified_mo_version(),
                "ie_version": get_simplified_ie_version(env=os.environ),
                "python_version": sys.version,
                "return_code": return_code
            }))
        t = tm.Telemetry()
        t.send_event('mo', 'offline_transformations_status', message)

        # If the IR wasn't produced by the offline_transformations step, we need to fall back
        # to the IR produced by prepare_ir, which has to be renamed from XXX_tmp.xml to XXX.xml.
        suffixes = [".xml", ".bin", ".mapping"]
        if return_code != 0:
            if len(argv.transform) != 0:
                # Remove the temporary IR before raising the exception
                for suf in suffixes:
                    path_to_file = orig_model_name + "_tmp" + suf
                    if os.path.exists(path_to_file):
                        os.remove(path_to_file)
                raise Error("Failed to apply transformations: {}".format(
                    argv.transform))

            log.error("Using fallback to produce IR.",
                      extra={'is_warning': True})
            for suf in suffixes:
                # remove existing files
                path_to_file = orig_model_name + suf
                if os.path.exists(path_to_file):
                    os.remove(path_to_file)

                # rename tmp IR to original name
                os.rename(orig_model_name + "_tmp" + suf,
                          orig_model_name + suf)
        else:
            for suf in suffixes:
                # remove existing files
                path_to_file = orig_model_name + "_tmp" + suf
                if os.path.exists(path_to_file):
                    os.remove(path_to_file)

            # add meta information to IR
            append_ir_info(file=orig_model_name,
                           meta_info=get_meta_info(argv),
                           mean_data=mean_data,
                           input_names=input_names)

        print('[ SUCCESS ] Generated IR version {} model.'.format(
            get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
Example #12
def driver(argv, input_model, output_model_name, output_dir):
    meta_info = get_meta_info(argv)

    EltwiseChecker.enabled = False

    try:
        graph = load_kaldi_model(input_model)
    except Exception as e:
        raise Error('Model Optimizer is not able to parse Kaldi model {}. '.format(input_model) +
                    refer_to_faq_msg(91)) from e
    graph.check_empty_graph('load_kaldi_nnet_model')
    graph.graph['cmd_params'] = argv
    graph.graph['fw'] = 'kaldi'

    if graph.graph['cmd_params'].generate_experimental_IR_V10:
        version = 10
    else:
        version = 6
    graph.graph['ir_version'] = 2 if argv.generate_deprecated_IR_V2 else version

    update_extractors_with_extensions(kaldi_type_extractors)
    extract_node_attrs(graph, lambda node: kaldi_extractor(node))

    # --------------------------------- LOAD END ------------------------------------------------------
    class_registration.apply_replacements(graph, [class_registration.ClassType.FRONT_REPLACER])
    MoveEmbeddedInputsToInputs().find_and_replace_pattern(graph)

    graph = partial_infer(graph)

    ReplacePNormNodePattern().find_and_replace_pattern(graph)
    ReplaceMemoryOffsetNodePattern().find_and_replace_pattern(graph)
    ReplaceMemoryOffsetWithMemoryNodePattern().find_and_replace_pattern(graph)
    RemoveMemoryDuplicationPattern().find_and_replace_pattern(graph)
    MergeNeighborSplicePattern().find_and_replace_pattern(graph)
    RemoveUselessCropsPattern().find_and_replace_pattern(graph)
    RemoveIdentity().find_and_replace_pattern(graph)
    graph.clean_up()

    AddSelectBeforeMemoryNodePattern().find_and_replace_pattern(graph)

    ReplaceSpliceNodePattern().find_and_replace_pattern(graph)
    graph.clean_up()

    # The order is intentional: first eliminate repeated Reshapes, then remove redundant ones.
    FuseRepeatedReshapes().find_and_replace_pattern(graph)
    EliminateRedundantReshape().find_and_replace_pattern(graph)
    graph.clean_up()
    graph.check_empty_graph('partial_infer')

    MatMulToFullyConnected().find_and_replace_pattern(graph)
    graph.clean_up()
    SSBiasAddonForFC().find_and_replace_pattern(graph)
    graph.clean_up()
    BiasAddonForFC().find_and_replace_pattern(graph)
    graph.clean_up()

    if argv.counts:
        try:
            counts = read_counts_file(argv.counts)
        except Exception as e:
            raise Error('Model Optimizer is not able to read counts file {}. '.format(argv.counts) +
                        refer_to_faq_msg(92)) from e

        apply_biases_to_last_layer(graph, counts)

    if argv.remove_output_softmax:
        RemoveLastSoftMaxPattern().find_and_replace_pattern(graph)
        graph.clean_up()
        log.debug("After removing softmax")
        graph.print_graph_stat()

    LeakyReluToReluWithNegativeSlope().find_and_replace_pattern(graph)
    TransposeToPermute().find_and_replace_pattern(graph)
    DivideToEltwises().find_and_replace_pattern(graph)
    SubtractToEltwises().find_and_replace_pattern(graph)
    SimpleEltwiseToEltwiseOp().find_and_replace_pattern(graph)

    # Intentionally after all transformations
    if argv.remove_memory:
        CutMemory().find_and_replace_pattern(graph)
        graph.clean_up()
    ParameterToInput().find_and_replace_pattern(graph)

    KaldiRemoveMemoryOutputBackReplacementPattern().find_and_replace_pattern(graph)
    ForceStrictPrecision().find_and_replace_pattern(graph)
    RemoveConstOps().find_and_replace_pattern(graph)
    CreateConstNodesReplacement().find_and_replace_pattern(graph)
    RemoveOutputOps().find_and_replace_pattern(graph)
    prepare_emit_ir(graph, argv.data_type, output_dir, output_model_name, meta_info=meta_info)
    return 0
Example #13
def save_restored_graph(graph: Graph, path: str, meta_data, name=None):
    """
    Apply all necessary transformations from the back stage to prepare and save the restored graph and metadata.
    :param graph: Graph to save
    :param path: Path to saved IR
    :param meta_data: Namespace with converting parameters restored from IR
    :param name: Name for saved IR
    :return:
    """

    if name is None:
        name = graph.name

    # Collect the precisions of the weight inputs of Convolution/MatMul ops,
    # looking through FakeQuantize where present.
    precisions = set()

    for op in graph.get_op_nodes():
        if op.type in ('Convolution', 'MatMul'):
            if op.in_port(1).get_source().node.type == 'FakeQuantize':
                data_type = op.in_port(1).get_source().node.in_port(
                    0).get_source().node.soft_get('data_type', None)
            else:
                data_type = op.in_port(1).get_source().node.soft_get(
                    'data_type', None)

            if data_type is not None:
                precisions.add(np_data_type_to_precision(data_type))
            else:
                log.warning(
                    'Cannot check data type for node {} with type {}, skip it.'
                    .format(op.name, op.type))

    # Prefer FP16 only if at least one weight tensor was stored in FP16; default to FP32 otherwise.
    precision = 'FP16' if 'FP16' in precisions else 'FP32'

    # We need to run some specific passes from the MO back stage.
    # After some of them we need to clean up the graph!
    for_graph_and_each_sub_graph_recursively(
        graph,
        ConvolutionWithGroupsResolver().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        TopKNormalizer().find_and_replace_pattern)
    graph.clean_up()

    for_graph_and_each_sub_graph_recursively(
        graph,
        StridedSliceMasksNormalizer().find_and_replace_pattern)

    for_graph_and_each_sub_graph_recursively(
        graph,
        BlobNormalizer().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        ConvolutionNormalizer().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(
        graph,
        CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(graph, precision, path, name, meta_info=meta_data)
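
All of the examples above revolve around the same pass. As a rough mental model (a simplified, self-contained sketch, not MO's actual implementation), CreateConstNodesReplacement gives every producer-less data node that holds a value a Const producer, except when every consumer edge is marked 'bin', in which case the value is written directly into the .bin file:

import numpy as np

def create_const_nodes(nodes, edges):
    """nodes: {name: attrs}; edges: list of (src, dst, attrs) tuples."""
    has_producer = {dst for _, dst, _ in edges}
    for name, attrs in list(nodes.items()):
        if attrs.get('kind') != 'data' or attrs.get('value') is None:
            continue
        if name in has_producer:
            continue  # some op already produces this data node
        out_attrs = [a for src, _, a in edges if src == name]
        if out_attrs and all('bin' in a for a in out_attrs):
            continue  # value goes straight into the .bin file
        const_name = name + '_const'
        nodes[const_name] = {'kind': 'op', 'op': 'Const', 'value': attrs['value']}
        edges.append((const_name, name, {}))

# Mirrors test_one_node above: a valued data node with a plain consumer edge
# gets a Const producer; with 'bin' on every edge it would be skipped.
nodes = {'data_node': {'kind': 'data', 'value': np.zeros([2, 3, 4])},
         'next_node': {'kind': 'op'}}
edges = [('data_node', 'next_node', {})]
create_const_nodes(nodes, edges)
assert 'data_node_const' in nodes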