Example No. 1
def main(cli_parser: argparse.ArgumentParser, framework: str):
    telemetry = tm.Telemetry(app_name='Model Optimizer',
                             app_version=get_version())
    telemetry.start_session()
    telemetry.send_event('mo', 'version', get_version())
    try:
        # Initialize the logger with 'ERROR' as the default level so that nice messages
        # can be formed before the argument parser delivers the log_level requested by the user
        init_logger('ERROR', False)

        argv = cli_parser.parse_args()
        if framework:
            argv.framework = framework

        ov_update_message = None
        if not hasattr(argv, 'silent') or not argv.silent:
            ov_update_message = get_ov_update_message()
        ret_code = driver(argv)
        if ov_update_message:
            print(ov_update_message)
        telemetry.send_event('mo', 'conversion_result', 'success')
        telemetry.end_session()
        telemetry.force_shutdown(1.0)
        return ret_code
    except (FileNotFoundError, NotADirectoryError) as e:
        log.error('File {} was not found'.format(
            str(e).split('No such file or directory:')[1]))
        log.debug(traceback.format_exc())
    except Error as err:
        analysis_results = AnalysisResults()
        if analysis_results.get_messages() is not None:
            for el in analysis_results.get_messages():
                log.error(el, extra={'analysis_info': True})
        log.error(err)
        log.debug(traceback.format_exc())
    except FrameworkError as err:
        log.error(err, extra={'framework_error': True})
        log.debug(traceback.format_exc())
    except Exception as err:
        log.error("-------------------------------------------------")
        log.error("----------------- INTERNAL ERROR ----------------")
        log.error("Unexpected exception happened.")
        log.error(
            "Please contact Model Optimizer developers and forward the following information:"
        )
        log.error(str(err))
        log.error(traceback.format_exc())
        log.error("---------------- END OF BUG REPORT --------------")
        log.error("-------------------------------------------------")

    telemetry.send_event('mo', 'conversion_result', 'fail')
    telemetry.end_session()
    telemetry.force_shutdown(1.0)
    return 1
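
For context, a minimal launcher sketch showing how a main() like the one above is typically invoked from a framework-specific entry script. The module paths and the parser factory get_caffe_cli_parser are assumptions made for illustration, not part of the example.

# Hypothetical entry script; mo.main.main and mo.utils.cli_parser.get_caffe_cli_parser
# are assumed import paths, used here for illustration only.
import sys

from mo.main import main
from mo.utils.cli_parser import get_caffe_cli_parser

if __name__ == '__main__':
    # main() returns 0 on success and 1 on failure, so it maps directly to the exit code.
    sys.exit(main(get_caffe_cli_parser(), 'caffe'))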
Example No. 2
def add_meta_data(net: xml.etree.ElementTree.Element, meta_info: dict):
    meta = SubElement(net, 'meta_data')
    SubElement(meta, 'MO_version').set('value', get_version())
    parameters = SubElement(meta, 'cli_parameters')
    [SubElement(parameters, str(key)).set('value', str(meta_info[key])) for key in sorted(meta_info.keys()) if
     key != 'unset']
    SubElement(parameters, 'unset').set('unset_cli_parameters', ', '.join(sorted(meta_info['unset'])))
Example No. 3
def add_meta_data(net: Element, meta_info: dict):
    if meta_info == {}:
        log.warning(
            '`meta_info` is not provided; the IR will not contain the corresponding section.'
        )
    else:
        meta = SubElement(net, 'meta_data')
        SubElement(meta, 'MO_version').set('value', get_version())
        parameters = SubElement(meta, 'cli_parameters')
        [
            SubElement(parameters, str(key)).set('value', str(meta_info[key]))
            for key in sorted(meta_info.keys())
            if key not in ('unset', 'quantization_parameters')
        ]
        if 'unset' in meta_info:
            SubElement(parameters,
                       'unset').set('unset_cli_parameters',
                                    ', '.join(sorted(meta_info['unset'])))
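
A small usage sketch for the add_meta_data variant above, assuming it lives in one module together with a stubbed get_version; the model attributes and CLI values are made up for illustration.

# Hypothetical usage of add_meta_data from Example No. 3; get_version is stubbed here,
# whereas in the Model Optimizer it comes from mo.utils.version (see Example No. 13).
from xml.etree.ElementTree import Element, tostring

def get_version():
    return 'custom_build'  # illustrative stand-in value

net = Element('net', attrib={'name': 'model', 'version': '10'})
add_meta_data(net, {
    'framework': 'onnx',
    'input_shape': '[1,3,224,224]',
    'unset': ['mean_values', 'scale_values'],
})
# Produces a <meta_data> section with MO_version, one element per CLI parameter,
# and the 'unset' parameter list.
print(tostring(net).decode())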
Example No. 4
def print_argv(argv: argparse.Namespace, is_caffe: bool, is_tf: bool,
               is_mxnet: bool, is_kaldi: bool, is_onnx: bool, model_name: str):
    print('Model Optimizer arguments:')
    props = OrderedDict()
    props['common_args'] = get_common_cli_options(model_name)
    if is_caffe:
        props['caffe_args'] = get_caffe_cli_options()
    if is_tf:
        props['tf_args'] = get_tf_cli_options()
    if is_mxnet:
        props['mxnet_args'] = get_mxnet_cli_options()
    if is_kaldi:
        props['kaldi_args'] = get_kaldi_cli_options()
    if is_onnx:
        props['onnx_args'] = get_onnx_cli_options()

    framework_specifics_map = {
        'common_args': 'Common parameters:',
        'caffe_args': 'Caffe specific parameters:',
        'tf_args': 'TensorFlow specific parameters:',
        'mxnet_args': 'MXNet specific parameters:',
        'kaldi_args': 'Kaldi specific parameters:',
        'onnx_args': 'ONNX specific parameters:',
    }

    lines = []
    for key in props:
        lines.append(framework_specifics_map[key])
        for (op, desc) in props[key].items():
            if isinstance(desc, list):
                lines.append('\t{}: \t{}'.format(
                    desc[0], desc[1](getattr(argv, op, 'NONE'))))
            else:
                if op == 'k':
                    default_path = os.path.join(
                        os.path.dirname(sys.argv[0]),
                        'extensions/front/caffe/CustomLayersMapping.xml')
                    if getattr(argv, op, 'NONE') == default_path:
                        lines.append('\t{}: \t{}'.format(desc, 'Default'))
                        continue
                lines.append('\t{}: \t{}'.format(desc,
                                                 getattr(argv, op, 'NONE')))
    lines.append('Model Optimizer version: \t{}'.format(get_version()))
    print('\n'.join(lines), flush=True)
Example No. 5
def prepare_ir(argv: argparse.Namespace):
    is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx = deduce_framework_by_namespace(
        argv)

    if not any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx]):
        raise Error(
            'Framework {} is not a valid target. Please use --framework with one from the list: caffe, tf, '
            'mxnet, kaldi, onnx. ' + refer_to_faq_msg(15), argv.framework)

    if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph:
        raise Error(
            'Path to input model or saved model dir is required: use --input_model, --saved_model_dir or '
            '--input_meta_graph')
    elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name:
        raise Error(
            'Path to input model or input symbol or pretrained_model_name is required: use --input_model or '
            '--input_symbol or --pretrained_model_name')
    elif is_caffe and not argv.input_model and not argv.input_proto:
        raise Error(
            'Path to input model or input proto is required: use --input_model or --input_proto'
        )
    elif (is_kaldi or is_onnx) and not argv.input_model:
        raise Error('Path to input model is required: use --input_model.')

    log.debug(str(argv))
    log.debug("Model Optimizer started")
    t = tm.Telemetry()
    t.start_session()

    model_name = "<UNKNOWN_NAME>"
    if argv.model_name:
        model_name = argv.model_name
    elif argv.input_model:
        model_name = get_model_name(argv.input_model)
    elif is_tf and argv.saved_model_dir:
        model_name = "saved_model"
    elif is_tf and argv.input_meta_graph:
        model_name = get_model_name(argv.input_meta_graph)
    elif is_mxnet and argv.input_symbol:
        model_name = get_model_name(argv.input_symbol)
    argv.model_name = model_name

    log.debug('Output model name would be {}{{.xml, .bin}}'.format(
        argv.model_name))

    # If --input_proto is not provided, try to derive it from the model
    # file name by suffix substitution
    if is_caffe and not argv.input_proto:
        argv.input_proto = replace_ext(argv.input_model, '.caffemodel',
                                       '.prototxt')

        if not argv.input_proto:
            raise Error(
                "Cannot find prototxt file: for Caffe please specify --input_proto - a "
                +
                "protobuf file that stores topology and --input_model that stores "
                + "pretrained weights. " + refer_to_faq_msg(20))
        log.info('Deduced name for prototxt: {}'.format(argv.input_proto))

    if not argv.silent:
        print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx,
                   argv.model_name)

    # This try-except is an additional safeguard so that the IE
    # dependency search does not break the MO pipeline
    try:
        if not find_ie_version(silent=argv.silent) and not argv.silent:
            print(
                "[ WARNING ] Could not find the Inference Engine Python API. At this moment, the Inference Engine dependency is not required, but will be required in future releases."
            )
            print(
                "[ WARNING ] Consider building the Inference Engine Python API from sources or try to install OpenVINO (TM) Toolkit using \"install_prerequisites.{}\""
                .format("bat" if sys.platform == "windows" else "sh"))
            # If the IE was not found, it will not print the MO version, so we have to print it manually
            print("{}: \t{}".format("Model Optimizer version", get_version()))
    except Exception as e:
        pass

    ret_code = check_requirements(framework=argv.framework)
    if ret_code:
        raise Error(
            'check_requirements exit with return code {}'.format(ret_code))

    if is_tf and argv.tensorflow_use_custom_operations_config is not None:
        argv.transformations_config = argv.tensorflow_use_custom_operations_config

    if is_caffe and argv.mean_file and argv.mean_values:
        raise Error(
            'Both --mean_file and --mean_values are specified. Specify either mean file or mean values. '
            + refer_to_faq_msg(17))
    elif is_caffe and argv.mean_file and argv.mean_file_offsets:
        values = get_tuple_values(argv.mean_file_offsets,
                                  t=int,
                                  num_exp_values=2)
        mean_file_offsets = np.array([int(x) for x in values[0].split(',')])
        if not all([offset >= 0 for offset in mean_file_offsets]):
            raise Error(
                "Negative value specified for --mean_file_offsets option. "
                "Please specify positive integer values in format '(x,y)'. " +
                refer_to_faq_msg(18))
        argv.mean_file_offsets = mean_file_offsets

    if argv.scale and argv.scale_values:
        raise Error(
            'Both --scale and --scale_values are defined. Specify either scale factor or scale values per input '
            + 'channels. ' + refer_to_faq_msg(19))

    if argv.scale and argv.scale < 1.0:
        log.error(
            "The scale value is less than 1.0. This is most probably an issue because the scale value specifies "
            "floating point value which all input values will be *divided*.",
            extra={'is_warning': True})

    if argv.input_model and (is_tf and argv.saved_model_dir):
        raise Error('Both --input_model and --saved_model_dir are defined. '
                    'Specify either input model or saved model directory.')
    if is_tf:
        if argv.saved_model_tags is not None:
            if ' ' in argv.saved_model_tags:
                raise Error(
                    'Incorrect saved model tag was provided. Specify --saved_model_tags with no spaces in it'
                )
            argv.saved_model_tags = argv.saved_model_tags.split(',')

    argv.output = argv.output.split(',') if argv.output else None

    argv.placeholder_shapes, argv.placeholder_data_types = get_placeholder_shapes(
        argv.input, argv.input_shape, argv.batch)

    mean_values = parse_tuple_pairs(argv.mean_values)
    scale_values = parse_tuple_pairs(argv.scale_values)
    mean_scale = get_mean_scale_dictionary(mean_values, scale_values,
                                           argv.input)
    argv.mean_scale_values = mean_scale

    if not os.path.exists(argv.output_dir):
        try:
            os.makedirs(argv.output_dir)
        except PermissionError as e:
            raise Error(
                "Failed to create directory {}. Permission denied! " +
                refer_to_faq_msg(22), argv.output_dir) from e
    else:
        if not os.access(argv.output_dir, os.W_OK):
            raise Error(
                "Output directory {} is not writable for current user. " +
                refer_to_faq_msg(22), argv.output_dir)

    log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes))

    if hasattr(argv,
               'extensions') and argv.extensions and argv.extensions != '':
        extensions = argv.extensions.split(',')
    else:
        extensions = None

    argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values(
        argv.input, argv.freeze_placeholder_with_value)
    if is_tf:
        t.send_event('mo', 'framework', 'tf')
        from mo.front.tf.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
    elif is_caffe:
        t.send_event('mo', 'framework', 'caffe')
        from mo.front.caffe.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
    elif is_mxnet:
        t.send_event('mo', 'framework', 'mxnet')
        from mo.front.mxnet.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
    elif is_kaldi:
        t.send_event('mo', 'framework', 'kaldi')
        from mo.front.kaldi.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
    elif is_onnx:
        t.send_event('mo', 'framework', 'onnx')
        from mo.front.onnx.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
    graph = unified_pipeline(argv)
    return graph
Example No. 6
def import_core_modules(silent: bool, path_to_module: str):
    """
        This function checks that the Inference Engine Python API is available
        and that the necessary Python modules exist, so the list of imports
        below must contain all IE/nGraph Python API imports that are used inside MO.

    :param silent: enables or disables printing logs to stdout
    :param path_to_module: path where the Python API modules were found
    :return: True if all imports were successful, False otherwise
    """
    try:
        from openvino.inference_engine import get_version, read_network  # pylint: disable=import-error,no-name-in-module
        from openvino.offline_transformations import ApplyMOCTransformations, ApplyLowLatencyTransformation, \
            ApplyMakeStatefulTransformation, GenerateMappingFile  # pylint: disable=import-error,no-name-in-module

        # TODO: this is a temporary import to check that the nGraph Python API is available.
        # In the future it should be replaced with Frontend imports.
        from ngraph.impl.op import Parameter  # pylint: disable=import-error,no-name-in-module
        from _pyngraph import PartialShape, Dimension  # pylint: disable=import-error,no-name-in-module

        import openvino  # pylint: disable=import-error,no-name-in-module
        import ngraph  # pylint: disable=import-error,no-name-in-module
        import ngraph.frontend  # pylint: disable=import-error,no-name-in-module

        if silent:
            return True

        ie_version = str(get_version())
        mo_version = str(v.get_version())  # pylint: disable=no-member,no-name-in-module

        print("\t- {}: \t{}".format("Inference Engine found in",
                                    os.path.dirname(openvino.__file__)))
        # TODO: when the nGraph version becomes available, start comparing it to the IE and MO versions. Ticket: 58091
        print("\t- {}: \t{}".format("nGraph found in",
                                    os.path.dirname(ngraph.__file__)))
        print("{}: \t{}".format("Inference Engine version", ie_version))
        print("{}: \t{}".format("Model Optimizer version", mo_version))

        versions_mismatch = False
        if mo_version != ie_version:
            versions_mismatch = True
            extracted_mo_release_version = v.extract_release_version(
                mo_version)
            mo_is_custom = extracted_mo_release_version == (None, None)

            print(
                "[ WARNING ] Model Optimizer and Inference Engine versions do no match."
            )
            print(
                "[ WARNING ] Consider building the Inference Engine Python API from sources or reinstall OpenVINO "
                "(TM) toolkit using",
                end=" ")
            if mo_is_custom:
                print(
                    "\"pip install openvino\" (may be incompatible with the current Model Optimizer version)"
                )
            else:
                print("\"pip install openvino=={}.{}\"".format(
                    *extracted_mo_release_version))

        simplified_mo_version = v.get_simplified_mo_version()
        message = str(
            dict({
                "platform": platform.system(),
                "mo_version": simplified_mo_version,
                "ie_version": v.get_simplified_ie_version(version=ie_version),
                "versions_mismatch": versions_mismatch,
            }))
        send_telemetry(simplified_mo_version, message, 'ie_version_check')

        return True
    except Exception as e:
        # Do not print a warning if the module wasn't found or silent mode is on
        if "No module named 'openvino'" not in str(e) and not silent:
            print(
                "[ WARNING ] Failed to import Inference Engine Python API in: {}"
                .format(path_to_module))
            print("[ WARNING ] {}".format(e))

            # Send telemetry message about warning
            simplified_mo_version = v.get_simplified_mo_version()
            message = str(
                dict({
                    "platform": platform.system(),
                    "mo_version": simplified_mo_version,
                    "ie_version": v.get_simplified_ie_version(env=os.environ),
                    "python_version": sys.version,
                    "error_type": classify_error_type(e),
                }))
            send_telemetry(simplified_mo_version, message, 'ie_import_failed')

        return False
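
Per the docstring, the function only reports whether the IE/nGraph imports succeed, and path_to_module is merely echoed in the warning text. A minimal, hypothetical call site might look like the following; the path value passed in is an arbitrary assumption.

# Hypothetical caller of import_core_modules; the path below is only used in the
# warning message, so its exact value is an assumption.
import os
import sys

api_available = import_core_modules(silent=False,
                                    path_to_module=os.path.dirname(sys.executable))
if not api_available:
    print('[ WARNING ] Inference Engine Python API is not available; conversion continues without it.')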
Example No. 7
 def test_get_version(self, mock_open, mock_isfile):
     mock_isfile.return_value = True
     mock_open.return_value.__enter__ = mock_open
     self.assertEqual(get_version(), '2021.1.0-1028-55e4d5673a8')
Example No. 8
 def test_unknown_version(self):
     self.assertEqual(get_version(), "unknown version")
Example No. 9
 def test_release_version_extractor_neg(self, mock_open, mock_isfile):
     mock_isfile.return_value = True
     mock_open.return_value.__enter__ = mock_open
     self.assertEqual(extract_release_version(get_version()), (None, None))
Example No. 10
 def test_custom_release_version_extractor(self, mock_open, mock_isfile):
     mock_isfile.return_value = True
     mock_open.return_value.__enter__ = mock_open
     self.assertEqual(extract_release_version(get_version()), ('2021', '1'))
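
The tests in Examples No. 7-10 receive mock_open and mock_isfile arguments, which implies @patch decorators that these excerpts do not show. Below is a hedged reconstruction of one such test using unittest.mock.mock_open instead of wiring __enter__ manually; the patch targets, the import path (borrowed from Example No. 13), and the fake version-file content are assumptions.

# Hypothetical full form of a test like Example No. 7; patch targets and the
# fake version string are assumptions for illustration.
import unittest
from unittest.mock import mock_open, patch

from mo.utils.version import get_version  # import path assumed, as in Example No. 13

class TestGetVersion(unittest.TestCase):
    @patch('os.path.isfile')
    @patch('builtins.open', new_callable=mock_open,
           read_data='2021.1.0-1028-55e4d5673a8')
    def test_get_version(self, mock_file, mock_isfile):
        # Decorators apply bottom-up, so the patched open() arrives as the first mock.
        mock_isfile.return_value = True
        self.assertEqual(get_version(), '2021.1.0-1028-55e4d5673a8')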
Example No. 11
def import_core_modules(silent: bool, path_to_module: str):
    try:
        from openvino.inference_engine import IECore, get_version  # pylint: disable=import-error
        from openvino.offline_transformations import ApplyMOCTransformations, CheckAPI  # pylint: disable=import-error

        import openvino  # pylint: disable=import-error

        if silent:
            return True

        ie_version = str(get_version())
        mo_version = str(v.get_version())  # pylint: disable=no-member

        print("\t- {}: \t{}".format("Inference Engine found in",
                                    os.path.dirname(openvino.__file__)))
        print("{}: \t{}".format("Inference Engine version", ie_version))
        print("{}: \t    {}".format("Model Optimizer version", mo_version))

        versions_mismatch = False
        # The MO and IE versions differ at the beginning because the IE version
        # also includes the API version. For example:
        #   Inference Engine version: 2.1.custom_HEAD_4c8eae0ee2d403f8f5ae15b2c9ad19cfa5a9e1f9
        #   Model Optimizer version:      custom_HEAD_4c8eae0ee2d403f8f5ae15b2c9ad19cfa5a9e1f9
        # So, to match the versions, we skip the IE API version prefix.
        if not re.match(r"^([0-9]+)\.([0-9]+)\.{}$".format(mo_version),
                        ie_version):
            versions_mismatch = True
            extracted_mo_release_version = v.extract_release_version(
                mo_version)
            mo_is_custom = extracted_mo_release_version == (None, None)

            print(
                "[ WARNING ] Model Optimizer and Inference Engine versions do no match."
            )
            print(
                "[ WARNING ] Consider building the Inference Engine Python API from sources or reinstall OpenVINO (TM) toolkit using",
                end=" ")
            if mo_is_custom:
                print(
                    "\"pip install openvino\" (may be incompatible with the current Model Optimizer version)"
                )
            else:
                print("\"pip install openvino=={}.{}\"".format(
                    *extracted_mo_release_version))

        simplified_mo_version = v.get_simplified_mo_version()
        message = str(
            dict({
                "platform": platform.system(),
                "mo_version": simplified_mo_version,
                "ie_version": v.get_simplified_ie_version(version=ie_version),
                "versions_mismatch": versions_mismatch,
            }))
        send_telemetry(simplified_mo_version, message, 'ie_version_check')

        return True
    except Exception as e:
        # Do not print a warning if module wasn't found or silent mode is on
        if "No module named 'openvino'" not in str(e) and not silent:
            print(
                "[ WARNING ] Failed to import Inference Engine Python API in: {}"
                .format(path_to_module))
            print("[ WARNING ] {}".format(e))

            # Send telemetry message about warning
            simplified_mo_version = v.get_simplified_mo_version()
            message = str(
                dict({
                    "platform": platform.system(),
                    "mo_version": simplified_mo_version,
                    "ie_version": v.get_simplified_ie_version(env=os.environ),
                    "python_version": sys.version,
                    "error_type": classify_error_type(e),
                }))
            send_telemetry(simplified_mo_version, message, 'ie_import_failed')

        return False
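
A standalone snippet demonstrating the version check described in the comment above: the IE version carries an extra "<major>.<minor>." API prefix that the MO version lacks, so the regex only strips that prefix before comparing. The sample strings come from the comment in Example No. 11; this is an illustration, not additional Model Optimizer code.

# Demonstration of the prefix-tolerant version match used in Example No. 11.
import re

mo_version = 'custom_HEAD_4c8eae0ee2d403f8f5ae15b2c9ad19cfa5a9e1f9'
ie_version = '2.1.custom_HEAD_4c8eae0ee2d403f8f5ae15b2c9ad19cfa5a9e1f9'

pattern = r"^([0-9]+)\.([0-9]+)\.{}$".format(mo_version)
print(bool(re.match(pattern, ie_version)))           # True  -> treated as matching versions
print(bool(re.match(pattern, '2.1.another_build')))  # False -> would trigger the mismatch warning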
Example No. 12
def driver(argv: argparse.Namespace):
    if argv.version:
        print('Version of Model Optimizer is: {}'.format(get_version()))
        return 0

    init_logger(argv.log_level.upper(), argv.silent)
    start_time = datetime.datetime.now()

    if not argv.framework:
        if 'saved_model_dir' in argv and argv.saved_model_dir or \
                'input_meta_graph' in argv and argv.input_meta_graph:
            argv.framework = 'tf'
        elif 'input_symbol' in argv and argv.input_symbol or \
                'pretrained_model_name' in argv and argv.pretrained_model_name:
            argv.framework = 'mxnet'
        elif 'input_proto' in argv and argv.input_proto:
            argv.framework = 'caffe'
        elif argv.input_model is None:
            raise Error('Path to input model is required: use --input_model.')
        else:
            argv.framework = guess_framework_by_ext(argv.input_model)
        if not argv.framework:
            raise Error(
                'Framework name can not be deduced from the given options: {}={}. '
                +
                'Use --framework to choose one of caffe, tf, mxnet, kaldi, onnx',
                '--input_model',
                argv.input_model,
                refer_to_faq_msg(15),
            )

    is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx = (
        argv.framework == x for x in ['tf', 'caffe', 'mxnet', 'kaldi', 'onnx'])

    if is_tf and not argv.input_model and not argv.saved_model_dir and not argv.input_meta_graph:
        raise Error(
            'Path to input model or saved model dir is required: use --input_model, --saved_model_dir or '
            '--input_meta_graph')
    elif is_mxnet and not argv.input_model and not argv.input_symbol and not argv.pretrained_model_name:
        raise Error(
            'Path to input model or input symbol or pretrained_model_name is required: use --input_model or '
            '--input_symbol or --pretrained_model_name')
    elif is_caffe and not argv.input_model and not argv.input_proto:
        raise Error(
            'Path to input model or input proto is required: use --input_model or --input_proto'
        )
    elif (is_kaldi or is_onnx) and not argv.input_model:
        raise Error('Path to input model is required: use --input_model.')

    log.debug(str(argv))
    log.debug("Model Optimizer started")

    model_name = "<UNKNOWN_NAME>"
    if argv.model_name:
        model_name = argv.model_name
    elif argv.input_model:
        model_name = get_model_name(argv.input_model)
    elif is_tf and argv.saved_model_dir:
        model_name = "saved_model"
    elif is_tf and argv.input_meta_graph:
        model_name = get_model_name(argv.input_meta_graph)
    elif is_mxnet and argv.input_symbol:
        model_name = get_model_name(argv.input_symbol)

    log.debug('Output model name would be {}{{.xml, .bin}}'.format(model_name))

    # If --input_proto is not provided, try to derive it from the model
    # file name by suffix substitution
    if is_caffe and not argv.input_proto:
        argv.input_proto = replace_ext(argv.input_model, '.caffemodel',
                                       '.prototxt')

        if not argv.input_proto:
            raise Error(
                "Cannot find prototxt file: for Caffe please specify --input_proto - a "
                +
                "protobuf file that stores topology and --input_model that stores "
                + "pretrained weights. " + refer_to_faq_msg(20))
        log.info('Deduced name for prototxt: {}'.format(argv.input_proto))

    if not argv.silent:
        print_argv(argv, is_caffe, is_tf, is_mxnet, is_kaldi, is_onnx,
                   model_name)

    if not any([is_tf, is_caffe, is_mxnet, is_kaldi, is_onnx]):
        raise Error(
            'Framework {} is not a valid target. ' +
            'Please use --framework with one from the list: caffe, tf, mxnet, kaldi, onnx. '
            + refer_to_faq_msg(15), argv.framework)

    ret_code = check_requirements(framework=argv.framework)
    if ret_code:
        return ret_code

    if is_mxnet and not argv.input_shape:
        raise Error(
            'Input shape is required to convert MXNet model. Please provide it with --input_shape. '
            + refer_to_faq_msg(16))

    mean_file_offsets = None
    if is_caffe and argv.mean_file and argv.mean_values:
        raise Error(
            'Both --mean_file and --mean_values are specified. Specify either mean file or mean values. '
            + refer_to_faq_msg(17))
    elif is_caffe and argv.mean_file and argv.mean_file_offsets:

        values = get_tuple_values(argv.mean_file_offsets,
                                  t=int,
                                  num_exp_values=2)
        mean_file_offsets = np.array([int(x) for x in values[0].split(',')])
        if not all([offset >= 0 for offset in mean_file_offsets]):
            raise Error(
                "Negative value specified for --mean_file_offsets option. "
                "Please specify positive integer values in format '(x,y)'. " +
                refer_to_faq_msg(18))
    custom_layers_mapping_path = argv.k if is_caffe and argv.k else None

    if argv.scale and argv.scale_values:
        raise Error(
            'Both --scale and --scale_values are defined. Specify either scale factor or scale values per input '
            + 'channels. ' + refer_to_faq_msg(19))

    if argv.scale and argv.scale < 1.0:
        log.error(
            "The scale value is less than 1.0. This is most probably an issue because the scale value specifies "
            "floating point value which all input values will be *divided*.",
            extra={'is_warning': True})

    if argv.input_model and (is_tf and argv.saved_model_dir):
        raise Error('Both --input_model and --saved_model_dir are defined. '
                    'Specify either input model or saved model directory.')
    if is_tf:
        if argv.saved_model_tags is not None:
            if ' ' in argv.saved_model_tags:
                raise Error(
                    'Incorrect saved model tag was provided. Specify --saved_model_tags with no spaces in it'
                )
            argv.saved_model_tags = argv.saved_model_tags.split(',')

    argv.output = argv.output.split(',') if argv.output else None

    argv.placeholder_shapes = get_placeholder_shapes(argv.input,
                                                     argv.input_shape,
                                                     argv.batch)

    mean_values = parse_tuple_pairs(argv.mean_values)
    scale_values = parse_tuple_pairs(argv.scale_values)
    mean_scale = get_mean_scale_dictionary(mean_values, scale_values,
                                           argv.input)
    argv.mean_scale_values = mean_scale

    if not os.path.exists(argv.output_dir):
        try:
            os.makedirs(argv.output_dir)
        except PermissionError as e:
            raise Error(
                "Failed to create directory {}. Permission denied! " +
                refer_to_faq_msg(22), argv.output_dir) from e
    else:
        if not os.access(argv.output_dir, os.W_OK):
            raise Error(
                "Output directory {} is not writable for current user. " +
                refer_to_faq_msg(22), argv.output_dir)

    log.debug("Placeholder shapes : {}".format(argv.placeholder_shapes))

    ret_res = 1
    if hasattr(argv,
               'extensions') and argv.extensions and argv.extensions != '':
        extensions = argv.extensions.split(',')
    else:
        extensions = None

    argv.freeze_placeholder_with_value, argv.input = get_freeze_placeholder_values(
        argv.input, argv.freeze_placeholder_with_value)

    if is_tf:
        import mo.pipeline.tf as mo_tf
        from mo.front.tf.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
        ret_res = mo_tf.tf2nx(argv,
                              argv.input_model,
                              model_name,
                              argv.output_dir,
                              is_binary=not argv.input_model_is_text)

    elif is_caffe:
        import mo.pipeline.caffe as mo_caffe
        from mo.front.caffe.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
        ret_res = mo_caffe.driver(
            argv,
            argv.input_proto,
            argv.input_model,
            model_name,
            argv.output_dir,
            argv.caffe_parser_path,
            mean_file=argv.mean_file,
            mean_file_offsets=mean_file_offsets,
            custom_layers_mapping_path=custom_layers_mapping_path)

    elif is_mxnet:
        import mo.pipeline.mx as mo_mxnet
        from mo.front.mxnet.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
        ret_res = mo_mxnet.driver(argv, argv.input_model, model_name,
                                  argv.output_dir)

    elif is_kaldi:
        import mo.pipeline.kaldi as mo_kaldi
        from mo.front.kaldi.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
        ret_res = mo_kaldi.driver(argv, argv.input_model, model_name,
                                  argv.output_dir)
    elif is_onnx:
        import mo.pipeline.onnx as mo_onnx
        from mo.front.onnx.register_custom_ops import get_front_classes
        import_extensions.load_dirs(argv.framework, extensions,
                                    get_front_classes)
        ret_res = mo_onnx.driver(argv, argv.input_model, model_name,
                                 argv.output_dir)

    if ret_res != 0:
        return ret_res
    if not (is_tf and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        print('\n[ SUCCESS ] Generated IR model.')
        print('[ SUCCESS ] XML file: {}.xml'.format(
            os.path.join(output_dir, model_name)))
        print('[ SUCCESS ] BIN file: {}.bin'.format(
            os.path.join(output_dir, model_name)))
        elapsed_time = datetime.datetime.now() - start_time
        print('[ SUCCESS ] Total execution time: {:.2f} seconds. '.format(
            elapsed_time.total_seconds()))
    return ret_res
Example No. 13
def collect_env():
    env_info = {}
    env_info['sys.platform'] = sys.platform
    env_info['Python'] = sys.version.replace('\n', '')

    cuda_available = torch.cuda.is_available()
    env_info['CUDA available'] = cuda_available

    if cuda_available:
        from torch.utils.cpp_extension import CUDA_HOME
        env_info['CUDA_HOME'] = CUDA_HOME

        if CUDA_HOME is not None and osp.isdir(CUDA_HOME):
            try:
                nvcc = osp.join(CUDA_HOME, 'bin/nvcc')
                nvcc = subprocess.check_output(f'"{nvcc}" -V | tail -n1',
                                               shell=True)
                nvcc = nvcc.decode('utf-8').strip()
            except subprocess.SubprocessError:
                nvcc = 'Not Available'
            env_info['NVCC'] = nvcc

        devices = defaultdict(list)
        for k in range(torch.cuda.device_count()):
            devices[torch.cuda.get_device_name(k)].append(str(k))
        for name, devids in devices.items():
            env_info['GPU ' + ','.join(devids)] = name

    gcc = subprocess.check_output('gcc --version | head -n1', shell=True)
    gcc = gcc.decode('utf-8').strip()
    env_info['GCC'] = gcc

    env_info['PyTorch'] = torch.__version__
    env_info['PyTorch compiling details'] = torch.__config__.show()

    env_info['TorchVision'] = torchvision.__version__

    env_info['OpenCV'] = cv2.__version__

    env_info['MMCV'] = mmcv.__version__
    env_info['MMDetection'] = mmdet.__version__
    from mmdet.ops import get_compiler_version, get_compiling_cuda_version
    env_info['MMDetection Compiler'] = get_compiler_version()
    env_info['MMDetection CUDA Compiler'] = get_compiling_cuda_version()
    from mmdet.integration.nncf.utils import get_nncf_version
    env_info['NNCF'] = get_nncf_version()

    env_info['ONNX'] = None
    with suppress(ImportError):
        import onnx
        env_info['ONNX'] = onnx.__version__

    env_info['ONNXRuntime'] = None
    with suppress(ImportError):
        import onnxruntime
        env_info['ONNXRuntime'] = onnxruntime.__version__

    env_info['OpenVINO MO'] = None
    with suppress(ImportError):
        from mo.utils.version import get_version
        env_info['OpenVINO MO'] = get_version()

    env_info['OpenVINO IE'] = None
    with suppress(ImportError):
        import openvino.inference_engine as ie
        env_info['OpenVINO IE'] = ie.__version__

    return env_info
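
A small, hypothetical driver that prints the report collect_env builds, in the spirit of how environment-collection scripts are usually run; the output formatting is an assumption.

# Hypothetical driver for collect_env(); the formatting is an assumption.
if __name__ == '__main__':
    for name, value in collect_env().items():
        print(f'{name}: {value}')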