def emit_ir(graph: Graph, argv: argparse.Namespace):
    # Run the final normalization passes on the graph and all of its sub-graphs
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    # Serialize the graph to IR (.xml/.bin) in the requested output directory
    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=graph.graph['mf'] if 'mf' in graph.graph else None,
                    input_names=graph.graph['input_names'] if 'input_names' in graph.graph else [],
                    meta_info=get_meta_info(argv))

    # Report the generated files unless this run only updates the TF custom operations config
    if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        print('\n[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(os.path.join(output_dir, argv.model_name)))
        print('[ SUCCESS ] BIN file: {}.bin'.format(os.path.join(output_dir, argv.model_name)))

    return 0
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=graph.graph['mf'] if 'mf' in graph.graph else None,
                    input_names=graph.graph['input_names'] if 'input_names' in graph.graph else [],
                    meta_info=get_meta_info(argv))

    if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()

        orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))

        # This try-except is an additional safeguard so that the IE
        # dependency search does not break the MO pipeline
        try:
            if find_ie_version(silent=True):
                path_to_offline_transformations = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                                                               'back', 'offline_transformations.py')
                status = subprocess.run([sys.executable, path_to_offline_transformations, orig_model_name],
                                        env=os.environ, timeout=100)
                if status.returncode != 0 and not argv.silent:
                    print("[ WARNING ] offline_transformations return code {}".format(status.returncode))
        except Exception as e:
            # TODO: send error message
            pass

        print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    if 'feManager' in argv:
        del argv.feManager

    mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
    input_names = deepcopy(graph.graph['input_names']) if 'input_names' in graph.graph else []

    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=mean_data,
                    input_names=input_names,
                    meta_info=get_meta_info(argv),
                    use_temporary_path=True)

    # This graph cleanup is required to avoid double memory consumption
    graph.clear()

    if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))

        return_code = "not executed"
        # This try-except is an additional safeguard so that the IE
        # dependency search does not break the MO pipeline
        try:
            if not argv.legacy_ir_generation:
                path_to_offline_transformations = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                                                               'back', 'offline_transformations.py')
                cmd = [sys.executable, path_to_offline_transformations,
                       "--input_model", orig_model_name,
                       "--framework", argv.framework,
                       "--transform", argv.transform]
                if "compress_fp16" in argv and argv.compress_fp16:
                    cmd += ["--compress_fp16"]
                    # restore the data_type cmd parameter
                    argv.data_type = 'FP16'
                status = subprocess.run(cmd, env=os.environ)
                return_code = status.returncode
        except Exception as e:
            return_code = "failed"
            log.error(e)

        message = str(dict({
            "platform": platform.system(),
            "mo_version": get_simplified_mo_version(),
            "ie_version": get_simplified_ie_version(env=os.environ),
            "python_version": sys.version,
            "return_code": return_code
        }))
        t = tm.Telemetry()
        t.send_event('mo', 'offline_transformations_status', message)

        if return_code != 0:
            raise Error("offline transformations step has failed.")

        for suf in [".xml", ".bin", ".mapping"]:
            # remove the temporary files produced by prepare_emit_ir
            path_to_file = orig_model_name + "_tmp" + suf
            if os.path.exists(path_to_file):
                os.remove(path_to_file)

        # add meta information to the IR
        append_ir_info(file=orig_model_name,
                       meta_info=get_meta_info(argv),
                       mean_data=mean_data,
                       input_names=input_names)

        print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
def emit_ir(graph: Graph, argv: argparse.Namespace):
    NormalizeTI().find_and_replace_pattern(graph)
    for_graph_and_each_sub_graph_recursively(graph, RemoveConstOps().find_and_replace_pattern)
    for_graph_and_each_sub_graph_recursively(graph, CreateConstNodesReplacement().find_and_replace_pattern)

    mean_data = deepcopy(graph.graph['mf']) if 'mf' in graph.graph else None
    input_names = deepcopy(graph.graph['input_names']) if 'input_names' in graph.graph else []

    # Remove the temporary ie_is_available key from argv so that it does not end up in the IR
    ie_is_available = argv.ie_is_available
    del argv.ie_is_available

    prepare_emit_ir(graph=graph,
                    data_type=graph.graph['cmd_params'].data_type,
                    output_dir=argv.output_dir,
                    output_model_name=argv.model_name,
                    mean_data=mean_data,
                    input_names=input_names,
                    meta_info=get_meta_info(argv),
                    use_temporary_path=True)

    # This graph cleanup is required to avoid double memory consumption
    graph.clear()

    if not (argv.framework == 'tf' and argv.tensorflow_custom_operations_config_update):
        output_dir = argv.output_dir if argv.output_dir != '.' else os.getcwd()
        orig_model_name = os.path.normpath(os.path.join(output_dir, argv.model_name))

        return_code = "not executed"
        # This try-except is an additional safeguard so that the IE
        # dependency search does not break the MO pipeline
        try:
            if not argv.legacy_ir_generation and ie_is_available:
                path_to_offline_transformations = os.path.join(os.path.realpath(os.path.dirname(__file__)),
                                                               'back', 'offline_transformations.py')
                status = subprocess.run([sys.executable, path_to_offline_transformations,
                                         "--input_model", orig_model_name,
                                         "--framework", argv.framework,
                                         "--transform", argv.transform], env=os.environ)
                return_code = status.returncode
        except Exception as e:
            return_code = "failed"
            log.error(e, extra={'is_warning': True})

        message = str(dict({
            "platform": platform.system(),
            "mo_version": get_simplified_mo_version(),
            "ie_version": get_simplified_ie_version(env=os.environ),
            "python_version": sys.version,
            "return_code": return_code
        }))
        t = tm.Telemetry()
        t.send_event('mo', 'offline_transformations_status', message)

        # If the IR wasn't produced by the offline_transformations step, fall back to the IR
        # produced by prepare_ir. That IR needs to be renamed from XXX_tmp.xml to XXX.xml
        suffixes = [".xml", ".bin", ".mapping"]
        if return_code != 0:
            if len(argv.transform) != 0:
                # Remove the temporary IR before raising the exception
                for suf in suffixes:
                    path_to_file = orig_model_name + "_tmp" + suf
                    if os.path.exists(path_to_file):
                        os.remove(path_to_file)
                raise Error("Failed to apply transformations: {}".format(argv.transform))

            log.error("Using fallback to produce IR.", extra={'is_warning': True})
            for suf in suffixes:
                # remove any existing file with the final name
                path_to_file = orig_model_name + suf
                if os.path.exists(path_to_file):
                    os.remove(path_to_file)

                # rename the tmp IR to the original name
                os.rename(orig_model_name + "_tmp" + suf, orig_model_name + suf)
        else:
            for suf in suffixes:
                # remove the temporary files produced by prepare_emit_ir
                path_to_file = orig_model_name + "_tmp" + suf
                if os.path.exists(path_to_file):
                    os.remove(path_to_file)

            # add meta information to the IR
            append_ir_info(file=orig_model_name,
                           meta_info=get_meta_info(argv),
                           mean_data=mean_data,
                           input_names=input_names)

        print('[ SUCCESS ] Generated IR version {} model.'.format(get_ir_version(argv)))
        print('[ SUCCESS ] XML file: {}.xml'.format(orig_model_name))
        print('[ SUCCESS ] BIN file: {}.bin'.format(orig_model_name))

    return 0
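# Context for the versions above: emit_ir is the last stage of the Model Optimizer
# conversion pipeline. It receives the fully transformed Graph, serializes it to IR,
# and (in the later versions) runs the offline transformations step on the result.
# The sketch below shows how it is typically wired into the MO entry point. This is a
# hypothetical illustration, not taken from the snippets above: `prepare_ir` is an
# assumed helper whose name and location differ between Model Optimizer releases.
import argparse


def driver(argv: argparse.Namespace) -> int:
    # prepare_ir (assumed) runs the framework importer plus the front/middle/back
    # transformations and returns the final Graph to be serialized.
    graph = prepare_ir(argv)
    # emit_ir serializes the graph to <output_dir>/<model_name>.xml/.bin,
    # optionally invokes offline_transformations.py, and returns 0 on success.
    return emit_ir(graph, argv)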