def run(args):
    """Convert the ONNX model under ``args.test_dir`` to OpenVINO IR (if
    needed) and run inference on the bundled test data.

    Parameters
    ----------
    args : argparse.Namespace-like
        Must provide ``test_dir``, ``data_type``, ``force_mo`` and
        ``log_level``; ``input_model`` / ``output_dir`` are set on it
        before it is handed to the model-optimizer ``driver``.

    Returns
    -------
    Whatever ``inference`` returns for the optimized model
    (presumably a pass/fail result — confirm against caller).
    """
    test_dir = os.path.abspath(args.test_dir)
    test_dir_name = test_dir.split(os.path.sep)[-1]
    onnx_filename = os.path.join(test_dir, 'model.onnx')
    input_names, output_names = onnx_input_output_names(onnx_filename)
    test_data_dir = os.path.join(test_dir, 'test_data_set_0')
    inputs, outputs = load_test_data(test_data_dir, input_names, output_names)

    mo_output_dir = os.path.join('out', 'dldt_{}.{}'.format(
        test_dir_name, args.data_type.lower()))
    mo_model_xml = os.path.join(mo_output_dir, 'model.xml')
    mo_model_bin = os.path.join(mo_output_dir, 'model.bin')

    # exist_ok makes the create idempotent, replacing the original
    # check-then-create sequence (which was racy and redundant).
    os.makedirs(mo_output_dir, exist_ok=True)
    # The IR is considered cached only when both halves are present.
    mo_cached = (os.path.exists(mo_model_xml) and
                 os.path.exists(mo_model_bin))

    if args.force_mo or not mo_cached:
        # Produce the optimized IR (.xml/.bin) with the model optimizer.
        args.input_model = onnx_filename
        args.output_dir = mo_output_dir
        from mo.main import driver
        driver(args)
    else:
        # When the optimizer is skipped, set up logging ourselves
        # (presumably driver() normally does this — verify).
        log.basicConfig(
            format="[ %(levelname)s ] %(message)s",
            level=args.log_level, stream=sys.stdout)

    # Run the model through the inference engine against the test data.
    return inference(args, mo_model_xml, mo_model_bin, inputs, outputs)
def export(model, config, filename, folder=None, postprocess=None):
    """Export *model* to OpenVINO IR via an intermediate ONNX file.

    Parameters
    ----------
    model : the model object accepted by ``onnx_exporter.export``
    config : object providing ``input_size`` (used for ONNX export)
    filename : str
        Base name of the generated IR files (``<filename>.xml`` etc.).
    folder : str, optional
        Output directory; defaults to the current working directory.
    postprocess : callable, optional
        Called with ``(path_to_xml, config)`` after conversion.
    """
    # mkstemp returns an OPEN file descriptor; the original discarded it,
    # leaking one fd per call. Close it immediately — we only need the path.
    fd, tmp = tempfile.mkstemp()
    os.close(fd)
    try:
        onnx_exporter.export(model, config.input_size, tmp)

        from mo.main import driver
        from mo.utils import import_extensions
        from mo.utils.cli_parser import get_absolute_path
        folder = folder or get_absolute_path('.')
        argv = _argv_wrapper({
            'input_model': tmp,
            'framework': 'onnx',
            'model_name': filename,
            'output_dir': folder,
            'log_level': 'ERROR',
            'mean_values': (),
            'scale_values': (),
            'reverse_input_channels': False,
            'data_type': 'float',
            'disable_fusing': False,
            'disable_resnet_optimization': False,
            'disable_gfusing': False,
            'move_to_preprocess': False,
            'extensions': ','.join([
                import_extensions.default_path(),
                os.path.dirname(mo_extensions.__file__)
            ]),
            'silent': True
        })
        logging.info('===> Running model optimizer...')
        driver(argv)
    finally:
        # The intermediate ONNX file was never cleaned up before; remove it
        # so repeated exports do not accumulate temp files.
        os.remove(tmp)
    if postprocess:
        postprocess(os.path.join(folder, filename + '.xml'), config)