def align_model(onnx_path: str, tnn_proto_path: str, tnn_model_path: str,
                input_file_path: str = None, refer_path: str = None,
                input_names: str = None, is_tflite: bool = False) -> bool:
    """Align a source (ONNX or TFLite) model against its converted TNN model.

    Currently supported model layouts: single input / single output and
    single input / multiple outputs.

    :param onnx_path: path to the source ONNX (or, when ``is_tflite`` is
        True, TFLite) model file.
    :param tnn_proto_path: path to the converted TNN proto file.
    :param tnn_model_path: path to the converted TNN model file.
    :param input_file_path: optional file holding input data; random data is
        generated when omitted.
    :param refer_path: optional file holding reference outputs; the source
        model is executed to produce them when omitted.
    :param input_names: optional input-name/shape spec that overrides the
        shapes read from both models.
    :param is_tflite: True when the source model is a TFLite model.
    :return: True when the alignment check ran to completion.
    """
    logging.info("{} align model (tflite or ONNX vs TNN),please wait a moment {}\n"
                 .format("-" * 10, "-" * 10))

    checker.check_file_exist(tnn_proto_path)
    checker.check_file_exist(tnn_model_path)

    # Resolve input shapes: an explicit spec overrides both models,
    # otherwise read shapes from each model independently and compare.
    if input_names is not None:
        input_info = parse_input_names(input_names)
        tnn_input_info = input_info
        onnx_input_info = input_info
    else:
        tnn_input_info = get_input_shape_from_tnn(tnn_proto_path)
        if is_tflite:
            onnx_input_info = get_input_shape_from_tflite(onnx_path)
        else:
            onnx_input_info = get_input_shape_from_onnx(onnx_path)

    if is_tflite:
        check_input_lite_info(onnx_input_info, tnn_input_info)
    else:
        check_input_info(onnx_input_info, tnn_input_info)

    if input_file_path is None:
        # generate data
        input_path = data.gene_random_data(onnx_input_info)
    else:
        if os.path.exists(input_file_path):
            input_path = input_file_path
        else:
            logging.error("Invalid input_file_path")
            sys.exit(return_code.ALIGN_FAILED)

    # Produce reference outputs by running the source model, unless the
    # caller supplied a pre-computed reference file.
    if refer_path is None:
        if is_tflite:
            reference_output_path = run_tflite(onnx_path, input_path,
                                               onnx_input_info)
        else:
            reference_output_path = run_onnx(onnx_path, input_path,
                                             onnx_input_info)
    else:
        if os.path.exists(refer_path):
            reference_output_path = refer_path
        else:
            logging.error("Invalid refer_path")
            sys.exit(return_code.ALIGN_FAILED)

    run_tnn_model_check(tnn_proto_path, tnn_model_path, input_path,
                        reference_output_path, is_tflite)

    # Only delete artifacts this function generated itself; never remove
    # caller-provided input or reference files.
    if input_file_path is None and os.path.exists(input_path):
        data.clean_temp_data(os.path.dirname(input_path))
    if refer_path is None and os.path.exists(reference_output_path):
        data.clean_temp_data(reference_output_path)

    return True
def align_model(onnx_path: str, tnn_proto_path: str, tnn_model_path: str,
                input_file_path: str = None, refer_path: str = None,
                input_names: str = None) -> bool:
    """Align an ONNX model against its converted TNN model.

    Currently supported model layouts: single input / single output and
    single input / multiple outputs.

    :param onnx_path: path to the source ONNX model file.
    :param tnn_proto_path: path to the converted TNN proto file.
    :param tnn_model_path: path to the converted TNN model file.
    :param input_file_path: optional file holding input data; random data is
        generated when omitted.
    :param refer_path: optional file holding reference outputs; the ONNX
        model is executed to produce them when omitted.
    :param input_names: optional input-name/shape spec that overrides the
        shapes read from both models.
    :return: True when the alignment check ran to completion.
    """
    checker.check_file_exist(tnn_proto_path)
    checker.check_file_exist(tnn_model_path)

    # Resolve input shapes: an explicit spec overrides both models,
    # otherwise read shapes from each model independently and compare.
    if input_names is not None:
        input_info = parse_input_names(input_names)
        tnn_input_info = input_info
        onnx_input_info = input_info
    else:
        tnn_input_info = get_input_shape_from_tnn(tnn_proto_path)
        onnx_input_info = get_input_shape_from_onnx(onnx_path)
    check_input_info(onnx_input_info, tnn_input_info)

    if input_file_path is None:
        # generate data
        input_path = data.gene_random_data(onnx_input_info)
    else:
        if os.path.exists(input_file_path):
            input_path = input_file_path
        else:
            # Consistent error handling: log and exit with the documented
            # failure code instead of print() + exit(-1).
            logging.error("Invalid input_file_path")
            sys.exit(return_code.ALIGN_FAILED)

    if refer_path is None:
        reference_output_path = run_onnx(onnx_path, input_path,
                                         onnx_input_info)
    else:
        if os.path.exists(refer_path):
            reference_output_path = refer_path
        else:
            logging.error("Invalid refer_path")
            sys.exit(return_code.ALIGN_FAILED)

    run_tnn_model_check(tnn_proto_path, tnn_model_path, input_path,
                        reference_output_path)

    # Only delete artifacts this function generated itself; never remove
    # caller-provided input or reference files.
    if input_file_path is None and os.path.exists(input_path):
        data.clean_temp_data(os.path.dirname(input_path))
    if refer_path is None and os.path.exists(reference_output_path):
        data.clean_temp_data(reference_output_path)

    return True
def align_model(original_model_path: str, tnn_proto_path: str,
                tnn_model_path: str, input_file_path: str = None,
                refer_path: str = None, specify_input_args: str = None,
                is_tflite: bool = False, debug_mode: bool = False,
                align_batch: bool = False) -> bool:
    """Align a source (ONNX or TFLite) model against its converted TNN model.

    Currently supported model layouts: single input / single output and
    single input / multiple outputs.

    :param original_model_path: path to the source ONNX (or, when
        ``is_tflite`` is True, TFLite) model file.
    :param tnn_proto_path: path to the converted TNN proto file.
    :param tnn_model_path: path to the converted TNN model file.
    :param input_file_path: optional file holding input data; random data is
        generated when omitted.
    :param refer_path: optional file holding reference outputs; the source
        model is executed to produce them when omitted.
    :param specify_input_args: optional input-name/shape spec overriding the
        shapes read from the source model.
    :param is_tflite: True when the source model is a TFLite model.
    :param debug_mode: when True, generated temp data is kept for inspection.
    :param align_batch: forwarded to the TNN model check.
    :return: True when the alignment check ran to completion.
    """
    logging.info("{} align model (tflite or ONNX vs TNN),please wait a moment {}\n"
                 .format("-" * 10, "-" * 10))

    checker.check_file_exist(tnn_proto_path)
    checker.check_file_exist(tnn_model_path)

    # Input-info layout produced by the shape readers:
    # list = { "input name1":{
    #                        {"shape": [n, c,...]},
    #                        {"data_type": 0}
    #                        },
    #          "input name22": {
    #                        {"shape": [n, c,...]},
    #                        {"data_type": 0}
    #                        }
    # get original input info
    if is_tflite:
        original_input_info = get_input_shape_from_tflite(original_model_path)
    else:
        original_input_info = get_input_shape_from_onnx(original_model_path)

    # get tnn input info
    tnn_input_info = get_input_shape_from_tnn(tnn_proto_path)

    # check input: an explicit spec overrides the shapes read from the
    # source model before the comparison against the TNN shapes.
    if specify_input_args is not None:
        specify_input_info = parse_specify_input_args(specify_input_args)
        update_original_input_shape(original_input_info, specify_input_info)

    if is_tflite:
        check_input_lite_info(original_input_info, tnn_input_info)
    else:
        check_input_info(original_input_info, tnn_input_info)

    if input_file_path is None:
        # generate data
        input_path = data.gene_random_data(original_input_info)
    else:
        if os.path.exists(input_file_path):
            input_path = input_file_path
        else:
            logging.error("Invalid input_file_path")
            sys.exit(return_code.ALIGN_FAILED)

    # Produce reference outputs by running the source model, unless the
    # caller supplied a pre-computed reference file.
    if refer_path is None:
        if is_tflite:
            reference_output_path = run_tflite(original_model_path,
                                               input_path,
                                               original_input_info)
        else:
            reference_output_path = run_onnx(original_model_path, input_path,
                                             original_input_info)
    else:
        if os.path.exists(refer_path):
            reference_output_path = refer_path
        else:
            logging.error("Invalid refer_path")
            sys.exit(return_code.ALIGN_FAILED)

    logging.info("Run tnn model_check...")
    run_tnn_model_check(tnn_proto_path, tnn_model_path, input_path,
                        reference_output_path, is_tflite, align_batch)

    # Keep generated temp data around in debug mode; otherwise delete only
    # artifacts this function created itself.
    if debug_mode is False:
        if input_file_path is None and os.path.exists(input_path):
            data.clean_temp_data(os.path.dirname(input_path))
        if refer_path is None and os.path.exists(reference_output_path):
            data.clean_temp_data(reference_output_path)

    return True