def run(self) -> bool:
    """Run a single-op reproduction case for every collected AIC-error kernel.

    For each kernel name gathered by the collection phase, this rebuilds the
    op parameters, compiles a direct OpUT test case pointing at the compiled
    .o binary, runs it, generates a standalone reproduction script, and
    appends the result to that error's info.txt summary.

    :return: True when all cases have been processed.
    """
    for i, aicerr_kernel in enumerate(self.collection.kernel_name_list):
        params = self._get_op_param(kernel_name=aicerr_kernel)
        op_module = self._get_module_str(kernel_name=aicerr_kernel)
        op_imply_type = self._get_op_impl_type(params, op_module)
        op_type = self._str2Hump(op_module.split(".")[-1])
        bin_path = os.path.join(self.collection.collect_compile_path,
                                'kernel_meta')
        block_dim, tiling_data = self._get_tiling_info(
            kernel_name=aicerr_kernel)
        utils.print_info_log(
            f"Create op case\nop_type:{op_type}\nop_module:{op_module}\nparams:{params}"
        )
        op_case = OpUT(op_type, op_module, None)
        test_case = {
            "params": params,
            "case_name": aicerr_kernel + "_test",
            "bin_path": os.path.join(bin_path, aicerr_kernel + ".o"),
            "op_imply_type": op_imply_type,
            "block_dim": block_dim,
            "tiling_data": tiling_data
        }
        op_case.add_direct_case(test_case)
        result = op_case.run()
        sum_path = self._get_summary_path(i)
        case_file = self._gen_single_op(sum_path, op_case, test_case,
                                        aicerr_kernel)
        self._add_summary_to_info(sum_path, result, case_file)
    # BUG FIX: the signature promises bool but the original implicitly
    # returned None (falsy), so callers testing the result saw failure.
    return True
def _get_tiling_info(self, kernel_name) -> tuple:
    """Extract block_dim and tiling_data for *kernel_name* from the app log.

    Greps the [AIC_INFO] context around the kernel's dev_func record and
    parses the block_dim and tiling_data fields out of it.

    :param kernel_name: kernel whose tiling info is wanted
    :return: (block_dim, tiling_data) — block_dim is an int or "" when not
             found; tiling_data is bytes or "" when not found
    """
    aic_info_cmd = [
        'grep', '-r', '-C', '7',
        "\[AIC_INFO\] dev_func:{}".format(kernel_name),
        self.collection.collect_applog_path
    ]
    _, aic_info = utils.execute_command(aic_info_cmd)

    # BUG FIX: both fields now default to "" so the final return cannot
    # raise NameError when a pattern is missing from the log.
    block_dim = ""
    aic_info_blockdim_regexp = r"\[AIC_INFO\]\sblock_dim:(\d+)"
    aic_info_blockdim_ret = re.findall(aic_info_blockdim_regexp, aic_info,
                                       re.M)
    if not aic_info_blockdim_ret:
        utils.print_warn_log(f"Failed to get {aic_info_blockdim_regexp}")
    elif not aic_info_blockdim_ret[0]:
        utils.print_info_log(f"get {aic_info_blockdim_regexp} is null")
    else:
        # BUG FIX: findall with a single group returns plain strings, so the
        # original int(ret[0][0]) converted only the FIRST DIGIT of block_dim
        # (e.g. 32 became 3).
        block_dim = int(aic_info_blockdim_ret[0])

    tiling_data = ""
    # BUG FIX: the original pattern ended with a lazy group (.*?) which
    # always matched the empty string, so tiling_data was never captured.
    aic_info_tiling_data_regex = r"\[AIC_INFO\]\stiling_data:(.*)"
    aic_info_tiling_data_ret = re.findall(aic_info_tiling_data_regex,
                                          aic_info, re.M)
    if not aic_info_tiling_data_ret:
        utils.print_warn_log(f"Failed to get {aic_info_tiling_data_regex}")
    elif not aic_info_tiling_data_ret[0]:
        utils.print_info_log(f"get {aic_info_tiling_data_regex} is null")
    else:
        tiling_data = bytes(aic_info_tiling_data_ret[0], encoding="utf-8")
    return (block_dim, tiling_data)
def _get_available_addrs(self: any, occur_time: str) -> list:
    '''
    Return the device memory regions still allocated at *occur_time*.

    Greps DevMalloc/DevFree records from the application log: every
    allocation that happened before the error time is recorded, and every
    free before the error time removes the first matching address again.

    :param occur_time: timestamp at which the aicore error occurred
    :return: list of (addr, size) tuples usable at that moment
    '''
    alloc_cmd = [
        'grep', 'DevMalloc: Succ,', '-nr',
        self.collection.collect_applog_path
    ]
    _, alloc_data = utils.execute_command(alloc_cmd)
    alloc_regexp = (r"(\d+-\d+-\d+-\d+:\d+:\d+\.\d+\.\d+).+?size\s*=\s*"
                    r"([\d]+).+?ptr\s*=\s*([\da-zA-Z]+)")
    alloc_records = re.findall(alloc_regexp, alloc_data, re.M)

    free_cmd = [
        'grep', 'DevFree: mem', '-nr', self.collection.collect_applog_path
    ]
    _, free_data = utils.execute_command(free_cmd)
    free_regexp = r"(\d+-\d+-\d+-\d+:\d+:\d+\.\d+\.\d+).+?mem\s*=\s*([\da-zA-Z]+)"
    free_records = re.findall(free_regexp, free_data, re.M)

    deadline = utils.strplogtime(occur_time)
    avl_addr = []
    # Everything allocated strictly before the error moment is a candidate.
    for alloc_time, size, addr in alloc_records:
        if utils.strplogtime(alloc_time) < deadline:
            avl_addr.append((addr, int(size)))
    # Every free before the error moment invalidates one matching region.
    for free_time, addr in free_records:
        if utils.strplogtime(free_time) < deadline:
            avl_addr = self._remove_first_found_addr(addr, avl_addr)
    utils.print_info_log("get available addr: {}".format(avl_addr))
    return avl_addr
def _add_summary_to_info(self, err_i_folder, result, test_case_file):
    """Append the single-op test result section to info.txt in *err_i_folder*."""
    summary_text = f"""
***********************7. result of single_op_test*************************
{result}
Running single op test \"python3 {test_case_file}\" can reprocessing."
"""
    target_file = os.path.join(err_i_folder, "info.txt")
    utils.write_file(target_file, summary_text, "a")
    utils.print_info_log(f"Write summary {target_file}")
def _gen_single_op(self, sum_path, op_case: OpUT, test_case: dict,
                   kernel_name):
    """Generate a standalone python script that reproduces the failing op.

    Rewrites the input params so they load the dumped .npy tensors, renders
    the test case into a template script under <sum_path>/single_op_test,
    and copies the single_op_test_frame package next to it so the script is
    self-contained.

    :param sum_path: per-error summary folder
    :param op_case: the OpUT case that was just run
    :param test_case: the direct-case dict (mutated in place)
    :param kernel_name: kernel whose dump/.o artifacts are referenced
    :return: path of the generated test case file
    """
    input_index = 0
    for param in test_case.get("params"):
        if isinstance(param, dict) and param.get("param_type") == "input":
            npy_file = ".".join([kernel_name, "input", str(input_index), "npy"])
            # start_flag_/_end_flag markers are stripped after json.dumps so
            # the value is emitted as a python expression, not a string.
            param["value"] = \
                f'start_flag_np.load(os.path.join(dump_path, "{npy_file}"))_end_flag'
            input_index += 1
    test_case["bin_path"] = \
        f'start_flag_os.path.join(compile_path, "{kernel_name}.o")_end_flag'
    test_case_str = json.dumps(test_case, indent=4)
    test_case_str = test_case_str.replace("\"start_flag_", "").replace(
        "_end_flag\"", "").replace("\\\"", "\"")
    template_file = """
import os
import numpy as np
from ms_interface.single_op_test_frame.ut import OpUT


def run_sample_case():
    file_path = os.path.dirname(__file__)
    info_path = os.path.dirname(os.path.dirname(file_path))
    dump_path = os.path.join(info_path, "collection", "dump")
    compile_path = os.path.join(info_path, "collection", "compile", "kernel_meta")
    op_case = OpUT("{op_type}", "{op_module}", "{func_name}")
    test_case = {test_case}
    op_case.add_direct_case(test_case)
    op_case.run()


if __name__ == '__main__':
    run_sample_case()
""".format(op_type=str(op_case.op_type),
           op_module=str(op_case.op_module_name),
           func_name=str(op_case.op_func_name),
           test_case=str(test_case_str))
    test_case_dir = os.path.join(sum_path, "single_op_test")
    if not os.path.exists(test_case_dir):
        os.makedirs(test_case_dir)
    test_case_file = os.path.join(test_case_dir,
                                  test_case.get("case_name") + ".py")
    utils.write_file(test_case_file, template_file)
    src_frame_dir = os.path.join(os.path.dirname(__file__),
                                 "single_op_test_frame")
    dst_parent_dir = os.path.join(test_case_dir, "ms_interface")
    if not os.path.exists(dst_parent_dir):
        os.makedirs(dst_parent_dir)
    dst_frame_dir = os.path.join(dst_parent_dir, "single_op_test_frame")
    # BUG FIX: shutil.copytree raises FileExistsError when the destination
    # already exists (e.g. the tool is rerun over the same output folder).
    if not os.path.exists(dst_frame_dir):
        shutil.copytree(src_frame_dir, dst_frame_dir)
    utils.print_info_log(
        f"op test case created! Test command \"python3 {test_case_file}\"")
    return test_case_file
def _get_all_error_log(self: any) -> None:
    """Dump every ERROR-level slog line into <output_path>/error.log."""
    target_file = os.path.join(self.output_path, "error.log")
    utils.print_info_log('Start to analyze error slog.')
    grep_cmd = ['grep', r'\[ERROR\]', '-nr',
                self.collection.collect_slog_path]
    status, data = utils.execute_command(grep_cmd)
    if status != 0:
        utils.print_error_log("Failed to execute command: %s. %s"
                              % (" ".join(grep_cmd), " ".join(data)))
        raise utils.AicErrException(
            Constant.MS_AICERR_EXECUTE_COMMAND_ERROR)
    utils.write_file(target_file, data)
    utils.print_info_log('The error slog is saved in %s.' % target_file)
def _check_addr(self, avaliable_addrs, used_addrs):
    """Validate that every input/output tensor lies inside allocated memory.

    For each tensor the byte size is derived from shape * sizeof(dtype);
    both the start and end address must fall inside one of the regions in
    *avaliable_addrs*, otherwise the param is flagged with "invalid": True.

    :param avaliable_addrs: list of (addr, size) regions alive at error time
    :param used_addrs: dict with "input_addr"/"output_addr" param lists
    """
    # The input and output loops were identical except for the log label,
    # so the logic is factored into one helper.
    self._check_param_addr_list(used_addrs.get("input_addr"),
                                avaliable_addrs, "input")
    self._check_param_addr_list(used_addrs.get("output_addr"),
                                avaliable_addrs, "output")

def _check_param_addr_list(self, params, avaliable_addrs, tag):
    """Check one list of tensor params (inputs or outputs) against regions.

    Mutates each param dict: sets "size", and "invalid": True when either
    the start or the end address is outside every available region.
    """
    for param in params:
        start_addr = int(param.get("addr"))
        shape_size = self._cal_shape_size(param.get("shape"))
        size_of_dtype = Constant.SIZE_OF_DTYPE.get(param.get("dtype"))
        end_addr = start_addr + int(shape_size) * int(size_of_dtype)
        utils.print_info_log(
            f"shape_size is {shape_size}, size_of_dtype is {size_of_dtype}"
        )
        param["size"] = int(shape_size) * int(size_of_dtype)
        if not self._check_addr_in_range(start_addr, avaliable_addrs):
            utils.print_error_log(
                "%s_addr not avaliable, %s_start_addr:%#x"
                % (tag, tag, start_addr))
            param["invalid"] = True
        if not self._check_addr_in_range(end_addr, avaliable_addrs):
            utils.print_error_log(
                "%s_addr not avaliable, %s_end_addr:%#x"
                % (tag, tag, end_addr))
            param["invalid"] = True
def _get_imas_log(self: any) -> None:
    """Collect GE IMAS log lines from the app log into imas.log."""
    log_file = os.path.join(self.output_path, "imas.log")
    grep_cmd = ['grep', 'IMAS', '-nr', self.collection.collect_applog_path]
    utils.print_info_log('Start to analyze IMAS log.')
    ret_code, output = utils.execute_command(grep_cmd)
    # grep exits with 1 when nothing matched — that is "no IMAS logs",
    # not a failure.
    if ret_code == 1:
        utils.print_warn_log("There is no IMAS log in %s" % self.output_path)
        return
    if ret_code != 0:
        utils.print_error_log("Failed to execute command: %s. %s"
                              % (" ".join(grep_cmd), " ".join(output)))
        raise utils.AicErrException(
            Constant.MS_AICERR_EXECUTE_COMMAND_ERROR)
    utils.write_file(log_file, output)
    utils.print_info_log('The IMAS log is saved in %s.' % log_file)
def _get_graph_file(self: any) -> any:
    """Pick the newest build-proto graph file under the compile path.

    Walks the collected compile directory for files matching
    Constant.BUILD_PROTO_FILE_PATTERN and chooses the one with the highest
    sort key (first capture group, typically a timestamp/index).

    :return: path of the chosen graph file, or '' when none exists
    """
    # PERF: compile the constant pattern once instead of once per file.
    file_name_pattern = re.compile(Constant.BUILD_PROTO_FILE_PATTERN)
    match_list = []
    for top, _, files in os.walk(self.collection.collect_compile_path):
        for name in files:
            pattern_match = file_name_pattern.match(name)
            if pattern_match:
                match_list.append(
                    (pattern_match.group(1), os.path.join(top, name)))
    if not match_list:
        utils.print_warn_log('There is no graph file in %s.'
                             % self.collection.collect_compile_path)
        return ''
    new_match_list = sorted(match_list, key=lambda s: s[0], reverse=True)
    choose_file = new_match_list[0][1]
    utils.print_info_log('Choose %s to read op info.' % choose_file)
    return choose_file
def _write_summary_file(self: any, summary_info_list: list) -> None:
    """Render README.txt summarising every collected AICERROR."""
    collect_stamp = time.strftime("%Y-%m-%d %H:%M:%S", self.collect_time)
    error_count = len(self.collection.ai_core_error_list)
    detail_block = "\n".join(summary_info_list)
    summary = """本次信息收集发生于%s,共收集到%d个AICERROR,概要如下:
***************************************************************************************************
%s
***************************************************************************************************
建议选择最近发生的AICERROR,查看其中的info.txt
注意:
1、只有在device挂起后收集到的算子输入才是正确的,所以请忽略非device挂起情况下info.txt提示的“NaN/INF”
2、err.log中收集了日志目录下所有ERROR级别的日志
3、imas.log中收集了GE的IMAS日志
""" % (collect_stamp, error_count, detail_block)
    summary_file = os.path.join(self.output_path, "README.txt")
    utils.write_file(summary_file, summary)
    utils.print_info_log('The summary info is saved in %s' % summary_file)
    utils.print_info_log('Analysis finished, please check %s, you can '
                         'view README.txt first.' % self.output_path)
def collect_dump_file(self: any, collect_path: str,
                      op_name_list: list) -> str:
    """
    collect dump file
    :param collect_path: the collect path
    :param op_name_list: the op name list
    :return: the dump collection directory
    """
    # dump files are in compile_path
    utils.check_path_valid(self.compile_path, isdir=True)
    collect_dump_path = os.path.join(collect_path, 'dump')
    utils.check_path_valid(collect_dump_path, isdir=True, output=True)
    copy_dump_file_status = False
    for op_name in op_name_list:
        # BUG FIX: the flag used to be overwritten each iteration, so the
        # success message depended only on the LAST op; now it records
        # whether any dump file was copied.
        if utils.copy_dump_file(self.compile_path, collect_dump_path,
                                op_name):
            copy_dump_file_status = True
    if copy_dump_file_status:
        utils.print_info_log('The dump file is saved in %s.'
                             % collect_dump_path)
    return collect_dump_path
def parse(self: any) -> str:
    """
    Function Description:
    dump data parse.
    Walks the input path for dump files whose name contains the sanitised
    op name and concatenates the parsed content of each one.
    :return: the combined parse result string (may be empty)
    """
    # 1. check arguments valid
    self.check_arguments_valid()
    match_name = "".join(['.', self.op_name.replace('/', '_'), '.'])
    match_dump_list = [
        os.path.join(top, name)
        for top, _, files in os.walk(self.input_path)
        for name in files
        if match_name in name
    ]
    pieces = []
    for dump_file in match_dump_list:
        pieces.append('%s\n' % dump_file)
        pieces.append(self.parse_dump_data(dump_file))
    result_info = "".join(pieces)
    if not match_dump_list:
        utils.print_warn_log('There is no dump file for "%s". Please '
                             'check the dump path.' % self.op_name)
    utils.print_info_log(f"Parse dump file finished,result_info:{result_info}")
    return result_info
def collect_compile_file(self: any, collect_path: str,
                         kernel_name_list: list) -> str:
    """
    collect compile file
    :param collect_path: the collect path
    :param kernel_name_list: the kernel name list
    :return: the compile collection directory
    """
    utils.check_path_valid(self.report_path, isdir=True)
    collect_compile_path = os.path.join(collect_path, 'compile')
    utils.check_path_valid(collect_compile_path, isdir=True, output=True)
    copy_kernel_meta_status = False
    for kernel_name in kernel_name_list:
        # BUG FIX: the flag used to be overwritten each iteration, so only
        # the LAST kernel's copy result was remembered; now it records
        # whether any kernel_meta was copied.
        if self.copy_kernel_meta(self.report_path, collect_compile_path,
                                 kernel_name):
            copy_kernel_meta_status = True
    copy_proto_file_status = self.copy_proto_file(self.report_path,
                                                  collect_compile_path)
    if copy_kernel_meta_status or copy_proto_file_status:
        utils.print_info_log(
            'The compile file is saved in %s.' % collect_compile_path)
    return collect_compile_path
def get_op_info(self: any) -> tuple:
    """Scan the slog for AIC_ERROR records and resolve node/kernel names.

    :return: (ai_core_error_list, node_name_list, kernel_name_list)
    :raise: utils.AicErrException when no usable AIC_ERROR data is found
    """
    grep_cmd = ['grep', '<exception_print>TIME.*4060006', '-nr', '-A',
                '120', self.collect_slog_path]
    status, data = utils.execute_command(grep_cmd)
    if status != 0:
        utils.print_error_log("Failed to execute command: %s."
                              % " ".join(grep_cmd))
        raise utils.AicErrException(
            Constant.MS_AICERR_INVALID_SLOG_DATA_ERROR)
    found_errors = re.findall(Constant.EXCEPTION_PATTERN, data,
                              re.M | re.S)
    if not found_errors:
        utils.print_info_log("No AIC_ERROR found.")
        raise utils.AicErrException(
            Constant.MS_AICERR_INVALID_SLOG_DATA_ERROR)
    for device_aic_err in found_errors:
        if len(device_aic_err) != Constant.AIC_ERROR_TUPLE_LEN:
            utils.print_info_log("The AIC_ERROR is not complete.")
            raise utils.AicErrException(
                Constant.MS_AICERR_INVALID_SLOG_DATA_ERROR)
        log_time, dev_id, stream_id, task_id = device_aic_err[:4]
        err_time = utils.strplogtime(log_time)
        node_name, kernel_name = self._get_node_and_kernel_name(
            dev_id, task_id, stream_id, err_time)
        # Records that cannot be matched to a host-side node are skipped.
        if node_name == '' and kernel_name == '':
            continue
        self.ai_core_error_list.append(device_aic_err)
        self.node_name_list.append(node_name)
        self.kernel_name_list.append(kernel_name)
    if not self.ai_core_error_list:
        utils.print_error_log(
            "The AIC_ERROR of device does not match the host.")
        raise utils.AicErrException(
            Constant.MS_AICERR_INVALID_SLOG_DATA_ERROR)
    return self.ai_core_error_list, self.node_name_list, \
        self.kernel_name_list
def parse(self: any) -> None:
    """
    Analyse every collected ai core error.

    Ensures a cce-objdump binary is reachable, then for each error:
    creates a per-error folder, decompiles the faulting kernel, checks
    input/output addresses against live allocations, parses dump data,
    writes info.txt, and finally writes the overall summary file.

    :raise: utils.AicErrException when no cce-objdump can be located
    """
    utils.print_info_log('******************Analysis******************')
    aicore_error_data_list = self._aicore_error_data()
    utils.print_info_log('Start to analyze each ai core error.')
    summary_info_list = []
    # decompile: make sure a cce-objdump binary is available on PATH
    if "aarch64" in platform.machine():
        obj_dump_file = "cce-objdump_aarch64"
    else:
        obj_dump_file = "cce-objdump"
    obj_dump_file = os.path.join(os.getcwd(), "tools", obj_dump_file)
    if os.path.exists(obj_dump_file):
        # was os.system("chmod 755 ..."): os.chmod avoids shelling out
        os.chmod(obj_dump_file, 0o755)
        os.environ["PATH"] = os.path.join(
            os.getcwd(), "tools") + ":" + os.environ["PATH"]
    else:
        cce_dump = shutil.which("cce-objdump")
        if not cce_dump:
            # guess where cce-objdump might live inside an Ascend install
            parent_path = "aarch64-linux" if "aarch64" in platform.machine(
            ) else "x86_64-linux"
            cce_dump_guess = os.path.join("usr/local/Ascend/latest",
                                          parent_path,
                                          "ccec_compiler/bin/cce-objdump")
            if os.path.exists(cce_dump_guess):
                cce_dump = cce_dump_guess
        if not cce_dump:
            utils.print_error_log(
                'Cannot find cce-objdump! please add cce-objdump path in env PATH.'
            )
            raise utils.AicErrException(
                Constant.MS_AICERR_EXECUTE_COMMAND_ERROR)
    for i, aic_err in enumerate(self.collection.ai_core_error_list):
        # parse one aic error reported by slog
        info = AicErrorInfo()
        info.err_time, info.dev_id, info.stream_id, info.task_id, \
            info.core_id, info.aic_error, info.start_pc, info.extra_info, \
            info.current_pc = aic_err
        utils.print_info_log(
            "******************No.%d %s******************"
            % (i, info.err_time))
        info.err_time_obj = utils.strplogtime(info.err_time)
        err_i_folder_name = "aicerror_%d_%s" % (
            i, time.strftime("%Y%m%d%H%M%S",
                             info.err_time_obj.timetuple()))
        err_i_folder = os.path.join(self.output_path, err_i_folder_name)
        utils.check_path_valid(err_i_folder, isdir=True, output=True)
        info.node_name = self.collection.node_name_list[i]
        info.kernel_name = self.collection.kernel_name_list[i]
        # get hisi log
        self._get_hisi_log(info, err_i_folder)
        # get op info in build proto file
        self._get_op_by_graph(aicore_error_data_list[Constant.GRAPH_FILE],
                              info)
        kernel_meta_path = os.path.join(
            self.collection.collect_compile_path, 'kernel_meta')
        if os.path.exists(kernel_meta_path):
            # decompile the faulting instruction
            result = self._decompile([info.kernel_name, kernel_meta_path],
                                     err_i_folder, info)
            if result is False:
                utils.print_warn_log(
                    "decompile kernel_meta file %s failed."
                    % os.path.join(kernel_meta_path,
                                   info.kernel_name + ".o"))
        else:
            utils.print_warn_log("kernel_meta path %s not exist"
                                 % kernel_meta_path)
        try:
            # input output address check against live allocations
            info.aval_addrs = self._get_available_addrs(info.err_time)
            info.necessary_addr = self._get_necessary_addrs(
                info.kernel_name)
            self._check_addr(info.aval_addrs, info.necessary_addr)
        except Exception as e:
            # BUG FIX: was `except BaseException`, which also swallowed
            # KeyboardInterrupt/SystemExit; address checking is best-effort
            # so errors are logged and analysis continues.
            import logging
            logging.exception(e)
            print("Check addr error failed")
        info.input_output_addrs = self._get_input_output_addrs(
            info, err_i_folder,
            aicore_error_data_list[Constant.ALLOC_ADDR],
            aicore_error_data_list[Constant.ACTUAL_ADDR])
        # collect address-overflow information
        info.addr_overflow = aicore_error_data_list[Constant.ADDR_OVERFLOW]
        # op code address and args address
        info.op_addr, info.args_addr, info.multi_args_addr = \
            self._get_op_and_args_addr(info.start_pc)
        # parse dump
        if self.collection.collect_dump_path:
            parser = DumpDataParser(self.collection.collect_dump_path,
                                    info.node_name, info.kernel_name)
            info.dump_info = parser.parse()
        # write info file
        self._write_errorinfo_file(err_i_folder, info, i)
        summary_info_list.append(
            "%s %s device_id=%s core_id=%s task_id=%s node=%s "
            "kernel=%s" % (err_i_folder_name, info.aic_error, info.dev_id,
                           info.core_id, info.task_id, info.node_name,
                           info.kernel_name))
    utils.print_info_log('Finish to analyze each ai core error.')
    # write summary info
    self._write_summary_file(summary_info_list)
@staticmethod
def _write_errorinfo_file(err_i_folder: str, info: any, index: int) -> None:
    """Write the analysed details of one AICERROR into info.txt.

    BUG FIX: the signature takes no *self* yet callers invoke it as
    self._write_errorinfo_file(...), so the @staticmethod decorator is
    required for the bound call not to raise TypeError.

    :param err_i_folder: per-error output folder
    :param info: AicErrorInfo whose analyse() result is written
    :param index: ordinal of this error (for the log message)
    """
    info_file = os.path.join(err_i_folder, "info.txt")
    utils.write_file(info_file, info.analyse())
    utils.print_info_log('The ai core error info for No.%s is saved '
                         'in %s' % (index, info_file))
def collect(self: any) -> None:
    """
    Collect every artifact needed for analysis into <output>/collection:
    slog, plog, compiled kernels, dump files and bbox data, and parse the
    ai core error list out of the logs.
    """
    self.check_argument_valid()
    collect_path = os.path.join(self.output_path, 'collection')
    utils.check_path_valid(collect_path, isdir=True, output=True)
    utils.print_info_log('******************Collection******************')
    # collect slog
    utils.print_info_log('Start to collect slog file.')
    self.collect_slog_path = self.collect_slog_file(
        self.report_path, collect_path)
    utils.print_info_log('The slog file is saved in %s.'
                         % self.collect_slog_path)
    # collect plog
    utils.print_info_log('Start to collect plog file.')
    # BUG FIX: was self.collect_plog_file(self, collect_path) — passing
    # self explicitly on a bound call shifted every argument by one.
    self.collect_plog_file(collect_path)
    self.collect_applog_path = collect_path
    utils.print_info_log('The plog file is saved in %s.'
                         % self.collect_applog_path)
    # NOTE(review): device-log parsing (DeviceLogParser) is disabled here
    # because device logs are unavailable in some scenarios; only the
    # host plog is parsed.
    utils.print_info_log('Start to parse ai core error only by plog file.')
    log_parser = HostLogParser(self.collect_applog_path)
    self.ai_core_error_list, self.node_name_list, self.kernel_name_list = \
        log_parser.get_op_info()
    utils.print_info_log('The ai core error occurs in %s.'
                         % self.node_name_list)
    # collect compile
    utils.print_info_log('Start to collect compile file.')
    self.collect_compile_path = self.collect_compile_file(
        collect_path, self.kernel_name_list)
    utils.print_info_log('Start to collect dump file.')
    self.collect_dump_path = self.collect_dump_file(
        collect_path, self.node_name_list)
    # collect bbox
    utils.print_info_log('Start to collect bbox file.')
    self.collect_bbox_path = self.collect_bbox_file(
        self.report_path, collect_path)
    utils.print_info_log('The bbox file is saved in %s.'
                         % self.collect_bbox_path)
def _cal_shape_size(self, shape_str):
    """Return the element count described by a shape string like "[2,3,4]".

    :param shape_str: bracketed comma-separated dims; "" or "[]" mean scalar
    :return: int product of the dims (1 for empty/scalar shapes)
    """
    utils.print_info_log("shape_str is {}".format(shape_str))
    if shape_str == "":
        return 1
    dims = [d for d in
            shape_str.replace("[", "").replace("]", "").split(",")
            if d.strip()]
    # BUG FIX: "[]" previously produced [''] and crashed on int('');
    # an empty dim list means a scalar, i.e. one element.
    if not dims:
        return 1
    # BUG FIX: the original reduce without an initial value returned the
    # raw STRING for single-dim shapes (reduce skips the lambda when the
    # sequence has one element); seeding with 1 always yields an int.
    return reduce(lambda x, y: x * y, (int(d) for d in dims), 1)
def _get_necessary_addrs(self: any, kernal_name: str) -> dict:
    '''
    Collect the addresses the failing kernel needs.

    Greps the [AIC_INFO] records of *kernal_name* from the app log and
    parses the input/output tensor descriptors (index, shape, format,
    dtype, addr) plus the workspace byte count.

    :param kernal_name: kernel_name of the aicore error
    :return: dict with keys "input_addr", "output_addr", "workspace",
             or None when the expected records are missing from the log
    '''
    aic_info_cmd = [
        'grep', '-r', '-C', '7',
        "\[AIC_INFO\] dev_func:{}".format(kernal_name),
        self.collection.collect_applog_path
    ]
    _, aic_info = utils.execute_command(aic_info_cmd)
    utils.print_info_log(
        "===============================\n{}\n=================================="
        .format(aic_info))

    # Sanity guard: the node header must be present for the rest of the
    # [AIC_INFO] block to be trustworthy.
    aic_info_all_regexp = (r"\[AIC_INFO\]\snode_name:(.*?),\snode_type:(.*?)"
                           r",\sstream_id:(\d+),\stask_id:(\d+)")
    if not re.findall(aic_info_all_regexp, aic_info, re.M):
        utils.print_warn_log(f"Failed to get {aic_info_all_regexp}")
        return

    aic_info_input_regexp = \
        r"\[AIC_INFO\]\sinput:(.*?);shape:(.*?);format:(.*?);dtype:(.*?);addr:(.*?)$"
    aic_info_input_ret = re.findall(aic_info_input_regexp, aic_info, re.M)
    if not aic_info_input_ret:
        utils.print_warn_log(f"Failed to get {aic_info_input_regexp}")
        return
    input_params = [
        {"index": idx, "shape": shape, "format": fmt,
         "dtype": dtype, "addr": addr}
        for idx, shape, fmt, dtype, addr in aic_info_input_ret
    ]

    aic_info_output_regexp = \
        r"\[AIC_INFO\]\soutput:(.*?);shape:(.*?);format:(.*?);dtype:(.*?);addr:(.*?)$"
    aic_info_output_ret = re.findall(aic_info_output_regexp, aic_info, re.M)
    if not aic_info_output_ret:
        utils.print_warn_log(f"Failed to get {aic_info_output_regexp}")
        return
    output_params = [
        {"index": idx, "shape": shape, "format": fmt,
         "dtype": dtype, "addr": addr}
        for idx, shape, fmt, dtype, addr in aic_info_output_ret
    ]

    # BUG FIX: the original pattern ended with a lazy group (.*?) which
    # always matched "", so workspace was unconditionally "0"; it also left
    # `workspace` unassigned (NameError) when the record was absent.
    aic_info_workspace_regex = r"\[AIC_INFO\]\sworkspace_bytes:(.*)"
    aic_info_workspace_ret = re.findall(aic_info_workspace_regex, aic_info,
                                        re.M)
    workspace = "0"
    if not aic_info_workspace_ret:
        utils.print_warn_log(f"Failed to get {aic_info_workspace_regex}")
    elif not aic_info_workspace_ret[0]:
        utils.print_info_log(f"get {aic_info_workspace_regex} is null")
    else:
        workspace = aic_info_workspace_ret[0]

    # NOTE(review): the original also grepped block_dim, dev_func,
    # tvm_magic, kernel_info, tiling_key, tiling_data and op_file_path here
    # but never used them in the returned dict; those dead extractions were
    # removed ([0][0] on single-group findall results was also broken —
    # it read only the first character of the match).
    return {
        "input_addr": input_params,
        "output_addr": output_params,
        "workspace": workspace,
    }