def execute(source_path, output_path, device_id):
    """
    Execute the parser.

    Args:
        source_path (str): The source file path.
        output_path (str): The output file path.
        device_id (str): The device id.
    """
    col_names = ["node_name", "start_time", "end_time", "queue_size"]
    minddata_aicpu_source_path = get_file_join_name(
        input_path=source_path, file_name='DATA_PREPROCESS.dev.AICPUMI')
    if not minddata_aicpu_source_path:
        minddata_aicpu_source_path = get_file_join_name(
            input_path=os.path.join(source_path, "data"),
            file_name='DATA_PREPROCESS.dev.AICPUMI')
        if not minddata_aicpu_source_path:
            return
    minddata_aicpu_output_path = os.path.join(
        output_path, "minddata_aicpu_" + device_id + ".txt")

    minddata_aicpu_data = MinddataParser.parse_minddata_aicpu_data(
        minddata_aicpu_source_path)
    if minddata_aicpu_data:
        # Write the column header first (is_start=True starts a fresh file),
        # then append the parsed rows. Passing is_start=True on the second
        # call as well would discard the header that was just written.
        fwrite_format(minddata_aicpu_output_path, " ".join(col_names),
                      is_start=True)
        fwrite_format(minddata_aicpu_output_path, minddata_aicpu_data)
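# A minimal usage sketch for the entry point above. The paths and device id
# here are hypothetical, not values taken from the parser: execute() searches
# <source_path> (and its 'data' subdirectory) for a
# 'DATA_PREPROCESS.dev.AICPUMI' file and, when found, writes the parsed rows
# to 'minddata_aicpu_<device_id>.txt' under <output_path>.
MinddataParser.execute(
    source_path="/tmp/profiler/JOB_XXX",   # hypothetical profiling job dir
    output_path="/tmp/profiler/output",    # hypothetical output dir
    device_id="0")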
def execute(self):
    """Execute the parser, get result data, and write it to the output file."""
    if not os.path.exists(self._source_file_name):
        logger.info("Did not find the aicpu profiling source file.")
        return

    with open(self._source_file_name, 'rb') as ai_cpu_data:
        # Records are separated by NUL bytes; replace the separators with
        # ' ___ ' and strip the leading "b'" and trailing "'" left over from
        # the bytes repr, so the payload can be split as a plain string.
        ai_cpu_str = str(ai_cpu_data.read().replace(
            b'\n\x00', b' ___ ').replace(b'\x00', b' ___ '))[2:-1]
        ai_cpu_lines = ai_cpu_str.split(" ___ ")

    result_list = list()
    ai_cpu_total_time_summary = 0
    # Node serial number.
    serial_number = 1
    for i in range(len(ai_cpu_lines) - 1):
        node_line = ai_cpu_lines[i]
        thread_line = ai_cpu_lines[i + 1]
        if "Node" in node_line and "Thread" in thread_line:
            # Get the node data from node_line.
            node_list = node_line.split(',')
            thread_list = thread_line.split(',')
            result = self._get_kernel_result(serial_number, node_list, thread_list)
            if result is None:
                continue
            result_list.append(result)
            # Accumulate the total time.
            total_time = result[2]
            ai_cpu_total_time_summary += total_time
            # Increase the node serial number.
            serial_number += 1
        elif "Node" in node_line and "Thread" not in thread_line:
            node_type_name = node_line.split(',')[0].split(':')[-1]
            logger.warning("The node type %s cannot find its thread data.",
                           node_type_name)

    if result_list:
        ai_cpu_total_time = format(ai_cpu_total_time_summary, '.6f')
        result_list.append(["AI CPU Total Time(ms):", ai_cpu_total_time])
        fwrite_format(self._output_filename,
                      " ".join(self._dst_file_column_title),
                      is_start=True, is_print=True)
        fwrite_format(self._output_filename, result_list, is_print=True)

    # Keep the result for timeline display.
    self._result_list = result_list
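# The _get_kernel_result helper used above is defined elsewhere. The sketch
# below is NOT the real implementation; the field layout of node_list and
# thread_list (a 'Node:<type>' first field, ':'-delimited start/end times in
# ms) is an assumption for illustration only. It keeps total_time at index 2,
# which is the slot the caller reads.
def _get_kernel_result_sketch(serial_number, node_list, thread_list):
    """Return [serial_number, node_type_name, total_time] or None."""
    try:
        node_type_name = node_list[0].split(':')[-1]      # hypothetical field
        run_start = float(thread_list[1].split(':')[-1])  # hypothetical field
        run_end = float(thread_list[2].split(':')[-1])    # hypothetical field
        return [serial_number, node_type_name, run_end - run_start]
    except (IndexError, ValueError):
        # Malformed record: the caller skips None results.
        return None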
def _write_op_time_into_file(self, result_data):
    """
    Write the metadata of operators into the file, including
    op name, average time, and stream id.

    Args:
        result_data (str): The metadata to be written into the file:
            'op_name_1', 'avg_time_1', 'stream_id_1',
            'op_name_2', 'avg_time_2', 'stream_id_2',
            ...
    """
    fwrite_format(self._output_filename, data_source=self._dst_file_title,
                  is_start=True)
    fwrite_format(self._output_filename, data_source=self._dst_file_column_title)
    fwrite_format(self._output_filename, data_source=result_data)
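# fwrite_format is provided by the profiler utilities. The sketch below is an
# assumption-based stand-in, not the actual implementation; it captures only
# the behavior the call sites in this file rely on: is_start=True starts a
# fresh file, is_print=True echoes the output, and list data is written row
# by row while everything else is written as a single line.
def fwrite_format_sketch(output_path, data_source=None, is_print=False,
                         is_start=False):
    import os
    if is_start and os.path.exists(output_path):
        os.remove(output_path)  # start a fresh file
    with open(output_path, 'a') as output:
        rows = data_source if isinstance(data_source, list) else [data_source]
        for row in rows:
            line = (" ".join(map(str, row))
                    if isinstance(row, (list, tuple)) else str(row))
            output.write(line + "\n")
            if is_print:
                print(line)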
def _analyser_op_info(self):
    """Analyse the operator information."""
    integrator = Integrator(self._output_path, self._dev_id)
    integrator.integrate()

    aicore_type_result = self._query_op_type_info()
    detail_file_path = os.path.join(
        self._output_path,
        'output_op_compute_time_detail_{}.txt'.format(self._dev_id))
    fwrite_format(detail_file_path, data_source='title:op compute time')
    display_names = [
        'optype_name', 'compute_time(ms, per-step)',
        'called_times(per-step)', 'percent'
    ]
    fwrite_format(detail_file_path, data_source=" ".join(display_names),
                  is_print=True)
    fwrite_format(detail_file_path, data_source=aicore_type_result,
                  is_print=True)

    op_type_order = [item[0] for item in aicore_type_result]
    aicore_detail_result = self._query_op_detail_info(op_type_order)

    fwrite_format(detail_file_path, data_source='', is_print=True)
    fwrite_format(detail_file_path, data_source='Detail:', is_print=True)
    fwrite_format(detail_file_path,
                  data_source=" ".join(
                      aicore_detail_result.get('col_name_detail')),
                  is_print=True)
    fwrite_format(detail_file_path,
                  data_source=aicore_detail_result.get('object'),
                  is_print=True)
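# For illustration, each row returned by _query_op_type_info lines up with the
# display_names header above, and op_type_order is built from its first
# column. The values below are hypothetical, not real profiling output:
#
#   optype_name  compute_time(ms, per-step)  called_times(per-step)  percent
#   MatMul       1.2340                      16                      42.10
#   Conv2D       0.9876                      8                       33.70
example_aicore_type_result = [
    ['MatMul', 1.2340, 16, 42.10],  # hypothetical row
    ['Conv2D', 0.9876, 8, 33.70],   # hypothetical row
]
example_op_type_order = [item[0] for item in example_aicore_type_result]
# -> ['MatMul', 'Conv2D']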
def execute(self):
    """
    Execute the parser, get result data, and write it to the output file.

    Returns:
        bool, True if the hwts log is parsed successfully.
    """
    content_format = ['QIIIIIIIIIIII', 'QIIQIIIIIIII', 'IIIIQIIIIIIII']
    log_type = [
        'Start of task', 'End of task', 'Start of block',
        'End of block', 'Block PMU'
    ]

    result_data = ""

    self._source_flie_name = validate_and_normalize_path(
        self._source_flie_name)
    with open(self._source_flie_name, 'rb') as hwts_data:
        while True:
            # Each hwts log record is 64 bytes.
            line = hwts_data.read(64)
            if not line:
                break
            if not line.strip():
                continue
            byte_first_four = struct.unpack('BBHHH', line[0:8])
            # byte_first[0:4] is the count, byte_first[4] is is_warn_res0_ov,
            # and byte_first[5:8] is the log record type (ms_type).
            byte_first = bin(byte_first_four[0]).replace('0b', '').zfill(8)
            ms_type = byte_first[-3:]
            is_warn_res0_ov = byte_first[4]
            cnt = int(byte_first[0:4], 2)
            core_id = byte_first_four[1]
            blk_id, task_id = byte_first_four[3], byte_first_four[4]
            if ms_type in ['000', '001', '010']:  # log type 0, 1, 2
                result = struct.unpack(content_format[0], line[8:])
                syscnt = result[0]
                stream_id = result[1]
            elif ms_type == '011':  # log type 3
                result = struct.unpack(content_format[1], line[8:])
                syscnt = result[0]
                stream_id = result[1]
            elif ms_type == '100':  # log type 4
                result = struct.unpack(content_format[2], line[8:])
                stream_id = result[2]
                if is_warn_res0_ov == '0':
                    syscnt = result[4]
                else:
                    syscnt = None
            else:
                logger.info("Profiling: invalid hwts log record type %s",
                            ms_type)
                continue

            if int(task_id) < 25000:
                task_id = str(stream_id) + "_" + str(task_id)
            result_data += ("%-14s %-4s %-8s %-9s %-8s %-15s %s\n"
                            % (log_type[int(ms_type, 2)], cnt, core_id,
                               blk_id, task_id, syscnt, stream_id))

    fwrite_format(self._output_filename, data_source=self._dst_file_title,
                  is_start=True)
    fwrite_format(self._output_filename,
                  data_source=self._dst_file_column_title)
    fwrite_format(self._output_filename, data_source=result_data)
    return True
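# A self-contained sketch of the 64-byte record header decoding used above,
# run against a hand-crafted record. The payload values (core 3, block 7,
# task 42) are arbitrary and exist only to exercise the bit layout.
import struct

record = struct.pack('BBHHH', 0b00010001, 3, 0, 7, 42) + b'\x00' * 56
byte_first_four = struct.unpack('BBHHH', record[0:8])
byte_first = bin(byte_first_four[0]).replace('0b', '').zfill(8)
print(byte_first[-3:])                         # ms_type -> '001' (End of task)
print(int(byte_first[0:4], 2))                 # cnt -> 1
print(byte_first_four[1])                      # core_id -> 3
print(byte_first_four[3], byte_first_four[4])  # blk_id -> 7, task_id -> 42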