def parse_innovus_timing_report(report_file_path):
    """Extract timing metrics from an Innovus timing report.

    Args:
        report_file_path: path to the Innovus timing report file.

    Returns:
        dict with keys 'timing_wns', 'timing_tns' and
        'timing_violating_paths' (a value stays None when its line is not
        found in the report), or None when the file cannot be read.
    """
    logger = get_logger()
    metrics = {
        'timing_wns': None,
        'timing_tns': None,
        'timing_violating_paths': None
    }

    try:
        with open(report_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     report_file_path)
        return

    # Slack. Raw strings: the original non-raw patterns ("\(", "\-")
    # contain invalid escape sequences that warn on modern Python.
    regex = r'Total negative slacks\(TNS\)= *(?P<tns>[\-\.0-9]*).*'
    m = re.search(regex, report)
    if m:
        metrics['timing_tns'] = float(m.group('tns'))

    regex = r'Worst negative slacks\(WNS\)= *(?P<wns>[\-\.0-9]*).*'
    m = re.search(regex, report)
    if m:
        metrics['timing_wns'] = float(m.group('wns'))

    regex = r'Number of timing violation paths= *(?P<paths>[0-9]*).*'
    m = re.search(regex, report)
    if m:
        metrics['timing_violating_paths'] = int(m.group('paths'))

    logger.info('Successfully extracted metrics from %s', report_file_path)
    return metrics
def parse_openroad_log(log_file_path, tool):
    """Dispatch an OpenROAD log file to the parser for *tool*.

    Currently only 'opensta' (case-insensitive) is supported; any other
    tool name falls through and returns None.
    """
    logger = get_logger()

    # Guard clause: bail out when no tool name was given.
    if tool is None or tool == "":
        logger.warning('No tool specified')
        return

    if tool.lower() == 'opensta':
        return _parse_opensta_log(log_file_path)
def parse_innovus_power_report(report_file_path):
    """Extract power metrics from an Innovus power report.

    Args:
        report_file_path: path to the Innovus power report file.

    Returns:
        dict of power totals/percentages per category plus the overall
        total (values stay None when not found), or None when the file
        cannot be read.
    """
    logger = get_logger()
    # This could be substituted by a default dictionary,
    # but keeping it this way to see what metrics this function reports.
    metrics = {
        'power_internal_total': None,
        'power_switching_total': None,
        'power_leakage_total': None,
        'power_internal_percentage': None,
        'power_switching_percentage': None,
        'power_leakage_percentage': None,
        'power_total': None,
    }

    try:
        with open(report_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     report_file_path)
        return

    # The three per-category lines share one layout:
    #   "Total <Category> Power:  <value>  <percentage>%"
    # so a single data-driven loop replaces three copy-pasted sections.
    categories = (
        ('Internal', 'power_internal_total', 'power_internal_percentage'),
        ('Switching', 'power_switching_total', 'power_switching_percentage'),
        ('Leakage', 'power_leakage_total', 'power_leakage_percentage'),
    )
    for category, total_key, pct_key in categories:
        regex = (r'Total ' + category +
                 r' Power:[ \t]+(?P<total>[0-9\.]*)[ \t]+(?P<percentage>[%0-9\.]*)%')
        m = re.search(regex, report)
        if m:
            metrics[total_key] = float(m.group('total'))
            metrics[pct_key] = float(m.group('percentage'))

    # Total power
    regex = r'Total Power:[ \t]+(?P<total>[0-9\.]*)'
    m = re.search(regex, report)
    if m:
        metrics['power_total'] = float(m.group('total'))

    logger.info('Successfully extracted metrics from %s', report_file_path)
    return metrics
def _parse_opensta_log(log_file_path):
    """Extract slack, area and utilization metrics from an OpenSTA log.

    Args:
        log_file_path: path to the OpenSTA log file.

    Returns:
        dict with slack/area/util metrics (values stay None when not
        found), or None when the file cannot be read.
    """
    logger = get_logger()
    metrics = {
        'slack__negative__total': None,
        'slack__negative__worst': None,
        'std__area__total': None,
        'util': None
    }

    try:
        with open(log_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     log_file_path)
        return

    # Slack. TNS/WNS values are negative (or zero), so the pattern must
    # accept a leading minus sign -- the original class ([0-9\.]*) could
    # never capture a negative slack. The log may report slack several
    # times; the last occurrence wins.
    regex = r'tns (?P<tns>-?[0-9\.]*)\n'
    m = re.findall(regex, report)
    if m:
        metrics['slack__negative__total'] = float(m[-1])

    regex = r'wns (?P<wns>-?[0-9\.]*)\n'
    m = re.findall(regex, report)
    if m:
        metrics['slack__negative__worst'] = float(m[-1])

    # Area & utilization (two groups -> findall returns tuples).
    regex = r'Design area (?P<area>[0-9\.]*) u\^2 (?P<util>[0-9\.]*)\% utilization.'
    m = re.findall(regex, report)
    if m:
        metrics['std__area__total'] = float(m[-1][0])
        metrics['util'] = float(m[-1][1])

    logger.info('Successfully extracted metrics from %s', log_file_path)
    return metrics
def parse_innovus_log(log_file_path):
    """Extract runtime, memory and area metrics from an Innovus log.

    Args:
        log_file_path: path to the Innovus log file.

    Returns:
        dict with compute-time/memory/area metrics (values stay None when
        not found), or None when the file cannot be read.
    """
    logger = get_logger()
    metrics = {
        'compute_cpu_time_total': None,
        'compute_real_time_total': None,
        'compute_mem_total': None,
        'area_stdcell': None,
        'area_total': None
    }

    try:
        with open(log_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     log_file_path)
        return

    # Stats: the "Ending" banner carries total cpu/real time and peak
    # memory. Raw strings replace the original invalid escapes (\", \-).
    regex = (r'--- Ending "Innovus" \(totcpu=(?P<cpu_total>[\-0-9\.:]*), '
             r'real=(?P<time_total>[\-0-9\.:]*), '
             r'mem=(?P<mem_total>[\-0-9\.]*)M\) ---')
    m = re.search(regex, report)
    if m:
        # Times come as clock strings (e.g. "h:mm:ss"); convert to seconds.
        metrics['compute_cpu_time_total'] = \
            _time_string_to_seconds(m.group('cpu_total'))
        metrics['compute_real_time_total'] = \
            _time_string_to_seconds(m.group('time_total'))
        metrics['compute_mem_total'] = float(m.group('mem_total'))

    # Area (um^2 figures in parentheses; the site counts are skipped).
    regex = (r' *= stdcell_area [0-9\.]* sites \((?P<stdcell_area>[0-9\.]*) um\^2\) '
             r'/ alloc_area [0-9\.]* sites \((?P<total_area>[0-9\.]*) um\^2\).*')
    m = re.search(regex, report)
    if m:
        metrics['area_stdcell'] = int(float(m.group('stdcell_area')))
        metrics['area_total'] = int(float(m.group('total_area')))

    logger.info('Successfully extracted metrics from %s', log_file_path)
    return metrics
def parse_innovus_conn_report(report_file_path):
    """Count open nets reported in an Innovus connectivity report.

    Args:
        report_file_path: path to the Innovus connectivity report file.

    Returns:
        dict with key 'conn_open_nets', or None when the file cannot
        be read.
    """
    logger = get_logger()
    # This could be substituted by a default dictionary,
    # but keeping it this way to see what metrics this function reports.
    metrics = {
        'conn_open_nets': None
    }

    try:
        with open(report_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     report_file_path)
        return

    # Open nets: every open net prints one marker line, so the number of
    # matches is the number of open nets.
    open_net_markers = re.findall('has regular routing with opens', report)
    metrics['conn_open_nets'] = len(open_net_markers)

    logger.info('Successfully extracted metrics from %s', report_file_path)
    return metrics
# Unit tests for the Innovus connectivity (CONN) report parser.
# Each test is skipped (with a warning) when its private report fixture
# is absent from the local 'data' directory.
import unittest
import os
import pathlib

from edaac.metrics.parsers import parse_innovus_conn_report
from edaac.log import get_logger

logger = get_logger()


class TestInnovusCONN1(unittest.TestCase):
    # Fixture expected to contain no open nets.
    def test(self):
        report_file = os.path.join(
            pathlib.Path(__file__).parent.absolute(), 'data', 'conn1.rpt')
        metrics = {'conn_open_nets': 0}
        if os.path.exists(report_file):
            result = parse_innovus_conn_report(report_file)
            self.assertDictEqual(metrics, result)
        else:
            # NOTE(review): message says "DRC" but this test reads a CONN
            # report -- confirm the intended wording.
            logger.warning('Skipping private DRC report file %s' % report_file)


class TestInnovusCONN2(unittest.TestCase):
    # Fixture expected to contain 22 open nets.
    def test(self):
        report_file = os.path.join(
            pathlib.Path(__file__).parent.absolute(), 'data', 'conn2.rpt')
        metrics = {'conn_open_nets': 22}
        if os.path.exists(report_file):
            result = parse_innovus_conn_report(report_file)
            # NOTE(review): the visible source ends here -- the assertion
            # comparing `result` to `metrics` (and the else branch, as in
            # TestInnovusCONN1) appears to be truncated.
def parse_yosys_log(log_file_path):
    """Extract synthesis metrics from a Yosys log.

    Args:
        log_file_path: path to the Yosys log file.

    Returns:
        dict of version/cell/area/wire/memory/warning/runtime metrics
        (values stay None when not found), or None when the file cannot
        be read.
    """
    logger = get_logger()
    metrics = {
        'run__synth__yosys_version': None,
        'synth__inst__num__total': None,
        'synth__inst__stdcell__area__total': None,
        'synth__wire__num__total': None,
        'synth__wirebits__num__total': None,
        'synth__memory__num__total': None,
        'synth__memorybits__num__total': None,
        'run__synth__warning__total': None,
        'run__synth__warning__unique__total': None,
        'run__synth__cpu__total': None,
        'run__synth__mem__total': None
    }

    try:
        with open(log_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     log_file_path)
        return

    # Version
    regex = r'Yosys (?P<yosys_version>[0-9]+.*)\n'
    m = re.search(regex, report)
    if m:
        metrics['run__synth__yosys_version'] = m.group('yosys_version')

    # Number of cell instances (last "stat" section wins).
    regex = r'.*Number of cells: *(?P<number_of_cells>[0-9]*).*'
    matches = re.findall(regex, report)
    if matches:
        metrics['synth__inst__num__total'] = int(float(matches[-1]))

    # Std-cell area
    regex = r'.*Chip area for module .*\: *(?P<chip_area>[0-9\.]*).*'
    matches = re.findall(regex, report)
    if matches:
        metrics['synth__inst__stdcell__area__total'] = float(matches[-1])

    # Wire and memory counters all share the "Number of <x>: <n>" layout,
    # so handle them with one table-driven loop (last occurrence wins).
    int_counters = (
        (r'.*Number of wires.*\: *(?P<n>[0-9\.]*).*',
         'synth__wire__num__total'),
        (r'.*Number of wire bits.*\: *(?P<n>[0-9\.]*).*',
         'synth__wirebits__num__total'),
        (r'.*Number of memories.*\: *(?P<n>[0-9\.]*).*',
         'synth__memory__num__total'),
        (r'.*Number of memory bits.*\: *(?P<n>[0-9\.]*).*',
         'synth__memorybits__num__total'),
    )
    for regex, key in int_counters:
        matches = re.findall(regex, report)
        if matches:
            metrics[key] = int(matches[-1])

    # Warnings. NOTE: the original pattern was broken across two source
    # lines mid-string; reconstructed here as a single-line pattern.
    regex = (r'Warnings: (?P<warning_unique>[0-9]*) unique messages, '
             r'(?P<warning_total>[0-9]*) total')
    m = re.search(regex, report)
    if m:
        metrics['run__synth__warning__total'] = int(m.group('warning_total'))
        metrics['run__synth__warning__unique__total'] = int(
            m.group('warning_unique'))

    # Runtime and memory
    regex = (r'.*CPU\: user (?P<cpu_user_time>[0-9\.]*)s system'
             r'.*MEM\: (?P<memory>[0-9\.]*) MB peak.*')
    m = re.search(regex, report)
    if m:
        metrics['run__synth__cpu__total'] = float(m.group('cpu_user_time'))
        metrics['run__synth__mem__total'] = float(m.group('memory'))

    logger.info('Successfully extracted metrics from %s', log_file_path)
    return metrics
def parse_innovus_drc_report(report_file_path):
    """Extract DRC violation metrics from an Innovus DRC report.

    Args:
        report_file_path: path to the Innovus DRC report file.

    Returns:
        dict of violation counts and accumulated violation areas, or
        None when the file cannot be read.
    """
    logger = get_logger()
    # This could be substituted by a default dictionary,
    # but keeping it this way to see what metrics this function reports.
    metrics = {
        'drv_total': None,
        'drv_short_metal_total': None,
        'drv_short_metal_area': None,
        'drv_short_cut_total': None,
        'drv_short_cut_area': None,
        'drv_out_of_die_total': None,
        'drv_out_of_die_area': None,
        'drv_spacing_total': None,
        'drv_spacing_parallel_run_length_total': None,
        'drv_spacing_eol_total': None,
        'drv_spacing_cut_total': None,
        'drv_min_area_total': None
    }

    try:
        with open(report_file_path, 'r') as f:
            report = f.read()
    except Exception:
        logger.error('Can\'t read report file: %s. Skipping ..',
                     report_file_path)
        return

    # DRV total
    regex = r'Total Violations +: +[0-9]+'
    matches = re.findall(regex, report)
    for match in matches:
        metrics['drv_total'] = int(match.split(':')[1].strip())

    # Short and out-of-die violations share an identical
    # "<header>\nBounds : (x1, y1) (x2, y2)" layout, so one helper
    # replaces the three copy-pasted count/area sections.
    (metrics['drv_short_metal_total'],
     metrics['drv_short_metal_area']) = _sum_violation_areas(
        report, r'SHORT: \( Metal Short \) +.*\nBounds +: +.*\n')

    (metrics['drv_short_cut_total'],
     metrics['drv_short_cut_area']) = _sum_violation_areas(
        report, r'SHORT: \( Cut Short \) +.*\nBounds +: +.*\n')

    (metrics['drv_out_of_die_total'],
     metrics['drv_out_of_die_area']) = _sum_violation_areas(
        report, r'SHORT: \( Out Of Die \) +.*\nBounds +: +.*\n')

    # Spacing violations (counts only)
    metrics['drv_spacing_eol_total'] = len(re.findall(
        r'EndOfLine: \( EndOfLine Spacing \) +.*\nBounds +: +.*\n', report))
    metrics['drv_spacing_parallel_run_length_total'] = len(re.findall(
        r'SPACING: \( ParallelRunLength Spacing \) +.*\nBounds +: +.*\n',
        report))
    metrics['drv_spacing_cut_total'] = len(re.findall(
        r'CUTSPACING: +.*\nBounds +: +.*\n', report))
    metrics['drv_spacing_total'] = \
        metrics['drv_spacing_eol_total'] + \
        metrics['drv_spacing_parallel_run_length_total'] + \
        metrics['drv_spacing_cut_total']

    # Minimum area violations
    metrics['drv_min_area_total'] = len(re.findall(
        r'MAR: +\( +Minimum Area +\) +.*\nBounds +: +.*\n', report))

    logger.info('Successfully extracted metrics from %s', report_file_path)
    return metrics


def _sum_violation_areas(report, regex):
    """Count violations matched by *regex* and sum their bounding-box areas.

    Each match must span two lines: a violation header and a "Bounds"
    line holding two (x, y) corner coordinates. NOTE: the bounds pattern
    was broken across two source lines mid-string in the original;
    reconstructed here.

    Returns:
        (count, total_area) where total_area is rounded to 8 decimal
        places, matching the original formatting.
    """
    bounds_regex = (r'\((?P<x1>[\-0-9\. ]*),(?P<y1>[\-0-9\. ]*)\) +'
                    r'\((?P<x2>[\-0-9\. ]*),(?P<y2>[\-0-9\. ]*)\)')
    matches = re.findall(regex, report)
    total_area = 0.0
    for match in matches:
        _violation, bounds = match.strip().split('\n')
        m = re.search(bounds_regex, bounds)
        x1, x2 = float(m.group('x1').strip()), float(m.group('x2').strip())
        y1, y2 = float(m.group('y1').strip()), float(m.group('y2').strip())
        total_area += abs(x1 - x2) * abs(y1 - y2)
    return len(matches), float(format(total_area, '.8f'))