def main(argv):
    """Compare symbol sizes between two binaries.

    Takes exactly two input files, matches symbols by name, and reports
    every size change (including symbols present in only one input),
    sorted by size change descending then symbol name.

    Returns 0 on success; re-raises on failure after setting status.
    """
    status = 0
    try:
        config = Config().init(CONFIG)
        config.argparse.add_argument('inputs', metavar='FILE', nargs=2)
        config.parse(argv)
        config['args.fill_holes'] = False

        inputs = config.get('args.inputs')
        a_dfs = memdf.collect.collect_files(config, files=[inputs[0]])
        b_dfs = memdf.collect.collect_files(config, files=[inputs[1]])

        a_syms = a_dfs[SymbolDF.name].sort_values(by='symbol',
                                                  ignore_index=True)
        b_syms = b_dfs[SymbolDF.name].sort_values(by='symbol',
                                                  ignore_index=True)

        # TBD: Differences other than size, configurably.
        differences = []
        ai = a_syms.itertuples()
        bi = b_syms.itertuples()
        a = next(ai, None)
        b = next(bi, None)
        # Merge-join the two streams, which are both sorted by symbol name.
        while a and b:
            if a.symbol < b.symbol:
                # Symbol present only in A: removed.
                differences.append((-a.size, a.size, 0, a.symbol))
                a = next(ai, None)
                continue
            if a.symbol > b.symbol:
                # Symbol present only in B: added.
                differences.append((b.size, 0, b.size, b.symbol))
                b = next(bi, None)
                continue
            if a.size != b.size:
                differences.append((b.size - a.size, a.size, b.size, a.symbol))
            a = next(ai, None)
            b = next(bi, None)
        # Flush the element still in hand when the other stream ran out.
        # (Previously this held element was silently dropped, losing one
        # symbol from the report whenever the streams were unequal length.)
        if a is not None:
            differences.append((-a.size, a.size, 0, a.symbol))
        if b is not None:
            differences.append((b.size, 0, b.size, b.symbol))
        # Then flush the remainders of each iterator.
        for a in ai:
            differences.append((-a.size, a.size, 0, a.symbol))
        for b in bi:
            differences.append((b.size, 0, b.size, b.symbol))

        df = pd.DataFrame(differences,
                          columns=['change', 'a-size', 'b-size', 'symbol'])

        if config['report.demangle']:
            # Demangle early to sort by demangled name.
            df['symbol'] = df['symbol'].apply(memdf.report.demangle)
            config['report.demangle'] = False

        df.sort_values(by=['change', 'symbol'],
                       ascending=[False, True],
                       inplace=True)
        memdf.report.write_dfs(config, {'Differences': df})

    except Exception:
        status = 1
        raise  # Bare raise preserves the original traceback.
    return status
def main(argv):
    """Hex-dump gaps found in the loadable content of ELF inputs.

    For each 'gap' record collected from the input files, locate the
    containing section's raw data and print a hex dump of the gap to the
    configured output, skipping gaps below a per-section size limit.

    Returns 0 on success; re-raises on failure after setting status.
    """
    status = 0
    try:
        config = Config().init(CONFIG)
        config.argparse.add_argument('inputs', metavar='FILE', nargs='+')
        config.parse(argv)
        # Gap records require the elftools collection method.
        config['collect.method'] = 'elftools'
        config['args.tag_inputs'] = True

        dfs = memdf.collect.collect_files(config)

        # Per-input cache of the ELF handle plus per-section section
        # object, raw data, and reporting limit.
        elf = {}
        # Keep the underlying file streams so they can be closed on exit.
        # (Previously these handles were opened and never closed.)
        streams = []
        try:
            for filename in config['args.inputs']:
                stream = open(filename, 'rb')
                streams.append(stream)
                elf[filename] = {
                    'elffile': ELFFile(stream),
                    'section': {},
                    'data': {},
                    'limit': {},
                }

            with memdf.report.open_output(config) as fp:
                for gap in dfs['gap'].itertuples():
                    e = elf[gap.input]
                    if gap.section in e['section']:
                        # Cache hit: reuse section, data, and limit.
                        section = e['section'][gap.section]
                        data = e['data'][gap.section]
                        limit = e['limit'][gap.section]
                    else:
                        section = e['elffile'].get_section_by_name(gap.section)
                        data = section.data()
                        limit = memdf.select.get_limit(
                            config, 'section', gap.section)
                        e['section'][gap.section] = section
                        e['data'][gap.section] = data
                        e['limit'][gap.section] = limit
                    # Skip gaps below the configured reporting threshold.
                    if limit and gap.size < limit:
                        continue
                    offset = gap.address - section['sh_addr']
                    assert section['sh_size'] == len(data)
                    print('\n{:08X} length {} in section {} of {}'.format(
                        gap.address, gap.size, gap.section, gap.input),
                        file=fp)
                    # (Renamed from `i`, which shadowed the outer loop
                    # variable.)
                    for line in hexdump(data, offset, gap.size, gap.address):
                        print(line, file=fp)
        finally:
            for stream in streams:
                stream.close()
    except Exception:
        status = 1
        raise  # Bare raise preserves the original traceback.
    return status
def main(argv):
    # Compare symbols between exactly two input files, recording
    # (symbol, a-size, b-size) tuples via a merge-join over two
    # name-sorted symbol streams; None marks a side where the symbol
    # is absent.
    status = 0
    try:
        config = Config().init(CONFIG)
        config.argparse.add_argument('inputs', metavar='FILE', nargs=2)
        config.parse(argv)
        config['args.fill_holes'] = False

        inputs = config.get('args.inputs')
        a_dfs = memdf.collect.collect_files(config, files=[inputs[0]])
        b_dfs = memdf.collect.collect_files(config, files=[inputs[1]])

        a_syms = a_dfs[SymbolDF.name].sort_values(by='symbol')
        b_syms = b_dfs[SymbolDF.name].sort_values(by='symbol')

        # TBD: Differences other than size, configurably.
        differences = []
        ai = a_syms.itertuples()
        bi = b_syms.itertuples()
        while True:
            # Fetch the next symbol from each stream; None marks
            # exhaustion of that stream.
            if (a := next(ai, None)) is None:
                break
            if (b := next(bi, None)) is None:
                # Symbol present only in A.
                differences.append((a.symbol, a.size, None))
                break
            if a.symbol < b.symbol:
                # Symbol present only in A.
                # NOTE(review): the walrus fetch at the top of the loop
                # re-fetches BOTH a and b each iteration, so this
                # advance plus `continue` discards the held `b` (and
                # skips a symbol) — looks like a defect; confirm
                # against the full original file.
                differences.append((a.symbol, a.size, None))
                a = next(ai, None)
                continue
            if a.symbol > b.symbol:
                # Symbol present only in B (same caveat as above for
                # the held `a`).
                differences.append((b.symbol, None, b.size))
                b = next(bi, None)
                continue
            if a.size != b.size:
                differences.append((a.symbol, a.size, b.size))
            # NOTE(review): the visible source ends here, apparently
            # truncated mid-function — the `try:` has no matching
            # `except`/`finally`, and nothing advances past a matched
            # symbol pair. Confirm against the full original file.
def main(argv):
    # Collect a section-size summary for one binary and write it as a
    # JSON report (intended for github workflow consumption) plus a
    # plain-text report to stdout.
    #
    # Positional arguments: platform config target binary [output] [options]
    status = 0
    try:
        _, platform, config_name, target_name, binary, *args = argv
    except ValueError:
        program = pathlib.Path(argv[0])
        logging.error(
            """
            Usage: %s platform config target binary [output] [options]

            This is intended for use in github workflows.
            For other purposes, a general program for the same operations is
            %s/report_summary.py

            """, program.name, program.parent)
        return 1
    try:
        # `platform` may be either a path to a config file (whose stem
        # becomes the platform name) or a platform name resolved under
        # PLATFORM_CONFIG_DIR.
        config_file = pathlib.Path(platform)
        if config_file.is_file():
            platform = config_file.stem
        else:
            config_file = (PLATFORM_CONFIG_DIR / platform).with_suffix('.cfg')

        output_base = f'{platform}-{config_name}-{target_name}-sizes.json'
        if args and not args[0].startswith('-'):
            # Optional positional output: a directory (a trailing '/'
            # is created on demand) or a file path.
            out, *args = args
            output = pathlib.Path(out)
            if out.endswith('/') and not output.exists():
                output.mkdir(parents=True)
            if output.is_dir():
                output = output / output_base
        else:
            # Default: next to the input binary.
            output = pathlib.Path(binary).parent / output_base

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.collect.CONFIG,
            **memdf.select.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **CONFIG,
        })
        config.put('output.file', output)
        config.put('output.format', 'json_records')
        if config_file.is_file():
            config.read_config_file(config_file)
        else:
            logging.warning('Missing config file: %s', config_file)
        config.parse([argv[0]] + args)

        # Attach report metadata.
        config.put('output.metadata.platform', platform)
        config.put('output.metadata.config', config_name)
        config.put('output.metadata.target', target_name)
        config.put('output.metadata.time', config['timestamp'])
        config.put('output.metadata.input', binary)
        config.put('output.metadata.by', 'section')
        # Optional CI metadata keys, copied only when configured.
        for key in ['event', 'hash', 'parent', 'pr']:
            if value := config[key]:
                config.putl(['output', 'metadata', key], value)

        collected: DFs = memdf.collect.collect_files(config, [binary])

        sections = collected[SectionDF.name]
        section_summary = sections[['section',
                                    'size']].sort_values(by='section')
        section_summary.attrs['name'] = "section"

        summaries = {
            'section': section_summary,
        }

        # Write configured (json) report to the output file.
        memdf.report.write_dfs(config, summaries)

        # Write text report to stdout.
        memdf.report.write_dfs(config, summaries, sys.stdout, 'simple')
        # NOTE(review): the visible source ends here; the enclosing
        # `try:` has no matching `except` and there is no final
        # return — the original file appears truncated at this point.
def main(argv):
    # Collect section- and segment-level size summaries for one binary
    # and write them as a JSON report (for github workflows) plus a
    # plain-text report to stdout.
    #
    # Positional arguments: platform config target binary [output] [options]
    status = 0
    try:
        _, platform, config_name, target_name, binary, *args = argv
    except ValueError:
        program = pathlib.Path(argv[0])
        logging.error(
            """
            Usage: %s platform config target binary [output] [options]

            This is intended for use in github workflows.
            For other purposes, a general program for the same operations is
            %s/report_summary.py

            """, program.name, program.parent)
        return 1
    try:
        # `platform` may be either a path to a config file (whose stem
        # becomes the platform name) or a platform name resolved under
        # PLATFORM_CONFIG_DIR.
        config_file = pathlib.Path(platform)
        if config_file.is_file():
            platform = config_file.stem
        else:
            config_file = (PLATFORM_CONFIG_DIR / platform).with_suffix('.cfg')

        output_base = f'{platform}-{config_name}-{target_name}-sizes.json'
        if args and not args[0].startswith('-'):
            # Optional positional output: a directory (a trailing '/'
            # is created on demand) or a file path.
            out, *args = args
            output = pathlib.Path(out)
            if out.endswith('/') and not output.exists():
                output.mkdir(parents=True)
            if output.is_dir():
                output = output / output_base
        else:
            # Default: next to the input binary.
            output = pathlib.Path(binary).parent / output_base

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.collect.CONFIG,
            **memdf.select.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **CONFIG,
        })
        config.put('output.file', output)
        config.put('output.format', 'json_records')
        if config_file.is_file():
            config.read_config_file(config_file)
        else:
            logging.warning('Missing config file: %s', config_file)
        config.parse([argv[0]] + args)

        # Attach report metadata.
        config.put('output.metadata.platform', platform)
        config.put('output.metadata.config', config_name)
        config.put('output.metadata.target', target_name)
        config.put('output.metadata.time', config['timestamp'])
        config.put('output.metadata.input', binary)
        config.put('output.metadata.by', 'section')
        # Optional CI metadata keys, copied only when configured.
        for key in ['event', 'hash', 'parent', 'pr', 'ref']:
            if value := config[key]:
                config.putl(['output', 'metadata', key], value)

        collected: DFs = memdf.collect.collect_files(config, [binary])

        # Aggregate loaded segments, by writable (flash) or not (RAM).
        # NOTE(review): flag bit 2 is ELF PF_W (writable); pairing
        # 'writable' with flash here looks inverted (writable segments
        # are normally RAM) — confirm the intended labeling.
        segments = collected[SegmentDF.name]
        segments['segment'] = segments.index
        # wr: 1 when the segment is writable (PF_W set), else 0.
        segments['wr'] = ((segments['flags'] & 2) != 0).convert_dtypes(
            convert_boolean=False, convert_integer=True)
        segment_summary = segments[segments['type'] == 'PT_LOAD'][[
            'wr', 'size'
        ]].groupby('wr').aggregate(np.sum).reset_index().astype(
            {'size': np.int64})
        segment_summary.attrs['name'] = "wr"

        # Join each section to its containing segment to carry the
        # writable flag into the per-section summary.
        sections = collected[SectionDF.name]
        sections = sections.join(on='segment',
                                 how='left',
                                 other=segments,
                                 rsuffix='-segment')
        section_summary = sections[['section', 'size',
                                    'wr']].sort_values(by='section')
        section_summary.attrs['name'] = "section"

        summaries = {
            'section': section_summary,
            'memory': segment_summary,
        }

        # Write configured (json) report to the output file.
        memdf.report.write_dfs(config, summaries)

        # Write text report to stdout.
        memdf.report.write_dfs(config,
                               summaries,
                               sys.stdout,
                               'simple',
                               floatfmt='.0f')
        # NOTE(review): the visible source ends here; the enclosing
        # `try:` has no matching `except` and there is no final
        # return — the original file appears truncated at this point.