Example #1
def postprocess_config(config: Config, _key: str, _info: Mapping) -> None:
    """Postprocess --github-repository."""
    if config['github.repository']:
        owner, repo = config.get('github.repository').split('/', 1)
        config.put('github.owner', owner)
        config.put('github.repo', repo)
        if not config['github.token']:
            config['github.token'] = os.environ.get('GITHUB_TOKEN')
            if not config['github.token']:
                logging.error('Missing --github-token')
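A quick check of the hook's effect: it only needs mapping-style access (indexing, get, put), so a dict-backed stand-in is enough. This is a minimal sketch, assuming postprocess_config above is available with its module context; FakeConfig and every value below are hypothetical, not part of memdf.

import os


class FakeConfig(dict):
    """Hypothetical dict-backed stand-in for memdf's Config (illustration only)."""

    def __missing__(self, key):
        # Treat unknown keys as unset, like a permissive config lookup.
        return None

    def put(self, key, value):
        self[key] = value


cfg = FakeConfig({'github.repository': 'example-org/example-repo'})
os.environ['GITHUB_TOKEN'] = 'dummy-token'  # illustrative value only
postprocess_config(cfg, 'github.repository', {})
assert cfg['github.owner'] == 'example-org'
assert cfg['github.repo'] == 'example-repo'
assert cfg['github.token'] == 'dummy-token'  # falls back to the environment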
Example #2
def postprocess_output_metadata(config: Config, key: str) -> None:
    """For --output-metadata=KEY:VALUE list, convert to dictionary."""
    assert key == 'output.metadata'
    metadata = {}
    for s in config.get(key):
        if ':' in s:
            k, v = s.split(':', 1)
        else:
            k, v = s, True
        metadata[k] = v
    config.put(key, metadata)
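The same KEY:VALUE conversion can be shown as a plain function over a list of strings; convert_metadata is a hypothetical name used only for this illustration. Note that split(':', 1) keeps any further colons inside the value.

def convert_metadata(values):
    """Plain-list equivalent of the conversion above: 'k:v' maps k to v, a bare key maps to True."""
    metadata = {}
    for s in values:
        k, v = s.split(':', 1) if ':' in s else (s, True)
        metadata[k] = v
    return metadata


assert convert_metadata(['branch:main', 'nightly', 'ref:refs/heads/main']) == {
    'branch': 'main',
    'nightly': True,
    'ref': 'refs/heads/main',  # only the first colon splits key from value
}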
Example #3
def postprocess_report_by(config: Config, key: str, info: Mapping) -> None:
    """For --report-by=region, select all sections."""
    assert key == 'report.by'
    if config.get(key) == 'region':
        config.put('section.select-all', True)
Example #4
def main(argv):
    status = 0

    try:
        _, platform, config_name, target_name, binary, *args = argv
    except ValueError:
        program = pathlib.Path(argv[0])
        logging.error(
            """
            Usage: %s platform config target binary [output] [options]

            This is intended for use in github workflows.
            For other purposes, a general program for the same operations is
            %s/report_summary.py

            """, program.name, program.parent)
        return 1

    try:
        config_file = pathlib.Path(platform)
        if config_file.is_file():
            platform = config_file.stem
        else:
            config_file = (PLATFORM_CONFIG_DIR / platform).with_suffix('.cfg')

        output_base = f'{platform}-{config_name}-{target_name}-sizes.json'
        if args and not args[0].startswith('-'):
            out, *args = args
            output = pathlib.Path(out)
            if out.endswith('/') and not output.exists():
                output.mkdir(parents=True)
            if output.is_dir():
                output = output / output_base
        else:
            output = pathlib.Path(binary).parent / output_base

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.collect.CONFIG,
            **memdf.select.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **CONFIG,
        })
        config.put('output.file', output)
        config.put('output.format', 'json_records')
        if config_file.is_file():
            config.read_config_file(config_file)
        else:
            logging.warning('Missing config file: %s', config_file)
        config.parse([argv[0]] + args)

        config.put('output.metadata.platform', platform)
        config.put('output.metadata.config', config_name)
        config.put('output.metadata.target', target_name)
        config.put('output.metadata.time', config['timestamp'])
        config.put('output.metadata.input', binary)
        config.put('output.metadata.by', 'section')
        for key in ['event', 'hash', 'parent', 'pr']:
            if value := config[key]:
                config.putl(['output', 'metadata', key], value)

        collected: DFs = memdf.collect.collect_files(config, [binary])

        sections = collected[SectionDF.name]
        section_summary = sections[['section',
                                    'size']].sort_values(by='section')
        section_summary.attrs['name'] = "section"

        summaries = {
            'section': section_summary,
        }

        # Write configured (json) report to the output file.
        memdf.report.write_dfs(config, summaries)

        # Write text report to stdout.
        memdf.report.write_dfs(config, summaries, sys.stdout, 'simple')

    except Exception:
        # Report the failure and return a non-zero status so a CI step sees it.
        logging.exception('Size report failed')
        status = 1

    return status
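When used as a standalone script (as the usage message suggests, in a GitHub workflow), an entry point like the following would typically drive main(); the script and argument names in the comment are illustrative placeholders, not taken from the source.

if __name__ == '__main__':
    # Illustrative invocation, following the usage string above (names are placeholders):
    #   python gh_sizes.py PLATFORM CONFIG TARGET path/to/app.elf [output] [options]
    sys.exit(main(sys.argv))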
Example #5
def main(argv):
    status = 0

    try:
        _, platform, config_name, target_name, binary, *args = argv
    except ValueError:
        program = pathlib.Path(argv[0])
        logging.error(
            """
            Usage: %s platform config target binary [output] [options]

            This is intended for use in github workflows.
            For other purposes, a general program for the same operations is
            %s/report_summary.py

            """, program.name, program.parent)
        return 1

    try:
        config_file = pathlib.Path(platform)
        if config_file.is_file():
            platform = config_file.stem
        else:
            config_file = (PLATFORM_CONFIG_DIR / platform).with_suffix('.cfg')

        output_base = f'{platform}-{config_name}-{target_name}-sizes.json'
        if args and not args[0].startswith('-'):
            out, *args = args
            output = pathlib.Path(out)
            if out.endswith('/') and not output.exists():
                output.mkdir(parents=True)
            if output.is_dir():
                output = output / output_base
        else:
            output = pathlib.Path(binary).parent / output_base

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.collect.CONFIG,
            **memdf.select.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **CONFIG,
        })
        config.put('output.file', output)
        config.put('output.format', 'json_records')
        if config_file.is_file():
            config.read_config_file(config_file)
        else:
            logging.warning('Missing config file: %s', config_file)
        config.parse([argv[0]] + args)

        config.put('output.metadata.platform', platform)
        config.put('output.metadata.config', config_name)
        config.put('output.metadata.target', target_name)
        config.put('output.metadata.time', config['timestamp'])
        config.put('output.metadata.input', binary)
        config.put('output.metadata.by', 'section')
        for key in ['event', 'hash', 'parent', 'pr', 'ref']:
            if value := config[key]:
                config.putl(['output', 'metadata', key], value)

        collected: DFs = memdf.collect.collect_files(config, [binary])

        # Aggregate loaded segments, by writable (RAM) or not (flash).
        segments = collected[SegmentDF.name]
        segments['segment'] = segments.index
        segments['wr'] = ((segments['flags'] & 2) != 0).convert_dtypes(
            convert_boolean=False, convert_integer=True)
        segment_summary = segments[segments['type'] == 'PT_LOAD'][[
            'wr', 'size'
        ]].groupby('wr').aggregate(np.sum).reset_index().astype(
            {'size': np.int64})
        segment_summary.attrs['name'] = "wr"

        sections = collected[SectionDF.name]
        sections = sections.join(on='segment',
                                 how='left',
                                 other=segments,
                                 rsuffix='-segment')
        section_summary = sections[['section', 'size',
                                    'wr']].sort_values(by='section')
        section_summary.attrs['name'] = "section"

        summaries = {
            'section': section_summary,
            'memory': segment_summary,
        }

        # Write configured (json) report to the output file.
        memdf.report.write_dfs(config, summaries)

        # Write text report to stdout.
        memdf.report.write_dfs(config,
                               summaries,
                               sys.stdout,
                               'simple',
                               floatfmt='.0f')

    except Exception:
        # Report the failure and return a non-zero status so a CI step sees it.
        logging.exception('Size report failed')
        status = 1

    return status
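The writable split above relies on the ELF program-header flag bit PF_W (value 2): writable PT_LOAD segments occupy RAM, read-only ones occupy flash. A self-contained sketch of the same aggregation on a made-up segment table (all values are illustrative):

import numpy as np
import pandas as pd

# Made-up segment table: 'flags' follows ELF p_flags (PF_X=1, PF_W=2, PF_R=4).
segments = pd.DataFrame({
    'type': ['PT_LOAD', 'PT_LOAD', 'PT_NOTE'],
    'flags': [0x5, 0x6, 0x4],          # R+X (read-only), R+W (writable), not loaded
    'size': [0x30000, 0x8000, 0x100],
})
segments['wr'] = ((segments['flags'] & 2) != 0).astype(np.int64)

# Only PT_LOAD segments occupy memory; sum their sizes per writable flag.
segment_summary = (segments[segments['type'] == 'PT_LOAD'][['wr', 'size']]
                   .groupby('wr').sum().reset_index())
print(segment_summary)  # wr=0 -> 196608 bytes (flash), wr=1 -> 32768 bytes (RAM)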