Example #1
0
def main(argv):
    """Report sizes recorded in a size database.

    Builds the tool configuration from the shared config fragments,
    reads the size inputs into a SizeDatabase, and writes the report
    data frames produced by the matching-commit and query reports.

    Args:
        argv: Command-line argument list (including the program name).
    Returns:
        Process exit status; 0 on success.
    """
    status = 0
    try:
        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.util.sqlite.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **GITHUB_CONFIG,
        })
        config.argparse.add_argument('inputs', metavar='FILE', nargs='*')
        config.parse(argv)

        dfs = {}
        # The context manager closes the database even if a report raises.
        with SizeDatabase(config) as db:
            db.read_inputs()
            dfs.update(report_matching_commits(db))
            dfs.update(report_queries(db))

        memdf.report.write_dfs(config,
                               dfs,
                               hierify=True,
                               title=True,
                               floatfmt='5.1f')

    except Exception:
        # Bare `raise` is the idiomatic re-raise: it propagates the active
        # exception unchanged instead of re-raising a bound name.
        raise

    return status
Example #2
0
def main(argv):
    """Report on commits recorded in a size context.

    Builds the tool configuration from the shared config fragments,
    reads size inputs through a SizeContext, and writes the
    matching-commit report data frames.

    Args:
        argv: Command-line argument list (including the program name).
    Returns:
        Process exit status; 0 on success.
    """
    status = 0
    try:
        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.util.github.CONFIG,
            **memdf.util.sqlite.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **GITHUB_CONFIG,
            **DB_CONFIG,
            **REPORT_CONFIG,
        })
        config.argparse.add_argument('inputs', metavar='FILE', nargs='*')
        config.parse(argv)

        szc = SizeContext(config)
        szc.read_inputs()
        dfs = szc.report_matching_commits()

        memdf.report.write_dfs(config,
                               dfs,
                               hierify=True,
                               title=True,
                               floatfmt='5.1f')

    except Exception:
        # Bare `raise` is the idiomatic re-raise: it propagates the active
        # exception unchanged instead of re-raising a bound name.
        raise

    return status
Example #3
0
def main(argv):
    """Report undefined symbols that match the configured block list.

    Reads the symbol tables of the input files, keeps only undefined
    ('U') symbols whose names fully match the `symbol.block` pattern,
    and writes them as a report. Warns and does nothing if no block
    list is configured.

    Args:
        argv: Command-line argument list (including the program name).
    Returns:
        Process exit status; 0 on success.
    """
    status = 0
    try:

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.collect.PREFIX_CONFIG,
            **memdf.collector.readelf.NM_CONFIG,
            **memdf.report.REPORT_CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **BLOCKLIST_CONFIG,
        })
        # nargs='+' guarantees at least one input, so `frames` below
        # is never empty when pd.concat runs.
        config.argparse.add_argument('inputs', metavar='FILE', nargs='+')
        config = config.parse(argv)

        block_re: Optional[Pattern] = config.get_re('symbol.block')
        if block_re is None:
            logging.warning('No block list')
        else:
            frames = []
            for filename in config.get('args.inputs', []):
                ssdf = memdf.collector.readelf.read_sources(config, filename)
                frames.append(ssdf[ssdf.kind == 'U'])
            ssdf = pd.concat(frames)
            ssdf = ssdf[ssdf.symbol.str.fullmatch(block_re)]
            memdf.report.write_dfs(config, {'Symbols': ssdf})
    except Exception:
        # Bare `raise` is the idiomatic re-raise: it propagates the active
        # exception unchanged instead of re-raising a bound name.
        raise

    return status
Example #4
0
def main(argv):
    """Download GitHub size artifacts and add them to the size database.

    Also adds sizes from any files named on the command line.

    NOTE(review): this block appears truncated in the visible source —
    the ``try:`` below has no matching ``except``/``finally`` here.
    """
    status = 0
    try:
        # NOTE(review): this aliases (does not copy) the module-level
        # sqlite CONFIG, so the nested 'required' flag is mutated in the
        # shared dict — confirm that is intended.
        sqlite_config = memdf.util.sqlite.CONFIG
        sqlite_config['database.file']['argparse']['required'] = True

        config = Config().init({
            **memdf.util.config.CONFIG,
            **memdf.util.github.CONFIG,
            **sqlite_config,
            **GITHUB_CONFIG,
        })
        config.argparse.add_argument('inputs', metavar='FILE', nargs='*')
        config.parse(argv)

        db = memdf.sizedb.SizeDatabase(config['database.file']).open()

        # Only fetch from GitHub when a Gh client could be constructed.
        if gh := Gh(config):

            artifact_limit = config['github.limit-artifacts']
            artifacts_added = 0
            events = config['github.event']
            if not events:
                # Default to push events when none are configured.
                events = ['push']
            for a in gh.get_size_artifacts(label=config['github.label']):
                # Skip artifacts for events we were not asked about.
                if events and a.event not in events:
                    logging.debug('Skipping %s artifact %d', a.event, a.id)
                    continue
                # Skip artifacts already recorded in the database.
                cur = db.execute('SELECT id FROM build WHERE artifact = ?',
                                 (a.id, ))
                if cur.fetchone():
                    logging.debug('Skipping known artifact %d', a.id)
                    continue
                blob = gh.download_artifact(a.id)
                if blob:
                    logging.info('Adding artifact %d %s %s %s %s', a.id,
                                 a.commit[:12], a.pr, a.event, a.group)
                    db.add_sizes_from_zipfile(io.BytesIO(blob),
                                              {'artifact': a.id})
                    # Commit per artifact so progress survives interruption.
                    db.commit()
                    artifacts_added += 1
                    # Stop once the configured artifact limit is reached.
                    if artifact_limit and artifact_limit <= artifacts_added:
                        break

        # Also load sizes from any explicitly named input files.
        for filename in config['args.inputs']:
            db.add_sizes_from_file(filename)
            db.commit()
Example #5
0
def parse_args(config_desc: Mapping, argv: Sequence[str]) -> Config:
    """Common argument parsing for collection tools."""
    # Later fragments override earlier ones, so the tool-specific
    # description takes precedence over the shared defaults.
    merged: dict = {}
    for fragment in (memdf.util.config.CONFIG, CONFIG, config_desc):
        merged.update(fragment)
    config = Config().init(merged)
    config.argparse.add_argument('inputs', metavar='FILE', nargs='+')
    return config.parse(argv)
Example #6
0
def main(argv):
    """Run the configured queries against a size database and report them.

    NOTE(review): this block is cut off at the end of the visible source —
    the ``try:`` below has no visible ``except``, and the function
    continues past the last line shown.
    """
    status = 0
    try:
        cfg = {
            **memdf.util.config.CONFIG,
            **memdf.util.sqlite.CONFIG,
            **memdf.report.OUTPUT_CONFIG,
            **QUERY_CONFIG,
        }
        # NOTE(review): the {**...} merge is shallow, so this mutates the
        # nested dict shared with the module-level sqlite CONFIG — confirm
        # that is intended.
        cfg['database.file']['argparse']['required'] = True

        config = Config().init(cfg)
        config.parse(argv)

        # Reports only read; open the database read-only.
        db = SizeDatabase(config['database.file'], writable=False)
        db.open()

        dfs = {}

        # q numbers the result frames ('query1', 'query2', ...) across
        # both the configured queries and the build-sizes query below.
        q = 0
        for title, key, values, info in config.get('queries', []):
            q += 1
            query = make_query(config, info)
            logging.debug('Option: %s', key)
            logging.debug('Title: %s', title)
            logging.debug('Query: %s', query.strip())
            logging.debug('With: %s', values)
            cur = db.execute(query, values)
            # Column names come from the cursor description.
            columns = [i[0] for i in cur.description]
            rows = cur.fetchall()
            if rows:
                df = pd.DataFrame(rows, columns=columns)
                df.attrs = {'name': f'query{q}', 'title': title}
                # Apply any configured post-processing steps in order.
                for f in info['sql'].get('postprocess', []):
                    df = f(config, df)
                dfs[df.attrs['name']] = df

        # Optional per-build size report, if requested.
        if build := config['query.build-sizes']:
            q += 1
            if (df := query_build_sizes(config, db, build)) is not None:
                dfs[df.attrs['name']] = df