Exemple #1
0
def cli(ctx, datadir, db, verbose, quiet):
    """Manipulate and query a package porting database."""
    # Net verbosity: every --quiet cancels one --verbose.
    verbose -= quiet
    ctx.obj['verbose'] = verbose

    # -vv enables INFO-level SQL logging; -vvv bumps it to DEBUG.
    if verbose >= 2:
        level = logging.DEBUG if verbose >= 3 else logging.INFO
        logging.basicConfig(level=level)
        logging.getLogger('sqlalchemy.engine').setLevel(level)

    ctx.obj['datadirs'] = [os.path.abspath(d) for d in datadir]

    if 'db' in ctx.obj:
        # A database session was injected by the caller; don't build one.
        url = '<passed in>'
    else:
        if db is None:
            url = 'sqlite:///'
        elif urllib.parse.urlparse(db).scheme:
            # Already a full URL (has a scheme); use it verbatim.
            url = db
        else:
            # Bare filesystem path: make it an absolute sqlite URL.
            url = 'sqlite:///' + os.path.abspath(db)
        ctx.obj['db'] = get_db(None, engine=create_engine(url))
    ctx.obj['db_url'] = url
Exemple #2
0
def cli(ctx, datadir, db, verbose, quiet):
    """Manipulate and query a package porting database."""
    verbosity = verbose - quiet  # --quiet flags subtract from verbosity
    ctx.obj['verbose'] = verbosity

    if verbosity >= 2:
        # Two -v's: INFO-level SQL logging; three or more: full DEBUG.
        log_level = logging.INFO if verbosity < 3 else logging.DEBUG
        logging.basicConfig(level=log_level)
        logging.getLogger('sqlalchemy.engine').setLevel(log_level)

    ctx.obj['datadirs'] = [os.path.abspath(path) for path in datadir]

    if 'db' in ctx.obj:
        # A pre-made session already sits in the context object.
        url = '<passed in>'
    else:
        if db is None:
            url = 'sqlite:///'
        else:
            # Treat anything with a URL scheme as a ready-made database
            # URL; otherwise interpret it as a local sqlite file path.
            has_scheme = bool(urllib.parse.urlparse(db).scheme)
            url = db if has_scheme else 'sqlite:///' + os.path.abspath(db)
        engine = create_engine(url)
        ctx.obj['db'] = get_db(None, engine=engine)
    ctx.obj['db_url'] = url
Exemple #3
0
def main(update):
    """Print a CSV of per-status package counts for each history commit.

    Rows (commit/date/status/num_packages) go to stdout.  If *update*
    names a CSV from a previous run, its rows are replayed first and
    the commits it covers are skipped.
    """
    excluded = set(BAD_COMMITS)
    scratch = tempfile.mkdtemp()
    writer = csv.DictWriter(sys.stdout,
                            ['commit', 'date', 'status', 'num_packages'],
                            lineterminator='\n')
    writer.writeheader()

    all_statuses = [
        "blocked", "dropped", "idle", "in-progress", "released", "mispackaged"]

    if update:
        # Re-emit previously computed rows; remember their commits so
        # the expensive per-commit work is not repeated for them.
        with open(update) as f:
            for row in csv.DictReader(f):
                excluded.add(row['commit'])
                writer.writerow(row)

    try:
        tmpclone = os.path.join(scratch, 'tmp_clone')
        tmpdata = os.path.join(tmpclone, 'data')
        tmpdb = os.path.join(tmpclone, 'tmp-portingdb.sqlite')
        run(['git', 'clone', '.', tmpclone])
        prev_data_hash = None
        for commit in reversed(git_history()):
            data_hash = run(['git', 'rev-parse', commit + ':' + 'data'])
            if commit in excluded or data_hash == prev_data_hash:
                # Known-bad commit, or data/ unchanged since last time.
                prev_data_hash = data_hash
                continue

            # Note: we don't remove files that didn't exist in the old
            # version.
            run(['git', 'checkout', commit, '--', 'data'], cwd=tmpclone)
            run(['python3', '-m', 'portingdb',
                 '--datadir', tmpdata,
                 '--db', tmpdb,
                 'load'])

            engine = create_engine('sqlite:///' + os.path.abspath(tmpdb))
            db = get_db(engine=engine)
            query = (select([tables.Package.status, func.count()])
                     .select_from(tables.Package.__table__)
                     .group_by(tables.Package.status))

            date = run(['git', 'log', '-n1', '--pretty=%ci', commit]).strip()
            counts = {status: num for status, num in db.execute(query)}
            for status in all_statuses:
                writer.writerow({
                    'commit': commit,
                    'date': date,
                    'status': status,
                    'num_packages': counts.get(status, 0),
                })

            os.unlink(tmpdb)
            prev_data_hash = data_hash
    finally:
        shutil.rmtree(scratch)
Exemple #4
0
def main(update, naming):
    """Write a CSV of per-status package counts, keeping at most one
    history commit per calendar day.

    Rows (commit/date/status/num_packages) go to stdout.

    update -- optional path to a CSV from a previous run; its rows are
        copied through first and the commits it covers are skipped.
    naming -- selects the "naming" history: a different end commit and
        a different per-commit counting helper.
    """
    excluded = set(BAD_COMMITS)
    tmpdir = tempfile.mkdtemp()
    writer = csv.DictWriter(sys.stdout,
                            ['commit', 'date', 'status', 'num_packages'],
                            lineterminator='\n')
    writer.writeheader()

    prev_date = None
    prev_commit = None
    if update:
        # Replay the previous run's rows, remembering the last
        # date/commit seen so processing resumes after them.
        with open(update) as f:
            for row in csv.DictReader(f):
                excluded.add(row['commit'])
                prev_date = row['date']
                prev_commit = row['commit']
                writer.writerow(row)

    try:
        tmpclone = os.path.join(tmpdir, 'tmp_clone')
        tmpdata = os.path.join(tmpclone, 'data')
        tmpdb = os.path.join(tmpclone, 'tmp-portingdb.sqlite')
        run(['git', 'clone', '.', tmpclone])
        prev_data_hash = None
        prev_batch = []

        end_commit = HISTORY_NAMING_END_COMMIT if naming else HISTORY_END_COMMIT
        for commit in reversed(git_history(end=end_commit)):
            # %ci is git's ISO-ish committer date, e.g.
            # "2016-01-02 12:34:56 +0100"; string comparison below
            # orders these chronologically.
            date = run(['git', 'log', '-n1', '--pretty=%ci', commit]).strip()
            if prev_date and prev_date > date:
                # Already covered by the update file; skip entirely.
                continue
            data_hash = run(['git', 'rev-parse', commit + ':' + 'data'])
            if (commit in excluded) or (data_hash == prev_data_hash):
                # Known-bad commit, or data/ unchanged since last commit.
                prev_data_hash = data_hash
                continue
            if prev_date and prev_date[:11] != date[:11]:
                # date[:11] is the "YYYY-MM-DD " prefix: a new calendar
                # day began, so flush the rows buffered for the last
                # processed commit of the previous day.
                prev_date = date
                prev_commit = commit
                for row in prev_batch:
                    writer.writerow(row)
            else:
                # Same day as the buffered batch: discard it, so only
                # the day's last processed commit survives.
                # NOTE(review): prev_date is not advanced here, and on a
                # fresh run (prev_date is None) the very first commit
                # also lands in this branch (prev_batch is empty then)
                # -- confirm that is intended.
                prev_commit = commit
                print('{},{} - skipping'.format(prev_commit, prev_date),
                      file=sys.stderr)
            prev_batch = []

            # Note: we don't remove files that didn't exist in the old
            # version.
            run(['git', 'checkout', commit, '--', 'data'], cwd=tmpclone)
            run([
                'python3', '-m', 'portingdb', '--datadir', tmpdata, '--db',
                tmpdb, 'load'
            ])

            engine = create_engine('sqlite:///' + os.path.abspath(tmpdb))
            db = get_db(engine=engine)

            # Buffer this commit's rows; they are only written once a
            # later day's commit (or the end of history) is reached.
            if naming:
                prev_batch = get_history_naming_package_numbers(
                    db, commit, date)
            else:
                prev_batch = get_history_package_numbers(db, commit, date)

            os.unlink(tmpdb)

            prev_data_hash = data_hash
        # Flush the rows buffered for the newest processed commit.
        for row in prev_batch:
            writer.writerow(row)
    finally:
        shutil.rmtree(tmpdir)
    return
def main(update):
    """Write a CSV of per-status package counts, keeping at most one
    history commit per calendar day.

    Rows (commit/date/status/num_packages) go to stdout.

    update -- optional path to a CSV from a previous run; its rows are
        copied through first and the commits it covers are skipped.
    """
    excluded = set(BAD_COMMITS)
    tmpdir = tempfile.mkdtemp()
    writer = csv.DictWriter(sys.stdout,
                            ['commit', 'date', 'status', 'num_packages'],
                            lineterminator='\n')
    writer.writeheader()

    # One output row per status per emitted commit.
    all_statuses = [
        "blocked", "dropped", "idle", "in-progress", "released", "mispackaged"
    ]

    prev_date = None
    prev_commit = None
    if update:
        # Replay the previous run's rows, remembering the last
        # date/commit seen so processing resumes after them.
        with open(update) as f:
            for row in csv.DictReader(f):
                excluded.add(row['commit'])
                prev_date = row['date']
                prev_commit = row['commit']
                writer.writerow(row)

    try:
        tmpclone = os.path.join(tmpdir, 'tmp_clone')
        tmpdata = os.path.join(tmpclone, 'data')
        tmpdb = os.path.join(tmpclone, 'tmp-portingdb.sqlite')
        run(['git', 'clone', '.', tmpclone])
        prev_data_hash = None
        prev_batch = []
        for commit in reversed(git_history()):
            # %ci is git's ISO-ish committer date, e.g.
            # "2016-01-02 12:34:56 +0100"; string comparison below
            # orders these chronologically.
            date = run(['git', 'log', '-n1', '--pretty=%ci', commit]).strip()
            if prev_date and prev_date > date:
                # Already covered by the update file; skip entirely.
                continue
            data_hash = run(['git', 'rev-parse', commit + ':' + 'data'])
            if (commit in excluded) or (data_hash == prev_data_hash):
                # Known-bad commit, or data/ unchanged since last commit.
                prev_data_hash = data_hash
                continue
            if prev_date and prev_date[:11] != date[:11]:
                # date[:11] is the "YYYY-MM-DD " prefix: a new calendar
                # day began, so flush the rows buffered for the last
                # processed commit of the previous day.
                prev_date = date
                prev_commit = commit
                for row in prev_batch:
                    writer.writerow(row)
            else:
                # Same day as the buffered batch: discard it, so only
                # the day's last processed commit survives.
                # NOTE(review): prev_date is not advanced here, and on a
                # fresh run (prev_date is None) the very first commit
                # also lands in this branch (prev_batch is empty then)
                # -- confirm that is intended.
                prev_commit = commit
                print('{},{} - skipping'.format(prev_commit, prev_date),
                      file=sys.stderr)
            prev_batch = []

            # Note: we don't remove files that didn't exist in the old
            # version.
            run(['git', 'checkout', commit, '--', 'data'], cwd=tmpclone)
            run([
                'python3', '-m', 'portingdb', '--datadir', tmpdata, '--db',
                tmpdb, 'load'
            ])

            engine = create_engine('sqlite:///' + os.path.abspath(tmpdb))
            db = get_db(engine=engine)
            # Count packages grouped by status in the freshly loaded db.
            columns = [tables.Package.status, func.count()]
            query = select(columns).select_from(tables.Package.__table__)
            query = query.group_by(tables.Package.status)

            package_numbers = {
                status: num_packages
                for status, num_packages in db.execute(query)
            }
            # Buffer this commit's rows; they are only written once a
            # later day's commit (or the end of history) is reached.
            # Statuses with no packages are reported as 0.
            for status in all_statuses:
                row = {
                    'commit': commit,
                    'date': date,
                    'status': status,
                    'num_packages': package_numbers.get(status, 0),
                }
                prev_batch.append(row)

            os.unlink(tmpdb)

            prev_data_hash = data_hash
        # Flush the rows buffered for the newest processed commit.
        for row in prev_batch:
            writer.writerow(row)
    finally:
        shutil.rmtree(tmpdir)
    return
Exemple #6
0
def main(update, naming):
    """Emit per-status package-count CSV rows to stdout, buffering so
    that at most one commit per calendar day makes it into the output.

    update -- optional CSV from a previous run: replayed to stdout and
        its commits excluded from recomputation.
    naming -- use the "naming" history end commit and counting helper.
    """
    excluded = set(BAD_COMMITS)
    workdir = tempfile.mkdtemp()
    writer = csv.DictWriter(sys.stdout,
                            ['commit', 'date', 'status', 'num_packages'],
                            lineterminator='\n')
    writer.writeheader()

    prev_date = None
    prev_commit = None
    if update:
        with open(update) as previous_csv:
            for row in csv.DictReader(previous_csv):
                excluded.add(row['commit'])
                prev_date = row['date']
                prev_commit = row['commit']
                writer.writerow(row)

    try:
        tmpclone = os.path.join(workdir, 'tmp_clone')
        tmpdata = os.path.join(tmpclone, 'data')
        tmpdb = os.path.join(tmpclone, 'tmp-portingdb.sqlite')
        run(['git', 'clone', '.', tmpclone])
        prev_data_hash = None
        prev_batch = []

        end_commit = HISTORY_NAMING_END_COMMIT if naming else HISTORY_END_COMMIT
        for commit in reversed(git_history(end=end_commit)):
            date = run(['git', 'log', '-n1', '--pretty=%ci', commit]).strip()
            if prev_date and prev_date > date:
                # Older than what the update file already covers.
                continue
            data_hash = run(['git', 'rev-parse', commit + ':' + 'data'])
            if commit in excluded or data_hash == prev_data_hash:
                prev_data_hash = data_hash
                continue
            if prev_date and prev_date[:11] != date[:11]:
                # A new calendar day started: the buffered batch is the
                # previous day's last processed commit, so flush it.
                prev_date = date
                prev_commit = commit
                for row in prev_batch:
                    writer.writerow(row)
            else:
                # Same day (or first iteration): drop the buffered rows.
                prev_commit = commit
                print(f'{prev_commit},{prev_date} - skipping',
                      file=sys.stderr)
            prev_batch = []

            # Note: we don't remove files that didn't exist in the old
            # version.
            run(['git', 'checkout', commit, '--', 'data'], cwd=tmpclone)
            run(['python3', '-m', 'portingdb',
                 '--datadir', tmpdata, '--db', tmpdb, 'load'])

            engine = create_engine('sqlite:///' + os.path.abspath(tmpdb))
            db = get_db(engine=engine)

            counter = (get_history_naming_package_numbers if naming
                       else get_history_package_numbers)
            prev_batch = counter(db, commit, date)

            os.unlink(tmpdb)

            prev_data_hash = data_hash
        # Flush whatever remains buffered for the newest commit.
        for row in prev_batch:
            writer.writerow(row)
    finally:
        shutil.rmtree(workdir)
Exemple #7
0
def get_portingdb(db):
    """Return session object for portingdb.

    db -- path to the sqlite database file.
    """
    sqlite_url = 'sqlite:///' + db
    return get_db(None, engine=create_engine(sqlite_url))