def alien_rbpkg():
    log.info('running alien_rbpkg check...')
    query = '''SELECT s.name
               FROM sources AS s
               WHERE s.name='{pkg}' AND s.suite='{suite}'
               AND s.architecture='{arch}'
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    bad_files = []
    for root, dirs, files in os.walk(RB_PKG_PATH):
        if not files:
            continue
        # Extract the "suite" and "arch" from the directory structure
        if os.path.split(root)[1] == 'diffoscope-results':
            # We are presently inspecting package pages in the
            # RB_PKG_PATH/{{suite}}/{{arch}}/diffoscope-results directory
            suite, arch = os.path.split(root)[0].rsplit('/', 2)[1:]
        else:
            # We are presently inspecting package pages in the
            # RB_PKG_PATH/{{suite}}/{{arch}}/ directory
            suite, arch = root.rsplit('/', 2)[1:]
        for file in files:
            pkg = file.rsplit('.', 1)[0]
            if not query_db(query.format(pkg=pkg, suite=suite, arch=arch)):
                bad_files.append('/'.join([root, file]))
                log.warning('/'.join([root, file]) + ' should not be there')
    return bad_files
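# An illustrative sketch (hypothetical path) of how the rsplit() and
# os.path.split() calls above recover suite and arch from the directory
# layout:
#
#   >>> '/srv/rb-pkg/unstable/amd64'.rsplit('/', 2)[1:]
#   ['unstable', 'amd64']
#   >>> root = '/srv/rb-pkg/unstable/amd64/diffoscope-results'
#   >>> os.path.split(root)[0].rsplit('/', 2)[1:]
#   ['unstable', 'amd64']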
def pbuilder_dep_fail():
    log.info('running pbuilder_dep_fail check...')
    bad_pkgs = []
    # we only care about these failures outside of unstable and experimental,
    # where they happen all the time anyway: packages there are buggy and
    # specific versions come and go
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status = 'FTBFS'
               AND s.suite NOT IN ('unstable', 'experimental')
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        eversion = strip_epoch(version)
        rbuild = RBUILD_PATH + '/' + suite + '/' + arch + '/' + pkg + '_' + \
            eversion + '.rbuild.log'
        if os.access(rbuild, os.R_OK):
            log.debug('\tlooking at ' + rbuild)
        with open(rbuild, "rb") as fd:
                for line in fd:
                    if re.search(b'E: pbuilder-satisfydepends failed.', line):
                        bad_pkgs.append((pkg, version, suite, arch))
                        log.warning(suite + '/' + arch + '/' + pkg + ' (' +
                                    version +
                                    ') failed to satisfy its dependencies.')
    return bad_pkgs
def unrep_with_dbd_issues():
    log.info('running unrep_with_dbd_issues check...')
    without_dbd = []
    bad_dbd = []
    sources_without_dbd = set()
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status='unreproducible'
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        eversion = strip_epoch(version)
        dbd = DBD_PATH + '/' + suite + '/' + arch + '/' + pkg + '_' + \
            eversion + '.diffoscope.html'
        if not os.access(dbd, os.R_OK):
            without_dbd.append((pkg, version, suite, arch))
            sources_without_dbd.add(pkg)
            log.warning(suite + '/' + arch + '/' + pkg + ' (' + version +
                        ') is unreproducible, but its diffoscope file is '
                        'missing.')
        else:
            log.debug(dbd + ' found.')
            # sniff the first bytes: diffoscope HTML output starts with a tag
            with open(dbd, 'rb') as f:
                data = f.read(3)
            if b'<' not in data:
                bad_dbd.append((pkg, version, suite, arch))
                log.warning(suite + '/' + arch + '/' + pkg + ' (' + version +
                            ') has '
                            'diffoscope output, but it does not seem to '
                            'be an HTML page.')
                sources_without_dbd.add(pkg)
    return without_dbd, bad_dbd, sources_without_dbd
def update_stats(suite, arch, stats, pkgset_name):
    result = query_db("""
            SELECT datum, meta_pkg, suite
            FROM stats_meta_pkg_state
            WHERE datum = '{date}' AND suite = '{suite}'
            AND architecture = '{arch}' AND meta_pkg = '{name}'
        """.format(date=YESTERDAY, suite=suite, arch=arch, name=pkgset_name))

    # if there is not a result for this day, add one
    if not result:
        insert = "INSERT INTO stats_meta_pkg_state VALUES ('{date}', " + \
                 "'{suite}', '{arch}', '{pkgset_name}', '{count_good}', " + \
                 "'{count_bad}', '{count_ugly}', '{count_rest}')"
        query_db(
            insert.format(date=YESTERDAY,
                          suite=suite,
                          arch=arch,
                          pkgset_name=pkgset_name,
                          count_good=stats['count_good'],
                          count_bad=stats['count_bad'],
                          count_ugly=stats['count_ugly'],
                          count_rest=stats['count_rest']))
        log.info("Updating db entry for meta pkgset %s in %s/%s on %s.",
                 pkgset_name, suite, arch, YESTERDAY)
    else:
        log.debug(
            "Not updating db entry for meta pkgset %s in %s/%s on %s as one exists already.",
            pkgset_name, suite, arch, YESTERDAY)
def create_pkgset_graph(png_file, suite, arch, pkgset_name):
    table = "stats_meta_pkg_state"
    columns = ["datum", "reproducible", "FTBR", "FTBFS", "other"]
    where = "WHERE suite = '%s' AND architecture = '%s' AND meta_pkg = '%s'" % \
            (suite, arch, pkgset_name)
    if arch == 'i386':
        # i386 only has package sets since 2016-05-06, so start the graph
        # there to make it nicer (date added in commit 7f2525f7)
        where += " AND datum >= '2016-05-06'"
    query = "SELECT {fields} FROM {table} {where} ORDER BY datum".format(
        fields=", ".join(columns), table=table, where=where)
    result = query_db(query)
    result_rearranged = [dict(zip(columns, row)) for row in result]

    with create_temp_file(mode='w') as f:
        csv_tmp_file = f.name
        csv_writer = csv.DictWriter(f, columns)
        csv_writer.writeheader()
        csv_writer.writerows(result_rearranged)
        f.flush()

        graph_command = os.path.join(BIN_PATH, "make_graph.py")
        main_label = "Reproducibility status for packages in " + suite + \
                     " from " + pkgset_name
        y_label = "Amount (" + pkgset_name + " packages)"
        log.info("Creating graph for meta pkgset %s in %s/%s.", pkgset_name,
                 suite, arch)
        check_call([
            graph_command, csv_tmp_file, png_file, '4', main_label, y_label,
            '1920', '960'
        ])
def packages_matching_criteria(arch, suite, criteria):
    "Return a list of packages in (SUITE, ARCH) matching the given CRITERIA."
    # TODO: Rewrite this function to query all suites/archs in one go
    issue, status, built_after, built_before = criteria
    del criteria

    formatter = dict(suite=suite, arch=arch, notes_table='')
    log.info('Querying packages with given issues/status...')
    query = "SELECT s.name " + \
            "FROM sources AS s, {notes_table} results AS r " + \
            "WHERE r.package_id=s.id " + \
            "AND s.architecture= '{arch}' " + \
            "AND s.suite = '{suite}' AND r.status != 'blacklisted' "
    if issue:
        query += "AND n.package_id=s.id AND n.issues LIKE '%%{issue}%%' "
        formatter['issue'] = issue
        formatter['notes_table'] = "notes AS n,"
    if status:
        query += "AND r.status = '{status}'"
        formatter['status'] = status
    if built_after:
        query += "AND r.build_date > '{built_after}' "
        formatter['built_after'] = built_after
    if built_before:
        query += "AND r.build_date < '{built_before}' "
        formatter['built_before'] = built_before
    results = query_db(query.format_map(formatter))
    results = [x for (x, ) in results]
    log.info('Selected packages: ' + ' '.join(results))
    return results
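# An example call (all values hypothetical): criteria is the
# (issue, status, built_after, built_before) 4-tuple unpacked above, so
# selecting unreproducible amd64 packages in unstable that are tagged with
# an issue and were built after a given date would look like:
#
#   packages_matching_criteria(
#       'amd64', 'unstable',
#       ('timestamps_in_documentation', 'unreproducible', '2021-01-01', None))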
def update_stats_breakages(diffoscope_timeouts, diffoscope_crashes):
    # we only do stats up until yesterday
    YESTERDAY = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d')

    result = query_db("""
            SELECT datum, diffoscope_timeouts, diffoscope_crashes
            FROM stats_breakages
            WHERE datum = '{date}'
        """.format(date=YESTERDAY))

    # if there is not a result for this day, add one
    if not result:
        insert = "INSERT INTO stats_breakages VALUES ('{date}', " + \
                 "'{diffoscope_timeouts}', '{diffoscope_crashes}')"
        query_db(
            insert.format(date=YESTERDAY,
                          diffoscope_timeouts=diffoscope_timeouts,
                          diffoscope_crashes=diffoscope_crashes))
        log.info(
            "Adding entry to db table stats_breakages on %s with %s timeouts and %s crashes.",
            YESTERDAY, diffoscope_timeouts, diffoscope_crashes)
    else:
        log.debug(
            "Not updating db table stats_breakages as it already has data for %s.",
            YESTERDAY)
def gen_all_rb_pkg_pages(no_clean=False):
    query = 'SELECT DISTINCT name FROM sources WHERE suite = ANY(:s)'
    rows = query_db(sqlalchemy.text(query), s=SUITES)
    pkgs = [Package(str(i[0]), no_notes=True) for i in rows]
    log.info('Processing all %s packages from all suites/architectures',
             len(pkgs))
    gen_packages_html(pkgs, no_clean=True)  # we clean at the end
    purge_old_pages()
def queue_packages(all_pkgs, packages, date):
    date = date.strftime('%Y-%m-%d %H:%M')
    pkgs = [x for x in packages if x[0] not in all_pkgs]
    if len(pkgs) > 0:
        log.info('The following ' + str(len(pkgs)) + ' source packages have ' +
                 'been queued up for scheduling at ' + date + ': ' +
                 ' '.join([str(x[1]) for x in pkgs]))
    all_pkgs.update({x[0]: date for x in pkgs})
    return all_pkgs
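# Example use of queue_packages() (made-up values): packages is a list of
# (id, name) tuples and all_pkgs maps already-queued package ids to the
# date they were queued:
#
#   >>> from datetime import datetime
#   >>> queue_packages({}, [(1, 'bash'), (2, 'coreutils')],
#   ...                datetime(2021, 1, 1, 12, 0))
#   {1: '2021-01-01 12:00', 2: '2021-01-01 12:00'}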
def load_issues():
    """
    format:
    { 'issue_name': {'description': 'blabla', 'url': 'blabla'} }
    """
    with open(ISSUES) as fd:
        issues = yaml.safe_load(fd)  # plain data; the safe loader suffices
    log.info('Issues loaded. There are ' + str(len(issues)) + ' issues.')
    return issues
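# A sketch of the ISSUES YAML file load_issues() expects, matching the
# format documented in its docstring (the issue name is made up):
#
#   timestamps_in_documentation:
#     description: blabla
#     url: blabla
#
# so that issues['timestamps_in_documentation']['url'] etc. resolve.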
def alien_log(directory=None):
    if directory is None:
        bad_files = []
        for path in RBUILD_PATH, LOGS_PATH, DIFFS_PATH:
            bad_files.extend(alien_log(path))
        return bad_files
    log.info('running alien_log check over ' + directory + '...')
    query = '''SELECT r.version
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status != '' AND s.name='{pkg}' AND s.suite='{suite}'
               AND s.architecture='{arch}'
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    bad_files = []
    for root, dirs, files in os.walk(directory):
        if not files:
            continue
        suite, arch = root.rsplit('/', 2)[1:]
        for file in files:
            # different files have different name patterns and need
            # different splitting
            if file.endswith('.diff.gz'):
                rsplit_level = 2
            elif file.endswith('.gz'):
                rsplit_level = 3
            else:
                rsplit_level = 2
            try:
                pkg, version = file.rsplit('.', rsplit_level)[0].rsplit('_', 1)
            except ValueError:
                log.critical(
                    bcolors.FAIL + '/'.join([root, file]) +
                    ' does not seem to be a file that should be there' +
                    bcolors.ENDC)
                continue
            try:
                rversion = query_db(
                    query.format(pkg=pkg, suite=suite, arch=arch))[0][0]
            except IndexError:  # that package is not known (or not yet tested)
                rversion = ''  # continue towards the "bad file" path
            if strip_epoch(rversion) != version:
                try:
                    if os.path.getmtime('/'.join([root, file
                                                  ])) < time.time() - 86400:
                        os.remove('/'.join([root, file]))
                        log.warning(
                            '/'.join([root, file]) +
                            ' should not be there and was older than a day, '
                            'so it was removed.'
                        )
                    else:
                        bad_files.append('/'.join([root, file]))
                        log.info(
                            '/'.join([root, file]) +
                            ' should not be there, but is less than 24h old '
                            'and will probably be gone soon; diffoscope is '
                            'probably running on that package right now.'
                        )
                except FileNotFoundError:
                    pass  # that bad file is already gone.
    return bad_files
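# How the rsplit_level values above split typical log file names into
# package and version (the file names are illustrative):
#
#   >>> 'pkg_1.0-1.diff.gz'.rsplit('.', 2)[0].rsplit('_', 1)
#   ['pkg', '1.0-1']
#   >>> 'pkg_1.0-1.build1.log.gz'.rsplit('.', 3)[0].rsplit('_', 1)
#   ['pkg', '1.0-1']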
def alien_history():
    log.info('running alien_history check...')
    result = query_db('SELECT DISTINCT name FROM sources')
    actual_packages = [x[0] for x in result]
    bad_files = []
    for f in sorted(os.listdir(HISTORY_PATH)):
        full_path = os.path.join(HISTORY_PATH, f)
        if f.rsplit('.', 1)[0] not in actual_packages and not os.path.isdir(full_path):
            bad_files.append(full_path)
            os.remove(full_path)
            log.warning('%s should not be there so it has been removed.', full_path)
    return bad_files
def db_update():
    """
    Update the database schema.
    Get a list of queries to perform from schema_updates.
    The need for an update is detected by checking the biggest value in the
    rb_schema table against the biggest value in the schema_updates dictionary.
    """
    current = query_db('SELECT MAX(version) FROM rb_schema')[0][0]
    if not current:
        log.warning('This is probably a new database, there are no ' +
                    'previous updates noted')
        current = 0
    last = max(schema_updates.keys())
    if current == last:
        return False
    if current > last:
        print_critiacal_message('The active database schema is higher than ' +
                                'the last update available.\nPlease check!')
        sys.exit(1)
    log.info('Found schema updates.')
    for update in range(current + 1, last + 1):
        log.info('Applying database update #' + str(update) + '. Queries:')
        start_time = datetime.now()
        for query in schema_updates[update]:
            log.info('\t' + query)
            query_db(query)
        log.info(
            str(len(schema_updates[update])) + ' queries executed in ' +
            str(datetime.now() - start_time))
    return True
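# db_update() above assumes a module-level schema_updates dict that maps
# version numbers to lists of SQL statements; a minimal sketch (the
# queries are illustrative, not the real schema history):
#
#   schema_updates = {
#       1: ["CREATE TABLE rb_schema "
#           "(version INTEGER NOT NULL, date TEXT NOT NULL)",
#           "INSERT INTO rb_schema VALUES (1, CURRENT_TIMESTAMP)"],
#       2: ["ALTER TABLE sources ADD COLUMN architecture TEXT"],
#   }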
def db_create_tables():
    """
    Check whether all tables are present, and create them if not.
    The check is done against sqlite_master, a reserved sqlite table
    containing all database metadata.
    """
    changed = False
    for table in db_schema:
        if not table_exists(table['name']):
            log.warning(table['name'] + ' does not exist. Creating...')
            for query in table['query']:
                log.info('\t' + re.sub(' +', ' ', query.replace('\n', ' ')))
                query_db(query)
                changed = True
    return changed
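# db_create_tables() above expects db_schema to be a list of dicts, each
# naming a table and listing the queries that create it; a minimal sketch
# (the real schema will differ):
#
#   db_schema = [
#       {'name': 'sources',
#        'query': ["CREATE TABLE sources "
#                  "(id INTEGER PRIMARY KEY, name TEXT, "
#                  "suite TEXT, architecture TEXT)"]},
#   ]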
def print_schedule_result(suite, arch, criteria, packages):
    '''
    `packages` is the usual list-of-tuples returned by SQL queries,
    where the first item is the id and the second one the package name
    '''
    log.info('Criteria:   ' + criteria)
    log.info('Suite/Arch: ' + suite + '/' + arch)
    log.info('Amount:     ' + str(len(packages)))
    log.info('Packages:   ' + ' '.join([x[1] for x in packages]))
def lack_rbuild():
    log.info('running lack_rbuild check...')
    bad_pkgs = []
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status NOT IN ('blacklisted', '')
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        rbuild = os.path.join(RBUILD_PATH, suite, arch) + \
                '/{}_{}.rbuild.log.gz'.format(pkg, strip_epoch(version))
        if not os.access(rbuild, os.R_OK):
            bad_pkgs.append((pkg, version, suite, arch))
            log.warning(suite + '/' + arch + '/' + pkg + ' (' + version +
                        ') has been '
                        'built, but a buildlog is missing.')
    return bad_pkgs
def alien_buildinfo():
    log.info('running alien_buildinfo check...')
    query = '''SELECT r.version
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status != '' AND s.name='{pkg}' AND s.suite='{suite}'
               AND s.architecture='{arch}'
               AND r.status IN ('reproducible', 'unreproducible')
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    bad_files = []
    for root, dirs, files in os.walk(BUILDINFO_PATH):
        if not files:
            continue
        suite, arch = root.rsplit('/', 2)[1:]
        for file in files:
            try:
                pkg, version = file.rsplit('.', 1)[0].split('_')[:2]
            except ValueError:
                log.critical(
                    bcolors.FAIL + '/'.join([root, file]) +
                    ' does not seem to be a file that should be there' +
                    bcolors.ENDC)
                continue
            try:
                rversion = query_db(
                    query.format(pkg=pkg, suite=suite, arch=arch))[0][0]
            except IndexError:  # that package is not known (or not yet tested)
                rversion = ''  # continue towards the "bad file" path
            if strip_epoch(rversion) != version:
                try:
                    if os.path.getmtime('/'.join([root, file
                                                  ])) < time.time() - 86400:
                        os.remove('/'.join([root, file]))
                        log.warning(
                            '/'.join([root, file]) +
                            ' should not be there and was older than a day, '
                            'so it was removed.'
                        )
                    else:
                        bad_files.append('/'.join([root, file]))
                        log.info(
                            '/'.join([root, file]) +
                            ' should not be there, but is less than 24h old '
                            'and will probably be gone soon.'
                        )
                except FileNotFoundError:
                    pass  # that bad file is already gone.
    return bad_files
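# How the parsing above behaves on a typical .buildinfo file name (the
# name is illustrative):
#
#   >>> 'pkg_1.0-1_amd64.buildinfo'.rsplit('.', 1)[0].split('_')[:2]
#   ['pkg', '1.0-1']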
def not_unrep_with_dbd_file():
    log.info('running not_unrep_with_dbd_file check...')
    bad_pkgs = []
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status != 'unreproducible'
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        eversion = strip_epoch(version)
        dbd = DBD_PATH + '/' + suite + '/' + arch + '/' + pkg + '_' + \
            eversion + '.diffoscope.html'
        if os.access(dbd, os.R_OK):
            bad_pkgs.append((pkg, version, suite, arch))
            log.warning(dbd + ' exists but ' + suite + '/' + arch + '/' + pkg +
                        ' (' + version + ')'
                        ' is not unreproducible.')
    return bad_pkgs
def create_index_page(suite, arch):
    title = 'Package sets in %s/%s' % (suite, arch)
    body = create_pkgset_navigation(suite, arch)
    destfile = os.path.join(DISTRO_BASE, suite, arch, "index_pkg_sets.html")
    suite_arch_nav_template = DISTRO_URI + \
                              '/{{suite}}/{{arch}}/index_pkg_sets.html'
    left_nav_html = create_main_navigation(
        suite=suite,
        arch=arch,
        displayed_page='pkg_set',
        suite_arch_nav_template=suite_arch_nav_template,
        ignore_experimental=True,
    )
    log.info("Creating pkgset index page for %s/%s.", suite, arch)
    write_html_page(title=title,
                    body=body,
                    destfile=destfile,
                    left_nav_html=left_nav_html)
def iterate_over_notes(notes):
    num_notes = str(len(notes))
    i = 0
    for package in sorted(notes):
        log.debug('iterating over notes... ' + str(i) + '/' + num_notes)
        note = notes[package]
        note['package'] = package
        log.debug('\t' + str(note))
        html = gen_html_note(package, note)

        title = 'Notes for ' + package + ' - reproducible builds result'
        destfile = NOTES_PATH + '/' + package + '_note.html'
        write_html_page(title=title, body=html, destfile=destfile)

        desturl = REPRODUCIBLE_URL + NOTES_URI + '/' + package + '_note.html'
        log.debug("Note created: " + desturl)
        i = i + 1
    log.info('Created ' + str(i) + ' note pages.')
def purge_old_pages():
    for suite in SUITES:
        for arch in ARCHS:
            log.info('Removing old pages from ' + suite + '/' + arch + '.')
            try:
                presents = sorted(
                    os.listdir(RB_PKG_PATH + '/' + suite + '/' + arch))
            except OSError as e:
                if e.errno != errno.ENOENT:  # that's the 'No such file or
                    raise                    # directory' error (errno 2)
                presents = []
            log.debug('page presents: ' + str(presents))

            # get the existing packages
            query = "SELECT name, suite, architecture FROM sources " + \
                    "WHERE suite='{}' AND architecture='{}'".format(suite, arch)
            cur_pkgs = set([(p.name, p.suite, p.architecture)
                            for p in query_db(query)])

            for page in presents:
                # When diffoscope results exist for a package, we create a page
                # that displays the diffoscope results by default in the main iframe
                # in this subdirectory. Ignore this directory.
                if page == 'diffoscope-results':
                    continue
                pkg = page.rsplit('.', 1)[0]

                if (pkg, suite, arch) not in cur_pkgs:
                    log.info('There is no package named ' + pkg + ' from ' +
                             suite + '/' + arch + ' in the database. ' +
                             'Removing old page.')
                    os.remove(RB_PKG_PATH + '/' + suite + '/' + arch + '/' +
                              page)

            # Additionally clean up the diffoscope results default pages
            log.info('Removing old pages from ' + suite + '/' + arch +
                     '/diffoscope-results/.')
            try:
                presents = sorted(
                    os.listdir(RB_PKG_PATH + '/' + suite + '/' + arch +
                               '/diffoscope-results'))
            except OSError as e:
                if e.errno != errno.ENOENT:  # that's the 'No such file or
                    raise                    # directory' error (errno 2)
                presents = []
            log.debug('diffoscope page presents: ' + str(presents))
            for page in presents:
                pkg = page.rsplit('.', 1)[0]
                if (pkg, suite, arch) not in cur_pkgs:
                    log.info('There is no package named ' + pkg + ' from ' +
                             suite + '/' + arch + '/diffoscope-results in ' +
                             'the database. Removing old page.')
                    os.remove(RB_PKG_PATH + '/' + suite + '/' + arch + '/' +
                              'diffoscope-results/' + page)
def lack_buildinfo():
    log.info('running lack_buildinfo check...')
    bad_pkgs = []
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status NOT IN
                ('blacklisted', 'not for us', 'FTBFS', 'depwait', '404', '')
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        eversion = strip_epoch(version)
        buildinfo = BUILDINFO_PATH + '/' + suite + '/' + arch + '/' + pkg + \
            '_' + eversion + '_' + arch + '.buildinfo'
        if not os.access(buildinfo, os.R_OK):
            bad_pkgs.append((pkg, version, suite, arch))
            log.warning(suite + '/' + arch + '/' + pkg + ' (' + version +
                        ') has been '
                        'successfully built, but a .buildinfo is missing')
    return bad_pkgs
def iterate_over_issues(issues):
    num_issues = str(len(issues))
    for suite in SUITES:
        i = 0
        for issue in sorted(issues):
            log.debug('iterating over issues in ' + suite + '... ' + str(i) +
                      '/' + num_issues)
            log.debug('\t' + str(issue))
            html = gen_html_issue(issue, suite)

            title = 'Notes about issue ' + issue + ' in ' + suite
            destfile = ISSUES_PATH + '/' + suite + '/' + issue + '_issue.html'
            left_nav_html = create_main_navigation(displayed_page='issues')
            write_html_page(title=title, body=html, destfile=destfile,
                            style_note=True, left_nav_html=left_nav_html)

            desturl = REPRODUCIBLE_URL + ISSUES_URI + '/' + suite + '/' + issue + '_issue.html'
            log.debug("Issue created: " + desturl)
            i = i + 1
        log.info('Created ' + str(i) + ' issue pages for ' + suite)
def create_breakages_graph(png_file, main_label):
    png_fullpath = os.path.join(DISTRO_BASE, png_file)
    table = "stats_breakages"
    columns = ["datum", "diffoscope_timeouts", "diffoscope_crashes"]
    query = "SELECT {fields} FROM {table} ORDER BY datum".format(
        fields=", ".join(columns), table=table)
    result = query_db(query)
    result_rearranged = [dict(zip(columns, row)) for row in result]

    with create_temp_file(mode='w') as f:
        csv_tmp_file = f.name
        csv_writer = csv.DictWriter(f, columns)
        csv_writer.writeheader()
        csv_writer.writerows(result_rearranged)
        f.flush()

        graph_command = os.path.join(BIN_PATH, "make_graph.py")
        y_label = "Amount (packages)"
        log.info("Creating graph for stats_breakges.")
        check_call([graph_command, csv_tmp_file, png_fullpath, '2', main_label,
                    y_label, '1920', '960'])
def index_issues(issues, scorefuncs):
    firstscorefunc = next(iter(scorefuncs.values()))
    templ = "\n<table class=\"body\">\n" + tab + "<tr>\n" + tab*2 + "<th>\n" \
          + tab*3 + "Identified issues\n" + tab*2 + "</th>\n" + tab*2 + "<th>\n" \
          + "".join(
            tab*3 + k + "\n" + tab*2 + "</th>\n" + tab*2 + "<th>\n"
            for k in scorefuncs.keys()) \
          + tab*3 + "Affected packages<br/>\n" \
          + tab*3 + "(the 1/4 most-popular ones (within the issue) are underlined)\n" \
          + tab*2 + "</th>\n" + tab + "</tr>\n"
    html = (tab*2).join(templ.splitlines(True))
    for issue in sorted(issues, key=lambda issue: sort_issues(firstscorefunc, issue)):
        html += tab*3 + '<tr>\n'
        html += tab*4 + '<td><a href="' + ISSUES_URI + '/' + defaultsuite + \
                '/' + issue + '_issue.html">' + issue.replace("_", " ") + '</a></td>\n'
        issues_list = issues_count.get(issue, [])
        for scorefunc in scorefuncs.values():
            html += tab*4 + '<td><b>' + str(scorefunc(issues_list)) + '</b></td>\n'
        html += tab*4 + '<td>\n'
        issues_with_popcon = issues_popcon_annotate(issues_list)
        issue_strings = [
            '<span %stitle="popcon score: %s">%s</span>' % (
                'class="package-popular" ' if p[2] else '', p[1], p[0]
            ) for p in issues_with_popcon]
        html += tab*5 + ', '.join(issue_strings) + '\n'
        html += tab*4 + '</td>\n'
        html += tab*3 + '</tr>\n'
    html += tab*2 + '</table>\n'
    html += tab*2 + '<p>For a total of <b>' + \
            str(len([x for x in notes if notes[x].get('issues')])) + \
            '</b> packages categorized in <b>' + str(len(issues)) + \
            '</b> issues.</p>'
    html += tab*2 + '<p>' + NOTESGIT_DESCRIPTION + '</p>'
    title = 'Known issues related to reproducible builds'
    destfile = DISTRO_BASE + '/index_issues.html'
    desturl = DISTRO_URL + '/index_issues.html'
    left_nav_html = create_main_navigation(displayed_page='issues')
    write_html_page(title=title, body=html, destfile=destfile,
                    left_nav_html=left_nav_html)
    log.info('Issues index now available at ' + desturl)
def store_notes():
    log.debug('Removing all notes')
    notes_table = db_table('notes')
    conn_db.execute(notes_table.delete())
    to_insert = []
    for entry in [x for y in sorted(notes) for x in notes[y]]:
        pkg_id = entry['id']
        pkg_version = entry['version']
        pkg_issues = json.dumps(entry['issues'])
        pkg_bugs = json.dumps(entry['bugs'])
        pkg_comments = entry['comments']
        to_insert.append({
            'package_id': pkg_id,
            'version': pkg_version,
            'issues': pkg_issues,
            'bugs': pkg_bugs,
            'comments': pkg_comments
        })

    if to_insert:
        conn_db.execute(notes_table.insert(), to_insert)
        log.info('Saved ' + str(len(to_insert)) + ' notes in the database')
def generate_oldies(arch):
    log.info('Building the oldies page for ' + arch + '...')
    title = 'Oldest results on ' + arch
    html = ''
    for suite in SUITES:
        query = select([
            sources.c.suite, sources.c.architecture, sources.c.name,
            results.c.status, results.c.build_date
        ]).select_from(results.join(sources)).where(
            and_(sources.c.suite == bindparam('suite'),
                 sources.c.architecture == bindparam('arch'),
                 results.c.status != 'blacklisted')).order_by(
                     results.c.build_date).limit(15)
        text = Template('Oldest results on $suite/$arch:')
        rows = query_db(query.params({'arch': arch, 'suite': suite}))
        html += build_leading_text_section({'text': text}, rows, suite, arch)
        html += '<p><table class="scheduled">\n' + tab
        html += '<tr><th class="center">#</th><th class="center">suite</th><th class="center">arch</th>'
        html += '<th class="center">source package</th><th class="center">status</th><th class="center">build date</th></tr>\n'
        for row in rows:
            # 0: suite, 1: arch, 2: pkg name, 3: status, 4: build date
            pkg = row[2]
            html += tab + '<tr><td>&nbsp;</td><td>' + row[0] + '</td>'
            html += '<td>' + row[1] + '</td><td><code>'
            html += Package(pkg).html_link(row[0], row[1])
            html += '</code></td><td>' + convert_into_status_html(str(
                row[3])) + '</td><td>' + row[4] + '</td></tr>\n'
        html += '</table></p>\n'
    destfile = DISTRO_BASE + '/index_' + arch + '_oldies.html'
    desturl = DISTRO_URL + '/index_' + arch + '_oldies.html'
    left_nav_html = create_main_navigation(arch=arch)
    write_html_page(title=title,
                    body=html,
                    destfile=destfile,
                    style_note=True,
                    refresh_every=60,
                    left_nav_html=left_nav_html)
    log.info("Page generated at " + desturl)
def not_unrep_with_dbd_file():
    log.info('running not_unrep_with_dbd_file check...')
    bad_pkgs = []
    query = '''SELECT s.name, r.version, s.suite, s.architecture
               FROM sources AS s JOIN results AS r ON r.package_id=s.id
               WHERE r.status != 'FTBR'
               ORDER BY s.name ASC, s.suite DESC, s.architecture ASC'''
    results = query_db(query)
    for pkg, version, suite, arch in results:
        eversion = strip_epoch(version)
        for prefix, extension in (
            (DBD_PATH, 'html'),
            (DBDTXT_PATH, 'txt.gz'),
            (DBDJSON_PATH, 'json.gz'),
        ):
            # the extensions already include '.gz' where the file is gzipped
            filename = '{}/{}/{}/{}_{}.diffoscope.{}'.format(
                prefix, suite, arch, pkg, eversion, extension)
            if not os.access(filename, os.R_OK):
                continue
            bad_pkgs.append((pkg, version, suite, arch))
            log.warning(filename + ' exists but ' + suite + '/' + arch + '/' + pkg + ' (' + version + ')'
                        ' is not FTBR.')
    return bad_pkgs
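# The candidate paths checked above thus look like (hypothetical values):
#
#   DBD_PATH/unstable/amd64/pkg_1.0-1.diffoscope.html
#   DBDTXT_PATH/unstable/amd64/pkg_1.0-1.diffoscope.txt.gz
#   DBDJSON_PATH/unstable/amd64/pkg_1.0-1.diffoscope.json.gz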
def purge_old_notes(notes):
    removed_pages = []
    to_rebuild = []
    presents = sorted(os.listdir(NOTES_PATH))
    for page in presents:
        pkg = page.rsplit('_', 1)[0]
        log.debug('Checking if ' + page + ' (from ' + pkg + ') is still needed')
        if pkg not in notes:
            log.info('There are no notes for ' + pkg + '. Removing old page.')
            os.remove(NOTES_PATH + '/' + page)
            removed_pages.append(pkg)
    for pkg in removed_pages:
        for suite in SUITES:
            try:
                query = "SELECT s.name " + \
                        "FROM results AS r JOIN sources AS s ON r.package_id=s.id " + \
                        "WHERE s.name='{pkg}' AND r.status != '' AND s.suite='{suite}'"
                query = query.format(pkg=pkg, suite=suite)
                to_rebuild.append(query_db(query)[0][0])
            except IndexError:  # the package is not tested. this can happen if
                pass            # a package got removed from the archive
    if to_rebuild:
        gen_packages_html([Package(x) for x in to_rebuild])
def store_issues():
    issues_table = db_table('issues')
    # Get existing issues
    results = conn_db.execute(sql.select([issues_table.c.name]))
    existing_issues = set([row[0] for row in results])
    to_insert = []
    to_update = []
    for name in issues:
        url = issues[name].get('url', '')
        desc = issues[name]['description']
        if name in existing_issues:
            to_update.append({
                'issuename': name,
                'url': url,
                'description': desc
            })
            # remove this issue from the set, so we know which ones to
            # delete later
            existing_issues.remove(name)
        else:
            to_insert.append({'name': name, 'url': url, 'description': desc})

    if to_update:
        update_query = issues_table.update().\
                  where(issues_table.c.name == sql.bindparam('issuename'))
        conn_db.execute(update_query, to_update)
        log.debug('Issues updated in the database')
    if to_insert:
        conn_db.execute(issues_table.insert(), to_insert)
        log.debug('Issues added to the database')

    # if there are any existing issues left, delete them.
    if existing_issues:
        to_delete = [{'issuename': name} for name in existing_issues]
        delete_query = issues_table.delete().\
                  where(issues_table.c.name == sql.bindparam('issuename'))
        conn_db.execute(delete_query, to_delete)
        log.info("Removed the following issues: " + str(existing_issues))