def purge_old_notes(notes):
    """Delete note pages for packages that no longer have notes, then
    queue a rebuild of the package pages that referenced them."""
    dropped = []
    rebuild = []
    # Walk the rendered note pages and drop those without a backing note.
    for page in sorted(os.listdir(NOTES_PATH)):
        pkg = page.rsplit('_', 1)[0]
        log.debug('Checking if ' + page + ' (from ' + pkg + ') is still needed')
        if pkg in notes:
            continue
        log.info('There are no notes for ' + pkg + '. Removing old page.')
        os.remove(NOTES_PATH + '/' + page)
        dropped.append(pkg)
    # For every removed page, look the package up in each suite so its
    # HTML can be regenerated without the note link.
    for pkg in dropped:
        for suite in SUITES:
            query = ('SELECT s.name '
                     'FROM results AS r JOIN sources AS s ON r.package_id=s.id '
                     'WHERE s.name="{pkg}" AND r.status != "" AND s.suite="{suite}"'
                     ).format(pkg=pkg, suite=suite)
            try:
                rebuild.append(query_db(query)[0][0])
            except IndexError:
                # the package is not tested; this can happen if a package
                # got removed from the archive
                pass
    if rebuild:
        gen_packages_html([Package(name) for name in rebuild])
def purge_old_notes(notes):
    """Remove note pages whose package no longer has a note, and rebuild
    the package pages that used to link to them."""
    stale = []
    # First pass: delete rendered pages with no corresponding note.
    for page in sorted(os.listdir(NOTES_PATH)):
        pkg = page.rsplit('_', 1)[0]
        log.debug('Checking if ' + page + ' (from ' + pkg + ') is still needed')
        if pkg not in notes:
            log.info('There are no notes for ' + pkg + '. Removing old page.')
            os.remove(NOTES_PATH + '/' + page)
            stale.append(pkg)
    # Second pass: collect the affected package names per suite so their
    # HTML pages can be regenerated.
    template = ("SELECT s.name "
                "FROM results AS r JOIN sources AS s ON r.package_id=s.id "
                "WHERE s.name='{pkg}' AND r.status != '' AND s.suite='{suite}'")
    to_rebuild = []
    for pkg in stale:
        for suite in SUITES:
            rows = query_db(template.format(pkg=pkg, suite=suite))
            try:
                to_rebuild.append(rows[0][0])
            except IndexError:
                # the package is not tested; this can happen if a package
                # got removed from the archive
                pass
    if to_rebuild:
        gen_packages_html([Package(name) for name in to_rebuild])
# NOTE(review): the first two statements are the tail of a function defined
# outside this chunk (they close a keyword-argument call and log the result);
# the indentation here is reconstructed — verify against the full file.
                    left_nav_html=left_nav_html)
    log.info('Issues index now available at ' + desturl)


# Script entry point: load notes and issues, render their pages, build the
# ranked issues index, purge stale pages, and rebuild the per-suite/arch
# summary pages.
if __name__ == '__main__':
    issues_count = {}
    bugs = Bugs().bugs
    notes = load_notes()
    issues = load_issues()
    iterate_over_notes(notes)
    iterate_over_issues(issues)
    try:
        # Rank the issues under three metrics; each callable receives the
        # affected packages (presumably a list of source package names,
        # given popcon.source_package(*l) — TODO confirm) and reduces it
        # to a sortable number.
        index_issues(issues, OrderedDict([
            ("Sum of packages' popcon scores",
             lambda l: sum(popcon.source_package(*l).values())),
            ("Sum of square-roots of packages' popcon scores",
             lambda l: int(sum(map(sqrt, popcon.source_package(*l).values())))),
            ("Number of packages", len),
        ]))
    except UnicodeDecodeError:
        # Best-effort: a malformed popcon payload must not abort the whole
        # page build, so the ranked index is simply skipped in that case.
        pass
    purge_old_notes(notes)
    purge_old_issues(issues)
    # Rebuild the page of every package that currently has a note.
    gen_packages_html([Package(x) for x in notes])
    for suite in SUITES:
        for arch in ARCHS:
            build_page('notes', suite, arch)
            build_page('no_notes', suite, arch)
            build_page('FTBFS', suite, arch)
# NOTE(review): the first string literal is the tail of an `html += ...`
# statement started outside this chunk; indentation is reconstructed —
# verify against the full file.
            '</b> issues.</p>'
    html += tab*2 + '<p>Notes are stored in <a href="https://anonscm.debian.org/cgit/reproducible/notes.git" target="_parent">notes.git</a> and are targeted at packages in \'unstable\'.</p>'
    title = 'Known issues related to reproducible builds'
    destfile = BASE + '/index_issues.html'
    desturl = REPRODUCIBLE_URL + '/index_issues.html'
    write_html_page(title=title, body=html, destfile=destfile)
    log.info('Issues index now available at ' + desturl)


# Script entry point: load notes and issues, render their pages and the
# issues index, purge stale pages, and rebuild the summary pages.
if __name__ == '__main__':
    # NOTE(review): all_pkgs is not referenced again within this chunk —
    # it may be consumed by code outside the visible span, or be dead.
    # Confirm before removing.
    all_pkgs = query_db('SELECT s.name, r.status, s.suite, s.architecture ' +
                        'FROM results AS r JOIN sources AS s ON r.package_id=s.id ' +
                        'ORDER BY s.name')
    issues_count = {}
    bugs = get_bugs()
    notes = load_notes()
    issues = load_issues()
    iterate_over_notes(notes)
    iterate_over_issues(issues)
    index_issues(issues)
    purge_old_notes(notes)
    purge_old_issues(issues)
    # Rebuild the page of every package that currently has a note.
    gen_packages_html([Package(x) for x in notes])
    for suite in SUITES:
        for arch in ARCHS:
            # presumably armhf is only tested in unstable, so other
            # suite/arch combinations are skipped — confirm.
            if arch == 'armhf' and suite != 'unstable':
                continue
            build_page('notes', suite, arch)
            build_page('no_notes', suite, arch)
            build_page('FTBFS', suite, arch)
def update_sources_db(suite, arch, sources):
    """Sync the `sources` table with a Debian Sources index.

    Parses *sources* (the text content of a Sources file) for
    *suite*/*arch*, then inserts new packages, updates rows whose archive
    version is newer, and deletes packages that disappeared from the
    archive (cascading to their results, schedule entries and notes).
    Exits the whole process with status 1 if the final row count does not
    match the archive.
    """
    # extract relevant info (package name and version) from the sources file
    new_pkgs = set()
    newest_version = {}
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite, arch)
        if 'Extra-Source-Only' in src and src['Extra-Source-Only'] == 'yes':
            log.debug('Ignoring {} due to Extra-Source-Only'.format(pkg))
            continue
        # only keep the most recent version of a src for each package/suite/arch
        # NOTE(review): "most recent" here means last-seen — no version
        # comparison is performed, so this assumes later stanzas in the
        # Sources file carry the newer version. Confirm that assumption.
        key = src['Package'] + suite + arch
        if key in newest_version:
            oldversion = newest_version[key]
            oldpackage = (src['Package'], oldversion, suite, arch)
            new_pkgs.remove(oldpackage)
        newest_version[key] = src['Version']
        new_pkgs.add(pkg)
    # get the current packages in the database
    query = "SELECT name, version, suite, architecture FROM sources " + \
            "WHERE suite='{}' AND architecture='{}'".format(suite, arch)
    cur_pkgs = set([(p.name, p.version, p.suite, p.architecture)
                    for p in query_db(query)])
    pkgs_to_add = []
    updated_pkgs = []
    # Anything in the archive but not in the db is either brand new or
    # carries a different version.
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: %s',
              different_pkgs)
    for pkg in different_pkgs:
        # pkg: (name, version, suite, arch)
        query = "SELECT id, version, notify_maintainer FROM sources " + \
                "WHERE name='{}' AND suite='{}' AND architecture='{}'"
        query = query.format(pkg[0], pkg[2], pkg[3])
        try:
            result = query_db(query)[0]
        except IndexError:
            # the package is not in the db at all: schedule an insert
            pkgs_to_add.append({
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
            })
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        # Only update when the archive version is strictly newer.
        if version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had ' +
                      old_version + ')')
            updated_pkgs.append({
                'update_id': pkg_id,
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
                'notify_maintainer': notify_maint,
            })
    # Now actually update the database:
    sources_table = db_table('sources')
    # updated packages
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    if updated_pkgs:
        transaction = conn_db.begin()
        update_query = sources_table.update().\
            where(sources_table.c.id == sql.bindparam('update_id'))
        conn_db.execute(update_query, updated_pkgs)
        transaction.commit()
    # new packages
    if pkgs_to_add:
        log.info('Now inserting %i new sources in the database: %s',
                 len(pkgs_to_add), pkgs_to_add)
        transaction = conn_db.begin()
        conn_db.execute(sources_table.insert(), pkgs_to_add)
        transaction.commit()
    # RM'ed packages
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting %i removed packages: %s', len(rmed_pkgs),
             rmed_pkgs)
    rmed_pkgs_id = []
    pkgs_to_rm = []
    query = "SELECT id FROM sources WHERE name='{}' AND suite='{}' " + \
            "AND architecture='{}'"
    for pkg in rmed_pkgs:
        result = query_db(query.format(pkg, suite, arch))
        rmed_pkgs_id.append({'deleteid': result[0][0]})
        pkgs_to_rm.append({'name': pkg, 'suite': suite,
                           'architecture': arch})
    log.debug('removed packages ID: %s',
              [str(x['deleteid']) for x in rmed_pkgs_id])
    log.debug('removed packages: %s', pkgs_to_rm)
    if rmed_pkgs_id:
        # Cascade the deletion to every table that references the package
        # id, and record the removal in removed_packages — all inside one
        # transaction.
        transaction = conn_db.begin()
        results_table = db_table('results')
        schedule_table = db_table('schedule')
        notes_table = db_table('notes')
        removed_packages_table = db_table('removed_packages')
        delete_results_query = results_table.delete().\
            where(results_table.c.package_id == sql.bindparam('deleteid'))
        delete_schedule_query = schedule_table.delete().\
            where(schedule_table.c.package_id == sql.bindparam('deleteid'))
        delete_notes_query = notes_table.delete().\
            where(notes_table.c.package_id == sql.bindparam('deleteid'))
        delete_sources_query = sources_table.delete().\
            where(sources_table.c.id == sql.bindparam('deleteid'))
        conn_db.execute(delete_results_query, rmed_pkgs_id)
        conn_db.execute(delete_schedule_query, rmed_pkgs_id)
        conn_db.execute(delete_notes_query, rmed_pkgs_id)
        conn_db.execute(delete_sources_query, rmed_pkgs_id)
        conn_db.execute(removed_packages_table.insert(), pkgs_to_rm)
        transaction.commit()
    # finally check whether the db has the correct number of packages
    query = "SELECT count(*) FROM sources WHERE suite='{}' " + \
            "AND architecture='{}'"
    pkgs_end = query_db(query.format(suite, arch))
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of source in the Sources file' +
                               ' is different than the one in the DB!')
        log.critical('source in the debian archive for the %s suite: %s',
                     suite, str(count_new_pkgs))
        log.critical('source in the reproducible db for the %s suite: %s',
                     suite, str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html([Package(x['name']) for x in pkgs_to_add],
                          no_clean=True)
# Flat script section: relies on module globals (conn_udd, conn_db,
# packages, local_args, maintainer, log) defined earlier in the file.
if maintainer:
    # Look up every source package maintained by the given address in UDD.
    global conn_udd
    if not conn_udd:
        conn_udd = start_udd_connection()
    c = conn_udd.cursor()
    # NOTE(review): maintainer is interpolated straight into the SQL
    # string — acceptable only if it is trusted CLI input; a parameterized
    # query would be safer.
    query = "SELECT source FROM sources WHERE maintainer_email = '{}' " + \
            "AND release = 'sid' AND component = 'main'"
    try:
        c.execute(query.format(maintainer))
        pkgs = [x[0] for x in c.fetchall()]
    except IndexError:
        # NOTE(review): fetchall() returns [] when there are no rows and
        # the comprehension cannot raise IndexError, so this "no packages"
        # branch looks unreachable — an explicit `if not pkgs:` check was
        # probably intended. Confirm before changing.
        log.info('No packages maintained by ' + maintainer)
        sys.exit(0)
    finally:
        # Always release the UDD connection, even on the error path.
        conn_udd.close()
    log.info('Packages maintained by ' + maintainer + ':')
    log.info('\t' + ', '.join(pkgs))
    packages.extend(pkgs)
# Toggle the notification flag for every selected package, then rebuild
# the affected pages.
c = conn_db.cursor()
for package in packages:
    process_pkg(package, local_args.deactivate)
gen_packages_html([Package(x) for x in packages], no_clean=True)
build_page('notify')
if local_args.deactivate:
    _good('Notifications disabled for ' + str(len(packages)) + ' package(s)')
else:
    _good('Notifications enabled for ' + str(len(packages)) + ' package(s)')
def update_sources_db(suite, arch, sources):
    """Sync the `sources` table (sqlite) with a Debian Sources index.

    Parses *sources* (the text content of a Sources file) for
    *suite*/*arch*, inserts new packages, REPLACEs rows whose archive
    version is newer, and deletes packages gone from the archive
    (cascading to results and schedule). Exits the whole process with
    status 1 if the final row count does not match the archive.
    """
    # extract relevant info (package name and version) from the sources file
    new_pkgs = []
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite, arch)
        new_pkgs.append(pkg)
    # get the current packages in the database
    # NOTE(review): SQL values are double-quoted here; sqlite accepts that
    # only as a compatibility quirk (it normally means identifiers) — and
    # the values are string-formatted rather than bound. Works for trusted
    # archive data, but parameterized queries would be more robust.
    query = 'SELECT name, version, suite, architecture FROM sources ' + \
            'WHERE suite="{}" AND architecture="{}"'.format(suite, arch)
    cur_pkgs = query_db(query)
    pkgs_to_add = []
    updated_pkgs = []
    # Anything in the archive but not in the db is either brand new or
    # carries a different version. Assumes query_db returns plain tuples
    # in (name, version, suite, architecture) order — TODO confirm.
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: ' +
              str(different_pkgs))
    for pkg in different_pkgs:
        # pkg: (name, version, suite, arch)
        query = 'SELECT id, version, notify_maintainer FROM sources ' + \
                'WHERE name="{}" AND suite="{}" AND architecture="{}"'
        query = query.format(pkg[0], pkg[2], pkg[3])
        try:
            result = query_db(query)[0]
        except IndexError:
            # the package is not in the db at all: schedule an insert
            pkgs_to_add.append(pkg)
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        # Only update when the archive version is strictly newer.
        if version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had ' +
                      old_version + ')')
            updated_pkgs.append(
                (pkg_id, pkg[0], pkg[1], pkg[2], pkg[3], notify_maint))
    # Now actually update the database:
    cursor = conn_db.cursor()
    # updated packages
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    cursor.executemany(
        'REPLACE INTO sources ' +
        '(id, name, version, suite, architecture, notify_maintainer) ' +
        'VALUES (?, ?, ?, ?, ?, ?)',
        updated_pkgs)
    conn_db.commit()
    # new packages
    log.info('Now inserting ' + str(len(pkgs_to_add)) +
             ' new sources in the database: ' + str(pkgs_to_add))
    cursor.executemany(
        'INSERT INTO sources ' +
        '(name, version, suite, architecture) ' +
        'VALUES (?, ?, ?, ?)',
        pkgs_to_add)
    conn_db.commit()
    # RM'ed packages
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting ' + str(len(rmed_pkgs)) + ' removed packages: ' +
             str(rmed_pkgs))
    rmed_pkgs_id = []
    pkgs_to_rm = []
    query = 'SELECT id FROM sources WHERE name="{}" AND suite="{}" ' + \
            'AND architecture="{}"'
    for pkg in rmed_pkgs:
        result = query_db(query.format(pkg, suite, arch))
        # result rows are 1-tuples of ids, which executemany below can
        # consume directly as parameter sequences.
        rmed_pkgs_id.extend(result)
        pkgs_to_rm.append((pkg, suite, arch))
    log.debug('removed packages ID: ' +
              str([str(x[0]) for x in rmed_pkgs_id]))
    log.debug('removed packages: ' + str(pkgs_to_rm))
    # Cascade the deletion to results and schedule, and record the removal.
    cursor.executemany('DELETE FROM sources '
                       'WHERE id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM results '
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM schedule '
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany(
        'INSERT INTO removed_packages '
        '(name, suite, architecture) '
        'VALUES (?, ?, ?)', pkgs_to_rm)
    conn_db.commit()
    # finally check whether the db has the correct number of packages
    query = 'SELECT count(*) FROM sources WHERE suite="{}" ' + \
            'AND architecture="{}"'
    pkgs_end = query_db(query.format(suite, arch))
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of source in the Sources file' +
                               ' is different than the one in the DB!')
        log.critical('source in the debian archive for the ' + suite +
                     ' suite:' + str(count_new_pkgs))
        log.critical('source in the reproducible db for the ' + suite +
                     ' suite:' + str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html([Package(x[0]) for x in pkgs_to_add],
                          no_clean=True)
# NOTE(review): fragment starting mid-structure (a guard such as
# `if maintainer:` likely precedes it outside this chunk); indentation is
# reconstructed at top level — verify against the full file. Relies on
# module globals conn_udd, conn_db, packages, local_args, maintainer, log.
global conn_udd
if not conn_udd:
    conn_udd = start_udd_connection()
c = conn_udd.cursor()
# NOTE(review): maintainer is interpolated straight into the SQL string —
# acceptable only for trusted CLI input; parameterized queries would be safer.
query = "SELECT source FROM sources WHERE maintainer_email = '{}' " + \
        "AND release = 'sid' AND component = 'main'"
try:
    c.execute(query.format(maintainer))
    pkgs = [x[0] for x in c.fetchall()]
except IndexError:
    # NOTE(review): fetchall() returns [] for no rows and the comprehension
    # cannot raise IndexError, so this branch looks unreachable — an
    # explicit `if not pkgs:` check was probably intended. Confirm.
    log.info('No packages maintained by ' + maintainer)
    sys.exit(0)
finally:
    # Always release the UDD connection, even on the error path.
    conn_udd.close()
log.info('Packages maintained by ' + maintainer + ':')
log.info('\t' + ', '.join(pkgs))
packages.extend(pkgs)
# Toggle the notification flag for every selected package, then rebuild
# the affected pages.
c = conn_db.cursor()
for package in packages:
    process_pkg(package, local_args.deactivate)
gen_packages_html([Package(x) for x in packages], no_clean=True)
build_page('notify')
if local_args.deactivate:
    _good('Notifications disabled for ' + str(len(packages)) + ' package(s)')
else:
    _good('Notifications enabled for ' + str(len(packages)) + ' package(s)')
def update_sources_db(suite, arch, sources):
    """Sync the `sources` table (sqlite) with a Debian Sources index.

    Near-duplicate of another variant in this file. Parses *sources*
    (the text content of a Sources file) for *suite*/*arch*, inserts new
    packages, REPLACEs rows whose archive version is newer, and deletes
    packages gone from the archive (cascading to results and schedule).
    Exits the whole process with status 1 if the final row count does
    not match the archive.
    """
    # extract relevant info (package name and version) from the sources file
    new_pkgs = []
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite, arch)
        new_pkgs.append(pkg)
    # get the current packages in the database
    # NOTE(review): double-quoted SQL values and string formatting — sqlite
    # tolerates this, but bound parameters would be more robust.
    query = 'SELECT name, version, suite, architecture FROM sources ' + \
            'WHERE suite="{}" AND architecture="{}"'.format(suite, arch)
    cur_pkgs = query_db(query)
    pkgs_to_add = []
    updated_pkgs = []
    # Assumes query_db returns plain tuples in (name, version, suite,
    # architecture) order so they compare equal to new_pkgs entries — TODO
    # confirm.
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: ' +
              str(different_pkgs))
    for pkg in different_pkgs:
        # pkg: (name, version, suite, arch)
        query = 'SELECT id, version, notify_maintainer FROM sources ' + \
                'WHERE name="{}" AND suite="{}" AND architecture="{}"'
        query = query.format(pkg[0], pkg[2], pkg[3])
        try:
            result = query_db(query)[0]
        except IndexError:
            # the package is not in the db at all: schedule an insert
            pkgs_to_add.append(pkg)
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        # Only update when the archive version is strictly newer.
        if version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had ' +
                      old_version + ')')
            updated_pkgs.append(
                (pkg_id, pkg[0], pkg[1], pkg[2], pkg[3], notify_maint))
    # Now actually update the database:
    cursor = conn_db.cursor()
    # updated packages
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    cursor.executemany(
        'REPLACE INTO sources ' +
        '(id, name, version, suite, architecture, notify_maintainer) ' +
        'VALUES (?, ?, ?, ?, ?, ?)',
        updated_pkgs)
    conn_db.commit()
    # new packages
    log.info('Now inserting ' + str(len(pkgs_to_add)) +
             ' new sources in the database: ' + str(pkgs_to_add))
    cursor.executemany('INSERT INTO sources ' +
                       '(name, version, suite, architecture) ' +
                       'VALUES (?, ?, ?, ?)',
                       pkgs_to_add)
    conn_db.commit()
    # RM'ed packages
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting ' + str(len(rmed_pkgs)) + ' removed packages: ' +
             str(rmed_pkgs))
    rmed_pkgs_id = []
    pkgs_to_rm = []
    query = 'SELECT id FROM sources WHERE name="{}" AND suite="{}" ' + \
            'AND architecture="{}"'
    for pkg in rmed_pkgs:
        result = query_db(query.format(pkg, suite, arch))
        # result rows are 1-tuples of ids, consumable by executemany below.
        rmed_pkgs_id.extend(result)
        pkgs_to_rm.append((pkg, suite, arch))
    log.debug('removed packages ID: ' +
              str([str(x[0]) for x in rmed_pkgs_id]))
    log.debug('removed packages: ' + str(pkgs_to_rm))
    # Cascade the deletion to results and schedule, and record the removal.
    cursor.executemany('DELETE FROM sources '
                       'WHERE id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM results '
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany('DELETE FROM schedule '
                       'WHERE package_id=?', rmed_pkgs_id)
    cursor.executemany('INSERT INTO removed_packages '
                       '(name, suite, architecture) '
                       'VALUES (?, ?, ?)', pkgs_to_rm)
    conn_db.commit()
    # finally check whether the db has the correct number of packages
    query = 'SELECT count(*) FROM sources WHERE suite="{}" ' + \
            'AND architecture="{}"'
    pkgs_end = query_db(query.format(suite, arch))
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of source in the Sources file' +
                               ' is different than the one in the DB!')
        log.critical('source in the debian archive for the ' + suite +
                     ' suite:' + str(count_new_pkgs))
        log.critical('source in the reproducible db for the ' + suite +
                     ' suite:' + str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html([Package(x[0]) for x in pkgs_to_add],
                          no_clean=True)