def build_page_section(page, section, suite, arch):
    try:
        if pages[page].get('global') and pages[page]['global']:
            suite = defaultsuite
            arch = defaultarch
        if pages[page].get('notes') and pages[page]['notes']:
            db_status = section['status'].value.name
            query = queries[section['query']].params({
                'status': db_status, 'suite': suite, 'arch': arch})
            section['icon_status'] = section['status'].value.icon
        else:
            query = queries[section['query']].params({
                'suite': suite, 'arch': arch})
        rows = query_db(query)
    except:
        print_critical_message('A query failed: %s' % query)
        raise
    html = ''
    footnote = True if rows else False
    if not rows:
        # there are no packages in this set, do not output anything
        log.debug('empty query: %s' % query.compile(
            compile_kwargs={"literal_binds": True}))
        return (html, footnote)
    html += build_leading_text_section(section, rows, suite, arch)
    html += '<p>\n' + tab + '<code>\n'
    for row in rows:
        pkg = row[0]
        html += tab * 2 + Package(pkg).html_link(suite, arch)
    html += tab + '</code>\n'
    html += '</p>'
    if section.get('bottom'):
        html += section['bottom']
    html = (tab * 2).join(html.splitlines(True))
    return (html, footnote)
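
# Illustrative sketch, not part of the original module: the shape of a section
# dict that build_page_section() above reads. The key names ('query', 'status',
# 'bottom') mirror the lookups in the function; the concrete values and the
# Status.FTBFS member are assumptions made for illustration only.
#
#   example_section = {
#       'query': 'count_FTBFS',    # name of a prepared statement in 'queries'
#       'status': Status.FTBFS,    # .value.name / .value.icon are used above
#       'bottom': '<p>see the notes for details</p>',  # optional trailing HTML
#   }
#   html, footnote = build_page_section('FTBFS', example_section, suite, arch)
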
def db_update():
    """
    Update the database schema.
    Get a list of queries to perform from schema_updates.
    The need for an update is detected by checking the biggest value in the
    rb_schema table against the biggest value in the schema_updates dictionary.
    """
    current = query_db('SELECT MAX(version) FROM rb_schema')[0][0]
    if not current:
        log.warning('This is probably a new database, there are no '
                    'previous updates noted')
        current = 0
    last = max(schema_updates.keys())
    if current == last:
        return False
    if current > last:
        print_critical_message('The active database schema is higher than '
                               'the last update available.\nPlease check!')
        sys.exit(1)
    log.info('Found schema updates.')
    Session = sessionmaker(bind=DB_ENGINE, autocommit=True)
    session = Session()
    for update in range(current + 1, last + 1):
        log.info('Applying database update #' + str(update) + '. Queries:')
        start_time = datetime.now()
        with session.begin():
            for query in schema_updates[update]:
                log.info('\t' + query)
                session.execute(query)
            session.execute(
                "INSERT INTO rb_schema (version, date) "
                "VALUES (:ver, CURRENT_TIMESTAMP)",
                {'ver': update})
        log.info(str(len(schema_updates[update])) + ' queries executed in ' +
                 str(datetime.now() - start_time))
    return True
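
# Illustrative sketch, not part of the original module: db_update() above
# expects schema_updates to map an integer schema version to the list of SQL
# statements that bring the schema to that version; every version newer than
# the highest one recorded in rb_schema is replayed in order. The statements
# below are invented placeholders, not the project's real migrations.
#
#   schema_updates = {
#       1: ['CREATE TABLE rb_schema '
#           '(version INTEGER NOT NULL, date TIMESTAMP NOT NULL)'],
#       2: ['ALTER TABLE sources ADD COLUMN notify_maintainer INTEGER'],
#   }
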
def load_notes():
    """
    format:
    { 'package_name': [
        {'suite': 'unstable', 'version': '0.0', 'comments': None,
         'bugs': [1234, 5678], 'issues': ['blalba', 'auauau']},
        {'suite': 'stretch', 'version': None, 'comments': 'strstr',
         'bugs': [], 'issues': []}],
      'package_name': <etc> }
    """
    with open(NOTES) as fd:
        original = yaml.load(fd)
    log.info("notes loaded. There are " + str(len(original)) +
             " packages listed")
    notes = {}
    for pkg in sorted(original):
        assert isinstance(pkg, str)
        try:
            assert 'version' in original[pkg]
        except AssertionError:
            print_critical_message(pkg + ' does not include a version')
            irc_msg('The note for ' + pkg + ' does not include a version.')
        query = """SELECT s.id, s.version, s.suite
                   FROM results AS r
                   JOIN sources AS s ON r.package_id=s.id
                   WHERE s.name='{pkg}' AND r.status != ''"""
        # AND s.architecture='amd64'
        query = query.format(pkg=pkg)
        result = query_db(query)
        if not result:
            log.info('Warning: This query produces no results: ' + query +
                     '\nThis means there is no tested package with the name ' +
                     pkg)
            try:
                irc_msg("There is a problem with the note for {} (it may "
                        "have been removed from the archive). "
                        "Please check {} and {}".format(
                            pkg, os.environ['BUILD_URL'],
                            "https://tracker.debian.org/pkg/" + pkg))
            except KeyError:
                log.error('There is a problem with the note for %s - please '
                          'check.', pkg)
        else:
            notes[pkg] = []
            for suite in result:
                pkg_details = {}
                # https://image-store.slidesharecdn.com/c2c44a06-5e28-4296-8d87-419529750f6b-original.jpeg
                try:
                    if apt_pkg.version_compare(
                            str(original[pkg]['version']),
                            str(suite[1])) > 0:
                        continue
                except KeyError:
                    pass
                pkg_details['suite'] = suite[2]
                try:
                    pkg_details['version'] = original[pkg]['version']
                except KeyError:
                    pkg_details['version'] = ''
                pkg_details['comments'] = original[pkg]['comments'] \
                    if 'comments' in original[pkg] else None
                pkg_details['bugs'] = original[pkg]['bugs'] \
                    if 'bugs' in original[pkg] else []
                pkg_details['issues'] = original[pkg]['issues'] \
                    if 'issues' in original[pkg] else []
                pkg_details['id'] = int(suite[0])
                log.debug('adding %s => %s', pkg, pkg_details)
                notes[pkg].append(pkg_details)
    log.info("notes checked. There are " + str(len(notes)) +
             " packages listed")
    return notes
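
# Illustrative sketch, not part of the original module: a single entry in the
# NOTES YAML file as load_notes() above expects it. Field names follow the
# docstring and the lookups in the function; the package name and all values
# are made up.
#
#   foo:
#     version: 1.2.3-1
#     comments: documentation embeds the build date
#     bugs:
#       - 123456
#     issues:
#       - timestamps_in_documentation
#
# load_notes() drops such an entry for suites whose tested version is older
# than the 'version' field and records the matching source id from the db.
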
def build_page(page, suite=None, arch=None):
    gpage = False
    if pages[page].get('global') and pages[page]['global']:
        gpage = True
        suite = defaultsuite
        arch = defaultarch
    if not gpage and suite and not arch:
        print_critical_message('The architecture was not specified while '
                               'building a suite-specific page.')
        sys.exit(1)
    if gpage:
        log.debug('Building the ' + page + ' global index page...')
        title = pages[page]['title']
    else:
        log.debug('Building the ' + page + ' index page for ' + suite + '/' +
                  arch + '...')
        title = pages[page]['title'].format(suite=suite, arch=arch)
    page_sections = pages[page]['body']
    html = ''
    footnote = False
    if pages[page].get('header'):
        if pages[page].get('notes_hint') and pages[page]['notes_hint'] and \
                suite == defaultsuite:
            hint = ' <em>These</em> are the packages with failures that ' \
                   '<em>still need to be investigated</em>.'
        else:
            hint = ''
        if pages[page].get('header_query'):
            html += pages[page]['header'].format(
                tot=query_db(pages[page]['header_query'].format(
                    suite=suite, arch=arch))[0][0],
                suite=suite, arch=arch, hint=hint)
        else:
            html += pages[page].get('header')
    for section in page_sections:
        if gpage:
            if section.get('nosuite') and section['nosuite']:
                # only defaults
                html += build_page_section(page, section, suite, arch)[0]
            else:
                for suite in SUITES:
                    for arch in ARCHS:
                        log.debug('global page §' + section['status'].name +
                                  ' in ' + page + ' for ' + suite + '/' + arch)
                        html += build_page_section(page, section, suite,
                                                   arch)[0]
            footnote = True
        else:
            html1, footnote1 = build_page_section(page, section, suite, arch)
            html += html1
            footnote = True if footnote1 else footnote
    suite_arch_nav_template = None
    if gpage:
        destfile = DISTRO_BASE + '/index_' + page + '.html'
        desturl = DISTRO_URL + '/index_' + page + '.html'
        suite = defaultsuite  # used for the links in create_main_navigation
    else:
        destfile = DISTRO_BASE + '/' + suite + '/' + arch + '/index_' + \
            page + '.html'
        desturl = DISTRO_URL + '/' + suite + '/' + arch + '/index_' + \
            page + '.html'
        suite_arch_nav_template = DISTRO_URI + '/{{suite}}/{{arch}}/index_' + \
            page + '.html'
    left_nav_html = create_main_navigation(
        suite=suite,
        arch=arch,
        displayed_page=page,
        suite_arch_nav_template=suite_arch_nav_template,
    )
    write_html_page(title=title, body=html, destfile=destfile,
                    style_note=True, left_nav_html=left_nav_html)
    log.info('"' + title + '" now available at ' + desturl)
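
# Illustrative sketch, not part of the original module: the shape of a
# pages[page] entry that build_page() above walks. The key names mirror the
# lookups in build_page()/build_page_section(); every concrete value, and the
# Status.FTBFS member, are assumptions made for illustration only.
#
#   pages['FTBFS'] = {
#       'title': 'Packages in {suite}/{arch} which failed to build',
#       'header': '<p>{tot} packages failed to build.{hint}</p>',
#       'header_query': 'SELECT count(*) ...',  # optional, formatted with suite/arch
#       'notes_hint': True,     # optional, enables the investigation hint
#       'global': False,        # True renders one page covering all SUITES/ARCHS
#       'body': [               # list of section dicts, see build_page_section()
#           {'query': 'count_FTBFS', 'status': Status.FTBFS},
#       ],
#   }
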
    if result == '1':
        log.info("user exists.")
    else:
        log.info("Postgres role %s does not exist. Creating role." % DB_USER)
        check_call(['sudo', '-u', 'postgres', 'createuser', '-w', DB_USER])

    # check if the database exists
    log.info("Checking if postgres database exists...")
    query = "SELECT 1 FROM pg_database WHERE datname='%s'" % DB_NAME
    command = ['sudo', '-u', 'postgres', 'psql', '-tAc', query]
    result = subprocess.check_output(command).decode("utf-8").strip()
    if result == '1':
        print_critical_message(
            'Database "%s" already exists. This script can'
            ' only be run on a completely new database. If you are certain you'
            ' want to clone "%s" in "%s", please drop the database "%s" and'
            ' run this script again.'
            % (DB_NAME, BACKUP_FILE, DB_NAME, DB_NAME))
        sys.exit(1)
    else:
        log.info("Postgres database %s does not exist. Creating database."
                 % DB_NAME)
        check_call([
            'sudo', '-u', 'postgres', 'createdb', '-O', DB_USER, '-w', DB_NAME
        ])
except FileNotFoundError:
    print_critical_message(
        "Postgres is not installed. Install postgres before continuing.")
    sys.exit(1)


def update_sources_db(suite, arch, sources):
    # extract relevant info (package name and version) from the sources file
    new_pkgs = set()
    newest_version = {}
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite, arch)
        if 'Extra-Source-Only' in src and src['Extra-Source-Only'] == 'yes':
            log.debug('Ignoring {} due to Extra-Source-Only'.format(pkg))
            continue
        # only keep the most recent version of a src for each package/suite/arch
        key = src['Package'] + suite + arch
        if key in newest_version:
            oldversion = newest_version[key]
            oldpackage = (src['Package'], oldversion, suite, arch)
            new_pkgs.remove(oldpackage)
        newest_version[key] = src['Version']
        new_pkgs.add(pkg)

    # get the current packages in the database
    query = "SELECT name, version, suite, architecture FROM sources " + \
            "WHERE suite='{}' AND architecture='{}'".format(suite, arch)
    cur_pkgs = set([(p.name, p.version, p.suite, p.architecture)
                    for p in query_db(query)])

    pkgs_to_add = []
    updated_pkgs = []
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: %s',
              different_pkgs)
    for pkg in different_pkgs:
        # pkg: (name, version, suite, arch)
        query = "SELECT id, version, notify_maintainer FROM sources " + \
                "WHERE name='{}' AND suite='{}' AND architecture='{}'"
        query = query.format(pkg[0], pkg[2], pkg[3])
        try:
            result = query_db(query)[0]
        except IndexError:
            # new package
            pkgs_to_add.append({
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
            })
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        if apt_pkg.version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had ' +
                      old_version + ')')
            updated_pkgs.append({
                'update_id': pkg_id,
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
                'notify_maintainer': notify_maint,
            })

    # Now actually update the database:
    sources_table = db_table('sources')

    # updated packages
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    if updated_pkgs:
        transaction = conn_db.begin()
        update_query = sources_table.update().\
            where(sources_table.c.id == sql.bindparam('update_id'))
        conn_db.execute(update_query, updated_pkgs)
        transaction.commit()

    # new packages
    if pkgs_to_add:
        log.info('Now inserting %i new sources in the database: %s',
                 len(pkgs_to_add), pkgs_to_add)
        transaction = conn_db.begin()
        conn_db.execute(sources_table.insert(), pkgs_to_add)
        transaction.commit()

    # RM'ed packages
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting %i removed packages: %s', len(rmed_pkgs),
             rmed_pkgs)
    rmed_pkgs_id = []
    pkgs_to_rm = []
    query = "SELECT id FROM sources WHERE name='{}' AND suite='{}' " + \
            "AND architecture='{}'"
    for pkg in rmed_pkgs:
        result = query_db(query.format(pkg, suite, arch))
        rmed_pkgs_id.append({'deleteid': result[0][0]})
        pkgs_to_rm.append({'name': pkg, 'suite': suite, 'architecture': arch})
    log.debug('removed packages ID: %s',
              [str(x['deleteid']) for x in rmed_pkgs_id])
    log.debug('removed packages: %s', pkgs_to_rm)
    if rmed_pkgs_id:
        transaction = conn_db.begin()
        results_table = db_table('results')
        schedule_table = db_table('schedule')
        notes_table = db_table('notes')
        removed_packages_table = db_table('removed_packages')
        delete_results_query = results_table.delete().\
            where(results_table.c.package_id == sql.bindparam('deleteid'))
        delete_schedule_query = schedule_table.delete().\
            where(schedule_table.c.package_id == sql.bindparam('deleteid'))
        delete_notes_query = notes_table.delete().\
            where(notes_table.c.package_id == sql.bindparam('deleteid'))
        delete_sources_query = sources_table.delete().\
            where(sources_table.c.id == sql.bindparam('deleteid'))
        conn_db.execute(delete_results_query, rmed_pkgs_id)
        conn_db.execute(delete_schedule_query, rmed_pkgs_id)
        conn_db.execute(delete_notes_query, rmed_pkgs_id)
        conn_db.execute(delete_sources_query, rmed_pkgs_id)
        conn_db.execute(removed_packages_table.insert(), pkgs_to_rm)
        transaction.commit()

    # finally check whether the db has the correct number of packages
    query = "SELECT count(*) FROM sources WHERE suite='{}' " + \
            "AND architecture='{}'"
    pkgs_end = query_db(query.format(suite, arch))
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of sources in the Sources file'
                               ' is different from the one in the DB!')
        log.critical('sources in the debian archive for the %s suite: %s',
                     suite, str(count_new_pkgs))
        log.critical('sources in the reproducible db for the %s suite: %s',
                     suite, str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html([Package(x['name']) for x in pkgs_to_add],
                          no_clean=True)
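
# Illustrative sketch, not part of the original module: update_sources_db()
# above expects the decoded text of a Debian Sources index. Assuming such an
# index has already been downloaded and uncompressed to a local file named
# 'Sources' (a placeholder, not a path used by this script), a caller could do:
#
#   with open('Sources') as f:
#       update_sources_db('unstable', 'amd64', f.read())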