def gen_all_rb_pkg_pages(no_clean=False):
    """
    Regenerate the per-package HTML pages for every source package in all
    configured suites, then purge pages for packages no longer present.

    :param no_clean: accepted for interface compatibility; the cleanup is
        always deferred to purge_old_pages() at the end.
    """
    query = 'SELECT DISTINCT name FROM sources WHERE suite = ANY(:s)'
    rows = query_db(sqlalchemy.text(query), s=SUITES)
    # no_notes=True: skip loading notes here, the pages don't need them
    pkgs = [Package(str(i[0]), no_notes=True) for i in rows]
    # fixed typo: 'package' -> 'packages'
    log.info('Processing all %s packages from all suites/architectures',
             len(pkgs))
    gen_packages_html(pkgs, no_clean=True)  # we clean at the end
    purge_old_pages()
def generate_live_status_table(arch):
    """
    Build the HTML table of builds currently running on the given
    architecture, one row per scheduled package whose build has started.

    Returns the table as an HTML string.
    """
    # Average duration of past reproducible/FTBR builds of the same package,
    # correlated by name/suite/architecture (stats_build has no package_id).
    averagesql = select([
        func.coalesce(func.avg(cast(stats_build.c.build_duration, Integer)), 0)
    ]).where(
        and_(
            stats_build.c.status.in_(('reproducible', 'FTBR')),
            stats_build.c.name == sources.c.name,
            stats_build.c.suite == sources.c.suite,
            stats_build.c.architecture == sources.c.architecture,
        )).as_scalar()
    query = select([
        sources.c.id,
        sources.c.suite,
        sources.c.architecture,
        sources.c.name,
        sources.c.version,
        schedule.c.date_build_started,
        results.c.status,
        results.c.build_duration,
        averagesql,
        schedule.c.job,
    ]).select_from(sources.join(schedule).join(results, isouter=True)).where(
        and_(
            schedule.c.date_build_started != None,
            sources.c.architecture == bindparam('arch'),
        )).order_by(schedule.c.date_scheduled)
    html = ''
    rows = query_db(query.params({'arch': arch}))
    html += '<p><table class="scheduled">\n' + tab
    html += '<tr><th class="center">#</th><th class="center">src pkg id</th><th class="center">suite</th><th class="center">arch</th>'
    # bug fix: a stray extra '</th>' used to follow the version header,
    # producing invalid HTML
    html += '<th class="center">source package</th><th class="center">version</th>'
    html += '<th class="center">build started</th><th class="center">previous build status</th>'
    html += '<th class="center">previous build duration</th><th class="center">average build duration</th><th class="center">builder job</th>'
    html += '</tr>\n'
    # (an unused 'counter' local was removed; nothing ever read it)
    for row in rows:
        # row: 0 id, 1 suite, 2 arch, 3 name, 4 version, 5 build start,
        #      6 previous status, 7 previous duration, 8 avg duration, 9 job
        suite = row[1]
        arch = row[2]
        pkg = row[3]
        duration = convert_into_hms_string(row[7])
        avg_duration = convert_into_hms_string(row[8])
        html += tab + '<tr><td> </td><td>' + str(row[0]) + '</td>'
        html += '<td>' + suite + '</td><td>' + arch + '</td>'
        html += '<td><code>' + Package(pkg).html_link(
            suite, arch, bugs=False) + '</code></td>'
        html += '<td>' + str(row[4]) + '</td><td>' + str(row[5]) + '</td>'
        html += '<td>' + convert_into_status_html(
            str(row[6])
        ) + '</td><td>' + duration + '</td><td>' + avg_duration + '</td>'
        html += '<td><a href="https://tests.reproducible-builds.org/cgi-bin/nph-logwatch?' \
            + str(row[9]) + '">' + str(row[9]) + '</a></td>'
        html += '</tr>\n'
    html += '</table></p>\n'
    return html
def _gen_packages_html(header, pkgs):
    """
    Render a bold count, a header and a <pre> list of package links.

    Each entry of pkgs is indexed as (name, version, suite, arch).
    Returns '' when pkgs is empty.
    """
    if not pkgs:
        return ''
    parts = ['<p><b>' + str(len(pkgs)) + '</b> ', header, '<br/><pre>\n']
    for entry in pkgs:
        link = Package(entry[0]).html_link(entry[2], entry[3],
                                           bugs=False).strip()
        parts.append(tab + link +
                     ' (' + entry[1] + ' in ' + entry[2] + '/' + entry[3] +
                     ')\n')
    parts.append('</pre></p>\n')
    return ''.join(parts)
def build_page_section(page, section, suite, arch):
    """
    Build one section of an index page from its configured query.

    :returns: (html, footnote) — footnote is True when the query returned
        rows, so the caller knows to append the footnote block.
    """
    # bug fix: 'query' is initialized up front so the except branch can log
    # it even when the failure happens before its assignment
    query = None
    try:
        if pages[page].get('global') and pages[page]['global']:
            # global pages always show the default suite/arch
            suite = defaultsuite
            arch = defaultarch
        if pages[page].get('notes') and pages[page]['notes']:
            db_status = section['status'].value.name
            query = queries[section['query']].params({
                'status': db_status,
                'suite': suite,
                'arch': arch
            })
            section['icon_status'] = section['status'].value.icon
        else:
            query = queries[section['query']].params({
                'suite': suite,
                'arch': arch
            })
        rows = query_db(query)
    except Exception:  # was a bare except; narrowed while keeping log+re-raise
        print_critical_message('A query failed: %s' % query)
        raise
    html = ''
    footnote = bool(rows)
    if not rows:  # there are no package in this set, do not output anything
        log.debug('empty query: %s' %
                  query.compile(compile_kwargs={"literal_binds": True}))
        return (html, footnote)
    html += build_leading_text_section(section, rows, suite, arch)
    html += '<p>\n' + tab + '<code>\n'
    for row in rows:
        pkg = row[0]
        html += tab * 2 + Package(pkg).html_link(suite, arch)
    # was a for/else whose else-branch always ran (the loop has no break);
    # flattened to a plain statement for clarity — behavior unchanged
    html += tab + '</code>\n'
    html += '</p>'
    if section.get('bottom'):
        html += section['bottom']
    # indent every produced line under the enclosing page markup
    html = (tab * 2).join(html.splitlines(True))
    return (html, footnote)
def generate_oldies(arch):
    """Write the 'oldest results' index page for one architecture."""
    log.info('Building the oldies page for ' + arch + '...')
    title = 'Oldest results on ' + arch
    pieces = []
    for suite in SUITES:
        query = select([
            sources.c.suite, sources.c.architecture, sources.c.name,
            results.c.status, results.c.build_date
        ]).select_from(results.join(sources)).where(
            and_(sources.c.suite == bindparam('suite'),
                 sources.c.architecture == bindparam('arch'),
                 results.c.status != 'blacklisted')).order_by(
                     results.c.build_date).limit(15)
        text = Template('Oldest results on $suite/$arch:')
        rows = query_db(query.params({'arch': arch, 'suite': suite}))
        pieces.append(build_leading_text_section({'text': text}, rows, suite,
                                                 arch))
        pieces.append('<p><table class="scheduled">\n' + tab)
        pieces.append('<tr><th class="center">#</th><th class="center">suite</th><th class="center">arch</th>')
        pieces.append('<th class="center">source package</th><th class="center">status</th><th class="center">build date</th></tr>\n')
        for entry in rows:
            # entry: 0 suite, 1 arch, 2 pkg name, 3 status, 4 build date
            pieces.append(
                tab + '<tr><td> </td><td>' + entry[0] + '</td>'
                + '<td>' + entry[1] + '</td><td><code>'
                + Package(entry[2]).html_link(entry[0], entry[1])
                + '</code></td><td>'
                + convert_into_status_html(str(entry[3]))
                + '</td><td>' + entry[4] + '</td></tr>\n')
        pieces.append('</table></p>\n')
    html = ''.join(pieces)
    destfile = DISTRO_BASE + '/index_' + arch + '_oldies.html'
    desturl = DISTRO_URL + '/index_' + arch + '_oldies.html'
    left_nav_html = create_main_navigation(arch=arch)
    write_html_page(title=title,
                    body=html,
                    destfile=destfile,
                    style_note=True,
                    refresh_every=60,
                    left_nav_html=left_nav_html)
    log.info("Page generated at " + desturl)
def purge_old_notes(notes):
    """
    Remove note pages for packages that no longer have a note, then
    regenerate the package pages that used to reference them.
    """
    stale_pkgs = []
    regen = []
    for page in sorted(os.listdir(NOTES_PATH)):
        pkg = page.rsplit('_', 1)[0]
        log.debug('Checking if ' + page + ' (from ' + pkg + ') is still needed')
        if pkg in notes:
            continue
        log.info('There are no notes for ' + pkg + '. Removing old page.')
        os.remove(NOTES_PATH + '/' + page)
        stale_pkgs.append(pkg)
    for pkg in stale_pkgs:
        for suite in SUITES:
            query = ("SELECT s.name "
                     "FROM results AS r JOIN sources AS s ON r.package_id=s.id "
                     "WHERE s.name='{pkg}' AND r.status != '' AND s.suite='{suite}'"
                     ).format(pkg=pkg, suite=suite)
            try:
                regen.append(query_db(query)[0][0])
            except IndexError:
                # the package is not tested; this can happen if a package
                # got removed from the archive
                pass
    if regen:
        gen_packages_html([Package(x) for x in regen])
def gen_html_issue(issue, suite):
    """
    Given a issue as input (as a dict:
    {"issue_identifier": {"description": "blablabla", "url": "blabla"}} )
    it returns the html body
    """
    # links to the issue in other suites (current suite shown as plain <em>)
    suite_links = ''
    for i in SUITES:
        if suite_links != '':
            suite_links += ' / '
        if i != suite:
            suite_links += '<a href="' + REPRODUCIBLE_URL + ISSUES_URI + '/' + i + '/' + issue + '_issue.html">' + i + '</a>'
        else:
            suite_links += '<em>' + i + '</em>'
    # check for url:
    if 'url' in issues[issue]:
        url = issue_html_url.substitute(url=issues[issue]['url'])
    else:
        url = ''
    # add affected packages:
    affected = ''
    # NOTE: these locals shadow any module-level 'results'/'sources' tables
    results = db_table('results')
    sources = db_table('sources')
    sql = select(
        [sources.c.name]
    ).select_from(
        results.join(sources)
    ).where(
        and_(
            sources.c.suite == bindparam('suite'),
            sources.c.architecture == bindparam('arch'),
            results.c.status == bindparam('status'),
        )
    ).order_by(
        sources.c.name
    )
    try:
        # only amd64 is listed here; other architectures link from the suite
        # pages — assumption from this code alone, confirm with the site
        arch = 'amd64'
        for status in Status:
            status = status.value
            # restrict to the packages recorded for this issue, per status
            pkgs = query_db(sql.where(sources.c.name.in_(issues_count[issue]))\
                .params({'suite': suite, 'arch': arch, 'status': status.name}))
            pkgs = [p[0] for p in pkgs]
            if not pkgs:
                continue
            affected += tab*4 + '<p>\n'
            affected += tab*5 + '<img src="/static/{}"'.format(status.icon)
            affected += ' alt="' + status.name + ' icon" />\n'
            affected += tab*5 + str(len(pkgs)) + ' ' + status.spokenstatus
            affected += ' packages in ' + suite + '/' + arch +':\n'
            affected += tab*5 + '<code>\n'
            pkgs_popcon = issues_popcon_annotate(pkgs)
            try:
                # key sorts packages without open bugs (False) before
                # packages with bugs (True)
                for pkg, popc_num, is_popular in sorted(pkgs_popcon,
                                                        key=lambda x: x[0] in bugs):
                    affected += tab*6 + Package(pkg).html_link(suite, arch, bugs, popc_num, is_popular)
            except ValueError:
                pass
            affected += tab*5 + '</code>\n'
            affected += tab*4 + '</p>\n'
    except KeyError:
        # The note is not listed in any package, that is:
        # issues_count has no entry for this issue
        affected = '<i>None</i>'
    # check for description:
    try:
        desc = issues[issue]['description']
    except KeyError:
        log.warning('You should really include a description in the ' +
                    issue + ' issue')
        desc = 'N/A'
    # linkify URLs, then preserve line breaks in HTML
    desc = url2html.sub(r'<a href="\1">\1</a>', desc)
    desc = desc.replace('\n', '<br />')
    return issue_html.substitute(issue=issue,
                                 urls=url,
                                 description=desc,
                                 affected_pkgs=affected,
                                 suite=suite,
                                 suite_links=suite_links,
                                 notesgit_description=NOTESGIT_DESCRIPTION)
left_nav_html=left_nav_html) log.info('Issues index now available at ' + desturl) if __name__ == '__main__': issues_count = {} bugs = Bugs().bugs notes = load_notes() issues = load_issues() iterate_over_notes(notes) iterate_over_issues(issues) try: index_issues(issues, OrderedDict([ ("Sum of packages' popcon scores", lambda l: sum(popcon.source_package(*l).values())), ("Sum of square-roots of packages' popcon scores", lambda l: int(sum(map(sqrt, popcon.source_package(*l).values())))), ("Number of packages", len), ])) except UnicodeDecodeError: pass purge_old_notes(notes) purge_old_issues(issues) gen_packages_html([Package(x) for x in notes]) for suite in SUITES: for arch in ARCHS: build_page('notes', suite, arch) build_page('no_notes', suite, arch) build_page('FTBFS', suite, arch)
def create_pkgset_page_and_graphs(suite, arch, stats, pkgset_name):
    """
    Render the page for one package set on suite/arch: navigation, a stats
    graph (regenerated at most once a day), and a details section per build
    status.

    stats is expected to carry, for each cute status name ('bad', 'ugly',
    'rest', 'good'), a package list plus 'count_*' and 'percent_*' keys.
    """
    html_body = ""
    html_body += create_pkgset_navigation(suite, arch, pkgset_name)
    pkgset_context = ({
        'pkgset_name': pkgset_name,
        'suite': suite,
        'arch': arch,
        'pkg_symbol_legend_html':
            renderer.render(pkg_legend_template, {}),
    })
    png_file, png_href = stats_png_file_href(suite, arch, pkgset_name)
    thumb_file, thumb_href = stats_thumb_file_href(suite, arch, pkgset_name)
    # regenerate the graph only when missing or older than 24h
    yesterday_timestamp = (datetime.now() - timedelta(days=1)).timestamp()
    if (not os.access(png_file, os.R_OK)
            or os.stat(png_file).st_mtime < yesterday_timestamp):
        create_pkgset_graph(png_file, suite, arch, pkgset_name)
        # ImageMagick produces the thumbnail from the full-size graph
        check_call(
            ['convert', png_file, '-adaptive-resize', '160x80', thumb_file])
    pkgset_context['png'] = png_href
    other_archs = [a for a in ARCHS if a != arch]
    pkgset_context['other_archs'] = \
        gen_other_arch_context(other_archs, suite, pkgset_name)
    pkgset_context['status_details'] = []
    # (db status, cute name used as stats key, human description)
    status_cutename_descriptions = [
        ('FTBR', 'bad', 'failed to build reproducibly'),
        ('FTBFS', 'ugly', 'failed to build from source'),
        ('rest', 'rest',
         'are either in depwait state, blacklisted, not for us, or cannot be downloaded'
         ),
        ('reproducible', 'good', 'successfully build reproducibly'),
    ]
    for (status, cutename, description) in status_cutename_descriptions:
        icon_html = ''
        if status == 'rest':
            # 'rest' groups several real statuses; show an icon for each
            for x in ['depwait', 'blacklisted', 'NFU', 'E404']:
                s = Status.get(x)
                icon_html += gen_status_link_icon(s.value.name, None,
                                                 s.value.icon, suite, arch)
        else:
            s = Status.get(status)
            icon_html += gen_status_link_icon(s.value.name, None, s.value.icon,
                                              suite, arch)
        details_context = {
            'icon_html': icon_html,
            'description': description,
            'package_list_html': ''.join(
                [Package(x).html_link(suite, arch) for x in stats[cutename]]),
            'status_count': stats["count_" + cutename],
            'status_percent': stats["percent_" + cutename],
        }
        # reproducible/FTBR sections are always shown, even when empty
        if (status in ('reproducible', 'FTBR')
                or stats["count_" + cutename] != 0):
            pkgset_context['status_details'].append(details_context)
    html_body += renderer.render(pkgset_details_template, pkgset_context)
    title = '%s package set for %s/%s' % \
        (pkgset_name, suite, arch)
    page = "pkg_set_" + pkgset_name + ".html"
    destfile = os.path.join(DISTRO_BASE, suite, arch, page)
    suite_arch_nav_template = DISTRO_URI + '/{{suite}}/{{arch}}/' + page
    left_nav_html = create_main_navigation(
        suite=suite,
        arch=arch,
        displayed_page='pkg_set',
        suite_arch_nav_template=suite_arch_nav_template,
        ignore_experimental=True,
    )
    log.info("Creating meta pkgset page for %s in %s/%s.", pkgset_name, suite,
             arch)
    write_html_page(title=title,
                    body=html_body,
                    destfile=destfile,
                    left_nav_html=left_nav_html)
def generate_schedule(arch):
    """ the schedule pages are very different than others index pages """
    log.info('Building the schedule index page for ' + arch + '...')
    title = 'Packages currently scheduled on ' + arch + ' for testing for build reproducibility'

    # 'AND h.name=s.name AND h.suite=s.suite AND h.architecture=s.architecture'
    # in this query and the query below is needed due to not using package_id
    # in the stats_build table, which should be fixed...
    averagesql = select([
        func.coalesce(func.avg(cast(stats_build.c.build_duration, Integer)), 0)
    ]).where(
        and_(
            stats_build.c.status.in_(('reproducible', 'FTBR')),
            stats_build.c.name == sources.c.name,
            stats_build.c.suite == sources.c.suite,
            stats_build.c.architecture == sources.c.architecture,
        )).as_scalar()

    # scheduled-but-not-yet-started packages, oldest scheduling first
    query = select([
        schedule.c.date_scheduled, sources.c.suite, sources.c.architecture,
        sources.c.name, results.c.status, results.c.build_duration, averagesql
    ]).select_from(sources.join(schedule).join(results, isouter=True)).where(
        and_(
            schedule.c.date_build_started == None,
            sources.c.architecture == bindparam('arch'),
        )).order_by(schedule.c.date_scheduled)

    text = Template(
        '$tot packages are currently scheduled for testing on $arch:')

    html = ''
    rows = query_db(query.params({'arch': arch}))
    html += build_leading_text_section({'text': text}, rows, defaultsuite,
                                       arch)
    # builds currently running come first, before the queue table
    html += generate_live_status_table(arch)
    html += '<p><table class="scheduled">\n' + tab
    html += '<tr><th class="center">#</th><th class="center">scheduled at</th><th class="center">suite</th>'
    html += '<th class="center">arch</th><th class="center">source package</th><th class="center">previous build status</th><th class="center">previous build duration</th><th class="center">average build duration</th></tr>\n'
    for row in rows:
        # 0: date_scheduled, 1: suite, 2: arch, 3: pkg name 4: previous status
        # 5: previous build duration 6. avg build duration
        pkg = row[3]
        duration = convert_into_hms_string(row[5])
        avg_duration = convert_into_hms_string(row[6])
        html += tab + '<tr><td> </td><td>' + row[0] + '</td>'
        html += '<td>' + row[1] + '</td><td>' + row[2] + '</td><td><code>'
        html += Package(pkg).html_link(row[1], row[2])
        html += '</code></td><td>' + convert_into_status_html(
            str(row[4])
        ) + '</td><td>' + duration + '</td><td>' + avg_duration + '</td></tr>\n'
    html += '</table></p>\n'
    destfile = DISTRO_BASE + '/index_' + arch + '_scheduled.html'
    desturl = DISTRO_URL + '/index_' + arch + '_scheduled.html'
    suite_arch_nav_template = DISTRO_URI + '/index_{{arch}}_scheduled.html'
    left_nav_html = create_main_navigation(
        arch=arch,
        no_suite=True,
        displayed_page='scheduled',
        suite_arch_nav_template=suite_arch_nav_template)
    write_html_page(title=title,
                    body=html,
                    destfile=destfile,
                    style_note=True,
                    refresh_every=60,
                    left_nav_html=left_nav_html)
    log.info("Page generated at " + desturl)
log.error('dd-list printed some errors:\n' + err.decode()) log.debug('dd-list output:\n' + out.decode()) html = '<p>The following maintainers and uploaders are listed ' html += 'for packages in ' + suite + ' which have built ' html += 'unreproducibly. Please note that the while the link ' html += 'always points to the amd64 version, it\'s possible that' html += 'the unreproducibility is only present in another architecture(s).</p>\n<p><pre>' out = out.decode().splitlines() get_mail = re.compile('<(.*)>') for line in out: if line[0:3] == ' ': line = line.strip().split(None, 1) html += ' ' # the final strip() is to avoid a newline html += Package(line[0]).html_link(suite, arch).strip() try: html += ' ' + line[1] # eventual uploaders sign except IndexError: pass elif line.strip(): # be sure this is not just an empty line email = get_mail.findall(line.strip())[0] html += HTML.escape(line.strip()) html += '<a name="{maint}" href="#{maint}">¶</a>'.format( maint=email) html += '\n' html += '</pre></p>' title = 'Maintainers of unreproducible packages in ' + suite destfile = DISTRO_BASE + '/' + suite + '/index_dd-list.html' suite_arch_nav_template = DISTRO_URI + '/{{suite}}/index_dd-list.html' left_nav_html = create_main_navigation(suite=suite, arch=arch,
def update_sources_db(suite, arch, sources):
    """
    Sync the `sources` table with the content of a Sources index file for
    one suite/arch: insert new packages, record version updates, delete
    removed packages (cascading to results/schedule/notes), then sanity-check
    the final count and build pages for the new packages.

    :param suite: suite name
    :param arch: architecture name
    :param sources: the raw text of the Sources file
    Exits the process (sys.exit(1)) when the final package count in the DB
    disagrees with the Sources file.
    """
    # extract relevant info (package name and version) from the sources file
    new_pkgs = set()
    newest_version = {}
    for src in deb822.Sources.iter_paragraphs(sources.split('\n')):
        pkg = (src['Package'], src['Version'], suite, arch)
        if 'Extra-Source-Only' in src and src['Extra-Source-Only'] == 'yes':
            log.debug('Ignoring {} due to Extra-Source-Only'.format(pkg))
            continue
        # only keep the most recent version of a src for each package/suite/arch
        key = src['Package'] + suite + arch
        if key in newest_version:
            # bug fix: the last stanza in the file used to win regardless of
            # version; compare versions explicitly instead
            if apt_pkg.version_compare(src['Version'],
                                       newest_version[key]) <= 0:
                continue  # we already hold a version at least as new
            oldpackage = (src['Package'], newest_version[key], suite, arch)
            new_pkgs.remove(oldpackage)
        newest_version[key] = src['Version']
        new_pkgs.add(pkg)
    # get the current packages in the database
    query = "SELECT name, version, suite, architecture FROM sources " + \
            "WHERE suite='{}' AND architecture='{}'".format(suite, arch)
    cur_pkgs = set([(p.name, p.version, p.suite, p.architecture)
                    for p in query_db(query)])
    pkgs_to_add = []
    updated_pkgs = []
    different_pkgs = [x for x in new_pkgs if x not in cur_pkgs]
    log.debug('Packages different in the archive and in the db: %s',
              different_pkgs)
    for pkg in different_pkgs:
        # pkg: (name, version, suite, arch)
        query = "SELECT id, version, notify_maintainer FROM sources " + \
                "WHERE name='{}' AND suite='{}' AND architecture='{}'"
        query = query.format(pkg[0], pkg[2], pkg[3])
        try:
            result = query_db(query)[0]
        except IndexError:  # new package
            pkgs_to_add.append({
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
            })
            continue
        pkg_id = result[0]
        old_version = result[1]
        notify_maint = int(result[2])
        # only treat strictly newer versions as updates (binNMU-style
        # downgrades are ignored here)
        if apt_pkg.version_compare(pkg[1], old_version) > 0:
            log.debug('New version: ' + str(pkg) + ' (we had ' +
                      old_version + ')')
            updated_pkgs.append({
                'update_id': pkg_id,
                'name': pkg[0],
                'version': pkg[1],
                'suite': pkg[2],
                'architecture': pkg[3],
                'notify_maintainer': notify_maint,
            })
    # Now actually update the database:
    sources_table = db_table('sources')
    # updated packages
    log.info('Pushing ' + str(len(updated_pkgs)) +
             ' updated packages to the database...')
    if updated_pkgs:
        transaction = conn_db.begin()
        update_query = sources_table.update().\
            where(sources_table.c.id == sql.bindparam('update_id'))
        conn_db.execute(update_query, updated_pkgs)
        transaction.commit()
    # new packages
    if pkgs_to_add:
        log.info('Now inserting %i new sources in the database: %s',
                 len(pkgs_to_add), pkgs_to_add)
        transaction = conn_db.begin()
        conn_db.execute(sources_table.insert(), pkgs_to_add)
        transaction.commit()
    # RM'ed packages
    cur_pkgs_name = [x[0] for x in cur_pkgs]
    new_pkgs_name = [x[0] for x in new_pkgs]
    rmed_pkgs = [x for x in cur_pkgs_name if x not in new_pkgs_name]
    log.info('Now deleting %i removed packages: %s', len(rmed_pkgs),
             rmed_pkgs)
    rmed_pkgs_id = []
    pkgs_to_rm = []
    query = "SELECT id FROM sources WHERE name='{}' AND suite='{}' " + \
            "AND architecture='{}'"
    for pkg in rmed_pkgs:
        result = query_db(query.format(pkg, suite, arch))
        rmed_pkgs_id.append({'deleteid': result[0][0]})
        pkgs_to_rm.append({'name': pkg, 'suite': suite, 'architecture': arch})
    log.debug('removed packages ID: %s',
              [str(x['deleteid']) for x in rmed_pkgs_id])
    log.debug('removed packages: %s', pkgs_to_rm)
    if rmed_pkgs_id:
        transaction = conn_db.begin()
        results_table = db_table('results')
        schedule_table = db_table('schedule')
        notes_table = db_table('notes')
        removed_packages_table = db_table('removed_packages')
        # delete dependent rows first, sources last
        delete_results_query = results_table.delete().\
            where(results_table.c.package_id == sql.bindparam('deleteid'))
        delete_schedule_query = schedule_table.delete().\
            where(schedule_table.c.package_id == sql.bindparam('deleteid'))
        delete_notes_query = notes_table.delete().\
            where(notes_table.c.package_id == sql.bindparam('deleteid'))
        delete_sources_query = sources_table.delete().\
            where(sources_table.c.id == sql.bindparam('deleteid'))
        conn_db.execute(delete_results_query, rmed_pkgs_id)
        conn_db.execute(delete_schedule_query, rmed_pkgs_id)
        conn_db.execute(delete_notes_query, rmed_pkgs_id)
        conn_db.execute(delete_sources_query, rmed_pkgs_id)
        conn_db.execute(removed_packages_table.insert(), pkgs_to_rm)
        transaction.commit()
    # finally check whether the db has the correct number of packages
    query = "SELECT count(*) FROM sources WHERE suite='{}' " + \
            "AND architecture='{}'"
    pkgs_end = query_db(query.format(suite, arch))
    count_new_pkgs = len(set([x[0] for x in new_pkgs]))
    if int(pkgs_end[0][0]) != count_new_pkgs:
        print_critical_message('AH! The number of source in the Sources file' +
                               ' is different than the one in the DB!')
        log.critical('source in the debian archive for the %s suite: %s',
                     suite, str(count_new_pkgs))
        log.critical('source in the reproducible db for the %s suite: %s',
                     suite, str(pkgs_end[0][0]))
        sys.exit(1)
    if pkgs_to_add:
        log.info('Building pages for the new packages')
        gen_packages_html([Package(x['name']) for x in pkgs_to_add],
                          no_clean=True)
sys.exit(1) if DEBUG: log.debug('Double check the change:') query = 'SELECT * FROM sources WHERE name="{}"'.format(package) log.debug(query_db(query)) if maintainer: query = "SELECT source FROM sources WHERE maintainer_email = '{}' " + \ "AND release = 'sid' AND component = 'main'" ret = Udd().query(query.format(maintainer)) try: pkgs = [x[0] for x in ret] except IndexError: log.info('No packages maintained by ' + maintainer) sys.exit(0) log.info('Packages maintained by ' + maintainer + ':') log.info('\t' + ', '.join(pkgs)) packages.extend(pkgs) for package in packages: process_pkg(package, local_args.deactivate) gen_packages_html([Package(x) for x in packages], no_clean=True) build_page('notify') if local_args.deactivate: _good('Notifications disabled for ' + str(len(packages)) + ' package(s)') else: _good('Notifications enabled for ' + str(len(packages)) + ' package(s)')
def gen_suitearch_details(package, version, suite, arch, status, spokenstatus,
                          build_date):
    """
    Build the per-suite/arch details section of a package page.

    Returns (rendered_html, default_view) where default_view is the URI the
    page should open on: notes if present, else diffoscope output, else
    buildinfo, else rbuild log, else '/untested.html'.
    """
    eversion = strip_epoch(version)  # epoch_free_version is too long
    pkg = Package(package)
    build = pkg.builds[suite][arch]
    context = {}
    default_view = ''

    # Make notes the default default view
    notes_file = NOTES_PATH + '/' + package + '_note.html'
    notes_uri = NOTES_URI + '/' + package + '_note.html'
    if os.access(notes_file, os.R_OK):
        default_view = notes_uri

    # Get summary context
    context['status_html'] = gen_status_link_icon(status, spokenstatus, None,
                                                  suite, arch)
    context['build_date'] = build_date

    # Get diffoscope differences context
    dbd_links = get_dbd_links(package, eversion, suite, arch)
    dbd_uri = dbd_links.get('dbd_uri', '')
    if dbd_uri:
        context['dbd'] = {
            'dbd_page_uri': dbd_links['dbd_page_uri'],
            'dbdtxt_uri': dbd_links.get('dbdtxt_uri', ''),
            'dbdjson_uri': dbd_links.get('dbdjson_uri', ''),
        }
        default_view = default_view if default_view else dbd_uri

    # Get buildinfo context
    if build.buildinfo:
        context['buildinfo_uri'] = build.buildinfo.url
        default_view = default_view if default_view else build.buildinfo.url
    elif not args.ignore_missing_files and status not in \
            ('untested', 'blacklisted', 'FTBFS', 'NFU', 'depwait', '404'):
        # NOTE(review): build.buildinfo is falsy in this branch, so the
        # .path access below raises AttributeError if it is None — confirm
        # the builds attribute is a falsy object with .path, not None
        log.critical('buildinfo not detected at ' + build.buildinfo.path)

    # Get rbuild, build2 and build diffs context
    if build.rbuild:
        context['rbuild_uri'] = build.rbuild.url
        context['rbuild_size'] = sizeof_fmt(build.rbuild.size)
        default_view = default_view if default_view else build.rbuild.url
        context['buildlogs'] = {}
        if build.build2 and build.logdiff:
            context['buildlogs']['build2_uri'] = build.build2.url
            context['buildlogs']['build2_size'] = build.build2.size
            context['buildlogs']['diff_uri'] = build.logdiff.url
        else:
            # NOTE(review): same concern here — if build.build2 or
            # build.logdiff is None, formatting .path itself raises
            log.error('Either {} or {} is missing'.format(
                build.build2.path, build.logdiff.path))
    elif status not in ('untested', 'blacklisted') and \
            not args.ignore_missing_files:
        log.critical(
            DISTRO_URL + '/' + suite + '/' + arch + '/' + package +
            ' didn\'t produce a buildlog, even though it has been built.')

    context['has_buildloginfo'] = 'buildinfo_uri' in context or \
                                  'buildlogs' in context or \
                                  'rbuild_uri' in context

    default_view = '/untested.html' if not default_view else default_view
    suitearch_details_html = renderer.render(suitearch_details_template,
                                             context)
    return (suitearch_details_html, default_view)