def item(problem_id, component_names=None):
    """Show the details of a single problem.

    :param problem_id: primary key of the Problem to display
    :param component_names: optional comma-separated component names; when
        given, the problem's component assignment is replaced with the named
        components and the reassignment is recorded before rendering
    :return: a JSON response when the client requests JSON, otherwise the
        rendered ``problems/item.html`` template

    Aborts with 404 when no such problem exists.
    """
    components_form = ProblemComponents()

    problem = (db.session.query(Problem)
               .filter(Problem.id == problem_id)
               .first())
    if problem is None:
        # abort() raises the HTTPException itself; the former
        # `raise abort(404)` never reached its `raise`.
        abort(404)

    if component_names:
        try:
            # Replace the current component assignment wholesale.
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError(
                        "Component {} not found.".format(comp_name))
                db.session.add(ProblemComponent(problem_id=problem.id,
                                                component_id=component.id,
                                                order=index + 1))

            # Record who performed the reassignment and when.
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)

            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    solutions = []

    def equal_solution(s):
        # Solutions sharing a cause are treated as duplicates.
        return [x for x in solutions if s.cause == x.cause]

    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    # Aggregate occurrence counts per OS release, architecture, executable
    # and package across all of the problem's reports.
    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())
    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())
    arches = (db.session.query(Arch, sub.c.cnt)
              .join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()
    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()
    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Number frames sequentially within each backtrace for display.
    for report in problem.reports:
        for backtrace in report.backtraces:
            for fid, frame in enumerate(backtrace.frames, start=1):
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash)
                 .all())

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "solutions": solutions,
               "components_form": components_form}

    if not bt_hashes:
        logger.warning("No backtrace hashes found for problem #%d",
                       problem_id)
    else:
        # Generate a permalink for this problem. We do this by uniformly
        # picking (at most) 10 hashes from the list. This ensures the
        # selected hashes are more or less representative of the problem.
        k = min(len(bt_hashes), 10)
        # A hint of determinism in this uncertain world.
        r = random.Random(problem_id)
        hashes_sampled = r.sample(bt_hashes, k)
        forward["permalink_query"] = "&".join(
            "bth={}".format(bth) for (bth, ) in hashes_sampled)

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_problem_maintainer(db, g.user, problem)
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        # `rid` rather than `id` to avoid shadowing the builtin.
        bt_diff_form.lhs.choices = [(rid, rid) for rid in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)
def item(report_id, want_object=False):
    """Show detailed information about a single report.

    :param report_id: primary key of the Report to display
    :param want_object: when True, return the template context dict
        (augmented with serialized extras) instead of a rendered response
    :return: a dict when ``want_object``, a JSON response when the client
        requests JSON, otherwise the rendered ``reports/item.html`` template

    Aborts with 404 when no such report exists.
    """
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())
    if result is None:
        abort(404)
    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    executable = executable[0] if executable else "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    def history_select(table, date, date_range):
        """Return this report's history rows newer than date_range."""
        return (db.session.query(table)
                .filter(table.report_id == report_id)
                .filter(date >= date_range)
                # Flot is confused if not ordered
                .order_by(date)
                .all())

    def zero_entry(key, value):
        # Zero-count placeholder tied to the report's top OS release,
        # used to pad the history charts to a full window.
        return {key: value,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id}

    MAX_DAYS = 20   # Default set on 20
    MAX_WEEK = 20   # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append(zero_entry('day', today - timedelta(x)))
    elif len(daily_history) < MAX_DAYS:
        # Anchor both ends of the window so the chart spans it fully.
        if daily_history[-1].day < (today):
            daily_history.append(zero_entry('day', today))
        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append(
                zero_entry('day', today - timedelta(MAX_DAYS)))

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append(
                zero_entry('week', last_monday - timedelta(x * 7)))
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append(zero_entry('week', last_monday))
        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append(
                zero_entry('week', last_monday - timedelta(7 * MAX_WEEK)))

    # Show only 20 months
    monthly_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (today - relativedelta(months=MAX_MONTH)))

    def first_day_of_month(t):
        return datetime.date(t.year, t.month, 1)

    fdom = first_day_of_month(datetime.datetime.today())
    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append(
                zero_entry('month', fdom - relativedelta(months=x)))
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append(zero_entry('month', fdom))
        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append(
                zero_entry('month', fdom - relativedelta(months=MAX_MONTH)))

    # Full monthly history since the epoch, for the totals table.
    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    # Per-OS-release totals of occurrences and unique reporters.
    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)
            if ch.count is None:
                ch.count = 0
            if ch.unique is None:
                # BUG FIX: previously reset ch.count instead of ch.unique,
                # leaving None to break the `+=` accumulation below.
                ch.unique = 0
            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count,
                                                'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique
    # (A stray no-op `sorted(unique_ocurrence_os)` was removed here; its
    # result was discarded.)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Best-effort: a report may have no backtraces at all.
    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []
    for fid, frame in enumerate(backtrace, start=1):
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    # Reporter contact emails are only exposed to maintainers.
    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in
            (db.session.query(ContactEmail.email_address)
             .join(ReportContactEmail)
             .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()
    maintainer_contact = maintainer.name if maintainer else ""

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)
    forward['error_name'] = report.error_name
    forward['oops'] = report.oops

    if want_object:
        # Programmatic consumers get the raw context dict with a few
        # serialized extras instead of a rendered page.
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
def item(problem_id, component_names=None):
    """Show the details of a single problem.

    :param problem_id: primary key of the Problem to display
    :param component_names: optional comma-separated component names; when
        given, the problem's component assignment is replaced with the named
        components and the reassignment is recorded before rendering
    :return: a JSON response when the client requests JSON, otherwise the
        rendered ``problems/item.html`` template

    Aborts with 404 when no such problem exists.
    """
    components_form = ProblemComponents()

    problem = (db.session.query(Problem)
               .filter(Problem.id == problem_id)
               .first())
    if problem is None:
        # abort() raises the HTTPException itself; the former
        # `raise abort(404)` never reached its `raise`.
        abort(404)

    if component_names:
        try:
            # Replace the current component assignment wholesale.
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError("Component {} not found.".format(
                        comp_name))
                db.session.add(ProblemComponent(problem_id=problem.id,
                                                component_id=component.id,
                                                order=index + 1))

            # Record who performed the reassignment and when.
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)

            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    solutions = []

    def equal_solution(s):
        # Solutions sharing a cause are treated as duplicates.
        return [x for x in solutions if s.cause == x.cause]

    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    # Aggregate occurrence counts per OS release, architecture, executable
    # and package across all of the problem's reports.
    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())
    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())
    arches = (db.session.query(Arch, sub.c.cnt)
              .join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()
    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()
    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Number frames sequentially within each backtrace for display.
    for report in problem.reports:
        for backtrace in report.backtraces:
            for fid, frame in enumerate(backtrace.frames, start=1):
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash)
                 .all())

    # Limit to 10 bt_hashes (otherwise the URL can get too long)
    # Select the 10 hashes uniformly from the entire list to make sure it is
    # a good representation. (Slicing the 10 first could mean the 10 oldest
    # are selected which is not a good representation.)
    bt_hashes_limited = []
    if bt_hashes:
        # BUG FIX: guard against an empty hash list, which previously caused
        # a ZeroDivisionError (len(bt_hashes) / float(0)).
        k = min(len(bt_hashes), 10)
        step = len(bt_hashes) / float(k)
        pos = 0.0
        for _ in range(k):
            bt_hashes_limited.append("bth=" + bt_hashes[int(pos)][0])
            pos += step
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs,
               "solutions": solutions,
               "components_form": components_form
               }

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_problem_maintainer(db, g.user, problem)
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        # `rid` rather than `id` to avoid shadowing the builtin.
        bt_diff_form.lhs.choices = [(rid, rid) for rid in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)
def item(report_id, want_object=False) -> Union[Dict[str, Any], Response, str]:
    """Show detailed information about a single report.

    :param report_id: primary key of the Report to display
    :param want_object: when True, return the template context dict
        (augmented with serialized extras) instead of a rendered response
    :return: a dict when ``want_object``, a JSON Response when the client
        requests JSON, otherwise the rendered ``reports/item.html`` template

    Aborts with 404 when no such report exists.
    """
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())
    if result is None:
        abort(404)
    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    executable = executable[0] if executable else "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    daily_history = precompute_history(report_id, 'day')
    weekly_history = precompute_history(report_id, 'week')
    monthly_history = precompute_history(report_id, 'month')

    # Full monthly history, for the per-OS totals table.
    complete_history = (db.session.query(ReportHistoryMonthly)
                        .filter(ReportHistoryMonthly.report_id == report_id)
                        .all())

    # Per-OS-release totals of occurrences and unique reporters.
    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = str(ch.opsysrelease)
            if ch.count is None:
                ch.count = 0
            if ch.unique is None:
                # BUG FIX: previously reset ch.count instead of ch.unique,
                # leaving None to break the `+=` accumulation below.
                ch.unique = 0
            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count,
                                                'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    packages = load_packages(db, report_id)

    crashed_versions = {}
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Best-effort: a report may have no backtraces at all.
    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []
    for fid, frame in enumerate(backtrace, start=1):
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    # Reporter contact emails are only exposed to maintainers.
    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in
            (db.session.query(ContactEmail.email_address)
             .join(ReportContactEmail)
             .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()
    maintainer_contact = maintainer.name if maintainer else ""

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)
    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        # Programmatic consumers get the raw context dict with a few
        # serialized extras instead of a rendered page.
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return Response(response=json.dumps(forward, cls=WebfafJSONEncoder),
                        status=200,
                        mimetype="application/json")

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
def item(report_id, want_object=False):
    """Show detailed information about a single report.

    :param report_id: primary key of the Report to display
    :param want_object: when True, return the template context dict
        (augmented with serialized extras) instead of a rendered response
    :return: a dict when ``want_object``, a JSON response when the client
        requests JSON, otherwise the rendered ``reports/item.html`` template

    Aborts with 404 when no such report exists.
    """
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())
    if result is None:
        abort(404)
    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    executable = executable[0] if executable else "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    def history_select(table, date, date_range):
        """Return this report's history rows newer than date_range."""
        return (db.session.query(table)
                .filter(table.report_id == report_id)
                .filter(date >= date_range)
                # Flot is confused if not ordered
                .order_by(date)
                .all())

    def zero_entry(key, value):
        # Zero-count placeholder tied to the report's top OS release,
        # used to pad the history charts to a full window.
        return {key: value,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id}

    MAX_DAYS = 20   # Default set on 20
    MAX_WEEK = 20   # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append(zero_entry('day', today - timedelta(x)))
    elif len(daily_history) < MAX_DAYS:
        # Anchor both ends of the window so the chart spans it fully.
        if daily_history[-1].day < (today):
            daily_history.append(zero_entry('day', today))
        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append(
                zero_entry('day', today - timedelta(MAX_DAYS)))

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append(
                zero_entry('week', last_monday - timedelta(x * 7)))
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append(zero_entry('week', last_monday))
        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append(
                zero_entry('week', last_monday - timedelta(7 * MAX_WEEK)))

    # Show only 20 months
    monthly_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (today - relativedelta(months=MAX_MONTH)))

    def first_day_of_month(t):
        return datetime.date(t.year, t.month, 1)

    fdom = first_day_of_month(datetime.datetime.today())
    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append(
                zero_entry('month', fdom - relativedelta(months=x)))
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append(zero_entry('month', fdom))
        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append(
                zero_entry('month', fdom - relativedelta(months=MAX_MONTH)))

    # Full monthly history since the epoch, for the totals table.
    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    # Per-OS-release totals of occurrences and unique reporters.
    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)
            if ch.count is None:
                ch.count = 0
            if ch.unique is None:
                # BUG FIX: previously reset ch.count instead of ch.unique,
                # leaving None to break the `+=` accumulation below.
                ch.unique = 0
            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count,
                                                'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique
    # (A stray no-op `sorted(unique_ocurrence_os)` was removed here; its
    # result was discarded.)

    packages = load_packages(db, report_id)

    crashed_versions = []
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Best-effort: a report may have no backtraces at all.
    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []
    for fid, frame in enumerate(backtrace, start=1):
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    # Reporter contact emails are only exposed to maintainers.
    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in
            (db.session.query(ContactEmail.email_address)
             .join(ReportContactEmail)
             .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()
    maintainer_contact = maintainer.name if maintainer else ""

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)
    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        # Programmatic consumers get the raw context dict with a few
        # serialized extras instead of a rendered page.
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)