def test_solution_finder(self):
    """
    Test that no solution is given when the version of an affected
    package in a report is greater than the probable fix.
    """

    self.call_action("mark-probably-fixed", {
        "opsys": "fedora",
        "opsys-release": "20"
    })

    self.assertIsInstance(find_solution(self.ureport_systemd2), Solution)
    self.assertIsNone(find_solution(self.ureport_systemd77))

def run(self, cmdline, db):
    db.session.autocommit = False

    for report in db.session.query(Report).filter(Report.max_certainty.is_(None)):
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]
        if solutions[0] is not None:
            report.max_certainty = max((s.certainty for s in solutions))
            self.log_info("Max_certainty of report '{0}' is changed to {1}"
                          .format(report.id, report.max_certainty))

    db.session.commit()

def item(problem_id, component_names=None):
    components_form = ProblemComponents()

    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()

    if problem is None:
        raise abort(404)

    if component_names:
        try:
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError(
                        "Component {} not found.".format(comp_name))

                db.session.add(
                    ProblemComponent(problem_id=problem.id,
                                     component_id=component.id,
                                     order=index + 1))
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)
            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    solutions = []
    equal_solution = lambda s: [x for x in solutions if s.cause == x.cause]
    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    sub = (db.session.query(
        ReportOpSysRelease.opsysrelease_id,
        func.sum(ReportOpSysRelease.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportOpSysRelease.opsysrelease_id).subquery())

    osreleases = (db.session.query(OpSysRelease, sub.c.cnt).join(sub).order_by(
        desc("cnt")).all())

    sub = (db.session.query(
        ReportArch.arch_id,
        func.sum(ReportArch.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(ReportArch.arch_id).subquery())

    arches = (db.session.query(Arch, sub.c.cnt).join(sub).order_by(
        desc("cnt")).all())

    exes = (db.session.query(
        ReportExecutable.path,
        func.sum(ReportExecutable.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportExecutable.path).order_by(desc("cnt")).all())

    sub = (db.session.query(
        ReportPackage.installed_package_id,
        func.sum(ReportPackage.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportPackage.installed_package_id).subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(
        ReportUnknownPackage,
        ReportUnknownPackage.count).join(Report).filter(
            Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1), reverse=True)))

    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid

    bt_hashes = (db.session.query(
        ReportHash.hash).join(Report).join(Problem).filter(
            Problem.id == problem_id).distinct(ReportHash.hash).all())

    forward = {
        "problem": problem,
        "osreleases": metric(osreleases),
        "arches": metric(arches),
        "exes": metric(exes),
        "package_counts": package_counts,
        "solutions": solutions,
        "components_form": components_form
    }

    if not bt_hashes:
        logger.warning("No backtrace hashes found for problem #%d", problem_id)
    else:
        # Generate a permalink for this problem. We do this by uniformly picking
        # (at most) 10 hashes from the list. This ensures the selected hashes are
        # more or less representative of the problem.
        k = min(len(bt_hashes), 10)
        # A hint of determinism in this uncertain world.
        r = random.Random(problem_id)
        hashes_sampled = r.sample(bt_hashes, k)
        permalink_query = "&".join("bth={}".format(bth)
                                   for (bth, ) in hashes_sampled)
        forward["permalink_query"] = permalink_query

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)

def test_kb(self):
    self.assertEqual(
        self.call_action("sf-prefilter-soladd", {
            "CAUSE": "VLC Media Player",
            "NOTE": "VLC unsupported.",
            "note-html": "<html><b>VLC unsupported.</b><html>",
            "url": "http://www.fedoraproject.org",
        }), 0)
    self.assertEqual(
        self.call_action("sf-prefilter-soladd", {
            "CAUSE": "VLC Media Player",
            "NOTE": "VLC unsupported.",
            "note-html": "<html><b>VLC unsupported.</b><html>",
            "url": "http://www.fedoraproject.org",
        }), 0)
    self.assertEqual(
        self.call_action("sf-prefilter-soladd", {
            "CAUSE": "Unsupported",
            "NOTE": "Unsupported",
            "note-html": "<html><b>Unsupported</b><html>",
            "url": "http://www.fedoraproject.org",
        }), 0)
    self.assertEqual(
        self.call_action("sf-prefilter-patadd", {
            "SOLUTION": "FooSolution",
            "btpath": "^.*/systemd-logind$",
        }), 1)
    self.assertEqual(
        self.call_action("sf-prefilter-patadd", {
            "SOLUTION": "FooSolution",
            "opsys": "fedora",
            "btpath": "^.*/systemd-logind$",
        }), 1)
    self.assertEqual(
        self.call_action("sf-prefilter-patadd", {
            "SOLUTION": "Unsupported",
            "opsys": "fedora",
            "btpath": "^.*/systemd-logind$",
        }), 0)
    self.assertEqual(
        self.call_action("sf-prefilter-patadd", {
            "SOLUTION": "Unsupported",
            "opsys": "fedora",
            "pkgname": "^ibus-table",
        }), 0)

    sample_report_names = ("ureport1", "ureport2", "ureport_core",
                           "ureport_python", "ureport_kerneloops",
                           "ureport_java", "ureport_ruby")
    sample_reports = {}
    for report_name in sample_report_names:
        with open("sample_reports/{0}".format(report_name), "r") as file:
            sample_reports[report_name] = json.load(file)

    solution = find_solution(sample_reports['ureport_core'])
    self.assertIsNotNone(solution)
    self.assertEqual(solution.cause, "Unsupported")

    solution = find_solution(sample_reports['ureport_python'])
    self.assertIsNotNone(solution)
    self.assertEqual(solution.cause, "Unsupported")

    solution = find_solution(sample_reports['ureport_java'])
    self.assertIsNone(solution)

def new():
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)

            raw_data = request.files[form.file.name].read()
            try:
                data = json.loads(raw_data)
            except Exception as ex:
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            max_ureport_length = InvalidUReport.__lobs__["ureport"]

            if len(str(report)) > max_ureport_length:
                raise InvalidUsage(
                    "uReport may only be {0} bytes long".format(
                        max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                osr = (db.session.query(OpSysRelease).join(OpSys).filter(
                    OpSys.name == systems[report["os"]["name"]].nice_name
                ).filter(
                    OpSysRelease.version == report["os"]["version"]).first())

                if osr:
                    osr_id = osr.id

            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            with open(fpath, 'w') as file:
                file.write(raw_data)

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                except FafError:
                    report2 = None

                if report2 is not None:
                    solution = find_solution(report2, db=db, osr=osr)
                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}".
                                format(solution.url))

                        solution_dict = {
                            "cause": solution.cause,
                            "note": solution.note_text,
                            "url": solution.url
                        }
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:
                        logging.exception(e)

                if known:
                    url = url_for('reports.item', report_id=dbreport.id,
                                  _external=True)
                    parts = [{
                        "reporter": "ABRT Server",
                        "value": url,
                        "type": "url"
                    }]

                    bugs = (db.session.query(BzBug).join(ReportBz).filter(
                        ReportBz.bzbug_id == BzBug.id).filter(
                            ReportBz.report_id == dbreport.id).all())
                    for bug in bugs:
                        parts.append({
                            "reporter": "Bugzilla",
                            "value": bug.url,
                            "type": "url"
                        })

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(
                        p["value"] for p in parts
                        if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                json_response.status_code = 202
                return json_response
            else:
                flash("The uReport was saved successfully. Thank you.",
                      "success")
                return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("reports/new.html",
                                       form=form), e.status_code

    return render_template("reports/new.html", form=form)

def item(report_id, want_object=False):
    result = (db.session.query(Report, OpSysComponent).join(OpSysComponent).filter(
        Report.id == report_id).first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    history_select = lambda table, date, date_range: (
        db.session.query(table).filter(table.report_id == report_id).filter(
            date >= date_range)
        # Flot is confused if not ordered
        .order_by(date).all())

    MAX_DAYS = 20  # Default set on 20
    MAX_WEEK = 20  # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append({
                'day': today - timedelta(x),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(daily_history) < MAX_DAYS:
        if daily_history[-1].day < (today):
            daily_history.append({
                'day': today,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({
                'day': today - timedelta(MAX_DAYS),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({
                'week': last_monday - timedelta(x * 7),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({
                'week': last_monday,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append({
                'week': last_monday - timedelta(7 * MAX_WEEK),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly,
                                     ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))

    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))
    fdom = first_day_of_month(datetime.datetime.today())

    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({
                'month': fdom - relativedelta(months=x),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({
                'month': fdom,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({
                'month': fdom - relativedelta(months=MAX_MONTH),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    sorted(unique_ocurrence_os)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease, Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id) or
                      get_crashed_unknown_package_nevr_for_report(db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name

    forward['oops'] = report.oops

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)

def item(problem_id, component_names=None):
    components_form = ProblemComponents()

    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()

    if problem is None:
        raise abort(404)

    if component_names:
        try:
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError("Component {} not found.".format(
                        comp_name))

                db.session.add(ProblemComponent(problem_id=problem.id,
                                                component_id=component.id,
                                                order=index + 1))
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)
            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    solutions = []
    equal_solution = lambda s: [x for x in solutions if s.cause == x.cause]
    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())

    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())

    arches = (db.session.query(Arch, sub.c.cnt).join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash).all())
    # Limit to 10 bt_hashes (otherwise the URL can get too long)
    # Select the 10 hashes uniformly from the entire list to make sure it is a
    # good representation. (Slicing the 10 first could mean the 10 oldest
    # are selected which is not a good representation.)
    k = min(len(bt_hashes), 10)
    bt_hashes_limited = []
    if k:  # guard against ZeroDivisionError when there are no hashes
        a = 0
        d = len(bt_hashes) / float(k)
        for _ in range(k):
            bt_hashes_limited.append("bth=" + bt_hashes[int(a)][0])
            a += d
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs,
               "solutions": solutions,
               "components_form": components_form
              }

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)

def item(problem_id):
    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()

    if problem is None:
        raise abort(404)

    report_ids = [report.id for report in problem.reports]

    solutions = []
    equal_solution = lambda s: [x for x in solutions if s.cause == x.cause]
    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())

    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())

    arches = (db.session.query(Arch, sub.c.cnt).join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash).all())
    # Limit to 10 bt_hashes (otherwise the URL can get too long)
    # Select the 10 hashes uniformly from the entire list to make sure it is a
    # good representation. (Slicing the 10 first could mean the 10 oldest
    # are selected which is not a good representation.)
    k = min(len(bt_hashes), 10)
    bt_hashes_limited = []
    if k:  # guard against ZeroDivisionError when there are no hashes
        a = 0
        d = len(bt_hashes) / float(k)
        for i in range(k):
            bt_hashes_limited.append("bth=" + bt_hashes[int(a)][0])
            a += d
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs,
               "solutions": solutions
              }

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)

def item(report_id, want_object=False) -> Union[Dict[str, Any], Response, str]:
    result = (db.session.query(Report, OpSysComponent).join(OpSysComponent).filter(
        Report.id == report_id).first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    daily_history = precompute_history(report_id, 'day')
    weekly_history = precompute_history(report_id, 'week')
    monthly_history = precompute_history(report_id, 'month')

    complete_history = (db.session.query(ReportHistoryMonthly).filter(
        ReportHistoryMonthly.report_id == report_id).all())

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = str(ch.opsysrelease)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    packages = load_packages(db, report_id)
    crashed_versions = {}
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease, Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id) or
                      get_crashed_unknown_package_nevr_for_report(db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name

    forward['oops'] = report.oops

    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return Response(response=json.dumps(forward, cls=WebfafJSONEncoder),
                        status=200,
                        mimetype="application/json")

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)

def new():
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)

            raw_data = request.files[form.file.name].read()

            try:
                data = json.loads(raw_data)
            except Exception as ex:  # pylint: disable=broad-except
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:  # pylint: disable=broad-except
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                if str(exp) == 'uReport must contain affected package':
                    raise InvalidUsage(("Server is not accepting problems "
                                        "from unpackaged files."), 400)

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            max_ureport_length = InvalidUReport.__lobs__["ureport"]

            if len(str(report)) > max_ureport_length:
                raise InvalidUsage("uReport may only be {0} bytes long"
                                   .format(max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                try:
                    osr = (db.session.query(OpSysRelease)
                           .join(OpSys)
                           .filter(OpSys.name ==
                                   systems[report["os"]["name"]].nice_name)
                           .filter(OpSysRelease.version ==
                                   report["os"]["version"])
                           .first())
                except (DatabaseError, InterfaceError) as e:
                    flash("Database unreachable. The uReport couldn't be saved. "
                          "Please try again later.", "danger")
                    logging.exception(e)
                    return render_template("reports/new.html",
                                           form=form), 503  # HTTP Service Unavailable

                if osr:
                    osr_id = osr.id

            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:  # pylint: disable=broad-except
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            with open(fpath, 'w') as file:
                file.write(raw_data.decode("utf-8"))

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                    ureport.validate(report2)
                except FafError:
                    report2 = None

                if report2 is not None:
                    try:
                        solution = find_solution(report2, db=db, osr=osr)
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The solution couldn't be "
                              "retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html",
                                               form=form), 503  # HTTP Service Unavailable

                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}"
                                .format(solution.url))

                        solution_dict = {"cause": solution.cause,
                                         "note": solution.note_text,
                                         "url": solution.url}
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[
                            report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:  # pylint: disable=broad-except
                        logging.exception(e)

                if known:
                    url = url_for('reports.item',
                                  report_id=dbreport.id,
                                  _external=True)
                    parts = [{"reporter": "ABRT Server",
                              "value": url,
                              "type": "url"}]

                    try:
                        bugs = (db.session.query(BzBug)
                                .join(ReportBz)
                                .filter(ReportBz.bzbug_id == BzBug.id)
                                .filter(ReportBz.report_id == dbreport.id)
                                .all())
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The bugs couldn't be "
                              "retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html",
                                               form=form), 503  # HTTP Service Unavailable

                    for bug in bugs:
                        parts.append({"reporter": "Bugzilla",
                                      "value": bug.url,
                                      "type": "url"})

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(
                        p["value"] for p in parts
                        if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                json_response.status_code = 202
                return json_response

            flash("The uReport was saved successfully. Thank you.", "success")
            return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response

            flash(e.message, "danger")
            return render_template("reports/new.html", form=form), e.status_code

    return render_template("reports/new.html", form=form)

def item(report_id, want_object=False):
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    history_select = lambda table, date, date_range: (
        db.session.query(table)
        .filter(table.report_id == report_id)
        .filter(date >= date_range)
        # Flot is confused if not ordered
        .order_by(date)
        .all())

    MAX_DAYS = 20  # Default set on 20
    MAX_WEEK = 20  # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append({'day': today - timedelta(x),
                                  'count': 0,
                                  'opsysrelease_id':
                                      releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(daily_history) < MAX_DAYS:
        if daily_history[-1].day < (today):
            daily_history.append({'day': today,
                                  'count': 0,
                                  'opsysrelease_id':
                                      releases[0].ReportOpSysRelease.opsysrelease_id})

        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({'day': today - timedelta(MAX_DAYS),
                                  'count': 0,
                                  'opsysrelease_id':
                                      releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(ReportHistoryWeekly,
                                    ReportHistoryWeekly.week,
                                    (last_monday - timedelta(days=MAX_WEEK*7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({'week': last_monday - timedelta(x*7),
                                   'count': 0,
                                   'opsysrelease_id':
                                       releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({'week': last_monday,
                                   'count': 0,
                                   'opsysrelease_id':
                                       releases[0].ReportOpSysRelease.opsysrelease_id})

        if weekly_history[0].week > ((last_monday - timedelta(7*MAX_WEEK)).date()):
            weekly_history.append({'week': last_monday - timedelta(7*MAX_WEEK),
                                   'count': 0,
                                   'opsysrelease_id':
                                       releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly,
                                     ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))

    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))
    fdom = first_day_of_month(datetime.datetime.today())

    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({'month': fdom - relativedelta(months=x),
                                    'count': 0,
                                    'opsysrelease_id':
                                        releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({'month': fdom,
                                    'count': 0,
                                    'opsysrelease_id':
                                        releases[0].ReportOpSysRelease.opsysrelease_id})

        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({'month': fdom - relativedelta(months=MAX_MONTH),
                                    'count': 0,
                                    'opsysrelease_id':
                                        releases[0].ReportOpSysRelease.opsysrelease_id})

    complete_history = history_select(ReportHistoryMonthly,
                                      ReportHistoryMonthly.month,
                                      (datetime.datetime.strptime('1970-01-01',
                                                                  '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count,
                                                'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    sorted(unique_ocurrence_os)

    packages = load_packages(db, report_id)
    crashed_versions = []
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}"
                                     .format(pkg.iepoch, pkg.iversion,
                                             pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [email_address for (email_address, ) in
                          (db.session.query(ContactEmail.email_address)
                           .join(ReportContactEmail)
                           .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (get_crashed_package_for_report(db, report.id) or
                      get_crashed_unknown_package_nevr_for_report(db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name

    forward['oops'] = report.oops

    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)

def item(report_id):
    result = db.session.query(Report, OpSysComponent).join(OpSysComponent).filter(Report.id == report_id).first()
    if result is None:
        abort(404)

    report, component = result

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (
        db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
        .filter(ReportOpSysRelease.report_id == report_id)
        .order_by(desc(ReportOpSysRelease.count))
        .all()
    )

    arches = (
        db.session.query(ReportArch, ReportArch.count)
        .filter(ReportArch.report_id == report_id)
        .order_by(desc(ReportArch.count))
        .all()
    )

    modes = (
        db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
        .filter(ReportSelinuxMode.report_id == report_id)
        .order_by(desc(ReportSelinuxMode.count))
        .all()
    )

    history_select = lambda table, date: (
        db.session.query(table)
        .filter(table.report_id == report_id)
        # Flot is confused if not ordered
        .order_by(date)
        .all()
    )

    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day)
    weekly_history = history_select(ReportHistoryWeekly, ReportHistoryWeekly.week)
    monthly_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append(
            (pkg["name"], pkg["count"],
             sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True))
        )

    try:
        backtrace = report.backtraces[0].frames
    except:
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address,) in (
                db.session.query(ContactEmail.email_address)
                .join(ReportContactEmail)
                .filter(ReportContactEmail.report == report)
            )
        ]

    forward = dict(
        report=report,
        component=component,
        releases=metric(releases),
        arches=metric(arches),
        modes=metric(modes),
        daily_history=daily_history,
        weekly_history=weekly_history,
        monthly_history=monthly_history,
        crashed_packages=packages,
        package_counts=package_counts,
        backtrace=backtrace,
        contact_emails=contact_emails,
        solutions=solutions,
    )

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)

def new(request):
    if request.method == 'POST':
        form = NewReportForm(request.POST, request.FILES)
        if form.is_valid():
            db = pyfaf.storage.getDatabase()
            report = form.cleaned_data['file']['converted']

            # maybe determine it better?
            max_ureport_length = InvalidUReport.__lobs__["ureport"]

            if len(str(report)) > max_ureport_length:
                err = "uReport may only be {0} bytes long".format(max_ureport_length)
                if "application/json" in request.META.get("HTTP_ACCEPT", ""):
                    return HttpResponse(json.dumps({"error": err}),
                                        status=413, mimetype="application/json")

                return HttpResponse(err, status=413, mimetype="application/json")

            try:
                dbreport = ureport.is_known(report, db, return_report=True)
            except Exception as e:
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)

            spool_dir = get_spool_dir("reports")
            fname = str(uuid.uuid4())
            with open(os.path.join(spool_dir, 'incoming', fname), 'w') as fil:
                fil.write(form.cleaned_data['file']['json'])

            if 'application/json' in request.META.get('HTTP_ACCEPT', ''):
                response = {'result': known}

                opsys_id = None
                opsys = db.session.query(OpSys).filter(OpSys.name == report["os"]["name"]).first()
                if opsys:
                    opsys_id = opsys.id

                try:
                    report2 = ureport2(report)
                except FafError:
                    report2 = None

                if report2 is not None:
                    solution = find_solution(report2, db=db)
                    if solution is not None:
                        response['message'] = ("Your problem seems to be caused by {0}\n\n"
                                               "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response['message'] += ("\n\nYou can get more information at {0}"
                                                    .format(solution.url))

                        response['solutions'] = [{'cause': solution.cause,
                                                  'note': solution.note_text,
                                                  'url': solution.url}]
                        response['result'] = True

                    try:
                        problemplugin = problemtypes[report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(report2["problem"])
                    except Exception as e:
                        logging.exception(e)

                if known:
                    site = RequestSite(request)
                    url = reverse('webfaf.reports.views.item', args=[dbreport.id])
                    parts = [{"reporter": "ABRT Server",
                              "value": "https://{0}{1}".format(site.domain, url),
                              "type": "url"}]

                    bugs = (db.session.query(BzBug)
                            .join(ReportBz)
                            .filter(ReportBz.bzbug_id == BzBug.id)
                            .filter(ReportBz.report_id == dbreport.id)
                            .all())
                    for bug in bugs:
                        parts.append({"reporter": "Bugzilla",
                                      "value": bug.url,
                                      "type": "url"})

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(p["value"] for p in parts
                                                     if p["type"].lower() == "url")
                    response['reported_to'] = parts

                return HttpResponse(json.dumps(response),
                                    status=202, mimetype='application/json')

            return render_to_response('reports/success.html',
                                      {'report': report, 'known': known},
                                      context_instance=RequestContext(request))
        else:
            err = form.errors['file'][0]
            if 'application/json' in request.META.get('HTTP_ACCEPT', ''):
                response = {'error': err}
                return HttpResponse(json.dumps(response),
                                    status=400, mimetype='application/json')

            return render_to_response('reports/new.html', {'form': form},
                                      context_instance=RequestContext(request))
    else:
        form = NewReportForm()

    return render_to_response('reports/new.html', {'form': form},
                              context_instance=RequestContext(request))