def dashboard():
    """List reports matching the filter form, as HTML or JSON."""
    pagination = Pagination(request)
    filter_form = ReportFilterForm(request.args)

    if not filter_form.validate():
        # Invalid filters degrade to an empty result set.
        matched = []
    elif request_wants_json():
        matched = get_reports(filter_form, pagination)
    else:
        # HTML path renders from the cached, pre-built table rows.
        rows, total = reports_list_table_rows_cache(filter_form, pagination)
        return render_template("reports/list.html",
                               list_table_rows=rows,
                               report_count=total,
                               filter_form=filter_form,
                               pagination=pagination)

    if request_wants_json():
        return jsonify(dict(reports=matched))
    return render_template("reports/list.html",
                           reports=matched,
                           report_count=len(matched),
                           filter_form=filter_form,
                           pagination=pagination)
def dashboard():
    """List problems matching the filter form, as HTML or JSON."""
    pagination = Pagination(request)
    filter_form = ProblemFilterForm(request.args)

    if not filter_form.validate():
        # Invalid filters degrade to an empty result set.
        matched = []
    elif request_wants_json():
        matched = get_problems(filter_form, pagination)
    else:
        # HTML path renders from the cached, pre-built table rows.
        rows, total = problems_list_table_rows_cache(filter_form, pagination)
        return render_template("problems/list.html",
                               list_table_rows=rows,
                               problem_count=total,
                               filter_form=filter_form,
                               pagination=pagination)

    if request_wants_json():
        return jsonify(dict(problems=matched))
    return render_template("problems/list.html",
                           problems=matched,
                           problem_count=len(matched),
                           filter_form=filter_form,
                           pagination=pagination)
def dashboard():
    """Render the report list (HTML via cached rows) or dump it as JSON."""
    pagination = Pagination(request)
    filter_form = ReportFilterForm(request.args)
    form_ok = filter_form.validate()

    if form_ok and not request_wants_json():
        # HTML path: the table body comes from a pre-rendered cache.
        list_table_rows, report_count = reports_list_table_rows_cache(
            filter_form, pagination)
        return render_template("reports/list.html",
                               list_table_rows=list_table_rows,
                               report_count=report_count,
                               filter_form=filter_form,
                               pagination=pagination)

    # JSON path (or invalid form, which yields an empty listing).
    found = get_reports(filter_form, pagination) if form_ok else []
    if request_wants_json():
        return jsonify(dict(reports=found))
    return render_template("reports/list.html",
                           reports=found,
                           report_count=len(found),
                           filter_form=filter_form,
                           pagination=pagination)
def dashboard():
    """Render the problem list (HTML via cached rows) or dump it as JSON."""
    pagination = Pagination(request)
    filter_form = ProblemFilterForm(request.args)
    form_ok = filter_form.validate()

    if form_ok and not request_wants_json():
        # HTML path: the table body comes from a pre-rendered cache.
        list_table_rows, problem_count = problems_list_table_rows_cache(
            filter_form, pagination)
        return render_template("problems/list.html",
                               list_table_rows=list_table_rows,
                               problem_count=problem_count,
                               filter_form=filter_form,
                               pagination=pagination)

    # JSON path (or invalid form, which yields an empty listing).
    found = get_problems(filter_form, pagination) if form_ok else []
    if request_wants_json():
        return jsonify(dict(problems=found))
    return render_template("problems/list.html",
                           problems=found,
                           problem_count=len(found),
                           filter_form=filter_form,
                           pagination=pagination)
def attach():
    """Accept a uReport attachment upload (POST) or render the upload form.

    The uploaded file must be valid JSON and pass ureport attachment
    validation; it is then spooled under a random name into the
    ``attachments_incoming`` directory. Responds with HTTP 202 on success;
    validation/size errors are reported via InvalidUsage (400/413) either
    as JSON or as a flashed message on the re-rendered form.
    """
    form = NewAttachmentForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)
            raw_data = request.files[form.file.name].read()

            try:
                data = json.loads(raw_data)
            except ValueError:
                # Narrowed from a bare `except:` — json.loads signals bad
                # input with ValueError (JSONDecodeError); the bare form
                # also masked unrelated failures.
                raise InvalidUsage("Invalid JSON file", 400)

            try:
                ureport.validate_attachment(data)
            except Exception as ex:  # pylint: disable=broad-except
                raise InvalidUsage("Validation failed: %s" % ex, 400)

            attachment = data
            # Reject oversized attachments (length of the str-serialized form).
            max_attachment_length = 2048
            if len(str(attachment)) > max_attachment_length:
                err = "uReport attachment may only be {0} bytes long" \
                    .format(max_attachment_length)
                raise InvalidUsage(err, 413)

            # Spool the raw upload under a collision-free random name.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["attachments_incoming"], fname)
            # Renamed from `file`, which shadowed the builtin.
            with open(fpath, "w") as fileobj:
                fileobj.write(raw_data.decode("utf-8"))

            if request_wants_json():
                json_response = jsonify({"result": True})
                json_response.status_code = 202
                return json_response

            flash("The attachment was saved successfully. Thank you.",
                  "success")
            return render_template("reports/attach.html", form=form), 202
        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            flash(e.message, "danger")
            return render_template("reports/attach.html",
                                   form=form), e.status_code
    return render_template("reports/attach.html", form=form)
def attach():
    """Accept a uReport attachment upload (POST) or render the upload form.

    The uploaded file must be valid JSON and pass ureport attachment
    validation; it is then spooled under a random name into the
    ``attachments_incoming`` directory. Responds with HTTP 202 on success;
    validation/size errors are reported via InvalidUsage (400/413) either
    as JSON or as a flashed message on the re-rendered form.
    """
    form = NewAttachmentForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)
            raw_data = request.files[form.file.name].read()
            try:
                data = json.loads(raw_data)
            except ValueError:
                # Narrowed from a bare `except:` — json.loads signals bad
                # input with ValueError (JSONDecodeError).
                raise InvalidUsage("Invalid JSON file", 400)
            try:
                ureport.validate_attachment(data)
            except Exception as ex:  # pylint: disable=broad-except
                raise InvalidUsage("Validation failed: %s" % ex, 400)
            attachment = data
            # Reject oversized attachments (length of the str-serialized form).
            max_attachment_length = 2048
            if len(str(attachment)) > max_attachment_length:
                err = "uReport attachment may only be {0} bytes long" \
                    .format(max_attachment_length)
                raise InvalidUsage(err, 413)
            # Spool the raw upload under a collision-free random name.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["attachments_incoming"], fname)
            # BUGFIX: raw_data is bytes — open in binary mode; text mode
            # raises TypeError on Python 3 (byte content is unchanged).
            with open(fpath, "wb") as fileobj:
                fileobj.write(raw_data)
            if request_wants_json():
                json_response = jsonify({"result": True})
                json_response.status_code = 202
                return json_response
            else:
                flash("The attachment was saved successfully. Thank you.",
                      "success")
                return render_template("reports/attach.html", form=form), 202
        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("reports/attach.html",
                                       form=form), e.status_code
    return render_template("reports/attach.html", form=form)
def get_hash(os=None, release=None, since=None, to=None):
    """Return all report hashes, optionally narrowed by OS, release and a
    "%Y-%m-%d" date range, as JSON under the "data" key.

    NOTE(review): the `os` parameter shadows the stdlib `os` module inside
    this function; renaming it would change the view's keyword interface
    (URL routing kwargs), so it is only flagged here.
    """
    if to:
        # Closed date range: parse both endpoints. `since` is assumed to be
        # supplied whenever `to` is — TODO confirm against the route rules.
        to = datetime.datetime.strptime(to, "%Y-%m-%d")
        since = datetime.datetime.strptime(since, "%Y-%m-%d")
        report_hash = queries.get_all_report_hashes(db,
                                                    opsys=os,
                                                    opsys_releases=release,
                                                    date_from=since,
                                                    date_to=to)
    elif since:
        # Open-ended range starting at `since`.
        since = datetime.datetime.strptime(since, "%Y-%m-%d")
        report_hash = queries.get_all_report_hashes(db,
                                                    opsys=os,
                                                    opsys_releases=release,
                                                    date_from=since)
    elif release:
        report_hash = queries.get_all_report_hashes(db,
                                                    opsys=os,
                                                    opsys_releases=release)
    elif os:
        report_hash = queries.get_all_report_hashes(db, opsys=os)
    else:
        # No filters: dump every known report hash.
        report_hash = queries.get_all_report_hashes(db)
    r_hash = []
    for item in report_hash:
        r_hash.append(item.hash)
    if request_wants_json():
        return jsonify({"data": r_hash})
    else:
        # Only the JSON representation is supported by this endpoint.
        abort(405)
def get_hash(opsys=None, release=None, since=None, to=None):
    """Dump report hashes (optionally filtered by OS, release and a
    "%Y-%m-%d" date range) as JSON under the "data" key; 405 otherwise."""
    date_format = "%Y-%m-%d"
    if to:
        # Closed range: parse `to` first, then `since` (matches the order
        # in which malformed inputs are reported).
        to = datetime.datetime.strptime(to, date_format)
        since = datetime.datetime.strptime(since, date_format)
        hashes = queries.get_all_report_hashes(db, opsys=opsys,
                                               opsys_releases=release,
                                               date_from=since,
                                               date_to=to)
    elif since:
        # Open-ended range starting at `since`.
        since = datetime.datetime.strptime(since, date_format)
        hashes = queries.get_all_report_hashes(db, opsys=opsys,
                                               opsys_releases=release,
                                               date_from=since)
    elif release:
        hashes = queries.get_all_report_hashes(db, opsys=opsys,
                                               opsys_releases=release)
    elif opsys:
        hashes = queries.get_all_report_hashes(db, opsys=opsys)
    else:
        hashes = queries.get_all_report_hashes(db)

    r_hash = [row.hash for row in hashes]

    if request_wants_json():
        return jsonify({"data": r_hash})
    return abort(405)
def dashboard():
    """Stream the problem list page, or return matching problems as JSON."""
    filter_form = ProblemFilterForm(request.args)
    # An invalid filter degrades to an empty listing rather than an error.
    problems = list(get_problems(filter_form)) if filter_form.validate() else []

    if request_wants_json():
        return jsonify(dict(problems=problems))

    # Stream the template so large listings render incrementally.
    body = stream_template("problems/list.html",
                           problems=problems,
                           filter_form=filter_form)
    return Response(stream_with_context(body))
def new():
    """Accept a new uReport (POST) or render the submission form.

    Flow for POST: read the uploaded file, parse it as JSON (invalid
    payloads are archived via _save_invalid_ureport), run uReport
    validation (failures are archived too, and unknown operating systems
    recorded), enforce the maximum size, check whether the report is
    already known, and spool the raw data into ``reports_incoming``.
    JSON clients get a 202 response that may include a known solution,
    the backtrace hash and existing bugzilla links; HTML clients get a
    flashed message on the re-rendered form. All rejection paths raise
    InvalidUsage, which is converted to an error response below.
    """
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)
            raw_data = request.files[form.file.name].read()

            try:
                data = json.loads(raw_data)
            except Exception as ex:  # pylint: disable=broad-except
                # Keep malformed submissions around for later inspection.
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:  # pylint: disable=broad-except
                # Attribute the invalid report to its reporter if present.
                reporter = None
                if ("reporter" in data and "name" in data["reporter"]
                        and "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                # Record operating systems we do not know about yet.
                if ("os" in data and "name" in data["os"]
                        and data["os"]["name"] not in systems
                        and data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                # Unpackaged crashes get a dedicated, clearer message.
                if str(exp) == 'uReport must contain affected package':
                    raise InvalidUsage(("Server is not accepting problems "
                                        "from unpackaged files."), 400)

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            # Size cap comes from the DB column (LOB) capacity.
            max_ureport_length = InvalidUReport.__lobs__["ureport"]

            if len(str(report)) > max_ureport_length:
                raise InvalidUsage(
                    "uReport may only be {0} bytes long".format(
                        max_ureport_length), 413)

            # Resolve the reported OS release to a DB row, if we track it.
            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                osr = (db.session.query(OpSysRelease).join(OpSys).filter(
                    OpSys.name == systems[report["os"]["name"]].nice_name
                ).filter(
                    OpSysRelease.version == report["os"]["version"]).first())

                if osr:
                    osr_id = osr.id

            # Best-effort lookup: a failure here must not reject the upload.
            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:  # pylint: disable=broad-except
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)

            # Spool the raw upload under a collision-free random name.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            with open(fpath, 'w') as file:
                file.write(raw_data.decode("utf-8"))

            if request_wants_json():
                response = {'result': known}

                # Convert to the ureport2 form for solution/hash lookup;
                # give up silently if conversion does not validate.
                try:
                    report2 = ureport2(report)
                    ureport.validate(report2)
                except FafError:
                    report2 = None

                if report2 is not None:
                    # Attach a known solution, if one matches this report.
                    solution = find_solution(report2, db=db, osr=osr)
                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}".
                                format(solution.url))

                        solution_dict = {
                            "cause": solution.cause,
                            "note": solution.note_text,
                            "url": solution.url
                        }
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        response["result"] = True

                    # Best-effort: include the backtrace hash for the client.
                    try:
                        problemplugin = problemtypes[report2["problem"]
                                                     ["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:  # pylint: disable=broad-except
                        logging.exception(e)

                if known:
                    # Point the client at the existing report and any
                    # bugzilla entries already associated with it.
                    url = url_for('reports.item', report_id=dbreport.id,
                                  _external=True)
                    parts = [{
                        "reporter": "ABRT Server",
                        "value": url,
                        "type": "url"
                    }]

                    bugs = (db.session.query(BzBug).join(ReportBz).filter(
                        ReportBz.bzbug_id == BzBug.id).filter(
                            ReportBz.report_id == dbreport.id).all())
                    for bug in bugs:
                        parts.append({
                            "reporter": "Bugzilla",
                            "value": bug.url,
                            "type": "url"
                        })

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(
                        p["value"] for p in parts
                        if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                json_response.status_code = 202
                return json_response

            flash("The uReport was saved successfully. Thank you.",
                  "success")
            return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            flash(e.message, "danger")
            return render_template("reports/new.html",
                                   form=form), e.status_code

    return render_template("reports/new.html", form=form)
def item(report_id, want_object=False):
    """Show the detail page for one report (or return its data as a dict).

    Gathers the report's component, executable, per-release/arch/SELinux
    metrics, daily/weekly/monthly history (padded with zero entries so the
    charts always span the full window), package versions, backtrace and
    maintainer info.

    :param report_id: primary key of the Report to display.
    :param want_object: when True, return the collected data dict instead
        of rendering a response (used by other views).
    Aborts with 404 when the report does not exist.
    """
    result = (db.session.query(Report, OpSysComponent).join(OpSysComponent).filter(
        Report.id == report_id).first())
    if result is None:
        abort(404)
    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    history_select = lambda table, date, date_range: (
        db.session.query(table).filter(table.report_id == report_id).filter(
            date >= date_range)
        # Flot is confused if not ordered
        .order_by(date).all())

    MAX_DAYS = 20  # Default set on 20
    MAX_WEEK = 20  # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        # No history at all: synthesize zero-count points for the chart.
        for x in range(0, MAX_DAYS):
            daily_history.append({
                'day': today - timedelta(x),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(daily_history) < MAX_DAYS:
        # Pad missing endpoints so the chart spans the full window.
        if daily_history[-1].day < (today):
            daily_history.append({
                'day': today,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({
                'day': today - timedelta(MAX_DAYS),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({
                'week': last_monday - timedelta(x * 7),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({
                'week': last_monday,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append({
                'week': last_monday - timedelta(7 * MAX_WEEK),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly,
                                     ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))
    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))
    fdom = first_day_of_month(datetime.datetime.today())
    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({
                'month': fdom - relativedelta(months=x),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({
                'month': fdom,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })
        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({
                'month': fdom - relativedelta(months=MAX_MONTH),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id
            })

    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    # Aggregate total and unique occurrence counts per OS release name.
    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                # BUGFIX: the original reset ch.count here instead of
                # ch.unique, so None values leaked into the += below.
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique
    # (A discarded `sorted(unique_ocurrence_os)` call was removed here —
    # its return value was never used, so it had no effect.)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1),
                                      reverse=True)))

    # Best-effort: a report may have no backtraces at all.
    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    # Number the frames for display (1-based).
    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        # Contact addresses are only exposed to component maintainers.
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease, Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)
    forward['error_name'] = report.error_name
    forward['oops'] = report.oops

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
def by_daterange(since, to):
    '''
    Render date-based report statistics including reports `since` date
    until `to` date.
    '''
    try:
        # Accept either date objects or "%Y-%m-%d" strings.
        if isinstance(since, str):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
        if isinstance(to, str):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        # Malformed date string -> client error (narrowed from bare except).
        return abort(400)

    # Ensure since <= to. BUGFIX: the original assigned sequentially
    # (`since = min(since, to)` then `to = max(since, to)`), which collapsed
    # a reversed range to a single day instead of swapping the endpoints.
    since, to = min(since, to), max(since, to)

    # Pick history granularity appropriate to the window size.
    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Restrict a history query to the [since, to) window.
        return query.filter(hist_field >= since).filter(hist_field < to)

    _, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []

    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(
            db, release.opsys.name, release.version, history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue

        percentage = int(release_sum * 100.0 / total)

        comps = queries.get_report_count_by_component(
            db, release.opsys.name, release.version, history=history)

        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))

        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data, key=lambda x: x['sum'],
                           reverse=True),
    }

    if request_wants_json():
        return jsonify(data)
    return render_template("stats/by_date.html", **data)
def item(problem_id, component_names=None):
    """Show the detail page for one problem; optionally reassign components.

    When ``component_names`` is given (comma-separated), the problem's
    component assignment is replaced inside a transaction and a
    ProblemReassign record is stamped with the acting user and date.
    The page aggregates OS-release/arch/executable metrics, package
    version counts, numbered backtrace frames and a query string of up
    to 10 representative backtrace hashes.

    Aborts with 404 when the problem does not exist.
    """
    components_form = ProblemComponents()

    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()
    if problem is None:
        # abort() raises on its own; the original's `raise abort(404)`
        # never reached the `raise`.
        abort(404)

    if component_names:
        try:
            # Replace the whole component assignment atomically.
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError("Component {} not found.".format(
                        comp_name))
                db.session.add(ProblemComponent(problem_id=problem.id,
                                                component_id=component.id,
                                                order=index + 1))

            # Record who reassigned the problem and when.
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)
            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    # Collect distinct solutions (by cause) across the problem's reports.
    solutions = []
    equal_solution = lambda s: [x for x in solutions if s.cause == x.cause]
    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())
    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())
    arches = (db.session.query(Arch, sub.c.cnt)
              .join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Number the frames of every backtrace for display (1-based).
    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash).all())
    # Limit to 10 bt_hashes (otherwise the URL can get too long)
    # Select the 10 hashes uniformly from the entire list to make sure it is a
    # good representation. (Slicing the 10 first could mean the 10 oldest
    # are selected which is not a good representation.)
    bt_hashes_limited = []
    if bt_hashes:
        # BUGFIX: guard against an empty hash list — the original computed
        # len(bt_hashes)/float(k) with k == 0 and raised ZeroDivisionError.
        k = min(len(bt_hashes), 10)
        d = len(bt_hashes) / float(k)
        a = 0
        for _ in range(k):
            bt_hashes_limited.append("bth=" + bt_hashes[int(a)][0])
            a += d
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs,
               "solutions": solutions,
               "components_form": components_form
               }

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)
def item(report_id, want_object=False) -> Union[Dict[str, Any], Response, str]:
    """Show the detail page for one report (or return its data as a dict).

    Uses precomputed day/week/month history, derives the last affected
    version from the crashed package's version counts, and gathers
    metrics, packages, backtrace and maintainer info.

    :param report_id: primary key of the Report to display.
    :param want_object: when True, return the collected data dict instead
        of rendering a response (used by other views).
    Aborts with 404 when the report does not exist.
    """
    result = (db.session.query(Report, OpSysComponent).join(OpSysComponent).filter(
        Report.id == report_id).first())
    if result is None:
        abort(404)
    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    daily_history = precompute_history(report_id, 'day')
    weekly_history = precompute_history(report_id, 'week')
    monthly_history = precompute_history(report_id, 'month')

    complete_history = (db.session.query(ReportHistoryMonthly).filter(
        ReportHistoryMonthly.report_id == report_id).all())

    # Aggregate total and unique occurrence counts per OS release name.
    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = str(ch.opsysrelease)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                # BUGFIX: the original reset ch.count here instead of
                # ch.unique, so None values leaked into the += below.
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    packages = load_packages(db, report_id)

    crashed_versions = {}
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1),
                                      reverse=True)))

    # Best-effort: a report may have no backtraces at all.
    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    # Number the frames for display (1-based).
    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        # Contact addresses are only exposed to component maintainers.
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease, Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)
    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return Response(response=json.dumps(forward, cls=WebfafJSONEncoder),
                        status=200,
                        mimetype="application/json")

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
def by_daterange(since, to):
    '''
    Render date-based report statistics including reports `since` date
    until `to` date.
    '''
    try:
        # Accept either date objects or "%Y-%m-%d" strings
        # (six.string_types keeps this working on both Python 2 and 3).
        if isinstance(since, six.string_types):
            since = datetime.datetime.strptime(since, "%Y-%m-%d").date()
        if isinstance(to, six.string_types):
            to = datetime.datetime.strptime(to, "%Y-%m-%d").date()
    except ValueError:
        # Malformed date string -> client error (narrowed from bare except).
        return abort(400)

    # Ensure since <= to. BUGFIX: the original assigned sequentially
    # (`since = min(since, to)` then `to = max(since, to)`), which collapsed
    # a reversed range to a single day instead of swapping the endpoints.
    since, to = min(since, to), max(since, to)

    # Pick history granularity appropriate to the window size.
    history = 'daily'
    day_count = (to - since).days
    if day_count > 30:
        history = 'weekly'
    if day_count > 360:
        history = 'monthly'

    def date_filter(query):
        # Restrict a history query to the [since, to) window.
        return query.filter(hist_field >= since).filter(hist_field < to)

    # The table half of the target is unused here; keep only the field.
    _, hist_field = queries.get_history_target(history)
    total_query = queries.get_history_sum(db, history=history)
    total = date_filter(total_query).one()[0]

    release_data = []

    for release in queries.get_releases(db):
        release_sum = queries.get_history_sum(db,
                                              release.opsys.name,
                                              release.version,
                                              history=history)
        release_sum = date_filter(release_sum).one()[0]
        if not release_sum:
            continue

        percentage = int(release_sum * 100.0 / total)

        comps = queries.get_report_count_by_component(db,
                                                      release.opsys.name,
                                                      release.version,
                                                      history=history)

        comp_data = []
        for comp, count in date_filter(comps).all():
            comp_percentage = int(count * 100.0 / release_sum)
            comp_data.append((comp, count, comp_percentage))

        release_data.append({
            'release': release,
            'sum': release_sum,
            'comps': comp_data,
            'percentage': percentage,
        })

    data = {
        'since': since,
        'to': to,
        'total': total,
        'releases': sorted(release_data, key=lambda x: x['sum'],
                           reverse=True),
    }

    if request_wants_json():
        return jsonify(data)
    return render_template("stats/by_date.html", **data)
def item(problem_id, component_names=None):
    """
    Show the problem detail page (or its JSON form) for `problem_id`.

    When `component_names` (comma-separated) is given, the problem's
    component assignment is replaced with the listed components and the
    reassignment (who/when) is recorded before rendering.
    """
    components_form = ProblemComponents()

    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()
    if problem is None:
        raise abort(404)

    if component_names:
        try:
            # Drop the old component assignment and rebuild it in the
            # order the caller supplied.
            (db.session.query(ProblemComponent)
             .filter_by(problem_id=problem_id)
             .delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent)
                             .filter_by(name=comp_name)
                             .first())
                if not component:
                    raise ValueError("Component {} not found.".format(
                        comp_name))
                db.session.add(ProblemComponent(problem_id=problem.id,
                                                component_id=component.id,
                                                order=index + 1))
            # Record who reassigned the problem and when.
            reassign = (db.session.query(ProblemReassign)
                        .filter_by(problem_id=problem_id)
                        .first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)
            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')

    report_ids = [report.id for report in problem.reports]

    solutions = []

    def equal_solution(s):
        # Truthy when a solution with the same cause was already found.
        return [x for x in solutions if s.cause == x.cause]

    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)

    # Aggregate report counts per OS release / architecture / executable.
    sub = (db.session.query(ReportOpSysRelease.opsysrelease_id,
                            func.sum(ReportOpSysRelease.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportOpSysRelease.opsysrelease_id)
           .subquery())
    osreleases = (db.session.query(OpSysRelease, sub.c.cnt)
                  .join(sub)
                  .order_by(desc("cnt"))
                  .all())

    sub = (db.session.query(ReportArch.arch_id,
                            func.sum(ReportArch.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportArch.arch_id)
           .subquery())
    arches = (db.session.query(Arch, sub.c.cnt)
              .join(sub)
              .order_by(desc("cnt"))
              .all())

    exes = (db.session.query(ReportExecutable.path,
                             func.sum(ReportExecutable.count).label("cnt"))
            .join(Report)
            .filter(Report.id.in_(report_ids))
            .group_by(ReportExecutable.path)
            .order_by(desc("cnt"))
            .all())

    sub = (db.session.query(ReportPackage.installed_package_id,
                            func.sum(ReportPackage.count).label("cnt"))
           .join(Report)
           .filter(Report.id.in_(report_ids))
           .group_by(ReportPackage.installed_package_id)
           .subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()

    packages_unknown = (db.session.query(ReportUnknownPackage,
                                         ReportUnknownPackage.count)
                        .join(Report)
                        .filter(Report.id.in_(report_ids))).all()

    packages = packages_known + packages_unknown

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    # Number frames sequentially within each backtrace for the template.
    for report in problem.reports:
        for backtrace in report.backtraces:
            for fid, frame in enumerate(backtrace.frames, start=1):
                frame.nice_order = fid

    bt_hashes = (db.session.query(ReportHash.hash)
                 .join(Report)
                 .join(Problem)
                 .filter(Problem.id == problem_id)
                 .distinct(ReportHash.hash).all())
    # Limit to 10 bt_hashes (otherwise the URL can get too long)
    # Select the 10 hashes uniformly from the entire list to make sure it is a
    # good representation. (Slicing the 10 first could mean the 10 oldest
    # are selected which is not a good representation.)
    k = min(len(bt_hashes), 10)
    bt_hashes_limited = []
    # BUG FIX: the original divided by k unconditionally, raising
    # ZeroDivisionError for a problem with no backtrace hashes.
    if k > 0:
        step = len(bt_hashes) / float(k)
        pos = 0.0
        for _ in range(k):
            bt_hashes_limited.append("bth=" + bt_hashes[int(pos)][0])
            pos += step
    bt_hash_qs = "&".join(bt_hashes_limited)

    forward = {"problem": problem,
               "osreleases": metric(osreleases),
               "arches": metric(arches),
               "exes": metric(exes),
               "package_counts": package_counts,
               "bt_hash_qs": bt_hash_qs,
               "solutions": solutions,
               "components_form": components_form
               }

    if request_wants_json():
        return jsonify(forward)

    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    if report_ids:
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form

    return render_template("problems/item.html", **forward)
def new(url_fname=None):
    """
    Handle dump dir archive uploads.

    POST: the archive comes from the validated upload form.
    PUT: the archive is the raw request body and `url_fname` supplies
    its file name. Quota and size limits come from `config`; any
    violation is reported via InvalidUsage with the matching HTTP code.
    """
    form = NewDumpDirForm()
    if request.method in ["POST", "PUT"]:
        try:
            if request.method == "POST":
                if not form.validate() or form.file.name not in request.files:
                    raise InvalidUsage("Invalid form data.", 400)
                archive_file = request.files[form.file.name]
                archive_fname = archive_file.filename

            if request.method == "PUT":
                # NOTE(review): request.stream.read() returns bytes on
                # Python 3 while StringIO expects str — confirm which
                # interpreter/variant this path runs under.
                archive_file = StringIO(request.stream.read())
                archive_fname = url_fname

            # Determine the upload size by seeking to the end.
            archive_file.seek(0, os.SEEK_END)
            archive_size = archive_file.tell()
            archive_file.seek(0)

            if not archive_size:
                raise InvalidUsage("Empty archive received", 400)

            if not check_filename(archive_fname):
                raise InvalidUsage("Wrong archive file name", 400)

            # sanitize input filename just to be sure
            archive_fname = secure_filename(archive_fname)

            if not os.path.exists(paths["dumpdir"]):
                raise InvalidUsage(
                    "That's embarrassing! We have some troubles"
                    " with deployment. Please try again later.", 500)

            # Enforce the quota on the number of stored archives.
            count = 0
            try:
                count = sum(
                    1 for x in os.listdir(paths["dumpdir"])
                    if os.path.isfile(os.path.join(paths["dumpdir"], x)))
            except Exception:
                raise InvalidUsage(
                    "That's embarrassing! We have some troubles"
                    " with storage. Please try again later.", 500)

            if count >= int(config["dumpdir.cachedirectorycountquota"]):
                raise InvalidUsage(
                    "That's embarrassing! We have reached"
                    " the limit of uploaded archives."
                    " Please try again later.", 500)

            if archive_size > int(config["dumpdir.maxdumpdirsize"]):
                raise InvalidUsage("Dump dir archive is too large", 413)

            # Enforce the quota on total disk usage of the dump dir.
            used_space = 0.0
            try:
                used_space = sum(
                    float(os.path.getsize(x))
                    for x in map(lambda f: os.path.join(paths["dumpdir"], f),
                                 os.listdir(paths["dumpdir"]))
                    if os.path.isfile(x))
            except Exception:
                raise InvalidUsage(
                    "That's embarrassing! We have some"
                    " troubles with disk space."
                    " Please try again later.", 500)

            quota = int(config["dumpdir.cachedirectorysizequota"])
            if (quota - archive_size) < used_space:
                raise InvalidUsage(
                    "That's embarrassing! We ran out"
                    " of disk space."
                    " Please try again later.", 500)

            fpath = os.path.join(paths["dumpdir"], archive_fname)
            if os.path.exists(fpath):
                raise InvalidUsage("Dump dir archive already exists.", 409)

            with open(fpath, 'w') as dest:
                dest.write(archive_file.read())

            if request_wants_json():
                response = jsonify({"ok": "ok"})
                response.status_code = 201
                return response

            flash("Uploaded successfully.")
            return render_template("dumpdirs/new.html", form=form)
        except InvalidUsage as e:
            # 5xx is our fault — log as error; 4xx is the client's.
            if e.status_code == 500:
                logger.error(e.message)
            elif e.status_code >= 400:
                logger.warning(e.message)
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            flash(e.message, "danger")
            return render_template("dumpdirs/new.html",
                                   form=form), e.status_code
    return render_template("dumpdirs/new.html", form=form)
def item(problem_id, component_names=None):
    """
    Show the problem detail page (or its JSON form) for `problem_id`.

    When `component_names` (comma-separated) is given, the problem's
    component assignment is replaced with the listed components and the
    reassignment (who/when) is recorded before rendering. Aborts with
    404 when the problem does not exist.
    """
    components_form = ProblemComponents()
    problem = db.session.query(Problem).filter(
        Problem.id == problem_id).first()
    if problem is None:
        raise abort(404)
    if component_names:
        try:
            # Drop the old assignment, then rebuild it in the order the
            # caller supplied (1-based `order`).
            (db.session.query(ProblemComponent).filter_by(
                problem_id=problem_id).delete())
            for index, comp_name in enumerate(component_names.split(',')):
                component = (db.session.query(OpSysComponent).filter_by(
                    name=comp_name).first())
                if not component:
                    raise ValueError(
                        "Component {} not found.".format(comp_name))
                db.session.add(
                    ProblemComponent(problem_id=problem.id,
                                     component_id=component.id,
                                     order=index + 1))
            # Record who reassigned the problem and when.
            reassign = (db.session.query(ProblemReassign).filter_by(
                problem_id=problem_id).first())
            if reassign is None:
                reassign = ProblemReassign(problem_id=problem_id)
            reassign.date = datetime.date.today()
            reassign.username = g.user.username
            db.session.add(reassign)
            db.session.commit()
        except SQLAlchemyError:
            db.session.rollback()
            flash("Database transaction error.", 'error')
        except ValueError as e:
            db.session.rollback()
            flash(str(e), 'error')
    report_ids = [report.id for report in problem.reports]
    solutions = []
    # Truthy when a solution with the same cause was already collected.
    equal_solution = lambda s: [x for x in solutions if s.cause == x.cause]
    for report in problem.reports:
        if report.max_certainty is not None:
            osr = get_report_opsysrelease(db=db, report_id=report.id)
            solution = find_solution(report, db=db, osr=osr)
            if solution and not equal_solution(solution):
                solutions.append(solution)
    # Aggregate report counts per OS release.
    sub = (db.session.query(
        ReportOpSysRelease.opsysrelease_id,
        func.sum(ReportOpSysRelease.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportOpSysRelease.opsysrelease_id).subquery())
    osreleases = (db.session.query(OpSysRelease, sub.c.cnt).join(sub).order_by(
        desc("cnt")).all())
    # Aggregate report counts per architecture.
    sub = (db.session.query(
        ReportArch.arch_id,
        func.sum(ReportArch.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(ReportArch.arch_id).subquery())
    arches = (db.session.query(Arch,
                               sub.c.cnt).join(sub).order_by(
                                   desc("cnt")).all())
    # Aggregate report counts per crashed executable path.
    exes = (db.session.query(
        ReportExecutable.path,
        func.sum(ReportExecutable.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportExecutable.path).order_by(desc("cnt")).all())
    # Aggregate report counts per installed package (known + unknown).
    sub = (db.session.query(
        ReportPackage.installed_package_id,
        func.sum(ReportPackage.count).label("cnt")).join(Report).filter(
            Report.id.in_(report_ids)).group_by(
                ReportPackage.installed_package_id).subquery())
    packages_known = db.session.query(Package, sub.c.cnt).join(sub).all()
    packages_unknown = (db.session.query(
        ReportUnknownPackage, ReportUnknownPackage.count).join(Report).filter(
            Report.id.in_(report_ids))).all()
    packages = packages_known + packages_unknown
    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for (pkg, cnt) in packages:
        names[pkg.name]["name"] = pkg.name
        names[pkg.name]["count"] += cnt
        names[pkg.name]["versions"][pkg.evr()] += cnt
    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1), reverse=True)))
    # Number frames sequentially within each backtrace for the template.
    for report in problem.reports:
        for backtrace in report.backtraces:
            fid = 0
            for frame in backtrace.frames:
                fid += 1
                frame.nice_order = fid
    bt_hashes = (db.session.query(
        ReportHash.hash).join(Report).join(Problem).filter(
            Problem.id == problem_id).distinct(ReportHash.hash).all())
    forward = {
        "problem": problem,
        "osreleases": metric(osreleases),
        "arches": metric(arches),
        "exes": metric(exes),
        "package_counts": package_counts,
        "solutions": solutions,
        "components_form": components_form
    }
    if not bt_hashes:
        logger.warning("No backtrace hashes found for problem #%d", problem_id)
    else:
        # Generate a permalink for this problem. We do this by uniformly
        # picking (at most) 10 hashes from the list. This ensures the
        # selected hashes are more or less representative of the problem.
        k = min(len(bt_hashes), 10)
        # A hint of determinism in this uncertain world.
        # (Seeding with problem_id makes the sample stable per problem.)
        r = random.Random(problem_id)
        hashes_sampled = r.sample(bt_hashes, k)
        permalink_query = "&".join("bth={}".format(bth)
                                   for (bth, ) in hashes_sampled)
        forward["permalink_query"] = permalink_query
    if request_wants_json():
        return jsonify(forward)
    is_maintainer = is_problem_maintainer(db, g.user, problem)
    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)
    if report_ids:
        # Offer a backtrace diff form when there is at least one report.
        bt_diff_form = BacktraceDiffForm()
        bt_diff_form.lhs.choices = [(id, id) for id in report_ids]
        bt_diff_form.rhs.choices = bt_diff_form.lhs.choices
        forward['bt_diff_form'] = bt_diff_form
    return render_template("problems/item.html", **forward)
def item(report_id, want_object=False):
    """
    Show the report detail page (or its JSON form) for `report_id`.

    When `want_object` is True, return the collected `forward` dict
    instead of an HTTP response (used by internal callers). Aborts with
    404 when the report does not exist.
    """
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())
    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None
    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    def history_select(table, date, date_range):
        # Flot is confused if not ordered
        return (db.session.query(table)
                .filter(table.report_id == report_id)
                .filter(date >= date_range)
                .order_by(date)
                .all())

    MAX_DAYS = 20  # Default set on 20
    MAX_WEEK = 20  # Default set on 20
    MAX_MONTH = 20  # Default set on 20

    today = datetime.date.today()

    # NOTE(review): the padding below dereferences releases[0]; it looks
    # like it assumes every report has at least one OS release row —
    # confirm before relying on it.

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))
    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append({
                'day': today - timedelta(x),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(daily_history) < MAX_DAYS:
        # Anchor both chart ends so the plot spans the full window.
        if daily_history[-1].day < (today):
            daily_history.append({
                'day': today,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({
                'day': today - timedelta(MAX_DAYS),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())
    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({
                'week': last_monday - timedelta(x * 7),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({
                'week': last_monday,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append({
                'week': last_monday - timedelta(7 * MAX_WEEK),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 months
    monthly_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (today - relativedelta(months=MAX_MONTH)))

    def first_day_of_month(t):
        return datetime.date(t.year, t.month, 1)

    fdom = first_day_of_month(datetime.datetime.today())
    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({
                'month': fdom - relativedelta(months=x),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({
                'month': fdom,
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})
        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({
                'month': fdom - relativedelta(months=MAX_MONTH),
                'count': 0,
                'opsysrelease_id':
                    releases[0].ReportOpSysRelease.opsysrelease_id})

    # Complete monthly history since the epoch, used for the per-OS
    # occurrence totals below.
    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            # BUG FIX: the original reset ch.count here instead of
            # ch.unique, which left ch.unique as None and broke the
            # "+= ch.unique" accumulation below.
            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count,
                                                'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    # (A stray `sorted(unique_ocurrence_os)` call whose result was
    # discarded was removed here — it had no effect.)

    packages = load_packages(db, report_id)

    crashed_versions = []
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1),
                   reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except Exception:  # best-effort: the report may have no backtrace
        backtrace = []

    # Number frames sequentially for the template.
    for fid, frame in enumerate(backtrace, start=1):
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    # Contact e-mails are only exposed to maintainers.
    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in
            (db.session.query(ContactEmail.email_address)
             .join(ReportContactEmail)
             .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (
        get_crashed_package_for_report(db, report.id) or
        get_crashed_unknown_package_nevr_for_report(db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except Exception:  # best-effort: missing backtrace
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize
            forward['probably_fixed'] = tmp_dict

        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(
            report.first_occurrence, report.last_occurrence, report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except Exception:  # best-effort serialization
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
def new(url_fname=None):
    """
    Handle dump dir archive uploads.

    POST: the archive comes from the validated upload form.
    PUT: the archive is the raw request body and `url_fname` supplies
    its file name. Quota and size limits come from `config`; any
    violation is reported via InvalidUsage with the matching HTTP code.
    """
    form = NewDumpDirForm()
    if request.method in ["POST", "PUT"]:
        try:
            if request.method == "POST":
                if not form.validate() or form.file.name not in request.files:
                    raise InvalidUsage("Invalid form data.", 400)
                archive_file = request.files[form.file.name]
                archive_fname = archive_file.filename

            if request.method == "PUT":
                archive_file = strIO(request.stream.read())
                archive_fname = url_fname

            # Determine the upload size by seeking to the end.
            archive_file.seek(0, os.SEEK_END)
            archive_size = archive_file.tell()
            archive_file.seek(0)

            if not archive_size:
                raise InvalidUsage("Empty archive received", 400)

            if not check_filename(archive_fname):
                raise InvalidUsage("Wrong archive file name", 400)

            # sanitize input filename just to be sure
            archive_fname = secure_filename(archive_fname)

            if not os.path.exists(paths["dumpdir"]):
                raise InvalidUsage("That's embarrassing! We have some troubles"
                                   " with deployment. Please try again later.",
                                   500)

            # Enforce the quota on the number of stored archives.
            count = 0
            try:
                count = sum(
                    1 for x in os.listdir(paths["dumpdir"])
                    if os.path.isfile(os.path.join(paths["dumpdir"], x)))
            except Exception:
                raise InvalidUsage("That's embarrassing! We have some troubles"
                                   " with storage. Please try again later.",
                                   500)

            if count >= int(config["dumpdir.cachedirectorycountquota"]):
                raise InvalidUsage("That's embarrassing! We have reached"
                                   " the limit of uploaded archives."
                                   " Please try again later.", 500)

            if archive_size > int(config["dumpdir.maxdumpdirsize"]):
                raise InvalidUsage("Dump dir archive is too large", 413)

            # Enforce the quota on total disk usage of the dump dir.
            used_space = 0.0
            try:
                used_space = sum(
                    float(os.path.getsize(x))
                    for x in map(lambda f: os.path.join(paths["dumpdir"], f),
                                 os.listdir(paths["dumpdir"]))
                    if os.path.isfile(x))
            except Exception:
                raise InvalidUsage("That's embarrassing! We have some"
                                   " troubles with disk space."
                                   " Please try again later.", 500)

            quota = int(config["dumpdir.cachedirectorysizequota"])
            if (quota - archive_size) < used_space:
                raise InvalidUsage("That's embarrassing! We ran out"
                                   " of disk space."
                                   " Please try again later.", 500)

            fpath = os.path.join(paths["dumpdir"], archive_fname)
            if os.path.exists(fpath):
                raise InvalidUsage("Dump dir archive already exists.", 409)

            # Archives are binary — write in binary mode.
            with open(fpath, 'wb') as dest:
                dest.write(archive_file.read())

            if request_wants_json():
                response = jsonify({"ok": "ok"})
                response.status_code = 201
                return response

            flash("Uploaded successfully.")
            return render_template("dumpdirs/new.html", form=form)
        except InvalidUsage as e:
            # 5xx is our fault — log as error; 4xx is the client's.
            if e.status_code == 500:
                logger.error(e.message)
            elif e.status_code >= 400:
                logger.warning(e.message)
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            flash(e.message, "danger")
            return render_template("dumpdirs/new.html",
                                   form=form), e.status_code
    return render_template("dumpdirs/new.html", form=form)
def new():
    """
    Accept a new uReport submission (POST) or render the submission form.

    The uploaded JSON is parsed and validated; invalid reports are
    archived via _save_invalid_ureport and rejected with HTTP 400/413.
    Valid reports are written into paths["reports_incoming"] for later
    processing. JSON clients get a 202 response that may include a known
    solution, a bthash and links to existing bug reports.
    """
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)

            raw_data = request.files[form.file.name].read()
            try:
                data = json.loads(raw_data)
            except Exception as ex:  # pylint: disable=broad-except
                # Keep the unparseable payload for later inspection.
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:  # pylint: disable=broad-except
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                # Track operating systems we do not know yet.
                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                if str(exp) == 'uReport must contain affected package':
                    raise InvalidUsage(("Server is not accepting problems "
                                        "from unpackaged files."), 400)

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            # Reject reports that would not fit into the DB column.
            max_ureport_length = InvalidUReport.__lobs__["ureport"]
            if len(str(report)) > max_ureport_length:
                raise InvalidUsage("uReport may only be {0} bytes long"
                                   .format(max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                try:
                    osr = (db.session.query(OpSysRelease)
                           .join(OpSys)
                           .filter(OpSys.name ==
                                   systems[report["os"]["name"]].nice_name)
                           .filter(OpSysRelease.version ==
                                   report["os"]["version"])
                           .first())
                except (DatabaseError, InterfaceError) as e:
                    flash("Database unreachable. The uReport couldn't be"
                          " saved. Please try again later.", "danger")
                    logging.exception(e)
                    return render_template("reports/new.html",
                                           form=form), 503  # HTTP Service Unavailable

                if osr:
                    osr_id = osr.id

            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:  # pylint: disable=broad-except
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)

            # Spool the raw uReport for asynchronous processing.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            # Renamed from `file` to avoid shadowing the builtin.
            with open(fpath, 'w') as spool_file:
                spool_file.write(raw_data.decode("utf-8"))

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                    ureport.validate(report2)
                except FafError:
                    report2 = None

                if report2 is not None:
                    try:
                        solution = find_solution(report2, db=db, osr=osr)
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The solution couldn't be"
                              " retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html",
                                               form=form), 503  # HTTP Service Unavailable

                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}"
                                .format(solution.url))

                        solution_dict = {"cause": solution.cause,
                                         "note": solution.note_text,
                                         "url": solution.url}
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[
                            report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:  # pylint: disable=broad-except
                        logging.exception(e)

                if known:
                    # Point the client at the existing report and any
                    # bugs already filed for it.
                    url = url_for('reports.item', report_id=dbreport.id,
                                  _external=True)
                    parts = [{"reporter": "ABRT Server",
                              "value": url,
                              "type": "url"}]

                    try:
                        bugs = (db.session.query(BzBug)
                                .join(ReportBz)
                                .filter(ReportBz.bzbug_id == BzBug.id)
                                .filter(ReportBz.report_id == dbreport.id)
                                .all())
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The bugs couldn't be"
                              " retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html",
                                               form=form), 503  # HTTP Service Unavailable

                    for bug in bugs:
                        parts.append({"reporter": "Bugzilla",
                                      "value": bug.url,
                                      "type": "url"})

                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(
                        p["value"] for p in parts
                        if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                json_response.status_code = 202
                return json_response

            flash("The uReport was saved successfully. Thank you.", "success")
            return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            flash(e.message, "danger")
            return render_template("reports/new.html",
                                   form=form), e.status_code

    return render_template("reports/new.html", form=form)