Example #1
    def text_overview(self, cmdline, db, opsys, release):
        release_ids = get_release_ids(db, opsys, release)

        num_days = 7
        if cmdline.last:
            num_days = int(cmdline.last)

        since = datetime.datetime.now() - datetime.timedelta(days=num_days)

        hot = query_hot_problems(db,
                                 release_ids,
                                 history=self.history_type,
                                 last_date=since)

        if not cmdline.include_low_quality:
            hot = [x for x in hot if x.quality >= 0]

        ptypes = ""
        if len(self.ptypes) != len(problemtypes):
            ptypes = " " + ", ".join(self.ptypes)
        out = "Overview of the top {0}{1} crashes over the last {2} days:\n".format(
            cmdline.count, ptypes, num_days)

        hot = [p for p in hot if p.type in self.ptypes]

        for (rank, problem) in enumerate(hot[:cmdline.count]):
            out += "#{0} {1} - {2}x\n".format(
                rank + 1, ', '.join(problem.unique_component_names),
                problem.count)

            # Reports with bugzillas for this OpSysRelease go first
            reports = sorted(
                problem.reports,
                cmp=lambda x, y: len(
                    [b for b in x.bugs if b.opsysrelease_id in release_ids]) -
                len([b for b in y.bugs if b.opsysrelease_id in release_ids]),
                reverse=True)

            if webfaf_installed():
                for report in reports[:3]:
                    out += "{0}\n".format(
                        reverse("reports.bthash_forward",
                                bthash=report.hashes[0].hash))
                    for bug in report.bugs:
                        out += "  {0}\n".format(bug.url)
            else:
                for report in reports[:3]:
                    out += "Report BT hash: {0}\n".format(
                        report.hashes[0].hash)
            if len(problem.reports) > 3:
                out += "... and {0} more.\n".format(len(problem.reports) - 3)

            if problem.tainted:
                out += "Kernel tainted.\n"

            crash_function = problem.crash_function
            if crash_function:
                out += "Crash function: {0}\n".format(crash_function)

            affected_all = []
            for report in problem.reports:
                affected_known = [
                    (affected.build.base_package_name, affected.build.epoch,
                     affected.build.version, affected.build.release)
                    for affected in get_crashed_package_for_report(
                        db, report.id)
                ]

                affected_unknown = \
                    get_crashed_unknown_package_nevr_for_report(db, report.id)

                affected_all += affected_known + affected_unknown
            affected_all = sorted(set(affected_all),
                                  cmp=lambda a, b: cmp_evr(a[1:], b[1:]),
                                  reverse=True)

            if affected_all:
                out += "Affected builds: {0}".format(", ".join([
                    "{0}-{1}:{2}-{3}".format(n, e, v, r)
                    for (n, e, v, r) in affected_all[:5]
                ]))
                if len(affected_all) > 5:
                    out += " and {0} more.".format(len(affected_all) - 5)
                out += "\n"

            pfix = problem.probable_fix_for_opsysrelease_ids(release_ids)
            if len(pfix) > 0:
                out += ("Problem seems to be fixed since the release of {0}\n".
                        format(pfix))
            out += "\n"

        return out
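
A note on Example #1: the sorted(..., cmp=...) calls only exist on Python 2. Below is a minimal sketch of the same ordering under Python 3, using functools.cmp_to_key; cmp_evr_stub is a hypothetical stand-in for faf's cmp_evr and only assumes it returns a negative/zero/positive value.

from functools import cmp_to_key

def cmp_evr_stub(a, b):
    # Hypothetical stand-in for cmp_evr: compares (epoch, version, release)
    # tuples lexically and returns a negative/zero/positive value.
    return (a > b) - (a < b)

affected_all = [("bash", 0, "4.4", "23"), ("bash", 0, "5.0", "2")]

# Same ordering as the cmp=-based sorted() above, newest EVR first.
affected_sorted = sorted(set(affected_all),
                         key=cmp_to_key(lambda a, b: cmp_evr_stub(a[1:], b[1:])),
                         reverse=True)
print(affected_sorted)   # [('bash', 0, '5.0', '2'), ('bash', 0, '4.4', '23')]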
Example #2
    def run(self, cmdline, db):
        """
        Mark a problem probably fixed if there is a new build of the problem's
        affected package, for which no crash reports have come in.
        """

        try:
            tasks = self._get_tasks(cmdline, db)
        except FafError as ex:
            self.log_error(
                "Unable to process command line arguments: {0}".format(
                    str(ex)))
            return 1

        problems = get_problems(db)

        task_i = 0
        for osplugin, db_release in tasks:
            task_i += 1

            self.log_info("[{0} / {1}] Processing '{2} {3}'".format(
                task_i, len(tasks), osplugin.nice_name, db_release.version))

            self.log_debug("Getting builds...")
            opsys_builds = osplugin.get_released_builds(db_release.version)

            newest_builds = {}
            all_builds = {}
            now = datetime.now()
            for build in opsys_builds:
                age = now - build["completion_time"]
                # If a hot new build comes out, we need to wait a certain
                # period of time for people to use it before we can make
                # conclusions about it being a probable fix.
                if age.days >= osplugin.build_aging_days:
                    if build["name"] not in newest_builds:
                        newest_builds[build["name"]] = build

                    if build["name"] not in all_builds:
                        all_builds[build["name"]] = [
                            build,
                        ]
                    else:
                        all_builds[build["name"]].append(build)

            probably_fixed_total = 0
            problems_in_release = 0
            problem_counter = 0
            for problem in problems:
                problem_counter += 1
                self.log_debug("Processing problem ID:{0} {1}/{2}:".format(
                    problem.id, problem_counter, len(problems)))
                affected_newest = {}
                affected_not_found = False

                reports_for_release =  \
                    get_reports_for_opsysrelease(db, problem.id, db_release.id)

                # For all the reports, we need the affected packages and their
                # newest versions.
                if reports_for_release:
                    problems_in_release += 1
                else:
                    self.log_debug(
                        " This problem doesn't appear in this release.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                for report in reports_for_release:
                    # First we try to find the affected package among the known
                    # packages.
                    affected_known = [
                        (affected.build.base_package_name,
                         affected.build.epoch, affected.build.version,
                         affected.build.release)
                        for affected in get_crashed_package_for_report(
                            db, report.id)
                    ]

                    # Then among the unknown packages.
                    affected_unknown = \
                        get_crashed_unknown_package_nevr_for_report(db, report.id)
                    # We get the base package name directly from the report
                    affected_unknown = [(report.component.name, affected[1],
                                         affected[2], affected[3])
                                        for affected in affected_unknown]

                    affected_all = affected_known + affected_unknown
                    if not affected_all:
                        affected_not_found = True
                        break

                    for affected in affected_all:
                        if affected[0] in affected_newest:
                            # If a problem contains multiple reports with the same
                            # affected package, we only want the newest version of
                            # it.
                            affected_newest[affected[0]]['reports'].append(report)
                            if cmp_evr(affected[1:],
                                       affected_newest[affected[0]]['nevr'][1:]) > 0:
                                affected_newest[affected[0]]['nevr'] = affected
                        else:
                            affected_newest[affected[0]] = {
                                'reports': [
                                    report,
                                ],
                                'nevr': affected
                            }

                if affected_not_found or not affected_newest:
                    # Affected package of one of the reports was not found.
                    # We can't make any conclusions.
                    self.log_debug(" Affected package not found.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                if len(affected_newest) > 1:
                    # Multiple different affected packages => cannot be fixed
                    # by a single package update
                    self.log_debug(
                        " Multiple affected packages. No simple fix.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                probably_fixed_since = datetime.fromtimestamp(0)

                pkg = list(affected_newest.values())[0]

                name = pkg['nevr'][0]
                newest_build = newest_builds.get(name, False)
                if newest_build:
                    newest_evr = (newest_build["epoch"]
                                  or 0, newest_build["version"],
                                  newest_build["release"])
                if newest_build and cmp_evr(newest_evr, pkg['nevr'][1:]) > 0:
                    # Newest available build is newer than the newest version
                    # of the affected package. Now find the oldest such
                    # probable fix.
                    i = 0
                    while i < len(all_builds[name]) and cmp_evr(
                            (all_builds[name][i]["epoch"] or 0,
                             all_builds[name][i]["version"],
                             all_builds[name][i]["release"]), pkg['nevr'][1:]) > 0:
                        i += 1
                    completion_time = all_builds[name][i - 1]["completion_time"]
                    probably_fixed_since = max(completion_time,
                                               probably_fixed_since)
                    pkg["probable_fix"] = (name,
                                           all_builds[name][i - 1]["epoch"] or 0,
                                           all_builds[name][i - 1]["version"],
                                           all_builds[name][i - 1]["release"])

                    self._save_probable_fix(db, problem, db_release,
                                            pkg["probable_fix"],
                                            probably_fixed_since)
                    self.log_debug("  Probably fixed for {0} days.".format(
                        (datetime.now() - probably_fixed_since).days))
                    probably_fixed_total += 1
                else:
                    self._save_probable_fix(db, problem, db_release, None)
                    self.log_debug("  Not fixed.")

            db.session.flush()
            if problems_in_release > 0:
                self.log_info(
                    "{0}% of problems in this release probably fixed.".format(
                        (probably_fixed_total * 100) // problems_in_release))
            else:
                self.log_info("No problems found in this release.")
        return 0
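
The core of Example #2 is the probable-fix decision: if the newest sufficiently aged build of the affected package is newer than the newest affected version, the oldest such newer build is taken as the probable fix. Below is a minimal sketch of that step with plain (epoch, version, release) tuples; evr_cmp and oldest_probable_fix are illustrative names, and the build list is assumed to be ordered newest first, as the example implies.

def evr_cmp(a, b):
    # Stand-in comparator: assumes (epoch, version, release) compare lexically.
    return (a > b) - (a < b)

def oldest_probable_fix(builds_newest_first, affected_evr):
    # Returns the oldest build that is still newer than affected_evr,
    # or None when even the newest build is not newer.
    if not builds_newest_first or evr_cmp(builds_newest_first[0], affected_evr) <= 0:
        return None
    i = 0
    while i < len(builds_newest_first) and evr_cmp(builds_newest_first[i], affected_evr) > 0:
        i += 1
    return builds_newest_first[i - 1]

print(oldest_probable_fix([(0, "2.4", "1"), (0, "2.3", "5"), (0, "2.2", "1")],
                          (0, "2.2", "9")))   # (0, '2.3', '5')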
Example #3
def item(report_id, want_object=False):
    result = (db.session.query(Report,
                               OpSysComponent).join(OpSysComponent).filter(
                                   Report.id == report_id).first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    history_select = lambda table, date, date_range: (
        db.session.query(table).filter(table.report_id == report_id).filter(
            date >= date_range)
        # Flot is confused if not ordered
        .order_by(date).all())

    MAX_DAYS = 20  # Default set to 20
    MAX_WEEK = 20  # Default set to 20
    MAX_MONTH = 20  # Default set to 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))

    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append({
                'day': today - timedelta(x),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    elif len(daily_history) < MAX_DAYS:
        if daily_history[-1].day < (today):
            daily_history.append({
                'day': today,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({
                'day': today - timedelta(MAX_DAYS),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(
        datetime.datetime.today().weekday())

    weekly_history = history_select(
        ReportHistoryWeekly, ReportHistoryWeekly.week,
        (last_monday - timedelta(days=MAX_WEEK * 7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({
                'week': last_monday - timedelta(x * 7),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({
                'week': last_monday,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if weekly_history[0].week > (
                (last_monday - timedelta(7 * MAX_WEEK)).date()):
            weekly_history.append({
                'week': last_monday - timedelta(7 * MAX_WEEK),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly,
                                     ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))

    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))

    fdom = first_day_of_month(datetime.datetime.today())

    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({
                'month': fdom - relativedelta(months=x),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({
                'month': fdom,
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({
                'month': fdom - relativedelta(months=MAX_MONTH),
                'count': 0,
                'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
            })

    complete_history = history_select(
        ReportHistoryMonthly, ReportHistoryMonthly.month,
        (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name,
                                       ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    sorted(unique_ocurrence_os)

    packages = load_packages(db, report_id)

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1),
                                      reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease,
        Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name
    forward['oops'] = report.oops

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize

            forward['probably_fixed'] = tmp_dict
        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
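
The three history blocks in Example #3 all follow the same pattern: query up to 20 periods, then pad the edges with zero-count points so the chart always spans the full window. Below is a minimal sketch of that padding for the daily case, with plain dicts standing in for ReportHistoryDaily rows; pad_daily_history is an illustrative name, not part of faf.

import datetime

def pad_daily_history(history, days, today=None):
    # history: [{'day': date, 'count': int}, ...] ordered by day, possibly sparse.
    today = today or datetime.date.today()
    window_start = today - datetime.timedelta(days=days)
    if not history:
        return [{'day': today - datetime.timedelta(x), 'count': 0} for x in range(days)]
    padded = list(history)
    if len(padded) < days:
        if padded[-1]['day'] < today:
            padded.append({'day': today, 'count': 0})
        if padded[0]['day'] > window_start:
            padded.append({'day': window_start, 'count': 0})
    return padded

print(pad_daily_history([{'day': datetime.date(2020, 1, 10), 'count': 3}],
                        days=20, today=datetime.date(2020, 1, 15)))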
Example #4
def item(report_id, want_object=False) -> Union[Dict[str, Any], Response, str]:
    result = (db.session.query(Report,
                               OpSysComponent).join(OpSysComponent).filter(
                                   Report.id == report_id).first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path).filter(
        ReportExecutable.report_id == report_id).first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"

    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(
        ReportOpSysRelease, ReportOpSysRelease.count).filter(
            ReportOpSysRelease.report_id == report_id).order_by(
                desc(ReportOpSysRelease.count)).all())

    arches = (db.session.query(
        ReportArch,
        ReportArch.count).filter(ReportArch.report_id == report_id).order_by(
            desc(ReportArch.count)).all())

    modes = (db.session.query(
        ReportSelinuxMode, ReportSelinuxMode.count).filter(
            ReportSelinuxMode.report_id == report_id).order_by(
                desc(ReportSelinuxMode.count)).all())

    daily_history = precompute_history(report_id, 'day')
    weekly_history = precompute_history(report_id, 'week')
    monthly_history = precompute_history(report_id, 'month')

    complete_history = (db.session.query(ReportHistoryMonthly).filter(
        ReportHistoryMonthly.report_id == report_id).all())

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = str(ch.opsysrelease)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {
                    'count': ch.count,
                    'unique': ch.unique
                }
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    packages = load_packages(db, report_id)

    crashed_versions = {}
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}".format(
            pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((pkg["name"], pkg["count"],
                               sorted(pkg["versions"].items(),
                                      key=itemgetter(1),
                                      reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except:  # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [
            email_address for (email_address, ) in (db.session.query(
                ContactEmail.email_address).join(ReportContactEmail).filter(
                    ReportContactEmail.report == report))
        ]

    maintainer = (db.session.query(AssociatePeople).join(
        OpSysComponentAssociate).join(OpSysComponent).filter(
            OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(
        ProblemOpSysRelease,
        Build).join(Problem).join(Report).join(Build).filter(
            Report.id == report_id).first())

    unpackaged = not (get_crashed_package_for_report(db, report.id)
                      or get_crashed_unknown_package_nevr_for_report(
                          db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except:  # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize

            forward['probably_fixed'] = tmp_dict
        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except:  # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return Response(response=json.dumps(forward, cls=WebfafJSONEncoder),
                        status=200,
                        mimetype="application/json")

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
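
The package_counts comment in Examples #3 and #4 documents the target structure: [(package name, count, [(package version, count in the version)])]. Below is a minimal sketch of that aggregation decoupled from the ORM; build_package_counts and the (name, version, count) input rows are illustrative only.

from collections import defaultdict
from operator import itemgetter

def build_package_counts(rows):
    # rows: iterable of (package name, "epoch:version-release", count) tuples.
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for name, version, count in rows:
        names[name]["name"] = name
        names[name]["count"] += count
        names[name]["versions"][version] += count

    # [(package name, count, [(package version, count in the version)])]
    return [(pkg["name"], pkg["count"],
             sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True))
            for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True)]

print(build_package_counts([("bash", "0:5.0-2", 4),
                            ("bash", "0:4.4-23", 1),
                            ("zsh", "0:5.7-1", 2)]))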
Example #5
File: reports.py Project: abrt/faf
def item(report_id, want_object=False):
    result = (db.session.query(Report, OpSysComponent)
              .join(OpSysComponent)
              .filter(Report.id == report_id)
              .first())

    if result is None:
        abort(404)

    report, component = result

    executable = (db.session.query(ReportExecutable.path)
                  .filter(ReportExecutable.report_id == report_id)
                  .first())
    if executable:
        executable = executable[0]
    else:
        executable = "unknown"


    solutions = None

    if report.max_certainty is not None:
        osr = get_report_opsysrelease(db=db, report_id=report.id)
        solutions = [find_solution(report, db=db, osr=osr)]

    releases = (db.session.query(ReportOpSysRelease, ReportOpSysRelease.count)
                .filter(ReportOpSysRelease.report_id == report_id)
                .order_by(desc(ReportOpSysRelease.count))
                .all())

    arches = (db.session.query(ReportArch, ReportArch.count)
              .filter(ReportArch.report_id == report_id)
              .order_by(desc(ReportArch.count))
              .all())

    modes = (db.session.query(ReportSelinuxMode, ReportSelinuxMode.count)
             .filter(ReportSelinuxMode.report_id == report_id)
             .order_by(desc(ReportSelinuxMode.count))
             .all())

    history_select = lambda table, date, date_range: (db.session.query(table).
                                                      filter(table.report_id == report_id)
                                                      .filter(date >= date_range)
                                                      # Flot is confused if not ordered
                                                      .order_by(date)
                                                      .all())

    MAX_DAYS = 20  # Default set to 20
    MAX_WEEK = 20  # Default set to 20
    MAX_MONTH = 20  # Default set to 20

    today = datetime.date.today()

    # Show only 20 days
    daily_history = history_select(ReportHistoryDaily, ReportHistoryDaily.day,
                                   (today - timedelta(days=MAX_DAYS)))

    if not daily_history:
        for x in range(0, MAX_DAYS):
            daily_history.append({'day': today - timedelta(x),
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    elif len(daily_history) < MAX_DAYS:
        if daily_history[-1].day < (today):
            daily_history.append({'day': today,
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
                                 })

        if daily_history[0].day > (today - timedelta(MAX_DAYS)):
            daily_history.append({'day': today - timedelta(MAX_DAYS),
                                  'count': 0,
                                  'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id
                                 })

    # Show only 20 weeks
    last_monday = datetime.datetime.today() - timedelta(datetime.datetime.today().weekday())

    weekly_history = history_select(ReportHistoryWeekly, ReportHistoryWeekly.week,
                                    (last_monday - timedelta(days=MAX_WEEK*7)))
    if not weekly_history:
        for x in range(0, MAX_WEEK):
            weekly_history.append({'week': last_monday - timedelta(x*7),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})
    elif len(weekly_history) < MAX_WEEK:
        if weekly_history[-1].week < (last_monday.date()):
            weekly_history.append({'week': last_monday,
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

        if weekly_history[0].week > ((last_monday - timedelta(7*MAX_WEEK)).date()):
            weekly_history.append({'week': last_monday - timedelta(7*MAX_WEEK),
                                   'count': 0,
                                   'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    # Show only 20 months
    monthly_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month,
                                     (today - relativedelta(months=MAX_MONTH)))

    first_day_of_month = lambda t: (datetime.date(t.year, t.month, 1))

    fdom = first_day_of_month(datetime.datetime.today())

    if not monthly_history:
        for x in range(0, MAX_MONTH):
            monthly_history.append({'month': fdom - relativedelta(months=x),
                                    'count': 0,
                                    'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    elif len(monthly_history) < MAX_MONTH:
        if monthly_history[-1].month < (fdom):
            monthly_history.append({'month': fdom,
                                    'count': 0,
                                    'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

        if monthly_history[0].month > (fdom - relativedelta(months=MAX_MONTH)):
            monthly_history.append({'month': fdom - relativedelta(months=MAX_MONTH),
                                    'count': 0,
                                    'opsysrelease_id': releases[0].ReportOpSysRelease.opsysrelease_id})

    complete_history = history_select(ReportHistoryMonthly, ReportHistoryMonthly.month,
                                      (datetime.datetime.strptime('1970-01-01', '%Y-%m-%d')))

    unique_ocurrence_os = {}
    if complete_history:
        for ch in complete_history:
            os_name = "{0} {1}".format(ch.opsysrelease.opsys.name, ch.opsysrelease.version)

            if ch.count is None:
                ch.count = 0

            if ch.unique is None:
                ch.unique = 0

            if os_name not in unique_ocurrence_os:
                unique_ocurrence_os[os_name] = {'count': ch.count, 'unique': ch.unique}
            else:
                unique_ocurrence_os[os_name]['count'] += ch.count
                unique_ocurrence_os[os_name]['unique'] += ch.unique

    sorted(unique_ocurrence_os)

    packages = load_packages(db, report_id)

    crashed_versions = []
    last_affected_version = "N/A"

    # creates a package_counts list with this structure:
    # [(package name, count, [(package version, count in the version)])]
    names = defaultdict(lambda: {"count": 0, "versions": defaultdict(int)})
    for pkg in packages:
        names[pkg.iname]["name"] = pkg.iname
        names[pkg.iname]["count"] += pkg.count
        names[pkg.iname]["versions"]["{0}:{1}-{2}"
                                     .format(pkg.iepoch, pkg.iversion, pkg.irelease)] += pkg.count
        if pkg.type == "CRASHED":
            crashed_versions = names[pkg.iname]["versions"]

    if crashed_versions:
        last_affected_version = sorted(crashed_versions.keys())[-1]

    package_counts = []
    for pkg in sorted(names.values(), key=itemgetter("count"), reverse=True):
        package_counts.append((
            pkg["name"],
            pkg["count"],
            sorted(pkg["versions"].items(), key=itemgetter(1), reverse=True)))

    try:
        backtrace = report.backtraces[0].frames
    except: # pylint: disable=bare-except
        backtrace = []

    fid = 0
    for frame in backtrace:
        fid += 1
        frame.nice_order = fid

    is_maintainer = is_component_maintainer(db, g.user, component)

    contact_emails = []
    if is_maintainer:
        contact_emails = [email_address for (email_address, ) in
                          (db.session.query(ContactEmail.email_address)
                           .join(ReportContactEmail)
                           .filter(ReportContactEmail.report == report))]

    maintainer = (db.session.query(AssociatePeople)
                  .join(OpSysComponentAssociate)
                  .join(OpSysComponent)
                  .filter(OpSysComponent.name == component.name)).first()

    maintainer_contact = ""
    if maintainer:
        maintainer_contact = maintainer.name

    probably_fixed = (db.session.query(ProblemOpSysRelease, Build)
                      .join(Problem)
                      .join(Report)
                      .join(Build)
                      .filter(Report.id == report_id)
                      .first())

    unpackaged = not (get_crashed_package_for_report(db, report.id) or
                      get_crashed_unknown_package_nevr_for_report(db, report.id))

    forward = dict(report=report,
                   executable=executable,
                   probably_fixed=probably_fixed,
                   component=component,
                   releases=metric(releases),
                   arches=metric(arches),
                   modes=metric(modes),
                   daily_history=daily_history,
                   weekly_history=weekly_history,
                   monthly_history=monthly_history,
                   complete_history=complete_history,
                   unique_ocurrence_os=unique_ocurrence_os,
                   crashed_packages=packages,
                   package_counts=package_counts,
                   backtrace=backtrace,
                   contact_emails=contact_emails,
                   unpackaged=unpackaged,
                   solutions=solutions,
                   maintainer_contact=maintainer_contact)

    forward['error_name'] = report.error_name
    forward['oops'] = report.oops
    forward['version'] = last_affected_version

    if want_object:
        try:
            cf = component.name
            if report.backtraces[0].crash_function:
                cf += " in {0}".format(report.backtraces[0].crash_function)
            forward['crash_function'] = cf
        except: # pylint: disable=bare-except
            forward['crash_function'] = ""

        if probably_fixed:
            tmp_dict = probably_fixed.ProblemOpSysRelease.serialize
            tmp_dict['probable_fix_build'] = probably_fixed.Build.serialize

            forward['probably_fixed'] = tmp_dict
        # Avg count occurrence from first to last occurrence
        forward['avg_count_per_month'] = get_avg_count(report.first_occurrence,
                                                       report.last_occurrence,
                                                       report.count)

        if forward['report'].bugs:
            forward['bugs'] = []
            for bug in forward['report'].bugs:
                try:
                    forward['bugs'].append(bug.serialize)
                except: # pylint: disable=bare-except
                    print("Bug serialize failed")
        return forward

    if request_wants_json():
        return jsonify(forward)

    forward["is_maintainer"] = is_maintainer
    forward["extfafs"] = get_external_faf_instances(db)

    return render_template("reports/item.html", **forward)
Example #6
    def run(self, cmdline, db):
        """
        Mark a problem probably fixed if there is a new build of the problem's
        affected package, for which no crash reports have come in.
        """

        try:
            tasks = self._get_tasks(cmdline, db)
        except FafError as ex:
            self.log_error("Unable to process command line arguments: {0}"
                           .format(str(ex)))
            return 1

        problems = get_problems(db)

        task_i = 0
        for osplugin, db_release in tasks:
            task_i += 1

            self.log_info("[{0} / {1}] Processing '{2} {3}'"
                          .format(task_i, len(tasks), osplugin.nice_name,
                                  db_release.version))

            self.log_debug("Getting builds...")
            opsys_builds = osplugin.get_released_builds(db_release.version)

            newest_builds = {}
            all_builds = {}
            now = datetime.now()
            for build in opsys_builds:
                age = now - build["completion_time"]
                # If a hot new build comes out, we need to wait a certain
                # period of time for people to use it before we can make
                # conclusions about it being a probable fix.
                if age.days >= osplugin.build_aging_days:
                    if build["name"] not in newest_builds:
                        newest_builds[build["name"]] = build

                    if build["name"] not in all_builds:
                        all_builds[build["name"]] = [build, ]
                    else:
                        all_builds[build["name"]].append(build)

            probably_fixed_total = 0
            problems_in_release = 0
            problem_counter = 0
            for problem in problems:
                problem_counter += 1
                self.log_debug("Processing problem ID:{0} {1}/{2}:"
                               .format(problem.id, problem_counter, len(problems)))
                affected_newest = {}
                affected_not_found = False

                reports_for_release =  \
                    get_reports_for_opsysrelease(db, problem.id, db_release.id)

                # For all the reports, we need the affected packages and their
                # newest versions.
                if reports_for_release:
                    problems_in_release += 1
                else:
                    self.log_debug(" This problem doesn't appear in this release.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                for report in reports_for_release:
                    # First we try to find the affected package among the known
                    # packages.
                    affected_known = [
                        (affected.build.base_package_name,
                         affected.build.epoch,
                         affected.build.version,
                         affected.build.release) for affected in
                        get_crashed_package_for_report(db, report.id)]

                    # Then among the unknown packages.
                    affected_unknown = \
                        get_crashed_unknown_package_nevr_for_report(db, report.id)
                    # We get the base package name directly from the report
                    affected_unknown = [(report.component.name,
                                         affected[1],
                                         affected[2],
                                         affected[3]) for affected in affected_unknown]

                    affected_all = affected_known + affected_unknown
                    if not affected_all:
                        affected_not_found = True
                        break

                    for affected in affected_all:
                        if affected[0] in affected_newest:
                            # If a problem contains multiple reports with the same
                            # affected package, we only want the newest version of
                            # it.
                            affected_newest[affected[0]]['reports'].append(report)
                            if cmp_evr(affected[1:],
                                       affected_newest[affected[0]]['nevr'][1:]) > 0:
                                affected_newest[affected[0]]['nevr'] = affected
                        else:
                            affected_newest[affected[0]] = {
                                'reports': [report, ],
                                'nevr': affected
                            }

                if affected_not_found or not affected_newest:
                    # Affected package of one of the reports was not found.
                    # We can't make any conclusions.
                    self.log_debug(" Affected package not found.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                if len(affected_newest) > 1:
                    # Multiple different affected packages => cannot be fixed
                    # by a single package update
                    self.log_debug(" Multiple affected packages. No simple fix.")
                    self._save_probable_fix(db, problem, db_release, None)
                    # Next problem
                    continue

                probably_fixed_since = datetime.fromtimestamp(0)

                pkg = list(affected_newest.values())[0]

                name = pkg['nevr'][0]
                newest_build = newest_builds.get(name, False)
                if newest_build:
                    newest_evr = (newest_build["epoch"] or 0,
                                  newest_build["version"],
                                  newest_build["release"])
                if newest_build and cmp_evr(newest_evr, pkg['nevr'][1:]) > 0:
                    # Newest available build is newer than the newest version
                    # of the affected package. Now find the oldest such
                    # probable fix.
                    i = 0
                    while i < len(all_builds[name]) and cmp_evr(
                            (all_builds[name][i]["epoch"] or 0,
                             all_builds[name][i]["version"],
                             all_builds[name][i]["release"]), pkg['nevr'][1:]) > 0:
                        i += 1
                    completion_time = all_builds[name][i-1]["completion_time"]
                    probably_fixed_since = max(completion_time,
                                               probably_fixed_since)
                    pkg["probable_fix"] = (name,
                                           all_builds[name][i-1]["epoch"] or 0,
                                           all_builds[name][i-1]["version"],
                                           all_builds[name][i-1]["release"])

                    self._save_probable_fix(db, problem, db_release,
                                            pkg["probable_fix"],
                                            probably_fixed_since)
                    self.log_debug("  Probably fixed for {0} days.".format(
                        (datetime.now() - probably_fixed_since).days))
                    probably_fixed_total += 1
                else:
                    self._save_probable_fix(db, problem, db_release, None)
                    self.log_debug("  Not fixed.")

            db.session.flush()
            if problems_in_release > 0:
                self.log_info("{0}% of problems in this release probably fixed.".format(
                    (probably_fixed_total * 100) // problems_in_release))
            else:
                self.log_info("No problems found in this release.")
Example #7
    def text_overview(self, cmdline, db, opsys, release):
        release_ids = get_release_ids(db, opsys, release)

        num_days = 7
        if cmdline.last:
            num_days = int(cmdline.last)

        since = datetime.datetime.now() - datetime.timedelta(days=num_days)

        hot = query_hot_problems(db, release_ids, history=self.history_type,
                                 last_date=since)

        if not cmdline.include_low_quality:
            hot = [x for x in hot if x.quality >= 0]

        ptypes = ""
        if len(self.ptypes) != len(problemtypes):
            ptypes = " "+", ".join(self.ptypes)
        out = "Overview of the top {0}{1} crashes over the last {2} days:\n".format(
            cmdline.count, ptypes, num_days)

        hot = [p for p in hot if p.type in self.ptypes]

        for (rank, problem) in enumerate(hot[:cmdline.count]):
            out += "#{0} {1} - {2}x\n".format(
                rank+1,
                ', '.join(problem.unique_component_names),
                problem.count)

            # Reports with bugzillas for this OpSysRelease go first
            reports = sorted(problem.reports,
                             cmp=lambda x, y: len([b for b in x.bugs if b.opsysrelease_id in release_ids])
                             - len([b for b in y.bugs if b.opsysrelease_id in release_ids]), reverse=True)

            if webfaf_installed():
                for report in reports[:3]:
                    out += "{0}\n".format(reverse("reports.bthash_forward",
                                                  bthash=report.hashes[0].hash))
                    for bug in report.bugs:
                        out += "  {0}\n".format(bug.url)
            else:
                for report in reports[:3]:
                    out += "Report BT hash: {0}\n".format(report.hashes[0].hash)
            if len(problem.reports) > 3:
                out += "... and {0} more.\n".format(len(problem.reports)-3)

            if problem.tainted:
                out += "Kernel tainted.\n"

            crash_function = problem.crash_function
            if crash_function:
                out += "Crash function: {0}\n".format(crash_function)

            affected_all = []
            for report in problem.reports:
                affected_known = [
                    (affected.build.base_package_name,
                     affected.build.epoch,
                     affected.build.version,
                     affected.build.release) for affected in
                    get_crashed_package_for_report(db, report.id)]

                affected_unknown = \
                    get_crashed_unknown_package_nevr_for_report(db, report.id)

                affected_all += affected_known + affected_unknown
            affected_all = sorted(set(affected_all),
                                  cmp=lambda a, b: cmp_evr(a[1:], b[1:]),
                                  reverse=True)

            if affected_all:
                out += "Affected builds: {0}".format(", ".join(
                    ["{0}-{1}:{2}-{3}".format(n, e, v, r)
                     for (n, e, v, r) in affected_all[:5]]))
                if len(affected_all) > 5:
                    out += " and {0} more.".format(len(affected_all) - 5)
                out += "\n"

            pfix = problem.probable_fix_for_opsysrelease_ids(release_ids)
            if pfix:
                out += ("Problem seems to be fixed since the release of {0}\n"
                        .format(pfix))
            out += "\n"

        return out
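
The "Reports with bugzillas for this OpSysRelease go first" ordering in Examples #1 and #7 only needs a descending count, so on Python 3 it can be written with a key function rather than cmp=. Below is a minimal sketch with namedtuple stand-ins for the Report and Bug ORM objects; order_reports is an illustrative name.

from collections import namedtuple

Report = namedtuple("Report", ["name", "bugs"])
Bug = namedtuple("Bug", ["opsysrelease_id"])

def order_reports(reports, release_ids):
    # Reports with more bugs filed against the given releases come first.
    return sorted(reports,
                  key=lambda r: len([b for b in r.bugs
                                     if b.opsysrelease_id in release_ids]),
                  reverse=True)

reports = [Report("a", [Bug(1)]), Report("b", [Bug(2), Bug(2)]), Report("c", [])]
print([r.name for r in order_reports(reports, release_ids={2})])   # ['b', 'a', 'c']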
Example #8
File: stats.py Project: trams/faf
    def text_overview(self, cmdline, db, opsys, release):
        release_ids = get_release_ids(db, opsys, release)

        num_days = 7
        if cmdline.last:
            num_days = int(cmdline.last)

        since = datetime.datetime.now() - datetime.timedelta(days=num_days)

        hot = query_hot_problems(db, release_ids, history=self.history_type,
                                 last_date=since)

        if not cmdline.include_low_quality:
            hot = filter(lambda x: x.quality >= 0, hot)

        out = "Overview of the top {0} crashes over the last {1} days:\n".format(
            cmdline.count, num_days)

        for (rank, problem) in enumerate(hot[:cmdline.count]):
            out += "#{0} {1} - {2}x\n".format(
                rank+1,
                ', '.join(problem.unique_component_names),
                problem.count)
            if webfaf_installed():
                for report in problem.reports:
                    out += "{0}\n".format(reverse("webfaf.reports.views.bthash_forward",
                                          args=[report.hashes[0].hash]))
            else:
                for report in problem.reports:
                    out += "Report BT hash: {0}\n".format(report.hashes[0].hash)

            if problem.tainted:
                out += "Kernel tainted.\n"

            crash_function = problem.crash_function
            if crash_function:
                out += "Crash function: {0}\n".format(crash_function)

            affected_all = []
            for report in problem.reports:
                affected_known = [
                    (affected.build.base_package_name,
                     affected.build.epoch,
                     affected.build.version,
                     affected.build.release) for affected in
                    get_crashed_package_for_report(db, report.id)]

                affected_unknown = \
                    get_crashed_unknown_package_nevr_for_report(db, report.id)

                affected_all += affected_known + affected_unknown
            affected_all = sorted(set(affected_all), cmp=lambda a, b: cmp_evr(a[1:], b[1:]))

            if affected_all:
                out += "Affected builds: {0}\n".format(", ".join(
                    ["{0}-{1}:{2}-{3}".format(n, e, v, r)
                     for (n, e, v, r) in affected_all]))

            pfix = problem.probable_fix_for_opsysrelease_ids(release_ids)
            if len(pfix) > 0:
                out += ("Problem seems to be fixed since the release of {0}\n"
                        .format(pfix))
            out += "\n"

        return out