Example #1
0
    def save_report_dict(self, report):
        """Validate a uReport dict and persist it stamped with the current UTC time."""
        ureport.validate(report)

        saved_at = datetime.datetime.utcnow()
        ureport.save(self.db, report, timestamp=saved_at)

        self.db.session.flush()
Example #2
0
    def save_report_dict(self, report):
        """Check *report* for validity, store it, and flush the DB session."""
        ureport.validate(report)
        ureport.save(self.db, report,
                     timestamp=datetime.datetime.utcnow())
        self.db.session.flush()
Example #3
0
    def test_ureport_saving(self):
        """
        Check if ureport saving works correctly.
        """

        # save raises FafError on failure, so each sample must store cleanly
        for name in self.sample_report_names:
            save(self.db, self.sample_reports[name])
Example #4
0
    def _save_reports(self, db, pattern="*"):
        """Process incoming report files matching *pattern*.

        Each file is loaded as JSON, validated, and stored in the database
        via `save`. Files failing any step are moved to the deferred
        directory; successfully stored ones are moved to saved.
        """
        self.log_info("Saving reports")

        report_filenames = glob.glob(
            os.path.join(self.dir_report_incoming, pattern))

        # enumerate replaces the manual `i = 0; i += 1` counter and matches
        # the style used by the --speedup variant of this loop.
        for i, filename in enumerate(sorted(report_filenames), start=1):
            fname = os.path.basename(filename)
            self.log_info("[{0} / {1}] Processing file '{2}'".format(
                i, len(report_filenames), filename))

            try:
                with open(filename, "r") as fil:
                    ureport = json.load(fil)
            except (OSError, ValueError) as ex:
                self.log_warn("Failed to load uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

            try:
                validate(ureport)
            except FafError as ex:
                self.log_warn("uReport is invalid: {0}".format(str(ex)))

                # Record operating systems we do not recognize so they can
                # be reviewed and added later.
                if ("os" in ureport and "name" in ureport["os"]
                        and ureport["os"]["name"] not in systems
                        and ureport["os"]["name"].lower() not in systems):
                    self._save_unknown_opsys(db, ureport["os"])

                self._move_report_to_deferred(fname)
                continue

            # The file's modification time serves as the report timestamp.
            mtime = os.path.getmtime(filename)
            timestamp = datetime.datetime.fromtimestamp(mtime)

            try:
                save(db,
                     ureport,
                     create_component=self.create_components,
                     timestamp=timestamp)
            except FafError as ex:
                self.log_warn("Failed to save uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

            self._move_report_to_saved(fname)
Example #5
0
    def test_comment_saving(self):
        """
        Check if comment attachment is added to report.
        """

        save(self.db, self.sample_reports['ureport2'])
        report = self.db.session.query(Report).first()

        # point the attachment at the freshly stored report's hash
        attachment = self.comment_attachment
        attachment["bthash"] = report.hashes[0].hash

        save_attachment(self.db, attachment)
        self.assertEqual(len(report.comments), 1)
Example #6
0
    def _save_reports(self, db):
        """Process every file in the incoming report directory.

        Each file is parsed as JSON, validated, and stored in the database
        via `save`. Files failing any step are moved to the deferred
        directory; successfully stored ones are moved to saved.
        """
        self.log_info("Saving reports")

        report_filenames = os.listdir(self.dir_report_incoming)

        # enumerate replaces the manual `i = 0; i += 1` counter and matches
        # the style used by the --speedup variant of this loop.
        for i, fname in enumerate(sorted(report_filenames), start=1):
            filename = os.path.join(self.dir_report_incoming, fname)
            self.log_info("[{0} / {1}] Processing file '{2}'"
                          .format(i, len(report_filenames), filename))

            try:
                with open(filename, "r") as fil:
                    ureport = json.load(fil)
            except (OSError, ValueError) as ex:
                self.log_warn("Failed to load uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

            try:
                validate(ureport)
            except FafError as ex:
                self.log_warn("uReport is invalid: {0}".format(str(ex)))

                # Record operating systems we do not recognize so they can
                # be reviewed and added later.
                if ("os" in ureport and
                    "name" in ureport["os"] and
                    ureport["os"]["name"] not in systems and
                    ureport["os"]["name"].lower() not in systems):
                    self._save_unknown_opsys(db, ureport["os"])

                self._move_report_to_deferred(fname)
                continue

            # The file's modification time serves as the report timestamp.
            mtime = os.path.getmtime(filename)
            timestamp = datetime.datetime.fromtimestamp(mtime)

            try:
                save(db, ureport, create_component=self.create_components,
                     timestamp=timestamp)
            except FafError as ex:
                self.log_warn("Failed to save uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

            self._move_report_to_saved(fname)
Example #7
0
    def save_report(self, filename):
        """
        Save report located in sample_reports directory
        with `filename`.
        """

        path = os.path.join(self.reports_path, filename)

        with open(path) as sample:
            report = json.load(sample)

        ureport.validate(report)

        ureport.save(self.db, report,
                     timestamp=datetime.datetime.utcnow())

        self.db.session.flush()
Example #8
0
    def test_url_saving(self):
        """
        Check if URL attachment is added to report.
        """

        save(self.db, self.sample_reports['ureport2'])
        report = self.db.session.query(Report).first()

        # point the attachment at the freshly stored report's hash
        attachment = self.url_attachment
        attachment["bthash"] = report.hashes[0].hash

        save_attachment(self.db, attachment)

        self.assertEqual(len(report.urls), 1)
        self.assertEqual(report.urls[0].url, 'http://example.org')
        self.assertIsNotNone(report.urls[0].saved)
Example #9
0
    def test_contact_email_saving(self):
        """
        Check if comment attachment is added to report.
        """

        save(self.db, self.sample_reports['ureport2'])
        report = self.db.session.query(Report).first()

        # point the attachment at the freshly stored report's hash
        attachment = self.contact_email_attachment
        attachment["bthash"] = report.hashes[0].hash

        save_attachment(self.db, attachment)
        self.assertEqual(len(report.report_contact_emails), 1)
        self.assertEqual(len(self.db.session.query(ContactEmail).all()), 1)
        self.assertEqual(report.report_contact_emails[0].contact_email.email_address,
                         attachment["data"])

        # saving it twice should have no effect
        save_attachment(self.db, attachment)
        self.assertEqual(len(report.report_contact_emails), 1)
        self.assertEqual(len(self.db.session.query(ContactEmail).all()), 1)
Example #10
0
    def test_attachment_saving(self):
        """
        Check if bugzilla attachment is added to report.
        """

        save(self.db, self.sample_reports['ureport2'])
        report = self.db.session.query(Report).first()

        # update hash locally
        reporthash = report.hashes[0].hash
        bz_attachment = self.bugzilla_attachment
        bz_attachment["bthash"] = reporthash

        class MockBugtracker(object):
            # NOTE(review): presumably invoked as a bound method with a
            # single bug_id argument, so the instance lands in `db` —
            # confirm against save_attachment's call site.
            def download_bug_to_storage(db, bug_id):
                return self.bug

        old_fedora_bt = bugtrackers["fedora-bugzilla"]
        bugtrackers["fedora-bugzilla"] = MockBugtracker()

        # Restore the real bugtracker even if save_attachment or the
        # assertion fails, so the mock does not leak into other tests.
        try:
            save_attachment(self.db, bz_attachment)
            self.assertEqual(len(report.bz_bugs), 1)
        finally:
            bugtrackers["fedora-bugzilla"] = old_fedora_bt
Example #11
0
    def _save_reports_speedup(self, db):
        """Save incoming reports, deduplicating identical files by SHA1.

        A timestamped lock file restricts this run to files modified
        after the previous run's lock and before this one, so multiple
        instances can safely run in parallel.
        """
        self.log_info("Saving reports (--speedup)")

        # This creates a lock file and only works on file modified between the
        # last lock file and this new lock file. This way a new process can
        # be run while the older is still running.

        now = time.time()
        # Lock name embeds PID and creation time so concurrent runs never
        # collide and locks can be ordered by age.
        lock_name = ".sr-speedup-{0}-{1}.lock".format(os.getpid(), int(now))

        self.lock_filename = os.path.join(self.dir_report_incoming, lock_name)
        open(self.lock_filename, "w").close()
        # Pin the lock's timestamps to `now` so the age comparisons below
        # are consistent with the filename.
        os.utime(self.lock_filename, (int(now), int(now)))
        self.log_debug("Created lock %s", self.lock_filename)

        # Remove lock on SIGTERM and Ctrl-C
        def handle_term(_, __):
            self.log_debug("Signal caught, removing lock %s",
                           self.lock_filename)
            os.remove(self.lock_filename)
            sys.exit(0)

        signal.signal(signal.SIGTERM, handle_term)
        signal.signal(signal.SIGINT, handle_term)

        locks = glob.glob(
            os.path.join(self.dir_report_incoming, ".sr-speedup-*.lock"))
        # ctime of the most recent lock older than ours; files modified
        # before it were already handled by an earlier run.
        newest_older_ctime = 0
        for lock in locks:
            stat = os.stat(lock)
            # A lock newer than ours means a newer process has taken over.
            if int(stat.st_ctime) > int(now) and not lock.endswith(lock_name):
                self.log_info("Newer lock found. Exiting.")
                os.remove(self.lock_filename)
                return
            if stat.st_ctime > newest_older_ctime and int(
                    stat.st_ctime) < int(now):
                newest_older_ctime = stat.st_ctime

        # Collect only regular, non-hidden files in our time window
        # (after the previous lock, no later than our own).
        report_filenames = []
        with os.scandir(self.dir_report_incoming) as iterator:
            for entry in iterator:
                if not entry.name.startswith('.') and entry.is_file():
                    stat = entry.stat()
                    if stat.st_mtime > newest_older_ctime and stat.st_mtime <= now:
                        report_filenames.append(entry.name)

        # We create a dict of SHA1 unique reports and then treat them as one
        # with appropriate count.

        reports = {}
        for i, fname in enumerate(sorted(report_filenames), start=1):
            filename = os.path.join(self.dir_report_incoming, fname)
            self.log_info("[{0} / {1}] Loading file '{2}'".format(
                i, len(report_filenames), filename))

            try:
                with open(filename, "rb") as fil:
                    stat = os.stat(filename)
                    contents = fil.read()
                    # Hash content plus the modification DATE so identical
                    # reports from different days stay separate.
                    h = hashlib.sha1()
                    h.update(contents)
                    h.update(
                        datetime.date.fromtimestamp(
                            stat.st_mtime).isoformat().encode("utf-8"))
                    digest = h.digest()
                    if digest in reports:
                        # Duplicate: remember the filename and keep the
                        # newest mtime for the timestamp.
                        reports[digest]["filenames"].append(fname)
                        if reports[digest]["mtime"] < stat.st_mtime:
                            reports[digest]["mtime"] = stat.st_mtime
                        self.log_debug("Duplicate")
                    else:
                        reports[digest] = {
                            "ureport": json.loads(contents),
                            "filenames": [fname],
                            "mtime": stat.st_mtime,
                        }
                        self.log_debug("Original")

            except (OSError, ValueError) as ex:
                self.log_warn("Failed to load uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

        for i, unique in enumerate(reports.values(), start=1):
            self.log_info("[{0} / {1}] Processing unique file '{2}'".format(
                i, len(reports), unique["filenames"][0]))
            ureport = unique["ureport"]
            try:
                validate(ureport)
            except FafError as ex:
                self.log_warn("uReport is invalid: {0}".format(str(ex)))

                # Record operating systems we do not recognize so they can
                # be reviewed and added later.
                if ("os" in ureport and "name" in ureport["os"]
                        and ureport["os"]["name"] not in systems
                        and ureport["os"]["name"].lower() not in systems):
                    self._save_unknown_opsys(db, ureport["os"])

                self._move_reports_to_deferred(unique["filenames"])
                continue

            mtime = unique["mtime"]
            timestamp = datetime.datetime.fromtimestamp(mtime)

            try:
                # Save once with count equal to the number of duplicates.
                save(db,
                     ureport,
                     create_component=self.create_components,
                     timestamp=timestamp,
                     count=len(unique["filenames"]))
            except FafError as ex:
                self.log_warn("Failed to save uReport: {0}".format(str(ex)))
                self._move_reports_to_deferred(unique["filenames"])
                continue

            self._move_reports_to_saved(unique["filenames"])

        self.log_debug("Removing lock %s", self.lock_filename)
        os.remove(self.lock_filename)
Example #12
0
    def _save_reports_speedup(self, db):
        """Save incoming reports, deduplicating identical files by SHA1.

        A timestamped lock file restricts this run to files modified
        after the previous run's lock and before this one, so multiple
        instances can safely run in parallel.
        """
        self.log_info("Saving reports (--speedup)")

        # This creates a lock file and only works on file modified between the
        # last lock file and this new lock file. This way a new process can
        # be run while the older is still running.

        now = time.time()
        # Lock name embeds PID and creation time so concurrent runs never
        # collide and locks can be ordered by age.
        lock_name = ".sr-speedup-{0}-{1}.lock".format(os.getpid(),
                                                      int(now))

        self.lock_filename = os.path.join(self.dir_report_incoming, lock_name)
        open(self.lock_filename, "w").close()
        # Pin the lock's timestamps to `now` so the age comparisons below
        # are consistent with the filename.
        os.utime(self.lock_filename, (int(now), int(now)))
        self.log_debug("Created lock {0}".format(self.lock_filename))

        # Remove lock on SIGTERM and Ctrl-C
        def handle_term(_, __):
            self.log_debug("Signal caught, removing lock {0}".format(self.lock_filename))
            os.remove(self.lock_filename)
            sys.exit(0)
        signal.signal(signal.SIGTERM, handle_term)
        signal.signal(signal.SIGINT, handle_term)

        locks = glob.glob(os.path.join(self.dir_report_incoming,
                                       ".sr-speedup-*.lock"))
        # ctime of the most recent lock older than ours; files modified
        # before it were already handled by an earlier run.
        newest_older_ctime = 0
        for lock in locks:
            stat = os.stat(lock)
            # A lock newer than ours means a newer process has taken over.
            if int(stat.st_ctime) > int(now) and not lock.endswith(lock_name):
                self.log_info("Newer lock found. Exiting.")
                os.remove(self.lock_filename)
                return
            if stat.st_ctime > newest_older_ctime and int(stat.st_ctime) < int(now):
                newest_older_ctime = stat.st_ctime

        # Collect only non-hidden files in our time window (after the
        # previous lock, no later than our own).
        report_filenames = []
        for fname in os.listdir(self.dir_report_incoming):
            stat = os.stat(os.path.join(self.dir_report_incoming, fname))
            if fname[0] != "." and stat.st_mtime > newest_older_ctime and stat.st_mtime <= now:
                report_filenames.append(fname)

        # We create a dict of SHA1 unique reports and then treat them as one
        # with appropriate count.

        reports = {}
        i = 0
        for fname in sorted(report_filenames):
            i += 1

            filename = os.path.join(self.dir_report_incoming, fname)
            self.log_info("[{0} / {1}] Loading file '{2}'"
                          .format(i, len(report_filenames), filename))

            try:
                with open(filename, "rb") as fil:
                    stat = os.stat(filename)
                    contents = fil.read()
                    # Hash content plus the modification DATE so identical
                    # reports from different days stay separate.
                    h = hashlib.sha1()
                    h.update(contents)
                    h.update(datetime.date.fromtimestamp(stat.st_mtime)
                             .isoformat().encode("utf-8"))
                    digest = h.digest()
                    if digest in reports:
                        # Duplicate: remember the filename and keep the
                        # newest mtime for the timestamp.
                        reports[digest]["filenames"].append(fname)
                        if reports[digest]["mtime"] < stat.st_mtime:
                            reports[digest]["mtime"] = stat.st_mtime
                        self.log_debug("Duplicate")
                    else:
                        reports[digest] = {
                            "ureport": json.loads(contents),
                            "filenames": [fname],
                            "mtime": stat.st_mtime,
                        }
                        self.log_debug("Original")

            except (OSError, ValueError) as ex:
                self.log_warn("Failed to load uReport: {0}".format(str(ex)))
                self._move_report_to_deferred(fname)
                continue

        i = 0
        for unique in reports.values():
            i += 1
            self.log_info("[{0} / {1}] Processing unique file '{2}'"
                          .format(i, len(reports), unique["filenames"][0]))
            ureport = unique["ureport"]
            try:
                validate(ureport)
            except FafError as ex:
                self.log_warn("uReport is invalid: {0}".format(str(ex)))

                # Record operating systems we do not recognize so they can
                # be reviewed and added later.
                if ("os" in ureport and
                        "name" in ureport["os"] and
                        ureport["os"]["name"] not in systems and
                        ureport["os"]["name"].lower() not in systems):
                    self._save_unknown_opsys(db, ureport["os"])

                self._move_reports_to_deferred(unique["filenames"])
                continue

            mtime = unique["mtime"]
            timestamp = datetime.datetime.fromtimestamp(mtime)

            try:
                # Save once with count equal to the number of duplicates.
                save(db, ureport, create_component=self.create_components,
                     timestamp=timestamp, count=len(unique["filenames"]))
            except FafError as ex:
                self.log_warn("Failed to save uReport: {0}".format(str(ex)))
                self._move_reports_to_deferred(unique["filenames"])
                continue

            self._move_reports_to_saved(unique["filenames"])

        self.log_debug("Removing lock {0}".format(self.lock_filename))
        os.remove(self.lock_filename)