def find_solution_ureport(self, db, ureport, osr=None):
    """Find a probable-fix solution for *ureport*.

    Converts the report to the v2 format, validates it, and looks up its
    stored counterpart.  For each (matching) opsysrelease of the report's
    problem that carries a probable fix build, compares the affected
    package's EVR against the fix build's EVR.

    :param db: database session wrapper.
    :param ureport: uReport dict (v1 or v2).
    :param osr: optional OpSysRelease to restrict the search to.
    :return: result of self._posr_to_solution(posr), or None when the report
        is unknown, has no problem, no probable fix exists, or the affected
        package is not older than the fix.
    """
    ureport = ureport2(ureport)
    validate(ureport)
    db_report = self._get_db_report(db, ureport)
    if db_report is None:
        return None
    if db_report.problem is None:
        return None
    for posr in db_report.problem.opsysreleases:
        if osr is None or posr.opsysrelease_id == osr.id:
            if posr.probable_fix_build is not None:
                db_build = posr.probable_fix_build
                # Locate the package marked "affected".  Using next() with a
                # default fixes a NameError the old for/break scan raised
                # when ureport["packages"] was empty (loop variable unbound).
                pkg = next((p for p in ureport["packages"]
                            if p.get("package_role", "") == "affected"),
                           None)
                if pkg is None:
                    return None
                # Fixing version must be greater than affected version
                if cmp_evr((pkg["epoch"], pkg["version"], pkg["release"]),
                           (db_build.epoch, db_build.version,
                            db_build.release)) < 0:
                    return self._posr_to_solution(posr)
                return None
    return None
def find_solution_ureport(self, db, ureport, osr=None):
    """Find a probable-fix solution for *ureport*.

    Same contract as the sibling variant: validate the (converted) report,
    find its stored report and problem, then for each matching opsysrelease
    with a probable fix build decide whether the affected package version is
    older than the fix.

    :return: result of self._posr_to_solution(posr) when the fix build is
        newer than the affected package, otherwise None.
    """
    ureport = ureport2(ureport)
    validate(ureport)
    db_report = self._get_db_report(db, ureport)
    if db_report is None:
        return None
    if db_report.problem is None:
        return None
    for posr in db_report.problem.opsysreleases:
        if osr is None or posr.opsysrelease_id == osr.id:
            if posr.probable_fix_build is not None:
                db_build = posr.probable_fix_build
                # next() with a default replaces the for/break scan, which
                # raised NameError on an empty "packages" list because the
                # loop variable was never bound.
                pkg = next((p for p in ureport["packages"]
                            if p.get("package_role", "") == "affected"),
                           None)
                if pkg is None:
                    return None
                # Fixing version must be greater than affected version
                if cmp_evr((pkg["epoch"], pkg["version"], pkg["release"]),
                           (db_build.epoch, db_build.version,
                            db_build.release)) < 0:
                    return self._posr_to_solution(posr)
                return None
    return None
def save_report_dict(self, report):
    """Validate *report* (a uReport dict) and persist it, flushing the session.

    Timestamps the report with the current UTC time.
    """
    ureport.validate(report)
    timestamp = datetime.datetime.utcnow()
    ureport.save(self.db, report, timestamp=timestamp)
    self.db.session.flush()
def clean_file(self):
    """Django form hook: parse and validate the uploaded uReport file.

    Returns a dict with the parsed report under 'converted' and the raw
    upload under 'json'.  Invalid uploads are archived via
    self._save_invalid_ureport and rejected with forms.ValidationError.
    """
    raw_data = self.cleaned_data['file'].read()
    try:
        data = json.loads(raw_data)
    except Exception as ex:
        # Keep the unparseable payload around for later inspection.
        self._save_invalid_ureport(raw_data, str(ex))
        raise forms.ValidationError('Invalid JSON file')

    try:
        ureport.validate(data)
    except Exception as exp:
        # Record "name version" of the reporting tool when available.
        reporter = None
        if ("reporter" in data and
                "name" in data["reporter"] and
                "version" in data["reporter"]):
            reporter = "{0} {1}".format(data["reporter"]["name"],
                                        data["reporter"]["version"])

        self._save_invalid_ureport(json.dumps(data, indent=2),
                                   str(exp), reporter=reporter)

        # Track operating systems not present in `systems` (checked both
        # as-is and lowercased) so unknown OSes can be reviewed later.
        if ("os" in data and
                "name" in data["os"] and
                data["os"]["name"] not in systems and
                data["os"]["name"].lower() not in systems):
            self._save_unknown_opsys(data["os"])

        raise forms.ValidationError('Validation failed: %s' % exp)

    return dict(converted=data, json=raw_data)
def test_ureport_validation(self):
    """
    Check if ureport validation works correctly for both versions.
    """
    # validate raises FafError on failure, so simply calling it on every
    # sample report is the assertion.
    for name in self.sample_report_names:
        validate(self.sample_reports[name])
def _get_db_report(self, db, ureport):
    """Return the stored report matching *ureport*'s problem hash, or None.

    The report is converted to v2 and validated, then hashed by the
    problem-type plugin registered for its problem type.
    """
    normalized = ureport2(ureport)
    validate(normalized)

    plugin = problemtypes[normalized["problem"]["type"]]
    report_hash = plugin.hash_ureport(normalized["problem"])

    # get_report already yields None for an unknown hash, so its result can
    # be returned directly.
    return get_report(db, report_hash)
def _save_reports(self, db, pattern="*"):
    """Process incoming uReport files matching *pattern*.

    For each file: load JSON, validate, and save to the database with the
    file's mtime as timestamp.  Files that fail any step are moved to the
    deferred directory; successfully saved files are moved to the saved
    directory.  Unknown operating systems found in invalid reports are
    recorded via self._save_unknown_opsys.

    :param db: database session wrapper.
    :param pattern: glob pattern applied inside dir_report_incoming.
    """
    self.log_info("Saving reports")
    report_filenames = glob.glob(
        os.path.join(self.dir_report_incoming, pattern))

    # enumerate(..., start=1) replaces the manual `i = 0 / i += 1` counter.
    for i, filename in enumerate(sorted(report_filenames), start=1):
        fname = os.path.basename(filename)
        self.log_info("[{0} / {1}] Processing file '{2}'".format(
            i, len(report_filenames), filename))

        try:
            with open(filename, "r") as fil:
                ureport = json.load(fil)
        except (OSError, ValueError) as ex:
            self.log_warn("Failed to load uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

        try:
            validate(ureport)
        except FafError as ex:
            self.log_warn("uReport is invalid: {0}".format(str(ex)))
            # Remember operating systems we do not know about.
            if ("os" in ureport and
                    "name" in ureport["os"] and
                    ureport["os"]["name"] not in systems and
                    ureport["os"]["name"].lower() not in systems):
                self._save_unknown_opsys(db, ureport["os"])
            self._move_report_to_deferred(fname)
            continue

        # Use the file's modification time as the report timestamp.
        mtime = os.path.getmtime(filename)
        timestamp = datetime.datetime.fromtimestamp(mtime)

        try:
            save(db, ureport, create_component=self.create_components,
                 timestamp=timestamp)
        except FafError as ex:
            self.log_warn("Failed to save uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

        self._move_report_to_saved(fname)
def clean_file(self):
    """Django form hook: parse, convert, and validate the uploaded uReport.

    Returns a dict with the converted/validated report under 'converted'
    and the raw upload under 'json'.  Invalid uploads are archived via
    self._save_invalid_ureport and rejected with forms.ValidationError.
    """
    raw_data = self.cleaned_data['file'].read()
    try:
        data = json.loads(raw_data)
    except Exception as ex:
        # Keep the unparseable payload around for later inspection.
        self._save_invalid_ureport(raw_data, str(ex))
        raise forms.ValidationError('Invalid JSON file')

    # Normalize values to str before validation; validate() returns the
    # (possibly adjusted) report, which is what callers receive.
    converted = ureport.convert_to_str(data)
    try:
        converted = ureport.validate(converted)
    except Exception as exp:
        # Record "name version" of the reporting tool when available.
        reporter = None
        if ("reporter" in converted and
                "name" in converted["reporter"] and
                "version" in converted["reporter"]):
            reporter = "{0} {1}".format(converted["reporter"]["name"],
                                        converted["reporter"]["version"])

        self._save_invalid_ureport(json.dumps(data, indent=2),
                                   str(exp), reporter=reporter)

        raise forms.ValidationError('Validation failed: %s' % exp)

    return dict(converted=converted, json=raw_data)
def find_solution_ureport(self, db, ureport, osr=None) -> Optional[SfPrefilterSolution]:
    """
    Check whether uReport matches a knowledgebase entry.
    Return a pyfaf.storage.SfPrefilterSolution object or None.
    """
    # Convert legacy v1 reports before validating.
    if "ureport_version" in ureport and ureport["ureport_version"] == 1:
        ureport = ureport1to2(ureport)

    validate(ureport)

    db_opsys = None
    if osr is not None:
        db_opsys = osr.opsys

    osname = ureport["os"]["name"]
    if osname not in systems:
        log.warning("Operating system '%s' is not supported", osname)
    else:
        osplugin = systems[osname]
        db_opsys = get_opsys_by_name(db, osplugin.nice_name)
        if db_opsys is None:
            # Fixed typo in log message: "Operaring" -> "Operating".
            log.warning(
                "Operating system '%s' is not installed in storage",
                osplugin.nice_name)
        else:
            # Try package-name based knowledgebase entries first.
            pkgname_parsers = self._get_pkgname_parsers(db, db_opsys=db_opsys)
            for parser, solution in pkgname_parsers.items():
                if osplugin.check_pkgname_match(ureport["packages"], parser):
                    return self._sfps_to_solution(solution)

    ptype = ureport["problem"]["type"]
    if ptype not in problemtypes:
        log.warning("Problem type '%s' is not supported", ptype)
    else:
        # Fall back to backtrace-path based entries.
        problemplugin = problemtypes[ptype]
        btpath_parsers = self._get_btpath_parsers(db, db_opsys=db_opsys)
        for parser, solution in btpath_parsers.items():
            if problemplugin.check_btpath_match(ureport["problem"], parser):
                return self._sfps_to_solution(solution)

    return None
def _save_reports(self, db):
    """Process every file in the incoming-report directory.

    Each file is loaded as JSON, validated, and saved with its mtime as
    the report timestamp; failures move the file to the deferred directory,
    successes to the saved directory.
    """
    self.log_info("Saving reports")
    report_filenames = os.listdir(self.dir_report_incoming)
    total = len(report_filenames)

    for i, fname in enumerate(sorted(report_filenames), start=1):
        filename = os.path.join(self.dir_report_incoming, fname)
        self.log_info("[{0} / {1}] Processing file '{2}'"
                      .format(i, total, filename))

        try:
            with open(filename, "r") as fil:
                ureport = json.load(fil)
        except (OSError, ValueError) as ex:
            self.log_warn("Failed to load uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

        try:
            validate(ureport)
        except FafError as ex:
            self.log_warn("uReport is invalid: {0}".format(str(ex)))
            # Record operating systems that are not configured.
            unknown_os = ("os" in ureport and
                          "name" in ureport["os"] and
                          ureport["os"]["name"] not in systems and
                          ureport["os"]["name"].lower() not in systems)
            if unknown_os:
                self._save_unknown_opsys(db, ureport["os"])
            self._move_report_to_deferred(fname)
            continue

        timestamp = datetime.datetime.fromtimestamp(
            os.path.getmtime(filename))

        try:
            save(db, ureport, create_component=self.create_components,
                 timestamp=timestamp)
        except FafError as ex:
            self.log_warn("Failed to save uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

        self._move_report_to_saved(fname)
def save_report(self, filename):
    """
    Save report located in sample_reports directory with `filename`.
    """
    report_path = os.path.join(self.reports_path, filename)
    with open(report_path) as handle:
        loaded = json.load(handle)

    ureport.validate(loaded)
    now = datetime.datetime.utcnow()
    ureport.save(self.db, loaded, timestamp=now)
    self.db.session.flush()
def find_solution_ureport(self, db, ureport, osr=None):
    """
    Check whether uReport matches a knowledgebase entry.
    Return a pyfaf.storage.SfPrefilterSolution object or None.
    """
    # Convert legacy v1 reports before validating.
    if "ureport_version" in ureport and ureport["ureport_version"] == 1:
        ureport = ureport1to2(ureport)

    validate(ureport)

    db_opsys = None
    if osr is not None:
        db_opsys = osr.opsys

    osname = ureport["os"]["name"]
    if osname not in systems:
        log.warning("Operating system '%s' is not supported", osname)
    else:
        osplugin = systems[osname]
        db_opsys = get_opsys_by_name(db, osplugin.nice_name)
        if db_opsys is None:
            # Fixed typo in log message: "Operaring" -> "Operating".
            log.warning("Operating system '%s' is not installed in storage",
                        osplugin.nice_name)
        else:
            # Package-name based knowledgebase entries take priority.
            pkgname_parsers = self._get_pkgname_parsers(db, db_opsys=db_opsys)
            for parser, solution in pkgname_parsers.items():
                if osplugin.check_pkgname_match(ureport["packages"], parser):
                    return self._sfps_to_solution(solution)

    ptype = ureport["problem"]["type"]
    if ptype not in problemtypes:
        log.warning("Problem type '%s' is not supported", ptype)
    else:
        # Fall back to backtrace-path based entries.
        problemplugin = problemtypes[ptype]
        btpath_parsers = self._get_btpath_parsers(db, db_opsys=db_opsys)
        for parser, solution in btpath_parsers.items():
            if problemplugin.check_btpath_match(ureport["problem"], parser):
                return self._sfps_to_solution(solution)

    return None
def save_report(self, filename):
    '''
    Save report located in sample_reports directory with `filename`.
    '''
    path = os.path.join('sample_reports', filename)
    # json.load reads directly from the file object — idiomatic and avoids
    # materializing the whole file as an intermediate string.
    with open(path) as f:
        report = ureport.convert_to_str(json.load(f))

    report = ureport.validate(report)
    # Timestamp the report with the sample file's mtime (UTC).
    mtime = datetime.datetime.utcfromtimestamp(os.stat(path).st_mtime)
    ureport.add_report(report, self.db, utctime=mtime)
    self.db.session.flush()
def new():
    """Flask endpoint: accept a new uReport upload on POST.

    Flow: validate form -> parse JSON -> validate uReport -> enforce size
    limit -> look up the OS release -> check whether the report is already
    known -> archive the raw payload into reports_incoming -> respond with
    JSON (202) for API clients or flash + HTML otherwise.  InvalidUsage
    carries the HTTP status code for all rejection paths; DB outages are
    answered with 503.
    """
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)

            raw_data = request.files[form.file.name].read()

            try:
                data = json.loads(raw_data)
            except Exception as ex: # pylint: disable=broad-except
                # Keep the unparseable payload around for later inspection.
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp: # pylint: disable=broad-except
                # Record "name version" of the reporting tool when available.
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                # Track operating systems that are not configured yet.
                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                if str(exp) == 'uReport must contain affected package':
                    raise InvalidUsage(("Server is not accepting problems "
                                        "from unpackaged files."), 400)

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            # Reject reports larger than the DB LOB column can hold.
            max_ureport_length = InvalidUReport.__lobs__["ureport"]
            if len(str(report)) > max_ureport_length:
                raise InvalidUsage("uReport may only be {0} bytes long"
                                   .format(max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                try:
                    osr = (db.session.query(OpSysRelease)
                           .join(OpSys)
                           .filter(OpSys.name ==
                                   systems[report["os"]["name"]].nice_name)
                           .filter(OpSysRelease.version ==
                                   report["os"]["version"])
                           .first())
                except (DatabaseError, InterfaceError) as e:
                    # NOTE(review): message reconstructed from a wrapped
                    # source line — verify exact wording against VCS.
                    flash("Database unreachable. The uReport couldn't be saved. Please try again later.", "danger")
                    logging.exception(e)
                    return render_template("reports/new.html", form=form), 503 #HTTP Service Unavailable

                if osr:
                    osr_id = osr.id

            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e: # pylint: disable=broad-except
                # Lookup failure degrades to "unknown" rather than an error.
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)

            # Archive the raw payload under a random name for asynchronous
            # processing by the save-reports action.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            with open(fpath, 'w') as file:
                file.write(raw_data.decode("utf-8"))

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                    ureport.validate(report2)
                except FafError:
                    report2 = None

                if report2 is not None:
                    try:
                        solution = find_solution(report2, db=db, osr=osr)
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The solution couldn't be retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html", form=form), 503 #HTTP Service Unavailable

                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}"
                                .format(solution.url))

                        solution_dict = {"cause": solution.cause,
                                         "note": solution.note_text,
                                         "url": solution.url}
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        # A knowledgebase hit counts as a known problem.
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[
                            report2["problem"]["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e: # pylint: disable=broad-except
                        logging.exception(e)

                if known:
                    url = url_for('reports.item', report_id=dbreport.id,
                                  _external=True)
                    parts = [{"reporter": "ABRT Server",
                              "value": url,
                              "type": "url"}]

                    try:
                        bugs = (db.session.query(BzBug)
                                .join(ReportBz)
                                .filter(ReportBz.bzbug_id == BzBug.id)
                                .filter(ReportBz.report_id == dbreport.id)
                                .all())
                    except (DatabaseError, InterfaceError) as e:
                        flash("Database unreachable. The bugs couldn't be retrieved. Please try again later.", "danger")
                        logging.exception(e)
                        return render_template("reports/new.html", form=form), 503 #HTTP Service Unavailable

                    for bug in bugs:
                        parts.append({"reporter": "Bugzilla",
                                      "value": bug.url,
                                      "type": "url"})

                    # Append all URL-type parts to the message.
                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response[
                        'message'] += "\n".join(p["value"] for p in parts
                                                if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                # 202 Accepted: the report is archived, not yet processed.
                json_response.status_code = 202
                return json_response

            flash("The uReport was saved successfully. Thank you.",
                  "success")
            return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response

            flash(e.message, "danger")
            return render_template("reports/new.html",
                                   form=form), e.status_code

    return render_template("reports/new.html", form=form)
def new():
    """Flask endpoint: accept a new uReport upload on POST (older variant).

    Flow: validate form -> parse JSON -> validate uReport -> enforce size
    limit -> look up the OS release -> check whether the report is already
    known -> archive the raw payload into reports_incoming -> respond with
    JSON (202) for API clients or flash + HTML otherwise.  InvalidUsage
    carries the HTTP status for all rejection paths.
    """
    form = NewReportForm()
    if request.method == "POST":
        try:
            if not form.validate() or form.file.name not in request.files:
                raise InvalidUsage("Invalid form data.", 400)

            raw_data = request.files[form.file.name].read()
            try:
                data = json.loads(raw_data)
            except Exception as ex:
                # Keep the unparseable payload around for later inspection.
                _save_invalid_ureport(db, raw_data, str(ex))
                raise InvalidUsage("Couldn't parse JSON data.", 400)

            try:
                ureport.validate(data)
            except Exception as exp:
                # Record "name version" of the reporting tool when available.
                reporter = None
                if ("reporter" in data and
                        "name" in data["reporter"] and
                        "version" in data["reporter"]):
                    reporter = "{0} {1}".format(data["reporter"]["name"],
                                                data["reporter"]["version"])

                _save_invalid_ureport(db, json.dumps(data, indent=2),
                                      str(exp), reporter=reporter)

                # Track operating systems that are not configured yet.
                if ("os" in data and
                        "name" in data["os"] and
                        data["os"]["name"] not in systems and
                        data["os"]["name"].lower() not in systems):
                    _save_unknown_opsys(db, data["os"])

                raise InvalidUsage("uReport data is invalid.", 400)

            report = data

            # Reject reports larger than the DB LOB column can hold.
            max_ureport_length = InvalidUReport.__lobs__["ureport"]
            if len(str(report)) > max_ureport_length:
                raise InvalidUsage(
                    "uReport may only be {0} bytes long".format(
                        max_ureport_length), 413)

            osr_id = None
            osr = None
            if report["os"]["name"] in systems:
                osr = (db.session.query(OpSysRelease).join(OpSys).filter(
                    OpSys.name == systems[report["os"]["name"]].nice_name
                ).filter(
                    OpSysRelease.version == report["os"]["version"]).first())

                if osr:
                    osr_id = osr.id

            try:
                dbreport = ureport.is_known(report, db, return_report=True,
                                            opsysrelease_id=osr_id)
            except Exception as e:
                # Lookup failure degrades to "unknown" rather than an error.
                logging.exception(e)
                dbreport = None

            known = bool(dbreport)

            # Archive the raw payload under a random name for asynchronous
            # processing by the save-reports action.
            fname = str(uuid.uuid4())
            fpath = os.path.join(paths["reports_incoming"], fname)
            # NOTE(review): raw_data comes from request.files[...].read() and
            # is likely bytes, but the file is opened in text mode — the
            # sibling variant decodes with .decode("utf-8") first.  Confirm
            # this works on Python 3.
            with open(fpath, 'w') as file:
                file.write(raw_data)

            if request_wants_json():
                response = {'result': known}

                try:
                    report2 = ureport2(report)
                except FafError:
                    report2 = None

                if report2 is not None:
                    solution = find_solution(report2, db=db, osr=osr)
                    if solution is not None:
                        response["message"] = (
                            "Your problem seems to be caused by {0}\n\n"
                            "{1}".format(solution.cause, solution.note_text))

                        if solution.url:
                            response["message"] += (
                                "\n\nYou can get more information at {0}".
                                format(solution.url))

                        solution_dict = {
                            "cause": solution.cause,
                            "note": solution.note_text,
                            "url": solution.url
                        }
                        if not solution_dict["url"]:
                            del solution_dict["url"]
                        response["solutions"] = [solution_dict]
                        # A knowledgebase hit counts as a known problem.
                        response["result"] = True

                    try:
                        problemplugin = problemtypes[report2["problem"]
                                                     ["type"]]
                        response["bthash"] = problemplugin.hash_ureport(
                            report2["problem"])
                    except Exception as e:
                        logging.exception(e)
                        pass

                if known:
                    url = url_for('reports.item',
                                  report_id=dbreport.id,
                                  _external=True)
                    parts = [{
                        "reporter": "ABRT Server",
                        "value": url,
                        "type": "url"
                    }]

                    bugs = (db.session.query(BzBug).join(ReportBz).filter(
                        ReportBz.bzbug_id == BzBug.id).filter(
                            ReportBz.report_id == dbreport.id).all())
                    for bug in bugs:
                        parts.append({
                            "reporter": "Bugzilla",
                            "value": bug.url,
                            "type": "url"
                        })

                    # Append all URL-type parts to the message.
                    if 'message' not in response:
                        response['message'] = ''
                    else:
                        response['message'] += '\n\n'

                    response['message'] += "\n".join(
                        p["value"] for p in parts
                        if p["type"].lower() == "url")
                    response['reported_to'] = parts

                json_response = jsonify(response)
                # 202 Accepted: the report is archived, not yet processed.
                json_response.status_code = 202
                return json_response
            else:
                flash("The uReport was saved successfully. Thank you.",
                      "success")
                return render_template("reports/new.html", form=form), 202

        except InvalidUsage as e:
            if request_wants_json():
                response = jsonify({"error": e.message})
                response.status_code = e.status_code
                return response
            else:
                flash(e.message, "danger")
                return render_template("reports/new.html",
                                       form=form), e.status_code

    return render_template("reports/new.html", form=form)
def _save_reports_speedup(self, db):
    """Batch-save incoming uReports, deduplicating identical files.

    Uses a timestamped lock file so multiple workers can overlap: this run
    only considers files modified after the newest older lock and no later
    than its own lock time.  Identical payloads from the same day are
    collapsed into one report saved with a count.
    """
    self.log_info("Saving reports (--speedup)")
    # This creates a lock file and only works on file modified between the
    # last lock file and this new lock file. This way a new process can
    # be run while the older is still running.
    now = time.time()
    lock_name = ".sr-speedup-{0}-{1}.lock".format(os.getpid(), int(now))
    self.lock_filename = os.path.join(self.dir_report_incoming, lock_name)
    open(self.lock_filename, "w").close()
    # Pin the lock's timestamps to the (truncated) creation time.
    os.utime(self.lock_filename, (int(now), int(now)))
    self.log_debug("Created lock %s", self.lock_filename)

    # Remove lock on SIGTERM and Ctrl-C
    def handle_term(_, __):
        self.log_debug("Signal caught, removing lock %s", self.lock_filename)
        os.remove(self.lock_filename)
        sys.exit(0)
    signal.signal(signal.SIGTERM, handle_term)
    signal.signal(signal.SIGINT, handle_term)

    locks = glob.glob(
        os.path.join(self.dir_report_incoming, ".sr-speedup-*.lock"))
    newest_older_ctime = 0
    for lock in locks:
        stat = os.stat(lock)
        # A lock newer than ours means another worker took over; bail out.
        if int(stat.st_ctime) > int(now) and not lock.endswith(lock_name):
            self.log_info("Newer lock found. Exiting.")
            os.remove(self.lock_filename)
            return
        if stat.st_ctime > newest_older_ctime and int(
                stat.st_ctime) < int(now):
            newest_older_ctime = stat.st_ctime

    # Collect only regular, non-hidden files in this run's time window.
    report_filenames = []
    with os.scandir(self.dir_report_incoming) as iterator:
        for entry in iterator:
            if not entry.name.startswith('.') and entry.is_file():
                stat = entry.stat()
                if stat.st_mtime > newest_older_ctime and stat.st_mtime <= now:
                    report_filenames.append(entry.name)

    # We create a dict of SHA1 unique reports and then treat them as one
    # with appropriate count.
    reports = {}
    for i, fname in enumerate(sorted(report_filenames), start=1):
        filename = os.path.join(self.dir_report_incoming, fname)
        self.log_info("[{0} / {1}] Loading file '{2}'".format(
            i, len(report_filenames), filename))
        try:
            with open(filename, "rb") as fil:
                stat = os.stat(filename)
                contents = fil.read()
                # Hash contents + mtime *date*, so identical payloads from
                # the same day share one digest (and one saved report).
                h = hashlib.sha1()
                h.update(contents)
                h.update(
                    datetime.date.fromtimestamp(
                        stat.st_mtime).isoformat().encode("utf-8"))
                digest = h.digest()
                if digest in reports:
                    reports[digest]["filenames"].append(fname)
                    # Keep the newest mtime as the group's timestamp.
                    if reports[digest]["mtime"] < stat.st_mtime:
                        reports[digest]["mtime"] = stat.st_mtime
                    self.log_debug("Duplicate")
                else:
                    reports[digest] = {
                        "ureport": json.loads(contents),
                        "filenames": [fname],
                        "mtime": stat.st_mtime,
                    }
                    self.log_debug("Original")
        except (OSError, ValueError) as ex:
            self.log_warn("Failed to load uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

    for i, unique in enumerate(reports.values(), start=1):
        self.log_info("[{0} / {1}] Processing unique file '{2}'".format(
            i, len(reports), unique["filenames"][0]))
        ureport = unique["ureport"]
        try:
            validate(ureport)
        except FafError as ex:
            self.log_warn("uReport is invalid: {0}".format(str(ex)))
            # Record operating systems that are not configured.
            if ("os" in ureport and
                    "name" in ureport["os"] and
                    ureport["os"]["name"] not in systems and
                    ureport["os"]["name"].lower() not in systems):
                self._save_unknown_opsys(db, ureport["os"])
            self._move_reports_to_deferred(unique["filenames"])
            continue

        mtime = unique["mtime"]
        timestamp = datetime.datetime.fromtimestamp(mtime)
        try:
            # count collapses all duplicate files into one saved report.
            save(db, ureport, create_component=self.create_components,
                 timestamp=timestamp, count=len(unique["filenames"]))
        except FafError as ex:
            self.log_warn("Failed to save uReport: {0}".format(str(ex)))
            self._move_reports_to_deferred(unique["filenames"])
            continue

        self._move_reports_to_saved(unique["filenames"])

    self.log_debug("Removing lock %s", self.lock_filename)
    os.remove(self.lock_filename)
def _save_reports_speedup(self, db):
    """Batch-save incoming uReports, deduplicating identical files (older
    variant using os.listdir and eager log formatting).

    Uses a timestamped lock file so multiple workers can overlap: this run
    only considers files modified after the newest older lock and no later
    than its own lock time.  Identical payloads from the same day are
    collapsed into one report saved with a count.
    """
    self.log_info("Saving reports (--speedup)")
    # This creates a lock file and only works on file modified between the
    # last lock file and this new lock file. This way a new process can
    # be run while the older is still running.
    now = time.time()
    lock_name = ".sr-speedup-{0}-{1}.lock".format(os.getpid(), int(now))
    self.lock_filename = os.path.join(self.dir_report_incoming, lock_name)
    open(self.lock_filename, "w").close()
    # Pin the lock's timestamps to the (truncated) creation time.
    os.utime(self.lock_filename, (int(now), int(now)))
    self.log_debug("Created lock {0}".format(self.lock_filename))

    # Remove lock on SIGTERM and Ctrl-C
    def handle_term(_, __):
        self.log_debug("Signal caught, removing lock {0}".format(self.lock_filename))
        os.remove(self.lock_filename)
        sys.exit(0)
    signal.signal(signal.SIGTERM, handle_term)
    signal.signal(signal.SIGINT, handle_term)

    locks = glob.glob(os.path.join(self.dir_report_incoming, ".sr-speedup-*.lock"))
    newest_older_ctime = 0
    for lock in locks:
        stat = os.stat(lock)
        # A lock newer than ours means another worker took over; bail out.
        if int(stat.st_ctime) > int(now) and not lock.endswith(lock_name):
            self.log_info("Newer lock found. Exiting.")
            os.remove(self.lock_filename)
            return
        if stat.st_ctime > newest_older_ctime and int(stat.st_ctime) < int(now):
            newest_older_ctime = stat.st_ctime

    # Only non-hidden files inside this run's time window.
    report_filenames = []
    for fname in os.listdir(self.dir_report_incoming):
        stat = os.stat(os.path.join(self.dir_report_incoming, fname))
        if fname[0] != "." and stat.st_mtime > newest_older_ctime and stat.st_mtime <= now:
            report_filenames.append(fname)

    # We create a dict of SHA1 unique reports and then treat them as one
    # with appropriate count.
    reports = {}
    i = 0
    for fname in sorted(report_filenames):
        i += 1
        filename = os.path.join(self.dir_report_incoming, fname)
        self.log_info("[{0} / {1}] Loading file '{2}'"
                      .format(i, len(report_filenames), filename))
        try:
            with open(filename, "rb") as fil:
                stat = os.stat(filename)
                contents = fil.read()
                # Hash contents + mtime *date*, so identical payloads from
                # the same day share one digest (and one saved report).
                h = hashlib.sha1()
                h.update(contents)
                h.update(datetime.date.fromtimestamp(stat.st_mtime)
                         .isoformat().encode("utf-8"))
                digest = h.digest()
                if digest in reports:
                    reports[digest]["filenames"].append(fname)
                    # Keep the newest mtime as the group's timestamp.
                    if reports[digest]["mtime"] < stat.st_mtime:
                        reports[digest]["mtime"] = stat.st_mtime
                    self.log_debug("Duplicate")
                else:
                    reports[digest] = {
                        "ureport": json.loads(contents),
                        "filenames": [fname],
                        "mtime": stat.st_mtime,
                    }
                    self.log_debug("Original")
        except (OSError, ValueError) as ex:
            self.log_warn("Failed to load uReport: {0}".format(str(ex)))
            self._move_report_to_deferred(fname)
            continue

    i = 0
    for unique in reports.values():
        i += 1
        self.log_info("[{0} / {1}] Processing unique file '{2}'"
                      .format(i, len(reports), unique["filenames"][0]))
        ureport = unique["ureport"]
        try:
            validate(ureport)
        except FafError as ex:
            self.log_warn("uReport is invalid: {0}".format(str(ex)))
            # Record operating systems that are not configured.
            if ("os" in ureport and
                    "name" in ureport["os"] and
                    ureport["os"]["name"] not in systems and
                    ureport["os"]["name"].lower() not in systems):
                self._save_unknown_opsys(db, ureport["os"])
            self._move_reports_to_deferred(unique["filenames"])
            continue

        mtime = unique["mtime"]
        timestamp = datetime.datetime.fromtimestamp(mtime)
        try:
            # count collapses all duplicate files into one saved report.
            save(db, ureport, create_component=self.create_components,
                 timestamp=timestamp, count=len(unique["filenames"]))
        except FafError as ex:
            self.log_warn("Failed to save uReport: {0}".format(str(ex)))
            self._move_reports_to_deferred(unique["filenames"])
            continue

        self._move_reports_to_saved(unique["filenames"])

    self.log_debug("Removing lock {0}".format(self.lock_filename))
    os.remove(self.lock_filename)