def save_ureport2(db, ureport, create_component=False, timestamp=None, count=1):
    """
    Save uReport2
    """

    if timestamp is None:
        timestamp = datetime.datetime.utcnow()

    osplugin = systems[ureport["os"]["name"]]
    problemplugin = problemtypes[ureport["problem"]["type"]]

    db_osrelease = get_osrelease(db, osplugin.nice_name,
                                 ureport["os"]["version"])
    if db_osrelease is None:
        raise FafError("Operating system '{0} {1}' not found in storage"
                       .format(osplugin.nice_name, ureport["os"]["version"]))

    report_hash = problemplugin.hash_ureport(ureport["problem"])
    db_report = get_report(db, report_hash)
    if db_report is None:
        component_name = problemplugin.get_component_name(ureport["problem"])
        db_component = get_component_by_name(db, component_name,
                                             osplugin.nice_name)
        if db_component is None:
            if create_component:
                log.info("Creating an unsupported component '{0}' in "
                         "operating system '{1}'".format(component_name,
                                                         osplugin.nice_name))
                db_component = OpSysComponent()
                db_component.name = component_name
                db_component.opsys = db_osrelease.opsys
                db.session.add(db_component)
            else:
                raise FafError("Unknown component '{0}' in operating system "
                               "{1}".format(component_name, osplugin.nice_name))

        db_report = Report()
        db_report.type = problemplugin.name
        db_report.first_occurrence = timestamp
        db_report.last_occurrence = timestamp
        db_report.count = 0
        db_report.component = db_component
        db.session.add(db_report)

        db_report_hash = ReportHash()
        db_report_hash.report = db_report
        db_report_hash.hash = report_hash
        db.session.add(db_report_hash)

    if db_report.first_occurrence > timestamp:
        db_report.first_occurrence = timestamp

    if db_report.last_occurrence < timestamp:
        db_report.last_occurrence = timestamp

    db_reportosrelease = get_reportosrelease(db, db_report, db_osrelease)
    if db_reportosrelease is None:
        db_reportosrelease = ReportOpSysRelease()
        db_reportosrelease.report = db_report
        db_reportosrelease.opsysrelease = db_osrelease
        db_reportosrelease.count = 0
        db.session.add(db_reportosrelease)

    db_reportosrelease.count += count

    db_arch = get_arch_by_name(db, ureport["os"]["architecture"])
    if db_arch is None:
        raise FafError("Architecture '{0}' is not supported"
                       .format(ureport["os"]["architecture"]))

    db_reportarch = get_reportarch(db, db_report, db_arch)
    if db_reportarch is None:
        db_reportarch = ReportArch()
        db_reportarch.report = db_report
        db_reportarch.arch = db_arch
        db_reportarch.count = 0
        db.session.add(db_reportarch)

    db_reportarch.count += count

    reason = ureport["reason"].encode("utf-8")
    db_reportreason = get_reportreason(db, db_report, reason)
    if db_reportreason is None:
        db_reportreason = ReportReason()
        db_reportreason.report = db_report
        db_reportreason.reason = reason
        db_reportreason.count = 0
        db.session.add(db_reportreason)

    db_reportreason.count += count

    day = timestamp.date()
    db_daily = get_history_day(db, db_report, db_osrelease, day)
    if db_daily is None:
        db_daily = ReportHistoryDaily()
        db_daily.report = db_report
        db_daily.opsysrelease = db_osrelease
        db_daily.day = day
        db_daily.count = 0
        db.session.add(db_daily)

    db_daily.count += count

    week = day - datetime.timedelta(days=day.weekday())
    db_weekly = get_history_week(db, db_report, db_osrelease, week)
    if db_weekly is None:
        db_weekly = ReportHistoryWeekly()
        db_weekly.report = db_report
        db_weekly.opsysrelease = db_osrelease
        db_weekly.week = week
        db_weekly.count = 0
        db.session.add(db_weekly)

    db_weekly.count += count

    month = day.replace(day=1)
    db_monthly = get_history_month(db, db_report, db_osrelease, month)
    if db_monthly is None:
        db_monthly = ReportHistoryMonthly()
        db_monthly.report = db_report
        db_monthly.opsysrelease = db_osrelease
        db_monthly.month = month
        db_monthly.count = 0
        db.session.add(db_monthly)

    db_monthly.count += count

    osplugin.save_ureport(db, db_report, ureport["os"], ureport["packages"],
                          count=count)
    problemplugin.save_ureport(db, db_report, ureport["problem"], count=count)

    # Update count as last, so that handlers listening to its "set" event have
    # as much information as possible
    db_report.count += count
    db.session.flush()

    problemplugin.save_ureport_post_flush()
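# Illustrative only: a minimal sketch of the uReport2 dictionary shape that
# save_ureport2 dereferences above (os name/version/architecture, problem
# type, reason, packages). The keys come straight from the function; the
# values are hypothetical examples, not taken from a real report, and the
# full schema is defined elsewhere in the project.
example_ureport2 = {
    "os": {
        "name": "fedora",          # looked up in the `systems` plugin registry
        "version": "38",           # matched against a stored OS release
        "architecture": "x86_64",  # resolved via get_arch_by_name
    },
    "problem": {
        "type": "core",            # looked up in the `problemtypes` registry
        # ...problem-type specific payload hashed by hash_ureport()...
    },
    "reason": "Process was terminated by signal 11",
    "packages": [],                # passed through to osplugin.save_ureport
}
# save_ureport2(db, example_ureport2, create_component=True)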
def run(self, cmdline, db) -> int:
    if cmdline.problemtype is None or not cmdline.problemtype:
        ptypes = list(problemtypes.keys())
    else:
        ptypes = []
        for ptype in cmdline.problemtype:
            if ptype not in problemtypes:
                self.log_warn("Problem type '{0}' is not supported"
                              .format(ptype))
                continue

            ptypes.append(ptype)

    if not ptypes:
        self.log_info("Nothing to do")
        return 1

    for i, ptype in enumerate(ptypes, start=1):
        problemtype = problemtypes[ptype]
        self.log_info("[{0} / {1}] Processing problem type '{2}'"
                      .format(i, len(ptypes), problemtype.nice_name))

        db_reports = get_reports_by_type(db, ptype)
        for j, db_report in enumerate(db_reports, start=1):
            self.log_info("  [{0} / {1}] Processing report #{2}"
                          .format(j, len(db_reports), db_report.id))

            hashes = set()
            for k, db_backtrace in enumerate(db_report.backtraces, start=1):
                self.log_debug("\t[%d / %d] Processing backtrace #%d",
                               k, len(db_report.backtraces), db_backtrace.id)
                try:
                    component = db_report.component.name
                    include_offset = ptype.lower() == "python"
                    bthash = self._hash_backtrace(db_backtrace,
                                                  hashbase=[component],
                                                  offset=include_offset)

                    self.log_debug("\t%s", bthash)
                    db_dup = get_report(db, bthash)
                    if db_dup is None:
                        self.log_info("    Adding hash '{0}'".format(bthash))
                        if bthash not in hashes:
                            db_reporthash = ReportHash()
                            db_reporthash.report = db_report
                            db_reporthash.hash = bthash
                            db.session.add(db_reporthash)
                            hashes.add(bthash)
                    elif db_dup == db_report:
                        self.log_debug("\tHash '%s' already assigned", bthash)
                    else:
                        self.log_warn(("    Conflict! Skipping hash '{0}'"
                                       " (report #{1})").format(bthash,
                                                                db_dup.id))
                except FafError as ex:
                    self.log_warn("  {0}".format(str(ex)))
                    continue

            db.session.flush()

    return 0
def process_symbol(build_id, path, offset, problem_type,
                   create_symbol_auth_key):
    db_ssource = (db.session.query(SymbolSource)
                  .filter(SymbolSource.build_id == build_id)
                  .filter(SymbolSource.path == path)
                  .filter(SymbolSource.offset == offset)
                  .first())
    if db_ssource is None:
        if (create_symbol_auth_key
                and symbol_transfer_auth_key
                and create_symbol_auth_key == symbol_transfer_auth_key
                and problem_type in ("kerneloops", "core")):

            # We need to attach our symbols to a dummy report in order to set
            # their type
            h = sha1()
            h.update("symbol_transfer_dummy".encode('utf-8'))
            h.update(problem_type.encode('utf-8'))
            dummy_report_hash = h.hexdigest()

            # The thread all our frames and symbols are going to be attached to
            db_thread = (db.session.query(ReportBtThread)
                         .join(ReportBacktrace)
                         .join(Report)
                         .join(ReportHash)
                         .filter(ReportHash.hash == dummy_report_hash)
                         .first())

            if db_thread is None:
                # Need to potentially create the whole chain of objects
                db_report = (db.session.query(Report)
                             .join(ReportHash)
                             .filter(ReportHash.hash == dummy_report_hash)
                             .first())
                if db_report is None:
                    db_report = Report()
                    db_report.type = problem_type
                    db_report.first_occurrence = datetime.datetime.fromtimestamp(0)
                    db_report.last_occurrence = db_report.first_occurrence
                    db_report.count = 0

                    # Random component
                    db_report.component = db.session.query(OpSysComponent).first()
                    db.session.add(db_report)

                    db_report_hash = ReportHash()
                    db_report_hash.hash = dummy_report_hash
                    db_report_hash.report = db_report
                    db.session.add(db_report_hash)

                db_rbt = None
                if db_report.id:
                    db_rbt = (db.session.query(ReportBacktrace)
                              .filter(ReportBacktrace.report == db_report)
                              .first())
                if db_rbt is None:
                    db_rbt = ReportBacktrace()
                    db_rbt.report = db_report
                    db_rbt.quality = -1000
                    db.session.add(db_rbt)

                db_thread = ReportBtThread()
                db_thread.backtrace = db_rbt
                # This prevents this dummy thread from being clustered
                db_thread.crashthread = False
                db.session.add(db_thread)

            db_ssource = SymbolSource()
            db_ssource.build_id = build_id
            db_ssource.path = path
            db_ssource.offset = offset
            db.session.add(db_ssource)

            max_order = 0
            if db_thread.id:
                max_order = (db.session.query(func.max(ReportBtFrame.order))
                             .filter(ReportBtFrame.thread == db_thread)
                             .scalar() or 0)
            db_frame = ReportBtFrame()
            db_frame.thread = db_thread
            db_frame.symbolsource = db_ssource
            db_frame.order = max_order + 1
            db.session.add(db_frame)

            db.session.commit()

            return {"error": "SymbolSource not found but created. Please wait."}, 202

        return {"error": "SymbolSource not found"}, 404

    if db_ssource.line_number is None:
        return {"error": "SymbolSource not yet retraced. Please wait."}, 404

    return {
        "Symbol": {
            "name": db_ssource.symbol.name,
            "nice_name": db_ssource.symbol.nice_name,
            "normalized_path": db_ssource.symbol.normalized_path,
        },
        "SymbolSource": {
            "build_id": db_ssource.build_id,
            "path": db_ssource.path,
            "offset": db_ssource.offset,
            "func_offset": db_ssource.func_offset,
            "hash": db_ssource.hash,
            "source_path": db_ssource.source_path,
            "line_number": db_ssource.line_number,
        }
    }, 200
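# Illustrative only: the dummy-report hash used above is a plain SHA-1 over a
# fixed marker string followed by the problem type, so every problem type maps
# to one stable dummy Report that repeated symbol uploads can find again. The
# helper below just restates that computation; "core" is a hypothetical input.
from hashlib import sha1

def dummy_report_hash_for(problem_type):
    h = sha1()
    h.update("symbol_transfer_dummy".encode("utf-8"))
    h.update(problem_type.encode("utf-8"))
    return h.hexdigest()

# dummy_report_hash_for("core") yields the same 40-character hex digest every
# time, which is what lets later uploads attach frames to the existing dummy
# thread instead of creating a new one.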
def save_ureport2(db, ureport, create_component=False, timestamp=None, count=1):
    """
    Save uReport2
    """

    if timestamp is None:
        timestamp = datetime.datetime.utcnow()

    osplugin = systems[ureport["os"]["name"]]
    problemplugin = problemtypes[ureport["problem"]["type"]]

    db_osrelease = get_osrelease(db, osplugin.nice_name,
                                 ureport["os"]["version"])
    if db_osrelease is None:
        raise FafError("Operating system '{0} {1}' not found in storage"
                       .format(osplugin.nice_name, ureport["os"]["version"]))

    report_hash = problemplugin.hash_ureport(ureport["problem"])
    db_report = get_report_by_hash(db, report_hash)
    if db_report is None:
        component_name = problemplugin.get_component_name(ureport["problem"])
        db_component = get_component_by_name(db, component_name,
                                             osplugin.nice_name)
        if db_component is None:
            if create_component:
                log.info("Creating an unsupported component '{0}' in "
                         "operating system '{1}'".format(component_name,
                                                         osplugin.nice_name))
                db_component = OpSysComponent()
                db_component.name = component_name
                db_component.opsys = db_osrelease.opsys
                db.session.add(db_component)
            else:
                raise FafError("Unknown component '{0}' in operating system "
                               "{1}".format(component_name, osplugin.nice_name))

        db_report = Report()
        db_report.type = problemplugin.name
        db_report.first_occurrence = timestamp
        db_report.last_occurrence = timestamp
        db_report.count = 0
        db_report.component = db_component
        db.session.add(db_report)

        db_report_hash = ReportHash()
        db_report_hash.report = db_report
        db_report_hash.hash = report_hash
        db.session.add(db_report_hash)

    if db_report.first_occurrence > timestamp:
        db_report.first_occurrence = timestamp

    if db_report.last_occurrence < timestamp:
        db_report.last_occurrence = timestamp

    db_reportosrelease = get_reportosrelease(db, db_report, db_osrelease)
    if db_reportosrelease is None:
        db_reportosrelease = ReportOpSysRelease()
        db_reportosrelease.report = db_report
        db_reportosrelease.opsysrelease = db_osrelease
        db_reportosrelease.count = 0
        db.session.add(db_reportosrelease)

    db_reportosrelease.count += count

    db_arch = get_arch_by_name(db, ureport["os"]["architecture"])
    if db_arch is None:
        raise FafError("Architecture '{0}' is not supported"
                       .format(ureport["os"]["architecture"]))

    db_reportarch = get_reportarch(db, db_report, db_arch)
    if db_reportarch is None:
        db_reportarch = ReportArch()
        db_reportarch.report = db_report
        db_reportarch.arch = db_arch
        db_reportarch.count = 0
        db.session.add(db_reportarch)

    db_reportarch.count += count

    reason = ureport["reason"].encode("utf-8")
    db_reportreason = get_reportreason(db, db_report, reason)
    if db_reportreason is None:
        db_reportreason = ReportReason()
        db_reportreason.report = db_report
        db_reportreason.reason = reason
        db_reportreason.count = 0
        db.session.add(db_reportreason)

    db_reportreason.count += count

    day = timestamp.date()
    db_daily = get_history_day(db, db_report, db_osrelease, day)
    if db_daily is None:
        db_daily = ReportHistoryDaily()
        db_daily.report = db_report
        db_daily.opsysrelease = db_osrelease
        db_daily.day = day
        db_daily.count = 0
        db.session.add(db_daily)

    db_daily.count += count

    week = day - datetime.timedelta(days=day.weekday())
    db_weekly = get_history_week(db, db_report, db_osrelease, week)
    if db_weekly is None:
        db_weekly = ReportHistoryWeekly()
        db_weekly.report = db_report
        db_weekly.opsysrelease = db_osrelease
        db_weekly.week = week
        db_weekly.count = 0
        db.session.add(db_weekly)

    db_weekly.count += count

    month = day.replace(day=1)
    db_monthly = get_history_month(db, db_report, db_osrelease, month)
    if db_monthly is None:
        db_monthly = ReportHistoryMonthly()
        db_monthly.report = db_report
        db_monthly.opsysrelease = db_osrelease
        db_monthly.month = month
        db_monthly.count = 0
        db.session.add(db_monthly)

    db_monthly.count += count

    osplugin.save_ureport(db, db_report, ureport["os"], ureport["packages"],
                          count=count)
    problemplugin.save_ureport(db, db_report, ureport["problem"], count=count)

    # Update count as last, so that handlers listening to its "set" event have
    # as much information as possible
    db_report.count += count
    db.session.flush()

    problemplugin.save_ureport_post_flush()
def run(self, cmdline, db):
    if cmdline.problemtype is None or len(cmdline.problemtype) < 1:
        ptypes = problemtypes.keys()
    else:
        ptypes = []
        for ptype in cmdline.problemtype:
            if ptype not in problemtypes:
                self.log_warn("Problem type '{0}' is not supported"
                              .format(ptype))
                continue

            ptypes.append(ptype)

    if len(ptypes) < 1:
        self.log_info("Nothing to do")
        return 0

    i = 0
    for ptype in ptypes:
        i += 1
        problemtype = problemtypes[ptype]
        self.log_info("[{0} / {1}] Processing problem type '{2}'"
                      .format(i, len(ptypes), problemtype.nice_name))

        db_reports = get_reports_by_type(db, ptype)
        j = 0
        for db_report in db_reports:
            j += 1
            self.log_info("  [{0} / {1}] Processing report #{2}"
                          .format(j, len(db_reports), db_report.id))

            hashes = set()
            k = 0
            for db_backtrace in db_report.backtraces:
                k += 1
                self.log_debug("    [{0} / {1}] Processing backtrace #{2}"
                               .format(k, len(db_report.backtraces),
                                       db_backtrace.id))
                try:
                    component = db_report.component.name
                    include_offset = ptype.lower() == "python"
                    bthash = self._hash_backtrace(db_backtrace,
                                                  hashbase=[component],
                                                  offset=include_offset)

                    self.log_debug("    {0}".format(bthash))
                    db_dup = get_report_by_hash(db, bthash)
                    if db_dup is None:
                        self.log_info("      Adding hash '{0}'"
                                      .format(bthash))
                        if bthash not in hashes:
                            db_reporthash = ReportHash()
                            db_reporthash.report = db_report
                            db_reporthash.hash = bthash
                            db.session.add(db_reporthash)
                            hashes.add(bthash)
                    elif db_dup == db_report:
                        self.log_debug("      Hash '{0}' already assigned"
                                       .format(bthash))
                    else:
                        self.log_warn(("      Conflict! Skipping hash '{0}'"
                                       " (report #{1})").format(bthash,
                                                                db_dup.id))
                except FafError as ex:
                    self.log_warn("    {0}".format(str(ex)))
                    continue

            db.session.flush()
def run(self, cmdline, db):
    if cmdline.problemtype is None or len(cmdline.problemtype) < 1:
        ptypes = problemtypes.keys()
    else:
        ptypes = []
        for ptype in cmdline.problemtype:
            if ptype not in problemtypes:
                self.log_warn(
                    "Problem type '{0}' is not supported".format(ptype))
                continue

            ptypes.append(ptype)

    if len(ptypes) < 1:
        self.log_info("Nothing to do")
        return 0

    i = 0
    for ptype in ptypes:
        i += 1
        problemtype = problemtypes[ptype]
        self.log_info("[{0} / {1}] Processing problem type '{2}'".format(
            i, len(ptypes), problemtype.nice_name))

        db_reports = get_reports_by_type(db, ptype)
        j = 0
        for db_report in db_reports:
            j += 1
            self.log_info("  [{0} / {1}] Processing report #{2}".format(
                j, len(db_reports), db_report.id))

            hashes = set()
            k = 0
            for db_backtrace in db_report.backtraces:
                k += 1
                self.log_debug(
                    "    [{0} / {1}] Processing backtrace #{2}".format(
                        k, len(db_report.backtraces), db_backtrace.id))
                try:
                    component = db_report.component.name
                    include_offset = ptype.lower() == "python"
                    bthash = self._hash_backtrace(db_backtrace,
                                                  hashbase=[component],
                                                  offset=include_offset)

                    self.log_debug("    {0}".format(bthash))
                    db_dup = get_report(db, bthash)
                    if db_dup is None:
                        self.log_info(
                            "      Adding hash '{0}'".format(bthash))
                        if bthash not in hashes:
                            db_reporthash = ReportHash()
                            db_reporthash.report = db_report
                            db_reporthash.hash = bthash
                            db.session.add(db_reporthash)
                            hashes.add(bthash)
                    elif db_dup == db_report:
                        self.log_debug(
                            "      Hash '{0}' already assigned".format(
                                bthash))
                    else:
                        self.log_warn(
                            ("      Conflict! Skipping hash '{0}'"
                             " (report #{1})").format(bthash, db_dup.id))
                except FafError as ex:
                    self.log_warn("    {0}".format(str(ex)))
                    continue

            db.session.flush()