def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    bthash1 = self._hash_koops(ureport["frames"], skip_unreliable=False)
    bthash2 = self._hash_koops(ureport["frames"], skip_unreliable=True)

    if len(db_report.backtraces) < 1:
        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db.session.add(db_backtrace)

        db_thread = ReportBtThread()
        db_thread.backtrace = db_backtrace
        db_thread.crashthread = True
        db.session.add(db_thread)

        db_bthash1 = ReportBtHash()
        db_bthash1.backtrace = db_backtrace
        db_bthash1.hash = bthash1
        db_bthash1.type = "NAMES"
        db.session.add(db_bthash1)

        if bthash2 is not None and bthash1 != bthash2:
            db_bthash2 = ReportBtHash()
            db_bthash2.backtrace = db_backtrace
            db_bthash2.hash = bthash2
            db_bthash2.type = "NAMES"
            db.session.add(db_bthash2)

        new_symbols = {}
        new_symbolsources = {}

        i = 0
        for frame in ureport["frames"]:
            # OK, this is totally ugly.
            # Frames may contain inlined functions, that would normally
            # require shifting all frames by 1 and inserting a new one.
            # There is no way to do this efficiently with SQL Alchemy
            # (you need to go one by one and flush after each) so
            # creating a space for additional frames is a huge speed
            # optimization.
            i += 10

            # nah, another hack, deals with wrong parsing
            if frame["function_name"].startswith("0x"):
                continue

            if "module_name" not in frame:
                module = "vmlinux"
            else:
                module = frame["module_name"]

            db_symbol = get_symbol_by_name_path(db, frame["function_name"],
                                                module)
            if db_symbol is None:
                key = (frame["function_name"], module)
                if key in new_symbols:
                    db_symbol = new_symbols[key]
                else:
                    db_symbol = Symbol()
                    db_symbol.name = frame["function_name"]
                    db_symbol.normalized_path = module
                    db.session.add(db_symbol)
                    new_symbols[key] = db_symbol

            # this doesn't work well. on 64bit, kernel maps to
            # the end of address space (64bit unsigned), but in
            # postgres bigint is 64bit signed and can't save
            # the value - let's just map it to signed
            if frame["address"] >= (1 << 63):
                address = frame["address"] - (1 << 64)
            else:
                address = frame["address"]

            db_symbolsource = get_ssource_by_bpo(db, ureport["version"],
                                                 module, address)
            if db_symbolsource is None:
                key = (ureport["version"], module, address)
                if key in new_symbolsources:
                    db_symbolsource = new_symbolsources[key]
                else:
                    db_symbolsource = SymbolSource()
                    db_symbolsource.path = module
                    db_symbolsource.offset = address
                    db_symbolsource.func_offset = frame["function_offset"]
                    db_symbolsource.symbol = db_symbol
                    db_symbolsource.build_id = ureport["version"]
                    db.session.add(db_symbolsource)
                    new_symbolsources[key] = db_symbolsource

            db_frame = ReportBtFrame()
            db_frame.thread = db_thread
            db_frame.order = i
            db_frame.symbolsource = db_symbolsource
            db_frame.inlined = False
            db_frame.reliable = frame["reliable"]
            db.session.add(db_frame)

        for taintflag in ureport["taint_flags"]:
            db_taintflag = get_taint_flag_by_ureport_name(db, taintflag)
            if db_taintflag is None:
                self.log_warn("Skipping unsupported taint flag '{0}'"
                              .format(taintflag))
                continue

            db_bttaintflag = ReportBtTaintFlag()
            db_bttaintflag.backtrace = db_backtrace
            db_bttaintflag.taintflag = db_taintflag
            db.session.add(db_bttaintflag)

        if "modules" in ureport:
            new_modules = {}

            # use set() to remove duplicates
            for module in set(ureport["modules"]):
                idx = module.find("(")
                if idx >= 0:
                    module = module[:idx]

                db_module = get_kernelmodule_by_name(db, module)
                if db_module is None:
                    if module in new_modules:
                        db_module = new_modules[module]
                    else:
                        db_module = KernelModule()
                        db_module.name = module
                        db.session.add(db_module)
                        new_modules[module] = db_module

                db_btmodule = ReportBtKernelModule()
                db_btmodule.kernelmodule = db_module
                db_btmodule.backtrace = db_backtrace
                db.session.add(db_btmodule)

    # do not overwrite an existing oops
    if not db_report.has_lob("oops"):
        # do not append here, but create a new dict
        # we only want save_ureport_post_flush to process the most
        # recently saved report
        self.add_lob = {db_report: ureport["raw_oops"].encode("utf-8")}

    if flush:
        db.session.flush()
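# NOTE: standalone sketch, not part of the module above. It only illustrates
# the signed/unsigned mapping that save_ureport() applies to kernel addresses
# before storing them in a PostgreSQL bigint (signed 64-bit) column. The
# helper names below (to_signed64, to_unsigned64) are hypothetical.

def to_signed64(address):
    """Map an unsigned 64-bit kernel address into signed bigint range."""
    if address >= (1 << 63):
        return address - (1 << 64)
    return address

def to_unsigned64(value):
    """Inverse mapping, e.g. when reading the stored offset back out."""
    if value < 0:
        return value + (1 << 64)
    return value

assert to_signed64(0xffffffff81000000) == 0xffffffff81000000 - (1 << 64)
assert to_unsigned64(to_signed64(0xffffffff81000000)) == 0xffffffff81000000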
def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    crashframe = ureport["stacktrace"][0]
    if "special_function" in crashframe:
        crashfn = "<{0}>".format(crashframe["special_function"])
    else:
        crashfn = crashframe["function_name"]

    if not db_report.errname:
        db_report.errname = ureport["exception_name"]
    elif ureport["exception_name"] and \
            (ureport["exception_name"][0] in ascii_uppercase or
             "." in ureport["exception_name"]):
        # Only overwrite errname if the new one begins with an uppercase
        # letter or contains a ".", i.e. is probably a valid exception type
        db_report.errname = ureport["exception_name"]

    db_reportexe = get_reportexe(db, db_report, crashframe["file_name"])
    if db_reportexe is None:
        db_reportexe = ReportExecutable()
        db_reportexe.report = db_report
        db_reportexe.path = crashframe["file_name"]
        db_reportexe.count = 0
        db.session.add(db_reportexe)

    db_reportexe.count += count

    bthash = self._hash_traceback(ureport["stacktrace"])

    if not db_report.backtraces:
        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db_backtrace.crashfn = crashfn
        db.session.add(db_backtrace)

        db_bthash = ReportBtHash()
        db_bthash.type = "NAMES"
        db_bthash.hash = bthash
        db_bthash.backtrace = db_backtrace

        db_thread = ReportBtThread()
        db_thread.backtrace = db_backtrace
        db_thread.crashthread = True
        db.session.add(db_thread)

        new_symbols = {}
        new_symbolsources = {}

        i = 0
        for frame in ureport["stacktrace"]:
            i += 1

            if "special_function" in frame:
                function_name = "<{0}>".format(frame["special_function"])
            else:
                function_name = frame["function_name"]

            if "special_file" in frame:
                file_name = "<{0}>".format(frame["special_file"])
            else:
                file_name = frame["file_name"]

            norm_path = get_libname(file_name)

            db_symbol = get_symbol_by_name_path(db, function_name, norm_path)
            if db_symbol is None:
                key = (function_name, norm_path)
                if key in new_symbols:
                    db_symbol = new_symbols[key]
                else:
                    db_symbol = Symbol()
                    db_symbol.name = function_name
                    db_symbol.normalized_path = norm_path
                    db.session.add(db_symbol)
                    new_symbols[key] = db_symbol

            db_symbolsource = get_symbolsource(db, db_symbol, file_name,
                                               frame["file_line"])
            if db_symbolsource is None:
                key = (function_name, file_name, frame["file_line"])
                if key in new_symbolsources:
                    db_symbolsource = new_symbolsources[key]
                else:
                    db_symbolsource = SymbolSource()
                    db_symbolsource.path = file_name
                    db_symbolsource.offset = frame["file_line"]
                    db_symbolsource.source_path = file_name
                    db_symbolsource.symbol = db_symbol

                    if "line_contents" in frame:
                        db_symbolsource.srcline = frame["line_contents"]

                    if "file_line" in frame:
                        db_symbolsource.line_number = frame["file_line"]

                    db.session.add(db_symbolsource)
                    new_symbolsources[key] = db_symbolsource

            db_frame = ReportBtFrame()
            db_frame.order = i
            db_frame.inlined = False
            db_frame.symbolsource = db_symbolsource
            db_frame.thread = db_thread
            db.session.add(db_frame)

    if flush:
        db.session.flush()
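# NOTE: standalone sketch of the errname heuristic used above: an existing
# errname is only overwritten when the incoming exception_name starts with an
# uppercase letter or contains a ".", i.e. looks like a real exception type.
# looks_like_exception_type() is a hypothetical helper, not part of the module.

from string import ascii_uppercase

def looks_like_exception_type(name):
    return bool(name) and (name[0] in ascii_uppercase or "." in name)

assert looks_like_exception_type("ZeroDivisionError")
assert looks_like_exception_type("requests.exceptions.HTTPError")
assert not looks_like_exception_type("timeout")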
def save_ureport(self, db, db_report, ureport, flush=False):
    db_report.errname = str(ureport["signal"])

    db_reportexe = get_reportexe(db, db_report, ureport["executable"])
    if db_reportexe is None:
        db_reportexe = ReportExecutable()
        db_reportexe.path = ureport["executable"]
        db_reportexe.report = db_report
        db_reportexe.count = 0
        db.session.add(db_reportexe)

    db_reportexe.count += 1

    bthashes = self._hash_backtrace(ureport["stacktrace"])
    if len(bthashes) < 1:
        raise FafError("Unable to get backtrace hash")

    # materialize the filter so len() and pop() work
    bts = list(filter(None, set(get_backtrace_by_hash(db, b)
                                for b in bthashes)))
    if len(bts) > 1:
        raise FafError("Unable to reliably identify backtrace by hash")

    if len(bts) == 1:
        db_backtrace = bts.pop()
    else:
        new_symbols = {}
        new_symbolsources = {}

        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db.session.add(db_backtrace)

        for bthash in bthashes:
            db_bthash = ReportBtHash()
            db_bthash.backtrace = db_backtrace
            db_bthash.type = "NAMES"
            db_bthash.hash = bthash
            db.session.add(db_bthash)

        tid = 0
        for thread in ureport["stacktrace"]:
            tid += 1

            crash = "crash_thread" in thread and thread["crash_thread"]
            db_thread = ReportBtThread()
            db_thread.backtrace = db_backtrace
            db_thread.number = tid
            db_thread.crashthread = crash
            db.session.add(db_thread)

            fid = 0
            for frame in thread["frames"]:
                # OK, this is totally ugly.
                # Frames may contain inlined functions, that would normally
                # require shifting all frames by 1 and inserting a new one.
                # There is no way to do this efficiently with SQL Alchemy
                # (you need to go one by one and flush after each) so
                # creating a space for additional frames is a huge speed
                # optimization.
                fid += 10

                if "build_id" in frame:
                    build_id = frame["build_id"]
                else:
                    build_id = None

                if "fingerprint" in frame:
                    fingerprint = frame["fingerprint"]
                else:
                    fingerprint = None

                path = os.path.abspath(frame["file_name"])
                offset = frame["build_id_offset"]

                db_symbol = None
                if "function_name" in frame:
                    norm_path = get_libname(path)

                    db_symbol = \
                        get_symbol_by_name_path(db, frame["function_name"],
                                                norm_path)
                    if db_symbol is None:
                        key = (frame["function_name"], norm_path)
                        if key in new_symbols:
                            db_symbol = new_symbols[key]
                        else:
                            db_symbol = Symbol()
                            db_symbol.name = frame["function_name"]
                            db_symbol.normalized_path = norm_path
                            db.session.add(db_symbol)
                            new_symbols[key] = db_symbol

                db_symbolsource = get_ssource_by_bpo(db, build_id, path,
                                                     offset)
                if db_symbolsource is None:
                    key = (build_id, path, offset)
                    if key in new_symbolsources:
                        db_symbolsource = new_symbolsources[key]
                    else:
                        db_symbolsource = SymbolSource()
                        db_symbolsource.symbol = db_symbol
                        db_symbolsource.build_id = build_id
                        db_symbolsource.path = path
                        db_symbolsource.offset = offset
                        db_symbolsource.hash = fingerprint
                        db.session.add(db_symbolsource)
                        new_symbolsources[key] = db_symbolsource

                db_frame = ReportBtFrame()
                db_frame.thread = db_thread
                db_frame.order = fid
                db_frame.symbolsource = db_symbolsource
                db_frame.inlined = False
                db.session.add(db_frame)

    if flush:
        db.session.flush()
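# NOTE: standalone sketch of why frame order advances in steps of 10 above:
# an inlined frame discovered later can be slotted between two existing
# frames by picking an intermediate order value, without renumbering (and
# re-flushing) every following row. The orders list is illustrative only.

orders = [10, 20, 30]          # as assigned by the loop above
inlined_order = orders[0] + 5  # lands between the first and second frame
orders.insert(1, inlined_order)
assert orders == [10, 15, 20, 30] and sorted(orders) == orders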
def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    # at the moment we only send crash thread
    # we may need to identify the crash thread in the future
    crashthread = ureport["threads"][0]

    crashfn = None
    for frame in crashthread["frames"]:
        if not frame["is_exception"]:
            crashfn = frame["name"]
            break

    if crashfn is not None and "." in crashfn:
        crashfn = crashfn.rsplit(".", 1)[1]

    errname = None
    for frame in crashthread["frames"]:
        if frame["is_exception"]:
            errname = frame["name"]
            break

    if "." in errname:
        errname = errname.rsplit(".", 1)[1]

    db_report.errname = errname

    bthash = self._hash_backtrace(ureport["threads"])

    if not db_report.backtraces:
        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db_backtrace.crashfn = crashfn
        db.session.add(db_backtrace)

        db_bthash = ReportBtHash()
        db_bthash.type = "NAMES"
        db_bthash.hash = bthash
        db_bthash.backtrace = db_backtrace

        new_symbols = {}
        new_symbolsources = {}

        j = 0
        for thread in ureport["threads"]:
            j += 1

            db_thread = ReportBtThread()
            db_thread.backtrace = db_backtrace
            db_thread.crashthread = thread == crashthread
            db_thread.number = j
            db.session.add(db_thread)

            i = 0
            for frame in thread["frames"]:
                i += 1

                function_name = frame["name"]

                if "class_path" in frame:
                    file_name = frame["class_path"]
                elif frame["is_exception"]:
                    file_name = JavaProblem.exception
                elif frame["is_native"]:
                    file_name = JavaProblem.native
                else:
                    file_name = JavaProblem.unknown

                if "file_line" in frame:
                    file_line = frame["file_line"]
                else:
                    file_line = 0

                db_symbol = get_symbol_by_name_path(db, function_name,
                                                    file_name)
                if db_symbol is None:
                    key = (function_name, file_name)
                    if key in new_symbols:
                        db_symbol = new_symbols[key]
                    else:
                        db_symbol = Symbol()
                        db_symbol.name = function_name
                        db_symbol.normalized_path = file_name
                        db.session.add(db_symbol)
                        new_symbols[key] = db_symbol

                db_symbolsource = get_symbolsource(db, db_symbol, file_name,
                                                   file_line)
                if db_symbolsource is None:
                    key = (function_name, file_name, file_line)
                    if key in new_symbolsources:
                        db_symbolsource = new_symbolsources[key]
                    else:
                        db_symbolsource = SymbolSource()
                        db_symbolsource.path = file_name
                        db_symbolsource.offset = file_line

                        if "file_name" in frame:
                            db_symbolsource.source_path = frame["file_name"]

                        db_symbolsource.line_number = file_line
                        db_symbolsource.symbol = db_symbol
                        db.session.add(db_symbolsource)
                        new_symbolsources[key] = db_symbolsource

                db_frame = ReportBtFrame()
                db_frame.order = i
                db_frame.inlined = False
                db_frame.symbolsource = db_symbolsource
                db_frame.thread = db_thread
                db.session.add(db_frame)

    if flush:
        db.session.flush()
def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    db_report.errname = str(ureport["signal"])

    db_reportexe = get_reportexe(db, db_report, ureport["executable"])
    if db_reportexe is None:
        db_reportexe = ReportExecutable()
        db_reportexe.path = ureport["executable"]
        db_reportexe.report = db_report
        db_reportexe.count = 0
        db.session.add(db_reportexe)

    db_reportexe.count += count

    bthashes = self._hash_backtrace(ureport["stacktrace"])
    if not bthashes:
        raise FafError("Unable to get backtrace hash")

    if not db_report.backtraces:
        new_symbols = {}
        new_symbolsources = {}

        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db.session.add(db_backtrace)

        for bthash in bthashes:
            db_bthash = ReportBtHash()
            db_bthash.backtrace = db_backtrace
            db_bthash.type = "NAMES"
            db_bthash.hash = bthash
            db.session.add(db_bthash)

        tid = 0
        for thread in ureport["stacktrace"]:
            tid += 1

            crash = "crash_thread" in thread and thread["crash_thread"]
            db_thread = ReportBtThread()
            db_thread.backtrace = db_backtrace
            db_thread.number = tid
            db_thread.crashthread = crash
            db.session.add(db_thread)

            fid = 0
            for frame in thread["frames"]:
                # OK, this is totally ugly.
                # Frames may contain inlined functions, that would normally
                # require shifting all frames by 1 and inserting a new one.
                # There is no way to do this efficiently with SQL Alchemy
                # (you need to go one by one and flush after each) so
                # creating a space for additional frames is a huge speed
                # optimization.
                fid += 10

                if "build_id" in frame:
                    build_id = frame["build_id"]
                else:
                    build_id = None

                if "fingerprint" in frame:
                    fingerprint = frame["fingerprint"]
                else:
                    fingerprint = None

                path = os.path.abspath(frame["file_name"])
                offset = frame["build_id_offset"]

                db_symbol = None
                if "function_name" in frame:
                    norm_path = get_libname(path)

                    db_symbol = \
                        get_symbol_by_name_path(db, frame["function_name"],
                                                norm_path)
                    if db_symbol is None:
                        key = (frame["function_name"], norm_path)
                        if key in new_symbols:
                            db_symbol = new_symbols[key]
                        else:
                            db_symbol = Symbol()
                            db_symbol.name = frame["function_name"]
                            db_symbol.normalized_path = norm_path
                            db.session.add(db_symbol)
                            new_symbols[key] = db_symbol

                db_symbolsource = get_ssource_by_bpo(db, build_id, path,
                                                     offset)
                if db_symbolsource is None:
                    key = (build_id, path, offset)
                    if key in new_symbolsources:
                        db_symbolsource = new_symbolsources[key]
                    else:
                        db_symbolsource = SymbolSource()
                        db_symbolsource.symbol = db_symbol
                        db_symbolsource.build_id = build_id
                        db_symbolsource.path = path
                        db_symbolsource.offset = offset
                        db_symbolsource.hash = fingerprint
                        db.session.add(db_symbolsource)
                        new_symbolsources[key] = db_symbolsource

                db_frame = ReportBtFrame()
                db_frame.thread = db_thread
                db_frame.order = fid
                db_frame.symbolsource = db_symbolsource
                db_frame.inlined = False
                db.session.add(db_frame)

    if flush:
        db.session.flush()
def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    # at the moment we only send crash thread
    # we may need to identify the crash thread in the future
    crashthread = ureport["threads"][0]

    crashfn = None
    for frame in crashthread["frames"]:
        if not frame["is_exception"]:
            crashfn = frame["name"]
            break

    if crashfn is not None and "." in crashfn:
        crashfn = crashfn.rsplit(".", 1)[1]

    errname = None
    for frame in crashthread["frames"]:
        if frame["is_exception"]:
            errname = frame["name"]
            break

    if "." in errname:
        errname = errname.rsplit(".", 1)[1]

    db_report.errname = errname

    bthash = self._hash_backtrace(ureport["threads"])

    if len(db_report.backtraces) < 1:
        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db_backtrace.crashfn = crashfn
        db.session.add(db_backtrace)

        db_bthash = ReportBtHash()
        db_bthash.type = "NAMES"
        db_bthash.hash = bthash
        db_bthash.backtrace = db_backtrace

        new_symbols = {}
        new_symbolsources = {}

        j = 0
        for thread in ureport["threads"]:
            j += 1

            db_thread = ReportBtThread()
            db_thread.backtrace = db_backtrace
            db_thread.crashthread = thread == crashthread
            db_thread.number = j
            db.session.add(db_thread)

            i = 0
            for frame in thread["frames"]:
                i += 1

                function_name = frame["name"]

                if "class_path" in frame:
                    file_name = frame["class_path"]
                elif frame["is_exception"]:
                    file_name = JavaProblem.exception
                elif frame["is_native"]:
                    file_name = JavaProblem.native
                else:
                    file_name = JavaProblem.unknown

                if "file_line" in frame:
                    file_line = frame["file_line"]
                else:
                    file_line = 0

                db_symbol = get_symbol_by_name_path(db, function_name,
                                                    file_name)
                if db_symbol is None:
                    key = (function_name, file_name)
                    if key in new_symbols:
                        db_symbol = new_symbols[key]
                    else:
                        db_symbol = Symbol()
                        db_symbol.name = function_name
                        db_symbol.normalized_path = file_name
                        db.session.add(db_symbol)
                        new_symbols[key] = db_symbol

                db_symbolsource = get_symbolsource(db, db_symbol, file_name,
                                                   file_line)
                if db_symbolsource is None:
                    key = (function_name, file_name, file_line)
                    if key in new_symbolsources:
                        db_symbolsource = new_symbolsources[key]
                    else:
                        db_symbolsource = SymbolSource()
                        db_symbolsource.path = file_name
                        db_symbolsource.offset = file_line

                        if "file_name" in frame:
                            db_symbolsource.source_path = frame["file_name"]

                        db_symbolsource.line_number = file_line
                        db_symbolsource.symbol = db_symbol
                        db.session.add(db_symbolsource)
                        new_symbolsources[key] = db_symbolsource

                db_frame = ReportBtFrame()
                db_frame.order = i
                db_frame.inlined = False
                db_frame.symbolsource = db_symbolsource
                db_frame.thread = db_thread
                db.session.add(db_frame)

    if flush:
        db.session.flush()
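# NOTE: standalone sketch of the name shortening used in both Java variants
# above: fully qualified Java names are reduced to their last component with
# rsplit(".", 1). The example values are illustrative only.

crashfn = "org.example.Widget.render"
errname = "java.lang.NullPointerException"

if "." in crashfn:
    crashfn = crashfn.rsplit(".", 1)[1]
if "." in errname:
    errname = errname.rsplit(".", 1)[1]

assert (crashfn, errname) == ("render", "NullPointerException")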
def process_symbol(build_id, path, offset, problem_type,
                   create_symbol_auth_key) -> Tuple[Dict[str, Any], int]:
    db_ssource = (db.session.query(SymbolSource)
                  .filter(SymbolSource.build_id == build_id)
                  .filter(SymbolSource.path == path)
                  .filter(SymbolSource.offset == offset)
                  .first())
    if db_ssource is None:
        if (create_symbol_auth_key
                and symbol_transfer_auth_key
                and create_symbol_auth_key == symbol_transfer_auth_key
                and problem_type in ("kerneloops", "core")):

            # We need to attach our symbols to a dummy report in order to set
            # their type
            h = sha1()
            h.update("symbol_transfer_dummy".encode('utf-8'))
            h.update(problem_type.encode('utf-8'))
            dummy_report_hash = h.hexdigest()

            # The thread all our frames and symbols are going to be attached to
            db_thread = (db.session.query(ReportBtThread)
                         .join(ReportBacktrace)
                         .join(Report)
                         .join(ReportHash)
                         .filter(ReportHash.hash == dummy_report_hash)
                         .first())
            if db_thread is None:
                # Need to potentially create the whole chain of objects
                db_report = (db.session.query(Report)
                             .join(ReportHash)
                             .filter(ReportHash.hash == dummy_report_hash)
                             .first())
                if db_report is None:
                    db_report = Report()
                    db_report.type = problem_type
                    db_report.first_occurence = datetime.datetime.fromtimestamp(0)
                    db_report.last_occurence = db_report.first_occurence
                    db_report.count = 0
                    # Random component
                    db_report.component = db.session.query(OpSysComponent).first()
                    db.session.add(db_report)

                    db_report_hash = ReportHash()
                    db_report_hash.hash = dummy_report_hash
                    db_report_hash.report = db_report
                    db.session.add(db_report_hash)

                db_rbt = None
                if db_report.id:
                    db_rbt = (db.session.query(ReportBacktrace)
                              .filter(ReportBacktrace.report == db_report)
                              .first())

                if db_rbt is None:
                    db_rbt = ReportBacktrace()
                    db_rbt.report = db_report
                    db_rbt.quality = -1000
                    db.session.add(db_rbt)

                db_thread = ReportBtThread()
                db_thread.backtrace = db_rbt
                # This prevents this dummy thread from being clustered
                db_thread.crashthread = False
                db.session.add(db_thread)

            db_ssource = SymbolSource()
            db_ssource.build_id = build_id
            db_ssource.path = path
            db_ssource.offset = offset
            db.session.add(db_ssource)

            max_order = 0
            if db_thread.id:
                max_order = (db.session.query(func.max(ReportBtFrame.order))
                             .filter(ReportBtFrame.thread == db_thread)
                             .scalar() or 0)

            db_frame = ReportBtFrame()
            db_frame.thread = db_thread
            db_frame.symbolsource = db_ssource
            db_frame.order = max_order + 1
            db.session.add(db_frame)

            db.session.commit()

            return {"error": "SymbolSource not found but created. Please wait."}, 202

        return {"error": "SymbolSource not found"}, 404

    if db_ssource.line_number is None:
        return {"error": "SymbolSource not yet retraced. Please wait."}, 404

    return {
        "Symbol": {
            "name": db_ssource.symbol.name,
            "nice_name": db_ssource.symbol.nice_name,
            "normalized_path": db_ssource.symbol.normalized_path,
        },
        "SymbolSource": {
            "build_id": db_ssource.build_id,
            "path": db_ssource.path,
            "offset": db_ssource.offset,
            "func_offset": db_ssource.func_offset,
            "hash": db_ssource.hash,
            "source_path": db_ssource.source_path,
            "line_number": db_ssource.line_number,
        }
    }, 200
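# NOTE: standalone sketch showing how the dummy report that anchors transferred
# symbols is identified: its ReportHash is the SHA-1 of a fixed prefix plus the
# problem type, exactly as computed in process_symbol() above. The commented
# call at the end is illustrative; the build id, path, offset and auth key are
# made-up values.

from hashlib import sha1

def dummy_report_hash_for(problem_type):
    h = sha1()
    h.update("symbol_transfer_dummy".encode("utf-8"))
    h.update(problem_type.encode("utf-8"))
    return h.hexdigest()

assert dummy_report_hash_for("kerneloops") != dummy_report_hash_for("core")

# payload, status = process_symbol("0123abcd", "/usr/bin/example", 4242,
#                                  "core", create_symbol_auth_key=None)
# status is 200 with Symbol/SymbolSource data, 202 when a placeholder was just
# created, or 404 when the source is unknown or not yet retraced.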
def save_ureport(self, db, db_report, ureport, flush=False, count=1):
    crashframe = ureport["stacktrace"][0]
    if "special_function" in crashframe:
        crashfn = "<{0}>".format(crashframe["special_function"])
    else:
        crashfn = crashframe["function_name"]

    db_report.errname = ureport["exception_name"]

    db_reportexe = get_reportexe(db, db_report, crashframe["file_name"])
    if db_reportexe is None:
        db_reportexe = ReportExecutable()
        db_reportexe.report = db_report
        db_reportexe.path = crashframe["file_name"]
        db_reportexe.count = 0
        db.session.add(db_reportexe)

    db_reportexe.count += count

    bthash = self._hash_traceback(ureport["stacktrace"])

    if len(db_report.backtraces) < 1:
        db_backtrace = ReportBacktrace()
        db_backtrace.report = db_report
        db_backtrace.crashfn = crashfn
        db.session.add(db_backtrace)

        db_bthash = ReportBtHash()
        db_bthash.type = "NAMES"
        db_bthash.hash = bthash
        db_bthash.backtrace = db_backtrace

        db_thread = ReportBtThread()
        db_thread.backtrace = db_backtrace
        db_thread.crashthread = True
        db.session.add(db_thread)

        new_symbols = {}
        new_symbolsources = {}

        i = 0
        for frame in ureport["stacktrace"]:
            i += 1

            if "special_function" in frame:
                function_name = "<{0}>".format(frame["special_function"])
            else:
                function_name = frame["function_name"]

            if "special_file" in frame:
                file_name = "<{0}>".format(frame["special_file"])
            else:
                file_name = frame["file_name"]

            norm_path = get_libname(file_name)

            db_symbol = get_symbol_by_name_path(db, function_name, norm_path)
            if db_symbol is None:
                key = (function_name, norm_path)
                if key in new_symbols:
                    db_symbol = new_symbols[key]
                else:
                    db_symbol = Symbol()
                    db_symbol.name = function_name
                    db_symbol.normalized_path = norm_path
                    db.session.add(db_symbol)
                    new_symbols[key] = db_symbol

            db_symbolsource = get_symbolsource(db, db_symbol, file_name,
                                               frame["file_line"])
            if db_symbolsource is None:
                key = (function_name, file_name, frame["file_line"])
                if key in new_symbolsources:
                    db_symbolsource = new_symbolsources[key]
                else:
                    db_symbolsource = SymbolSource()
                    db_symbolsource.path = file_name
                    db_symbolsource.offset = frame["file_line"]
                    db_symbolsource.source_path = file_name
                    db_symbolsource.symbol = db_symbol

                    if "line_contents" in frame:
                        db_symbolsource.srcline = frame["line_contents"]

                    if "file_line" in frame:
                        db_symbolsource.line_number = frame["file_line"]

                    db.session.add(db_symbolsource)
                    new_symbolsources[key] = db_symbolsource

            db_frame = ReportBtFrame()
            db_frame.order = i
            db_frame.inlined = False
            db_frame.symbolsource = db_symbolsource
            db_frame.thread = db_thread
            db.session.add(db_frame)

    if flush:
        db.session.flush()