def testParseError(self):
  """Malformed suppression files raise the expected parser errors.

  A file that hits EOF inside an open suppression block must raise
  UnexpectedEofError; stray text after a closed block must raise
  ParseError.
  """
  # File truncated in the middle of a suppression block.
  truncated = textwrap.dedent("""\
      # some comment
      {
        a sample suppression
        myClass
        frame
        ...
      """)
  fake_open = lambda _: StringIO.StringIO(truncated)
  with self.assertRaises(suppressions.UnexpectedEofError):
    suppressions.ReadSuppressionsFromFile('', open=fake_open)

  # Well-formed block followed by unparsable trailing text.
  trailing_garbage = textwrap.dedent("""\
      # some comment
      {
        a sample suppression
        myClass
        frame
        ...
        another frame
      }
      this doesn't parse
      """)
  fake_open = lambda _: StringIO.StringIO(trailing_garbage)
  with self.assertRaises(suppressions.ParseError):
    suppressions.ReadSuppressionsFromFile('', open=fake_open)
def testReadFile(self):
  """ReadSuppressionsFromFile parses every block of a well-formed file."""
  two_entries = textwrap.dedent("""\
      # some comment
      {
        a sample suppression
        myClass
        frame
        ...
        another frame
      }
      # more comments
      {
        another suppression
        class*
        frame1
        frame2
        frame3
      }
      """)
  fake_open = lambda _: StringIO.StringIO(two_entries)
  parsed = suppressions.ReadSuppressionsFromFile('', open=fake_open)
  self.assertEqual(2, len(parsed))
  first, second = parsed
  # First entry: explicit class name with a wildcard frame in the stack.
  self.assertEqual('myClass', first.class_name)
  self.assertEqual(['frame', '...', 'another frame'], first._stack)
  # Second entry: wildcard class name, plain three-frame stack.
  self.assertEqual('class*', second.class_name)
  self.assertEqual(['frame1', 'frame2', 'frame3'], second._stack)
def __init__(self, supp_files):
  """Sets up default run options and loads the given suppression files.

  Args:
    supp_files: iterable of suppression-file paths to read.
  """
  self._mode = 'strict'
  self._timeout = 3600  # seconds
  self._nocleanup_on_exit = False
  # Accumulate suppressions from every listed file, in order.
  self._suppressions = []
  for path in supp_files:
    self._suppressions.extend(suppressions.ReadSuppressionsFromFile(path))
  # Remove any stale temp file left behind by a previous run.
  if os.path.exists(self.TMP_FILE):
    os.remove(self.TMP_FILE)
def __init__(self, leak_definition):
  """Initializes the JSLeakCheck object.

  Args:
    leak_definition: LeakDefinition, defines what kind of leaks to check.
  """
  self.leak_definition = leak_definition
  self._suppressions = []
  # Nothing to load when the definition names no suppressions file.
  if not self.leak_definition.suppressions:
    return
  logging.info('Reading suppressions from "%s"',
               self.leak_definition.suppressions)
  supp_path = os.path.join(os.path.dirname(__file__),
                           self.leak_definition.suppressions)
  try:
    self._suppressions = suppressions.ReadSuppressionsFromFile(supp_path)
  except suppressions.Error as e:
    # Parse failures are more serious than a missing file.
    logging.error('Could not load suppressions: %s', str(e))
  except IOError as e:
    logging.warning('Could not read suppressions file: %s', str(e))
last_hash = "" else: cur_supp += [" " * 3 + line] elif line == "{": in_suppression = True cur_supp = ["{"] elif line.find("Suppression (error hash=#") == 0: last_hash = line[25:41] # The line at the end of the file is assumed to store the URL of the report. return reports, line suppressions_root = path_utils.ScriptDir() supp_filename = os.path.join(suppressions_root, "memcheck", "suppressions.txt") common_suppressions = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = os.path.join(suppressions_root, "memcheck", "suppressions_mac.txt") mac_suppressions = suppressions.ReadSuppressionsFromFile(supp_filename) # all_reports is a map {report: list of urls containing this report} all_reports = defaultdict(list) report_hashes = {} for f in sys.argv[1:]: f_reports, url = ReadReportsFromFile(f) for (hash, report) in f_reports: all_reports[report] += [url] report_hashes[report] = hash
last_hash = "" else: cur_supp += [" " * 3 + line] elif line == "{": in_suppression = True cur_supp = ["{"] elif line.find("Suppression (error hash=#") == 0: last_hash = line[25:41] # The line at the end of the file is assumed to store the URL of the report. return reports, line suppressions_root = path_utils.ScriptDir() supp_filename = os.path.join(suppressions_root, "memcheck", "suppressions.txt") vg_common = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = os.path.join(suppressions_root, "tsan", "suppressions.txt") tsan_common = suppressions.ReadSuppressionsFromFile(supp_filename) common_suppressions = vg_common + tsan_common supp_filename = os.path.join(suppressions_root, "memcheck", "suppressions_mac.txt") vg_mac = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = os.path.join(suppressions_root, "tsan", "suppressions_mac.txt") tsan_mac = suppressions.ReadSuppressionsFromFile(supp_filename) mac_suppressions = vg_mac + tsan_mac supp_filename = os.path.join(suppressions_root, "..", "heapcheck", "suppressions.txt") heapcheck_suppressions = suppressions.ReadSuppressionsFromFile(supp_filename)
def main(argv): suppressions_root = path_utils.ScriptDir() JOIN = os.path.join supp_filename = JOIN(suppressions_root, "memcheck", "suppressions.txt") vg_common = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = JOIN(suppressions_root, "tsan", "suppressions.txt") tsan_common = suppressions.ReadSuppressionsFromFile(supp_filename) common_suppressions = vg_common + tsan_common supp_filename = JOIN(suppressions_root, "memcheck", "suppressions_mac.txt") vg_mac = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = JOIN(suppressions_root, "tsan", "suppressions_mac.txt") tsan_mac = suppressions.ReadSuppressionsFromFile(supp_filename) mac_suppressions = vg_mac + tsan_mac supp_filename = JOIN(suppressions_root, "tsan", "suppressions_win32.txt") tsan_win = suppressions.ReadSuppressionsFromFile(supp_filename) win_suppressions = tsan_win supp_filename = JOIN(suppressions_root, "..", "heapcheck", "suppressions.txt") heapcheck_suppressions = suppressions.ReadSuppressionsFromFile( supp_filename) supp_filename = JOIN(suppressions_root, "drmemory", "suppressions.txt") drmem_suppressions = suppressions.ReadSuppressionsFromFile(supp_filename) supp_filename = JOIN(suppressions_root, "drmemory", "suppressions_full.txt") drmem_full_suppressions = suppressions.ReadSuppressionsFromFile( supp_filename) # all_reports is a map {report: list of urls containing this report} all_reports = defaultdict(list) report_hashes = {} for f in argv: f_reports, url = ReadReportsFromFile(f) for (hash, report) in f_reports: all_reports[report] += [url] report_hashes[report] = hash reports_count = 0 for r in all_reports: cur_supp = common_suppressions if all([ re.search("%20Mac%20|mac_valgrind", url) for url in all_reports[r] ]): # Include mac suppressions if the report is only present on Mac cur_supp += mac_suppressions elif all([re.search("Windows%20", url) for url in all_reports[r]]): # Include win32 suppressions if the report is only present on Windows cur_supp += 
win_suppressions elif all([re.search("%20Heapcheck", url) for url in all_reports[r]]): cur_supp += heapcheck_suppressions elif all(["DrMemory%20full" in url for url in all_reports[r]]): cur_supp += drmem_suppressions + drmem_full_suppressions elif all(["DrMemory" in url for url in all_reports[r]]): cur_supp += drmem_suppressions match = False for s in cur_supp: if s.Match(r.split("\n")): match = True break if not match: reports_count += 1 print "===================================" print "This report observed at" for url in all_reports[r]: print " %s" % url print "didn't match any suppressions:" print "Suppression (error hash=#%s#):" % (report_hashes[r]) print r print "===================================" if reports_count > 0: print("%d unique reports don't match any of the suppressions" % reports_count) else: print "Congratulations! All reports are suppressed!"