def test_file_reading_single(self):
    """Loading a single config file exposes its values via the settings object."""
    # Use a context manager so the temp file is closed (and deleted)
    # even if an assertion fails; the original leaked it until GC.
    with NamedTemporaryFile(mode='wt') as temp:
        temp.write(CONFIG1)
        # flush() so load_file() sees the bytes we just wrote.
        temp.flush()

        s = ConfigSettings()
        s.register_provider(Provider1)
        s.load_file(temp.name)

        self.assertEqual(s.foo.bar, 'bar_value')
def test_hash_file_known_hash(self):
    """Ensure a known hash value is recreated."""
    # NOTE(review): another test_hash_file_known_hash definition appears
    # later in this file — if both live in the same class the later one
    # shadows this; confirm and deduplicate.
    data = b'The quick brown fox jumps over the lazy cog'
    # Expected digest is the SHA-1 of ``data`` (a published SHA-1 test vector).
    expected = 'de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3'

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile() as temp:
        temp.write(data)
        temp.flush()

        actual = hash_file(temp.name)

        self.assertEqual(actual, expected)
def test_hash_file_known_hash(self):
    """Ensure a known hash value is recreated."""
    # NOTE(review): this duplicates an earlier test_hash_file_known_hash
    # definition in this file — if both live in the same class this one
    # shadows the other; confirm and deduplicate.
    data = b"The quick brown fox jumps over the lazy cog"
    # Expected digest is the SHA-1 of ``data`` (a published SHA-1 test vector).
    expected = "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3"

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile() as temp:
        temp.write(data)
        temp.flush()

        actual = hash_file(temp.name)

        self.assertEqual(actual, expected)
def test_pruning(self):
    """Ensure old warnings are removed from database appropriately."""
    db = WarningsDatabase()

    # Create 20 distinct source files, each with one warning recorded
    # against it.
    source_files = []
    for i in range(1, 21):
        temp = NamedTemporaryFile(mode='wt')
        temp.write('x' * (100 * i))
        temp.flush()

        # Keep reference so it doesn't get GC'd and deleted.
        source_files.append(temp)

        w = CompilerWarning()
        w['filename'] = temp.name
        w['line'] = 1
        w['column'] = i * 10
        w['message'] = 'irrelevant'

        db.insert(w)

    self.assertEqual(len(db), 20)

    # If we change a source file, inserting a new warning should nuke the
    # old one.
    source_files[0].write('extra')
    source_files[0].flush()

    w = CompilerWarning()
    w['filename'] = source_files[0].name
    w['line'] = 1
    w['column'] = 50
    w['message'] = 'replaced'

    db.insert(w)
    # Total count unchanged: the new warning replaced the stale one.
    self.assertEqual(len(db), 20)

    # Only the replacement warning should remain for that file.
    warnings = list(db.warnings_for_file(source_files[0].name))
    self.assertEqual(len(warnings), 1)
    self.assertEqual(warnings[0]['column'], w['column'])

    # If we delete the source file, calling prune should cause the warnings
    # to go away.
    old_filename = source_files[0].name
    # Dropping the last reference closes the NamedTemporaryFile, which
    # removes the file on disk — the assertFalse below depends on this
    # happening immediately (CPython refcounting; may differ on other
    # interpreters — TODO confirm if portability matters).
    del source_files[0]
    self.assertFalse(os.path.exists(old_filename))

    db.prune()
    self.assertEqual(len(db), 19)
def test_hash_file_large(self):
    """Ensure that hash_file seems to work with a large file."""
    # 1 MiB of data; expected digest is computed with hashlib.sha1 so the
    # test cross-checks hash_file against the stdlib implementation.
    data = b'x' * 1048576

    hasher = hashlib.sha1()
    hasher.update(data)
    expected = hasher.hexdigest()

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile() as temp:
        temp.write(data)
        temp.flush()

        actual = hash_file(temp.name)

        self.assertEqual(actual, expected)
def test_hash_file_large(self):
    """Ensure that hash_file seems to work with a large file."""
    # NOTE(review): this duplicates an earlier test_hash_file_large
    # definition in this file — if both live in the same class this one
    # shadows the other; confirm and deduplicate.
    # 1 MiB of data; expected digest is computed with hashlib.sha1 so the
    # test cross-checks hash_file against the stdlib implementation.
    data = b"x" * 1048576

    hasher = hashlib.sha1()
    hasher.update(data)
    expected = hasher.hexdigest()

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile() as temp:
        temp.write(data)
        temp.flush()

        actual = hash_file(temp.name)

        self.assertEqual(actual, expected)
def test_file_reading_multiple(self):
    """Loading multiple files has proper overwrite behavior."""
    # Both temp files are closed (and deleted) on exit even if an
    # assertion fails; the original leaked them until GC.
    with NamedTemporaryFile(mode='wt') as temp1, NamedTemporaryFile(mode='wt') as temp2:
        temp1.write(CONFIG1)
        temp1.flush()

        temp2.write(CONFIG2)
        temp2.flush()

        s = ConfigSettings()
        s.register_provider(Provider1)
        s.load_files([temp1.name, temp2.name])

        # The file loaded last wins, so CONFIG2's value is expected.
        self.assertEqual(s.foo.bar, 'value2')
def test_hashing(self):
    """Ensure that hashing files on insert works."""
    db = WarningsDatabase()

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile(mode='wt') as temp:
        temp.write('x' * 100)
        temp.flush()

        w = CompilerWarning()
        w['filename'] = temp.name
        w['line'] = 1
        w['column'] = 4
        w['message'] = 'foo bar'

        # Should not throw.
        db.insert(w)

        # Inserting a warning for a nonexistent file must fail, since the
        # file cannot be hashed.
        w['filename'] = 'DOES_NOT_EXIST'

        with self.assertRaises(Exception):
            db.insert(w)
def test_hashing(self):
    """Ensure that hashing files on insert works."""
    # NOTE(review): this duplicates an earlier test_hashing definition in
    # this file — if both live in the same class this one shadows the
    # other; confirm and deduplicate.
    db = WarningsDatabase()

    # Context manager closes and deletes the temp file deterministically;
    # the original relied on GC for cleanup.
    with NamedTemporaryFile(mode="wt") as temp:
        temp.write("x" * 100)
        temp.flush()

        w = CompilerWarning()
        w["filename"] = temp.name
        w["line"] = 1
        w["column"] = 4
        w["message"] = "foo bar"

        # Should not throw.
        db.insert(w)

        # Inserting a warning for a nonexistent file must fail, since the
        # file cannot be hashed.
        w["filename"] = "DOES_NOT_EXIST"

        with self.assertRaises(Exception):
            db.insert(w)