def _HashEntry(stat, flow, max_size=None):
    """Computes hash digests for the file referenced by a stat entry.

    Args:
        stat: A stat entry exposing GetPath() and GetSize() for the target
            file.
        flow: The owning flow; its Progress callback keeps the client
            heartbeating during long hashing runs.
        max_size: Optional cap on the number of bytes to hash. When falsy,
            the full size reported by `stat` is hashed.

    Returns:
        The hash object built by MultiHasher, or None if the file could not
        be read.
    """
    byte_limit = max_size or stat.GetSize()
    hasher = client_utils_common.MultiHasher(progress=flow.Progress)
    try:
        hasher.HashFilePath(stat.GetPath(), byte_limit)
        return hasher.GetHashObject()
    except IOError:
        # Unreadable files are reported as "no hash" rather than an error.
        return None
def testHashBufferProgress(self):
    """HashBuffer invokes the progress callback and counts bytes hashed."""
    progress_cb = mock.Mock()
    hasher = client_utils_common.MultiHasher(progress=progress_cb)

    data = os.urandom(108)
    hasher.HashBuffer(data)

    self.assertTrue(progress_cb.called)
    self.assertEqual(hasher.GetHashObject().num_bytes, len(data))
def testHashBufferSingleInput(self):
    """A single buffer produces md5/sha1/sha256 matching hashlib's output."""
    data = "foo"
    hasher = client_utils_common.MultiHasher()
    hasher.HashBuffer(data)

    hash_object = hasher.GetHashObject()
    self.assertEqual(hash_object.num_bytes, len(data))
    # All default algorithms must agree with a direct hashlib computation.
    for attr, algo in [("md5", hashlib.md5),
                       ("sha1", hashlib.sha1),
                       ("sha256", hashlib.sha256)]:
        self.assertEqual(getattr(hash_object, attr), self._GetHash(algo, data))
def testHashBufferMultiInput(self):
    """Consecutive buffers are hashed as one stream; unrequested algos stay empty."""
    hasher = client_utils_common.MultiHasher(["md5", "sha1"])
    for chunk in ("foo", "bar"):
        hasher.HashBuffer(chunk)

    hash_object = hasher.GetHashObject()
    joined = "foobar"
    self.assertEqual(hash_object.num_bytes, len(joined))
    self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, joined))
    self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, joined))
    # sha256 was not requested, so it must be unset.
    self.assertFalse(hash_object.sha256)
def testHashFilePart(self):
    """Hashing a file with a byte limit digests only that prefix.

    Writes "foobar" to a temp file, hashes only the first three bytes, and
    checks the digests equal those of "foo" alone.
    """
    tmp_path = test_lib.TempFilePath()
    # try/finally so the temp file is removed even when an assertion fails;
    # previously a failing assertion leaked the file.
    try:
        with open(tmp_path, "wb") as tmp_file:
            tmp_file.write("foobar")

        hasher = client_utils_common.MultiHasher(["md5", "sha1"])
        hasher.HashFilePath(tmp_path, len("foo"))

        hash_object = hasher.GetHashObject()
        self.assertEqual(hash_object.num_bytes, len("foo"))
        self.assertEqual(hash_object.md5, self._GetHash(hashlib.md5, "foo"))
        self.assertEqual(hash_object.sha1, self._GetHash(hashlib.sha1, "foo"))
        # sha256 was not requested, so it must be unset.
        self.assertFalse(hash_object.sha256)
    finally:
        os.remove(tmp_path)
def Hash(self, fname, stat_object, opts):
    """Computes file hashes, honoring the oversized-file policy.

    Args:
        fname: Path of the file to hash.
        stat_object: os.stat()-like result for the file; st_size is used to
            decide whether the file exceeds opts.max_size.
        opts: FileFinderHashActionOptions with max_size and
            oversized_file_policy.

    Returns:
        The hash object from MultiHasher, or None when the file is oversized
        and the policy is SKIP, or when the file cannot be read.

    Raises:
        ValueError: If opts.oversized_file_policy is not a known policy.
    """
    file_size = stat_object.st_size
    if file_size <= opts.max_size:
        max_hash_size = file_size
    else:
        policy = rdf_file_finder.FileFinderHashActionOptions.OversizedFilePolicy
        if opts.oversized_file_policy == policy.SKIP:
            return None
        elif opts.oversized_file_policy == policy.HASH_TRUNCATED:
            max_hash_size = opts.max_size
        else:
            # Previously an unrecognized policy crashed later with an opaque
            # UnboundLocalError on max_hash_size; fail explicitly instead.
            raise ValueError(
                "Unknown oversized file policy: %s" % opts.oversized_file_policy)

    hasher = client_utils_common.MultiHasher(progress=self.Progress)
    try:
        hasher.HashFilePath(fname, max_hash_size)
    except IOError:
        return None
    return hasher.GetHashObject()
def Run(self, args):
    """Fingerprints the file at args.pathspec and sends the result back.

    Collects the union of hash algorithm names requested across all tuples,
    hashes up to args.max_filesize bytes of the file, and replies with a
    FingerprintResponse carrying the digests and the byte count.
    """
    # Union of all requested hasher names, normalized to lower case.
    hash_types = {
        str(hash_name).lower()
        for t in args.tuples
        for hash_name in t.hashers
    }

    hasher = client_utils_common.MultiHasher(hash_types, progress=self.Progress)
    with vfs.VFSOpen(args.pathspec, progress_callback=self.Progress) as fd:
        hasher.HashFile(fd, args.max_filesize)

    hash_object = hasher.GetHashObject()
    self.SendReply(
        rdf_client.FingerprintResponse(
            pathspec=fd.pathspec,
            bytes_read=hash_object.num_bytes,
            hash=hash_object))