def testCreateAndAppendSinglePathImage(self):
    try:
        # Start from a clean slate: remove any container left over from a previous run.
        try:
            os.unlink(self.containerName)
        except OSError:
            pass

        container_urn = rdfvalue.URN.FromFileName(self.containerName)
        resolver = data_store.MemoryDataStore()
        urn = None

        # First pass: create the container and write the first fragment,
        # hashing the source stream (SHA1) as it is consumed.
        frag1path = os.path.join(self.testImagesPath,
                                 "paper-hash_based_disk_imaging_using_aff4.pdf.frag.1")

        with container.Container.createURN(resolver, container_urn) as volume:
            with open(frag1path, "rb") as src:
                stream = linear_hasher.StreamHasher(src, [lexicon.HASH_SHA1])
                urn = volume.writeLogicalStreamHashBased(frag1path, stream, 32768, False)
                for h in stream.hashes:
                    hh = hashes.newImmutableHash(h.hexdigest(), stream.hashToType[h])
                    self.assertEqual("deb3fa3b60c6107aceb97f684899387c78587eae", hh.value)
                    resolver.Add(volume.urn, urn, rdfvalue.URN(lexicon.standard.hash), hh)

        # Second pass: reopen the same container in append mode ("+") and add
        # the second fragment, this time recording both SHA1 and MD5.
        frag2path = os.path.join(self.testImagesPath,
                                 "paper-hash_based_disk_imaging_using_aff4.pdf.frag.2")

        with container.Container.openURNtoContainer(container_urn, mode="+") as volume:
            with open(frag2path, "rb") as src:
                stream = linear_hasher.StreamHasher(src, [lexicon.HASH_SHA1, lexicon.HASH_MD5])
                urn = volume.writeLogicalStreamHashBased(frag2path, stream, 2 * 32768, False)
                for h in stream.hashes:
                    hh = hashes.newImmutableHash(h.hexdigest(), stream.hashToType[h])
                    resolver.Add(volume.urn, urn, rdfvalue.URN(lexicon.standard.hash), hh)

        # Verification pass: reopen read-only and check that both logical
        # images are present, correctly named, and hash-verifiable.
        with container.Container.openURNtoContainer(container_urn) as volume:
            images = list(volume.images())
            images = sorted(images, key=lambda x: utils.SmartUnicode(x.pathName), reverse=False)
            self.assertEqual(2, len(images), "Only two logical images")

            fragmentA = escaping.member_name_for_urn(images[0].urn.value, volume.version,
                                                     base_urn=volume.urn, use_unicode=True)
            fragmentB = escaping.member_name_for_urn(images[1].urn.value, volume.version,
                                                     base_urn=volume.urn, use_unicode=True)
            self.assertTrue(fragmentA.endswith("paper-hash_based_disk_imaging_using_aff4.pdf.frag.1"))
            self.assertTrue(fragmentB.endswith("paper-hash_based_disk_imaging_using_aff4.pdf.frag.2"))

            hasher = linear_hasher.LinearHasher2(volume.resolver, self)
            for image in volume.images():
                print("\t%s <%s>" % (image.name(), image.urn))
                hasher.hash(image)

    except Exception:
        traceback.print_exc()
        self.fail()
    finally:
        # os.unlink(self.containerName)
        pass
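# Illustrative sketch only, not pyaff4 API: the test above relies on
# linear_hasher.StreamHasher to compute digests of the source file while
# writeLogicalStreamHashBased() reads from it. Conceptually that wrapper
# behaves like the hypothetical read-through hasher below, built on hashlib;
# the class name and interface here are assumptions for illustration.
import hashlib

class _SketchStreamHasher(object):
    """Hypothetical read-through hasher: forwards read() and hashes what passes by."""

    def __init__(self, src, algorithms=("sha1",)):
        self.src = src
        self.digests = {name: hashlib.new(name) for name in algorithms}

    def read(self, n=-1):
        data = self.src.read(n)
        for digest in self.digests.values():
            digest.update(data)
        return data

    def hexdigests(self):
        # e.g. {"sha1": "deb3fa3b..."} once the stream has been fully consumed
        return {name: d.hexdigest() for name, d in self.digests.items()}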
def verify(file, password):
    with container.Container.openURNtoContainer(
            rdfvalue.URN.FromFileName(file)) as volume:
        if password is not None:
            # Encrypted containers: only logical containers support a password here.
            assert not issubclass(volume.__class__, container.PhysicalImageContainer)
            volume.setPassword(password[0])
            childVolume = volume.getChildContainer()
            printVolumeInfo(file, childVolume)
            printCaseInfo(childVolume)
            resolver = childVolume.resolver
            hasher = linear_hasher.LinearHasher2(resolver, LinearVerificationListener())
            for image in childVolume.images():
                print("\t%s <%s>" % (image.name(), trimVolume(childVolume.urn, image.urn)))
                hasher.hash(image)
        else:
            printVolumeInfo(file, volume)
            printCaseInfo(volume)
            resolver = volume.resolver
            if type(volume) == container.PhysicalImageContainer:
                # Physical images are verified block-by-block against their stored block hashes.
                image = volume.image
                listener = VerificationListener()
                validator = block_hasher.Validator(listener)
                print("Verifying AFF4 File: %s" % file)
                validator.validateContainer(rdfvalue.URN.FromFileName(file))
                for result in listener.results:
                    print("\t%s" % result)
            elif type(volume) == container.LogicalImageContainer:
                # Logical images are verified with a linear hash over each image stream.
                hasher = linear_hasher.LinearHasher2(resolver, LinearVerificationListener())
                for image in volume.images():
                    print("\t%s <%s>" % (image.name(), trimVolume(volume.urn, image.urn)))
                    hasher.hash(image)
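# Example invocations of verify() above (file names are hypothetical):
#   verify("samples/logical.aff4", None)           # plain container, no password
#   verify("samples/encrypted.aff4", ["letmein"])  # password arrives as a sequence; only password[0] is used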
def verify(file):
    with container.Container.openURNtoContainer(rdfvalue.URN.FromFileName(file)) as volume:
        printVolumeInfo(file, volume)
        printCaseInfo(volume)
        resolver = volume.resolver

        if type(volume) == container.PhysicalImageContainer:
            # Physical images are verified block-by-block against their stored block hashes.
            image = volume.image
            listener = VerificationListener()
            validator = block_hasher.Validator(listener)
            print("Verifying AFF4 File: %s" % file)
            validator.validateContainer(rdfvalue.URN.FromFileName(file))
            for result in listener.results:
                print("\t%s" % result)
        elif type(volume) == container.LogicalImageContainer:
            # Logical images are verified with a linear hash over each image stream.
            hasher = linear_hasher.LinearHasher2(resolver, LinearVerificationListener())
            for image in volume.images():
                print("\t%s <%s>" % (image.name(), trimVolume(volume.urn, image.urn)))
                hasher.hash(image)
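# trimVolume() is called by both verify() variants but is not defined in this
# excerpt. A minimal sketch, assuming it merely shortens the printed image URN
# by stripping the enclosing volume's URN prefix (the real helper may differ,
# e.g. it may only trim when a terse-output flag is set):
def trimVolume(volume_urn, image_urn):
    vol = utils.SmartUnicode(volume_urn)
    img = utils.SmartUnicode(image_urn)
    if img.startswith(vol):
        return img[len(vol):]
    return img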
def testFuzz(self):
    chunksize = 512
    # Sweep stream lengths around chunk boundaries (plus a large and an empty
    # stream), combined with a range of maxSegmentResidentSize thresholds.
    for length in [chunksize - 1, chunksize, chunksize + 1,
                   chunksize * 2 - 1, chunksize * 2, chunksize * 2 + 1,
                   chunksize * 1000, 0]:
        for maxSegmentResidentSize in [0, 1, chunksize - 1, chunksize, chunksize + 1]:
            try:
                containerName = tempfile.gettempdir() + (
                    "/testfuzz-length-%d-maxresident%d.aff4" % (length, maxSegmentResidentSize))
                print(containerName)
                hasher = linear_hasher.PushHasher([lexicon.HASH_SHA1, lexicon.HASH_MD5])

                container_urn = rdfvalue.URN.FromFileName(containerName)
                with data_store.MemoryDataStore() as resolver:
                    with container.Container.createURN(resolver, container_urn) as volume:
                        volume.maxSegmentResidentSize = maxSegmentResidentSize
                        with volume.newLogicalStream("/foo", length) as writer:
                            with open("/dev/random", "rb") as randomStream:
                                writer.chunk_size = chunksize
                                writer_arn = writer.urn

                                # Feed the stream in randomly sized chunks,
                                # hashing every byte that gets written.
                                pos = 0
                                while pos < length:
                                    toread = int(min(math.ceil(1024 * random.random()),
                                                     length - pos))
                                    data = randomStream.read(toread)
                                    writer.Write(data)
                                    hasher.update(data)
                                    pos += toread

                                # write in the hashes before auto-close
                                for h in hasher.hashes:
                                    hh = hashes.newImmutableHash(h.hexdigest(),
                                                                 hasher.hashToType[h])
                                    volume.resolver.Add(volume.urn, writer_arn,
                                                        rdfvalue.URN(lexicon.standard.hash), hh)
                print()

                # Reopen the container and check that the single image is
                # present, correctly named, and hash-verifiable.
                with container.Container.openURNtoContainer(container_urn) as volume:
                    images = list(volume.images())
                    self.assertEqual(1, len(images), "Only one logical image")
                    self.assertEqual("/foo", images[0].name(),
                                     "unicode filename should be preserved")

                    fragment = escaping.member_name_for_urn(images[0].urn.value, volume.version,
                                                            base_urn=volume.urn, use_unicode=True)

                    hasher = linear_hasher.LinearHasher2(volume.resolver, self)
                    for image in volume.images():
                        hasher.hash(image)

                os.unlink(containerName)
            except Exception:
                traceback.print_exc()
                self.fail()
                continue
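# The fuzz test above pulls its payload from /dev/random, which only exists on
# Unix-like systems. A minimal, portable sketch of the same write-and-hash loop
# using os.urandom() instead; writer.Write() and hasher.update() are used
# exactly as in testFuzz, everything else here is an assumption for illustration
# (relies on the os, math and random imports the tests already need):
def _write_random_payload(writer, hasher, length):
    """Write `length` pseudo-random bytes in randomly sized chunks, hashing as we go."""
    pos = 0
    while pos < length:
        n = int(min(math.ceil(1024 * random.random()), length - pos))
        data = os.urandom(n)
        writer.Write(data)   # pyaff4 logical stream write, as in testFuzz
        hasher.update(data)  # PushHasher keeps running digests of the written bytes
        pos += n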