Example #1
    def testCreateAndReadContainer(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11
        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                with logicalContainer.newLogicalStream("hello", 137) as w:
                    w.Write(src)

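        # Reopen the container and check that the stream decrypts with the same password.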
        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with container.Container.openURNtoContainer(container_urn) as volume:
            volume.setPassword("password")
            childVolume = volume.getChildContainer()
            images = list(childVolume.images())
            with childVolume.resolver.AFF4FactoryOpen(images[0].urn) as fd:
                txt = fd.ReadAll()
                self.assertEqual(src, txt)
Example #2
    def testCreateAndReadContainerBadPassword(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11
        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                with logicalContainer.newLogicalStream("hello", 137) as w:
                    w.Write(src)

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with container.Container.openURNtoContainer(container_urn) as volume:
            # A bare except here would also swallow self.fail(); assertRaises
            # makes the expected failure explicit.
            with self.assertRaises(Exception):
                volume.setPassword("passwor")
Example #3
    def testAppendOfEncryptedSingleChunkPlusOne(self):
        version = container.Version(0, 1, "pyaff4")
        print(self.filename)
        kb = keybag.PasswordWrappedKeyBag.create("secret")
        txt = b'a' * 512 * 1024 + b'b'
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver,
                        self.image_urn_2,
                        self.volume_urn,
                        type=lexicon.AFF4_ENCRYPTEDSTREAM_TYPE) as image:
                    image.DEBUG = True
                    image.setKeyBag(kb)
                    image.setKey(kb.unwrap_key("secret"))
                    image.Write(b'a' * 512)

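        # Reopen in random-access write mode and append a single byte at offset 512.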
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("random"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver,
                        self.image_urn_2,
                        self.volume_urn,
                        type=lexicon.AFF4_ENCRYPTEDSTREAM_TYPE) as image:
                    image.DEBUG = True
                    image.setKeyBag(kb)
                    image.setKey(kb.unwrap_key("secret"))
                    image.SeekWrite(512, 0)
                    image.Write(b'b')

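        # Reopen once more and check that both writes decrypt to the expected 513 bytes.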
        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                image_urn = zip_file.urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with resolver.AFF4FactoryOpen(self.image_urn_2) as image:
                    image.setKeyBag(kb)
                    image.DEBUG = True
                    image.setKey(kb.unwrap_key("secret"))
                    self.assertEquals(513, image.Size())
                    self.assertEquals(b'a' * 512 + b'b', image.ReadAll())
Example #4
    def read(self):
        resolver = data_store.MemoryDataStore()
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        # This is required in order to load and parse metadata from this volume
        # into a fresh empty resolver.
        with zip.ZipFile.NewZipFile(resolver, version,
                                    self.filenameA_urn) as zip_file:
            image_urn = zip_file.urn.Append(self.image_name)

            volume_urn = zip_file.urn

            with resolver.AFF4FactoryOpen(image_urn) as image:
                self.assertEquals(image.chunk_size, 512)
                self.assertEquals(image.chunks_per_segment, 1024)

                kbARN = resolver.GetUnique(volume_urn, image.urn, lex.keyBag)
                kb = keybag.KeyBag.loadFromResolver(resolver, zip_file.urn,
                                                    kbARN)
                image.setKeyBag(kb)
                image.setKey(kb.unwrap_key("password"))

                self.assertEquals(src, image.Read(len(src)))

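                # src is 137 bytes and was written 100 times, so offsets 137
                # and 548 (4 * 137) land on later repetitions of src.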
                image.SeekRead(137)
                self.assertEquals(src, image.Read(len(src)))

                # read from chunk 2
                image.SeekRead(548)
                self.assertEquals(src, image.Read(len(src)))

                self.assertEquals(len(src) * 100, image.Size())
Example #5
    def create(self):
        version = container.Version(1, 1, "pyaff4")
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filenameA_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filenameA_urn) as zip_file:
                self.volume_urn = zip_file.urn
                image_urn = self.volume_urn.Append(self.image_name)

                self.crypto_stream_arn = image_urn

                # Use default compression.
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver,
                        image_urn,
                        self.volume_urn,
                        type=lexicon.AFF4_ENCRYPTEDSTREAM_TYPE) as image:
                    image.chunk_size = 512
                    image.chunks_per_segment = 1024

                    kb = keybag.KeyBag.create("password")
                    image.setKeyBag(kb)
                    image.setKey(kb.unwrap_key("password"))

                    for i in range(100):
                        image.Write(src)

                    self.image_urn = image.urn
Example #6
    def testLargerThanBevyWrite(self):
        version = container.Version(0, 1, "pyaff4")

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, self.image_urn_2, self.volume_urn) as image:
                    image.chunk_size = 5
                    image.chunks_per_segment = 2
                    image.Write(b"abcdeabcdea")
                    self.assertEquals(b"abcde", image.Read(5))

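        # Reopen the volume and verify all 11 bytes, which span two bevies
        # (chunk_size 5 with 2 chunks per segment).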
        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                image_urn = zip_file.urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with resolver.AFF4FactoryOpen(self.image_urn_2) as image:
                    self.assertEquals(11, image.Size())
                    self.assertEqual(b"abcdeabcdea", image.ReadAll())
Example #7
    def testCreateAndReadContainerWithCertEncrypted(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.setPassword("password")
                volume.setPublicKeyCert(self.cert)
                logicalContainer = volume.getChildContainer()
                with logicalContainer.newLogicalStream("hello", 137) as w:
                    w.Write(b'a' * 512)
                    w.Write(b'b' * 512)
                    w.SeekWrite(0, 0)
                    w.Write(b'c' * 512)

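        # Reopen and decrypt with the private key rather than the password.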
        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with container.Container.openURNtoContainer(container_urn) as volume:
            volume.setPrivateKey(self.privateKey)
            childVolume = volume.getChildContainer()
            images = list(childVolume.images())
            with childVolume.resolver.AFF4FactoryOpen(images[0].urn) as fd:
                self.assertEqual(b'c' * 512, fd.Read(512))
                self.assertEqual(b'b' * 512, fd.Read(512))
Example #8
    def testAbortEncrypted(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filename)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                #volume.block_store_stream.chunks_per_segment = 1
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                logicalContainer.maxSegmentResidentSize = 512
                with logicalContainer.newLogicalStream("hello", 1024) as w:
                    w.chunks_per_segment = 1
                    w.chunk_size = 512
                    w.Write(b'a' * 512)
                    w.Write(b'b' * 512)
                    w.SeekWrite(0, 0)
                    w.Write(b'c' * 512)
                    w.Abort()

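        # After Abort() the reopened container should contain no images.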
        container_urn = rdfvalue.URN.FromFileName(self.filename)
        with container.Container.openURNtoContainer(container_urn) as volume:
            volume.setPassword("password")
            childVolume = volume.getChildContainer()
            images = list(childVolume.images())
            self.assertEquals(0, len(images))
Example #9
    def testAbortImageStreamWithSingleBevyThenSecondStream(self):
        version = container.Version(0, 1, "pyaff4")

        image_urn_3 = None

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                image_urn = self.volume_urn.Append(self.image_name)

                image_urn_2 = image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, image_urn_2, self.volume_urn) as image:
                    image.chunk_size = 3
                    image.chunks_per_segment = 2
                    image.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    image.Write(b"abcdefg")
                    image.Abort()

                self.image_urn_3 = image_urn.Append("3")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, self.image_urn_3, self.volume_urn) as image:
                    image.chunk_size = 3
                    image.chunks_per_segment = 2
                    image.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    image.Write(b"abcdefg")

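        # The aborted stream's bevy segments must be absent, while the second
        # stream's segments must still be present and readable.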
        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                for i in range(0, 2):
                    seg_arn = image_urn_2.Append("%08d" % i)
                    idx_arn = image_urn_2.Append("%08d.index" % i)
                    self.assertFalse(zip_file.ContainsMember(seg_arn))
                    self.assertFalse(zip_file.ContainsMember(idx_arn))

                for i in range(0, 2):
                    seg_arn = self.image_urn_3.Append("%08d" % i)
                    idx_arn = self.image_urn_3.Append("%08d.index" % i)
                    self.assertTrue(zip_file.ContainsMember(seg_arn))
                    self.assertTrue(zip_file.ContainsMember(idx_arn))

            with resolver.AFF4FactoryOpen(self.image_urn_3) as image:
                image.SeekRead(0, 0)
                res = image.Read(7)
                self.assertEqual(b"abcdefg", res)
        self.assertEquals(1265, os.stat(self.filename).st_size)
Example #10
    def testZipSegmenttoARN(self):
        version = container.Version(1, 1, "pyaff4")
        base_urn = rdfvalue.URN(u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18")
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//c:/foo",
            urn_from_member_name(u"/c:/foo", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//c:/foo",
            urn_from_member_name(u"/c:/foo", base_urn, version))
        self.assertEqual(u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$",
                         urn_from_member_name(u"bar/c$", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$/foo.txt",
            urn_from_member_name(u"bar/c$/foo.txt", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/bar",
            urn_from_member_name(u"/foo/bar", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/some%20file",
            urn_from_member_name(u"/foo/some file", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/some%20%20file",
            urn_from_member_name(u"/foo/some  file", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$/foo/ネコ.txt",
            urn_from_member_name(u"bar/c$/foo/ネコ.txt", base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/laptop/My%20Documents/FileSchemeURIs.doc",
            urn_from_member_name(u"laptop/My Documents/FileSchemeURIs.doc",
                                 base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/Documents%20and%20Settings/davris/FileSchemeURIs.doc",
            urn_from_member_name(
                u"/C:/Documents and Settings/davris/FileSchemeURIs.doc",
                base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//D:/Program%20Files/Viewer/startup.htm",
            urn_from_member_name(u"/D:/Program Files/Viewer/startup.htm",
                                 base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/Program%20Files/Music/Web%20Sys/main.html?REQUEST=RADIO",
            urn_from_member_name(
                u"/C:/Program Files/Music/Web Sys/main.html?REQUEST=RADIO",
                base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/applib/products/a-b/abc_9/4148.920a/media/start.swf",
            urn_from_member_name(
                u"applib/products/a-b/abc_9/4148.920a/media/start.swf",
                base_urn, version))
        self.assertEqual(
            u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/exampleㄓ.txt",
            urn_from_member_name(u"/C:/exampleㄓ.txt", base_urn, version))
Example #11
    def testCreateAndReadContainerAppendTiny(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.block_store_stream.DEBUG = True
                volume.chunk_size = 5
                volume.chunks_per_segment = 2
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                with logicalContainer.newLogicalStream("hello", 137) as w:
                    w.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    w.SeekWrite(10, 0)
                    #hexdump.hexdump(w.fd.getvalue())
                    w.Write(b'b' * 10)
                    #hexdump.hexdump(w.fd.getvalue())
                    w.Write(b'c' * 5)

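        # Reopen in append mode ("+") and overwrite the first five bytes.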
        with data_store.MemoryDataStore() as resolver:
            container_urn = rdfvalue.URN.FromFileName(self.filenameB)
            with container.Container.openURNtoContainer(container_urn,
                                                        mode="+") as volume:
                volume.block_store_stream.DEBUG = True
                volume.chunk_size = 5
                volume.chunks_per_segment = 2
                volume.setPassword("password")
                childVolume = volume.getChildContainer()
                images = list(childVolume.images())
                with childVolume.resolver.AFF4FactoryOpen(images[0].urn) as w:
                    w.SeekWrite(0, 0)
                    w.Write(b'c' * 5)

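        # Final read-back: the unwritten gap at bytes 5-9 reads as zero bytes.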
        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with container.Container.openURNtoContainer(container_urn) as volume:
            volume.block_store_stream.DEBUG = True
            volume.setPassword("password")
            childVolume = volume.getChildContainer()
            images = list(childVolume.images())
            with childVolume.resolver.AFF4FactoryOpen(images[0].urn) as fd:
                expected = b'c' * 5 + b'\0' * 5 + b'b' * 10 + b'c' * 5
                self.assertEqual(expected, fd.ReadAll())
Example #12
    def setUp(self):
        version = container.Version(1, 1, "pyaff4")
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                # Write Map image sequentially (Seek/Write method).
                with aff4_map.AFF4Map.NewAFF4Map(resolver, self.image_urn,
                                                 self.volume_urn) as image:
                    # Maps are written in random order.
                    image.SeekWrite(50)
                    image.Write(b"XX - This is the position.")

                    image.SeekWrite(0)
                    image.Write(b"00 - This is the position.")

                    # We can "overwrite" data by writing the same range again.
                    image.SeekWrite(50)
                    image.Write(b"50")

                # Test the Stream method.
                with resolver.CachePut(
                        aff4_file.AFF4MemoryStream(resolver)) as source:
                    # Fill it with data.
                    source.Write(b"AAAABBBBCCCCDDDDEEEEFFFFGGGGHHHH")

                    # Make a temporary map that defines our plan.
                    helper_map = aff4_map.AFF4Map(resolver)

                    helper_map.AddRange(4, 0, 4, source.urn)  # 0000AAAA
                    helper_map.AddRange(0, 12, 4, source.urn)  # DDDDAAAA
                    helper_map.AddRange(12, 16, 4,
                                        source.urn)  # DDDDAAAA0000EEEE

                    image_urn_2 = self.volume_urn.Append(
                        self.image_name).Append("streamed")

                    with aff4_map.AFF4Map.NewAFF4Map(resolver, image_urn_2,
                                                     self.volume_urn) as image:

                        # Now we create the real map by copying the temporary
                        # map stream.
                        image.WriteStream(helper_map)
Example #13
    def testCreateMapStream(self):
        resolver = data_store.MemoryDataStore()
        version = container.Version(1, 1, "pyaff4")
        # This is required in order to load and parse metadata from this volume
        # into a fresh empty resolver.
        with zip.ZipFile.NewZipFile(resolver, version,
                                    self.filename_urn) as zip_file:
            image_urn = zip_file.urn.Append(self.image_name)
            image_urn_2 = image_urn.Append("streamed")

        # Check the first stream.
        self.CheckImageURN(resolver, image_urn)

        # The second stream must be the same.
        self.CheckStremImageURN(resolver, image_urn_2)
Example #14
    def testAbortEncryptedImageStreamSingleBevy(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filename)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.block_store_stream.chunks_per_segment = 1
                volume.block_store_stream.DEBUG = True
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                logicalContainer.maxSegmentResidentSize = 512
                with logicalContainer.newLogicalStream("hello", 1024) as w:
                    w.chunks_per_segment = 2
                    w.chunk_size = 512
                    w.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    w.Write(b'a' * 512)
                    w.Write(b'b' * 512)
                    w.Abort()

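                # A second stream written after the abort must survive intact.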
                with logicalContainer.newLogicalStream("foo", 1024) as w:
                    w.chunks_per_segment = 2
                    w.chunk_size = 512
                    w.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    w.Write(b'd' * 512)
                    w.Write(b'e' * 512)

        container_urn = rdfvalue.URN.FromFileName(self.filename)
        with container.Container.openURNtoContainer(container_urn,
                                                    mode="+") as volume:
            volume.block_store_stream.DEBUG = True
            volume.setPassword("password")

            childVolume = volume.getChildContainer()
            images = list(childVolume.images())
            self.assertEquals(1, len(images))
            with childVolume.resolver.AFF4FactoryOpen(images[0].urn) as fd:
                self.assertEqual(b'd' * 512, fd.Read(512))
                self.assertEqual(b'e' * 512, fd.Read(512))
Example #15
    def testCreateMember(self):
        version = container.Version(1, 1, "pyaff4")
        with data_store.MemoryDataStore() as resolver:
            root_urn = rdfvalue.URN.NewURNFromFilename(self.root_path)
            with aff4_directory.AFF4Directory.NewAFF4Directory(
                    resolver, version, root_urn) as directory:

                # Check for member.
                child_urn = directory.urn.Append(self.segment_name)
                with resolver.AFF4FactoryOpen(child_urn) as child:
                    self.assertEquals(child.Read(10000), b"Hello world")

                # Check that the metadata is carried over.
                filename = resolver.Get(child_urn,
                                        lexicon.AFF4_STREAM_ORIGINAL_FILENAME)

                self.assertEquals(filename, self.root_path + self.segment_name)
Example #16
    def setUp(self):
        version = container.Version(0, 1, "pyaff4")
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                image_urn = self.volume_urn.Append(self.image_name)

                # Use default compression.
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, image_urn, self.volume_urn) as image:
                    image.chunk_size = 10
                    image.chunks_per_segment = 3

                    for i in range(100):
                        image.Write(b"Hello world %02d!" % i)

                    self.image_urn = image.urn

                # Write a snappy compressed image.
                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, self.image_urn_2,
                        self.volume_urn) as image_2:
                    image_2.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_SNAPPY)
                    image_2.Write(b"This is a test")

                # Use streaming API to write image.
                self.image_urn_3 = self.image_urn.Append("3")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver, self.image_urn_3, self.volume_urn) as image:
                    image.chunk_size = 10
                    image.chunks_per_segment = 3
                    stream = io.BytesIO()
                    for i in range(100):
                        stream.write(b"Hello world %02d!" % i)

                    stream.seek(0)
                    image.WriteStream(stream)
Example #17
    def testBevySizePlusOneWriteNoEncryption(self):
        version = container.Version(0, 1, "pyaff4")

        kb = keybag.PasswordWrappedKeyBag.create("secret")

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver,
                        self.image_urn_2,
                        self.volume_urn,
                        type=lexicon.AFF4_ENCRYPTEDSTREAM_TYPE) as image:
                    image.chunk_size = 5
                    image.chunks_per_segment = 2
                    image.setKeyBag(kb)
                    image.DEBUG = True
                    image.setKey(kb.unwrap_key("secret"))
                    image.Write(b"abcdeabcdea")
                    image.SeekRead(5, 0)
                    self.assertEqual(b"abcdea", image.Read(6))

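        # Reopen and verify that the 11 encrypted bytes decrypt correctly.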
        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                image_urn = zip_file.urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                # Register where the stream is stored so AFF4FactoryOpen can
                # find it (the original used the stale `image` binding from
                # the previous block, whose urn is self.image_urn_2).
                resolver.Set(lexicon.transient_graph, self.image_urn_2,
                             lexicon.AFF4_STORED, rdfvalue.URN(zip_file.urn))
                with resolver.AFF4FactoryOpen(self.image_urn_2) as image:
                    image.DEBUG = True
                    image.setKeyBag(kb)
                    image.setKey(kb.unwrap_key("secret"))
                    self.assertEquals(11, image.Size())
                    self.assertEqual(b"abcdeabcdea", image.ReadAll())
Example #18
    def setUp(self):
        version = container.Version(1, 1, "pyaff4")
        with data_store.MemoryDataStore() as resolver:
            root_urn = rdfvalue.URN.NewURNFromFilename(self.root_path)

            resolver.Set(lexicon.transient_graph, root_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with aff4_directory.AFF4Directory.NewAFF4Directory(
                    resolver, version, root_urn) as volume:

                segment_urn = volume.urn.Append(self.segment_name)
                with volume.CreateMember(segment_urn) as member:
                    member.Write(b"Hello world")
                    resolver.Set(
                        lexicon.transient_graph, member.urn,
                        lexicon.AFF4_STREAM_ORIGINAL_FILENAME,
                        rdfvalue.XSDString(self.root_path + self.segment_name))
Example #19
    def testBadAPIUsage(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11
        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                # Setting DEBUG after init() should fail; a bare except would
                # also swallow self.fail(), so use assertRaises instead.
                with self.assertRaises(Exception):
                    volume.setPassword("password")
                    volume.block_store_stream.DEBUG = True
                    logicalContainer = volume.getChildContainer()
Example #20
    def testCreateAndReadContainerAppendFails(self):
        version = container.Version(1, 1, "pyaff4")
        lex = lexicon.standard11

        try:
            os.unlink(self.filenameB)
        except (IOError, OSError):
            pass

        container_urn = rdfvalue.URN.FromFileName(self.filenameB)
        with data_store.MemoryDataStore() as resolver:
            with container.Container.createURN(resolver,
                                               container_urn,
                                               encryption=True) as volume:
                volume.setPassword("password")
                logicalContainer = volume.getChildContainer()
                with logicalContainer.newLogicalStream("hello", 137) as w:
                    w.setCompressionMethod(
                        lexicon.AFF4_IMAGE_COMPRESSION_STORED)
                    w.SeekWrite(512 * 1024, 0)
                    w.Write(b'b' * 512 * 1024)
                    w.Write(b'c' * 512)

        with data_store.MemoryDataStore() as resolver:
            container_urn = rdfvalue.URN.FromFileName(self.filenameB)
            with container.Container.openURNtoContainer(
                    container_urn) as volume:
                volume.setPassword("password")
                childVolume = volume.getChildContainer()
                images = list(childVolume.images())
                # Writing to a container opened without mode="+" should fail;
                # assertRaises keeps self.fail() from being swallowed by a
                # bare except.
                with self.assertRaises(Exception):
                    with childVolume.resolver.AFF4FactoryOpen(
                            images[0].urn) as w:
                        w.SeekWrite(0, 0)
                        w.Write(b'c' * 512)
Example #21
    def testSmallWriteEncryption(self):
        version = container.Version(0, 1, "pyaff4")

        kb = keybag.PasswordWrappedKeyBag.create("secret")

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                self.image_urn = self.volume_urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with aff4_image.AFF4Image.NewAFF4Image(
                        resolver,
                        self.image_urn_2,
                        self.volume_urn,
                        type=lexicon.AFF4_ENCRYPTEDSTREAM_TYPE) as image:
                    image.DEBUG = False
                    image.setKeyBag(kb)
                    image.setKey(kb.unwrap_key("secret"))
                    image.Write(b"abcd")

        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, version,
                                        self.filename_urn) as zip_file:
                image_urn = zip_file.urn.Append(self.image_name)

                self.image_urn_2 = self.image_urn.Append("2")
                with resolver.AFF4FactoryOpen(self.image_urn_2) as image:
                    image.setKeyBag(kb)
                    image.DEBUG = False
                    image.setKey(kb.unwrap_key("secret"))
                    self.assertEquals(4, image.Size())
                    self.assertEqual(b"abcd", image.ReadAll())
Example #22
    def testOpenImageByURN(self):
        resolver = data_store.MemoryDataStore()
        version = container.Version(1, 1, "pyaff4")
        # This is required in order to load and parse metadata from this volume
        # into a fresh empty resolver.
        with zip.ZipFile.NewZipFile(resolver, version,
                                    self.filename_urn) as zip_file:
            image_urn = zip_file.urn.Append(self.image_name)

        with resolver.AFF4FactoryOpen(image_urn) as image:
            self.assertEquals(image.chunk_size, 10)
            self.assertEquals(image.chunks_per_segment, 3)
            self.assertEquals(
                b"Hello world 00!Hello world 01!Hello world 02!Hello world 03!"
                + b"Hello world 04!Hello world 05!Hello worl", image.Read(100))

            self.assertEquals(1500, image.Size())

        # Now test snappy decompression.
        with resolver.AFF4FactoryOpen(self.image_urn_2) as image_2:
            self.assertEquals(
                resolver.GetUnique(zip_file.urn, image_2.urn,
                                   lexicon.AFF4_IMAGE_COMPRESSION),
                lexicon.AFF4_IMAGE_COMPRESSION_SNAPPY)

            data = image_2.Read(100)
            self.assertEquals(data, b"This is a test")

        # Now test streaming API image.
        with resolver.AFF4FactoryOpen(self.image_urn_3) as image_3:
            self.assertEquals(image_3.chunk_size, 10)
            self.assertEquals(image_3.chunks_per_segment, 3)
            self.assertEquals(
                b"Hello world 00!Hello world 01!Hello world 02!Hello world 03!"
                + b"Hello world 04!Hello world 05!Hello worl",
                image_3.Read(100))
Example #23
    def testAddRange(self):
        resolver = data_store.MemoryDataStore()
        version = container.Version(1, 1, "pyaff4")
        # This is required in order to load and parse metadata from this volume
        # into a fresh empty resolver.
        with zip.ZipFile.NewZipFile(resolver, version,
                                    self.filename_urn) as zip_file:
            image_urn = zip_file.urn.Append(self.image_name)

        with resolver.AFF4FactoryOpen(image_urn) as map:
            a = rdfvalue.URN("aff4://a")
            b = rdfvalue.URN("aff4://b")

            # First test - overlapping regions:
            map.AddRange(0, 0, 100, a)
            map.AddRange(10, 10, 100, a)

            # Should be merged into a single range.
            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 1)
            self.assertEquals(ranges[0].length, 110)

            map.Clear()

            # Repeating regions - should not be merged but first region should
            # be truncated.
            map.AddRange(0, 0, 100, a)
            map.AddRange(50, 0, 100, a)

            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 2)
            self.assertEquals(ranges[0].length, 50)

            # Inserted region. Should split existing region into three.
            map.Clear()

            map.AddRange(0, 0, 100, a)
            map.AddRange(50, 0, 10, b)

            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 3)
            self.assertEquals(ranges[0].length, 50)
            self.assertEquals(ranges[0].target_id, 0)

            self.assertEquals(ranges[1].length, 10)
            self.assertEquals(ranges[1].target_id, 1)

            self.assertEquals(ranges[2].length, 40)
            self.assertEquals(ranges[2].target_id, 0)

            # New range overwrites all the old ranges.
            map.AddRange(0, 0, 100, b)

            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 1)
            self.assertEquals(ranges[0].length, 100)
            self.assertEquals(ranges[0].target_id, 1)

            # Simulate writing contiguous regions. These should be merged into a
            # single region automatically.
            map.Clear()

            map.AddRange(0, 100, 10, a)
            map.AddRange(10, 110, 10, a)
            map.AddRange(20, 120, 10, a)
            map.AddRange(30, 130, 10, a)

            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 1)
            self.assertEquals(ranges[0].length, 40)
            self.assertEquals(ranges[0].target_id, 0)

            # Writing sparse image.
            map.Clear()

            map.AddRange(0, 100, 10, a)
            map.AddRange(30, 130, 10, a)

            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 2)
            self.assertEquals(ranges[0].length, 10)
            self.assertEquals(ranges[0].target_id, 0)
            self.assertEquals(ranges[1].length, 10)
            self.assertEquals(ranges[1].map_offset, 30)
            self.assertEquals(ranges[1].target_id, 0)

            # Now merge. Adding the missing region makes the image not sparse.
            map.AddRange(10, 110, 20, a)
            ranges = map.GetRanges()
            self.assertEquals(len(ranges), 1)
            self.assertEquals(ranges[0].length, 40)
Example #24
    def testARNtoZipSegment(self):
        version = container.Version(1, 1, "pyaff4")
        self.assertEqual(
            u"/c:/foo",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//c:/foo",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"bar/c$",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"bar/c$/foo.txt",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$/foo.txt",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/foo/bar",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/bar",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/foo/some file",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/some%20file",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/foo/some  file",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//foo/some%20%20file",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"bar/c$/foo/ネコ.txt",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/bar/c$/foo/ネコ.txt",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))

        # examples from https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/
        self.assertEqual(
            u"laptop/My Documents/FileSchemeURIs.doc",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/laptop/My%20Documents/FileSchemeURIs.doc",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/C:/Documents and Settings/davris/FileSchemeURIs.doc",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/Documents and Settings/davris/FileSchemeURIs.doc",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/D:/Program Files/Viewer/startup.htm",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//D:/Program Files/Viewer/startup.htm",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/C:/Program Files/Music/Web Sys/main.html?REQUEST=RADIO",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/Program Files/Music/Web%20Sys/main.html?REQUEST=RADIO",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"applib/products/a-b/abc_9/4148.920a/media/start.swf",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18/applib/products/a-b/abc_9/4148.920a/media/start.swf",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))
        self.assertEqual(
            u"/C:/exampleㄓ.txt",
            member_name_for_urn(
                u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18//C:/exampleㄓ.txt",
                version,
                base_urn=rdfvalue.URN(
                    u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"),
                use_unicode=True))