Example 1
    def identifyURN(urn, resolver=None):
        if resolver is None:
            if data_store.HAS_HDT:
                resolver = data_store.HDTAssistedDataStore(lexicon.standard)
            else:
                resolver = data_store.MemoryDataStore(lexicon.standard)

        with resolver as resolver:
            with zip.ZipFile.NewZipFile(resolver, Version(0,1,"pyaff4"), urn) as zip_file:
                if len(list(zip_file.members.keys())) == 0:
                    # An empty zip has no members, so this is not an AFF4 volume
                    raise IOError("Not an AFF4 Volume")

                try:
                    with zip_file.OpenZipSegment("version.txt") as version_segment:
                        # AFF4 Std v1.0 introduced the version file
                        versionTxt = version_segment.ReadAll()
                        #resolver.Close(version)
                        version = parseProperties(versionTxt.decode("utf-8"))
                        version = Version.create(version)
                        if version.is11():
                            return (version, lexicon.standard11)
                        else:
                            return (version, lexicon.standard)
                except Exception:
                    if str(resolver.aff4NS) == lexicon.AFF4_NAMESPACE:
                        # Rekall defined the new AFF4 namespace post the Wirespeed paper
                        return (Version(1,0,"pyaff4"), lexicon.scudette)
                    else:
                        # Wirespeed (Evimetry) 1.x and Evimetry 2.x stayed with the original namespace
                        return (Version(0,1,"pyaff4"), lexicon.legacy)
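A usage sketch for the routine above, assuming it is importable from the module that defines it (the container path is hypothetical; rdfvalue.URN.FromFileName is used the same way as in the tests further down):

    from pyaff4 import lexicon, rdfvalue

    urn = rdfvalue.URN.FromFileName("/tmp/sample.aff4")  # hypothetical container path
    version, lex = identifyURN(urn)
    # The returned lexicon selects how the volume's metadata is interpreted.
    if lex is lexicon.standard11:
        print("AFF4 Standard 1.1 container, version", version)
    else:
        print("Legacy or pre-1.1 container, version", version)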
Example 2
    def testEditInplaceZip(self):
        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_container:
                self.volume_urn = zip_container.urn

                with zip_container.CreateZipSegment("foo") as segment:
                    segment.compression_method = zip.ZIP_STORED
                    segment.Write(b'abcdefghijk')
                    segment.Flush()

                with zip_container.CreateZipSegment("bar") as segment:
                    segment.compression_method = zip.ZIP_STORED
                    segment.Write(b'alkjflajdflaksjdadflkjd')
                    segment.Flush()

                backing_store_urn = resolver.GetUnique(lexicon.transient_graph,
                                                       self.volume_urn,
                                                       lexicon.AFF4_STORED)
                with resolver.AFF4FactoryOpen(
                        backing_store_urn) as backing_store:
                    print()

        self.assertEquals(716, os.stat(self.filename).st_size)

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("random"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn

                with zip_file.OpenZipSegment("foo") as segment:
                    segment.SeekWrite(0, 0)
                    segment.Write(b'0000')

        self.assertEquals(716, os.stat(self.filename).st_size)
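The test relies on the AFF4_STREAM_WRITE_MODE value attached to the backing file URN: "truncate" starts a fresh container, while "random" reopens it so members can be overwritten in place, which is why the file stays at 716 bytes after the edit. A minimal sketch of reopening a container this way (hypothetical path; Version is assumed importable from pyaff4.version as in the project):

    from pyaff4 import data_store, lexicon, rdfvalue, zip
    from pyaff4.version import Version

    file_urn = rdfvalue.URN.FromFileName("/tmp/sample.aff4")  # hypothetical path
    with data_store.MemoryDataStore() as resolver:
        # "random" keeps existing members and allows overwriting their payload in place.
        resolver.Set(lexicon.transient_graph, file_urn,
                     lexicon.AFF4_STREAM_WRITE_MODE,
                     rdfvalue.XSDString("random"))
        with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                    file_urn) as zip_file:
            pass  # open segments and SeekWrite/Write as in the test above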
Example 3
    def testOpenSegmentByURN(self):
        try:
            resolver = data_store.MemoryDataStore()

            # This is required in order to load and parse metadata from this volume
            # into a fresh empty resolver.
            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                segment_urn = zip_file.urn.Append(
                    escaping.arnPathFragment_from_path(self.segment_name),
                    quote=False)
                unc_segment_urn = zip_file.urn.Append(
                    escaping.arnPathFragment_from_path(self.unc_segment_name),
                    quote=False)
                period_start_segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(
                        self.period_start_segment_name),
                    quote=False)

            with resolver.AFF4FactoryOpen(segment_urn) as segment:
                self.assertEquals(segment.Read(1000), self.data1)
            with resolver.AFF4FactoryOpen(unc_segment_urn) as segment:
                self.assertEquals(segment.Read(1000), self.data1)
            with resolver.AFF4FactoryOpen(unc_segment_urn) as segment:
                self.assertEquals(segment.Read(1000), self.data1)

        except Exception:
            traceback.print_exc()
            self.fail()
Example 4
    def setUp(self):
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(self.segment_name),
                    quote=False)

                with zip_file.CreateMember(segment_urn) as segment:
                    segment.Write(self.data1)

                unc_segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(self.unc_segment_name),
                    quote=False)

                with zip_file.CreateMember(unc_segment_urn) as segment:
                    segment.Write(self.data1)

                period_start_segment_urn = self.volume_urn.Append(
                    self.period_start_segment_name, quote=False)

                with zip_file.CreateMember(
                        period_start_segment_urn) as segment:
                    segment.Write(self.data1)
Example 5
    def createURN(resolver, container_urn, encryption=False):
        """Public method to create a new writable locical AFF4 container."""

        resolver.Set(lexicon.transient_graph, container_urn, lexicon.AFF4_STREAM_WRITE_MODE, rdfvalue.XSDString("truncate"))

        if not encryption:
            version = Version(1, 1, "pyaff4")
            with zip.ZipFile.NewZipFile(resolver, version, container_urn) as zip_file:
                volume_urn = zip_file.urn
                with resolver.AFF4FactoryOpen(zip_file.backing_store_urn) as backing_store:
                    return WritableHashBasedImageContainer(backing_store, zip_file, version, volume_urn, resolver, lexicon.standard)
        else:
            version = Version(1, 2, "pyaff4")
            with zip.ZipFile.NewZipFile(resolver, version, container_urn) as zip_file:
                volume_urn = zip_file.urn
                with resolver.AFF4FactoryOpen(zip_file.backing_store_urn) as backing_store:
                    return EncryptedImageContainer(backing_store, zip_file, version, volume_urn, resolver, lexicon.standard)
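A sketch of driving the factory above from a fresh resolver (the output path is hypothetical):

    from pyaff4 import data_store, rdfvalue

    container_urn = rdfvalue.URN.FromFileName("/tmp/image.aff4")  # hypothetical path
    with data_store.MemoryDataStore() as resolver:
        # encryption=False yields an AFF4 v1.1 WritableHashBasedImageContainer,
        # encryption=True an AFF4 v1.2 EncryptedImageContainer.
        volume = createURN(resolver, container_urn, encryption=False)
        # The methods for adding logical images to the container are outside this listing.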
Example 6
    def createURN(resolver, container_urn):
        """Public method to create a new writable locical AFF4 container."""

        resolver.Set(lexicon.transient_graph, container_urn, lexicon.AFF4_STREAM_WRITE_MODE, rdfvalue.XSDString("truncate"))

        version = Version(1, 1, "pyaff4")
        with zip.ZipFile.NewZipFile(resolver, version, container_urn) as zip_file:
            volume_urn = zip_file.urn
            return WritableHashBasedImageContainer(version, volume_urn, resolver, lexicon.standard)
Example 7
File: zip.py Project: yazici/pyaff4
    def __init__(self, *args, **kwargs):
        super(BasicZipFile, self).__init__(*args, **kwargs)
        self.children = set()
        # The members of this zip file. Keys are member URNs, values are zip info records.
        self.members = {}
        self.global_offset = 0
        try:
            self.version = kwargs["version"]
        except KeyError:
            self.version = Version(0, 0, "pyaff4")
Example 8
    def testRemoveThenReAdd(self):
        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass
        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(self.segment_name),
                    quote=False)

                with zip_file.CreateMember(segment_urn) as segment:
                    segment.Write(self.data1)
                    segment.Flush()

                zip_file.RemoveMember(segment_urn)

                with zip_file.CreateMember(segment_urn) as segment:
                    segment.Write(self.data2)

        with data_store.MemoryDataStore() as resolver:
            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(self.segment_name),
                    quote=False)
                self.assertTrue(zip_file.ContainsMember(segment_urn))

                with zip_file.OpenMember(segment_urn) as segment:
                    self.assertEquals(self.data2,
                                      segment.Read(len(self.data2)))

        self.assertEquals(629, os.stat(self.filename).st_size)
Example 9
File: zip.py Project: yazici/pyaff4
    def NewZipFile(resolver, vers, backing_store_urn):
        rdfvalue.AssertURN(backing_store_urn)
        if vers is None:
            vers = Version(0, 1, "pyaff4")
        result = ZipFile(resolver, urn=None, version=vers)

        resolver.Set(lexicon.transient_graph, result.urn, lexicon.AFF4_TYPE,
                     rdfvalue.URN(lexicon.AFF4_ZIP_TYPE))

        resolver.Set(lexicon.transient_graph, result.urn, lexicon.AFF4_STORED,
                     rdfvalue.URN(backing_store_urn))

        return resolver.AFF4FactoryOpen(result.urn, version=vers)
Example 10
    def NewZipFile(resolver, vers, backing_store_urn, appendmode=None):
        rdfvalue.AssertURN(backing_store_urn)
        if vers is None:
            vers = Version(0, 1, "pyaff4")
        result = ZipFile(resolver, urn=None, version=vers)

        resolver.Set(lexicon.transient_graph, result.urn, lexicon.AFF4_TYPE,
                     rdfvalue.URN(lexicon.AFF4_ZIP_TYPE))

        resolver.Set(lexicon.transient_graph, result.urn, lexicon.AFF4_STORED,
                     rdfvalue.URN(backing_store_urn))

        if appendmode is not None and appendmode != "w":
            resolver.Set(lexicon.transient_graph, backing_store_urn, lexicon.AFF4_STREAM_WRITE_MODE, rdfvalue.XSDString("append"))

        return resolver.AFF4FactoryOpen(result.urn, version=vers)
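Based on the variant above, reopening an existing container for append could look roughly like this (hypothetical path; Version is assumed importable from pyaff4.version):

    from pyaff4 import data_store, rdfvalue, zip
    from pyaff4.version import Version

    backing_urn = rdfvalue.URN.FromFileName("/tmp/existing.aff4")  # hypothetical path
    with data_store.MemoryDataStore() as resolver:
        # Any appendmode other than None or "w" marks the backing store as "append".
        with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                    backing_urn, appendmode="a") as zip_file:
            print(zip_file.urn)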
Example 11
    def testRemoveDoesntRewindForNonLastSegment(self):
        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_container:
                self.volume_urn = zip_container.urn

                with zip_container.CreateZipSegment("foo") as segment:
                    segment.Write(self.data1)
                    segment.Flush()

                with zip_container.CreateZipSegment("bar") as segment:
                    segment.Write(self.data1)
                    segment.Flush()

                backing_store_urn = resolver.GetUnique(lexicon.transient_graph,
                                                       self.volume_urn,
                                                       lexicon.AFF4_STORED)
                with resolver.AFF4FactoryOpen(
                        backing_store_urn) as backing_store:
                    print()
                    self.assertEquals(93, backing_store.writeptr)

                # Removing a segment that is not the last one in the volume must fail.
                with self.assertRaises(Exception):
                    zip_container.RemoveSegment("foo")

        self.assertEquals(687, os.stat(self.filename).st_size)
Example 12
    def testRemoveDoesRewind(self):
        try:
            os.unlink(self.filename)
        except (IOError, OSError):
            pass

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("truncate"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_container:
                self.volume_urn = zip_container.urn

                with zip_container.CreateZipSegment("foo") as segment:
                    segment.Write(self.data1)
                    segment.Flush()

                with zip_container.CreateZipSegment("bar") as segment:
                    segment.Write(self.data1)
                    segment.Flush()

                backing_store_urn = resolver.GetUnique(lexicon.transient_graph,
                                                       self.volume_urn,
                                                       lexicon.AFF4_STORED)
                with resolver.AFF4FactoryOpen(
                        backing_store_urn) as backing_store:
                    print()
                    self.assertEquals(93, backing_store.writeptr)

                zip_container.RemoveSegment("bar")

                with zip_container.CreateZipSegment("nar") as segment:
                    segment.Write(self.data2)
                    segment.Flush()

        with data_store.MemoryDataStore() as resolver:
            resolver.Set(lexicon.transient_graph, self.filename_urn,
                         lexicon.AFF4_STREAM_WRITE_MODE,
                         rdfvalue.XSDString("append"))

            with zip.ZipFile.NewZipFile(resolver, Version(1, 1, "pyaff4"),
                                        self.filename_urn) as zip_file:
                self.volume_urn = zip_file.urn
                segment_urn = self.volume_urn.Append(
                    escaping.arnPathFragment_from_path(self.segment_name),
                    quote=False)
                self.assertFalse(zip_file.ContainsSegment("bar"))
                self.assertTrue(zip_file.ContainsSegment("foo"))
                self.assertTrue(zip_file.ContainsSegment("nar"))

                with zip_file.OpenZipSegment("foo") as segment:
                    self.assertEquals(self.data1,
                                      segment.Read(len(self.data1)))

                with zip_file.OpenZipSegment("nar") as segment:
                    self.assertEquals(self.data2,
                                      segment.Read(len(self.data2)))

        self.assertEquals(736, os.stat(self.filename).st_size)