Ejemplo n.º 1
0
Archivo: zip.py Proyecto: yazici/pyaff4
    def CreateZipSegment(self, filename, arn=None):
        """Create (or fetch from cache) a writable zip segment.

        When arn is not supplied it is derived from filename relative to
        this volume's URN. An already-cached segment is rewound and
        returned directly; otherwise a new ZipFileSegment is registered
        in the transient graph, tracked as a child and cached.
        """
        self.MarkDirty()

        if arn is None:
            arn = escaping.urn_from_member_name(
                filename, self.urn, self.version)

        # Serve a cached segment, rewound to the start.
        cached = self.resolver.CacheGet(arn)
        if cached:
            cached.readptr = 0
            return cached

        # Record the segment's type and storage volume in the transient graph.
        self.resolver.Set(
            lexicon.transient_graph, arn, lexicon.AFF4_TYPE,
            rdfvalue.URN(lexicon.AFF4_ZIP_SEGMENT_TYPE))
        self.resolver.Set(
            lexicon.transient_graph, arn, lexicon.AFF4_STORED, self.urn)

        # Keep track of all the segments we issue.
        self.children.add(arn)

        segment = ZipFileSegment(resolver=self.resolver, urn=arn)
        segment.LoadFromZipFile(self)

        # Add the new object to the object cache.
        return self.resolver.CachePut(segment)
Ejemplo n.º 2
0
    def _ForgetMember(self, arn):
        # Drop every in-memory record of a member: cached object,
        # child registration and central-directory bookkeeping.
        if self.resolver.CacheContains(arn):
            obj = self.resolver.CacheGet(arn)
            self.resolver.ObjectCache.Remove(obj)
        if arn in self.children:
            self.children.remove(arn)
        if arn in self.members:
            del self.members[arn]

    def RemoveMembers(self, child_urns):
        """Remove the given member URNs from the container.

        Members are walked from the end of the archive backwards (by
        descending file_header_offset). While only removed members have
        been seen, the backing store is physically trimmed back to each
        member's header offset to reclaim space; the first surviving
        member disables further trimming, and remaining removals become
        bookkeeping only.

        If anything fails mid-way, falls back to best-effort bookkeeping
        cleanup for every requested URN (no trimming), preserving the
        original behavior of never raising out of this method.
        """
        trimStorage = True
        backing_store_urn = self.resolver.GetUnique(
            lexicon.transient_graph, self.urn, lexicon.AFF4_STORED)
        with self.resolver.AFF4FactoryOpen(backing_store_urn) as backing_store:
            try:
                # sorted() materializes a new list, so mutating self.members
                # while iterating is safe.
                ordered = sorted(self.members.values(),
                                 key=lambda k: k.file_header_offset,
                                 reverse=True)
                for zip_info in ordered:
                    arn = escaping.urn_from_member_name(
                        zip_info.filename, self.urn, self.version)
                    if arn in child_urns:
                        # Was `self.members[arn] != None` before deleting,
                        # which could raise KeyError; membership is now
                        # checked inside _ForgetMember.
                        self._ForgetMember(arn)
                        if trimStorage:
                            backing_store.Trim(zip_info.file_header_offset)
                    else:
                        # A kept member precedes any further removals, so the
                        # store cannot be trimmed past this point.
                        trimStorage = False
            except Exception:
                # Narrowed from a bare `except:`; keep the deliberate
                # best-effort fallback of cleaning up bookkeeping only.
                for arn in child_urns:
                    self._ForgetMember(arn)
Ejemplo n.º 3
0
    def DumpToTurtle(self, zipcontainer, ):
        """Serialize the resolver's RDF metadata into information.turtle.

        On a fresh container the turtle text is written whole into a single
        stored segment. On an existing container the turtle is maintained
        incrementally: the text is split into a shared "directives" segment
        plus numbered triple chunks (information.turtle/%08d), which are then
        concatenated back into the top-level information.turtle segment.
        When the backing stream is in "random" write mode, only the latest
        full snapshot is kept instead of chunking.

        Args:
            zipcontainer: the AFF4 zip volume to write metadata into.
        """
        infoARN = escaping.urn_from_member_name(u"information.turtle", zipcontainer.urn, zipcontainer.version)
        mode = self.GetUnique(lexicon.transient_graph, zipcontainer.backing_store_urn, lexicon.AFF4_STREAM_WRITE_MODE)
        if mode == "random":
            # random mode is used for appending to encrypted streams, where the stream size changes
            # snapshot mode creates the situation where we have multiple versions of the stream object
            # mashed together, and we cant tell the most recent
            turtle_append_mode="latest"
        else:
            # in append mode, we assume that each time we append, we are adding to the container, rather
            # than modifying any existing objects in the container. Because of this, we get to save multiple
            # independent copies of the turtle from each run, and join them together as text for efficiency
            turtle_append_mode="snapshot"

        if not zipcontainer.ContainsMember(infoARN):
            # Fresh container: dump everything into one uncompressed segment.
            with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                turtle_segment.compression_method = ZIP_STORED

                result = self._DumpToTurtle(zipcontainer.urn)
                turtle_segment.write(utils.SmartStr(result))
                turtle_segment.Flush()
            turtle_segment.Close()
        else:
            # append to an existing container
            self.invalidateCachedMetadata(zipcontainer)
            if turtle_append_mode == "latest":
                # Replace the whole turtle with a fresh snapshot and return.
                zipcontainer.RemoveMember(infoARN)
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_STORED

                    result = self._DumpToTurtle(zipcontainer.urn)
                    turtle_segment.write(utils.SmartStr(result))
                    turtle_segment.Flush()
                turtle_segment.Close()
                return

            explodedTurtleDirectivesARN = escaping.urn_from_member_name(u"information.turtle/directives", zipcontainer.urn, zipcontainer.version)
            if not zipcontainer.ContainsMember(explodedTurtleDirectivesARN):
                # this is the first append operation. Create the chunked turtle structures
                with zipcontainer.OpenZipSegment(u"information.turtle") as turtle_segment:
                    currentTurtleBytes= streams.ReadAll(turtle_segment)
                    currentturtle = utils.SmartUnicode(currentTurtleBytes)
                    #hexdump.hexdump(currentTurtleBytes)
                    # Split the existing turtle into prefix directives and triples.
                    (directives_txt, triples_txt) = turtle.toDirectivesAndTripes(currentturtle)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()
                    # Existing triples become chunk 0.
                    with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % 0) as turtle_chunk_segment:
                        turtle_chunk_segment.compression_method = ZIP_DEFLATE
                        turtle_chunk_segment.write(utils.SmartStr(triples_txt))
                        turtle_chunk_segment.Flush()
                    # NOTE(review): closed via self.Close(...) while sibling
                    # segments call segment.Close() directly - confirm intended.
                    self.Close(turtle_chunk_segment)
                turtle_segment.Close()

                # Compute the new triples and any directives not yet recorded.
                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)
                if not len(directives_difference) == 0:
                    directives_txt = directives_txt + u"\r\n" + directives_difference
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                # The new triples become chunk 1.
                current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, 1))
                with zipcontainer.CreateMember(current_turtle_chunk_arn) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                self.Close(turtle_segment)

                # Rebuild the top-level turtle: directives plus every chunk in order.
                zipcontainer.RemoveSegment(u"information.turtle")
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_STORED
                    turtle_segment.write(utils.SmartStr(directives_txt + "\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, turtleContainerIndex))

                        if zipcontainer.ContainsMember(current_turtle_chunk_arn):
                            with zipcontainer.OpenMember(current_turtle_chunk_arn) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1

                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
            else:
                # more than one append has already occurred
                # Find the next free chunk index.
                turtleContainerIndex = 0
                while True:
                    turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                                zipcontainer.urn, zipcontainer.version)
                    if not zipcontainer.ContainsMember(turtleARN):
                        break
                    turtleContainerIndex = turtleContainerIndex + 1

                with zipcontainer.OpenZipSegment(u"information.turtle/directives") as directives_segment:
                    directives_txt = utils.SmartUnicode(streams.ReadAll(directives_segment))

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)

                if len(directives_difference) > 0:
                    directives_txt = directives_txt + u"\r\n" + u"\r\n".join(directives_difference)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                # Write the new triples into the next chunk slot.
                with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % turtleContainerIndex) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                turtle_segment.Close()

                # Rebuild the top-level turtle from directives plus all chunks.
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + u"\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                  zipcontainer.urn, zipcontainer.version)
                        if zipcontainer.ContainsMember(turtleARN):
                            with zipcontainer.OpenZipSegment(
                                u"information.turtle/%08d" % turtleContainerIndex) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1
                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
Ejemplo n.º 4
0
 def testZipSegmenttoARN(self):
     """Member names map to ARNs: spaces are percent-encoded, while
     unicode and reserved path characters pass through unchanged."""
     version = container.Version(1, 1, "pyaff4")
     base = u"aff4://e6bae91b-0be3-4770-8a36-14d231833e18"
     base_urn = rdfvalue.URN(base)

     # (member name, expected ARN suffix appended to the base URN)
     cases = [
         (u"/c:/foo", u"//c:/foo"),
         (u"/c:/foo", u"//c:/foo"),
         (u"bar/c$", u"/bar/c$"),
         (u"bar/c$/foo.txt", u"/bar/c$/foo.txt"),
         (u"/foo/bar", u"//foo/bar"),
         (u"/foo/some file", u"//foo/some%20file"),
         (u"/foo/some  file", u"//foo/some%20%20file"),
         (u"bar/c$/foo/ネコ.txt", u"/bar/c$/foo/ネコ.txt"),
         (u"laptop/My Documents/FileSchemeURIs.doc",
          u"/laptop/My%20Documents/FileSchemeURIs.doc"),
         (u"/C:/Documents and Settings/davris/FileSchemeURIs.doc",
          u"//C:/Documents%20and%20Settings/davris/FileSchemeURIs.doc"),
         (u"/D:/Program Files/Viewer/startup.htm",
          u"//D:/Program%20Files/Viewer/startup.htm"),
         (u"/C:/Program Files/Music/Web Sys/main.html?REQUEST=RADIO",
          u"//C:/Program%20Files/Music/Web%20Sys/main.html?REQUEST=RADIO"),
         (u"applib/products/a-b/abc_9/4148.920a/media/start.swf",
          u"/applib/products/a-b/abc_9/4148.920a/media/start.swf"),
         (u"/C:/exampleㄓ.txt", u"//C:/exampleㄓ.txt"),
     ]
     for member_name, suffix in cases:
         self.assertEqual(base + suffix,
                          urn_from_member_name(member_name, base_urn, version))
Ejemplo n.º 5
0
    def DumpToTurtle(self, zipcontainer):
        """Serialize the resolver's RDF metadata into information.turtle.

        On a fresh container the turtle text is written whole into a single
        deflated segment. On an existing container the turtle is maintained
        incrementally: the text is split into a shared "directives" segment
        plus numbered triple chunks (information.turtle/%08d), which are then
        concatenated back into the top-level information.turtle segment.

        Args:
            zipcontainer: the AFF4 zip volume to write metadata into.
        """
        infoARN = escaping.urn_from_member_name(u"information.turtle", zipcontainer.urn, zipcontainer.version)
        if not zipcontainer.ContainsMember(infoARN):
            # Fresh container: dump everything into one deflated segment.
            with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                turtle_segment.compression_method = ZIP_DEFLATE

                result = self._DumpToTurtle(zipcontainer.urn)
                turtle_segment.write(utils.SmartStr(result))
                turtle_segment.Flush()
            turtle_segment.Close()
        else:
            # append to an existing container
            self.invalidateCachedMetadata(zipcontainer)
            explodedTurtleDirectivesARN = escaping.urn_from_member_name(u"information.turtle/directives", zipcontainer.urn, zipcontainer.version)
            if not zipcontainer.ContainsMember(explodedTurtleDirectivesARN):
                # this is the first append operation. Create the chunked turtle structures
                with zipcontainer.OpenZipSegment(u"information.turtle") as turtle_segment:
                    currentturtle = utils.SmartUnicode(streams.ReadAll(turtle_segment))
                    # Split the existing turtle into prefix directives and triples.
                    (directives_txt, triples_txt) = turtle.toDirectivesAndTripes(currentturtle)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()
                    # Existing triples become chunk 0.
                    with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % 0) as turtle_chunk_segment:
                        turtle_chunk_segment.compression_method = ZIP_DEFLATE
                        turtle_chunk_segment.write(utils.SmartStr(triples_txt))
                        turtle_chunk_segment.Flush()
                    # NOTE(review): closed via self.Close(...) while sibling
                    # segments call segment.Close() directly - confirm intended.
                    self.Close(turtle_chunk_segment)
                turtle_segment.Close()

                # Compute the new triples and any directives not yet recorded.
                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)
                if not len(directives_difference) == 0:
                    directives_txt = directives_txt + u"\r\n" + directives_difference
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                # The new triples become chunk 1.
                current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, 1))
                with zipcontainer.CreateMember(current_turtle_chunk_arn) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                self.Close(turtle_segment)

                # Rebuild the top-level turtle: directives plus every chunk in order.
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + "\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, turtleContainerIndex))

                        if zipcontainer.ContainsMember(current_turtle_chunk_arn):
                            with zipcontainer.OpenMember(current_turtle_chunk_arn) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1

                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
            else:
                # more than one append has already occurred
                # Find the next free chunk index.
                turtleContainerIndex = 0
                while True:
                    turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                                zipcontainer.urn, zipcontainer.version)
                    if not zipcontainer.ContainsMember(turtleARN):
                        break
                    turtleContainerIndex = turtleContainerIndex + 1

                with zipcontainer.OpenZipSegment(u"information.turtle/directives") as directives_segment:
                    directives_txt = utils.SmartUnicode(streams.ReadAll(directives_segment))

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)

                if len(directives_difference) > 0:
                    directives_txt = directives_txt + u"\r\n" + u"\r\n".join(directives_difference)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                # Write the new triples into the next chunk slot.
                with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % turtleContainerIndex) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                turtle_segment.Close()

                # Rebuild the top-level turtle from directives plus all chunks.
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + u"\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                  zipcontainer.urn, zipcontainer.version)
                        if zipcontainer.ContainsMember(turtleARN):
                            with zipcontainer.OpenZipSegment(
                                u"information.turtle/%08d" % turtleContainerIndex) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1
                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
Ejemplo n.º 6
0
Archivo: zip.py Proyecto: yazici/pyaff4
 def OpenZipSegment(self, filename):
     """Open an existing segment identified by its member filename."""
     return self.OpenMember(
         escaping.urn_from_member_name(filename, self.urn, self.version))
Ejemplo n.º 7
0
Archivo: zip.py Proyecto: yazici/pyaff4
    def parse_cd(self, backing_store_urn):
        """Parse the zip central directory from the backing store.

        Scans backwards for the End of Central Directory record (allowing
        for a trailing archive comment), recovers the volume URN from the
        zip comment when present, supports both classic and Zip64 layouts
        (computing self.global_offset for archives appended to another
        file), and registers each central-directory entry in self.members
        and in the resolver as a zip segment.

        Args:
            backing_store_urn: URN of the stream containing the zip file.

        Raises:
            IOError: if the Zip64 locator record is invalid.
            RuntimeError: if a Zip64EndCD or CDFileHeader record is invalid.
        """
        with self.resolver.AFF4FactoryOpen(backing_store_urn) as backing_store:
            # Find the End of Central Directory Record - We read about 4k of
            # data and scan for the header from the end, just in case there is
            # an archive comment appended to the end.
            backing_store.SeekRead(-BUFF_SIZE, 2)

            ecd_real_offset = backing_store.TellRead()
            buffer = backing_store.Read(BUFF_SIZE)

            end_cd, buffer_offset = EndCentralDirectory.FromBuffer(buffer)

            urn_string = None

            ecd_real_offset += buffer_offset

            # Fetch the volume comment, which holds the AFF4 volume URN.
            if end_cd.comment_len > 0:
                backing_store.SeekRead(ecd_real_offset + end_cd.sizeof())
                urn_string = utils.SmartUnicode(
                    backing_store.Read(end_cd.comment_len))

                # trim trailing null if there
                if urn_string[len(urn_string) - 1] == chr(0):
                    urn_string = urn_string[0:len(urn_string) - 1]
                LOGGER.info("Loaded AFF4 volume URN %s from zip file.",
                            urn_string)

            # There is a catch 22 here - before we parse the ZipFile we dont
            # know the Volume's URN, but we need to know the URN so the
            # AFF4FactoryOpen() can open it. Therefore we start with a random
            # URN and then create a new ZipFile volume. After parsing the
            # central directory we discover our URN and therefore we can delete
            # the old, randomly selected URN.
            if urn_string and self.urn != urn_string and self.version != basic_zip:
                self.resolver.DeleteSubject(self.urn)
                self.urn.Set(utils.SmartUnicode(urn_string))

                # Set these triples so we know how to open the zip file again.
                # NOTE(review): the graph argument here is self.urn, unlike the
                # transient_graph used by the sibling Set() calls below -
                # confirm this is intentional.
                self.resolver.Set(self.urn, self.urn, lexicon.AFF4_TYPE,
                                  rdfvalue.URN(lexicon.AFF4_ZIP_TYPE))
                self.resolver.Set(lexicon.transient_graph, self.urn,
                                  lexicon.AFF4_STORED,
                                  rdfvalue.URN(backing_store_urn))
                self.resolver.Set(lexicon.transient_graph, backing_store_urn,
                                  lexicon.AFF4_CONTAINS, self.urn)

            directory_offset = end_cd.offset_of_cd
            directory_number_of_entries = end_cd.total_entries_in_cd

            # Traditional zip file - non 64 bit.
            if directory_offset > 0 and directory_offset != 0xffffffff:
                # The global difference between the zip file offsets and real
                # file offsets. This is non zero when the zip file was appended
                # to another file.
                self.global_offset = (
                    # Real ECD offset.
                    ecd_real_offset - end_cd.size_of_cd -

                    # Claimed CD offset.
                    directory_offset)

                LOGGER.info("Global offset: %#x", self.global_offset)

            # This is a 64 bit archive, find the Zip64EndCD.
            else:
                locator_real_offset = ecd_real_offset - Zip64CDLocator.sizeof()
                backing_store.SeekRead(locator_real_offset, 0)
                locator = Zip64CDLocator(
                    backing_store.Read(Zip64CDLocator.sizeof()))

                if not locator.IsValid():
                    raise IOError("Zip64CDLocator invalid or not supported.")

                # Although it may appear that we can use the Zip64CDLocator to
                # locate the Zip64EndCD record via it's offset_of_cd record this
                # is not quite so. If the zip file was appended to another file,
                # the offset_of_cd field will not be valid, as it still points
                # to the old offset. In this case we also need to know the
                # global shift.
                backing_store.SeekRead(
                    locator_real_offset - Zip64EndCD.sizeof(), 0)

                end_cd = Zip64EndCD(backing_store.Read(Zip64EndCD.sizeof()))

                if not end_cd.IsValid():
                    LOGGER.error("Zip64EndCD magic not correct @%#x",
                                 locator_real_offset - Zip64EndCD.sizeof())
                    raise RuntimeError("Zip64EndCD magic not correct")

                directory_offset = end_cd.offset_of_cd
                directory_number_of_entries = end_cd.number_of_entries_in_volume

                # The global offset is now known:
                self.global_offset = (
                    # Real offset of the central directory.
                    locator_real_offset - Zip64EndCD.sizeof() -
                    end_cd.size_of_cd -

                    # The directory offset in zip file offsets.
                    directory_offset)

                LOGGER.info("Global offset: %#x", self.global_offset)

            # Now iterate over the directory and read all the ZipInfo structs.
            entry_offset = directory_offset
            for _ in range(directory_number_of_entries):
                backing_store.SeekRead(entry_offset + self.global_offset, 0)
                entry = CDFileHeader(backing_store.Read(CDFileHeader.sizeof()))

                if not entry.IsValid():
                    LOGGER.info("CDFileHeader at offset %#x invalid",
                                entry_offset)
                    raise RuntimeError("Invalid CDFileHeader")

                fn = backing_store.Read(entry.file_name_length)

                # Decode the filename. BUG FIX: the original tested
                # `entry.flags | (1 << 11)`, which is always truthy; a bitwise
                # AND is needed to actually test bit 11 (the EFS/UTF-8 flag).
                # Names without the flag use CP437, the default encoding
                # mandated by the zip specification, so fn is always text.
                if entry.flags & (1 << 11):
                    fn = fn.decode("utf-8")
                else:
                    fn = fn.decode("cp437")

                zip_info = ZipInfo(
                    filename=fn,
                    local_header_offset=entry.relative_offset_local_header,
                    compression_method=entry.compression_method,
                    compress_size=entry.compress_size,
                    file_size=entry.file_size,
                    crc32=entry.crc32,
                    lastmoddate=entry.dosdate,
                    lastmodtime=entry.dostime)

                # Zip64 local header - parse the Zip64 extended information extra field.
                if entry.extra_field_len > 0:
                    extrabuf = backing_store.Read(entry.extra_field_len)

                    # AFF4 requires Zip64, but we still want to be able to read
                    # 3rd party zip files, so just skip unknown extensible data
                    # fields and find the Zip64 one (header ID 1).
                    while len(extrabuf) > 0:
                        (headerID,
                         dataSize) = struct.unpack("<HH", extrabuf[0:4])
                        if headerID == 1:
                            # Zip64 extended information extra field: 64-bit
                            # values override the 32-bit ones from the CD entry.
                            extra, readbytes = Zip64FileHeaderExtensibleField.FromBuffer(
                                entry, extrabuf)
                            extrabuf = extrabuf[readbytes:]

                            if extra.header_id == 1:
                                if extra.Get("relative_offset_local_header"
                                             ) is not None:
                                    zip_info.local_header_offset = (extra.Get(
                                        "relative_offset_local_header"))
                                if extra.Get("file_size") is not None:
                                    zip_info.file_size = extra.Get("file_size")
                                if extra.Get("compress_size") is not None:
                                    zip_info.compress_size = extra.Get(
                                        "compress_size")
                        else:
                            extrabuf = extrabuf[dataSize + 4:]

                LOGGER.info("Found file %s @ %#x", zip_info.filename,
                            zip_info.local_header_offset)

                # Store this information in the resolver. This allows
                # segments to be directly opened by URN.
                member_urn = escaping.urn_from_member_name(
                    zip_info.filename, self.urn, self.version)

                self.resolver.Set(lexicon.transient_graph, member_urn,
                                  lexicon.AFF4_TYPE,
                                  rdfvalue.URN(lexicon.AFF4_ZIP_SEGMENT_TYPE))

                self.resolver.Set(lexicon.transient_graph, member_urn,
                                  lexicon.AFF4_STORED, self.urn)
                self.resolver.Set(lexicon.transient_graph, member_urn,
                                  lexicon.AFF4_STREAM_SIZE,
                                  rdfvalue.XSDInteger(zip_info.file_size))
                self.members[member_urn] = zip_info

                # Go to the next entry.
                entry_offset += (entry.sizeof() + entry.file_name_length +
                                 entry.extra_field_len +
                                 entry.file_comment_length)
Ejemplo n.º 8
0
 def ContainsSegment(self, segment_name):
     """Return True when the volume holds a member for segment_name."""
     return self.ContainsMember(
         escaping.urn_from_member_name(segment_name, self.urn, self.version))
Ejemplo n.º 9
0
 def RemoveSegment(self, segment_name):
     """Remove the member whose URN is derived from segment_name."""
     self.RemoveMember(
         escaping.urn_from_member_name(segment_name, self.urn, self.version))