Code Example #1
    def QuerySubjectPredicate(self, subject, predicate):
        subject = utils.SmartStr(subject)
        predicate = utils.SmartStr(predicate)
        for s, data in six.iteritems(self.store):
            if s == subject:
                for pred, value in six.iteritems(data):
                    if pred == predicate:
                        if not isinstance(value, list):
                            value = [value]

                        for o in value:
                            yield o
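
A minimal usage sketch (hypothetical: resolver stands for a populated MemoryDataStore, and subject_urn/predicate_urn are placeholder values):

    # Hypothetical usage of the generator above.
    for obj in resolver.QuerySubjectPredicate(subject_urn, predicate_urn):
        print(obj)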
Code Example #2
    def DumpToTurtle(self, stream=None, verbose=False):
        g = rdflib.Graph()
        for urn, items in self.store.items():
            urn = rdflib.URIRef(utils.SmartUnicode(urn))
            rdf_type = items.get(utils.SmartStr(lexicon.AFF4_TYPE))
            if rdf_type is None:
                continue

            for attr, value in list(items.items()):
                attr = utils.SmartUnicode(attr)
                # We suppress certain facts which can be deduced from the file
                # format itself. This ensures that we do not have conflicting
                # data in the data store. The data in the data store is a
                # combination of explicit facts and implied facts.
                if not verbose:
                    if attr.startswith(lexicon.AFF4_VOLATILE_NAMESPACE):
                        continue

                    if attr in self.suppressed_rdftypes.get(rdf_type, ()):
                        continue

                attr = rdflib.URIRef(attr)
                if not isinstance(value, list):
                    value = [value]

                for item in value:
                    g.add((urn, attr, item.GetRaptorTerm()))

        result = g.serialize(format='turtle')
        if stream:
            stream.write(result)
        return result
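
A minimal usage sketch (hypothetical; resolver is a populated data store). Note that rdflib's serialize() returns bytes in rdflib 4/5 and str in rdflib 6+, so the result type depends on the installed version:

    # Hypothetical: serialize the whole store to Turtle text.
    turtle_result = resolver.DumpToTurtle(verbose=False)
    print(turtle_result)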
Code Example #3
    def WriteFileHeader(self, backing_store):
        if self.file_header_offset is None:
            self.file_header_offset = backing_store.Tell()

        header = ZipFileHeader(crc32=self.crc32,
                               compress_size=self.compress_size,
                               file_size=self.file_size,
                               file_name_length=len(self.filename),
                               compression_method=self.compression_method,
                               lastmodtime=self.lastmodtime,
                               lastmoddate=self.lastmoddate,
                               extra_field_len=0)

        extra_header_64 = Zip64FileHeaderExtensibleField()
        if self.file_size > ZIP32_MAX_SIZE:
            header.file_size = 0xFFFFFFFF
            extra_header_64.Set("file_size", self.file_size)

        if self.compress_size > ZIP32_MAX_SIZE:
            header.compress_size = 0xFFFFFFFF
            extra_header_64.Set("compress_size", self.compress_size)

        # Only write the extra header if we have to.
        if not extra_header_64.empty():
            header.extra_field_len = extra_header_64.sizeof()

        backing_store.Seek(self.file_header_offset)
        backing_store.Write(header.Pack())
        backing_store.Write(utils.SmartStr(self.filename))

        if not extra_header_64.empty():
            backing_store.Write(extra_header_64.Pack())
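
The ZIP32_MAX_SIZE handling follows the zip64 convention: a 32-bit header field that cannot hold the real value is set to the sentinel 0xFFFFFFFF, and the real value moves into the zip64 extensible field. A worked example (values illustrative only):

    # Illustrative only: a 5 GiB member exceeds the 32-bit limit.
    #   self.file_size = 5 * 1024**3         # > ZIP32_MAX_SIZE
    #   header.file_size = 0xFFFFFFFF        # sentinel: "see zip64 field"
    #   extra_header_64.Set("file_size", 5 * 1024**3)
    # A reader that finds 0xFFFFFFFF in the 32-bit field falls back to
    # the 64-bit value stored in the Zip64 extensible field.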
Code Example #4
    def WriteCDFileHeader(self, backing_store):
        header = CDFileHeader(
            compression_method=self.compression_method,
            file_size=self.file_size,
            compress_size=self.compress_size,
            relative_offset_local_header=self.local_header_offset,
            crc32=self.crc32,
            file_name_length=len(self.filename),
            dostime=self.lastmodtime,
            dosdate=self.lastmoddate)

        extra_header_64 = Zip64FileHeaderExtensibleField()
        if self.file_size > ZIP32_MAX_SIZE:
            header.file_size = 0xFFFFFFFF
            extra_header_64.Set("file_size", self.file_size)

        if self.compress_size > ZIP32_MAX_SIZE:
            header.compress_size = 0xFFFFFFFF
            extra_header_64.Set("compress_size", self.compress_size)

        if self.local_header_offset > ZIP32_MAX_SIZE:
            header.relative_offset_local_header = 0xFFFFFFFF
            extra_header_64.Set("relative_offset_local_header",
                                self.local_header_offset)

        # Only write the extra header if we have to.
        if not extra_header_64.empty():
            header.extra_field_len = extra_header_64.sizeof()

        backing_store.write(header.Pack())
        backing_store.write(utils.SmartStr(self.filename))

        if not extra_header_64.empty():
            backing_store.write(extra_header_64.Pack())
Code Example #5
    def QueryPredicateObject(self, predicate, object):
        predicate = utils.SmartStr(predicate)
        for subject, data in list(self.store.items()):
            for pred, value in list(data.items()):
                if pred == predicate:
                    if not isinstance(value, list):
                        value = [value]

                    if object in value:
                        yield rdfvalue.URN().UnSerializeFromString(subject)
Code Example #6
    def QueryPredicate(self, predicate):
        """Yields all subjects which have this predicate."""
        predicate = utils.SmartStr(predicate)
        for subject, data in six.iteritems(self.store):
            for pred, values in six.iteritems(data):
                if pred == predicate:
                    if not isinstance(values, list):
                        values = [values]
                    for value in values:
                        yield (rdfvalue.URN().UnSerializeFromString(subject),
                               rdfvalue.URN().UnSerializeFromString(predicate),
                               value)
Code Example #7
    def QuerySubject(self, graph, subject_regex=None):
        # Only compile when a pattern was actually supplied; the check
        # below already allows for a None regex.
        if subject_regex is not None:
            subject_regex = re.compile(utils.SmartStr(subject_regex))

        if graph == lexicon.any or graph is None:
            storeitems = chain(six.iteritems(self.store), six.iteritems(self.transient_store))
        elif graph == transient_graph:
            storeitems = six.iteritems(self.transient_store)
        else:
            storeitems = six.iteritems(self.store)

        # iteritems() yields (subject, data) pairs; only the key is needed.
        for subject, _ in storeitems:
            if subject_regex is not None and subject_regex.match(subject):
                yield rdfvalue.URN().UnSerializeFromString(subject)
Code Example #8
    def Write(self, data):
        self.MarkDirty()

        # On OSX, the following test doesn't work
        # so we need to do the seek every time
        # if self.fd.tell() != self.readptr:
        #    self.fd.seek(self.readptr)
        # TODO: make this platform aware
        self.fd.seek(self.writeptr)

        self.fd.write(utils.SmartStr(data))
        # self.fd.flush()

        # Advance the write pointer and grow the stream size if needed
        # (assigning size = len(data) would discard earlier writes).
        self.writeptr += len(data)
        if self.writeptr > self.size:
            self.size = self.writeptr
Code Example #9
File: struct_parser.py  Project: yang123vc/aff4-1
def CreateStruct(struct_name, definition):
    fields = []
    format_string = ["<"]
    defaults = []

    for line in definition.splitlines():
        line = line.strip(" ;")
        components = line.split()
        if len(components) >= 2:
            type_format_char = format_string_map.get(components[0])
            name = components[1]

            if type_format_char is None:
                raise RuntimeError("Invalid definition %r" % line)

            try:
                if components[2] != "=":
                    raise RuntimeError("Invalid definition %r" % line)
                defaults.append(int(components[3], 0))
            except IndexError:
                defaults.append(0)

            format_string.append(type_format_char)
            fields.append(name)

    properties = dict(_format_string="".join(format_string),
                      _fields=fields,
                      _defaults=defaults,
                      _name=struct_name)

    # Make accessors for all fields.
    for i, field in enumerate(fields):

        # Bind the loop index through a default argument so each closure
        # captures its own i instead of the final loop value.
        def setx(self, value, i=i):
            self._data[i] = value

        def getx(self, i=i):
            return self._data[i]

        properties[field] = property(getx, setx)

    if six.PY2:
        return type(utils.SmartStr(struct_name), (BaseParser, ), properties)
    else:
        return type(utils.SmartUnicode(struct_name), (BaseParser, ),
                    properties)
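
A hypothetical usage sketch (assumes format_string_map maps C-style type names such as uint32_t and uint16_t to struct format characters, and that BaseParser instances populate _data from _defaults):

    # Hypothetical: generate a little-endian header parser from a
    # C-like definition string.
    FileHeader = CreateStruct("FileHeader", """
        uint32_t magic = 0x11223344;
        uint16_t version;
    """)
    hdr = FileHeader()       # hypothetical BaseParser constructor
    hdr.version = 2          # generated property setter
    print(hex(hdr.magic))    # generated property getter -> 0x11223344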
Code Example #10
    def QuerySubject(self, graph, subject_regex=None):
        if graph == transient_graph:
            # The transient graph lives only in the in-memory store, so
            # delegate entirely to the parent implementation.
            for s in super(HDTAssistedDataStore, self).QuerySubject(
                    transient_graph, subject_regex):
                yield s
            return

        if subject_regex is not None:
            subject_regex = re.compile(utils.SmartStr(subject_regex))
        (triples, cardinality) = self.hdt.search_triples("", "?", "?")
        seen_subject = set()

        for (s, p, o) in triples:
            if subject_regex is not None and subject_regex.match(s):
                if s not in seen_subject:
                    seen_subject.add(s)
                    yield rdfvalue.URN().UnSerializeFromString(s)

        for s in super(HDTAssistedDataStore, self).QuerySubject(graph, subject_regex=subject_regex):
            if s not in seen_subject:
                seen_subject.add(s)
                yield s
Code Example #11
def statx(path):
    pathname = ctypes.c_char_p(utils.SmartStr(path))
    statxbuf = ctypes.create_string_buffer(ctypes.sizeof(Statx))

    lib = ctypes.CDLL(None, use_errno=True)
    syscall = lib.syscall

    # int statx(int dirfd, const char *pathname, int flags, unsigned int mask, struct statx *statxbuf);
    syscall.argtypes = [
        ctypes.c_int, ctypes.c_int, ctypes.c_char_p, ctypes.c_int,
        ctypes.c_uint, ctypes.c_char_p
    ]
    syscall.restype = ctypes.c_int

    if syscall(SYS_STATX, AT_FDCWD, pathname, AT_SYMLINK_NOFOLLOW, STATX_ALL,
               statxbuf):
        e = ctypes.get_errno()
        raise OSError(e, os.strerror(e), path)
    return Statx.from_buffer(statxbuf)
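
A minimal usage sketch (Linux-only; assumes the Statx ctypes structure and the SYS_STATX, AT_FDCWD, AT_SYMLINK_NOFOLLOW and STATX_ALL constants are defined elsewhere in the module):

    # Hypothetical: stat a path without following symlinks.
    st = statx("/etc/hostname")
    # Available fields depend on the Statx definition, e.g.:
    #   print(st.stx_size)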
Code Example #12
    def QueryPredicate(self, graph, predicate):
        """Yields all subjects which have this predicate."""
        predicate = utils.SmartStr(predicate)

        if graph == lexicon.any or graph is None:
            storeitems = chain(six.iteritems(self.store), six.iteritems(self.transient_store))
        elif graph == transient_graph:
            storeitems = six.iteritems(self.transient_store)
        else:
            storeitems = six.iteritems(self.store)

        for subject, data in storeitems:
            for pred, values in six.iteritems(data):
                if pred == predicate:
                    if not isinstance(values, list):
                        values = [values]
                    for value in values:
                        yield (rdfvalue.URN().UnSerializeFromString(subject),
                               rdfvalue.URN().UnSerializeFromString(predicate),
                               value)
Code Example #13
    def Write(self, data):
        if LOGGER.isEnabledFor(logging.INFO):
            LOGGER.info("ZipFileSegment.Write %s @ %x[%x]", self.urn,
                        self.writeptr, len(data))
        if not self.properties.writable:
            raise IOError("Attempt to write to read only object")
        self.MarkDirty()

        # On OSX, the following test doesn't work
        # so we need to do the seek every time
        if aff4.MacOS:
            self.fd.seek(self.writeptr)
        else:
            if self.fd.tell() != self.writeptr:
                self.fd.seek(self.writeptr)

        self.fd.write(utils.SmartStr(data))
        # self.fd.flush()

        # self.size = len(data)
        self.writeptr += len(data)
        if self.writeptr > self.size:
            self.size = self.writeptr
Code Example #14
File: zip.py  Project: yazici/pyaff4
    def write_zip64_CD(self):
        backing_store_urn = self.resolver.GetUnique(lexicon.transient_graph,
                                                    self.urn,
                                                    lexicon.AFF4_STORED)
        with self.resolver.AFF4FactoryOpen(backing_store_urn) as backing_store:
            # We write to a memory stream first, and then copy it into the
            # backing_store at once. This really helps when we have lots of
            # members in the zip archive.
            cd_stream = io.BytesIO()

            # Append a new central directory to the end of the zip file.
            backing_store.SeekWrite(0, aff4.SEEK_END)

            # The real start of the ECD.
            ecd_real_offset = backing_store.TellWrite()

            total_entries = len(self.members)
            for urn, zip_info in list(self.members.items()):
                LOGGER.info("Writing CD entry for %s", urn)
                zip_info.WriteCDFileHeader(cd_stream)

            offset_of_end_cd = (cd_stream.tell() + ecd_real_offset -
                                self.global_offset)
            size_of_cd = cd_stream.tell()
            offset_of_cd = offset_of_end_cd - size_of_cd
            urn_string = self.urn.SerializeToString()

            # the following is included for debugging the zip implementation.
            # for small zip files, enable output to non-zip64 containers
            # NOT TO BE USED IN PRODUCTION
            if (not ZIP_DEBUG or offset_of_cd > ZIP32_MAX_SIZE or
                    size_of_cd > ZIP32_MAX_SIZE or total_entries > 0xffff):
                # only write zip64 headers if needed
                locator = Zip64CDLocator(offset_of_end_cd=(offset_of_end_cd))

                end_cd = Zip64EndCD(size_of_header=Zip64EndCD.sizeof() - 12,
                                    number_of_entries_in_volume=total_entries,
                                    total_entries_in_cd=total_entries,
                                    size_of_cd=size_of_cd,
                                    offset_of_cd=offset_of_cd)

                LOGGER.info("Writing Zip64EndCD at %#x",
                            cd_stream.tell() + ecd_real_offset)
                cd_stream.write(end_cd.Pack())
                cd_stream.write(locator.Pack())

            end = EndCentralDirectory(
                total_entries_in_cd_on_disk=total_entries,
                total_entries_in_cd=total_entries,
                comment_len=len(urn_string),
                offset_of_cd=offset_of_cd,
                size_of_cd=size_of_cd)

            if size_of_cd > ZIP32_MAX_SIZE or not ZIP_DEBUG:
                end.size_of_cd = 0xffffffff

            if offset_of_end_cd > ZIP32_MAX_SIZE or not ZIP_DEBUG:
                end.offset_of_cd = 0xffffffff

            if total_entries > 0xffff:
                end.total_entries_in_cd_on_disk = 0xffff
                end.total_entries_in_cd = 0xffff

            LOGGER.info("Writing ECD at %#x",
                        cd_stream.tell() + ecd_real_offset)

            cd_stream.write(end.Pack())
            cd_stream.write(utils.SmartStr(urn_string))

            # Now copy the cd_stream into the backing_store in one write
            # operation.
            backing_store.write(cd_stream.getvalue())
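
The offset arithmetic can be traced with a small worked example (numbers illustrative only, and assuming global_offset is 0):

    # Suppose the backing store ends at 0x1000, so ecd_real_offset = 0x1000.
    # After writing two CD entries of 0x50 bytes each into cd_stream:
    #   size_of_cd       = cd_stream.tell()    = 0xA0
    #   offset_of_end_cd = 0xA0 + 0x1000 - 0   = 0x10A0
    #   offset_of_cd     = 0x10A0 - 0xA0       = 0x1000
    # i.e. the central directory begins exactly where the existing
    # archive data ends, which is where cd_stream is later copied.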
Code Example #15
    def QuerySubject(self, subject_regex=None):
        # Only compile when a pattern was actually supplied; the check
        # below already allows for a None regex.
        if subject_regex is not None:
            subject_regex = re.compile(utils.SmartStr(subject_regex))
        for subject in self.store:
            if subject_regex is not None and subject_regex.match(subject):
                yield rdfvalue.URN().UnSerializeFromString(subject)
Code Example #16
    def __req__(self, other):
        # Note: __req__ is not a standard Python special method; it is
        # only invoked explicitly by callers.
        return utils.SmartStr(self) == utils.SmartStr(other)
Code Example #17
    def SerializeToString(self):
        components = self.Parse()
        return utils.SmartStr(urllib.parse.urlunparse(components))
Code Example #18
    def __eq__(self, other):
        if isinstance(other, RDFHash):
            if self.datatype == other.datatype:
                return self.value == other.value
        return utils.SmartStr(self.value) == utils.SmartStr(other)
Code Example #19
    def SerializeToString(self):
        return utils.SmartStr(self.value)
Code Example #20
    def SelectSubjectsByPrefix(self, prefix):
        # Keys are bytes.
        prefix = utils.SmartStr(prefix)
        for subject in self.store:
            if subject.startswith(prefix):
                yield rdfvalue.URN().UnSerializeFromString(subject)
Code Example #21
    def QueryPredicatesBySubject(self, subject):
        subject = utils.SmartStr(subject)
        for pred, value in list(self.store.get(subject, {}).items()):
            yield (rdfvalue.URN().UnSerializeFromString(pred), value)
Code Example #22
    def DumpToTurtle(self, zipcontainer):
        infoARN = escaping.urn_from_member_name(u"information.turtle", zipcontainer.urn, zipcontainer.version)
        mode = self.GetUnique(lexicon.transient_graph, zipcontainer.backing_store_urn, lexicon.AFF4_STREAM_WRITE_MODE)
        if mode == "random":
            # Random mode is used for appending to encrypted streams, where
            # the stream size changes. Snapshot mode would leave multiple
            # versions of the stream object mashed together, with no way to
            # tell which is the most recent, so keep only the latest.
            turtle_append_mode = "latest"
        else:
            # In append mode, we assume that each time we append, we are
            # adding to the container rather than modifying any existing
            # objects in it. Because of this, we get to save multiple
            # independent copies of the turtle from each run and join them
            # together as text for efficiency.
            turtle_append_mode = "snapshot"

        if not zipcontainer.ContainsMember(infoARN):
            with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                turtle_segment.compression_method = ZIP_STORED

                result = self._DumpToTurtle(zipcontainer.urn)
                turtle_segment.write(utils.SmartStr(result))
                turtle_segment.Flush()
            turtle_segment.Close()
        else:
            # append to an existing container
            self.invalidateCachedMetadata(zipcontainer)
            if turtle_append_mode == "latest":
                zipcontainer.RemoveMember(infoARN)
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_STORED

                    result = self._DumpToTurtle(zipcontainer.urn)
                    turtle_segment.write(utils.SmartStr(result))
                    turtle_segment.Flush()
                turtle_segment.Close()
                return

            explodedTurtleDirectivesARN = escaping.urn_from_member_name(u"information.turtle/directives", zipcontainer.urn, zipcontainer.version)
            if not zipcontainer.ContainsMember(explodedTurtleDirectivesARN):
                # this is the first append operation. Create the chunked turtle structures
                with zipcontainer.OpenZipSegment(u"information.turtle") as turtle_segment:
                    currentTurtleBytes = streams.ReadAll(turtle_segment)
                    currentturtle = utils.SmartUnicode(currentTurtleBytes)
                    #hexdump.hexdump(currentTurtleBytes)
                    (directives_txt, triples_txt) = turtle.toDirectivesAndTripes(currentturtle)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()
                    with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % 0) as turtle_chunk_segment:
                        turtle_chunk_segment.compression_method = ZIP_DEFLATE
                        turtle_chunk_segment.write(utils.SmartStr(triples_txt))
                        turtle_chunk_segment.Flush()
                    self.Close(turtle_chunk_segment)
                turtle_segment.Close()

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)
                if len(directives_difference) != 0:
                    directives_txt = directives_txt + u"\r\n" + directives_difference
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, 1))
                with zipcontainer.CreateMember(current_turtle_chunk_arn) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                self.Close(turtle_segment)

                zipcontainer.RemoveSegment(u"information.turtle")
                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_STORED
                    turtle_segment.write(utils.SmartStr(directives_txt + "\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, turtleContainerIndex))

                        if zipcontainer.ContainsMember(current_turtle_chunk_arn):
                            with zipcontainer.OpenMember(current_turtle_chunk_arn) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1

                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
            else:
                # more than one append has already occurred
                turtleContainerIndex = 0
                while True:
                    turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                                zipcontainer.urn, zipcontainer.version)
                    if not zipcontainer.ContainsMember(turtleARN):
                        break
                    turtleContainerIndex = turtleContainerIndex + 1

                with zipcontainer.OpenZipSegment(u"information.turtle/directives") as directives_segment:
                    directives_txt = utils.SmartUnicode(streams.ReadAll(directives_segment))

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)

                if len(directives_difference) > 0:
                    directives_txt = directives_txt + u"\r\n" + u"\r\n".join(directives_difference)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % turtleContainerIndex) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                turtle_segment.Close()

                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + u"\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                  zipcontainer.urn, zipcontainer.version)
                        if zipcontainer.ContainsMember(turtleARN):
                            with zipcontainer.OpenZipSegment(
                                u"information.turtle/%08d" % turtleContainerIndex) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1
                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
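
After several appends, the resulting member layout looks like this (sketch derived from the segment names used above):

    information.turtle              # directives + all chunks joined as text
    information.turtle/directives   # accumulated @prefix directives
    information.turtle/00000000     # triples from the initial write
    information.turtle/00000001     # triples from the first append
    ...                             # one numbered chunk per append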
Code Example #23
File: data_store.py  Project: yazici/pyaff4
    def DumpToTurtle(self, zipcontainer):
        infoARN = escaping.urn_from_member_name(u"information.turtle", zipcontainer.urn, zipcontainer.version)
        if not zipcontainer.ContainsMember(infoARN):
            with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                turtle_segment.compression_method = ZIP_DEFLATE

                result = self._DumpToTurtle(zipcontainer.urn)
                turtle_segment.write(utils.SmartStr(result))
                turtle_segment.Flush()
            turtle_segment.Close()
        else:
            # append to an existing container
            self.invalidateCachedMetadata(zipcontainer)
            explodedTurtleDirectivesARN = escaping.urn_from_member_name(u"information.turtle/directives", zipcontainer.urn, zipcontainer.version)
            if not zipcontainer.ContainsMember(explodedTurtleDirectivesARN):
                # this is the first append operation. Create the chunked turtle structures
                with zipcontainer.OpenZipSegment(u"information.turtle") as turtle_segment:
                    currentturtle = utils.SmartUnicode(streams.ReadAll(turtle_segment))
                    (directives_txt, triples_txt) = turtle.toDirectivesAndTripes(currentturtle)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()
                    with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % 0) as turtle_chunk_segment:
                        turtle_chunk_segment.compression_method = ZIP_DEFLATE
                        turtle_chunk_segment.write(utils.SmartStr(triples_txt))
                        turtle_chunk_segment.Flush()
                    self.Close(turtle_chunk_segment)
                turtle_segment.Close()

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)
                if len(directives_difference) != 0:
                    directives_txt = directives_txt + u"\r\n" + directives_difference
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, 1))
                with zipcontainer.CreateMember(current_turtle_chunk_arn) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                self.Close(turtle_segment)

                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + "\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        current_turtle_chunk_arn = rdfvalue.URN(u"%s/information.turtle/%08d" % (zipcontainer.urn, turtleContainerIndex))

                        if zipcontainer.ContainsMember(current_turtle_chunk_arn):
                            with zipcontainer.OpenMember(current_turtle_chunk_arn) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1

                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
            else:
                # more than one append has already occurred
                turtleContainerIndex = 0
                while True:
                    turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                                zipcontainer.urn, zipcontainer.version)
                    if not zipcontainer.ContainsMember(turtleARN):
                        break
                    turtleContainerIndex = turtleContainerIndex + 1

                with zipcontainer.OpenZipSegment(u"information.turtle/directives") as directives_segment:
                    directives_txt = utils.SmartUnicode(streams.ReadAll(directives_segment))

                (current_directives_txt, current_triples_txt) = turtle.toDirectivesAndTripes(utils.SmartUnicode(self._DumpToTurtle(zipcontainer.urn)))
                directives_difference = turtle.difference(directives_txt, current_directives_txt)

                if len(directives_difference) > 0:
                    directives_txt = directives_txt + u"\r\n" + u"\r\n".join(directives_difference)
                    with zipcontainer.CreateZipSegment(u"information.turtle/directives") as directives_segment:
                        directives_segment.compression_method = ZIP_DEFLATE
                        directives_segment.write(utils.SmartStr(directives_txt))
                        directives_segment.Flush()
                    directives_segment.Close()

                with zipcontainer.CreateZipSegment(u"information.turtle/%08d" % turtleContainerIndex) as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(current_triples_txt))
                    turtle_segment.Flush()
                turtle_segment.Close()

                with zipcontainer.CreateZipSegment(u"information.turtle") as turtle_segment:
                    turtle_segment.compression_method = ZIP_DEFLATE
                    turtle_segment.write(utils.SmartStr(directives_txt + u"\r\n\r\n"))

                    turtleContainerIndex = 0
                    while True:
                        turtleARN = escaping.urn_from_member_name(u"information.turtle/%08d" % turtleContainerIndex,
                                                                  zipcontainer.urn, zipcontainer.version)
                        if zipcontainer.ContainsMember(turtleARN):
                            with zipcontainer.OpenZipSegment(
                                u"information.turtle/%08d" % turtleContainerIndex) as turtle_chunk_segment:
                                turtle_chunk_txt = utils.SmartUnicode(streams.ReadAll(turtle_chunk_segment))
                                turtle_segment.write(utils.SmartStr(turtle_chunk_txt + u"\r\n"))
                            turtleContainerIndex += 1
                        else:
                            break
                    turtle_segment.Flush()
                turtle_segment.Close()
Code Example #24
    def setUp(self):
        with data_store.MemoryDataStore() as resolver:
            # Use the AFF4 Standard Lexicon
            self.lexicon = lexicon.standard

            with zip.ZipFile.NewZipFile(
                    resolver, version.aff4v10,
                    rdfvalue.URN.FromFileName(
                        self.stdLinear)) as image_container:
                # there is generally only one Image in a container. Get the underlying Map
                imageURN = next(
                    resolver.QueryPredicateObject(image_container.urn,
                                                  lexicon.AFF4_TYPE,
                                                  self.lexicon.Image))
                datastreams = list(
                    resolver.QuerySubjectPredicate(image_container.urn,
                                                   imageURN,
                                                   self.lexicon.dataStream))
                imageMapURN = datastreams[0]

                # get a reference to the actual bytestream that is the forensic image
                with resolver.AFF4FactoryOpen(imageMapURN) as mapStream:

                    # now that we have a reference to the forensic image, we start building up a new container
                    # to store our new artefacts in

                    # create a second resolver so we don't pollute our metadata with that of the first container
                    with data_store.MemoryDataStore() as resolver2:

                        # create our new container
                        destFileURN = rdfvalue.URN.FromFileName(self.fileName)
                        resolver2.Set(lexicon.transient_graph, destFileURN,
                                      lexicon.AFF4_STREAM_WRITE_MODE,
                                      rdfvalue.XSDString(u"truncate"))
                        with zip.ZipFile.NewZipFile(
                                resolver2, version.aff4v10,
                                destFileURN) as image_container:
                            self.volume_urn = image_container.urn

                            # create a "version.txt" file so readers can tell it is an AFF4 Standard v1.0 container
                            version_urn = self.volume_urn.Append("version.txt")
                            with resolver2.AFF4FactoryOpen(
                                    self.volume_urn) as volume:
                                with volume.CreateMember(
                                        version_urn) as versionFile:
                                    versionFile.Write(
                                        utils.SmartStr(
                                            u"major=1\nminor=0\ntool=pyaff4\n")
                                    )

                            # create a map to represent the byte range we are interested in
                            self.image_urn = self.volume_urn.Append("pdf1")
                            with aff4_map.AFF4Map.NewAFF4Map(
                                    resolver2, self.image_urn,
                                    self.volume_urn) as imageURN:
                                # add the segment that refers to the file in the destination address space
                                # the locations were determined by opening in a forensic tool
                                partitionOffset = 0x10000
                                fileOffset = 0xfc3000
                                diskOffset = partitionOffset + fileOffset
                                fileSize = 629087
                                imageURN.AddRange(0, diskOffset, fileSize,
                                                  mapStream.urn)
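
A hypothetical read-back sketch (assumes the map has been flushed; image_urn and resolver2 are the names from the setup above, and Read is the stream read method):

    # Hypothetical: open the new map and read the extracted bytes.
    with resolver2.AFF4FactoryOpen(self.image_urn) as mapped:
        data = mapped.Read(fileSize)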