Exemplo n.º 1
0
 def updateMyself(self, original, updated, metadata_offset, dry_run=False, debuggy=False):
     """
     Write this entry's data into *updated*, patching the entry's offset field.

     :param original: source .pac file object (read when re-copying unchanged data)
     :param updated: destination file object, positioned where this entry's data begins
     :param metadata_offset: base subtracted from the absolute offset before it is written back
     :param dry_run: when True, seek around but perform no actual writes
     :param debuggy: forwarded to the compressor for verbose output
     """
     def writeback_offset():
         # The data offset was unknown when the metadata was emitted, so
         # rewind to the reserved slot, patch it, then restore the position.
         begin_data_pos = updated.tell()
         self.offset = begin_data_pos
         updated.seek(self.offset_writeback_location, 0)
         if not dry_run:
             updated.write(struct.pack("I", self.offset - metadata_offset))
         updated.seek(begin_data_pos, 0)

     # Both branches must fix up the metadata first: even for unchanged files
     # the offset may have moved because files in between changed size.
     writeback_offset()

     if self.import_from != "":
         # New/replaced file: compress it (or raw-copy it) into the .pac.
         if self.compressed:
             print("Compressing %s (%d chunks) ..." % (self.name, self.compr_object.chunk_num), end="\r")
             self.comp_size = self.compr_object.compress(self.import_from, updated, dry_run=dry_run, debuggy=debuggy)
             print("Compressed %s : %d -> %d (%f)" %
                   (self.name, self.size, self.comp_size, ((self.size - self.comp_size)*100) / self.size))
         else:  # uncompressed file: copy it byte by byte
             if not dry_run:
                 with open(self.import_from, "rb") as importfile:
                     datamover.dd(importfile, updated, 0, self.size)
     else:
         # Unchanged file: copy its bytes back from the original archive.
         if not dry_run:
             actual_len = self.comp_size if self.compressed else self.size
             datamover.dd(original, updated, self.origin_offset, actual_len)
Exemplo n.º 2
0
    def write_out_data(self,
                       original,
                       updated,
                       metadata_offset,
                       dry_run=False,
                       debuggy=False):
        """
        Write a copy of the data to a file.

        :param original: source .pac file object (read when re-copying unchanged data)
        :param updated: destination file object, positioned where this entry's data begins
        :param metadata_offset: base subtracted from the absolute offset before it is written back
        :param dry_run: when True, seek around but perform no actual writes
        :param debuggy: forwarded to the compressor for verbose output
        :return: None
        """
        def writeback_offset():
            # The data offset was unknown when the metadata was emitted, so
            # rewind to the reserved slot, patch it, then restore the position.
            begin_data_pos = updated.tell()
            self.offset = begin_data_pos
            updated.seek(self.offset_writeback_location, 0)
            if not dry_run:
                updated.write(struct.pack("I", self.offset - metadata_offset))
            updated.seek(begin_data_pos, 0)

        if self.import_from != "":
            # New file: compress it (or raw-copy it) into the .pac. The offset
            # was not known in advance, so step back and fix the metadata first.
            writeback_offset()

            if self.compressed:
                print("Compressing %s (%d chunks) ..." %
                      (self.name, self.compr_object.chunk_num),
                      end="\r")
                self.comp_size = self.compr_object.compress(self.import_from,
                                                            updated,
                                                            dry_run=dry_run,
                                                            debuggy=debuggy)
                print("Compressed %s : %d -> %d (%f)" %
                      (self.name, self.size, self.comp_size,
                       ((self.size - self.comp_size) * 100) / self.size))
            else:  # uncompressed file: copy it byte by byte
                if not dry_run:
                    with open(self.import_from, "rb") as importfile:
                        datamover.dd(importfile, updated, 0, self.size)
        else:
            # Unchanged file: write it back from the original archive. The
            # offset is rewritten anyway, since the size of some file in
            # between may have changed.
            writeback_offset()

            if not dry_run:
                actual_len = self.size
                if self.compressed:
                    actual_len = self.comp_size
                datamover.dd(original, updated, self.origin_offset, actual_len)
Exemplo n.º 3
0
    def write_out_data(self, original, updated, metadata_offset, dry_run=False, debuggy=False):
        """
        Emit this entry's payload into *updated*, fixing up its metadata offset.

        :param original: file object holding the original archive data
        :param updated: file object for the archive being assembled
        :param metadata_offset: value subtracted from the absolute data offset
        :param dry_run: skip every write when True
        :param debuggy: forwarded to the compressor
        :return: None
        """
        def patch_offset_slot():
            # Jump back to the reserved metadata slot, store the (relative)
            # data offset there, and return to where the data will start.
            data_start = updated.tell()
            self.offset = data_start
            updated.seek(self.offset_writeback_location, 0)
            if not dry_run:
                updated.write(struct.pack("I", self.offset - metadata_offset))
            updated.seek(data_start, 0)

        if self.import_from == "":
            # No replacement source: re-copy the payload from the original
            # archive. The offset is patched regardless, because files before
            # this one may have changed size.
            patch_offset_slot()
            if not dry_run:
                actual_len = self.comp_size if self.compressed else self.size
                datamover.dd(original, updated, self.origin_offset, actual_len)
            return

        # A replacement file was supplied: pack it into the .pac. Its final
        # position was unknown when the metadata went out, so patch it now.
        patch_offset_slot()
        if self.compressed:
            print("Compressing %s (%d chunks) ..." % (self.name, self.compr_object.chunk_num), end="\r")
            self.comp_size = self.compr_object.compress(self.import_from, updated, dry_run=dry_run, debuggy=debuggy)
            print("Compressed %s : %d -> %d (%f)" %
                  (self.name, self.size, self.comp_size, ((self.size - self.comp_size)*100) / self.size))
        elif not dry_run:  # uncompressed file: copy it byte by byte
            with open(self.import_from, "rb") as importfile:
                datamover.dd(importfile, updated, 0, self.size)
Exemplo n.º 4
0
 def _dump2file(self, fromfile, tofile):
     # Copy this entry's payload verbatim; use the compressed length when
     # the entry is stored compressed.
     length = self.comp_size if self.compressed else self.size
     datamover.dd(fromfile, tofile, self.offset, length)
Exemplo n.º 5
0
 def _dump2file(self, fromfile, tofile):
     """Copy this entry's raw bytes from *fromfile* into *tofile*."""
     if self.compressed:
         # Compressed entries occupy comp_size bytes on disk, not size.
         length = self.comp_size
     else:
         length = self.size
     datamover.dd(fromfile, tofile, self.offset, length)