Example #1
    def backup_data(self, backup_path, manifest_path):
        """Execute backup using rsync algorithm.

        If existing rsync metadata is available, the backup will be
        incremental; otherwise a level 0 backup will be executed.

        :param backup_path: Path to backup
        :param manifest_path: Path to backup metadata
        :return: generator yielding backup data chunks
        """

        LOG.info("Starting RSYNC engine backup data stream")

        file_read_limit = 0
        data_chunk = b''
        LOG.info(
            'Recursively archiving and compressing files from {}'.format(
                os.getcwd()))

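        # Initialize objects for compressing and encrypting data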
        self.compressor = compress.Compressor(self.compression_algo)

        if self.encrypt_pass_file:
            self.cipher = crypt.AESEncrypt(self.encrypt_pass_file)
            data_chunk += self.cipher.generate_header()

        rsync_queue = Queue.Queue(maxsize=2)

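        # Create a thread to compute file signatures and read data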
        t_get_sign_delta = threading.Thread(
            target=self.get_sign_delta,
            args=(
                backup_path, manifest_path, rsync_queue))
        t_get_sign_delta.daemon = True

        t_get_sign_delta.start()

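        # Get backup data from queue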
        while True:
            file_block = rsync_queue.get()

            if file_block is False:
                break
            if len(file_block) == 0:
                continue

            data_chunk += file_block
            file_read_limit += len(file_block)
            if file_read_limit >= self.max_segment_size:
                yield data_chunk
                data_chunk = b''
                file_read_limit = 0

        # Upload the remaining data, which is smaller than max_segment_size
        if len(data_chunk) < self.max_segment_size:
            yield data_chunk

        # Rejoin the signature/delta thread
        t_get_sign_delta.join()
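
A minimal usage sketch for the generator above, assuming a hypothetical engine object exposing this method and a hypothetical upload_segment callable (neither appears in the example); it simply drains the generator and stores each yielded chunk as one backup segment:

def run_backup(engine, backup_path, manifest_path, upload_segment):
    # Hypothetical consumer; every name in this sketch is illustrative only.
    for index, segment in enumerate(
            engine.backup_data(backup_path, manifest_path)):
        # Each yielded chunk holds roughly up to max_segment_size bytes of
        # backup stream data (plus the encryption header, if enabled).
        upload_segment(segment, segment_index=index)
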
Example #2
    def backup_data(self, backup_path, manifest_path):
        """Execute backup using rsync algorithm.

        If existing rsync metadata for a file is available, the backup
        will be incremental; otherwise a level 0 backup will be executed.

        :param backup_path: Path to backup
        :param manifest_path: Path to backup metadata
        """

        LOG.info('Starting Rsync engine backup stream')
        LOG.info('Recursively archiving and compressing files '
                 'from {}'.format(os.getcwd()))

        file_read_limit = 0
        data_chunk = b''
        max_seg_size = self.max_segment_size

        # Initialize objects for compressing and encrypting data
        compressor = compress.Compressor(self.compression_algo)
        cipher = None
        if self.encrypt_pass_file:
            cipher = crypt.AESEncrypt(self.encrypt_pass_file)
            yield cipher.generate_header()

        write_queue = queue.Queue(maxsize=2)

        # Create a thread to compute file signatures and read data
        t_get_sign_delta = threading.Thread(target=self.get_sign_delta,
                                            args=(backup_path, manifest_path,
                                                  write_queue))
        t_get_sign_delta.daemon = True
        t_get_sign_delta.start()

        # Get backup data from queue
        while True:
            file_block = write_queue.get()

            if file_block is False:
                break

            block_len = len(file_block)
            if block_len == 0:
                continue

            data_chunk += file_block
            file_read_limit += block_len
            if file_read_limit >= max_seg_size:
                yield self._process_backup_data(data_chunk, compressor, cipher)
                data_chunk = b''
                file_read_limit = 0

        flushed_data = self._flush_backup_data(data_chunk, compressor, cipher)

        # Upload remaining data smaller than max_seg_size
        if len(flushed_data) < max_seg_size:
            yield flushed_data

        # Rejoin the signature/delta thread
        t_get_sign_delta.join()

        LOG.info("Rsync engine backup stream completed")
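
Unlike Example #1, this variant routes each segment through _process_backup_data and _flush_backup_data before yielding, but those helpers are not shown in the snippet. A rough sketch of what they could look like, assuming the compressor exposes compress()/flush() and the cipher exposes encrypt(); these method names are assumptions, not taken from the example:

    def _process_backup_data(self, data, compressor, cipher):
        # Sketch: compress one block and encrypt it when a cipher is set.
        data = compressor.compress(data)
        if cipher:
            data = cipher.encrypt(data)
        return data

    def _flush_backup_data(self, data, compressor, cipher):
        # Sketch: compress any leftover data, then flush the compressor.
        flushed = compressor.compress(data) if data else b''
        flushed += compressor.flush()
        if cipher and flushed:
            flushed = cipher.encrypt(flushed)
        return flushed

Splitting the work this way keeps the streaming loop free of compression details and ensures the compressor's buffered tail is emitted exactly once at the end of the stream.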