Example #1
File: engine.py Project: popawu/freezer
    def backup(self, backup_path, backup, queue_size=2):
        """
        Here we know the location of all interesting artifacts like metadata.
        Should return a stream for storing data.
        :return: stream
        """
        manifest = backup.storage.download_meta_file(backup)
        input_queue = streaming.RichQueue(queue_size)

        read_except_queue = queue.Queue()
        write_except_queue = queue.Queue()

        read_stream = streaming.QueuedThread(
            self.backup_stream,
            input_queue,
            read_except_queue,
            kwargs={"backup_path": backup_path,
                    "manifest_path": manifest})

        write_stream = streaming.QueuedThread(
            backup.storage.write_backup,
            input_queue,
            write_except_queue,
            kwargs={"backup": backup})

        read_stream.daemon = True
        write_stream.daemon = True

        read_stream.start()
        write_stream.start()

        read_stream.join()
        write_stream.join()

        # queue handling is different from SimpleQueue handling.
        def handle_except_queue(except_queue):
            if not except_queue.empty():
                while not except_queue.empty():
                    e = except_queue.get_nowait()
                    logging.critical('Engine error: {0}'.format(e))
                return True
            else:
                return False

        got_exception = None
        got_exception = (handle_except_queue(read_except_queue) or
                         got_exception)
        got_exception = (handle_except_queue(write_except_queue) or
                         got_exception)

        if got_exception:
            raise EngineException("Engine error. Failed to backup.")

        self.post_backup(backup, manifest)
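
Example #1 wires up a two-thread pipeline: backup_stream produces chunks into a bounded RichQueue, storage.write_backup consumes them, and each QueuedThread reports failures through its own exception queue, which is inspected only after both threads have joined. The stand-alone sketch below shows the same shape using nothing but the standard library; run_pipeline, produce and consume are illustrative names rather than freezer APIs, and the drain-on-error loop is a simplified stand-in for the early-termination handling the real RichQueue/QueuedThread pair provides.

import logging
import queue
import threading


def run_pipeline(produce, consume, queue_size=2):
    # produce: a callable returning an iterable of chunks.
    # consume: a callable that persists one chunk.
    data_queue = queue.Queue(maxsize=queue_size)   # bounded, like RichQueue(queue_size)
    errors = queue.Queue()                         # plays the role of the except queues
    end = object()                                 # sentinel marking end of stream

    def reader():
        try:
            for chunk in produce():
                data_queue.put(chunk)
        except Exception as e:
            errors.put(e)
        finally:
            data_queue.put(end)

    def writer():
        try:
            while True:
                chunk = data_queue.get()
                if chunk is end:
                    break
                consume(chunk)
        except Exception as e:
            errors.put(e)
            # keep draining so the reader never blocks on a full queue
            while data_queue.get() is not end:
                pass

    threads = [threading.Thread(target=reader, daemon=True),
               threading.Thread(target=writer, daemon=True)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    # only after both threads have joined are the errors inspected,
    # mirroring handle_except_queue() above
    got_exception = False
    while not errors.empty():
        logging.critical('Engine error: {0}'.format(errors.get_nowait()))
        got_exception = True
    if got_exception:
        raise RuntimeError("Engine error. Failed to backup.")

For instance, run_pipeline(lambda: iter([b'a', b'b']), some_writer) streams the two chunks through the bounded queue and raises only after both threads have finished and the error queue has been drained.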
Example #2
    def write_backup(self, rich_queue, backup):
        output_queues = [streaming.RichQueue() for x in self.storages]
        except_queues = [queue.Queue() for x in self.storages]
        threads = ([streaming.QueuedThread(storage.write_backup, output_queue,
                    except_queue, kwargs={"backup": backup}) for
                    storage, output_queue, except_queue in
                    zip(self.storages, output_queues, except_queues)])

        for thread in threads:
            thread.daemon = True
            thread.start()

        StorageManager(rich_queue, output_queues).transmit()

        for thread in threads:
            thread.join()

        def handle_exception_queue(except_queue):
            if not except_queue.empty():
                while not except_queue.empty():
                    e = except_queue.get_nowait()
                    LOG.critical('Storage error: {0}'.format(e))
                return True
            else:
                return False

        got_exception = None
        for except_queue in except_queues:
            got_exception = (handle_exception_queue(except_queue) or
                             got_exception)

        if got_exception:
            raise exceptions.StorageException(
                "Storage error. Failed to backup.")
Example #3
    def backup(self, backup_path, backup, queue_size=2):
        """
        Here we know the location of all interesting artifacts like metadata.
        Should return a stream for storing data.
        :return: stream
        """
        manifest = backup.storage.download_meta_file(backup)
        input_queue = streaming.RichQueue(queue_size)
        read_stream = streaming.QueuedThread(
            self.backup_stream, input_queue,
            kwargs={"backup_path": backup_path, "manifest_path": manifest})
        write_stream = streaming.QueuedThread(
            backup.storage.write_backup, input_queue,
            kwargs={"backup": backup})
        read_stream.daemon = True
        write_stream.daemon = True
        read_stream.start()
        write_stream.start()
        read_stream.join()
        write_stream.join()
        self.post_backup(backup, manifest)
Example #4
    def write_backup(self, rich_queue, backup):
        output_queues = [streaming.RichQueue() for x in self.storages]
        threads = [
            streaming.QueuedThread(storage.write_backup,
                                   queue,
                                   kwargs={"backup": backup})
            for storage, queue in zip(self.storages, output_queues)
        ]
        for thread in threads:
            thread.daemon = True
            thread.start()
        StorageManager(rich_queue, output_queues).transmit()
        for thread in threads:
            thread.join()
Example #5
File: engine.py Project: szaher/freezer
    def backup(self, backup_resource, hostname_backup_name, no_incremental,
               max_level, always_level, restart_always_level, queue_size=2):
        """
        Here we know the location of all interesting artifacts like metadata.
        Should return a stream for storing data.
        :return: stream
        """
        prev_backup = self.storage.previous_backup(
            engine=self,
            hostname_backup_name=hostname_backup_name,
            no_incremental=no_incremental,
            max_level=max_level,
            always_level=always_level,
            restart_always_level=restart_always_level
        )

        try:
            tmpdir = tempfile.mkdtemp()
        except Exception:
            LOG.error("Unable to create a tmp directory")
            raise

        try:
            engine_meta = utils.path_join(tmpdir, "engine_meta")
            freezer_meta = utils.path_join(tmpdir, "freezer_meta")
            if prev_backup:
                prev_backup.storage.get_file(prev_backup.engine_metadata_path,
                                             engine_meta)
            timestamp = utils.DateTime.now().timestamp
            level_zero_timestamp = (prev_backup.level_zero_timestamp
                                    if prev_backup else timestamp)
            backup = base.Backup(
                engine=self,
                hostname_backup_name=hostname_backup_name,
                level_zero_timestamp=level_zero_timestamp,
                timestamp=timestamp,
                level=(prev_backup.level + 1 if prev_backup else 0)
            )

            input_queue = streaming.RichQueue(queue_size)
            read_except_queue = queue.Queue()
            write_except_queue = queue.Queue()

            read_stream = streaming.QueuedThread(
                self.backup_stream,
                input_queue,
                read_except_queue,
                kwargs={"backup_resource": backup_resource,
                        "manifest_path": engine_meta})

            write_stream = streaming.QueuedThread(
                self.storage.write_backup,
                input_queue,
                write_except_queue,
                kwargs={"backup": backup})

            read_stream.daemon = True
            write_stream.daemon = True
            read_stream.start()
            write_stream.start()
            read_stream.join()
            write_stream.join()

            # queue handling is different from SimpleQueue handling.
            def handle_except_queue(except_queue):
                if not except_queue.empty():
                    while not except_queue.empty():
                        e = except_queue.get_nowait()
                        LOG.critical('Engine error: {0}'.format(e))
                    return True
                else:
                    return False

            got_exception = None
            got_exception = (handle_except_queue(read_except_queue) or
                             got_exception)
            got_exception = (handle_except_queue(write_except_queue) or
                             got_exception)

            if got_exception:
                raise engine_exceptions.EngineException(
                    "Engine error. Failed to backup.")

            with open(freezer_meta, mode='wb') as b_file:
                b_file.write(
                    json.dumps(self.metadata(backup_resource)).encode())
            self.storage.put_metadata(engine_meta, freezer_meta, backup)
        finally:
            shutil.rmtree(tmpdir)
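
Example #5 layers incremental bookkeeping on top of the same pipeline: the previous backup, if any, fixes the new level and the level-zero timestamp, engine and freezer metadata are staged in a temporary directory, and that directory is removed in the finally block whatever happens. A small sketch of just that bookkeeping, using hypothetical helpers (next_backup_level, write_and_upload_metadata) that are not part of freezer:

import json
import os
import shutil
import tempfile


def next_backup_level(prev_backup, now_timestamp):
    # Level 0 with a fresh level-zero timestamp when there is no previous
    # backup; otherwise previous level + 1, keeping the original
    # level-zero timestamp (mirrors the conditionals in example #5).
    if prev_backup is None:
        return 0, now_timestamp
    return prev_backup.level + 1, prev_backup.level_zero_timestamp


def write_and_upload_metadata(metadata, upload):
    # Serialize metadata into a temp directory and hand the file to an
    # upload callable; clean the directory up even if the upload fails.
    tmpdir = tempfile.mkdtemp()
    try:
        path = os.path.join(tmpdir, "freezer_meta")
        with open(path, mode="wb") as b_file:
            b_file.write(json.dumps(metadata).encode())
        upload(path)
    finally:
        shutil.rmtree(tmpdir)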