Example #1
async def get_backup_download(request):
    """ Download back up file by id

    :Example:
        wget -O fledge-backup-1.tar.gz http://localhost:8081/fledge/backup/1/download

    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage_async())
        backup_json = await backup.get_backup_details(backup_id)

        # Strip filename from backup path
        file_name_path = str(backup_json["file_name"]).split('data/backup/')
        file_name = str(file_name_path[1])
        dir_name = _FLEDGE_DATA + '/backup/' if _FLEDGE_DATA else _FLEDGE_ROOT + "/data/backup/"
        source = dir_name + file_name

        # Create tar file
        t = tarfile.open(source + ".tar.gz", "w:gz")
        t.add(source, arcname=os.path.basename(source))
        t.close()

        # Path of tar.gz file
        gz_path = Path(source + ".tar.gz")

    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=(str(ex)))

    return web.FileResponse(path=gz_path)
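
The wget line in the docstring has a direct Python equivalent; a minimal download sketch, assuming backup id 1 and the default port 8081 from the example (aiohttp is used here purely for illustration):

import asyncio
import aiohttp

async def download_backup(backup_id, dest):
    # Stream the .tar.gz produced by get_backup_download into a local file.
    url = 'http://localhost:8081/fledge/backup/{}/download'.format(backup_id)
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            with open(dest, 'wb') as f:
                async for chunk in resp.content.iter_chunked(64 * 1024):
                    f.write(chunk)

asyncio.run(download_backup(1, 'fledge-backup-1.tar.gz'))
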
Example #2
    def __init__(self):

        super().__init__()

        if not self._logger:
            self._logger = logger.setup(self._MODULE_NAME,
                                        destination=_LOGGER_DESTINATION,
                                        level=_LOGGER_LEVEL)

        self._backup = Backup(self._storage_async)
        self._backup_lib = lib.BackupRestoreLib(self._storage_async, self._logger)

        self._job = lib.Job()

        # Creates the object references used by the library
        lib._logger = self._logger
        lib._storage = self._storage_async
Example #3
async def create_backup(request):
    """ Creates a backup

    :Example: curl -X POST http://localhost:8081/fledge/backup
    """
    try:
        backup = Backup(connect.get_storage_async())
        status = await backup.create_backup()
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({"status": status})
Example #4
async def get_backups(request):
    """ Returns a list of all backups

    :Example: curl -X GET http://localhost:8081/fledge/backup
    :Example: curl -X GET http://localhost:8081/fledge/backup?limit=2&skip=1&status=completed
    """
    limit = __DEFAULT_LIMIT
    if 'limit' in request.query and request.query['limit'] != '':
        try:
            limit = int(request.query['limit'])
            if limit < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Limit must be a positive integer")

    skip = __DEFAULT_OFFSET
    if 'skip' in request.query and request.query['skip'] != '':
        try:
            skip = int(request.query['skip'])
            if skip < 0:
                raise ValueError
        except ValueError:
            raise web.HTTPBadRequest(reason="Skip/Offset must be a positive integer")

    status = None
    if 'status' in request.query and request.query['status'] != '':
        try:
            status = Status[request.query['status'].upper()].value
        except KeyError as ex:
            raise web.HTTPBadRequest(reason="{} is not a valid status".format(ex))
    try:
        backup = Backup(connect.get_storage_async())
        backup_json = await backup.get_all_backups(limit=limit, skip=skip, status=status)

        res = []
        for row in backup_json:
            r = OrderedDict()
            r["id"] = row["id"]
            r["date"] = row["ts"]
            r["status"] = _get_status(int(row["status"]))
            res.append(r)

    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))

    return web.json_response({"backups": res})
Example #5
async def delete_backup(request):
    """ Delete a backup

    :Example: curl -X DELETE http://localhost:8081/fledge/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage_async())
        await backup.delete_backup(backup_id)
        return web.json_response({'message': "Backup deleted successfully"})
    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=str(ex))
Example #6
async def get_backup_details(request):
    """ Returns the details of a backup

    :Example: curl -X GET http://localhost:8081/fledge/backup/1
    """
    backup_id = request.match_info.get('backup_id', None)
    try:
        backup_id = int(backup_id)
        backup = Backup(connect.get_storage_async())
        backup_json = await backup.get_backup_details(backup_id)

        resp = {"status": _get_status(int(backup_json["status"])),
                'id': backup_json["id"],
                'date': backup_json["ts"]
                }

    except ValueError:
        raise web.HTTPBadRequest(reason='Invalid backup id')
    except exceptions.DoesNotExist:
        raise web.HTTPNotFound(reason='Backup id {} does not exist'.format(backup_id))
    except Exception as ex:
        raise web.HTTPInternalServerError(reason=(str(ex)))

    return web.json_response(resp)
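
All five handlers are plain aiohttp coroutines, so they only become reachable once they are registered on an application router. A minimal wiring sketch, assuming the handlers above live in an importable module named backup_api (the module name and the standalone runner are illustrative; in Fledge these routes are served by the core REST API on port 8081, as the curl examples suggest):

from aiohttp import web

import backup_api  # hypothetical module containing the handlers shown above

app = web.Application()
# The paths mirror the URLs used in the docstring examples.
app.router.add_post('/fledge/backup', backup_api.create_backup)
app.router.add_get('/fledge/backup', backup_api.get_backups)
app.router.add_get('/fledge/backup/{backup_id}', backup_api.get_backup_details)
app.router.add_delete('/fledge/backup/{backup_id}', backup_api.delete_backup)
app.router.add_get('/fledge/backup/{backup_id}/download', backup_api.get_backup_download)

web.run_app(app, port=8081)
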
Example #7
class BackupProcess(FledgeProcess):
    """ Backups the entire Fledge repository into a file in the local filesystem,
        it executes a full warm backup
    """

    _MODULE_NAME = "fledge_backup_sqlite_process"

    _BACKUP_FILE_NAME_PREFIX = "fledge_backup_"
    """ Prefix used to generate a backup file name """

    _MESSAGES_LIST = {

        # Information messages
        "i000001": "Execution started.",
        "i000002": "Execution completed.",

        # Warning / Error messages
        "e000000": "general error",
        "e000001": "cannot initialize the logger - error details |{0}|",
        "e000002": "cannot retrieve the configuration from the manager, trying retrieving from file "
                   "- error details |{0}|",
        "e000003": "cannot retrieve the configuration from file - error details |{0}|",
        "e000004": "...",
        "e000005": "...",
        "e000006": "...",
        "e000007": "backup failed.",
        "e000008": "cannot execute the backup, either a backup or a restore is already running - pid |{0}|",
        "e000009": "...",
        "e000010": "directory used to store backups doesn't exist - dir |{0}|",
        "e000011": "directory used to store semaphores for backup/restore synchronization doesn't exist - dir |{0}|",
        "e000012": "cannot create the configuration cache file, neither FLEDGE_DATA nor FLEDGE_ROOT are defined.",
        "e000013": "cannot create the configuration cache file, provided path is not a directory - dir |{0}|",
        "e000014": "the identified path of backups doesn't exists, creation was tried "
                   "- dir |{0}| - error details |{1}|",
        "e000015": "The command is not available neither using the unmanaged approach"
                   " - command |{0}|",
        "e000019": "The command is not available using the managed approach"
                   " - command |{0}|",

    }
    """ Messages used for Information, Warning and Error notice """

    _logger = None

    def __init__(self):

        super().__init__()

        if not self._logger:
            self._logger = logger.setup(self._MODULE_NAME,
                                        destination=_LOGGER_DESTINATION,
                                        level=_LOGGER_LEVEL)

        self._backup = Backup(self._storage_async)
        self._backup_lib = lib.BackupRestoreLib(self._storage_async, self._logger)

        self._job = lib.Job()

        # Creates the object references used by the library
        lib._logger = self._logger
        lib._storage = self._storage_async

    def _generate_file_name(self):
        """ Generates the file name for the backup operation, it uses hours/minutes/seconds for the file name generation

        Args:
        Returns:
            _backup_file: generated file name
        Raises:
        """

        self._logger.debug("{func}".format(func="_generate_file_name"))

        # Evaluates the parameters
        execution_time = time.strftime("%Y_%m_%d_%H_%M_%S")

        full_file_name = self._backup_lib.dir_backups + "/" + self._BACKUP_FILE_NAME_PREFIX + execution_time
        ext = "db"

        _backup_file = "{file}.{ext}".format(file=full_file_name, ext=ext)

        return _backup_file

    def check_for_execution_backup(self):
        """ Executes all the checks to ensure the prerequisites to execute the backup are met

        Args:
        Returns:
        Raises:
        """

    def init(self):
        """ Setups the correct state for the execution of the backup

        Args:
        Returns:
        Raises:
            exceptions.BackupOrRestoreAlreadyRunning
        """

        self._logger.debug("{func}".format(func="init"))

        self._backup_lib.evaluate_paths()

        self._backup_lib.retrieve_configuration()

        self.check_for_execution_backup()

        # Checks for backup/restore synchronization
        pid = self._job.is_running()
        if pid == 0:

            # no job is running
            pid = os.getpid()
            self._job.set_as_running(self._backup_lib.JOB_SEM_FILE_BACKUP, pid)

        else:
            _message = self._MESSAGES_LIST["e000008"].format(pid)
            self._logger.warning("{0}".format(_message))

            raise exceptions.BackupOrRestoreAlreadyRunning

    def execute_backup(self):
        """ Executes the backup functionality

        Args:
        Returns:
        Raises:
            exceptions.BackupFailed
        """

        self._logger.debug("{func}".format(func="execute_backup"))

        self._purge_old_backups()

        backup_file = self._generate_file_name()

        self._backup_lib.sl_backup_status_create(backup_file, lib.BackupType.FULL, lib.BackupStatus.RUNNING)

        status, exit_code = self._run_backup_command(backup_file)

        backup_information = self._backup_lib.sl_get_backup_details_from_file_name(backup_file)

        self._backup_lib.sl_backup_status_update(backup_information['id'], status, exit_code)

        audit = AuditLogger(self._storage_async)
        loop = asyncio.get_event_loop()
        if status != lib.BackupStatus.COMPLETED:

            self._logger.error(self._MESSAGES_LIST["e000007"])
            loop.run_until_complete(audit.information('BKEXC', {'status': 'failed'}))
            raise exceptions.BackupFailed
        else:
            loop.run_until_complete(audit.information('BKEXC', {'status': 'completed'}))

    def _purge_old_backups(self):
        """  Deletes old backups in relation at the retention parameter

        Args:
        Returns:
        Raises:
        """

        backups_info = asyncio.get_event_loop().run_until_complete(self._backup.get_all_backups(
                                            self._backup_lib.MAX_NUMBER_OF_BACKUPS_TO_RETRIEVE,
                                            0,
                                            None,
                                            lib.SortOrder.ASC))

        # Evaluates which backup should be deleted
        backups_n = len(backups_info)
        # -1 so at the end of the current backup up to 'retention' backups will be available
        last_to_delete = backups_n - (self._backup_lib.config['retention'] - 1)

        if last_to_delete > 0:

            # Deletes backups
            backups_to_delete = backups_info[:last_to_delete]

            for row in backups_to_delete:
                backup_id = row['id']
                file_name = row['file_name']

                self._logger.debug("{func} - id |{id}| - file_name |{file}|".format(func="_purge_old_backups",
                                                                                    id=backup_id,
                                                                                    file=file_name))
                asyncio.get_event_loop().run_until_complete(self._backup.delete_backup(backup_id))

    def _run_backup_command(self, _backup_file):
        """ Backups the entire Fledge repository into a file in the local file system

        Args:
            _backup_file: backup file to create, as a full path
        Returns:
            _status: status of the backup
            _exit_code: exit status of the operation, 0=Successful
        Raises:
        """

        self._logger.debug("{func} - file_name |{file}|".format(func="_run_backup_command",
                                                                file=_backup_file))

        # Force the checkpoint - WAL mechanism
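        # (a PASSIVE checkpoint moves as much of the WAL as it can into the main
        #  database file without blocking other connections, so the copy taken
        #  below reflects the most recent committed data)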
        cmd = "{sqlite_cmd} {path}/{db} 'PRAGMA wal_checkpoint(PASSIVE);'".format(
            sqlite_cmd=self._backup_lib.SQLITE_SQLITE,
            path=self._backup_lib.dir_fledge_data,
            db=self._backup_lib.config['database-filename']
        )

        # noinspection PyArgumentEqualDefault
        _exit_code, output = lib.exec_wait_retry(cmd,
                                                 output_capture=True,
                                                 exit_code_ok=0,
                                                 max_retry=self._backup_lib.config['max_retry'],
                                                 timeout=self._backup_lib.config['timeout']
                                                 )

        # Prepares the backup command
        cmd = "{sqlite_cmd} {path}/{db} '{backup_cmd} {file}'".format(
                                                sqlite_cmd=self._backup_lib.SQLITE_SQLITE,
                                                path=self._backup_lib.dir_fledge_data,
                                                db=self._backup_lib.config['database-filename'],
                                                backup_cmd=self._backup_lib.SQLITE_BACKUP,
                                                file=_backup_file
        )

        # Executes the backup waiting for the completion and using a retry mechanism
        # noinspection PyArgumentEqualDefault
        _exit_code, output = lib.exec_wait_retry(cmd,
                                                 output_capture=True,
                                                 exit_code_ok=0,
                                                 max_retry=self._backup_lib.config['max_retry'],
                                                 timeout=self._backup_lib.config['timeout']
                                                 )

        if _exit_code == 0:
            _status = lib.BackupStatus.COMPLETED
        else:
            _status = lib.BackupStatus.FAILED

        self._logger.debug("{func} - status |{status}| - exit_code |{exit_code}| "
                           "- cmd |{cmd}|  output |{output}| ".format(
                                                                        func="_run_backup_command",
                                                                        status=_status,
                                                                        exit_code=_exit_code,
                                                                        cmd=cmd,
                                                                        output=output))

        return _status, _exit_code

    def shutdown(self):
        """ Sets the correct state to terminate the execution

        Args:
        Returns:
        Raises:
        """

        self._logger.debug("{func}".format(func="shutdown"))

        self._job.set_as_completed(self._backup_lib.JOB_SEM_FILE_BACKUP)

    def run(self):
        """  Creates a new backup

        Args:
        Returns:
        Raises:
        """

        self.init()

        try:
            self.execute_backup()

        except Exception as _ex:
            _message = "{0} - error details |{1}|".format(self._MESSAGES_LIST["e000007"], _ex)
            self._logger.error(_message)

            self.shutdown()

            raise exceptions.BackupFailed(_message)
        else:
            self.shutdown()
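
BackupProcess only defines the work; something still has to instantiate it and drive the init / execute_backup / shutdown cycle through run(). A minimal launcher sketch, assuming the class is importable and that the FledgeProcess base class can obtain its storage client from the environment (both assumptions, not shown in the excerpt):

if __name__ == "__main__":
    # Hypothetical standalone launcher: build the process object and run the
    # full backup cycle (init -> execute_backup -> shutdown).
    backup_process = BackupProcess()
    backup_process.run()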