def removeOldBackups(self):
    """Remove backups of this set beyond the configured retention count.

    Lists the destination folder (over SFTP for a remote destination,
    locally otherwise), filters entries belonging to this backup set,
    and deletes all but the newest ``OldToKeep`` of them. For local
    incremental rsync backups, the single remaining old backup folder is
    recycled by moving it to the new destination (``self.dest``) so rsync
    can update it in place; an old *archive* cannot serve as an rsync
    base and is removed instead.
    """
    # get listing, local or remote
    if self.options["DestinationType"] == "remote (ssh)":
        client, sftpClient = sftp.connect(
            self.options["RemoteHost"],
            self.options["RemoteUsername"],
            self.options["RemotePassword"],
            self.options["RemotePort"],
        )
        listing = sftpClient.listdir(self.options["RemoteFolder"])
    else:
        listing = os.listdir(encode(self.options["Destination"]))
    # Normalize the unicode strings storing the filenames. This fixes a problem
    # on HFS+ filesystems where supplying a set name on the command line
    # resulted in a different Unicode string than the filename of the set.
    listing = [decode(i, filename=True) for i in listing]
    listing.sort()
    # The prefix is loop-invariant; compute it once instead of once per entry.
    backupPrefix = "%s-%s-" % (_("Backup"), self.config.getSetName())
    oldbackups = [i for i in listing if i.startswith(backupPrefix)]
    # ...And remove them. Reversed so slicing [OldToKeep:] below keeps the
    # entries that sorted last (presumably the newest — names are timestamped).
    oldbackups.reverse()
    if self.options["DestinationType"] == "remote (ssh)":
        try:
            for i in oldbackups[self.options["OldToKeep"] :]:
                remoteBackup = os.path.join(self.options["RemoteFolder"], i)
                self.logger.logmsg(
                    "DEBUG",
                    _("Removing old backup `%(a)s' on %(b)s")
                    % {"a": remoteBackup, "b": self.options["RemoteHost"]},
                )
                sftp.remove(sftpClient, remoteBackup)
        finally:
            # Always release the SSH connection, even if a removal fails;
            # previously a failed sftp.remove() leaked both handles.
            sftpClient.close()
            client.close()
    elif self.options["Engine"] == "rsync" and self.options["Incremental"] and oldbackups:
        # Incremental rsync: delete every old backup except the last entry,
        # which may be reusable as the base of the new incremental backup.
        for i in oldbackups[:-1]:
            self.logger.logmsg("DEBUG", _("Removing old backup `%s'") % i)
            path = os.path.join(self.options["Destination"], i)
            shutil_modded.rmtree(encode(path), onerror=self.onError)
        oldIncrementalBackup = os.path.join(self.options["Destination"], oldbackups[-1])
        if not oldIncrementalBackup.endswith((".tar", ".tar.gz", ".tar.bz2")):
            # oldIncrementalBackup = rsync folder: recycle it in place
            self.logger.logmsg("DEBUG", _("Moving `%s' to `%s'") % (oldIncrementalBackup, self.dest))
            shutil_modded.move(encode(oldIncrementalBackup), encode(self.dest))
        else:
            # source = is not a rsync backup - remove it and start fresh
            self.logger.logmsg("DEBUG", _("`%s' is not an rsync backup - removing.") % oldIncrementalBackup)
            shutil_modded.rmtree(encode(oldIncrementalBackup), onerror=self.onError)
    else:
        for i in oldbackups[self.options["OldToKeep"] :]:
            self.logger.logmsg("DEBUG", _("Removing old backup `%s'") % i)
            path = os.path.join(self.options["Destination"], i)
            shutil_modded.rmtree(encode(path), onerror=self.onError)
def removeOldBackups(self):
    """Get list of old backups and remove them"""
    remote = self.options['DestinationType'] == 'remote (ssh)'
    # Obtain the directory listing, over SFTP or from the local disk
    if remote:
        client, sftpClient = sftp.connect(self.options['RemoteHost'],
                                          self.options['RemoteUsername'],
                                          self.options['RemotePassword'],
                                          self.options['RemotePort'])
        listing = sftpClient.listdir(self.options['RemoteFolder'])
    else:
        listing = os.listdir(encode(self.options['Destination']))
    # Normalize the unicode strings storing the filenames. This fixes a problem
    # on HFS+ filesystems where supplying a set name on the command line
    # resulted in a different Unicode string than the filename of the set.
    listing = sorted(decode(entry, filename=True) for entry in listing)
    # Keep only entries that belong to this backup set
    oldbackups = [entry for entry in listing
                  if entry.startswith('%s-%s-' % (_('Backup'), self.config.getSetName()))]
    # Last-sorted entries first
    oldbackups = oldbackups[::-1]
    if remote:
        # Remote destination: delete everything past the retention count
        for entry in oldbackups[self.options['OldToKeep']:]:
            remoteBackup = os.path.join(self.options['RemoteFolder'], entry)
            self.logger.logmsg('DEBUG',
                               _("Removing old backup `%(a)s' on %(b)s")
                               % {'a': remoteBackup, 'b': self.options['RemoteHost']})
            sftp.remove(sftpClient, remoteBackup)
        sftpClient.close()
        client.close()
    elif self.options['Engine'] == 'rsync' and self.options['Incremental'] and oldbackups:
        # Local incremental rsync: remove all but the last remaining entry
        for entry in oldbackups[:-1]:
            self.logger.logmsg('DEBUG', _("Removing old backup `%s'") % entry)
            shutil_modded.rmtree(encode(os.path.join(self.options['Destination'], entry)),
                                 onerror=self.onError)
        oldIncrementalBackup = os.path.join(self.options['Destination'], oldbackups[-1])
        isArchive = (oldIncrementalBackup.endswith('.tar')
                     or oldIncrementalBackup.endswith('.tar.gz')
                     or oldIncrementalBackup.endswith('.tar.bz2'))
        if isArchive:
            # source = is not a rsync backup - remove it and start fresh
            self.logger.logmsg('DEBUG', _("`%s' is not an rsync backup - removing.") % oldIncrementalBackup)
            shutil_modded.rmtree(encode(oldIncrementalBackup), onerror=self.onError)
        else:
            # oldIncrementalBackup = rsync
            self.logger.logmsg('DEBUG', _("Moving `%s' to `%s'") % (oldIncrementalBackup, self.dest))
            shutil_modded.move(encode(oldIncrementalBackup), encode(self.dest))
    else:
        # Local non-incremental: delete everything past the retention count
        for entry in oldbackups[self.options['OldToKeep']:]:
            self.logger.logmsg('DEBUG', _("Removing old backup `%s'") % entry)
            shutil_modded.rmtree(encode(os.path.join(self.options['Destination'], entry)),
                                 onerror=self.onError)
# Remote-restore smoke test: for each engine, build a RestoreConf pointing at
# the matching remote test backup, then run a RestoreOperation against it.
# NOTE(review): this is a script fragment — the loop body continues past
# setPrintToo(True) (presumably operation.start()); not visible here.
options = {}
options["RemoteHost"] = hostname
options["RemotePassword"] = password
options["RemotePort"] = 22
options["RemoteUsername"] = username
options["SourceType"] = "set"
for engine in ["rsync", "tar", "tar.gz", "tar.bz2"]:
    setName = "backup-remote (ssh)-%s" % engine
    print _("*** Running restore of remote test backup %s" % setName)
    RESTOREPATH = os.path.join(TESTDIR, "restore-remote (ssh)-%s.conf" % engine)
    SETPATH = os.path.join(TESTDIR, "%s.conf" % setName)
    setConfig = config.BackupSetConf(SETPATH)
    restoreConfig = config.RestoreConf(RESTOREPATH, create=True)
    remoteFolder = setConfig.get("Options", "RemoteFolder")
    # List the remote folder to find a backup whose name matches this set.
    # NOTE(review): `password` is decoded from base64 for the connection here
    # but stored raw in options["RemotePassword"] above — confirm which form
    # RestoreOperation expects.
    client, sftpClient = sftp.connect(hostname, username, password.decode('base64'), port)
    listing = sftpClient.listdir(remoteFolder)
    sftpClient.close()
    client.close()
    for backupName in listing:
        if backupName.startswith("%s-%s" % (_("Backup"), setName)):
            options["RemoteSource"] = os.path.join(remoteFolder, backupName)
            # Restore into a subdir with backup name
            restoreDestination = os.path.join(DESTDIR_RESTORE, setName)
            os.mkdir(restoreDestination)
            options["Destination"] = restoreDestination
            options["Source"] = os.path.join(restoreDestination, backupName)
            break
    # NOTE(review): if no matching backup was found, options lacks
    # RemoteSource/Destination and the save below persists a stale config.
    restoreConfig.save(options)
    operation = restore.RestoreOperation(RESTOREPATH)
    operation.logger.setPrintToo(True)
# Duplicate copy of the remote-restore test fragment (assumes `options` dict
# already exists). Fills in connection settings, then restores each engine's
# remote test backup. NOTE(review): fragment is truncated — the loop body
# continues past setPrintToo(True); not visible here.
options["RemoteHost"] = hostname
options["RemotePassword"] = password
options["RemotePort"] = 22
options["RemoteUsername"] = username
options["SourceType"] = "set"
for engine in ["rsync", "tar", "tar.gz", "tar.bz2"]:
    setName = "backup-remote (ssh)-%s" % engine
    print _("*** Running restore of remote test backup %s" % setName)
    RESTOREPATH = os.path.join(TESTDIR, "restore-remote (ssh)-%s.conf" % engine)
    SETPATH = os.path.join(TESTDIR, "%s.conf" % setName)
    setConfig = config.BackupSetConf(SETPATH)
    restoreConfig = config.RestoreConf(RESTOREPATH, create=True)
    remoteFolder = setConfig.get("Options", "RemoteFolder")
    # List the remote folder to find a backup whose name matches this set.
    client, sftpClient = sftp.connect(hostname, username, password.decode('base64'), port)
    listing = sftpClient.listdir(remoteFolder)
    sftpClient.close()
    client.close()
    for backupName in listing:
        if backupName.startswith("%s-%s" % (_("Backup"), setName)):
            options["RemoteSource"] = os.path.join(remoteFolder, backupName)
            # Restore into a subdir with backup name
            restoreDestination = os.path.join(DESTDIR_RESTORE, setName)
            os.mkdir(restoreDestination)
            options["Destination"] = restoreDestination
            options["Source"] = os.path.join(restoreDestination, backupName)
            break
    restoreConfig.save(options)
    operation = restore.RestoreOperation(RESTOREPATH)
    operation.logger.setPrintToo(True)
def start(self):
    """Restores a backup"""
    # NOTE(review): this chunk ends at the remote-folder early return; the
    # handling of local (non-remote-folder) sources is not visible here.
    wasErrors = False
    if self.options['RemoteSource']: # check if server settings are OK
        self.logger.logmsg('DEBUG', _('Attempting to connect to server'))
        # Probe the connection in a background thread; poll retval until the
        # thread finishes (retval stays None while it is still running).
        thread = fwbackups.runFuncAsThread(sftp.testConnection,
                    self.options['RemoteHost'], self.options['RemoteUsername'],
                    self.options['RemotePassword'], self.options['RemotePort'],
                    self.options['RemoteSource'])
        while thread.retval == None:
            time.sleep(0.1)
        # Check for errors, if any — map each exception type to a user-facing
        # log message and bail out with False.
        import paramiko
        import socket
        if thread.retval == True:
            pass
        elif type(thread.exception) == IOError:
            self.logger.logmsg('ERROR', _('The restore source was either not ' + \
                'found or it cannot be read due to insufficient permissions.'))
            return False
        elif type(thread.exception) == paramiko.AuthenticationException:
            self.logger.logmsg('ERROR', _('A connection was established, but authentication ' + \
                'failed. Please verify the username and password ' + \
                'and try again.'))
            return False
        elif type(thread.exception) == socket.gaierror or type(thread.exception) == socket.error:
            self.logger.logmsg('ERROR', _('A connection to the server could not be established.\n' + \
                'Error %(a)s: %(b)s' % {'a': type(thread.exception), 'b': str(thread.exception)} + \
                '\nPlease verify your settings and try again.'))
            return False
        elif type(thread.exception) == socket.timeout:
            self.logger.logmsg('ERROR', _('A connection to the server has timed out. ' + \
                'Please verify your settings and try again.'))
            return False
        elif type(thread.exception) == paramiko.SSHException:
            self.logger.logmsg('ERROR', _('A connection to the server could not be established ' + \
                'because an error occurred: %s' % str(thread.exception) + \
                '\nPlease verify your settings and try again.'))
            return False
    # source types: 'set' 'local archive' 'local folder'
    # 'remote archive (SSH)', 'remote folder (SSH)'
    # We don't want to raise a hard error, that's already in the log.
    # So we settle for a simple return false.
    # don't need source type logic, /destination/ is always a folder
    if not self.prepareDestinationFolder(self.options['Destination']):
        return False
    # Receive files from remote server
    if self.options['RemoteSource']:
        self.logger.logmsg('INFO', _('Receiving files from server'))
        self._status = STATUS_RECEIVING_FROM_REMOTE # receiving files
        try:
            # download file to location where we expect source to be
            client, sftpClient = sftp.connect(
                self.options['RemoteHost'], self.options['RemoteUsername'],
                self.options['RemotePassword'], self.options['RemotePort'])
            retval = sftp.receive(sftpClient, self.options['RemoteSource'],
                                  self.options['Destination'])
            # This is used later to terminate the restore operation early
            remoteSourceIsFolder = sftp.isFolder(sftpClient, self.options['RemoteSource'])
            sftpClient.close()
            client.close()
        except Exception, error:
            self.logger.logmsg(
                'ERROR', _('Could not receive file from server: %s' % error))
            wasErrors = True
        # Relies on short-circuit evaluation: when the try block failed,
        # wasErrors is True so the possibly-unset remoteSourceIsFolder is
        # never evaluated.
        if wasErrors or remoteSourceIsFolder:
            # We are dealing with a remote folder - our job here is done
            # Either that or an error was encountered above
            self.logger.logmsg('INFO', _('Finished restore operation'))
            return not wasErrors
errors += sub.stderr.readline() except IOError, description: pass self.pids.remove(sub.pid) retval = sub.poll() self.logger.logmsg('DEBUG', _('Subprocess with PID %(a)s exited with status %(b)s' % {'a': sub.pid, 'b': retval})) # Something wrong? if retval != EXIT_STATUS_OK and retval != 2: wasAnError = True self.logger.logmsg('ERROR', 'An error occurred while backing up path \'%s\'.\nPlease check the error output below to determine if any files are incomplete or missing.' % str(i)) self.logger.logmsg('ERROR', _('Process exited with status %(a)s. Errors: %(b)s' % {'a': retval, 'b': ''.join(errors)})) elif self.options['Engine'] == 'rsync': # in this case, self.{folderdest,dest} both need to be created if self.options['DestinationType'] == 'remote (ssh)': client, sftpClient = sftp.connect(self.options['RemoteHost'], self.options['RemoteUsername'], self.options['RemotePassword'], self.options['RemotePort']) if not wasAnError: for i in paths: if self.toCancel: # Check if we need to cancel in between paths # If so, break and close the SFTP session # Immediately after, self.ifCancel() is run. break self._current += 1 self.logger.logmsg('DEBUG', _('Backing up path %(a)i/%(b)i: %(c)s') % {'a': self._current, 'b': self._total, 'c': i}) if not os.path.exists(encode(i)): self.logger.logmsg('WARNING', _("Path %s is missing or cannot be read and will be excluded from the backup.") % i) sftp.put(sftpClient, encode(i), encode(os.path.normpath(self.options['RemoteFolder']+os.sep+os.path.basename(self.dest)+os.sep+os.path.dirname(i))), symlinks=not self.options['FollowLinks'], excludes=encode(self.options['Excludes'].split('\n'))) sftpClient.close() client.close() else: # destination is local
def start(self):
    """Restores a backup"""
    # Duplicate copy of RestoreOperation.start. NOTE(review): chunk ends at
    # the remote-folder early return; local-source handling is not visible.
    wasErrors = False
    if self.options['RemoteSource']: # check if server settings are OK
        self.logger.logmsg('DEBUG', _('Attempting to connect to server'))
        # Probe the connection in a background thread; poll retval until the
        # thread finishes (retval stays None while it is still running).
        thread = fwbackups.runFuncAsThread(sftp.testConnection,
                    self.options['RemoteHost'], self.options['RemoteUsername'],
                    self.options['RemotePassword'], self.options['RemotePort'],
                    self.options['RemoteSource'])
        while thread.retval == None:
            time.sleep(0.1)
        # Check for errors, if any — map each exception type to a user-facing
        # log message and bail out with False.
        import paramiko
        import socket
        if thread.retval == True:
            pass
        elif type(thread.exception) == IOError:
            self.logger.logmsg('ERROR', _('The restore source was either not ' + \
                'found or it cannot be read due to insufficient permissions.'))
            return False
        elif type(thread.exception) == paramiko.AuthenticationException:
            self.logger.logmsg('ERROR', _('A connection was established, but authentication ' + \
                'failed. Please verify the username and password ' + \
                'and try again.'))
            return False
        elif type(thread.exception) == socket.gaierror or type(thread.exception) == socket.error:
            self.logger.logmsg('ERROR', _('A connection to the server could not be established.\n' + \
                'Error %(a)s: %(b)s' % {'a': type(thread.exception), 'b': str(thread.exception)} + \
                '\nPlease verify your settings and try again.'))
            return False
        elif type(thread.exception) == socket.timeout:
            self.logger.logmsg('ERROR', _('A connection to the server has timed out. ' + \
                'Please verify your settings and try again.'))
            return False
        elif type(thread.exception) == paramiko.SSHException:
            self.logger.logmsg('ERROR', _('A connection to the server could not be established ' + \
                'because an error occurred: %s' % str(thread.exception) + \
                '\nPlease verify your settings and try again.'))
            return False
    # source types: 'set' 'local archive' 'local folder'
    # 'remote archive (SSH)', 'remote folder (SSH)'
    # We don't want to raise a hard error, that's already in the log.
    # So we settle for a simple return false.
    # don't need source type logic, /destination/ is always a folder
    if not self.prepareDestinationFolder(self.options['Destination']):
        return False
    # Receive files from remote server
    if self.options['RemoteSource']:
        self.logger.logmsg('INFO', _('Receiving files from server'))
        self._status = STATUS_RECEIVING_FROM_REMOTE # receiving files
        try:
            # download file to location where we expect source to be
            client, sftpClient = sftp.connect(self.options['RemoteHost'], self.options['RemoteUsername'],
                                              self.options['RemotePassword'], self.options['RemotePort'])
            retval = sftp.receive(sftpClient, self.options['RemoteSource'], self.options['Destination'])
            # This is used later to terminate the restore operation early
            remoteSourceIsFolder = sftp.isFolder(sftpClient, self.options['RemoteSource'])
            sftpClient.close()
            client.close()
        except Exception, error:
            self.logger.logmsg('ERROR', _('Could not receive file from server: %s' % error))
            wasErrors = True
        # Relies on short-circuit evaluation: when the try block failed,
        # wasErrors is True so the possibly-unset remoteSourceIsFolder is
        # never evaluated.
        if wasErrors or remoteSourceIsFolder:
            # We are dealing with a remote folder - our job here is done
            # Either that or an error was encountered above
            self.logger.logmsg('INFO', _('Finished restore operation'))
            return not wasErrors
self.logger.logmsg( "ERROR", "An error occurred while backing up path '%s'.\nPlease check the error output below to determine if any files are incomplete or missing." % str(i), ) self.logger.logmsg( "ERROR", _("Process exited with status %(a)s. Errors: %(b)s" % {"a": retval, "b": "".join(errors)}), ) elif self.options["Engine"] == "rsync": # in this case, self.{folderdest,dest} both need to be created if self.options["DestinationType"] == "remote (ssh)": client, sftpClient = sftp.connect( self.options["RemoteHost"], self.options["RemoteUsername"], self.options["RemotePassword"], self.options["RemotePort"], ) if not wasAnError: for i in paths: if self.toCancel: # Check if we need to cancel in between paths # If so, break and close the SFTP session # Immediately after, self.ifCancel() is run. break self._current += 1 self.logger.logmsg( "DEBUG", _("Backing up path %(a)i/%(b)i: %(c)s") % {"a": self._current, "b": self._total, "c": i}, ) if not os.path.exists(encode(i)):