def do_backup(self):
    """Transfer all regular files of the local directory into a freshly
    created remote backup directory via SFTP, preserving each file's
    access and modification times.

    In simulation mode (self.simulate) every remote operation is only
    logged, never executed.
    """
    if not self.new_backup_dir:
        # NOTE(review): 'cur_backup_dirs' is not defined anywhere in this
        # scope - this call raises a NameError when triggered. Probably an
        # instance attribute was intended; confirm against
        # _get_new_backup_dir().
        self._get_new_backup_dir(cur_backup_dirs)
    new_backup_dir = str(self.new_backup_dir)
    # rwxr-xr-x for the new remote directory
    dir_mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
    LOG.info(
        "Creating backup directory %r with permissions %04o.",
        new_backup_dir, dir_mode)
    if not self.simulate:
        self.sftp_client.mkdir(new_backup_dir, dir_mode)
    LOG.debug("Changing to local directory %r ...", self.local_dir)
    os.chdir(str(self.local_dir))
    LOG.debug("Changing to remote directory %r ...", new_backup_dir)
    if not self.simulate:
        self.remote_dir = new_backup_dir
    LOG.debug("Remote directory is now %r.", self.remote_dir)
    local_files = self.local_dir.glob('*')
    # Case-insensitive, deterministic upload order.
    for local_file in sorted(local_files, key=lambda l: str(l).lower()):
        if self.verbose > 1:
            LOG.debug("Checking local file %r ...", local_file)
        if not local_file.is_file():
            if self.verbose > 1:
                LOG.debug("%r is not a file, don't backup it.", str(local_file))
            continue
        statinfo = local_file.stat()
        size = statinfo.st_size
        atime = statinfo.st_atime
        mtime = statinfo.st_mtime
        times = (atime, mtime)
        atime_out = datetime.utcfromtimestamp(atime).isoformat(' ')
        mtime_out = datetime.utcfromtimestamp(mtime).isoformat(' ')
        # Correct English pluralisation of "Byte".
        s = '' if size == 1 else 's'
        size_human = bytes2human(size, precision=1)
        remote_file = local_file.name
        LOG.info(
            "Transfering file %r -> %r, size %d Byte%s (%s).",
            str(local_file), remote_file, size, s, size_human)
        if not self.simulate:
            # confirm=True lets paramiko stat() the remote file to verify
            # its size; the returned attributes are not needed here
            # (the previous unused 'attr = ...' binding was dropped).
            self.sftp_client.put(str(local_file), remote_file, confirm=True)
        LOG.debug(
            "Setting atime of %r to %r and mtime to %r.",
            remote_file, atime_out, mtime_out)
        if not self.simulate:
            self.sftp_client.utime(remote_file, times)
def show_disk_usage(self, only_total=False):
    """Log the disk usage of every top-level remote entry plus a total sum.

    @param only_total: suppress the per-entry lines, log only the total
    @type only_total: bool

    @raise SFTPHandlerError: if there is no active SFTP connection.
    """
    if not self.connected:
        msg = "Could not detect disk usage, not connected."
        raise SFTPHandlerError(msg)
    total = 0
    if six.PY2:
        # Keep the running sum in an unbounded integer on Python 2.
        total = long(0)                                         # noqa: F821
    dlist = list(self.sftp_client.listdir())
    total_s = 'Total'
    # Width of the longest entry name, for aligned columns.
    max_len = len(total_s)
    if not only_total:
        for entry in dlist:
            if len(entry) > max_len:
                max_len = len(entry)
    max_len += 2
    LOG.info("Current disk usages:")
    for entry in sorted(dlist, key=str.lower):
        sz = self.disk_usage(entry)
        total += sz
        if not only_total:
            s = '' if sz == 1 else 's'
            b_h = bytes2human(sz, precision=1)
            # BUGFIX: split(None, 1) instead of split(maxsplit=1) -
            # Python 2's str.split() does not accept keyword arguments,
            # and this module explicitly supports six.PY2.
            (val, unit) = b_h.split(None, 1)
            b_h_s = "%6s %s" % (val, unit)
            LOG.info("%-*r %13d Byte%s (%s)", max_len, entry, sz, s, b_h_s)
    s = '' if total == 1 else 's'
    b_h = bytes2human(total, precision=1)
    (val, unit) = b_h.split(None, 1)
    b_h_s = "%6s %s" % (val, unit)
    LOG.info("%-*s %13d Byte%s (%s)", max_len, total_s + ':', total, s, b_h_s)
def put_file(self, local_file, remote_file=None):
    """Upload a local file to the FTP server, retrying on temporary errors.

    @param local_file: path of the local regular file to upload
    @type local_file: str
    @param remote_file: target file name on the server; defaults to the
                        basename of local_file
    @type remote_file: str or None

    @raise FTPHandlerError: if not connected or not logged in.
    @raise FTPPutError: if local_file is not a regular file.
    @raise ftplib.error_temp: if the upload still fails after
                              self.max_stor_attempts tries.
    """
    if not self.ftp or not self.logged_in:
        msg = "Cannot put file %r, not connected or logged in." % (local_file)
        raise FTPHandlerError(msg)
    if not remote_file:
        remote_file = os.path.basename(local_file)
    if not os.path.isfile(local_file):
        raise FTPPutError(local_file, "not a regular file.")
    statinfo = os.stat(local_file)
    size = statinfo.st_size
    s = '' if size == 1 else 's'
    size_human = bytes2human(size, precision=1)
    LOG.info(
        "Transfering file %r -> %r, size %d Byte%s (%s).",
        local_file, remote_file, size, s, size_human)
    if self.simulate:
        return
    cmd = 'STOR %s' % (remote_file)
    with open(local_file, 'rb') as fh:
        try_nr = 0
        while try_nr < self.max_stor_attempts:
            try_nr += 1
            if try_nr >= 2:
                LOG.info("Try %d transferring file %r ...", try_nr, local_file)
            try:
                self.ftp.storbinary(cmd, fh)
                break
            except ftplib.error_temp as e:
                # BUGFIX: give up after self.max_stor_attempts tries instead
                # of the hard-coded 10. With max_stor_attempts < 10 the old
                # loop could exhaust all attempts and fall through without
                # either succeeding or raising - a silent upload failure.
                if try_nr >= self.max_stor_attempts:
                    msg = "Giving up trying to upload %r after %d tries: %s"
                    LOG.error(msg, local_file, try_nr, str(e))
                    raise
                self.handle_error(str(e), e.__class__.__name__, False)
                time.sleep(2)
def test_bytes2human(self):
    """Test bytes2human() from pb_base.common.

    Checks the human readable formatting with the default precision and
    with an explicit precision of two digits, under the 'C' locale.
    """
    log.info("Testing bytes2human() from pb_base.common ...")
    from pb_base.common import bytes2human
    loc = locale.getlocale()    # remember current locale for restoring
    log.debug("Current locale is %r.", loc)
    log.debug("Setting to locale 'C' to be secure.")
    locale.setlocale(locale.LC_ALL, 'C')
    log.debug("Current locale is now %r.", locale.getlocale())
    try:
        # (input in Bytes, expected result with default precision)
        test_pairs_default = (
            (0, '0 Bytes'),
            (1, '1 Byte'),
            (5, '5 Bytes'),
            (5 * 1024, '5 KiB'),
            (1999 * 1024 * 1024, '1999 MiB'),
            (2047 * 1024 * 1024, '2047 MiB'),
            (2048 * 1024 * 1024, '2 GiB'),
            (2304 * 1024 * 1024, '2.25 GiB'),
        )
        for (src, expected) in test_pairs_default:
            if self.verbose > 1:
                log.debug("Testing bytes2human(%r) => %r", src, expected)
            result = bytes2human(src)
            if self.verbose > 1:
                log.debug("Got result: %r", result)
            self.assertIsInstance(result, str)
            self.assertEqual(expected, result)
        # (input in Bytes, expected result with precision=2)
        test_pairs_prec2 = (
            (0, '0 Bytes'),
            (1, '1 Byte'),
            (5, '5 Bytes'),
            (5 * 1024, '5.00 KiB'),
            (1999 * 1024 * 1024, '1999.00 MiB'),
            (2047 * 1024 * 1024, '2047.00 MiB'),
            (2048 * 1024 * 1024, '2.00 GiB'),
            (2304 * 1024 * 1024, '2.25 GiB'),
        )
        for (src, expected) in test_pairs_prec2:
            if self.verbose > 1:
                log.debug("Testing bytes2human(%r) precission 2 => %r", src, expected)
            result = bytes2human(src, precision=2)
            if self.verbose > 1:
                log.debug("Got result: %r", result)
            self.assertIsInstance(result, str)
            self.assertEqual(expected, result)
    finally:
        # ROBUSTNESS: restore the saved locale even if an assertion above
        # failed, so later tests do not inherit the 'C' locale.
        log.debug("Switching back to saved locales %r.", loc)
        locale.setlocale(locale.LC_ALL, loc)
def dump_zeroes(
        self, target, blocksize=(1024 * 1024), seek=0, count=None, force=False):
    """
    Dumping blocks of binary zeroes into the target.

    @raise PbBaseHandlerError: on some error.

    @param target: the file or device name of the target
    @type target: str
    @param blocksize: the blocksize for the dumping action
    @type blocksize: int
    @param seek: skip N blocksize-sized blocks at start of output
    @type seek: int
    @param count: the number of blocks to write, if not given, the zeroes
                  are written, until the device is full
    @type count: int or None
    @param force: don't raise an exception on a full device,
                  even if count is set
    @type force: bool

    @return: success of dumping
    @rtype: bool
    """
    # Local import keeps the method self-contained; errno is stdlib.
    import errno

    log.debug(_(
        "Dumping binary zeroes to %r ..."), target)
    if self.simulate:
        return True

    output_seek = 0
    if seek:
        output_seek = int(seek) * int(blocksize)

    # One reusable block of NUL bytes, written repeatedly below.
    block = to_utf8_or_bust(chr(0) * blocksize)

    target_fh = None
    if self.verbose > 1:
        log.debug(_("Opening %r for write"), target)
    try:
        target_fh = open(target, 'wb', -1)
    except Exception as e:
        error_tuple = sys.exc_info()
        msg = _(
            "%(errname)s opening target %(tgt)r: %(msg)s") % {
            'errname': e.__class__.__name__, 'tgt': target, 'msg': e}
        reraise(PbBaseHandlerError, msg, error_tuple[2])

    if self.verbose > 1:
        log.debug(_("Copying (buffer size %d Bytes)..."), blocksize)

    blocks_written = 0
    try:
        if output_seek:
            msg = _(
                "Seeking %(bytes)d Bytes (%(human)s) in output to %(tgt)r.") % {
                'bytes': output_seek, 'human': bytes2human(output_seek),
                'tgt': target}
            log.debug(msg)
            target_fh.seek(output_seek)
        # Write zero blocks until count is reached or the device is full.
        while True:
            target_fh.write(block)
            blocks_written += 1
            if count and blocks_written >= count:
                break
    except IOError as e:
        # CONSISTENCY FIX: use errno.ENOSPC instead of the magic number 28,
        # exactly as the sibling dump_data() already does.
        if e.errno == errno.ENOSPC:
            if count and not force:
                raise
            else:
                log.debug(_(
                    "No space left on output device %r."), target)
        else:
            raise
    except Exception as e:
        error_tuple = sys.exc_info()
        msg = _(
            "Error dumping binary zeroes to target %(tgt)r: %(msg)s") % {
            'tgt': target, 'msg': e}
        self.handle_error(msg, e.__class__.__name__, True)
        reraise(PbBaseHandlerError, msg, error_tuple[2])
    finally:
        target_fh.close()

    bytes_written = int(blocks_written) * int(blocksize)
    written_human = bytes2human(bytes_written)
    msg = _(
        "%(bytes)d Bytes (%(human)s) written to output device %(tgt)r.") % {
        'bytes': bytes_written, 'human': written_human, 'tgt': target}
    log.debug(msg)

    return True
def dump_data(
        self, source, target, blocksize=(1024*1024), iseek=0, oseek=0,
        raise_on_full=True):
    """
    Dumping the content of source into the target.

    @raise PbBaseHandlerError: on some error.

    @param source: the file or device name of the source
    @type source: str
    @param target: the file or device name of the target
    @type target: str
    @param blocksize: the blocksize for the copying action
    @type blocksize: int
    @param iseek: skip N blocksize-sized blocks at start of input
    @type iseek: int
    @param oseek: skip N blocksize-sized blocks at start of output
    @type oseek: int
    @param raise_on_full: raise an IOError, if the output device is full,
                          else throw only a debug message.
    @type raise_on_full: bool

    @return: success of copying
    @rtype: bool
    """
    msg = _("Dumping data from %(src)r to %(tgt)r ...") % {
        'src': source, 'tgt': target}
    log.debug(msg)
    if self.simulate:
        return True

    input_seek = 0
    if iseek:
        input_seek = int(iseek) * int(blocksize)
    output_seek = 0
    if oseek:
        output_seek = int(oseek) * int(blocksize)

    src_fh = None
    target_fh = None

    if self.verbose > 1:
        log.debug(_("Opening %r for read."), source)
    try:
        src_fh = open(source, 'rb', -1)
    except Exception as e:
        msg = _("Error opening source %(src)r: %(msg)s") % {
            'src': source, 'msg': e}
        error_tuple = sys.exc_info()
        reraise(PbBaseHandlerError, msg, error_tuple[2])

    if self.verbose > 1:
        log.debug(_("Opening %r for write"), target)
    try:
        target_fh = open(target, 'wb', -1)
    except Exception as e:
        error_tuple = sys.exc_info()
        # Don't leak the already opened source handle.
        src_fh.close()
        msg = _("Error opening target %(tgt)r: %(msg)s") % {
            'tgt': target, 'msg': e}
        reraise(PbBaseHandlerError, msg, error_tuple[2])

    if self.verbose > 1:
        log.debug(_(
            "Copying (buffer size %d Bytes)..."), blocksize)

    blocks_written = 0
    try:
        if input_seek:
            log.debug(_(
                "Seeking %(bytes)d Bytes (%(human)s) in input to %(src)r.") % {
                'bytes': input_seek, 'human': bytes2human(input_seek),
                'src': source})
            src_fh.seek(input_seek)
        if output_seek:
            log.debug(_(
                "Seeking %(bytes)d Bytes (%(human)s) in output to %(tgt)r.") % {
                'bytes': output_seek, 'human': bytes2human(output_seek),
                'tgt': target})
            target_fh.seek(output_seek)
        cache = src_fh.read(blocksize)
        # BUGFIX: the old loop condition was "cache != ''". A file opened
        # in 'rb' mode returns bytes, and b'' != '' is always True on
        # Python 3, so the loop never terminated at EOF. Testing the
        # buffer's truthiness works for both bytes and str.
        while cache:
            target_fh.write(cache)
            blocks_written += 1
            cache = src_fh.read(blocksize)
    except IOError as e:
        if e.errno == errno.ENOSPC:
            if raise_on_full:
                raise
            else:
                log.debug(_("No space left on output device %r."), target)
        else:
            raise
    except Exception as e:
        error_tuple = sys.exc_info()
        msg = _("Error copying source %(src)r to target %(tgt)r: %(msg)s") % {
            'src': source, 'tgt': target, 'msg': e}
        reraise(PbBaseHandlerError, msg, error_tuple[2])
    finally:
        src_fh.close()
        target_fh.close()

    bytes_written = int(blocks_written) * int(blocksize)
    written_human = bytes2human(bytes_written)
    log.debug(_(
        "%(bytes)d Bytes (%(human)s) written to output device %(tgt)r.") % {
        'bytes': bytes_written, 'human': written_human, 'tgt': target})

    return True
def _run(self):
    """The underlaying startpoint of the application.

    Performs one full backup cycle against the FTP server:
    collects existing backup directories, chooses a new dated directory
    name, prunes old directories according to the yearly/monthly/weekly/
    daily retention counts in self.copies, uploads all regular files of
    the local directory, and finally logs a disk usage summary.
    """
    if not os.path.isdir(self.local_directory):
        LOG.error("Local directory %r does not exists.", self.local_directory)
        sys.exit(5)
    # Backup directories look like 'YYYY-MM-DD_N' (separators may vary).
    re_backup_dirs = re.compile(r"^\s*\d{4}[-_]+\d\d[-_]+\d\d[-_]+\d+\s*$")
    re_whitespace = re.compile(r"\s+")
    self.login_ftp()
    self.ftp.cwd(self.ftp_remote_dir)
    # Collect remote entries that match the backup directory pattern.
    cur_backup_dirs = []
    dlist = self.dir_list()
    for entry in dlist:
        if self.verbose > 3:
            LOG.debug("Entry in FTP dir:\n%s", pp(entry.as_dict(short=True)))
        if re_backup_dirs.search(entry.name):
            cur_backup_dirs.append(entry.name)
        else:
            LOG.debug("FTP-Entry %r is not a valid backup directory.", entry.name)
    cur_backup_dirs.sort(key=str.lower)
    if self.verbose > 1:
        LOG.debug("Found backup directories:\n%s", pp(cur_backup_dirs))
    cur_date = datetime.utcnow()
    # '%%02d' keeps a literal '%02d' in the result, filled in below
    # with a running serial number for the current day.
    backup_dir_tpl = cur_date.strftime("%Y-%m-%d_%%02d")
    LOG.debug("Backup directory template: %r", backup_dir_tpl)
    cur_weekday = cur_date.timetuple().tm_wday
    # Retrieving new backup directory:
    # first serial number of today that is not taken yet.
    new_backup_dir = None
    i = 0
    found = False
    while not found:
        new_backup_dir = backup_dir_tpl % (i)
        if not new_backup_dir in cur_backup_dirs:
            found = True
        i += 1
    LOG.info("New backup directory: %r", new_backup_dir)
    cur_backup_dirs.append(new_backup_dir)
    # Classify the new directory into every retention class it serves.
    type_mapping = {"yearly": [], "monthly": [], "weekly": [], "daily": [], "other": []}
    if cur_date.month == 1 and cur_date.day == 1:
        if not new_backup_dir in type_mapping["yearly"]:
            type_mapping["yearly"].append(new_backup_dir)
    if cur_date.day == 1:
        if not new_backup_dir in type_mapping["monthly"]:
            type_mapping["monthly"].append(new_backup_dir)
    if cur_weekday == 6:
        # Sunday
        if not new_backup_dir in type_mapping["weekly"]:
            type_mapping["weekly"].append(new_backup_dir)
    if not new_backup_dir in type_mapping["daily"]:
        type_mapping["daily"].append(new_backup_dir)
    # Assign the already existing directories to their retention classes.
    self.map_dirs2types(type_mapping, cur_backup_dirs)
    for key in type_mapping:
        type_mapping[key].sort(key=str.lower)
    if self.verbose > 2:
        LOG.debug("Mapping of found directories to backup types:\n%s", pp(type_mapping))
    # Trim every class to its configured maximum number of copies,
    # dropping the oldest (lexically smallest) entries first.
    for key in self.copies:
        max_copies = self.copies[key]
        cur_copies = len(type_mapping[key])
        while cur_copies > max_copies:
            type_mapping[key].pop(0)
            cur_copies = len(type_mapping[key])
    if self.verbose > 2:
        LOG.debug("Directories to keep:\n%s", pp(type_mapping))
    # Everything not kept by any retention class gets deleted.
    dirs_delete = []
    for backup_dir in cur_backup_dirs:
        keep = False
        for key in type_mapping:
            if backup_dir in type_mapping[key]:
                if self.verbose > 2:
                    LOG.debug("Directory %r has to be kept.", backup_dir)
                keep = True
                # NOTE(review): 'continue' here proceeds with the
                # remaining keys although 'keep' is already decided;
                # 'break' would be the cheaper choice - behavior is
                # the same either way.
                continue
        if not keep:
            dirs_delete.append(backup_dir)
    LOG.debug("Directories to remove:\n%s", pp(dirs_delete))
    # Removing recursive unnecessary stuff
    for item in dirs_delete:
        self.remove_recursive(item)
    # Creating date formatted directory
    LOG.info("Creating directory %r ...", new_backup_dir)
    if not self.simulate:
        self.ftp.mkd(new_backup_dir)
    local_pattern = os.path.join(self.local_directory, "*")
    try:
        LOG.debug("Changing into %r ...", new_backup_dir)
        if not self.simulate:
            self.ftp.cwd(new_backup_dir)
        # Backing up stuff
        LOG.debug("Searching for stuff to backup in %r.", local_pattern)
        local_files = glob.glob(local_pattern)
        for local_file in sorted(local_files, key=str.lower):
            if not os.path.isfile(local_file):
                if self.verbose > 1:
                    LOG.debug("%r is not a file, don't backup it.", local_file)
                continue
            statinfo = os.stat(local_file)
            size = statinfo.st_size
            s = ""
            if size != 1:
                s = "s"
            size_human = bytes2human(size, precision=1)
            # Whitespace in remote names is replaced by underscores.
            remote_file = re_whitespace.sub("_", os.path.basename(local_file))
            LOG.info(
                "Transfering file %r -> %r, size %d Byte%s (%s).",
                local_file, remote_file, size, s, size_human
            )
            if not self.simulate:
                cmd = "STOR %s" % (remote_file)
                with open(local_file, "rb") as f:
                    # Retry temporary FTP errors up to 10 times.
                    try_nr = 0
                    while try_nr < 10:
                        try_nr += 1
                        if try_nr > 2:
                            LOG.info("Try %d transferring file %r ...", try_nr, local_file)
                        try:
                            self.ftp.storbinary(cmd, f)
                            break
                        except ftplib.error_temp as e:
                            if try_nr >= 10:
                                msg = "Giving up trying to upload %r after %d tries: %s"
                                LOG.error(msg, local_file, try_nr, str(e))
                                raise
                            self.handle_error(str(e), e.__class__.__name__, False)
                            time.sleep(2)
    finally:
        LOG.debug("Changing cwd up.")
        if not self.simulate:
            self.ftp.cwd("..")
    # Detect and display current disk usages
    total_bytes = 0
    if six.PY2:
        # Keep the running sum in an unbounded integer on Python 2.
        total_bytes = long(0)
    dlist = self.dir_list()
    total_s = "Total"
    # Width of the longest entry name, for aligned output columns.
    max_len = len(total_s)
    for entry in dlist:
        if len(entry.name) > max_len:
            max_len = len(entry.name)
    max_len += 2
    LOG.info("Current disk usages:")
    for entry in dlist:
        if entry.name == "." or entry.name == "..":
            continue
        entry_size = self.disk_usage(entry)
        total_bytes += entry_size
        s = ""
        if entry_size != 1:
            s = "s"
        b_h = bytes2human(entry_size, precision=1)
        # NOTE(review): str.split(maxsplit=1) is Python-3-only syntax,
        # although the six.PY2 branch above suggests Python 2 support -
        # confirm which interpreters this module still targets.
        (val, unit) = b_h.split(maxsplit=1)
        b_h_s = "%6s %s" % (val, unit)
        LOG.info("%-*r %13d Byte%s (%s)", max_len, entry.name, entry_size, s, b_h_s)
    s = ""
    if total_bytes != 1:
        s = "s"
    b_h = bytes2human(total_bytes, precision=1)
    (val, unit) = b_h.split(maxsplit=1)
    b_h_s = "%6s %s" % (val, unit)
    LOG.info("%-*s %13d Byte%s (%s)", max_len, total_s + ":", total_bytes, s, b_h_s)
def format_df_results(self, result_list, human=False):
    """Format 'df' results into an aligned, human readable text table.

    @param result_list: the df result objects to format; each entry must
                        provide the attributes dev, fs, fs_type, total_mb,
                        used_mb, free_mb and used_percent
    @type result_list: list
    @param human: show the sizes via bytes2human() instead of
                  locale-grouped plain numbers
    @type human: bool

    @return: the formatted table, one line per filesystem
    @rtype: str
    """
    gr = locale.localeconv()['grouping']

    res_list = []
    for result in result_list:
        df_entry = {}
        df_entry['dev'] = result.dev
        df_entry['fs'] = result.fs
        df_entry['type'] = result.fs_type
        # BUGFIX: locale.format() was deprecated since Python 3.7 and
        # removed in 3.12; locale.format_string() is the drop-in
        # replacement with the same signature (it also exists on Py2).
        df_entry['total'] = locale.format_string("%d", result.total_mb, gr)
        df_entry['used'] = locale.format_string("%d", result.used_mb, gr)
        df_entry['free'] = locale.format_string("%d", result.free_mb, gr)
        if result.used_percent is None:
            df_entry['used_pc'] = "-"
        else:
            df_entry['used_pc'] = locale.format_string(
                "%.2f", result.used_percent) + " %"
        if human:
            df_entry['total'] = bytes2human(result.total_mb)
            df_entry['used'] = bytes2human(result.used_mb)
            df_entry['free'] = bytes2human(result.free_mb)
        res_list.append(df_entry)
    if self.verbose > 2:
        log.debug("Formatted DF results: %s", pp(res_list))

    keys = ('dev', 'type', 'total', 'used', 'free', 'used_pc')
    length = {}
    for key in keys:
        length[key] = 1

    cur_locale = locale.getlocale()
    cur_encoding = cur_locale[1]
    # Fall back to UTF-8 if the locale has no usable encoding.
    if (cur_locale[1] is None or cur_locale[1] == '' or
            cur_locale[1].upper() == 'C' or cur_locale[1].upper() == 'POSIX'):
        cur_encoding = 'UTF-8'

    # Determine the column widths from the longest value of each field.
    for result in res_list:
        for key in keys:
            tt = result[key]
            if sys.version_info[0] <= 2:
                # On Python 2 measure the decoded (unicode) length.
                tt = tt.decode(cur_encoding)
            if len(tt) > length[key]:
                length[key] = len(tt)

    out = ''
    for result in res_list:
        line = "%-*s %-*s %*s %*s %*s %*s %s\n" % (
            length['dev'], result['dev'],
            length['type'], result['type'],
            length['total'], result['total'],
            length['used'], result['used'],
            length['free'], result['free'],
            length['used_pc'], result['used_pc'],
            result['fs'],
        )
        out += line
    if self.verbose > 2:
        log.debug("Field lengths: %s", pp(length))

    return out