def __init__(self, source_file, destination_file, target_md5_data, logger, private_key = ''):
    """Set up the reader, writer, and processor chain for reversing a backup
    transformation (join / decrypt / uncompress) on one file.

    source_file      -- path of the transformed backup artifact to read
    destination_file -- path the restored file should be written to
    target_md5_data  -- md5 bookkeeping passed through to readers/writers/processors
    logger           -- logger handed to every collaborator
    private_key      -- key material; required only when DECRYPT is in the options
    """
    self.source_file = source_file
    self.destination_file = destination_file
    # Directory portion of the destination path (text before the last separator).
    self.destination_dir = rpartition(destination_file, os.path.sep)[0]
    self.logger = logger
    # Ordered chain of transform-reversing processors, built up below.
    self.processors = []
    self.start_dir = get_slash_cwd()
    # Options are derived from the source file name (e.g. '.enc', '.bz2' suffixes)
    # by determine_options(); they decide which collaborators get built.
    options = self.determine_options(self.source_file)
    if not options:
        # No transformations to reverse -- output is discarded.
        # NOTE(review): whitespace was mangled in this file; the nesting of the
        # reader/processor setup below relative to this branch is reconstructed.
        self.writer = NullWriter(self.logger)
    else:
        self.writer = FileWriter(self.destination_file, target_md5_data, self.logger)
    if JOIN in options:
        # Source was split into chunks; JoinReader reassembles them.
        self.reader = JoinReader(self.source_file, target_md5_data, self.logger)
    else:
        self.reader = FileReader(self.source_file, target_md5_data, self.logger)
    if DECRYPT in options:
        if not private_key:
            raise NoPrivateKeyException()
        # Strip everything from the first '.enc' onward to name the decrypted file.
        source_file_name = self.source_file.partition('.enc')[0]
        decryptor = Decryptor(source_file_name, private_key, target_md5_data, self.logger)
        self.processors.append(decryptor)
    if UNCOMPRESS in options:
        # Strip everything from the first '.bz2' onward to name the uncompressed file.
        source_file_name = self.source_file.partition('.bz2')[0]
        uncompressor = UnCompressor(source_file_name, target_md5_data, self.logger)
        self.processors.append(uncompressor)
def strip_directory(file_name):
    """Opposite of get_base_path: return only the filename component of
    *file_name*, without any path information.

    If *file_name* contains no path separator it is returned unchanged.
    """
    # Use the built-in str.rpartition (the file already relies on
    # str.partition elsewhere) instead of the hand-rolled rpartition helper.
    base_path, sep, base_file_name = file_name.rpartition(os.path.sep)
    if not sep:
        # No separator found: the name is already a bare filename.
        return file_name
    return base_file_name
def get_base_path(file_name):
    """Return the directory that *file_name* is in.

    If the name contains no path separator (it is not in a separate
    directory), return the empty string.
    """
    # Use the built-in str.rpartition (the file already relies on
    # str.partition elsewhere) instead of the hand-rolled rpartition helper.
    base_path, middle, end = file_name.rpartition(os.path.sep)
    if not middle:
        # No separator: no directory component.
        return ''
    return base_path
def __init__(self, destination_file, logger):
    """Prepare to write *destination_file*, creating its parent directory
    if it does not yet exist.

    destination_file -- full path of the file this writer will produce
    logger           -- logger used by the writer
    """
    self.logger = logger
    self.file_name = destination_file
    # Opened lazily elsewhere; None marks "not yet open".
    self.file_handle = None
    base_dir, _sep, _output_file_name = rpartition(self.file_name, os.path.sep)
    # FIX: the original shelled out with
    #   os.system('bash -c "mkdir -p %s"' % base_dir)
    # which was non-portable and a shell-injection vector (the path is
    # interpolated, unquoted, into a shell command).  os.makedirs creates
    # the full tree in-process.  Also guard against an empty base_dir
    # (destination_file with no directory component), which os.makedirs
    # would reject.
    if base_dir and not os.path.isdir(base_dir):
        os.makedirs(base_dir)
def _prepare_restore(self, obj, restore_path):
    """ Command to deal with setting up the environment prior to
    a restore action
    obj -- package object which owns methods for restoring, etc.
    restore_path -- place where restore files have been dropped

    Returns OK on success, FAIL on any error.
    """
    if hasattr(obj, "pre_restore_cmd"):
        # FIX: the original checked for pre_restore_cmd but then read
        # obj.pre_backup_cmd (an attribute never verified to exist) for
        # error reporting.  Use the pre-restore command consistently.
        pre_restore_cmd = obj.pre_restore_cmd
        status = self._find_cmd(pre_restore_cmd)
        if status != OK:
            erstr = "%s: restore FAILED because %s failed."
            Logger.error(erstr % (self.full_name, pre_restore_cmd))
            return FAIL
    # backup_info.yml maps each original file path to its backup metadata.
    backup_data = yaml_load(open(make_path(restore_path, "backup_info.yml")).read())
    for backup_target in backup_data:
        if backup_target.startswith("__"):
            # Skip bookkeeping keys such as __START_TIME__ / __BACKUP_DIR__.
            continue
        md5_data = backup_data[backup_target].get("md5", {})
        backup_file_name = backup_data[backup_target].get("backup_file", '')
        if not md5_data or not backup_file_name:
            Logger.error("Corrupted backup data for %s, aborting." % (backup_target))
            return FAIL
        # The backup artifact lives under restore_path, mirroring the
        # original absolute path (leading separator stripped).
        source_file = make_path(restore_path,
                                rpartition(backup_target, os.path.sep)[0][1:],
                                backup_file_name)
        destination_file = make_path(restore_path, backup_target[1:])
        Logger.debug("=================================")
        Logger.debug("backup_target: %s..." % (backup_target))
        Logger.debug("current directory: %s" % (restore_path))
        Logger.debug("backup_file_name: %s..." % (backup_file_name))
        Logger.debug("destination_file: %s..." % (destination_file))
        Logger.debug("source_file: %s" % (source_file))
        Logger.debug("=================================")
        if not os.path.isfile(source_file):
            Logger.error("Restore file %s does not exist, aborting" % (source_file))
            return FAIL
        rfp = ReversePluggableFileProcessor(source_file, destination_file, md5_data, Logger)
        rfp.process_all()
    # FIX: the original fell off the end, implicitly returning None.
    # Return OK explicitly, matching the sibling _backup method.
    return OK
def _backup(self, obj, backup_data, future_pkns, dry_run):
    """Perform basic backup functions for a package.

    obj         -- package object providing pre_backup / post_backup commands
    backup_data -- dict of backup configuration; must contain a "file_names"
                   list and may contain an "options" list (default [COMPRESS])
    future_pkns -- passed through to _find_cmd
    dry_run     -- passed through to _find_cmd

    Returns OK on success, FAIL on any error.  On success the per-file
    report is dumped as YAML to the log, one "==REPORT==:" line each.
    """
    pre_backup_cmd = obj.pre_backup
    post_backup_cmd = obj.post_backup
    if pre_backup_cmd:
        status = self._find_cmd(pre_backup_cmd, future_pkns=future_pkns, dry_run=dry_run)
        if status != OK:
            erstr = "%s: backup FAILED because pre-backup command failed."
            Logger.error(erstr % self.full_name)
            return FAIL
    file_names = backup_data.get("file_names")
    if type(file_names) != type([]):
        errmsg = "Package %s did not define its backup data correctly."\
                 " '%s' should be a list."
        Logger.error(errmsg % (self.full_name, file_names))
        return FAIL
    options = backup_data.get("options", [COMPRESS])
    if type(options) != type([]):
        errmsg = "Package %s did not define its backup data correctly."\
                 " '%s' should be a list."
        Logger.error(errmsg % (self.full_name, options))
        return FAIL
    backup_dir = tempfile.mkdtemp()
    Logger.info("Temporary backup dir: %s" % backup_dir)
    start_time = time.time()
    # NOTE: the 'backup_data' parameter is deliberately rebound here; from
    # this point on it accumulates the per-file report, not the config.
    backup_data = {"__START_TIME__": start_time}
    for file_name in file_names:
        backup_data[file_name] = {}
        current_start = time.time()
        if file_name.startswith(os.path.sep):
            # Mirror the absolute path under backup_dir (leading sep stripped).
            backup_destination = make_path(backup_dir, file_name[1:])
            fpf = ForwardPluggableFileProcessor(file_name, backup_destination, options, Logger)
            backup_file_name, md5_dict = fpf.process_all()
            if not os.path.isfile(backup_file_name):
                Logger.error("Backup file not created.")
                return FAIL
            backup_data[file_name]["md5"] = md5_dict
            # Record only the filename component of the produced artifact.
            backup_data[file_name]["backup_file"] = rpartition(backup_file_name, os.path.sep)[-1]
        # NOTE(review): whitespace was mangled in this file; the following
        # bookkeeping is reconstructed at loop level, i.e. it also runs for
        # non-absolute paths that were not actually backed up -- confirm
        # against the original indentation.
        elapsed_time = time.time() - current_start
        backup_data[file_name]["elapsed_time"] = elapsed_time
        size = os.stat(file_name)[stat.ST_SIZE]
        backup_data[file_name]["size"] = size
        backup_data[file_name]["status"] = OK
    if post_backup_cmd:
        status = self._find_cmd(post_backup_cmd, future_pkns=future_pkns, dry_run=dry_run)
        if status != OK:
            erstr = "%s: backup FAILED because post-backup command failed."
            Logger.error(erstr % self.full_name)
            return FAIL
    backup_data["__BACKUP_DIR__"] = backup_dir
    dump_string = yaml_dump(backup_data)
    for line in dump_string.split('\n'):
        Logger.info("==REPORT==:%s" % line)
    return OK