def backupDirs(self):
    """Archive the configured path list into a gzipped tar file.

    Builds and runs a tar command based on the job configuration:
      - base_dir:              directory tar changes into before archiving.
      - path_list:             paths, comma- and/or whitespace-separated.
      - exclude_patterns:      patterns passed to tar via --exclude.
      - exclude_patterns_file: file passed to tar via --exclude-from.
      - backup_index:          when true, tar runs verbosely and its file
                               listing is stored in an index file next to
                               the archive.

    Raises:
        errors.BackupConfigError: base_dir is not a directory, or the
            exclude patterns file does not exist.
        errors.BackupError: tar exited with a non-zero return code.
    """
    archive_filename = "%s.%s" % (self._conf['filename_archive'],
                                  self._conf['suffix_tgz'])
    index_filename = "%s.%s" % (self._conf['filename_archive'],
                                self._conf['suffix_index'])
    archive_path = os.path.join(self._conf['job_path'], archive_filename)
    backup_index = parse_value(self._conf.get('backup_index'), True)
    index_path = os.path.join(self._conf['job_path'], index_filename)
    base_dir = self._conf.get('base_dir')
    # Paths may be separated by commas and/or whitespace; the raw string
    # keeps the regex escapes intact.
    path_list = [os.path.normpath(path)
                 for path in re.split(r'\s*,\s*|\s+',
                                      self._conf['path_list'])]
    # 'key in dict' instead of dict.has_key(), which is deprecated in
    # Python 2 and removed in Python 3.
    if 'exclude_patterns' in self._conf:
        exclude_patterns = re.split(r'\s*,\s*|\s+',
                                    self._conf['exclude_patterns'])
    else:
        exclude_patterns = None
    exclude_patterns_file = self._conf.get('exclude_patterns_file')
    logger.info("Starting backup of paths: %s", ', '.join(path_list))
    args = [self._conf['cmd_tar'],]
    if base_dir is not None:
        if os.path.isdir(base_dir):
            args.extend(['-C', base_dir])
        else:
            raise errors.BackupConfigError("Invalid base directory "
                                           "(base_dir): %s"% base_dir)
    if backup_index:
        # -v makes tar print archived filenames; the output becomes the
        # backup index file.
        args.append('-v')
    if exclude_patterns is not None:
        for pattern in exclude_patterns:
            args.append("--exclude=%s" % pattern)
    if exclude_patterns_file is not None:
        if os.path.isfile(exclude_patterns_file):
            args.append("--exclude-from=%s" % exclude_patterns_file)
        else:
            raise errors.BackupConfigError("Invalid exclude patterns file: %s"
                                           % exclude_patterns_file)
    args.extend(['-zcf', archive_path])
    self._checkSrcPaths(path_list)
    args.extend(path_list)
    if backup_index:
        returncode, out, err = self._execBackupCmd(args, #@UnusedVariable
                                                   out_path=index_path)
    else:
        returncode, out, err = self._execBackupCmd(args) #@UnusedVariable
    if returncode == 0:
        logger.info("Finished backup of paths: %s", ', '.join(path_list))
    else:
        raise errors.BackupError("Backup of paths failed with error code: %s"
                                 % returncode,
                                 *utils.splitMsg(err))
def dumpDatabase(self, db):
    """Dump a single PostgreSQL database with pg_dump in custom format.

    The dump is written to a file named after the configured dump prefix
    and the database name, inside the job directory.

    @param db: Name of the PostgreSQL database to dump.
    """
    filename = "%s_%s.dump" % (self._conf['filename_dump_db'], db)
    path = os.path.join(self._conf['job_path'], filename)
    # -w: never prompt for a password; -Fc: custom archive format.
    cmd = ([self._conf['cmd_pg_dump'], '-w', '-Fc']
           + list(self._connArgs)
           + ['-f', path, db])
    logger.info("Starting dump of PostgreSQL Database: %s Backup: %s",
                db, path)
    rc, out, err = self._execBackupCmd(cmd, self._env)  #@UnusedVariable
    if rc != 0:
        raise errors.BackupError("Dump of PostgreSQL database %s failed "
                                 "with error code %s." % (db, rc),
                                 *utils.splitMsg(err))
    logger.info("Finished dump of PostgreSQL Database: %s Backup: %s",
                db, path)
def dumpGlobals(self):
    """Dump PostgreSQL global objects using pg_dumpall -g.

    The dump output is captured from the command, compressed, and written
    to a file in the job directory.
    """
    dump_path = os.path.join(
        self._conf['job_path'],
        "%s.%s" % (self._conf['filename_dump_globals'],
                   self._conf['suffix_compress']))
    # -w: never prompt for a password; -g: global objects only.
    args = [self._conf['cmd_pg_dumpall'], '-w', '-g'] + list(self._connArgs)
    logger.info("Starting PostgreSQL Global Objects dump. Backup: %s",
                dump_path)
    rc, out, err = self._execBackupCmd(args,  #@UnusedVariable
                                       self._env,
                                       out_path=dump_path,
                                       out_compress=True)
    if rc != 0:
        raise errors.BackupError("Dump failed with error code: %s" % rc,
                                 *utils.splitMsg(err))
    logger.info("Finished PostgreSQL Global Objects dump. Backup: %s",
                dump_path)
def syncDirs(self):
    """Synchronize the configured source paths to the destination via rsync.

    Honors the following job configuration options:
      - compress:              enable rsync compression (-z).
      - delete:                remove extraneous destination files (--delete).
      - backup_index:          run rsync verbosely with --stats and store
                               the output in the index file.
      - exclude_patterns:      patterns passed via --exclude.
      - exclude_patterns_file: file passed via --exclude-from.

    Raises:
        errors.BackupConfigError: invalid exclude patterns file, or no
            valid source paths are defined.
        errors.BackupError: rsync exited with a non-zero return code.
    """
    self._initSrc()
    self._initDest()
    compress = parse_value(self._conf.get("compress"), True)
    delete = parse_value(self._conf.get("delete"), False)
    backup_index = parse_value(self._conf.get("backup_index"), True)
    # 'key in dict' instead of dict.has_key(), which is deprecated in
    # Python 2 and removed in Python 3.
    if "exclude_patterns" in self._conf:
        # Patterns may be separated by commas and/or whitespace; the raw
        # string keeps the regex escapes intact.
        exclude_patterns = re.split(r"\s*,\s*|\s+",
                                    self._conf["exclude_patterns"])
    else:
        exclude_patterns = None
    exclude_patterns_file = self._conf.get("exclude_patterns_file")
    logger.info("Starting backup of paths: %s", ", ".join(self._path_list))
    args = [self._conf["cmd_rsync"]]
    if self._dryRun:
        # -n: rsync dry run, no files are transferred.
        args.append("-n")
    # -a: archive mode; -R: use relative path names.
    args.append("-aR")
    if compress:
        args.append("-z")
    if backup_index:
        args.append("-v")
        args.append("--stats")
    if delete:
        args.append("--delete")
    if exclude_patterns is not None:
        for pattern in exclude_patterns:
            args.append("--exclude=%s" % pattern)
    if exclude_patterns_file is not None:
        if os.path.isfile(exclude_patterns_file):
            args.append("--exclude-from=%s" % exclude_patterns_file)
        else:
            raise errors.BackupConfigError("Invalid exclude patterns file: %s"
                                           % exclude_patterns_file)
    if len(self._src_list) > 0:
        args.extend(self._src_list)
    else:
        raise errors.BackupConfigError("No valid source paths defined for backup.")
    args.append(self._archive_path)
    if backup_index:
        returncode, out, err = self._execBackupCmd(  # @UnusedVariable
            args, out_path=self._index_path, force_exec=True)
    else:
        returncode, out, err = self._execBackupCmd(args, force_exec=True)  # @UnusedVariable
    if returncode == 0:
        logger.info("Finished backup of paths: %s",
                    ", ".join(self._path_list))
    else:
        raise errors.BackupError("Backup of paths failed with error code: %s"
                                 % returncode,
                                 *utils.splitMsg(err))
def dumpDatabase(self, db, data=True):
    """Dump a MySQL database with mysqldump to a compressed file.

    @param db:   Name of the MySQL database to dump.
    @param data: When True, dump the table contents; when False, dump
                 only the database container (no table data or
                 create-info).
    """
    if data:
        dump_type, dump_desc = "data", "MySQL Database Contents"
    else:
        dump_type, dump_desc = "db", "MySQL Database Container"
    dump_path = os.path.join(
        self._conf["job_path"],
        "%s_%s_%s.dump.%s" % (self._conf["filename_dump_db"], db,
                              dump_type, self._conf["suffix_compress"]))
    args = [self._conf["cmd_mysqldump"]] + list(self._connArgs)
    if db in ("information_schema", "mysql"):
        # NOTE(review): presumably these system schemas cannot be locked
        # by a regular dump user — confirm against mysqldump docs.
        args.append("--skip-lock-tables")
    if not data:
        args += ["--no-create-info", "--no-data", "--databases"]
    args.append(db)
    logger.info("Starting dump of %s: %s Backup: %s",
                dump_desc, db, dump_path)
    rc, out, err = self._execBackupCmd(args, self._env,  # @UnusedVariable
                                       out_path=dump_path,
                                       out_compress=True)
    if rc != 0:
        raise errors.BackupError(
            "Dump of %s for %s failed with error code: %s"
            % (dump_desc, db, rc),
            *utils.splitMsg(err))
    logger.info("Finished dump of %s: %s Backup: %s",
                dump_desc, db, dump_path)
try: cmd = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=bufferSize, env = env) except Exception, e: raise errors.ExternalCmdError("External script execution failed.", "Command: %s" % ' '.join(args), "Error Message: %s" % str(e)) out, err = cmd.communicate(None) #@UnusedVariable if not cmd.returncode == 0: raise errors.ExternalCmdError("Execution of external command failed" " with error code: %s" % cmd.returncode, *utils.splitMsg(err)) else: return (out, err) class JobManager: """The class implementing the logic for executing multiple backup jobs. """ _globalOpts = {'backup_root': 'Root directory for storing backups.', 'hostname_dir': 'Create subdirectory for each hostname. (yes/no)', 'user': '******', 'umask': 'Umask for file and directory creation.', 'console_loglevel': 'Logging level for console.', 'logfile_loglevel': 'Logging level for log file.', 'filename_logfile': 'Filename for log file.',
stdin=cmd.stdout, stdout=out_fp, stderr=subprocess.PIPE, bufsize=bufferSize) cmd.stdout.close() except Exception, e: raise errors.BackupCmdError("Backup compression command failed.", "Command: %s" % ' '.join(args_comp), "Error Message: %s" % str(e)) comp_out, comp_err = cmd_comp.communicate(None) #@UnusedVariable err = cmd.stderr.read() cmd.wait() if cmd_comp.returncode == 0: return (cmd.returncode, '', err) else: raise errors.BackupError("Compression of backup failed " "with error code: %s" % cmd_comp.returncode, utils.splitMsg(comp_err)) else: try: cmd = subprocess.Popen(args, stdout=(out_fp or subprocess.PIPE), stderr=subprocess.PIPE, bufsize=bufferSize, env = env) except Exception, e: raise errors.BackupCmdError("Backup command execution failed.", "Command: %s" % ' '.join(args), "Error Message: %s" % str(e)) out, err = cmd.communicate(None) return (cmd.returncode, out, err)