def syncDirs(self):
    """Run an rsync backup of the configured source paths.

    Builds an rsync command line from the job configuration
    (compression, deletion of extraneous files, optional backup
    index, exclude patterns) and executes it.  When backup_index is
    enabled, rsync's verbose file listing is captured to
    self._index_path.

    Raises:
        errors.BackupConfigError: exclude patterns file does not
            exist, or no valid source paths are defined.
        errors.BackupError: rsync exited with a non-zero return code.
    """
    self._initSrc()
    self._initDest()
    compress = parse_value(self._conf.get("compress"), True)
    delete = parse_value(self._conf.get("delete"), False)
    backup_index = parse_value(self._conf.get("backup_index"), True)
    # 'in' instead of deprecated dict.has_key(); raw strings so the
    # regex escapes are not interpreted as string escapes.
    if "exclude_patterns" in self._conf:
        exclude_patterns = re.split(r"\s*,\s*|\s+",
                                    self._conf["exclude_patterns"])
    else:
        exclude_patterns = None
    exclude_patterns_file = self._conf.get("exclude_patterns_file")
    logger.info("Starting backup of paths: %s", ", ".join(self._path_list))
    args = [self._conf["cmd_rsync"]]
    if self._dryRun:
        args.append("-n")
    # Archive mode with relative path names preserved.
    args.append("-aR")
    if compress:
        args.append("-z")
    if backup_index:
        # Verbose output plus transfer stats form the backup index.
        args.append("-v")
        args.append("--stats")
    if delete:
        args.append("--delete")
    if exclude_patterns is not None:
        for pattern in exclude_patterns:
            args.append("--exclude=%s" % pattern)
    if exclude_patterns_file is not None:
        if os.path.isfile(exclude_patterns_file):
            args.append("--exclude-from=%s" % exclude_patterns_file)
        else:
            raise errors.BackupConfigError("Invalid exclude patterns file: %s"
                                           % exclude_patterns_file)
    # Truthiness test instead of len(...) > 0.
    if self._src_list:
        args.extend(self._src_list)
    else:
        raise errors.BackupConfigError("No valid source paths defined "
                                       "for backup.")
    args.append(self._archive_path)
    if backup_index:
        returncode, out, err = self._execBackupCmd(  # @UnusedVariable
            args, out_path=self._index_path, force_exec=True)
    else:
        returncode, out, err = self._execBackupCmd(  # @UnusedVariable
            args, force_exec=True)
    if returncode == 0:
        logger.info("Finished backup of paths: %s",
                    ", ".join(self._path_list))
    else:
        raise errors.BackupError("Backup of paths failed with error code: %s"
                                 % returncode,
                                 *utils.splitMsg(err))
def listJobs(self):
    """Lists all jobs defined in configuration file.

    Prints one line per job with its name, whether it is active
    (defaults to True when no 'active' option is set) and its
    configured backup method.
    """
    for job_name in self._jobsConf.keys():
        job_conf = self._jobsConf.get(job_name)
        # 'in' test instead of deprecated dict.has_key().
        if 'active' in job_conf:
            active = parse_value(job_conf['active'], True)
        else:
            active = True
        # Single parenthesized argument: prints identically under the
        # Python 2 print statement and Python 3 print function.
        print("Job Name: %s Active: %s Method: %s"
              % (job_name, active, str(job_conf.get('method'))))
def backupDirs(self):
    """Create a gzipped tar archive of the configured backup paths.

    Builds a tar command line from the job configuration (optional
    base directory, exclude patterns, verbose file index) and
    executes it, writing the archive — and the index file when
    backup_index is enabled — into the job directory.

    Raises:
        errors.BackupConfigError: base_dir is not a directory, or the
            exclude patterns file does not exist.
        errors.BackupError: tar exited with a non-zero return code.
    """
    archive_filename = "%s.%s" % (self._conf['filename_archive'],
                                  self._conf['suffix_tgz'])
    index_filename = "%s.%s" % (self._conf['filename_archive'],
                                self._conf['suffix_index'])
    archive_path = os.path.join(self._conf['job_path'], archive_filename)
    backup_index = parse_value(self._conf.get('backup_index'), True)
    index_path = os.path.join(self._conf['job_path'], index_filename)
    base_dir = self._conf.get('base_dir')
    # Raw strings so the regex escapes are not interpreted as string
    # escapes; paths may be separated by commas and/or whitespace.
    path_list = [os.path.normpath(path)
                 for path in re.split(r'\s*,\s*|\s+',
                                      self._conf['path_list'])]
    # 'in' test instead of deprecated dict.has_key().
    if 'exclude_patterns' in self._conf:
        exclude_patterns = re.split(r'\s*,\s*|\s+',
                                    self._conf['exclude_patterns'])
    else:
        exclude_patterns = None
    exclude_patterns_file = self._conf.get('exclude_patterns_file')
    logger.info("Starting backup of paths: %s", ', '.join(path_list))
    args = [self._conf['cmd_tar']]
    if base_dir is not None:
        if os.path.isdir(base_dir):
            args.extend(['-C', base_dir])
        else:
            raise errors.BackupConfigError("Invalid base directory "
                                           "(base_dir): %s" % base_dir)
    if backup_index:
        # Verbose file listing is captured as the backup index.
        args.append('-v')
    if exclude_patterns is not None:
        for pattern in exclude_patterns:
            args.append("--exclude=%s" % pattern)
    if exclude_patterns_file is not None:
        if os.path.isfile(exclude_patterns_file):
            args.append("--exclude-from=%s" % exclude_patterns_file)
        else:
            raise errors.BackupConfigError("Invalid exclude patterns file: %s"
                                           % exclude_patterns_file)
    args.extend(['-zcf', archive_path])
    self._checkSrcPaths(path_list)
    args.extend(path_list)
    if backup_index:
        returncode, out, err = self._execBackupCmd(  # @UnusedVariable
            args, out_path=index_path)
    else:
        returncode, out, err = self._execBackupCmd(args)  # @UnusedVariable
    if returncode == 0:
        logger.info("Finished backup of paths: %s", ', '.join(path_list))
    else:
        raise errors.BackupError("Backup of paths failed with error code: %s"
                                 % returncode,
                                 *utils.splitMsg(err))
def runJobs(self):
    """Runs the requested backup jobs.

    Backup jobs are either explicitly listed on the command line or
    all active backup jobs in configuration file are run.

    For each job: the optional job_pre_exec script runs first; the
    backup job itself and the optional job_post_exec script run only
    if the pre-execution script succeeded.  Per-job outcomes are
    accumulated in self._numJobsSuccess / self._numJobsError.

    NOTE(review): a job name with no matching configuration section,
    or an inactive job, is counted in self._numJobs but in neither
    the success nor the error total — confirm this is intended.
    """
    dry_run = self._globalConf.get('dry_run', False)
    for job_name in self._jobs:
        self._numJobs += 1
        logmgr.setContext(job_name)
        job_conf = self._jobsConf.get(job_name)
        if job_conf is not None:
            active = parse_value(job_conf.get('active', 'yes'), True)
            if active:
                job_pre_exec = job_conf.get('job_pre_exec')
                job_post_exec = job_conf.get('job_post_exec')
                if job_pre_exec is not None:
                    logger.info("Executing job pre-execution script.")
                    try:
                        execExternalCmd(job_pre_exec.split(), None, dry_run)
                        job_pre_exec_ok = True
                    except errors.ExternalCmdError as e:
                        job_pre_exec_ok = False
                        job_ok = False
                        logger.error("Job pre-execution script failed.")
                        logger.error(e.desc)
                        for line in e:
                            logger.error(" %s", line)
                else:
                    job_pre_exec_ok = True
                if job_pre_exec_ok:
                    try:
                        logger.info("Starting execution of backup job.")
                        job = BackupJob(job_name, self._globalConf, job_conf)
                        job.run()
                        logger.info("Finished execution of backup job.")
                        job_ok = True
                    except errors.BackupError as e:
                        logger.error("Execution of backup job failed.")
                        job_ok = False
                        if e.trace or e.fatal:
                            raise
                        else:
                            # Fatal errors are re-raised above, so only
                            # non-fatal errors ever reach this branch
                            # (the old CRITICAL path was unreachable).
                            level = logging.ERROR
                            logger.log(level, e.desc)
                            for line in e:
                                logger.log(level, " %s", line)
                if job_post_exec is not None and job_pre_exec_ok:
                    logger.info("Executing job post-execution script.")
                    try:
                        execExternalCmd(job_post_exec.split(), None, dry_run)
                    except errors.ExternalCmdError as e:
                        job_ok = False
                        # Fixed: message previously said "pre-execution".
                        logger.error("Job post-execution script failed.")
                        logger.error(e.desc)
                        for line in e:
                            logger.error(" %s", line)
                if job_ok:
                    self._numJobsSuccess += 1
                else:
                    self._numJobsError += 1