def do_recover(self, arg, opts=None):
    '''Recover files through dar_manager'''
    cf = getattr(self.cf, opts.job)
    rpath = opts.rpath if opts.rpath else cf.recover_path
    s = Scheme(self.cf, opts.job)
    r = Report(opts.job, session=s.sess)
    # Make sure the dar_manager database (dmd) still matches the catalogs
    # recorded in the db before attempting an automated recovery.
    self.logger.debug("Checking dmd sync..")
    bkp_count = r.get_catalogs(
        after=cf.catalog_begin,
        types=("Full", "Incremental")).count()
    self.logger.debug('Checking backup count on db.. %s' % bkp_count)
    dmd_count = len(s.load_dmd())
    self.logger.debug('Checking backup count on dmd.. %s' % dmd_count)
    if bkp_count != dmd_count:
        self.stdout.write("Outdated DMD, please rebuild it or recover "
                          "manually.\n")
        sys.exit(2)
    if opts.extract:
        cat = opts.jobid if opts.jobid else None
        try:
            run = s.recover_all(
                rpath, stdout=self.stdout, stderr=sys.stderr, catalog=cat)
        except RecoverError, e:
            sys.stderr.write("%s\n" % e.message)
            sys.exit(2)
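# Illustrative only: `Scheme.recover_all` is assumed to drive a dar_manager
# restore roughly equivalent to the command below (the dmd path spelling and
# restore root are assumptions, not taken from this module):
#
#   dar_manager -B ~/.dardrive/dmd/<job>.dmd -e "-R /recover/path" -r etc/fstab
#
# -B selects the dmd database, -e passes extra options through to dar (here
# the restore root), and -r restores the named files from the most suitable
# archive in the database.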
def do_rebuild_dmd(self, arg, opts=None):
    '''Re-create the dmd for a given job.'''
    cf = getattr(self.cf, opts.job)
    sl = Scheme(self.cf, opts.job)
    lock = sl.lock("rebuild_dmd")
    self.logger.debug("Removing dmd for %s" % opts.job)
    dmdfile = os.path.expanduser("~/.dardrive/dmd/%s.dmd" % opts.job)
    if os.path.exists(dmdfile):
        os.unlink(dmdfile)
    s = Scheme(self.cf, opts.job)
    r = Report(opts.job, session=s.sess)
    # Re-register every catalog recorded since catalog_begin, oldest first.
    for cat in r.get_catalogs(after=cf.catalog_begin, order="asc"):
        s.add_to_dmd(cat.id)
    sl.sess.delete(lock)
    sl.sess.commit()
    s.sess.commit()
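# Illustrative only: rebuilding the dmd is assumed to boil down to recreating
# the dar_manager database and re-registering one archive per tracked catalog,
# roughly:
#
#   dar_manager -C ~/.dardrive/dmd/<job>.dmd
#   dar_manager -B ~/.dardrive/dmd/<job>.dmd -A /path/to/archive_basename
#
# -C creates an empty database and -A adds an archive to it; the actual
# archive paths come from the job's configuration, not from this sketch.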
def do_dbdump(self, arg, opts=None):
    '''Run a MySQL backup job'''
    if opts:
        try:
            s = Scheme(self.cf, opts.job)
            self.stdout.write('Running SQL backup job..\n')
            cat = s.db_backup()
            if cat.clean:
                self.report('Backup completed successfully\n', opts.job,
                            catalog=cat)
            else:
                self.report('Error running backup\n', opts.job,
                            catalog=cat, error=True)
        except (BackupDBException, ConfigException), e:
            self.report(e.message, opts.job, error=True)
        except IOError, e:
            self.report(e.message, opts.job, error=True)
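# Illustrative only: `Scheme.db_backup` is assumed to capture a dump along the
# lines of `mysqldump --all-databases` (or a per-database dump, depending on
# the job configuration) and hand the result to the regular archive machinery,
# returning the resulting catalog record.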
def do_backup(self, arg, opts=None):
    '''Perform a backup task'''
    if opts:
        try:
            s = Scheme(self.cf, opts.job, opts.root)
            self.stdout.write('Running backup job: %s..\n' % opts.job)
            s.run(opts.full)
            ttook = timedelta(seconds=s.newcatalog.ttook)
            stat = dar_status(s.newcatalog.status)
            self.report(
                'Dar status:\t\t%s\nTime taken:\t\t%s\n' % (stat, ttook),
                opts.job, verbose=opts.verbose, catalog=s.newcatalog)
        except RefCatalogError, e:
            self.report(
                'The reference catalog is missing, please '
                'provide one or force a full backup.\n', opts.job, error=True)
        except ConfigException, e:
            self.report(
                '\nThere seems to be a configuration error:\n'
                ' %s\n' % e.message, opts.job, error=True)
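# Illustrative only: `Scheme.run(full)` is assumed to end up invoking dar in
# one of two ways (basenames, store paths and slice/compression options are
# assumptions):
#
#   dar -c <store>/<full_base> -R <root> -z            # full backup
#   dar -c <store>/<inc_base>  -R <root> -z -A <ref>   # incremental; -A names
#                                                      # the reference catalog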
def do_import(self, arg, opts=None):
    '''Import an untracked job store into the db.'''
    if opts.job not in self.cf.sections():
        self.stdout.write("Nonexistent job.\n")
    else:
        cs = getattr(self.cf, opts.job)
        s = Scheme(self.cf, opts.job)
        lock = s.lock('import')
        i = Importer(self.cf, opts.job, session=s.sess)
        i.load()
        self.stdout.write("Job store imported.\n")
        self.stdout.write("Rebuilding the dmd database...\n")
        dmdfile = os.path.expanduser("~/.dardrive/dmd/%s.dmd" % opts.job)
        if os.path.exists(dmdfile):
            os.unlink(dmdfile)
        s.sess.delete(lock)
        s = Scheme(self.cf, opts.job)
        lock2 = s.lock('rebuild')
        r = Report(opts.job, session=s.sess)
        # Only catalogs dated on or after catalog_begin are added to the dmd.
        for cat in r.get_catalogs(order="asc"):
            if cat.date >= dt.strptime(cs.catalog_begin, "%Y-%m-%d"):
                s.add_to_dmd(cat.id)
        s.sess.delete(lock2)
        s.sess.commit()
def do_versions(self, arg, opts=None):
    '''Show available copies of a given file'''
    s = Scheme(self.cf, opts.job)
    for ver in s.search_dmd(opts.file):
        self.stdout.write(mk_dar_date(ver) + "\n")
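# Illustrative only: listing the stored versions of a file maps naturally onto
# dar_manager's file lookup (the dmd path below is an assumption):
#
#   dar_manager -B ~/.dardrive/dmd/<job>.dmd -f etc/fstab
#
# which prints, per archive in the database, the dates at which that file was
# saved; `search_dmd` is assumed to expose the same information to this shell.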