def send(self, id, name, filename):
    """Upload a backup file to S3 with the AWS CLI.

    The object key is <bucket>/<id>/<run timestamp>/<basename>. Optional
    per-destination credentials and a custom endpoint URL are honoured.

    Returns the s3:// location on success.
    Raises BackupException if the upload exits non-zero.
    """
    s3location = "s3://%s/%s/%s/%s" % (
        self.bucket, id,
        self.runtime.strftime("%Y%m%d%H%M%S"),
        os.path.basename(filename))
    logging.info("Uploading '%s' backup for '%s' to S3 (%s)..." % (name, self.id, s3location))
    uploadargs = ['aws', 's3', 'cp', '--only-show-errors', filename, s3location]
    uploadenv = os.environ.copy()
    # Explicit credentials override the ambient environment when configured.
    if self.aws_key is not None:
        uploadenv['AWS_ACCESS_KEY_ID'] = self.aws_key
        uploadenv['AWS_SECRET_ACCESS_KEY'] = self.aws_secret
        uploadenv['AWS_DEFAULT_REGION'] = self.region
    if self.endpoint_url is not None:
        # Must precede the positional args: 'aws s3 cp --endpoint-url ...'.
        uploadargs.insert(2, "--endpoint-url")
        uploadargs.insert(3, self.endpoint_url)
    uploadproc = subprocess.Popen(uploadargs, stderr=subprocess.PIPE, env=uploadenv)
    # communicate() drains stderr while waiting; the old wait()-then-read
    # pattern can deadlock if the child fills the stderr pipe buffer.
    _, errmsg = uploadproc.communicate()
    if uploadproc.returncode != 0:
        raise BackupException("Error while uploading (%s): %s" % (self.id, errmsg))
    return s3location
def dump(self):
    """Dump a PostgreSQL database to a temporary SQL file.

    A throwaway .pgpass file keeps the password off the command line;
    pg_dump is pointed at it via PGPASSFILE. The creds file is always
    removed, even on failure.

    Returns a one-element list containing the dump filename.
    Raises BackupException if pg_dump exits non-zero.
    """
    # Create temporary credentials file.  Text mode: the old 'wb' mode
    # raised TypeError on Python 3 when writing a str.
    credsfilename = '%s/%s.pgpass' % (self.tmpdir, self.id)
    with open(credsfilename, 'w') as credsfile:
        credsfile.write(
            "%s:%s:%s:%s:%s\n" %
            (self.dbhost, '5432', self.dbname, self.dbuser, self.dbpass)
        )
    # libpq refuses a .pgpass that is group/world readable.
    os.chmod(credsfilename, 0o600)

    # Perform dump and remove creds file
    try:
        dumpfilename = '%s/%s.sql' % (self.tmpdir, self.id)
        logging.info("Backing up '%s' (%s)..." % (self.name, self.type))
        dumpargs = ['pg_dump', '-h', self.dbhost, '--username', self.dbuser, self.dbname]
        dumpenv = os.environ.copy()
        dumpenv['PGPASSFILE'] = credsfilename
        with open(dumpfilename, 'wb') as dumpfile:
            dumpproc1 = subprocess.Popen(dumpargs, stdout=dumpfile,
                                         stderr=subprocess.PIPE, env=dumpenv)
            # communicate() drains stderr while waiting, avoiding the
            # wait()-with-full-pipe deadlock of the original code.
            _, errmsg = dumpproc1.communicate()
        if dumpproc1.returncode != 0:
            raise BackupException("Error while dumping: %s" % errmsg)
    finally:
        os.unlink(credsfilename)
    return [dumpfilename, ]
def dump(self):
    """Tar up a remote directory over SSH into a local tarball.

    Runs 'tar cC <path> [excludes] .' on the remote host and streams the
    archive to a local file. tar exit code 1 ("some files differ") is
    tolerated; anything greater raises.

    Returns a one-element list containing the tar filename.
    Raises BackupException if tar exits with a code above 1.
    """
    tarfilename = '%s/%s.tar' % (self.tmpdir, self.id)
    logging.info("Backing up '%s' (%s)..." % (self.name, self.type))
    dumpargs = ['ssh', ('%s@%s' % (self.sshuser, self.sshhost)), 'tar', 'cC', self.path]
    for exclude in self.excludes:
        dumpargs.append('--exclude')
        dumpargs.append(exclude)
    dumpargs.append(".")
    logging.debug("Running '%s'" % (" ".join(dumpargs)))
    # 'tarfh' rather than 'tarfile' — the old name shadowed the stdlib module.
    with open(tarfilename, 'wb') as tarfh:
        dumpproc1 = subprocess.Popen(dumpargs, stdout=tarfh, stderr=subprocess.PIPE)
        # communicate() drains stderr while waiting; wait() alone can
        # deadlock if remote tar emits enough warnings to fill the pipe.
        _, errmsg = dumpproc1.communicate()
    exitcode = dumpproc1.returncode
    if errmsg != b'':
        logging.error(errmsg)
    if exitcode > 1:
        raise BackupException("Error while dumping (exitcode %d): %s" % (exitcode, errmsg))
    return [tarfilename, ]
def encrypt(filename, passphrase):
    """Symmetrically encrypt a file with GPG; returns the .gpg filename.

    The passphrase is fed over stdin ('--passphrase-fd 0') so it never
    appears on the command line or in the process list. gpg's stderr is
    captured to a side file and included in any failure message.

    Raises BackupException if gpg exits non-zero.
    """
    logging.info("Encrypting '%s'..." % filename)
    encfilename = '%s.gpg' % filename
    encerrsname = '%s.err' % filename
    encargs = [
        'gpg', '--batch', '--yes', '-q', '--passphrase-fd', '0', '-c', filename
    ]
    with open(encfilename, 'wb') as encfile, open(encerrsname, 'wb') as encerrs:
        encproc1 = subprocess.Popen(encargs, stdin=subprocess.PIPE,
                                    stdout=encfile, stderr=encerrs,
                                    env=os.environ.copy())
        # communicate() writes the passphrase, closes stdin and waits for
        # exit — the explicit wait() in the old code was redundant.
        encproc1.communicate(passphrase.encode('utf8'))
    if encproc1.returncode != 0:
        # Read the captured stderr through a context manager so the
        # handle isn't leaked (the old open(...).read() leaked it).
        with open(encerrsname, 'rb') as errfh:
            errmsg = errfh.read()
        raise BackupException("Error while encrypting: %s" % errmsg)
    return encfilename
def dump(self):
    """Dump MySQL database(s) to a temporary SQL file via mysqldump.

    Credentials come either from a pre-existing defaults file
    (self.defaults) or from a throwaway .my.cnf written here. Only a
    creds file WE created is deleted afterwards — the old code's
    unconditional unlink in 'finally' destroyed the user's own
    self.defaults file.

    Returns a one-element list containing the dump filename.
    Raises BackupException on configuration or mysqldump failure.
    """
    created_creds = False
    if 'defaults' in dir(self):
        credsfilename = self.defaults
    elif self.dbuser is not None:
        # Create temporary credentials file, readable only by us.
        credsfilename = '%s/%s.my.cnf' % (self.tmpdir, self.id)
        with open(credsfilename, 'w') as credsfile:
            credsfile.write(
                "[client]\n"
                "host=%s\n"
                "user=%s\n"
                "password=%s\n\n" % (self.dbhost, self.dbuser, self.dbpass)
            )
        os.chmod(credsfilename, 0o400)
        created_creds = True
    else:
        # Old code fell through with credsfilename unbound (NameError).
        raise BackupException("No credentials configured for '%s'" % self.id)

    # Perform dump; remove the creds file only if we wrote it.
    try:
        dumpfilename = '%s/%s.sql' % (self.tmpdir, self.id)
        logging.info("Backing up '%s' (%s)..." % (self.name, self.type))
        dumpargs = [
            'mysqldump',
            ('--defaults-file=%s' % credsfilename),
            ('--host=%s' % self.dbhost),
            '-R'
        ]
        # Dump events too, unless explicitly disabled via self.noevents.
        if not 'noevents' in dir(self) or not self.noevents:
            dumpargs.append('--events')
        all_databases = False
        if hasattr(self, 'options'):
            for raw_option in self.options.split():
                option = raw_option.strip()
                dumpargs.append(option)
                if not all_databases and option == '--all-databases':
                    all_databases = True
        if not all_databases:
            dumpargs.append('--databases')
            for dbname in self.dbname.split():
                dumpargs.append(dbname)
        with open(dumpfilename, 'wb') as dumpfile:
            dumpproc1 = subprocess.Popen(dumpargs, stdout=dumpfile,
                                         stderr=subprocess.PIPE)
            # communicate() avoids the wait()-with-full-stderr-pipe deadlock.
            _, errmsg = dumpproc1.communicate()
        if dumpproc1.returncode != 0:
            raise BackupException("Error while dumping: %s" % errmsg)
    finally:
        if created_creds:
            os.unlink(credsfilename)
    return [dumpfilename, ]
def send(self, id, name, filename):
    """Upload a backup file to Google Cloud Storage with gsutil.

    Returns the gs:// location on success (the S3 destination's send()
    returns its location; the old code here returned None, which this
    makes consistent — returning a value is backward compatible).
    Raises BackupException if gsutil exits non-zero.
    """
    gslocation = "gs://%s/%s/%s/%s" % (
        self.bucket, id,
        self.runtime.strftime("%Y%m%d%H%M%S"),
        os.path.basename(filename))
    logging.info("Uploading '%s' backup to GS (%s)..." % (name, gslocation))
    uploadargs = ['gsutil', '-q', 'cp', filename, gslocation]
    uploadenv = os.environ.copy()
    uploadproc = subprocess.Popen(uploadargs, stderr=subprocess.PIPE, env=uploadenv)
    # communicate() drains stderr while waiting, avoiding pipe deadlock.
    _, errmsg = uploadproc.communicate()
    if uploadproc.returncode != 0:
        raise BackupException("Error while uploading: %s" % errmsg)
    return gslocation
def compress(filename):
    """gzip a file in place ('gzip -f --fast'); returns the .gz filename.

    gzip replaces <filename> with <filename>.gz. Its stderr is captured
    to a side file and included in any failure message.

    Raises BackupException if gzip exits non-zero.
    """
    logging.info("Compressing '%s'..." % filename)
    compfilename = '%s.gz' % filename
    comperrsname = '%s.err' % filename
    compargs = ['gzip', '-f', '--fast', filename]
    with open(comperrsname, 'wb') as comperrs:
        # No stdin pipe: gzip reads the named file, and the old
        # stdin=PIPE was never written to.
        compproc1 = subprocess.Popen(compargs, stderr=comperrs)
        compproc1.wait()
    if compproc1.returncode != 0:
        # Context manager: the old open(...).read() leaked the handle.
        with open(comperrsname, 'rb') as errfh:
            errmsg = errfh.read()
        raise BackupException("Error while compressing: %s" % errmsg)
    return compfilename
def dump(self):
    """Dump MySQL database(s) on a remote host via ssh + mysqldump.

    NOTE(review): --password=... appears on the remote command line and
    is visible in the remote process list; a defaults-file would be
    safer — flagged, not changed, to preserve behavior.

    mysqldump exit code 1 is tolerated here (only logged); anything
    greater raises. Returns a one-element list with the dump filename.
    """
    # Perform dump and remove creds file
    dumpfilename = '%s/%s.sql' % (self.tmpdir, self.id)
    logging.info("Backing up '%s' (%s)..." % (self.name, self.type))
    dumpargs = [
        'ssh', ('%s@%s' % (self.sshuser, self.sshhost)),
        'mysqldump',
        ('--host=%s' % self.dbhost),
        ('--user=%s' % self.dbuser),
        ('--password=%s' % self.dbpass),
        '-R'
    ]
    # Dump events too, unless explicitly disabled via self.noevents.
    if not 'noevents' in dir(self) or not self.noevents:
        dumpargs.append('--events')
    all_databases = False
    if hasattr(self, 'options'):
        for raw_option in self.options.split():
            option = raw_option.strip()
            dumpargs.append(option)
            if not all_databases and option == '--all-databases':
                all_databases = True
    if not all_databases:
        dumpargs.append('--databases')
        for dbname in self.dbname.split():
            dumpargs.append(dbname)
    logging.debug("Running '%s'" % (" ".join(dumpargs)))
    with open(dumpfilename, 'wb') as dumpfile:
        dumpproc1 = subprocess.Popen(dumpargs, stdout=dumpfile,
                                     stderr=subprocess.PIPE)
        # communicate() drains stderr while waiting, avoiding the
        # wait()-with-full-pipe deadlock of the original code.
        _, errmsg = dumpproc1.communicate()
    exitcode = dumpproc1.returncode
    if errmsg != b'':
        logging.error(errmsg)
    if exitcode > 1:
        raise BackupException("Error while dumping (exitcode %d): %s" % (exitcode, errmsg))
    return [dumpfilename, ]
def send(self, id, name, suffix, filename):
    """Upload a backup file to a Samba share via smbclient.

    Writes a temporary smbclient auth file (username/password/workgroup),
    runs 'smbclient -A <authfile> //host/share -c "put ..."', and always
    removes the auth file afterwards.

    Returns the remote file path on the share.
    Raises BackupException if smbclient exits non-zero.
    """
    credsfilename = '%s/%s.smbauth' % (self.tmpdir, self.id)
    # Text mode: the old 'wb' raised TypeError on Python 3 for str writes.
    with open(credsfilename, 'w') as credsfile:
        credsfile.write(
            "username = %s\n"
            "password = %s\n"
            "workgroup = %s\n" % (self.username, self.password, self.workgroup)
        )
    # 0o400 — the old bare '0400' literal is a SyntaxError on Python 3.
    os.chmod(credsfilename, 0o400)
    try:
        sambafile = "/%s-%s.%s" % (
            id, datetime.datetime.now().strftime("%Y%m%d%H%M%S"), suffix)
        # Three placeholders for three args — the old format string had a
        # fourth '%s' with no matching argument (TypeError at runtime).
        sambaurl = "smb://%s/%s%s" % (self.sambahost, self.sambashare, sambafile)
        logging.info("Uploading '%s' backup for '%s' to Samba (%s)..." % (name, self.id, sambaurl))
        sharething = "//%s/%s" % (self.sambahost, self.sambashare)
        command = "put %s %s" % (filename, sambafile)
        # Use the auth file written above; the old code referenced
        # self.authfile, leaving the generated credentials unused.
        uploadargs = [
            'smbclient', '-A', credsfilename, sharething, '-c', command
        ]
        uploadproc = subprocess.Popen(uploadargs, stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
        # communicate() drains both pipes while waiting (deadlock-safe).
        outmsg, errdata = uploadproc.communicate()
        if uploadproc.returncode != 0:
            errmsg = "%s%s" % (outmsg, errdata)
            raise BackupException("Error while uploading (%s): %s" % (self.id, errmsg))
        return sambafile
    finally:
        os.unlink(credsfilename)
def dump(self):
    """Tar up a local path into a tarball (via sudo tar).

    tar writes the archive itself ('cf <file>'), so no stdout redirect
    is needed — the old code opened the tarfile and passed it as stdout,
    pointlessly truncating it and leaking the handle. tar exit code 2
    (fatal) raises; code 1 ("files changed while reading") is tolerated,
    matching the original behavior.

    Returns a one-element list containing the tar filename.
    Raises BackupException on tar exit code 2.
    """
    tarfilename = '%s/%s.tar' % (self.tmpdir, self.id)
    logging.info("Backing up '%s' (%s)..." % (self.name, self.type))
    # NOTE(review): chdir is a process-wide side effect; tar's -C option
    # would be safer — kept to preserve behavior.
    os.chdir(os.path.dirname(self.path))
    dumpargs = [
        'sudo', 'tar', 'cf', tarfilename,
        "./" + os.path.basename(self.path)
    ]
    for exclude in self.excludes:
        dumpargs.append('--exclude')
        dumpargs.append(exclude)
    dumpproc1 = subprocess.Popen(dumpargs, stderr=subprocess.PIPE)
    # communicate() drains stderr while waiting, avoiding pipe deadlock.
    _, errmsg = dumpproc1.communicate()
    if dumpproc1.returncode == 2:
        raise BackupException("Error while dumping: %s" % errmsg)
    return [tarfilename, ]
def main():
    """Entry point: parse args, load JSON config, instantiate handlers, run.

    The handler registries live on the 'destinations', 'notifications'
    and 'sources' modules; the old code shadowed each module with a
    local list of the same name ('destinations = []') and then crashed
    on 'destinations.handlers'. Locals are renamed to avoid that.
    """
    try:
        # Read arguments
        parser = argparse.ArgumentParser()
        parser.add_argument(
            'configfile', metavar='configfile', nargs=1,
            help='name of configuration file to use for this run')
        parser.add_argument('-v', dest='verbose', action='store_true')
        parser.add_argument('-d', dest='debug', action='store_true')
        args = parser.parse_args()
        configfile = args.configfile[0]

        # Enable logging if verbosity needed
        if args.debug:
            logging.basicConfig(level=logging.DEBUG)
        elif args.verbose:
            logging.basicConfig(level=logging.INFO)

        # Read Json
        with open(configfile) as json_conf:
            config = json.load(json_conf)

        # Import modules listed in config (or the defaults)
        backup_modules = config['modules']
        if backup_modules is None:
            backup_modules = default_modules
        for modulename in backup_modules:
            logging.debug("Importing module '%s'" % modulename)
            try:
                module = __import__(modulename)
            except ImportError as e:
                logging.error("Error importing module: %s" % e.__str__())

        # Handlers for destinations
        dest_list = []
        for dest_id, dest_class in destinations.handlers.items():
            logging.debug("Dest(%s) - %s" % (dest_id, dest_class))
            for dest_config in config['destinations']:
                if dest_config['type'] == dest_id:
                    dest_list.append(dest_class(dest_config))

        # Handlers for notifications
        notify_list = []
        for notify_id, notify_class in notifications.handlers.items():
            logging.debug("Notify(%s) - %s" % (notify_id, notify_class))
            for notify_config in config['notifications']:
                if notify_config['type'] == notify_id:
                    notify_list.append(notify_class(notify_config))

        # Find sources from which to create backups
        source_list = []
        for source_id, source_class in sources.handlers.items():
            logging.debug("Source(%s) - %s" % (source_id, source_class))
            for source_config in config['sources']:
                if source_config['type'] == source_id:
                    source_list.append(source_class(source_config))

        if len(source_list) < 1:
            raise BackupException("No sources listed in configuration file.")

        instance = BackupRunInstance()
        instance.notifications = notify_list
        instance.sources = source_list
        instance.destinations = dest_list
        instance.run()
    except KeyboardInterrupt:
        sys.exit()
def main():
    """Entry point: parse args, load JSON config, instantiate handlers, run.

    Handler classes self-register in backups.destinations.handlers,
    backups.notifications.handlers and backups.sources.handlers when
    their modules are imported below.
    """
    try:
        # Make doubly sure temp files aren't world-viewable.
        # 0o077 — the old bare '077' literal is a SyntaxError on Python 3.
        os.umask(0o077)

        # Read command line arguments
        parser = argparse.ArgumentParser()
        parser.add_argument(
            'configfile', metavar='configfile', nargs=1,
            help='name of configuration file to use for this run')
        parser.add_argument('-v', dest='verbose', action='store_true')
        parser.add_argument('-d', dest='debug', action='store_true')
        args = parser.parse_args()
        configfile = args.configfile[0]

        # Enable logging if verbosity requested
        if args.debug:
            logging.basicConfig(level=logging.DEBUG)
        elif args.verbose:
            logging.basicConfig(level=logging.INFO)

        # Read our JSON configuration file
        with open(configfile) as json_conf:
            config = json.load(json_conf)

        # Import main and additional handler library modules
        backup_modules = config['modules']
        if backup_modules is None:
            backup_modules = default_modules
        for modulename in backup_modules:
            logging.debug("Importing module '%s'" % modulename)
            try:
                module = __import__(modulename)
            except ImportError as e:
                logging.error("Error importing module: %s" % e.__str__())

        # Instantiate handlers for any listed destinations
        destinations = []
        for dest_id, dest_class in backups.destinations.handlers.items():
            logging.debug("Dest(%s) - %s" % (dest_id, dest_class))
            for dest_config in config['destinations']:
                if dest_config['type'] == dest_id:
                    destination = dest_class(dest_config)
                    destinations.append(destination)

        # Instantiate handlers for any listed notifications
        notifications = []
        for notify_id, notify_class in backups.notifications.handlers.items():
            logging.debug("Notify(%s) - %s" % (notify_id, notify_class))
            for notify_config in config['notifications']:
                if notify_config['type'] == notify_id:
                    notification = notify_class(notify_config)
                    notifications.append(notification)

        # Loop through sections, process those we have sources for
        sources = []
        for source_id, source_class in backups.sources.handlers.items():
            logging.debug("Source(%s) - %s" % (source_id, source_class))
            for source_config in config['sources']:
                if source_config['type'] == source_id:
                    source = source_class(source_config)
                    sources.append(source)

        if len(sources) < 1:
            raise BackupException("No sources listed in configuration file.")

        instance = BackupRunInstance()
        instance.notifications = notifications
        instance.sources = sources
        instance.destinations = destinations
        instance.run()
    except KeyboardInterrupt:
        sys.exit()
def dump(self):
    """Snapshot a remote LVM volume over SSH, prune old snapshots, and
    stream the fresh snapshot down to a local dump file.

    Steps: lvcreate a timestamped snapshot; list existing snapshots of
    the origin volume with 'lvs --reportformat json'; lvremove all but
    the newest self.retain_snapshots of them; dd the new snapshot's
    device to a local file.

    Returns a one-element list containing the local dump filename.
    Raises BackupException when a remote command exits with a code > 1.
    """
    def run_remote(cmdargs, stdout=subprocess.PIPE):
        # Run a command on the remote host; returns (exitcode, out, err).
        # communicate() drains the pipes while waiting, avoiding the
        # wait()-then-read deadlock of the original per-call boilerplate.
        args = ['ssh', ('%s@%s' % (self.sshuser, self.sshhost))] + cmdargs
        logging.debug("Running '%s'" % (" ".join(args)))
        proc = subprocess.Popen(args, stdout=stdout, stderr=subprocess.PIPE)
        out, err = proc.communicate()
        if err not in (b'', None):
            logging.error(err)
        return proc.returncode, out, err

    # Identify snapshot source device
    snapshot_dev = dev_mapper_name(self.vg_name, self.lv_name)

    # Trigger a new snapshot
    now = datetime.datetime.now()
    snapshot_id = "{}-{}".format(self.lv_name, now.strftime("%Y%m%d%H%M%S"))
    logging.info("Snapshotting '%s'..." % self.lv_name)
    exitcode, _, errmsg = run_remote(
        ['lvcreate', ('-L%s' % self.size), '-s', '-n', snapshot_id, snapshot_dev])
    if exitcode > 1:
        raise BackupException("Error while snapshotting (exitcode %d): %s" % (exitcode, errmsg))
    logging.debug("Snapshot '%s' created." % snapshot_id)

    # Fetching list of snapshots
    logging.info("Fetching list of snapshots...")
    exitcode, outmsg, errmsg = run_remote(
        ['lvs', '-S', ('origin=\"%s\"' % self.lv_name), '--reportformat', 'json'])
    if exitcode > 1:
        raise BackupException("Error while listing snapshots (exitcode %d): %s" % (exitcode, errmsg))
    lvs = json.loads(outmsg)['report'][0]['lv']
    snapshots = [lv['lv_name'] for lv in lvs]
    logging.info("Found %d snapshots." % len(snapshots))

    # Clear down older, redundant snapshots
    logging.info("Clearing down redundant snapshots...")
    redundant_count = len(snapshots) - self.retain_snapshots
    if redundant_count > 0:
        logging.info("Removing %d redundant snapshots..." % redundant_count)
        # Snapshot names embed a sortable timestamp, so lexical order is
        # age order; the oldest sort first.
        for lvm_id in sorted(snapshots)[0:redundant_count]:
            logging.debug("Deleting volume snapshot '%s'..." % lvm_id)
            exitcode, _, errmsg = run_remote(
                ['lvremove', '-f', dev_mapper_name(self.vg_name, lvm_id)])
            if exitcode > 1:
                # (Also fixes the doubled quote typo in the old message.)
                raise BackupException("Error while removing snapshot '%s' (exitcode %d): %s" % (lvm_id, exitcode, errmsg))

    # Stream volume down to a local file
    logging.info("Downloading volume for snapshot '%s'..." % snapshot_id)
    dumpfilename = '%s/%s.dump' % (self.tmpdir, self.lv_name)
    with open(dumpfilename, 'wb') as dumpfile:
        exitcode, _, errmsg = run_remote(
            ['dd', ('if=%s' % dev_mapper_name(self.vg_name, snapshot_id)), "status=none"],
            stdout=dumpfile)
    if exitcode > 1:
        raise BackupException("Error while dumping (exitcode %d): %s" % (exitcode, errmsg))
    return [dumpfilename, ]