class SCBackup(SCOperation):
    """Backup operation: collects the DB dump, git sources, migration levels
    and ruleset-middleware data into a temp directory, tars it as .tar.bz2
    and hands the archive to the configured storage backend."""

    def __init__(self):
        self.storage = BaseStorage.storage_factory()
        self.servername = DB_SERVERNAME

    def backup_git_sources(self):
        """Create a tar of the git sources in the target directory."""
        sys.stdout.write("%s in %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        call_dir = os.getcwd()
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'w')
        try:
            # chdir so archive members are stored relative to the sources root.
            os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
            ts.add('.')
        finally:
            # Fix: close the archive and restore the process cwd even when
            # tarring fails; the original leaked both on exception.
            ts.close()
            os.chdir(call_dir)

    def backup_db(self):
        """Run the DB backup commands, writing the dump to <dir>/dbbackup."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        with open(os.path.join(self.directory, 'dbbackup'), 'w') as outputfile:
            self.dbcommands.run_backup_commands(outputfile)

    def backup_ruleset_middleware(self):
        """Delegate extra backup work to the ruleset middleware's `backup`
        submodule; skip silently when the middleware has none."""
        try:
            __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        probe_class = __import__(settings.RULESET_MIDDLEWARE)
        probe_class.backup.backup(self.directory)

    def write_migration_level(self):
        """Record the current migration levels as JSON in <dir>/miglevel."""
        last_migrations = self.get_migration_levels()
        migfile = os.path.join(self.directory, 'miglevel')
        with open(migfile, 'w') as miglevel:
            miglevel.write(json.dumps(last_migrations))

    def run(self):
        """Assemble all backup pieces, archive them and push to storage."""
        self.directory = tempfile.mkdtemp()
        self.write_migration_level()
        self.backup_db()
        self.backup_git_sources()
        self.backup_ruleset_middleware()
        # Create tar archive of the collected directory.
        call_dir = os.getcwd()
        try:
            os.chdir(self.directory)
            filename = filename_generate('tar.bz2', self.dbcommands.settings.database['NAME'], self.servername)
            outputfile = tempfile.SpooledTemporaryFile()
            ts = tarfile.open(filename, 'w:bz2', fileobj=outputfile)
            try:
                for dfile in os.listdir('.'):
                    ts.add(dfile)
            finally:
                # Fix: always close so the bz2 stream is finalized.
                ts.close()
            self.storage.write_file(outputfile, filename)
        finally:
            # Fix: remove the temp dir and restore cwd even on failure; the
            # original skipped both when an exception escaped.
            shutil.rmtree(self.directory)
            os.chdir(call_dir)
class SCBackup(SCOperation):
    """Backup operation: collects the DB dump, git sources, migration levels
    and ruleset-middleware data into a temp directory, tars it as .tar.bz2
    and hands the archive to the configured storage backend."""

    def __init__(self):
        self.storage = BaseStorage.storage_factory()
        self.servername = DB_SERVERNAME

    def backup_git_sources(self):
        """Create a tar of the git sources in the target directory."""
        sys.stdout.write("%s in %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        call_dir = os.getcwd()
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'w')
        try:
            # chdir so archive members are stored relative to the sources root.
            os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
            ts.add('.')
        finally:
            # Fix: close the archive and restore the process cwd even when
            # tarring fails; the original leaked both on exception.
            ts.close()
            os.chdir(call_dir)

    def backup_db(self):
        """Run the DB backup commands, writing the dump to <dir>/dbbackup."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        with open(os.path.join(self.directory, 'dbbackup'), 'w') as outputfile:
            self.dbcommands.run_backup_commands(outputfile)

    def backup_ruleset_middleware(self):
        """Delegate extra backup work to the ruleset middleware's `backup`
        submodule; skip silently when the middleware has none."""
        try:
            # Probe only: the returned module object was never used, so the
            # unused local binding has been dropped.
            __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        Probe = __import__(settings.RULESET_MIDDLEWARE)
        Probe.backup.backup(self.directory)

    def write_migration_level(self):
        """Record the current migration levels as JSON in <dir>/miglevel."""
        last_migrations = self.get_migration_levels()
        migfile = os.path.join(self.directory, 'miglevel')
        with open(migfile, 'w') as miglevel:
            miglevel.write(json.dumps(last_migrations))

    def run(self):
        """Assemble all backup pieces, archive them and push to storage."""
        self.directory = tempfile.mkdtemp()
        self.write_migration_level()
        self.backup_db()
        self.backup_git_sources()
        self.backup_ruleset_middleware()
        # Create tar archive of the collected directory.
        call_dir = os.getcwd()
        try:
            os.chdir(self.directory)
            filename = filename_generate('tar.bz2', self.dbcommands.settings.database['NAME'], self.servername)
            outputfile = tempfile.SpooledTemporaryFile()
            ts = tarfile.open(filename, 'w:bz2', fileobj=outputfile)
            try:
                for dfile in os.listdir('.'):
                    ts.add(dfile)
            finally:
                # Fix: always close so the bz2 stream is finalized.
                ts.close()
            self.storage.write_file(outputfile, filename)
        finally:
            # Fix: restore cwd even on failure; the original left the process
            # chdir'd into the (temp) backup directory on exception.
            os.chdir(call_dir)
        # NOTE(review): self.directory is never removed, so mkdtemp dirs
        # accumulate across runs — confirm whether that cleanup was dropped
        # intentionally (e.g. for debugging) or should be restored.
class Command(LabelCommand):
    """dbbackup management command: dump each configured database to the
    storage backend, optionally compressing, encrypting and pruning old
    backup files."""

    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        try:
            self.clean = options.get('clean')
            self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
            self.database = options.get('database')
            self.servername = options.get('servername')
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            # -d restricts the run to one database; otherwise back up all.
            if self.database:
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database, database_key)
                self.cleanup_old_backups(database)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database, database_name):
        """ Save a new backup file. """
        print("Backing Up Database: %s" % database['NAME'])
        filename = self.dbcommands.filename(self.servername)
        outputfile = tempfile.SpooledTemporaryFile(max_size=10 * 1024 * 1024)
        self.dbcommands.run_backup_commands(outputfile)
        # Fix: a SpooledTemporaryFile exposes no usable 'name' before rollover,
        # so compress_file's 'inputfile.name + ".gz"' raised AttributeError.
        # Name the file explicitly before compressing/encrypting.
        outputfile.name = filename
        if self.compress:
            compressed_file = self.compress_file(outputfile)
            outputfile.close()
            outputfile = compressed_file
        if self.encrypt:
            encrypted_file = utils.encrypt_file(outputfile)
            outputfile = encrypted_file
        # NOTE(review): the storage filename is not updated with '.gz' (or an
        # encryption suffix) — only outputfile.name carries it. Confirm the
        # storage backend's expectation before changing.
        print(" Backup tempfile created: %s" % (utils.handle_size(outputfile)))
        print(" Writing file to %s: %s, filename: %s" % (self.storage.name, self.storage.backup_dir, filename))
        self.storage.write_file(outputfile, filename)

    def cleanup_old_backups(self, database):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP. """
        if self.clean:
            print("Cleaning Old Backups for: %s" % database['NAME'])
            filepaths = self.storage.list_directory()
            filepaths = self.dbcommands.filter_filepaths(filepaths)
            for filepath in sorted(filepaths[0:-self.clean_keep]):
                regex = r'^%s' % self.dbcommands.filename_match(self.servername, '(.*?)')
                datestr = re.findall(regex, os.path.basename(filepath))[0]
                # The parsed date is currently unused: the first-of-month keep
                # rule below is disabled, so every file past clean_keep is
                # deleted unconditionally. Parsing is kept so malformed names
                # still fail loudly here rather than being silently deleted.
                dateTime = datetime.datetime.strptime(datestr, dbbackup_settings.DATE_FORMAT)
                # if int(dateTime.strftime("%d")) != 1:
                print(" Deleting: %s" % filepath)
                self.storage.delete_file(filepath)

    def compress_file(self, inputfile):
        """ Compress this file using gzip. The input and the output are
        filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(max_size=10 * 1024 * 1024)
        outputfile.name = inputfile.name + '.gz'
        # Renamed local from 'zipfile' to avoid shadowing the stdlib module.
        gz = gzip.GzipFile(fileobj=outputfile, mode="wb")
        # try/finally (not except): it does not handle exceptions, it only
        # guarantees the gzip trailer is flushed and the stream closed;
        # errors still propagate to the caller.
        try:
            inputfile.seek(0)
            copyfileobj(inputfile, gz, 2 * 1024 * 1024)
        finally:
            gz.close()
        return outputfile
class Command(LabelCommand):
    """dbbackup management command: dump each configured database into a
    temp directory, optionally compress/encrypt the file, push it to the
    storage backend and prune old backups."""

    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-x", "--backup-extension", help="The extension to use when saving backups."),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        try:
            self.clean = options.get('clean')
            self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
            self.database = options.get('database')
            self.servername = options.get('servername') or dbbackup_settings.SERVER_NAME
            # Fix: optparse stores "--backup-extension" under dest
            # 'backup_extension' (dashes become underscores), so the original
            # options.get('backup-extension') was always None and the -x flag
            # was silently ignored.
            self.backup_extension = options.get('backup_extension') or None
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            # -d restricts the run to one database; otherwise back up all.
            if self.database:
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                database_name = database['NAME']
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database_name)
                if self.clean:
                    self.cleanup_old_backups(database_name)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database_name):
        """ Save a new backup file. """
        print("Backing Up Database: %s" % database_name)
        temp_dir = tempfile.mkdtemp(prefix='backup')
        try:
            backup_extension = self.backup_extension or self.dbcommands.settings.extension
            backup_file = os.path.join(
                temp_dir,
                utils.generate_backup_filename(database_name, self.servername, backup_extension)
            )
            # Dumps are binary data; write in 'wb'.
            with open(backup_file, 'wb') as f:
                self.dbcommands.run_backup_commands(f)
            if self.compress:
                backup_file = self.compress_file(backup_file)
            if self.encrypt:
                backup_file = utils.encrypt_file(backup_file)
            print(" Backup tempfile created: %s" % (utils.handle_size(backup_file)))
            print(" Writing file to %s: %s" % (self.storage.name, backup_file))
            self.storage.write_file(backup_file)
        finally:
            # Always remove the scratch dir, even when a step fails.
            shutil.rmtree(temp_dir)

    def cleanup_old_backups(self, database_name):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the
        month. """
        print("Cleaning Old Backups for: %s" % database_name)
        file_list = utils.get_backup_file_list(
            database_name, self.servername, self.dbcommands.settings.extension, self.storage
        )
        for backup_date, filename in sorted(file_list[0:-self.clean_keep]):
            # First-of-month backups are retained as monthly archives.
            if int(backup_date.strftime("%d")) != 1:
                print(" Deleting: %s" % filename)
                self.storage.delete_file(filename)

    def compress_file(self, input_path):
        """ Compress this file using gzip. The input and the output are paths. """
        output_path = input_path + '.gz'
        with open(output_path, 'wb') as output_f:
            # Renamed local from 'zipfile' to avoid shadowing the stdlib module.
            gz = gzip.GzipFile(fileobj=output_f, mode="wb")
            try:
                # Fix: read the dump as bytes — GzipFile.write requires bytes,
                # and a binary dump opened in text mode would fail to decode.
                with open(input_path, 'rb') as input_f:
                    gz.write(input_f.read())
            finally:
                gz.close()
        return output_path
class Command(BaseDbBackupCommand):
    """dbbackup management command: dump each configured database to a
    spooled temp file, optionally compress/encrypt it, push it to the
    storage backend and prune old backups (keeping first-of-month files)."""

    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseDbBackupCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        self.verbosity = int(options.get('verbosity'))
        self.quiet = options.get('quiet')
        try:
            self.clean = options.get('clean')
            self.clean_keep = dbbackup_settings.CLEANUP_KEEP
            self.database = options.get('database')
            self.servername = options.get('servername')
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            # -d restricts the run to one database; otherwise back up all.
            if self.database:
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database)
                self.cleanup_old_backups(database)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database):
        """ Save a new backup file. """
        self.log("Backing Up Database: %s" % database['NAME'], 1)
        filename = self.dbcommands.filename(self.servername)
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        self.dbcommands.run_backup_commands(outputfile)
        # Name the spooled file so downstream steps can derive suffixed names.
        outputfile.name = filename
        if self.compress:
            compressed_file, filename = self.compress_file(
                outputfile, filename)
            outputfile.close()
            outputfile = compressed_file
        if self.encrypt:
            encrypted_file, filename = utils.encrypt_file(outputfile, filename)
            outputfile = encrypted_file
        self.log(
            " Backup tempfile created: %s" % (utils.handle_size(outputfile)), 1)
        self.log(
            " Writing file to %s: %s, filename: %s" % (self.storage.name, self.storage.backup_dir, filename), 1)
        self.storage.write_file(outputfile, filename)

    def cleanup_old_backups(self, database):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the
        month. """
        if self.clean:
            self.log("Cleaning Old Backups for: %s" % database['NAME'], 1)
            filepaths = self.storage.list_directory()
            filepaths = self.dbcommands.filter_filepaths(filepaths)
            for filepath in sorted(filepaths[0:-self.clean_keep]):
                # Extract the date part from the backup filename to decide
                # whether this is a first-of-month (archival) backup.
                regex = r'^%s' % self.dbcommands.filename_match(
                    self.servername, '(.*?)')
                datestr = re.findall(regex, os.path.basename(filepath))[0]
                dateTime = datetime.datetime.strptime(
                    datestr, dbbackup_settings.DATE_FORMAT)
                if int(dateTime.strftime("%d")) != 1:
                    self.log(" Deleting: %s" % filepath, 1)
                    self.storage.delete_file(filepath)

    def compress_file(self, inputfile, filename):
        """ Compress this file using gzip. The input and the output are
        filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        new_filename = filename + '.gz'
        # Renamed local from 'zipfile' to avoid shadowing the stdlib module.
        gz = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb")
        # try/finally (answering the old TODO): it handles no exceptions, it
        # only guarantees the gzip trailer is flushed and the stream closed;
        # errors still propagate to the caller.
        try:
            inputfile.seek(0)
            copyfileobj(inputfile, gz, 2 * 1024 * 1024)
        finally:
            gz.close()
        return outputfile, new_filename