class SCBackup(SCOperation):
    """Create a full Scirius backup — DB dump, git rule sources, ruleset
    middleware data and migration levels — and push one tar.bz2 archive
    to the configured storage backend."""

    def __init__(self):
        # Storage backend resolved from the dbbackup configuration.
        self.storage = BaseStorage.storage_factory()
        self.servername = DB_SERVERNAME

    def backup_git_sources(self):
        # Create a tar of the git sources in the target directory.
        sys.stdout.write("%s in %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'w')
        call_dir = os.getcwd()
        # chdir first so archive members are stored relative to the sources root.
        os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        ts.add('.')
        ts.close()
        # NOTE(review): cwd is not restored if tarfile raises — consider try/finally.
        os.chdir(call_dir)

    def backup_db(self):
        """Dump the default Django database to <tmpdir>/dbbackup."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        with open(os.path.join(self.directory, 'dbbackup'), 'w') as outputfile:
            self.dbcommands.run_backup_commands(outputfile)

    def backup_ruleset_middleware(self):
        """Let the ruleset middleware dump its own data if it provides a
        'backup' submodule; silently skip when it does not."""
        try:
            __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        probe_class = __import__(settings.RULESET_MIDDLEWARE)
        probe_class.backup.backup(self.directory)

    def write_migration_level(self):
        """Record the current Django migration levels as JSON in
        <tmpdir>/miglevel so a restore can check compatibility."""
        last_migrations = self.get_migration_levels()
        migfile = os.path.join(self.directory, 'miglevel')
        with open(migfile, 'w') as miglevel:
            miglevel.write(json.dumps(last_migrations))

    def run(self):
        """Assemble all backup pieces in a temp directory, pack them into a
        tar.bz2 archive and hand it to the storage backend."""
        self.directory = tempfile.mkdtemp()
        self.write_migration_level()
        self.backup_db()
        self.backup_git_sources()
        self.backup_ruleset_middleware()
        # create tar archive of dir
        call_dir = os.getcwd()
        os.chdir(self.directory)
        filename = filename_generate('tar.bz2', self.dbcommands.settings.database['NAME'], self.servername)
        outputfile = tempfile.SpooledTemporaryFile()
        ts = tarfile.open(filename, 'w:bz2', fileobj=outputfile)
        for dfile in os.listdir('.'):
            ts.add(dfile)
        ts.close()
        self.storage.write_file(outputfile, filename)
        # NOTE(review): the current working directory is removed before we
        # chdir back — works on POSIX, but fragile; cwd is also not restored
        # on exception.
        shutil.rmtree(self.directory)
        os.chdir(call_dir)
class SCRestore(SCOperation):
    """Restore a Scirius backup from storage: git sources, database and
    ruleset middleware data, then apply pending Django migrations."""

    def __init__(self, filepath = None):
        self.storage = BaseStorage.storage_factory()
        if filepath:
            self.filepath = filepath
        else:
            # Default to the most recent backup available in storage.
            self.filepath = self.storage.get_latest_backup()
        self.servername = DB_SERVERNAME

    def restore_git_sources(self):
        """Wipe the git sources directory and repopulate it from sources.tar."""
        sys.stdout.write("Restoring to %s from %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'r')
        shutil.rmtree(settings.GIT_SOURCES_BASE_DIRECTORY, ignore_errors = True)
        if not os.path.exists(settings.GIT_SOURCES_BASE_DIRECTORY):
            os.mkdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        # Extract relative members into the freshly created sources root.
        os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        ts.extractall()

    def restore_db(self):
        """Load the 'dbbackup' dump into the default Django database."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        filepath = os.path.join(self.directory, 'dbbackup')
        with open(filepath, 'r') as inputfile:
            self.dbcommands.run_restore_commands(inputfile)

    def restore_ruleset_middleware(self):
        """Let the ruleset middleware restore its data if it ships a
        'backup' submodule; silently skip when it does not."""
        try:
            middleware_backup = __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        Probe = __import__(settings.RULESET_MIDDLEWARE)
        Probe.backup.restore(self.directory)

    def test_migration_level(self):
        """Return False when the backup's recorded migration levels are newer
        than this instance's (restoring would then be unsafe)."""
        miglevel = None
        with open(os.path.join(self.directory, 'miglevel'), 'r') as migfile:
            miglevel = json.load(migfile)
        return self.is_migration_level_lower(miglevel)

    def run(self):
        # extract archive in tmp directory
        inputfile = self.storage.read_file(self.filepath)
        call_dir = os.getcwd()
        ts = tarfile.open(self.filepath, 'r', fileobj=inputfile)
        tmpdir = tempfile.mkdtemp()
        os.chdir(tmpdir)
        ts.extractall()
        ts.close()
        self.directory = tmpdir
        # Refuse to restore a backup produced by a newer Scirius.
        if self.test_migration_level() == False:
            raise SCBackupException("Backup is newer than local Scirius version, please update local instance and apply migrations.")
        self.restore_git_sources()
        self.restore_db()
        # Apply upgrades
        call_command('migrate', '--noinput')
        self.restore_ruleset_middleware()
        # NOTE(review): restore_git_sources changed cwd away from tmpdir, so
        # removing it here is safe; finally restore the caller's cwd.
        shutil.rmtree(tmpdir)
        os.chdir(call_dir)
class SCRestore(SCOperation):
    """Restore a Scirius backup from storage: git sources, database and
    ruleset middleware data."""

    def __init__(self, filepath = None):
        self.storage = BaseStorage.storage_factory()
        if filepath:
            self.filepath = filepath
        else:
            # Default to the most recent backup available in storage.
            self.filepath = self.storage.get_latest_backup()
        self.servername = DB_SERVERNAME

    def restore_git_sources(self):
        """Wipe the git sources directory and repopulate it from sources.tar."""
        sys.stdout.write("Restoring to %s from %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'r')
        shutil.rmtree(settings.GIT_SOURCES_BASE_DIRECTORY, ignore_errors = True)
        if not os.path.exists(settings.GIT_SOURCES_BASE_DIRECTORY):
            os.mkdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        # Extract relative members into the freshly created sources root.
        os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        ts.extractall()

    def restore_db(self):
        """Load the 'dbbackup' dump into the default Django database."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        filepath = os.path.join(self.directory, 'dbbackup')
        with open(filepath, 'r') as inputfile:
            self.dbcommands.run_restore_commands(inputfile)

    def restore_ruleset_middleware(self):
        """Let the ruleset middleware restore its data if it ships a
        'backup' submodule; silently skip when it does not."""
        try:
            middleware_backup = __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        Probe = __import__(settings.RULESET_MIDDLEWARE)
        Probe.backup.restore(self.directory)

    def test_migration_level(self):
        """Return False when the backup's recorded migration levels are newer
        than this instance's (restoring would then be unsafe)."""
        miglevel = None
        with open(os.path.join(self.directory, 'miglevel'), 'r') as migfile:
            miglevel = json.load(migfile)
        return self.is_migration_level_lower(miglevel)

    def run(self):
        # extract archive in tmp directory
        inputfile = self.storage.read_file(self.filepath)
        call_dir = os.getcwd()
        ts = tarfile.open(self.filepath, 'r', fileobj=inputfile)
        tmpdir = tempfile.mkdtemp()
        os.chdir(tmpdir)
        ts.extractall()
        ts.close()
        self.directory = tmpdir
        # Refuse to restore a backup produced by a newer Scirius.
        if self.test_migration_level() == False:
            raise SCBackupException("Backup is newer than local Scirius version, please update local instance and apply migrations.")
        self.restore_git_sources()
        self.restore_db()
        self.restore_ruleset_middleware()
        # restore_git_sources changed cwd away from tmpdir, so removing it
        # here is safe; finally restore the caller's cwd.
        shutil.rmtree(tmpdir)
        os.chdir(call_dir)
class SCBackup(SCOperation):
    """Create a full Scirius backup — DB dump, git rule sources, ruleset
    middleware data and migration levels — and push one tar.bz2 archive
    to the configured storage backend."""

    def __init__(self):
        # Storage backend resolved from the dbbackup configuration.
        self.storage = BaseStorage.storage_factory()
        self.servername = DB_SERVERNAME

    def backup_git_sources(self):
        # Create a tar of the git sources in the target directory.
        sys.stdout.write("%s in %s\n" % (settings.GIT_SOURCES_BASE_DIRECTORY, self.directory))
        ts = tarfile.open(os.path.join(self.directory, 'sources.tar'), 'w')
        call_dir = os.getcwd()
        # chdir first so archive members are stored relative to the sources root.
        os.chdir(settings.GIT_SOURCES_BASE_DIRECTORY)
        ts.add('.')
        ts.close()
        os.chdir(call_dir)

    def backup_db(self):
        """Dump the default Django database to <tmpdir>/dbbackup."""
        database = settings.DATABASES['default']
        self.dbcommands = DBCommands(database)
        with open(os.path.join(self.directory, 'dbbackup'), 'w') as outputfile:
            self.dbcommands.run_backup_commands(outputfile)

    def backup_ruleset_middleware(self):
        """Let the ruleset middleware dump its own data if it provides a
        'backup' submodule; silently skip when it does not."""
        try:
            middleware_backup = __import__("%s.%s" % (settings.RULESET_MIDDLEWARE, 'backup'))
        except ImportError:
            return
        Probe = __import__(settings.RULESET_MIDDLEWARE)
        Probe.backup.backup(self.directory)

    def write_migration_level(self):
        """Record the current Django migration levels as JSON in
        <tmpdir>/miglevel so a restore can check compatibility."""
        last_migrations = self.get_migration_levels()
        migfile = os.path.join(self.directory, 'miglevel')
        with open(migfile, 'w') as miglevel:
            miglevel.write(json.dumps(last_migrations))

    def run(self):
        """Assemble all backup pieces in a temp directory, pack them into a
        tar.bz2 archive and hand it to the storage backend."""
        self.directory = tempfile.mkdtemp()
        self.write_migration_level()
        self.backup_db()
        self.backup_git_sources()
        self.backup_ruleset_middleware()
        # create tar archive of dir
        call_dir = os.getcwd()
        os.chdir(self.directory)
        filename = filename_generate('tar.bz2', self.dbcommands.settings.database['NAME'], self.servername)
        outputfile = tempfile.SpooledTemporaryFile()
        ts = tarfile.open(filename, 'w:bz2', fileobj=outputfile)
        for dfile in os.listdir('.'):
            ts.add(dfile)
        ts.close()
        self.storage.write_file(outputfile, filename)
        # NOTE(review): self.directory (mkdtemp above) is never removed here,
        # so every run leaks a temp directory — confirm whether a caller
        # cleans it up.
        os.chdir(call_dir)
def handle(self, **options):
    """Django command handler: back up one database (``--database``) or all
    configured ones, then optionally prune old backups (``--clean``).

    Storage-layer failures are converted to ``CommandError`` so Django
    reports them as a normal command failure.
    """
    try:
        self.clean = options.get('clean')
        self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
        self.database = options.get('database')
        self.servername = options.get('servername') or dbbackup_settings.SERVER_NAME
        # BUG FIX: optparse stores "--backup-extension" under the underscored
        # dest 'backup_extension'; the previous 'backup-extension' key never
        # existed in the options dict, so the flag was silently ignored.
        self.backup_extension = options.get('backup_extension') or None
        self.compress = options.get('compress')
        self.encrypt = options.get('encrypt')
        self.storage = BaseStorage.storage_factory()
        if self.database:
            # Single database requested: build a one-element tuple.
            database_keys = self.database,
        else:
            database_keys = dbbackup_settings.DATABASES
        for database_key in database_keys:
            database = settings.DATABASES[database_key]
            database_name = database['NAME']
            self.dbcommands = DBCommands(database)
            self.save_new_backup(database_name)
            if self.clean:
                self.cleanup_old_backups(database_name)
    except StorageError as err:
        raise CommandError(err)
def setUp(self):
    """Build a DbbackupCommand wired to the test DB, stubbed commands and a
    fake storage backend, with output discarded."""
    command = DbbackupCommand()
    command.database = TEST_DATABASE['NAME']
    command.dbcommands = DBCommands(TEST_DATABASE)
    command.storage = FakeStorage()
    command.stdout = DEV_NULL
    command.filename = None
    command.path = None
    self.command = command
def setUp(self):
    """Build a DbbackupCommand configured for cleanup tests: fake storage,
    cleaning enabled and only one backup kept."""
    command = DbbackupCommand()
    command.database = TEST_DATABASE['NAME']
    command.dbcommands = DBCommands(TEST_DATABASE)
    command.storage = FakeStorage()
    command.clean = True
    command.clean_keep = 1
    command.stdout = DEV_NULL
    self.command = command
def setUp(self):
    """Prepare a DbbackupCommand for compression tests; skipped on Python 3
    where compression is not implemented."""
    if six.PY3:
        self.skipTest("Compression isn't implemented in Python3")
    # Make sure the test database file exists on disk before the command runs.
    open(TEST_DATABASE['NAME'], 'a+b').close()
    command = DbbackupCommand()
    command.database = TEST_DATABASE['NAME']
    command.dbcommands = DBCommands(TEST_DATABASE)
    command.storage = FakeStorage()
    command.stdout = DEV_NULL
    self.command = command
def handle(self, **options):
    """ Django command handler.

    Parses dbrestore options, resolves the target database and storage
    backend, then either lists available backups (``--list``) or runs the
    restore. Storage failures surface as CommandError.
    """
    try:
        # Drop the current DB connection so the restore can safely
        # recreate/overwrite the database.
        connection.close()
        self.filepath = options.get('filepath')
        self.backup_extension = options.get('backup_extension') or 'backup'
        self.servername = options.get('servername')
        self.decrypt = options.get('decrypt')
        self.uncompress = options.get('uncompress')
        self.passphrase = options.get('passphrase')
        self.database = self._get_database(options)
        self.storage = BaseStorage.storage_factory()
        self.dbcommands = DBCommands(self.database)
        if options.get('list'):
            return self.list_backups()
        self.restore_backup()
    except StorageError as err:
        raise CommandError(err)
def setUp(self):
    """Prepare a DbbackupCommand for backup tests: ensure the test database
    file exists on disk, then wire fake storage and discard output."""
    # Create the test database file if missing (append mode is a no-op on
    # an existing file).
    open(TEST_DATABASE['NAME'], 'a+b').close()
    self.command = DbbackupCommand()
    self.command.servername = 'foo-server'
    self.command.encrypt = False
    self.command.compress = False
    self.command.database = TEST_DATABASE['NAME']
    self.command.dbcommands = DBCommands(TEST_DATABASE)
    self.command.storage = FakeStorage()
    self.command.stdout = DEV_NULL
    # Removed a trailing `open(TEST_DATABASE['NAME']).close()` — a redundant
    # read-mode re-open of the file already created above; it had no effect.
def setUp(self):
    """Build a DbrestoreCommand pointing at a fixed backup file, with fake
    storage and all transforms (decrypt/uncompress) disabled."""
    command = DbrestoreCommand()
    command.stdout = DEV_NULL
    command.uncompress = False
    command.decrypt = False
    command.backup_extension = 'bak'
    command.filepath = 'foofile'
    command.database = TEST_DATABASE
    command.dbcommands = DBCommands(TEST_DATABASE)
    command.passphrase = None
    command.storage = FakeStorage()
    self.command = command
def handle(self, **options):
    """ Django command handler.

    Backs up the requested database (or every configured one) and then
    invokes cleanup; cleanup itself is expected to honour self.clean.
    """
    try:
        self.clean = options.get('clean')
        self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
        self.database = options.get('database')
        self.servername = options.get('servername')
        self.compress = options.get('compress')
        self.encrypt = options.get('encrypt')
        self.storage = BaseStorage.storage_factory()
        if self.database:
            # Single database requested: build a one-element tuple.
            database_keys = self.database,
        else:
            database_keys = dbbackup_settings.DATABASES
        for database_key in database_keys:
            database = settings.DATABASES[database_key]
            self.dbcommands = DBCommands(database)
            self.save_new_backup(database, database_key)
            self.cleanup_old_backups(database)
    except StorageError as err:
        raise CommandError(err)
def setUp(self):
    """Build a DbrestoreCommand with fake storage and import the test GPG
    private key so decryption paths can run."""
    self.command = DbrestoreCommand()
    self.command.stdout = DEV_NULL
    self.command.uncompress = False
    self.command.decrypt = False
    self.command.backup_extension = 'bak'
    self.command.filepath = 'foofile'
    self.command.database = TEST_DATABASE
    self.command.dbcommands = DBCommands(TEST_DATABASE)
    self.command.passphrase = None
    self.command.storage = FakeStorage()
    # Import the test private key into the local gpg keyring; gpg's own
    # output is discarded.
    cmd = ('gpg --import %s' % GPG_PRIVATE_PATH).split()
    subprocess.call(cmd, stdout=DEV_NULL, stderr=DEV_NULL)
def setUp(self):
    """Build an interactive DbrestoreCommand on fake storage, reset the
    recorded fake files and install the test GPG private key."""
    command = DbrestoreCommand()
    command.stdout = DEV_NULL
    command.uncompress = False
    command.decrypt = False
    command.backup_extension = 'bak'
    command.filename = 'foofile'
    command.database = TEST_DATABASE
    command.dbcommands = DBCommands(TEST_DATABASE)
    command.passphrase = None
    command.interactive = True
    command.storage = FakeStorage()
    self.command = command
    HANDLED_FILES.clean()
    add_private_gpg()
def handle(self, **options):
    """ Django command handler.

    Records verbosity/quiet for logging, backs up the requested database
    (or every configured one) and then invokes cleanup; cleanup itself is
    expected to honour self.clean.
    """
    self.verbosity = int(options.get('verbosity'))
    self.quiet = options.get('quiet')
    try:
        self.clean = options.get('clean')
        self.clean_keep = dbbackup_settings.CLEANUP_KEEP
        self.database = options.get('database')
        self.servername = options.get('servername')
        self.compress = options.get('compress')
        self.encrypt = options.get('encrypt')
        self.storage = BaseStorage.storage_factory()
        if self.database:
            # Single database requested: build a one-element tuple.
            database_keys = self.database,
        else:
            database_keys = dbbackup_settings.DATABASES
        for database_key in database_keys:
            database = settings.DATABASES[database_key]
            self.dbcommands = DBCommands(database)
            self.save_new_backup(database)
            self.cleanup_old_backups(database)
    except StorageError as err:
        raise CommandError(err)
def backup_db(self):
    """Dump the default Django database into <self.directory>/dbbackup."""
    default_db = settings.DATABASES['default']
    self.dbcommands = DBCommands(default_db)
    dump_path = os.path.join(self.directory, 'dbbackup')
    with open(dump_path, 'w') as outputfile:
        self.dbcommands.run_backup_commands(outputfile)
class Command(LabelCommand):
    """Back up one or all configured databases to the storage backend,
    optionally compressing, encrypting and pruning old backups."""
    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-x", "--backup-extension", help="The extension to use when saving backups."),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        try:
            self.clean = options.get('clean')
            self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
            self.database = options.get('database')
            self.servername = options.get('servername') or dbbackup_settings.SERVER_NAME
            # BUG FIX: optparse stores "--backup-extension" under the
            # underscored dest 'backup_extension'; the previous
            # 'backup-extension' key never existed, so the flag was ignored.
            self.backup_extension = options.get('backup_extension') or None
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            if self.database:
                # Single database requested: build a one-element tuple.
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                database_name = database['NAME']
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database_name)
                if self.clean:
                    self.cleanup_old_backups(database_name)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database_name):
        """ Save a new backup file.

        Dumps the database into a scratch directory, optionally compresses
        and encrypts the dump, then writes it to storage. The scratch
        directory is always removed, even on failure.
        """
        print("Backing Up Database: %s" % database_name)
        temp_dir = tempfile.mkdtemp(prefix='backup')
        try:
            backup_extension = self.backup_extension or self.dbcommands.settings.extension
            backup_file = os.path.join(
                temp_dir,
                utils.generate_backup_filename(database_name, self.servername, backup_extension)
            )
            with open(backup_file, 'wb') as f:
                self.dbcommands.run_backup_commands(f)
            if self.compress:
                backup_file = self.compress_file(backup_file)
            if self.encrypt:
                backup_file = utils.encrypt_file(backup_file)
            print(" Backup tempfile created: %s" % (utils.handle_size(backup_file)))
            print(" Writing file to %s: %s" % (self.storage.name, backup_file))
            self.storage.write_file(backup_file)
        finally:
            shutil.rmtree(temp_dir)

    def cleanup_old_backups(self, database_name):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month.
        """
        print("Cleaning Old Backups for: %s" % database_name)
        file_list = utils.get_backup_file_list(
            database_name, self.servername, self.dbcommands.settings.extension, self.storage
        )
        # NOTE(review): the list is sliced before being sorted — this is only
        # correct if get_backup_file_list returns entries in date order.
        for backup_date, filename in sorted(file_list[0:-self.clean_keep]):
            if int(backup_date.strftime("%d")) != 1:
                print(" Deleting: %s" % filename)
                self.storage.delete_file(filename)

    def compress_file(self, input_path):
        """ Compress this file using gzip. The input and the output are paths. """
        output_path = input_path + '.gz'
        with open(output_path, 'wb') as output_f:
            gz_file = gzip.GzipFile(fileobj=output_f, mode="wb")
            try:
                # BUG FIX: the dump is binary data — read it in binary mode.
                # Text mode fails on Python 3 (GzipFile in "wb" mode requires
                # bytes) and can corrupt data on Windows.
                with open(input_path, 'rb') as input_f:
                    gz_file.write(input_f.read())
            finally:
                gz_file.close()
        return output_path
class Command(BaseDbBackupCommand):
    """Restore a database from a backup held on the storage backend,
    optionally decrypting (gpg) and/or uncompressing (gzip) it first."""
    help = "dbrestore [-d <dbname>] [-f <filename>] [-s <servername>]"
    option_list = BaseDbBackupCommand.option_list + (
        make_option("-d", "--database", help="Database to restore"),
        make_option("-f", "--filepath", help="Specific file to backup from"),
        make_option("-x", "--backup-extension", help="The extension to use when scanning for files to restore from."),
        make_option("-s", "--servername", help="Use a different servername backup"),
        make_option("-l", "--list", action='store_true', default=False, help="List backups in the backup directory"),
        make_option("-c", "--decrypt", help="Decrypt data before restoring", default=False, action='store_true'),
        make_option("-p", "--passphrase", help="Passphrase for decrypt file", default=None),
        make_option("-z", "--uncompress", help="Uncompress gzip data before restoring", action='store_true'),
    )

    def handle(self, **options):
        """ Django command handler. """
        self.verbosity = int(options.get('verbosity'))
        self.quiet = options.get('quiet')
        try:
            # Drop the current DB connection so the restore can safely
            # recreate/overwrite the database.
            connection.close()
            self.filepath = options.get('filepath')
            self.backup_extension = options.get('backup_extension') or 'backup'
            self.servername = options.get('servername')
            self.decrypt = options.get('decrypt')
            self.uncompress = options.get('uncompress')
            self.passphrase = options.get('passphrase')
            self.database = self._get_database(options)
            self.storage = BaseStorage.storage_factory()
            self.dbcommands = DBCommands(self.database)
            if options.get('list'):
                return self.list_backups()
            self.restore_backup()
        except StorageError as err:
            raise CommandError(err)

    def _get_database(self, options):
        """ Get the database to restore.

        With a single configured database it is picked implicitly; with
        several, --database is mandatory.
        """
        database_key = options.get('database')
        if not database_key:
            if len(settings.DATABASES) >= 2:
                errmsg = "Because this project contains more than one database, you"
                errmsg += " must specify the --database option."
                raise CommandError(errmsg)
            database_key = list(settings.DATABASES.keys())[0]
        return settings.DATABASES[database_key]

    def restore_backup(self):
        """ Restore the specified database.

        Fetches the backup file (latest matching one if no --filepath),
        applies decrypt/uncompress as requested, asks for confirmation and
        feeds the result to the restore commands.
        """
        self.log("Restoring backup for database: %s" % self.database['NAME'], 1)
        # Fetch the latest backup if filepath not specified
        if not self.filepath:
            self.log(" Finding latest backup", 1)
            filepaths = self.storage.list_directory()
            filepaths = [f for f in filepaths if f.endswith('.' + self.backup_extension)]
            if not filepaths:
                raise CommandError("No backup files found in: /%s" % self.storage.backup_dir)
            # Assumes the storage listing is ordered so the last entry is the
            # newest — TODO confirm against the storage backend.
            self.filepath = filepaths[-1]
        # Restore the specified filepath backup
        self.log(" Restoring: %s" % self.filepath, 1)
        input_filename = self.filepath
        inputfile = self.storage.read_file(input_filename)
        if self.decrypt:
            unencrypted_file, input_filename = self.unencrypt_file(inputfile, input_filename)
            inputfile.close()
            inputfile = unencrypted_file
        if self.uncompress:
            uncompressed_file = self.uncompress_file(inputfile)
            inputfile.close()
            inputfile = uncompressed_file
        self.log(" Restore tempfile created: %s" % utils.handle_size(inputfile), 1)
        answer = input("Are you sure you want to continue? [Y/n]")
        if answer.lower() not in ('y', 'yes', ''):
            self.log("Quitting", 1)
            sys.exit(0)
        inputfile.seek(0)
        self.dbcommands.run_restore_commands(inputfile)

    def get_extension(self, filename):
        # Extension including the leading dot, e.g. '.gz'.
        _, extension = os.path.splitext(filename)
        return extension

    def uncompress_file(self, inputfile):
        """ Uncompress this file using gzip. The input and the output are filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=500 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
        try:
            inputfile.seek(0)
            outputfile.write(zipfile.read())
        finally:
            zipfile.close()
        return outputfile

    def unencrypt_file(self, inputfile, inputfilename):
        """ Unencrypt this file using gpg. The input and the output are filelike objects.

        Returns a (filelike, basename) pair; the intermediate decrypted file
        on disk is always removed.
        """
        import gnupg

        def get_passphrase():
            return self.passphrase or getpass('Input Passphrase: ') or None

        temp_dir = tempfile.mkdtemp(dir=dbbackup_settings.TMP_DIR)
        try:
            # Decrypted output keeps the original name minus the .gpg suffix.
            new_basename = os.path.basename(inputfilename).replace('.gpg', '')
            temp_filename = os.path.join(temp_dir, new_basename)
            try:
                inputfile.seek(0)
                g = gnupg.GPG()
                result = g.decrypt_file(file=inputfile, passphrase=get_passphrase(), output=temp_filename)
                if not result:
                    raise Exception('Decryption failed; status: %s' % result.status)
                outputfile = tempfile.SpooledTemporaryFile(
                    max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
                outputfile.name = new_basename
                f = open(temp_filename)
                try:
                    outputfile.write(f.read())
                finally:
                    f.close()
            finally:
                # Remove the on-disk decrypted copy whatever happened above.
                if os.path.exists(temp_filename):
                    os.remove(temp_filename)
        finally:
            os.rmdir(temp_dir)
        return outputfile, new_basename

    def list_backups(self):
        """ List backups in the backup directory. """
        self.log("Listing backups on %s in /%s:" % (self.storage.name, self.storage.backup_dir), 1)
        for filepath in self.storage.list_directory():
            self.log(" %s" % os.path.basename(filepath), 1)
class Command(BaseDbBackupCommand):
    """Back up one or all configured databases to the storage backend,
    optionally compressing, encrypting and pruning old backups."""
    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseDbBackupCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        self.verbosity = int(options.get('verbosity'))
        self.quiet = options.get('quiet')
        try:
            self.clean = options.get('clean')
            self.clean_keep = dbbackup_settings.CLEANUP_KEEP
            self.database = options.get('database')
            self.servername = options.get('servername')
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            if self.database:
                # Single database requested: build a one-element tuple.
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database)
                self.cleanup_old_backups(database)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database):
        """ Save a new backup file.

        Dumps into a spooled temp file, optionally compresses and encrypts
        it, then writes the result to storage under the generated filename.
        """
        self.log("Backing Up Database: %s" % database['NAME'], 1)
        filename = self.dbcommands.filename(self.servername)
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        self.dbcommands.run_backup_commands(outputfile)
        outputfile.name = filename
        if self.compress:
            compressed_file, filename = self.compress_file(
                outputfile, filename)
            outputfile.close()
            outputfile = compressed_file
        if self.encrypt:
            encrypted_file, filename = utils.encrypt_file(outputfile, filename)
            outputfile = encrypted_file
        self.log(
            " Backup tempfile created: %s" % (utils.handle_size(outputfile)), 1)
        self.log(
            " Writing file to %s: %s, filename: %s" % (self.storage.name, self.storage.backup_dir, filename), 1)
        self.storage.write_file(outputfile, filename)

    def cleanup_old_backups(self, database):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month.
        """
        if self.clean:
            self.log("Cleaning Old Backups for: %s" % database['NAME'], 1)
            filepaths = self.storage.list_directory()
            filepaths = self.dbcommands.filter_filepaths(filepaths)
            for filepath in sorted(filepaths[0:-self.clean_keep]):
                # Extract the timestamp embedded in the backup filename.
                regex = r'^%s' % self.dbcommands.filename_match(
                    self.servername, '(.*?)')
                datestr = re.findall(regex, os.path.basename(filepath))[0]
                dateTime = datetime.datetime.strptime(
                    datestr, dbbackup_settings.DATE_FORMAT)
                # Keep first-of-month backups regardless of age.
                if int(dateTime.strftime("%d")) != 1:
                    self.log(" Deleting: %s" % filepath, 1)
                    self.storage.delete_file(filepath)

    def compress_file(self, inputfile, filename):
        """ Compress this file using gzip. The input and the output are filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        new_filename = filename + '.gz'
        zipfile = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb")
        # TODO: Why do we have an exception block without handling exceptions?
        try:
            inputfile.seek(0)
            copyfileobj(inputfile, zipfile, 2 * 1024 * 1024)
        finally:
            zipfile.close()
        return outputfile, new_filename
class Command(LabelCommand):
    """Restore a database from a backup held on the storage backend,
    optionally decrypting (gpg) and/or uncompressing (gzip) it first."""
    help = "dbrestore [-d <dbname>] [-f <filename>] [-s <servername>]"
    option_list = BaseCommand.option_list + (
        make_option("-d", "--database", help="Database to restore"),
        make_option("-f", "--filepath", help="Specific file to backup from"),
        make_option(
            "-x", "--backup-extension",
            help="The extension to use when scanning for files to restore from."
        ),
        make_option(
            "-s", "--servername",
            help="Use a different servername backup"),
        make_option("-l", "--list", action='store_true', default=False, help="List backups in the backup directory"),
        make_option("-c", "--decrypt", help="Decrypt data before restoring", default=False, action='store_true'),
        make_option("-p", "--passphrase", help="Passphrase for decrypt file", default=None),
        make_option("-z", "--uncompress", help="Uncompress gzip data before restoring", action='store_true'),
    )

    def handle(self, **options):
        """ Django command handler. """
        try:
            # Drop the current DB connection so the restore can safely
            # recreate/overwrite the database.
            connection.close()
            self.filepath = options.get('filepath')
            self.backup_extension = options.get('backup_extension') or 'backup'
            self.servername = options.get('servername')
            self.decrypt = options.get('decrypt')
            self.uncompress = options.get('uncompress')
            self.passphrase = options.get('passphrase')
            self.database = self._get_database(options)
            self.storage = BaseStorage.storage_factory()
            self.dbcommands = DBCommands(self.database)
            if options.get('list'):
                return self.list_backups()
            self.restore_backup()
        except StorageError as err:
            raise CommandError(err)

    def _get_database(self, options):
        """ Get the database to restore.

        With a single configured database it is picked implicitly; with
        several, --database is mandatory.
        """
        database_key = options.get('database')
        if not database_key:
            if len(settings.DATABASES) >= 2:
                errmsg = "Because this project contains more than one database, you"
                errmsg += " must specify the --database option."
                raise CommandError(errmsg)
            database_key = list(settings.DATABASES.keys())[0]
        return settings.DATABASES[database_key]

    def restore_backup(self):
        """ Restore the specified database.

        Fetches the backup file (latest matching one if no --filepath),
        applies decrypt/uncompress as requested, asks for confirmation and
        feeds the result to the restore commands.
        """
        self.stdout.write("Restoring backup for database: %s" % self.database['NAME'])
        # Fetch the latest backup if filepath not specified
        if not self.filepath:
            self.stdout.write(" Finding latest backup")
            filepaths = self.storage.list_directory()
            filepaths = [
                f for f in filepaths if f.endswith('.' + self.backup_extension)
            ]
            if not filepaths:
                raise CommandError("No backup files found in: /%s" % self.storage.backup_dir)
            # Assumes the storage listing is ordered so the last entry is the
            # newest — TODO confirm against the storage backend.
            self.filepath = filepaths[-1]
        # Restore the specified filepath backup
        self.stdout.write(" Restoring: %s" % self.filepath)
        input_filename = self.filepath
        inputfile = self.storage.read_file(input_filename)
        if self.decrypt:
            unencrypted_file = self.unencrypt_file(inputfile)
            inputfile.close()
            inputfile = unencrypted_file
            input_filename = inputfile.name
        if self.uncompress:
            uncompressed_file = self.uncompress_file(inputfile)
            inputfile.close()
            inputfile = uncompressed_file
        self.stdout.write(" Restore tempfile created: %s" % utils.handle_size(inputfile))
        answer = input("Are you sure you want to continue? [Y/n]")
        if answer.lower() not in ('y', 'yes', ''):
            self.stdout.write("Quitting")
            sys.exit(0)
        inputfile.seek(0)
        self.dbcommands.run_restore_commands(inputfile)

    def get_extension(self, filename):
        # Extension including the leading dot, e.g. '.gz'.
        _, extension = os.path.splitext(filename)
        return extension

    def uncompress_file(self, inputfile):
        """ Uncompress this file using gzip. The input and the output are filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(
            max_size=500 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
        zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
        try:
            inputfile.seek(0)
            outputfile.write(zipfile.read())
        finally:
            zipfile.close()
        return outputfile

    def unencrypt_file(self, inputfile):
        """ Unencrypt this file using gpg. The input and the output are filelike objects.

        The intermediate decrypted file on disk is always removed.
        """
        import gnupg

        def get_passphrase():
            return self.passphrase or getpass('Input Passphrase: ') or None

        temp_dir = tempfile.mkdtemp(dir=dbbackup_settings.TMP_DIR)
        try:
            inputfile.fileno(
            )  # Convert inputfile from SpooledTemporaryFile to regular file (Fixes Issue #21)
            # Decrypted output keeps the original name minus the .gpg suffix.
            new_basename = os.path.basename(inputfile.name).replace('.gpg', '')
            temp_filename = os.path.join(temp_dir, new_basename)
            try:
                inputfile.seek(0)
                g = gnupg.GPG()
                result = g.decrypt_file(file=inputfile, passphrase=get_passphrase(), output=temp_filename)
                if not result:
                    raise Exception('Decryption failed; status: %s' % result.status)
                outputfile = tempfile.SpooledTemporaryFile(
                    max_size=10 * 1024 * 1024, dir=dbbackup_settings.TMP_DIR)
                outputfile.name = new_basename
                f = open(temp_filename)
                try:
                    outputfile.write(f.read())
                finally:
                    f.close()
            finally:
                # Remove the on-disk decrypted copy whatever happened above.
                if os.path.exists(temp_filename):
                    os.remove(temp_filename)
        finally:
            os.rmdir(temp_dir)
        return outputfile

    def list_backups(self):
        """ List backups in the backup directory. """
        self.stdout.write("Listing backups on %s in /%s:" % (self.storage.name, self.storage.backup_dir))
        for filepath in self.storage.list_directory():
            self.stdout.write(" %s" % os.path.basename(filepath))
def restore_db(self):
    """Load the <self.directory>/dbbackup dump into the default database."""
    default_db = settings.DATABASES['default']
    self.dbcommands = DBCommands(default_db)
    dump_path = os.path.join(self.directory, 'dbbackup')
    with open(dump_path, 'r') as inputfile:
        self.dbcommands.run_restore_commands(inputfile)
class Command(LabelCommand):
    """Back up one or all configured databases to the storage backend,
    optionally compressing, encrypting and pruning old backups."""
    help = "dbbackup [-c] [-d <dbname>] [-s <servername>] [--compress] [--encrypt]"
    option_list = BaseCommand.option_list + (
        make_option("-c", "--clean", help="Clean up old backup files", action="store_true", default=False),
        make_option("-d", "--database", help="Database to backup (default: everything)"),
        make_option("-s", "--servername", help="Specify server name to include in backup filename"),
        make_option("-z", "--compress", help="Compress the backup files", action="store_true", default=False),
        make_option("-e", "--encrypt", help="Encrypt the backup files", action="store_true", default=False),
    )

    @utils.email_uncaught_exception
    def handle(self, **options):
        """ Django command handler. """
        try:
            self.clean = options.get('clean')
            self.clean_keep = getattr(settings, 'DBBACKUP_CLEANUP_KEEP', 10)
            self.database = options.get('database')
            self.servername = options.get('servername')
            self.compress = options.get('compress')
            self.encrypt = options.get('encrypt')
            self.storage = BaseStorage.storage_factory()
            if self.database:
                # Single database requested: build a one-element tuple.
                database_keys = self.database,
            else:
                database_keys = dbbackup_settings.DATABASES
            for database_key in database_keys:
                database = settings.DATABASES[database_key]
                self.dbcommands = DBCommands(database)
                self.save_new_backup(database, database_key)
                self.cleanup_old_backups(database)
        except StorageError as err:
            raise CommandError(err)

    def save_new_backup(self, database, database_name):
        """ Save a new backup file.

        Dumps into a spooled temp file, optionally compresses and encrypts
        it, then writes the result to storage under the generated filename.
        """
        print("Backing Up Database: %s" % database['NAME'])
        filename = self.dbcommands.filename(self.servername)
        outputfile = tempfile.SpooledTemporaryFile(max_size=10 * 1024 * 1024)
        self.dbcommands.run_backup_commands(outputfile)
        if self.compress:
            compressed_file = self.compress_file(outputfile)
            outputfile.close()
            outputfile = compressed_file
        if self.encrypt:
            encrypted_file = utils.encrypt_file(outputfile)
            outputfile = encrypted_file
        print(" Backup tempfile created: %s" % (utils.handle_size(outputfile)))
        print(" Writing file to %s: %s, filename: %s" % (self.storage.name, self.storage.backup_dir, filename))
        self.storage.write_file(outputfile, filename)

    def cleanup_old_backups(self, database):
        """ Cleanup old backups, keeping the number of backups specified by
        DBBACKUP_CLEANUP_KEEP.
        """
        if self.clean:
            print("Cleaning Old Backups for: %s" % database['NAME'])
            filepaths = self.storage.list_directory()
            filepaths = self.dbcommands.filter_filepaths(filepaths)
            for filepath in sorted(filepaths[0:-self.clean_keep]):
                # Extract the timestamp embedded in the backup filename.
                regex = r'^%s' % self.dbcommands.filename_match(self.servername, '(.*?)')
                datestr = re.findall(regex, os.path.basename(filepath))[0]
                dateTime = datetime.datetime.strptime(datestr, dbbackup_settings.DATE_FORMAT)
                # First-of-month exemption deliberately disabled below: every
                # backup beyond clean_keep is deleted.
                # if int(dateTime.strftime("%d")) != 1:
                print(" Deleting: %s" % filepath)
                self.storage.delete_file(filepath)

    def compress_file(self, inputfile):
        """ Compress this file using gzip. The input and the output are filelike objects. """
        outputfile = tempfile.SpooledTemporaryFile(max_size=10 * 1024 * 1024)
        outputfile.name = inputfile.name + '.gz'
        zipfile = gzip.GzipFile(fileobj=outputfile, mode="wb")
        # TODO: Why do we have an exception block without handling exceptions?
        try:
            inputfile.seek(0)
            copyfileobj(inputfile, zipfile, 2 * 1024 * 1024)
        finally:
            zipfile.close()
        return outputfile