def test_ssh(self):
    bunch = utils.Bunch(storage="ssh",
                        ssh_key="key",
                        ssh_username="******",
                        ssh_host="localhost")
    validator.validate(bunch)
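# The validator tests in this file build their fake argument objects with
# utils.Bunch. For context, here is a minimal sketch of such a
# keyword-to-attribute container; this is an assumption about the helper,
# not necessarily the exact freezer implementation.
class Bunch(object):
    """Container that exposes its keyword arguments as attributes."""

    def __init__(self, **kwargs):
        # Store every keyword argument directly as an instance attribute.
        self.__dict__.update(kwargs)

    def __getattr__(self, name):
        # Missing attributes come back as None instead of raising
        # AttributeError, so validation code can probe options freely.
        return self.__dict__.get(name)


# Usage: set and unset names can both be read.
args = Bunch(storage="ssh", ssh_host="localhost")
assert args.storage == "ssh"
assert args.ssh_key is None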
def freezer_main(backup_args):
    """Freezer main loop for job execution."""
    if not backup_args.quiet:
        LOG.info('log file at {0}'.format(CONF.get('log_file')))

    if backup_args.max_priority:
        utils.set_max_process_priority()

    # Backups are namespaced per host: <hostname>_<backup_name>.
    backup_args.__dict__['hostname_backup_name'] = "{0}_{1}".format(
        backup_args.hostname, backup_args.backup_name)

    validator.validate(backup_args)

    work_dir = backup_args.work_dir
    max_segment_size = backup_args.max_segment_size

    # An OpenStack client manager is only needed for swift storage or for
    # nova/cinder based backup media.
    if (backup_args.storage == 'swift' or
            backup_args.backup_media in ['nova', 'cinder', 'cindernative']):
        backup_args.client_manager = get_client_manager(backup_args.__dict__)

    # Build either a single storage backend or a multiple-storage wrapper.
    if backup_args.storages:
        storage = multiple.MultipleStorage(
            work_dir,
            [storage_from_dict(x, work_dir, max_segment_size)
             for x in backup_args.storages])
    else:
        storage = storage_from_dict(backup_args.__dict__, work_dir,
                                    max_segment_size)

    backup_args.engine = tar_engine.TarBackupEngine(
        backup_args.compression,
        backup_args.dereference_symlink,
        backup_args.exclude,
        storage,
        winutils.is_windows(),
        backup_args.max_segment_size,
        backup_args.encrypt_pass_file,
        backup_args.dry_run)

    if hasattr(backup_args, 'trickle_command'):
        if "tricklecount" in os.environ:
            if int(os.environ.get("tricklecount")) > 1:
                # Trickle re-executed us more than once: it is not limiting
                # bandwidth, so run the job directly.
                LOG.critical("[*] Trickle seems to be not working, Switching "
                             "to normal mode ")
                return run_job(backup_args, storage)

        # Re-run the same command line under trickle and stream its output.
        freezer_command = '{0} {1}'.format(backup_args.trickle_command,
                                           ' '.join(sys.argv))
        LOG.debug('Trickle command: {0}'.format(freezer_command))
        process = subprocess.Popen(freezer_command.split(),
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=os.environ.copy())
        while process.poll() is None:
            line = process.stdout.readline().strip()
            if line != '':
                print(line)
        output, error = process.communicate()

        if hasattr(backup_args, 'tmp_file'):
            utils.delete_file(backup_args.tmp_file)

        if process.returncode:
            # The trickle-wrapped run failed; fall back to a direct run.
            LOG.warn("[*] Trickle Error: {0}".format(error))
            LOG.info("[*] Switching to work without trickle ...")
            return run_job(backup_args, storage)
    else:
        return run_job(backup_args, storage)
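# freezer_main above optionally re-executes its own command line under a
# bandwidth limiter: it prefixes sys.argv with trickle_command, spawns the
# result as a child process, and uses the "tricklecount" environment
# variable as a recursion guard so an ineffective wrapper does not respawn
# itself forever. The following is a generic, self-contained sketch of that
# re-exec-with-guard pattern; names such as run_wrapped and WRAP_DEPTH are
# illustrative, not part of freezer.
import os
import subprocess
import sys


def run_wrapped(wrapper_command, guard_var="WRAP_DEPTH", max_depth=1):
    """Re-run the current command line under wrapper_command at most once.

    Returns the child's exit code, or None if the guard shows we are already
    running inside the wrapper and should do the real work directly.
    """
    depth = int(os.environ.get(guard_var, "0"))
    if depth >= max_depth:
        return None

    env = os.environ.copy()
    env[guard_var] = str(depth + 1)
    cmd = wrapper_command.split() + sys.argv

    # Stream the child's output line by line until it exits.
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, env=env)
    while process.poll() is None:
        line = process.stdout.readline().strip()
        if line:
            print(line)
    process.communicate()
    return process.returncode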
def test_pass(self):
    bunch = utils.Bunch()
    validator.validate(bunch)
def test_restore_with_path(self):
    bunch = utils.Bunch(action="restore", restore_abs_path="/tmp")
    validator.validate(bunch)
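# Taken together, test_pass, test_ssh and test_restore_with_path pin down
# part of the validator.validate contract: an empty option set passes, ssh
# storage requires ssh_key, ssh_username and ssh_host, and a restore action
# requires a restore path. The function below is a hypothetical illustration
# of those checks inferred from the tests, not the actual freezer validator.
def validate(conf):
    if conf.storage == "ssh" and not (
            conf.ssh_key and conf.ssh_username and conf.ssh_host):
        raise Exception("ssh storage requires ssh_key, ssh_username "
                        "and ssh_host")
    if conf.action == "restore" and not conf.restore_abs_path:
        raise Exception("restore requires restore_abs_path")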
def freezer_main(backup_args):
    """Freezer main loop for job execution."""
    if not backup_args.quiet:
        LOG.info('log file at {0}'.format(CONF.get('log_file')))

    if backup_args.max_priority:
        utils.set_max_process_priority()

    # Apply the configured bandwidth limits by monkeypatching the socket
    # module used by the whole process.
    bandwidth.monkeypatch_socket_bandwidth(backup_args)

    # Backups are namespaced per host: <hostname>_<backup_name>.
    backup_args.__dict__['hostname_backup_name'] = "{0}_{1}".format(
        backup_args.hostname, backup_args.backup_name)

    validator.validate(backup_args)

    work_dir = backup_args.work_dir
    os_identity = backup_args.os_identity_api_version
    max_segment_size = backup_args.max_segment_size

    # Build either a single storage backend or a multiple-storage wrapper.
    if backup_args.storages:
        storage = multiple.MultipleStorage(
            work_dir,
            [storage_from_dict(x, work_dir, max_segment_size, os_identity)
             for x in backup_args.storages])
    else:
        storage = storage_from_dict(backup_args.__dict__, work_dir,
                                    max_segment_size, os_identity)

    backup_args.__dict__['engine'] = tar_engine.TarBackupEngine(
        backup_args.compression,
        backup_args.dereference_symlink,
        backup_args.exclude,
        storage,
        winutils.is_windows(),
        backup_args.encrypt_pass_file,
        backup_args.dry_run)

    if hasattr(backup_args, 'trickle_command'):
        if "tricklecount" in os.environ:
            if int(os.environ.get("tricklecount")) > 1:
                # Trickle re-executed us more than once: it is not limiting
                # bandwidth, so run the job directly and stop here.
                LOG.critical("[*] Trickle seems to be not working, Switching "
                             "to normal mode ")
                return run_job(backup_args, storage)

        # Re-run the same command line under trickle and stream its output.
        freezer_command = '{0} {1}'.format(backup_args.trickle_command,
                                           ' '.join(sys.argv))
        process = subprocess.Popen(freezer_command.split(),
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=os.environ.copy())
        while process.poll() is None:
            print(process.stdout.readline().rstrip())
        output, error = process.communicate()

        if process.returncode:
            # The trickle-wrapped run failed; fall back to a direct run.
            LOG.error("[*] Trickle Error: {0}".format(error))
            LOG.critical("[*] Switching to work without trickle ...")
            run_job(backup_args, storage)
    else:
        run_job(backup_args, storage)
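# The variant above also calls bandwidth.monkeypatch_socket_bandwidth, i.e.
# it applies the configured upload/download limits by replacing the socket
# class the whole process uses. Below is a minimal, hypothetical sketch of
# that monkeypatching idea (ThrottledSocket and the 1 MiB/s cap are
# illustrative, not the freezer implementation).
import socket
import time


class ThrottledSocket(socket.socket):
    """socket.socket subclass that caps the average outbound byte rate."""

    max_bytes_per_sec = 1024 * 1024  # assumed cap of 1 MiB/s

    def send(self, data, *args, **kwargs):
        sent = super(ThrottledSocket, self).send(data, *args, **kwargs)
        # Sleep long enough that the average rate stays under the cap.
        time.sleep(sent / float(self.max_bytes_per_sec))
        return sent


def monkeypatch_socket_bandwidth():
    # Every socket created after this point, including those opened by
    # client libraries, will be a ThrottledSocket.
    socket.socket = ThrottledSocket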