def main(args=None):
    """
    The main script entry point for archiving a WAL file to cloud storage.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    :raises BarmanException: if the WAL path cannot be determined (missing
        BARMAN_FILE in hook mode, or missing wal_path argument otherwise)
    """
    config = parse_arguments(args)
    configure_logging(config)

    # Read wal_path from environment if we're a hook script
    if __is_hook_script():
        if "BARMAN_FILE" not in os.environ:
            raise BarmanException("Expected environment variable BARMAN_FILE not set")
        config.wal_path = os.getenv("BARMAN_FILE")
    else:
        if config.wal_path is None:
            raise BarmanException("the following arguments are required: wal_path")

    # Validate the WAL file name before uploading it
    if not is_any_xlog_file(config.wal_path):
        # Use lazy %-style logging args, consistent with the other log calls
        # in this file (previously the message was eagerly interpolated).
        logging.error("%s is an invalid name for a WAL file", config.wal_path)
        raise CLIErrorExit()

    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            uploader = CloudWalUploader(
                cloud_interface=cloud_interface,
                server_name=config.server_name,
                compression=config.compression,
            )
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)

            # TODO: Should the setup be optional?
            cloud_interface.setup_bucket()

            # History files carry their own tag set when configured
            upload_kwargs = {}
            if is_history_file(config.wal_path):
                upload_kwargs["override_tags"] = config.history_tags
            uploader.upload_wal(config.wal_path, **upload_kwargs)
    except Exception as exc:
        logging.error("Barman cloud WAL archiver exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for managing keep annotations on a backup.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            # Fail fast when cloud storage is unreachable; a raise here
            # makes the following check a plain `if` rather than `elif`.
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            if config.test:
                raise SystemExit(0)
            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise OperationErrorExit()

            catalog = CloudBackupCatalog(cloud_interface, config.server_name)
            if config.release:
                # Remove any existing keep annotation
                catalog.release_keep(config.backup_id)
            elif config.status:
                # Report the current keep target, or "nokeep" when unset
                keep_target = catalog.get_keep_target(config.backup_id)
                print("Keep: %s" % (keep_target if keep_target else "nokeep"))
            else:
                # Add a keep annotation: only DONE backups may be kept
                info = catalog.get_backup_info(config.backup_id)
                if info.status != BackupInfo.DONE:
                    logging.error(
                        "Cannot add keep to backup %s because it has status %s. "
                        "Only backups with status DONE can be kept.",
                        config.backup_id,
                        info.status,
                    )
                    raise OperationErrorExit()
                catalog.keep_backup(config.backup_id, config.target)
    except Exception as exc:
        logging.error("Barman cloud keep exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for restoring a backup from cloud storage.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)

    # Refuse to restore into an existing, non-empty destination directory
    if os.path.exists(config.recovery_dir) and os.listdir(config.recovery_dir):
        logging.error(
            "Destination %s already exists and it is not empty",
            config.recovery_dir,
        )
        raise OperationErrorExit()

    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            downloader = CloudBackupDownloader(
                cloud_interface=cloud_interface,
                server_name=config.server_name,
            )
            # Connectivity failure aborts; a bare --test run exits cleanly
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            if config.test:
                raise SystemExit(0)
            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise OperationErrorExit()
            downloader.download_backup(
                config.backup_id,
                config.recovery_dir,
                tablespace_map(config.tablespace),
            )
    except KeyboardInterrupt as exc:
        # Treat Ctrl-C as an operational failure, not a generic crash
        logging.error("Barman cloud restore was interrupted by the user")
        logging.debug("Exception details:", exc_info=exc)
        raise OperationErrorExit()
    except Exception as exc:
        logging.error("Barman cloud restore exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for restoring a WAL file from cloud storage.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)

    # Validate the WAL file name before downloading it
    if not is_any_xlog_file(config.wal_name):
        # Use lazy %-style logging args, consistent with the other log calls
        # in this file (previously the message was eagerly interpolated).
        logging.error("%s is an invalid name for a WAL file", config.wal_name)
        raise CLIErrorExit()

    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            downloader = CloudWalDownloader(
                cloud_interface=cloud_interface,
                server_name=config.server_name,
            )
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)
            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise OperationErrorExit()
            downloader.download_wal(config.wal_name, config.wal_dest)
    except Exception as exc:
        logging.error("Barman cloud WAL restore exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for checking the WAL archive content.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        if not cloud_interface.test_connectivity():
            # Deliberately raise an error if we cannot connect
            raise NetworkErrorExit()
        # If test is requested, just exit after connectivity test
        if config.test:
            raise SystemExit(0)
        if not cloud_interface.bucket_exists:
            # If the bucket does not exist then the check should pass
            return
        catalog = CloudBackupCatalog(cloud_interface, config.server_name)
        # Iterating the mapping yields its keys, i.e. the WAL names
        archived_wals = list(catalog.get_wal_paths())
        check_archive_usable(
            archived_wals,
            timeline=config.timeline,
        )
    except WalArchiveContentError as err:
        logging.error(
            "WAL archive check failed for server %s: %s",
            config.server_name,
            force_str(err),
        )
        raise OperationErrorExit()
    except Exception as exc:
        logging.error("Barman cloud WAL archive check exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for listing backups stored in the cloud.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            catalog = CloudBackupCatalog(
                cloud_interface=cloud_interface, server_name=config.server_name
            )
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)
            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise OperationErrorExit()

            backup_list = catalog.get_backup_list()

            # Output
            if config.format == "console":
                COLUMNS = "{:<20}{:<25}{:<30}{:<16}"
                print(
                    COLUMNS.format(
                        "Backup ID", "End Time", "Begin Wal", "Archival Status"
                    )
                )
                for backup_id in sorted(backup_list):
                    item = backup_list[backup_id]
                    if item and item.status == BackupInfo.DONE:
                        keep_target = catalog.get_keep_target(item.backup_id)
                        # Conditional expression instead of the fragile
                        # `cond and X or Y` idiom (which silently breaks if
                        # the middle operand is ever falsy).
                        keep_status = (
                            "KEEP:%s" % keep_target.upper() if keep_target else ""
                        )
                        print(
                            COLUMNS.format(
                                item.backup_id,
                                item.end_time.strftime("%Y-%m-%d %H:%M:%S"),
                                item.begin_wal,
                                keep_status,
                            )
                        )
            else:
                print(
                    json.dumps(
                        {
                            "backups_list": [
                                backup_list[backup_id].to_json()
                                for backup_id in sorted(backup_list)
                            ]
                        }
                    )
                )
    except Exception as exc:
        logging.error("Barman cloud backup list exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point for deleting backups from cloud storage,
    either by backup ID or by applying a retention policy.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            if not cloud_interface.test_connectivity():
                raise NetworkErrorExit()
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)
            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise OperationErrorExit()

            catalog = CloudBackupCatalog(
                cloud_interface=cloud_interface, server_name=config.server_name
            )
            # Call catalog.get_backup_list now so we know we can read the whole
            # catalog (the results are cached so this does not result in extra
            # calls to cloud storage)
            catalog.get_backup_list()
            if len(catalog.unreadable_backups) > 0:
                # Lazy %-style logging args, consistent with the rest of the
                # file (previously the message was eagerly interpolated).
                logging.error(
                    "Cannot read the following backups: %s\n"
                    "Unsafe to proceed with deletion due to failure reading backup catalog",
                    catalog.unreadable_backups,
                )
                raise OperationErrorExit()

            if config.backup_id:
                # Because we only care about one backup, skip the annotation
                # cache because it is only helpful when dealing with multiple
                # backups
                if catalog.should_keep_backup(config.backup_id, use_cache=False):
                    logging.error(
                        "Skipping delete of backup %s for server %s "
                        "as it has a current keep request. If you really "
                        "want to delete this backup please remove the keep "
                        "and try again.",
                        config.backup_id,
                        config.server_name,
                    )
                    raise OperationErrorExit()
                _delete_backup(
                    cloud_interface, catalog, config.backup_id, config.dry_run
                )
            elif config.retention_policy:
                try:
                    retention_policy = RetentionPolicyFactory.create(
                        "retention_policy",
                        config.retention_policy,
                        server_name=config.server_name,
                        catalog=catalog,
                    )
                except InvalidRetentionPolicy as exc:
                    logging.error(
                        "Could not create retention policy %s: %s",
                        config.retention_policy,
                        force_str(exc),
                    )
                    raise CLIErrorExit()
                # Sort to ensure that we delete the backups in ascending order,
                # that is from oldest to newest. This ensures that the relevant
                # WALs will be cleaned up after each backup is deleted.
                # (sorted() accepts the generator directly; no intermediate
                # list is needed.)
                backups_to_delete = sorted(
                    backup_id
                    for backup_id, status in retention_policy.report().items()
                    if status == "OBSOLETE"
                )
                for backup_id in backups_to_delete:
                    _delete_backup(
                        cloud_interface,
                        catalog,
                        backup_id,
                        config.dry_run,
                        skip_wal_cleanup_if_standalone=False,
                    )
    except Exception as exc:
        logging.error("Barman cloud backup delete exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
def main(args=None):
    """
    The main script entry point

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    # Private scratch directory for this run; removed in the finally block.
    tempdir = tempfile.mkdtemp(prefix="barman-cloud-backup-")
    try:
        # Create any temporary file in the `tempdir` subdirectory
        tempfile.tempdir = tempdir
        cloud_interface = get_cloud_interface(config)
        if not cloud_interface.test_connectivity():
            raise NetworkErrorExit()
        # If test is requested, just exit after connectivity test
        elif config.test:
            raise SystemExit(0)
        with closing(cloud_interface):
            # TODO: Should the setup be optional?
            cloud_interface.setup_bucket()
            # Perform the backup
            # Shared keyword arguments for both uploader flavours below.
            uploader_kwargs = {
                "server_name": config.server_name,
                "compression": config.compression,
                "max_archive_size": config.max_archive_size,
                "cloud_interface": cloud_interface,
            }
            if __is_hook_script():
                # Hook-script mode: the backup was already taken by Barman and
                # is described by BARMAN_* environment variables; we only
                # upload the existing backup directory.
                if "BARMAN_BACKUP_DIR" not in os.environ:
                    raise BarmanException(
                        "BARMAN_BACKUP_DIR environment variable not set"
                    )
                if "BARMAN_BACKUP_ID" not in os.environ:
                    raise BarmanException(
                        "BARMAN_BACKUP_ID environment variable not set"
                    )
                if os.getenv("BARMAN_STATUS") != "DONE":
                    raise UnrecoverableHookScriptError(
                        "backup in '%s' has status '%s' (status should be: DONE)"
                        % (os.getenv("BARMAN_BACKUP_DIR"), os.getenv("BARMAN_STATUS"))
                    )
                uploader = CloudBackupUploaderBarman(
                    backup_dir=os.getenv("BARMAN_BACKUP_DIR"),
                    backup_id=os.getenv("BARMAN_BACKUP_ID"),
                    **uploader_kwargs
                )
                uploader.backup()
            else:
                # Direct mode: connect to PostgreSQL and take the backup here.
                conninfo = build_conninfo(config)
                postgres = PostgreSQLConnection(
                    conninfo,
                    config.immediate_checkpoint,
                    application_name="barman_cloud_backup",
                )
                try:
                    postgres.connect()
                except PostgresConnectionError as exc:
                    logging.error("Cannot connect to postgres: %s", force_str(exc))
                    logging.debug("Exception details:", exc_info=exc)
                    raise OperationErrorExit()
                with closing(postgres):
                    uploader = CloudBackupUploaderPostgres(
                        postgres=postgres, **uploader_kwargs
                    )
                    uploader.backup()
    except KeyboardInterrupt as exc:
        # Treat Ctrl-C as an operational failure rather than a generic crash.
        logging.error("Barman cloud backup was interrupted by the user")
        logging.debug("Exception details:", exc_info=exc)
        raise OperationErrorExit()
    except UnrecoverableHookScriptError as exc:
        logging.error("Barman cloud backup exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        # Distinct exit code so the caller can tell an unrecoverable hook
        # failure apart from the generic error exit.
        raise SystemExit(63)
    except Exception as exc:
        logging.error("Barman cloud backup exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise GeneralErrorExit()
    finally:
        # Remove the temporary directory and all the contained files
        rmtree(tempdir, ignore_errors=True)