def main(args=None):
    """
    The main script entry point.

    Lists the backups stored in the cloud for the configured server, either
    as a fixed-width console table (completed backups only) or as JSON
    (all catalog entries).

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            if not cloud_interface.test_connectivity():
                raise SystemExit(1)
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)

            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise SystemExit(1)

            # Build the catalog only after connectivity and bucket existence
            # have been verified, consistent with the delete entry point: no
            # point holding a catalog for a bucket we cannot reach.
            catalog = CloudBackupCatalog(
                cloud_interface=cloud_interface, server_name=config.server_name
            )
            backup_list = catalog.get_backup_list()

            # Output
            if config.format == "console":
                COLUMNS = "{:<20}{:<25}{:<30}"
                print(COLUMNS.format("Backup ID", "End Time", "Begin Wal"))
                for backup_id in sorted(backup_list):
                    item = backup_list[backup_id]
                    # Console output shows only completed (DONE) backups
                    if item and item.status == BackupInfo.DONE:
                        print(
                            COLUMNS.format(
                                item.backup_id,
                                item.end_time.strftime("%Y-%m-%d %H:%M:%S"),
                                item.begin_wal,
                            )
                        )
            else:
                # JSON output deliberately includes every catalog entry,
                # regardless of status, so callers can inspect the full catalog
                print(
                    json.dumps(
                        {
                            "backups_list": [
                                backup_list[backup_id].to_json()
                                for backup_id in sorted(backup_list)
                            ]
                        }
                    )
                )
    except Exception as exc:
        logging.error("Barman cloud backup list exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise SystemExit(1)
def main(args=None):
    """
    The main script entry point.

    Deletes cloud backups, either a single backup selected by ``backup_id``
    (refused when the backup has a keep request) or every backup made
    OBSOLETE by the configured retention policy.

    :param list[str] args: the raw arguments list. When not provided
        it defaults to sys.args[1:]
    """
    config = parse_arguments(args)
    configure_logging(config)
    try:
        cloud_interface = get_cloud_interface(config)
        with closing(cloud_interface):
            if not cloud_interface.test_connectivity():
                raise SystemExit(1)
            # If test is requested, just exit after connectivity test
            elif config.test:
                raise SystemExit(0)

            if not cloud_interface.bucket_exists:
                logging.error("Bucket %s does not exist", cloud_interface.bucket_name)
                raise SystemExit(1)

            catalog = CloudBackupCatalog(
                cloud_interface=cloud_interface, server_name=config.server_name
            )
            # Call catalog.get_backup_list now so we know we can read the whole
            # catalog (the results are cached so this does not result in extra
            # calls to cloud storage)
            catalog.get_backup_list()
            if catalog.unreadable_backups:
                # Lazy %-args keep this consistent with the other logging calls
                logging.error(
                    "Cannot read the following backups: %s\n"
                    "Unsafe to proceed with deletion due to failure reading "
                    "backup catalog",
                    catalog.unreadable_backups,
                )
                raise SystemExit(1)

            if config.backup_id:
                # Because we only care about one backup, skip the annotation
                # cache because it is only helpful when dealing with multiple
                # backups
                if catalog.should_keep_backup(config.backup_id, use_cache=False):
                    logging.error(
                        "Skipping delete of backup %s for server %s "
                        "as it has a current keep request. If you really "
                        "want to delete this backup please remove the keep "
                        "and try again.",
                        config.backup_id,
                        config.server_name,
                    )
                    raise SystemExit(1)
                _delete_backup(
                    cloud_interface, catalog, config.backup_id, config.dry_run
                )
            elif config.retention_policy:
                retention_policy = RetentionPolicyFactory.create(
                    "retention_policy",
                    config.retention_policy,
                    server_name=config.server_name,
                    catalog=catalog,
                )
                # Sort to ensure that we delete the backups in ascending order,
                # that is from oldest to newest. This ensures that the relevant
                # WALs will be cleaned up after each backup is deleted.
                backups_to_delete = sorted(
                    backup_id
                    for backup_id, status in retention_policy.report().items()
                    if status == "OBSOLETE"
                )
                for backup_id in backups_to_delete:
                    _delete_backup(
                        cloud_interface,
                        catalog,
                        backup_id,
                        config.dry_run,
                        skip_wal_cleanup_if_standalone=False,
                    )
    except Exception as exc:
        logging.error("Barman cloud backup delete exception: %s", force_str(exc))
        logging.debug("Exception details:", exc_info=exc)
        raise SystemExit(1)