def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud backup delete command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    parser, _, _ = create_argument_parser(
        description="This script can be used to delete backups "
        "made with barman-cloud-backup command. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
    )
    # Callers must choose exactly one deletion mode: a single backup by ID,
    # or everything made obsolete by a retention policy.
    deletion_mode = parser.add_mutually_exclusive_group(required=True)
    deletion_mode.add_argument(
        "-b",
        "--backup-id",
        help="Backup ID of the backup to be deleted",
    )
    deletion_mode.add_argument(
        "-r",
        "--retention-policy",
        help="If specified, delete all backups eligible for deletion according to the "
        "supplied retention policy. Syntax: REDUNDANCY value | RECOVERY WINDOW OF "
        "value {DAYS | WEEKS | MONTHS}",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Find the objects which need to be deleted but do not delete them",
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud backup keep command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    parser, _, _ = create_argument_parser(
        description="This script can be used to tag backups in cloud storage as "
        "archival backups such that they will not be deleted. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
    )
    parser.add_argument(
        "backup_id",
        help="the backup ID of the backup to be kept",
    )
    # The three keep operations are mutually exclusive and one is mandatory:
    # release the annotation, query its status, or set a recovery target.
    keep_action = parser.add_mutually_exclusive_group(required=True)
    keep_action.add_argument(
        "-r",
        "--release",
        help="If specified, the command will remove the keep annotation and the "
        "backup will be eligible for deletion",
        action="store_true",
    )
    keep_action.add_argument(
        "-s",
        "--status",
        help="Print the keep status of the backup",
        action="store_true",
    )
    keep_action.add_argument(
        "--target",
        help="Specify the recovery target for this backup",
        choices=[KeepManager.TARGET_FULL, KeepManager.TARGET_STANDALONE],
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the WAL archive check command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    # The cloud URL is a destination here (a new server will archive to it),
    # not a source to read from.
    parser, _, _ = create_argument_parser(
        description="Checks that the WAL archive on the specified cloud storage "
        "can be safely used for a new PostgreSQL server.",
        source_or_destination=UrlArgumentType.destination,
    )
    parser.add_argument(
        "--timeline",
        help="The earliest timeline whose WALs should cause the check to fail",
        type=check_positive,
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud backup list command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    parser, _, _ = create_argument_parser(
        description="This script can be used to list backups "
        "made with barman-cloud-backup command. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
    )
    parser.add_argument(
        "--format",
        default="console",
        help="Output format (console or json). Default console.",
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud backup restore command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    parser, _, _ = create_argument_parser(
        # Fix: the original adjacent string literals were missing a separating
        # space, so the rendered help text read "command.Currently AWS S3...".
        description="This script can be used to download a backup "
        "previously made with barman-cloud-backup command. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
    )
    parser.add_argument("backup_id", help="the backup ID")
    parser.add_argument("recovery_dir", help="the path to a directory for recovery.")
    parser.add_argument(
        "--tablespace",
        help="tablespace relocation rule",
        metavar="NAME:LOCATION",
        action="append",
        default=[],
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud WAL restore command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    parser, _, _ = create_argument_parser(
        description="This script can be used as a `restore_command` "
        "to download WAL files previously archived with "
        "barman-cloud-wal-archive command. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
    )
    # %%f / %%p are doubled so argparse's help formatter does not treat the
    # percent sign as a formatting directive.
    parser.add_argument(
        "wal_name",
        help="The value of the '%%f' keyword (according to 'restore_command').",
    )
    parser.add_argument(
        "wal_dest",
        help="The value of the '%%p' keyword (according to 'restore_command').",
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud WAL archive command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    # The cloud URL is a destination: WALs are shipped to it.
    parser, s3_arguments, azure_arguments = create_argument_parser(
        description="This script can be used in the `archive_command` "
        "of a PostgreSQL server to ship WAL files to the Cloud. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
        source_or_destination=UrlArgumentType.destination,
    )
    parser.add_argument(
        "wal_path",
        nargs="?",
        help="the value of the '%%p' keyword (according to 'archive_command').",
        default=None,
    )
    # At most one compression algorithm may be selected; each option stores
    # its algorithm name into the shared `compression` destination.
    compression_group = parser.add_mutually_exclusive_group()
    compression_group.add_argument(
        "-z",
        "--gzip",
        help="gzip-compress the WAL while uploading to the cloud "
        "(should not be used with python < 3.2)",
        action="store_const",
        const="gzip",
        dest="compression",
    )
    compression_group.add_argument(
        "-j",
        "--bzip2",
        help="bzip2-compress the WAL while uploading to the cloud "
        "(should not be used with python < 3.3)",
        action="store_const",
        const="bzip2",
        dest="compression",
    )
    compression_group.add_argument(
        "--snappy",
        help="snappy-compress the WAL while uploading to the cloud "
        "(requires optional python-snappy library)",
        action="store_const",
        const="snappy",
        dest="compression",
    )
    add_tag_argument(
        parser,
        name="tags",
        help="Tags to be added to archived WAL files in cloud storage",
    )
    add_tag_argument(
        parser,
        name="history-tags",
        help="Tags to be added to archived history files in cloud storage",
    )
    # Provider-specific options live on the sub-groups returned by
    # create_argument_parser.
    s3_arguments.add_argument(
        "-e",
        "--encryption",
        help="The encryption algorithm used when storing the uploaded data in S3. "
        "Allowed values: 'AES256'|'aws:kms'.",
        choices=["AES256", "aws:kms"],
        metavar="ENCRYPTION",
    )
    azure_arguments.add_argument(
        "--encryption-scope",
        help="The name of an encryption scope defined in the Azure Blob Storage "
        "service which is to be used to encrypt the data in Azure",
    )
    azure_arguments.add_argument(
        "--max-block-size",
        help="The chunk size to be used when uploading an object via the "
        "concurrent chunk method (default: 4MB).",
        type=check_size,
        default="4MB",
    )
    azure_arguments.add_argument(
        "--max-concurrency",
        help="The maximum number of chunks to be uploaded concurrently (default: 1).",
        type=check_positive,
        default=1,
    )
    azure_arguments.add_argument(
        "--max-single-put-size",
        help="Maximum size for which the Azure client will upload an object in a "
        "single request (default: 64MB). If this is set lower than the PostgreSQL "
        "WAL segment size after any applied compression then the concurrent chunk "
        "upload method for WAL archiving will be used.",
        default="64MB",
        type=check_size,
    )
    return parser.parse_args(args=args)
def parse_arguments(args=None):
    """
    Parse the command line arguments for the cloud backup command.

    :param args: Optional argument list (defaults to ``sys.argv``)
    :return: The options parsed
    """
    # The cloud URL is a destination: backup tarballs are shipped to it.
    parser, s3_arguments, azure_arguments = create_argument_parser(
        description="This script can be used to perform a backup "
        "of a local PostgreSQL instance and ship "
        "the resulting tarball(s) to the Cloud. "
        "Currently AWS S3, Azure Blob Storage and Google Cloud Storage are supported.",
        source_or_destination=UrlArgumentType.destination,
    )
    # At most one compression algorithm; each option writes its const into
    # the shared `compression` destination.
    compression_group = parser.add_mutually_exclusive_group()
    compression_group.add_argument(
        "-z",
        "--gzip",
        help="gzip-compress the WAL while uploading to the cloud",
        action="store_const",
        const="gz",
        dest="compression",
    )
    compression_group.add_argument(
        "-j",
        "--bzip2",
        help="bzip2-compress the WAL while uploading to the cloud",
        action="store_const",
        const="bz2",
        dest="compression",
    )
    compression_group.add_argument(
        "--snappy",
        help="snappy-compress the WAL while uploading to the cloud ",
        action="store_const",
        const="snappy",
        dest="compression",
    )
    # PostgreSQL connection options; unset values fall back to libpq defaults.
    # NOTE(review): "-h" presumably does not clash with argparse's help flag
    # because create_argument_parser appears to configure that — confirm.
    parser.add_argument(
        "-h",
        "--host",
        help="host or Unix socket for PostgreSQL connection "
        "(default: libpq settings)",
    )
    parser.add_argument(
        "-p",
        "--port",
        help="port for PostgreSQL connection (default: libpq settings)",
    )
    parser.add_argument(
        "-U",
        "--user",
        help="user name for PostgreSQL connection (default: libpq settings)",
    )
    parser.add_argument(
        "--immediate-checkpoint",
        help="forces the initial checkpoint to be done as quickly as possible",
        action="store_true",
    )
    parser.add_argument(
        "-J",
        "--jobs",
        type=check_positive,
        help="number of subprocesses to upload data to cloud storage (default: 2)",
        default=2,
    )
    parser.add_argument(
        "-S",
        "--max-archive-size",
        type=check_size,
        help="maximum size of an archive when uploading to cloud storage "
        "(default: 100GB)",
        default="100GB",
    )
    parser.add_argument(
        "-d",
        "--dbname",
        help="Database name or conninfo string for Postgres connection (default: postgres)",
        default="postgres",
    )
    add_tag_argument(
        parser,
        name="tags",
        help="Tags to be added to all uploaded files in cloud storage",
    )
    # Provider-specific options live on the sub-groups returned by
    # create_argument_parser.
    s3_arguments.add_argument(
        "-e",
        "--encryption",
        help="The encryption algorithm used when storing the uploaded data in S3. "
        "Allowed values: 'AES256'|'aws:kms'.",
        choices=["AES256", "aws:kms"],
    )
    azure_arguments.add_argument(
        "--encryption-scope",
        help="The name of an encryption scope defined in the Azure Blob Storage "
        "service which is to be used to encrypt the data in Azure",
    )
    return parser.parse_args(args=args)