def parse_args(args):
    '''Parse command line'''

    parser = ArgumentParser(
        description='Measure S3QL write performance, uplink bandwidth and '
                    'compression speed and determine limiting factor.')

    # Common S3QL options shared with the other entry points.
    parser.add_quiet()
    parser.add_debug()
    parser.add_backend_options()
    parser.add_version()
    parser.add_storage_url()

    # Sample data to upload; opened in binary mode by argparse itself.
    parser.add_argument('file', metavar='<file>',
                        type=argparse.FileType(mode='rb'),
                        help='File to transfer')
    parser.add_argument('--threads', metavar='<n>', type=int, default=None,
                        help='Also include statistics for <n> threads in results.')
    parser.add_cachedir()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line'''

    parser = ArgumentParser(
        description='Measure S3QL write performance, uplink bandwidth and '
                    'compression speed and determine limiting factor.')

    # Standard S3QL command line options.
    parser.add_authfile()
    parser.add_quiet()
    parser.add_debug()
    parser.add_backend_options()
    parser.add_version()
    parser.add_storage_url()

    # File whose contents are used as the benchmark payload.
    parser.add_argument('file', metavar='<file>',
                        type=argparse.FileType(mode='rb'),
                        help='File to transfer')
    parser.add_argument('--threads', metavar='<n>', type=int, default=None,
                        help='Also include statistics for <n> threads in results.')
    parser.add_cachedir()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line'''

    parser = ArgumentParser(
        description='Measure S3QL write performance, uplink bandwidth and '
                    'compression speed and determine limiting factor.')

    # Shared S3QL option groups.
    parser.add_authfile()
    parser.add_quiet()
    parser.add_debug()
    parser.add_version()
    parser.add_storage_url()

    # Payload file, opened read-only/binary by argparse.
    parser.add_argument('file', metavar='<file>',
                        type=argparse.FileType(mode='rb'),
                        help='File to transfer')
    parser.add_cachedir()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line options for the metadata-scrambling tool.'''

    parser = ArgumentParser(
        description="Create metadata copy where all file- and directory names, "
                    "and extended attribute names and values have been scrambled. "
                    "This is intended to preserve privacy when a metadata copy "
                    "needs to be provided to the developers for debugging.")

    # Standard S3QL option groups.
    parser.add_debug_modules()
    parser.add_quiet()
    parser.add_version()
    parser.add_cachedir()
    parser.add_storage_url()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line options for the metadata-scrambling tool.'''

    parser = ArgumentParser(
        description="Create metadata copy where all file- and directory names, "
                    "and extended attribute names and values have been scrambled. "
                    "This is intended to preserve privacy when a metadata copy "
                    "needs to be provided to the developers for debugging.")

    # Standard S3QL option groups.
    parser.add_debug()
    parser.add_quiet()
    parser.add_version()
    parser.add_cachedir()
    parser.add_storage_url()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line options for fsck.'''

    parser = ArgumentParser(
        description="Checks and repairs an S3QL filesystem.")

    # Standard S3QL option groups; fsck gets its own default log file.
    parser.add_log('~/.s3ql/fsck.log')
    parser.add_cachedir()
    parser.add_authfile()
    parser.add_debug_modules()
    parser.add_quiet()
    parser.add_version()
    parser.add_storage_url()

    parser.add_argument("--batch", action="store_true", default=False,
                        help="If user input is required, exit without prompting.")
    parser.add_argument("--force", action="store_true", default=False,
                        help="Force checking even if file system is marked clean.")

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line'''

    parser = ArgumentParser(
        description="Manage S3QL Buckets.",
        epilog=textwrap.dedent('''\
            Hint: run `%(prog)s <action> --help` to get help on the additional
            arguments that the different actions take.'''))

    # Parent parser shared by all sub-commands, so that each of them
    # accepts a storage URL and shows the common hint in its epilog.
    pparser = ArgumentParser(
        add_help=False,
        epilog=textwrap.dedent('''\
            Hint: run `%(prog)s --help` to get help on other available actions and
            optional arguments that can be used with all actions.'''))
    pparser.add_storage_url()

    subparsers = parser.add_subparsers(metavar='<action>', dest='action',
                                       help='may be either of')
    subparsers.add_parser("passphrase", help="change bucket passphrase",
                          parents=[pparser])
    subparsers.add_parser("upgrade", help="upgrade file system to newest revision",
                          parents=[pparser])
    subparsers.add_parser("clear", help="delete all S3QL data from the bucket",
                          parents=[pparser])
    subparsers.add_parser("download-metadata",
                          help="Interactively download metadata backups. "
                               "Use only if you know what you are doing.",
                          parents=[pparser])

    # Standard S3QL option groups.
    parser.add_debug_modules()
    parser.add_quiet()
    parser.add_log()
    parser.add_authfile()
    parser.add_cachedir()
    parser.add_version()

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line options for mkfs.'''

    parser = ArgumentParser(
        description="Initializes an S3QL file system")

    # Standard S3QL option groups.
    parser.add_cachedir()
    parser.add_authfile()
    parser.add_debug_modules()
    parser.add_quiet()
    parser.add_version()
    parser.add_storage_url()

    parser.add_argument("-L", default='', help="Filesystem label",
                        dest="label", metavar='<name>',)
    parser.add_argument("--blocksize", type=int, default=10240, metavar='<size>',
                        help="Maximum block size in KB (default: %(default)d)")
    parser.add_argument("--plain", action="store_true", default=False,
                        help="Create unencrypted file system.")
    parser.add_argument("--force", action="store_true", default=False,
                        help="Overwrite any existing data.")

    return parser.parse_args(args)
def parse_args(args):
    '''Parse command line

    Translates fstab-style ``-o opt1,opt2=val,...`` option lists into
    individual ``--opt`` arguments, then parses the full command line and
    post-processes interdependent options.  Raises `QuietError` for an
    unsupported read-only (``ro``) mount request; calls ``parser.error``
    (which exits) on conflicting options.
    '''

    # Parse fstab-style -o options.  Anything after a literal '--' is
    # positional-only and must not be scanned for '-o'.
    if '--' in args:
        max_idx = args.index('--')
    else:
        max_idx = len(args)

    if '-o' in args[:max_idx]:
        pos = args.index('-o')
        opt_list = args[pos + 1]
        # Remove '-o' and its argument; replacements are inserted at `pos`.
        del args[pos]
        del args[pos]
        # Insert in reverse so that the original left-to-right order is
        # preserved after the repeated insertions at the same index.
        for opt in reversed(opt_list.split(',')):
            if '=' in opt:
                # Split only on the first '=' so that values containing '='
                # (e.g. -o backend-options=tls=1) do not raise ValueError.
                (key, value) = opt.split('=', 1)
                args.insert(pos, value)
                args.insert(pos, '--' + key)
            else:
                # Silently ignore generic mount options that have no S3QL
                # equivalent, so /etc/fstab entries keep working.
                if opt in ('rw', 'defaults', 'auto', 'noauto', 'user', 'nouser',
                           'dev', 'nodev', 'suid', 'nosuid', 'atime', 'diratime',
                           'exec', 'noexec', 'group', 'mand', 'nomand', '_netdev',
                           'nofail', 'norelatime', 'strictatime', 'owner',
                           'users', 'nobootwait'):
                    continue
                elif opt == 'ro':
                    raise QuietError('Read-only mounting not supported.')
                args.insert(pos, '--' + opt)

    parser = ArgumentParser(
        description="Mount an S3QL file system.")

    # Standard S3QL option groups; mount gets its own default log file.
    parser.add_log('~/.s3ql/mount.log')
    parser.add_cachedir()
    parser.add_authfile()
    parser.add_debug_modules()
    parser.add_quiet()
    parser.add_version()
    parser.add_storage_url()

    # Trailing slashes are stripped so that the mountpoint compares equal
    # regardless of how the user typed it.
    parser.add_argument("mountpoint", metavar='<mountpoint>',
                        type=(lambda x: x.rstrip('/')),
                        help='Where to mount the file system')

    parser.add_argument("--cachesize", type=int, default=102400, metavar='<size>',
                        help="Cache size in kb (default: 102400 (100 MB)). "
                             "Should be at least 10 times "
                             "the blocksize of the filesystem, otherwise an object may "
                             "be retrieved and written several times during a single "
                             "write() or read() operation.")
    parser.add_argument("--max-cache-entries", type=int, default=768, metavar='<num>',
                        help="Maximum number of entries in cache (default: %(default)d). "
                             'Each cache entry requires one file descriptor, so if you increase '
                             'this number you have to make sure that your process file descriptor '
                             'limit (as set with `ulimit -n`) is high enough (at least the number '
                             'of cache entries + 100).')
    parser.add_argument("--allow-other", action="store_true", default=False,
                        help='Normally, only the user who called `mount.s3ql` can access the mount '
                             'point. This user then also has full access to it, independent of '
                             'individual file permissions. If the `--allow-other` option is '
                             'specified, other users can access the mount point as well and '
                             'individual file permissions are taken into account for all users.')
    parser.add_argument("--allow-root", action="store_true", default=False,
                        help='Like `--allow-other`, but restrict access to the mounting '
                             'user and the root user.')
    parser.add_argument("--fg", action="store_true", default=False,
                        help="Do not daemonize, stay in foreground")
    parser.add_argument("--single", action="store_true", default=False,
                        help="Run in single threaded mode. If you don't understand this, "
                             "then you don't need it.")
    parser.add_argument("--upstart", action="store_true", default=False,
                        help="Stay in foreground and raise SIGSTOP once mountpoint "
                             "is up.")
    parser.add_argument("--profile", action="store_true", default=False,
                        help="Create profiling information. If you don't understand this, "
                             "then you don't need it.")
    parser.add_argument("--compress", action="store", default='lzma', metavar='<name>',
                        choices=('lzma', 'bzip2', 'zlib', 'none'),
                        help="Compression algorithm to use when storing new data. Allowed "
                             "values: `lzma`, `bzip2`, `zlib`, none. (default: `%(default)s`)")
    parser.add_argument("--metadata-upload-interval", action="store", type=int,
                        default=24 * 60 * 60, metavar='<seconds>',
                        help='Interval in seconds between complete metadata uploads. '
                             'Set to 0 to disable. Default: 24h.')
    parser.add_argument("--metadata-download-interval", action="store", type=int,
                        default=10, metavar='<seconds>',
                        help='Interval in seconds between complete metadata downloads. '
                             'Set to 0 to disable. Default: 10s.')
    parser.add_argument("--threads", action="store", type=int,
                        default=None, metavar='<no>',
                        help='Number of parallel upload threads to use (default: auto).')
    parser.add_argument("--nfs", action="store_true", default=False,
                        help='Support export of S3QL file systems over NFS '
                             '(default: %(default)s)')
    parser.add_argument("--readonly", action="store_true", default=False,
                        help='readonly file system does not commit changes to OSS '
                             'when unmounted (default: %(default)s)')

    options = parser.parse_args(args)

    # Cross-option validation and normalization.
    if options.allow_other and options.allow_root:
        parser.error("--allow-other and --allow-root are mutually exclusive.")

    if not options.log and not options.fg:
        parser.error("Please activate logging to a file or syslog, or use the --fg option.")

    # Profiling requires deterministic single-threaded execution.
    if options.profile:
        options.single = True

    # Upstart expects the process to stay in the foreground (SIGSTOP protocol).
    if options.upstart:
        options.fg = True

    # A zero interval means "disabled"; represent that as None internally.
    if options.metadata_upload_interval == 0:
        options.metadata_upload_interval = None

    if options.metadata_download_interval == 0:
        options.metadata_download_interval = None

    # 'none' on the command line means no compression at all.
    if options.compress == 'none':
        options.compress = None

    return options