Example #1
    def setUp(self):
        self.testpath = self.TestPath()
        init_logging(None, logging.DEBUG)

        self.config = Config(
            cfg=self.CONFIG.format(testpath=self.testpath.path),
            merge_defaults=False)
Example #2
 def setUp(self):
     self.testpath = _TestPath()
     init_logging(console_level=logging.WARN if os.environ.get(
         'UNITTEST_QUIET', False) else logging.DEBUG,
                  console_formatter='console-plain')
     self.config = Config(ad_hoc_config=self.CONFIG.format(
         testpath=self.testpath.path))
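Both setUp() variants above rely on a small test-path helper (self.TestPath() in Example #1, _TestPath() in Example #2) that the examples do not show. A minimal sketch of what such a helper could look like, assuming it only needs to create a scratch directory and expose it as .path:

import shutil
import tempfile


class _TestPath:
    # Hypothetical stand-in for the test-path helper used in the setUp()
    # examples above; it only provides a scratch directory via .path.

    def __init__(self):
        self.path = tempfile.mkdtemp(prefix='benji-test-')

    def close(self):
        # Remove the scratch directory again once the test is done.
        shutil.rmtree(self.path)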
Example #3
 def test_lists(self):
     config = Config(cfg=self.CONFIG, merge_defaults=False)
     self.assertTrue(type(config.get('io.rbd.newImageFeatures')) is list)
     self.assertRaises(TypeError,
                       config.get('io.rbd.newImageFeatures', types=list))
     self.assertEqual('RBD_FEATURE_EXCLUSIVE_LOCK',
                      config.get('io.rbd.newImageFeatures')[1])
Example #4
 def test_validation(self):
     config = Config(ad_hoc_config=self.CONFIG)
     module_configuration = {'path': '/var/tmp'}
     self.assertEqual(
         {
             'bandwidthRead': 0,
             'bandwidthWrite': 0,
             'consistencyCheckWrites': False,
             'path': '/var/tmp',
             'simultaneousReads': 3,
             'simultaneousWrites': 3,
             'simultaneousRemovals': 5,
         },
         config.validate(module='benji.storage.file',
                         config=module_configuration))
     module_configuration = {'asdasdas': 'dasdasd'}
     self.assertRaises(
         ConfigurationError,
         lambda: config.validate(module='benji.storage.file',
                                 config=module_configuration))
     module_configuration = {}
     self.assertRaises(
         ConfigurationError,
         lambda: config.validate(module='benji.storage.file',
                                 config=module_configuration))
     module_configuration = {'path': '/var/tmp', 'bandwidthRead': -1}
     self.assertRaises(
         ConfigurationError,
         lambda: config.validate(module='benji.storage.file',
                                 config=module_configuration))
     module_configuration = {'path': [1, 2, 3]}
     self.assertRaises(
         ConfigurationError,
         lambda: config.validate(module='benji.storage.file',
                                 config=module_configuration))
Example #5
 def test_default_overwrite(self):
     config = Config(cfg="""
     configurationVersion: '{}'
     dataBackend:
       simultaneousReads: 12345678
     """.format(Config.CONFIG_VERSION),
                     merge_defaults=True)
     self.assertEqual(12345678, config.get('dataBackend.simultaneousReads'))
     self.assertEqual(1, config.get('io.rbd.simultaneousReads'))
Example #6
 def test_lists(self):
     config = Config(ad_hoc_config=self.CONFIG)
     ios = config.get('ios', types=list)
     self.assertIsInstance(
         Config.get_from_dict(ios[0], 'configuration.newImageFeatures'),
         ConfigList)
     self.assertRaises(
         TypeError, lambda: Config.get_from_dict(
             ios[0], 'configuration.newImageFeatures', types=int))
     self.assertEqual(
         'RBD_FEATURE_EXCLUSIVE_LOCK',
         Config.get_from_dict(ios[0], 'configuration.newImageFeatures')[1])
Example #7
    def setUp(self):
        self.testpath = self.TestPath()
        init_logging(
            None, logging.WARN
            if os.environ.get('UNITTEST_QUIET', False) else logging.DEBUG)
        # This disables ResourceWarnings from boto3 which are normal
        # See: https://github.com/boto/boto3/issues/454
        warnings.filterwarnings(
            "ignore",
            category=ResourceWarning,
            message=r'unclosed.*<(?:ssl.SSLSocket|socket\.socket).*>')

        self.config = Config(ad_hoc_config=self.CONFIG.format(
            testpath=self.testpath.path))
Example #8
 def test_validation_io_rbd(self):
     config = Config(ad_hoc_config=self.CONFIG)
     module_configuration = config.get('ios')[0]['configuration']
     self.assertEqual(
         {
             'cephConfigFile':
             '/etc/ceph/ceph.conf',
             'clientIdentifier':
             'admin',
             'newImageFeatures':
             ['RBD_FEATURE_LAYERING', 'RBD_FEATURE_EXCLUSIVE_LOCK'],
             'simultaneousReads':
             10,
             'simultaneousWrites':
             10,
         },
         config.validate(module='benji.io.rbd',
                         config=module_configuration))
     module_configuration['newImageFeatures'] = ['ASASA', 'DDASAD']
     self.assertRaises(
         ConfigurationError,
         lambda: config.validate(module='benji.io.rbd',
                                 config=module_configuration))
Example #9
 def test_missing_version(self):
     self.assertRaises(
         ConfigurationError,
         lambda: Config(cfg='a: {b: 1, c: 2}', merge_defaults=False))
Example #10
 def test_wrong_version(self):
     self.assertRaises(
         ConfigurationError,
         lambda: Config(cfg='configurationVersion: \'234242.2343242\'',
                        merge_defaults=False))
Example #11
 def test_correct_version(self):
     self.assertTrue(
         isinstance(
             Config(cfg='configurationVersion: \'{}\''.format(
                 Config.CONFIG_VERSION),
                    merge_defaults=False), Config))
Example #12
 def test_correct_version(self):
     self.assertTrue(isinstance(Config(ad_hoc_config=self.CONFIG), Config))
Example #13
 def test_dict(self):
     config = Config(cfg=self.CONFIG, merge_defaults=False)
     self.assertEqual({
         '__position': 'nbd',
         'cacheDirectory': '/tmp'
     }, config.get('nbd', types=dict))
Example #14
 def test_load_from_file(self):
     cfile = os.path.join(self.testpath.path, 'test-config.yaml')
     with open(cfile, 'w') as f:
         f.write(self.CONFIG)
     config = Config(sources=[cfile])
     self.assertEqual('/var/log/benji.log', config.get('logFile'))
Example #15
 def test_missing(self):
     config = Config(cfg='configurationVersion: \'{}\''.format(
         Config.CONFIG_VERSION),
                     merge_defaults=False)
     self.assertRaises(KeyError, lambda: config.get('missing.option'))
Example #16
 def test_wrong_version(self):
     self.assertRaises(
         ConfigurationError,
         lambda: Config(ad_hoc_config=self.CONFIG_INVALID_VERSION))
Example #17
 def test_with_storage_id(self):
     config = Config(ad_hoc_config=self.CONFIG_WITH_STORAGE_ID)
     self.assertIsInstance(config, Config)
     storage_instance = config.get('storages')[0]
     self.assertIsInstance(storage_instance, dict)
     self.assertEqual(33, storage_instance['storageId'])
Example #18
 def test_missing_version(self):
     self.assertRaises(ConfigurationError,
                       lambda: Config(ad_hoc_config='a: {b: 1, c: 2}'))
Example #19
 def test_load_from_string(self):
     config = Config(ad_hoc_config=self.CONFIG)
     self.assertEqual('/var/log/benji.log', config.get('logFile',
                                                       types=str))
     self.assertEqual(4194304, config.get('blockSize', types=int))
Example #20
def test_integer_version(self):
    self.assertTrue(
        isinstance(Config(ad_hoc_config=self.CONFIG_INTEGER), Config))
Example #21
 def test_defaults(self):
     config = Config(ad_hoc_config=self.CONFIG)
     self.assertEqual('benji', config.get('processName'))
     self.assertEqual('BLAKE2b,digest_bits=256', config.get('hashFunction'))
Example #22
 def test_defaults(self):
     config = Config(cfg='configurationVersion: \'{}\''.format(
         Config.CONFIG_VERSION),
                     merge_defaults=True)
     self.assertEqual(1, config.get('dataBackend.simultaneousReads'))
     self.assertEqual(1, config.get('io.rbd.simultaneousReads'))
Example #23
 def test_load_from_file(self):
     cfile = os.path.join(self.testpath.path, 'test-config.yaml')
     with open(cfile, 'w') as f:
         f.write(self.CONFIG)
     config = Config(sources=[cfile], merge_defaults=False)
     self.assertEqual(10, config.get('io.rbd.simultaneousReads'))
Example #24
 def test_dict(self):
     config = Config(ad_hoc_config=self.CONFIG)
     nbd = config.get('nbd', types=dict)
     self.assertEqual('nbd', nbd.full_name)
Example #25
 def test_load_from_string(self):
     config = Config(cfg=self.CONFIG, merge_defaults=False)
     self.assertEqual(5, config.get('dataBackend.simultaneousReads'))
     self.assertEqual(10, config.get('io.rbd.simultaneousReads', types=int))
Example #26
 def test_with_default(self):
     config = Config(cfg='configurationVersion: \'{}\''.format(
         Config.CONFIG_VERSION),
                     merge_defaults=False)
     self.assertEqual('test', config.get('missing.option', 'test'))
Example #27
 def test_missing(self):
     config = Config(ad_hoc_config=self.CONFIG)
     self.assertRaises(KeyError, lambda: config.get('missing.option'))
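The configuration examples above all follow the same basic pattern: build a Config from an ad hoc YAML string (or from files via sources=[...]), read values with get(), optionally constrained by types= or with a fallback default, and validate per-module configuration with validate(). A minimal usage sketch combining patterns from the examples above (the exact constructor and get() signatures differ slightly between the older and newer examples); the YAML content is only illustrative:

from benji.config import Config

# Illustrative ad hoc configuration; the CONFIG strings used by the tests
# above are not included in these examples.
cfg = """
configurationVersion: '{}'
logFile: /var/log/benji.log
""".format(Config.CONFIG_VERSION)

config = Config(ad_hoc_config=cfg)
log_file = config.get('logFile', types=str)         # typed lookup (Example #19)
fallback = config.get('missing.option', 'default')  # fallback default (Example #26)

# Validate a module configuration and have its defaults filled in (Example #4).
module_config = config.validate(module='benji.storage.file',
                                config={'path': '/var/tmp'})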
Example #28
def main():
    if sys.hexversion < 0x030600F0:
        raise InternalError('Benji only supports Python 3.6 or above.')

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        allow_abbrev=False)

    parser.add_argument('-c',
                        '--config-file',
                        default=None,
                        type=str,
                        help='Specify a non-default configuration file')
    parser.add_argument('-m',
                        '--machine-output',
                        action='store_true',
                        default=False,
                        help='Enable machine-readable JSON output')
    parser.add_argument(
        '--log-level',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
        default='INFO',
        help='Only log messages of this level or above on the console')
    parser.add_argument('--no-color',
                        action='store_true',
                        default=False,
                        help='Disable colorization of console logging')

    subparsers_root = parser.add_subparsers(title='commands')

    # BACKUP
    p = subparsers_root.add_parser('backup', help='Perform a backup')
    p.add_argument('-s',
                   '--snapshot-name',
                   default='',
                   help='Snapshot name (e.g. the name of the RBD snapshot)')
    p.add_argument('-r',
                   '--rbd-hints',
                   default=None,
                   help='Hints in rbd diff JSON format')
    p.add_argument('-f',
                   '--base-version',
                   dest='base_version_uid',
                   default=None,
                   help='Base version UID')
    p.add_argument('-b',
                   '--block-size',
                   type=int,
                   default=None,
                   help='Block size in bytes')
    p.add_argument('-l',
                   '--label',
                   action='append',
                   dest='labels',
                   metavar='label',
                   default=None,
                   help='Labels for this version (can be repeated)')
    p.add_argument(
        '-S',
        '--storage',
        default='',
        help='Destination storage (if unspecified the default is used)')
    p.add_argument('source', help='Source URL')
    p.add_argument('version_name',
                   help='Backup version name (e.g. the hostname)')
    p.set_defaults(func='backup')

    # BATCH-DEEP-SCRUB
    p = subparsers_root.add_parser(
        'batch-deep-scrub',
        help='Check data and metadata integrity of multiple versions at once',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('-P',
                   '--version-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of versions')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='batch_deep_scrub')

    # BATCH-SCRUB
    p = subparsers_root.add_parser(
        'batch-scrub',
        help=
        'Check block existence and metadata integrity of multiple versions at once',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('-P',
                   '--version-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of versions')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='batch_scrub')

    # CLEANUP
    p = subparsers_root.add_parser('cleanup',
                                   help='Cleanup no longer referenced blocks')
    p.add_argument('--override-lock',
                   action='store_true',
                   help='Override and release any held lock (dangerous)')
    p.set_defaults(func='cleanup')

    # COMPLETION
    p = subparsers_root.add_parser('completion',
                                   help='Emit autocompletion script')
    p.add_argument('shell', choices=['bash', 'tcsh'], help='Shell')
    p.set_defaults(func='completion')

    # DATABASE-INIT
    p = subparsers_root.add_parser(
        'database-init',
        help='Initialize the database (will not delete existing tables or data)'
    )
    p.set_defaults(func='database_init')

    # DATABASE-MIGRATE
    p = subparsers_root.add_parser(
        'database-migrate',
        help='Migrate an existing database to a new schema revision')
    p.set_defaults(func='database_migrate')

    # DEEP-SCRUB
    p = subparsers_root.add_parser(
        'deep-scrub',
        help='Check a version\'s data and metadata integrity',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-s',
                   '--source',
                   default=None,
                   help='Additionally compare version against source URL')
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='deep_scrub')

    # ENFORCE
    p = subparsers_root.add_parser('enforce',
                                   help="Enforce a retention policy ")
    p.add_argument('--dry-run',
                   action='store_true',
                   help='Only show which versions would be removed')
    p.add_argument('-k',
                   '--keep-metadata-backup',
                   action='store_true',
                   help='Keep version metadata backup')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions to remove')
    p.add_argument('rules_spec', help='Retention rules specification')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='enforce_retention_policy')

    # LABEL
    p = subparsers_root.add_parser('label', help='Add labels to a version')
    p.add_argument('version_uid')
    p.add_argument('labels', nargs='+')
    p.set_defaults(func='label')

    # LS
    p = subparsers_root.add_parser('ls', help='List versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.add_argument('-l',
                   '--include-labels',
                   action='store_true',
                   help='Include labels in output')
    p.add_argument('-s',
                   '--include-stats',
                   action='store_true',
                   help='Include statistics in output')
    p.set_defaults(func='ls')

    # METADATA-BACKUP
    p = subparsers_root.add_parser(
        'metadata-backup', help='Back up the metadata of one or more versions')
    p.add_argument('filter_expression', help="Version filter expression")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite existing metadata backups')
    p.set_defaults(func='metadata_backup')

    # METADATA EXPORT
    p = subparsers_root.add_parser(
        'metadata-export',
        help=
        'Export the metadata of one or more versions to a file or standard output'
    )
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help="Version filter expression")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite an existing output file')
    p.add_argument('-o',
                   '--output-file',
                   default=None,
                   help='Output file (standard output if missing)')
    p.set_defaults(func='metadata_export')

    # METADATA-IMPORT
    p = subparsers_root.add_parser(
        'metadata-import',
        help=
        'Import the metadata of one or more versions from a file or standard input'
    )
    p.add_argument('-i',
                   '--input-file',
                   default=None,
                   help='Input file (standard input if missing)')
    p.set_defaults(func='metadata_import')

    # METADATA-LS
    p = subparsers_root.add_parser('metadata-ls',
                                   help='List the version metadata backup')
    p.add_argument('-S',
                   '--storage',
                   default=None,
                   help='Source storage (if unspecified the default is used)')
    p.set_defaults(func='metadata_ls')

    # METADATA-RESTORE
    p = subparsers_root.add_parser(
        'metadata-restore',
        help='Restore the metadata of one or more versions')
    p.add_argument('-S',
                   '--storage',
                   default=None,
                   help='Source storage (if unspecified the default is used)')
    p.add_argument('version_uids',
                   metavar='VERSION_UID',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='metadata_restore')

    # NBD
    p = subparsers_root.add_parser(
        'nbd',
        help='Start an NBD server',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-a',
                   '--bind-address',
                   default='127.0.0.1',
                   help='Bind to the specified IP address')
    p.add_argument('-p',
                   '--bind-port',
                   default=10809,
                   help='Bind to the specified port')
    p.add_argument('-r',
                   '--read-only',
                   action='store_true',
                   default=False,
                   help='NBD device is read-only')
    p.set_defaults(func='nbd')

    # PROTECT
    p = subparsers_root.add_parser('protect',
                                   help='Protect one or more versions')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='protect')

    # RESTORE
    p = subparsers_root.add_parser('restore', help='Restore a backup')
    p.add_argument('-s',
                   '--sparse',
                   action='store_true',
                   help='Restore only existing blocks')
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite an existing file, device or image')
    p.add_argument('-d',
                   '--database-backend-less',
                   action='store_true',
                   help='Restore without requiring the database backend')
    p.add_argument('version_uid', help='Version UID to restore')
    p.add_argument('destination', help='Destination URL')
    p.set_defaults(func='restore')

    # RM
    p = subparsers_root.add_parser('rm', help='Remove one or more versions')
    p.add_argument(
        '-f',
        '--force',
        action='store_true',
        help='Force removal (overrides protection of recent versions)')
    p.add_argument('-k',
                   '--keep-metadata-backup',
                   action='store_true',
                   help='Keep version metadata backup')
    p.add_argument('--override-lock',
                   action='store_true',
                   help='Override and release any held locks (dangerous)')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help='Version UID')
    p.set_defaults(func='rm')

    # SCRUB
    p = subparsers_root.add_parser(
        'scrub',
        help='Check a version\'s block existence and metadata integrity',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='scrub')

    # STORAGE-STATS
    p = subparsers_root.add_parser('storage-stats',
                                   help='Show storage statistics')
    p.add_argument('storage_name', nargs='?', default=None, help='Storage')
    p.set_defaults(func='storage_stats')

    # UNPROTECT
    p = subparsers_root.add_parser('unprotect',
                                   help='Unprotect one or more versions')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help='Version UID')
    p.set_defaults(func='unprotect')

    # VERSION-INFO
    p = subparsers_root.add_parser('version-info',
                                   help='Program version information')
    p.set_defaults(func='version_info')

    argcomplete.autocomplete(parser)
    args = parser.parse_args()

    if not hasattr(args, 'func'):
        parser.print_usage()
        sys.exit(os.EX_USAGE)

    if args.func == 'completion':
        completion(args.shell)
        sys.exit(os.EX_OK)

    from benji.config import Config
    from benji.logging import logger, init_logging
    if args.config_file is not None and args.config_file != '':
        try:
            cfg = open(args.config_file, 'r', encoding='utf-8').read()
        except FileNotFoundError:
            logger.error('File {} not found.'.format(args.config_file))
            sys.exit(os.EX_USAGE)
        config = Config(ad_hoc_config=cfg)
    else:
        config = Config()

    init_logging(config.get('logFile', types=(str, type(None))),
                 console_level=args.log_level,
                 console_formatter='console-plain'
                 if args.no_color else 'console-colored')

    if sys.hexversion < 0x030604F0:
        logger.warning(
            'The installed Python version will use excessive amounts of memory when used with Benji. Upgrade Python to at least 3.6.4.'
        )

    import benji.commands
    commands = benji.commands.Commands(args.machine_output, config)
    func = getattr(commands, args.func)

    # Pass over to function
    func_args = dict(args._get_kwargs())
    del func_args['config_file']
    del func_args['func']
    del func_args['log_level']
    del func_args['machine_output']
    del func_args['no_color']

    # From most specific to least specific
    exception_mappings = [
        _ExceptionMapping(exception=benji.exception.UsageError,
                          exit_code=os.EX_USAGE),
        _ExceptionMapping(exception=benji.exception.AlreadyLocked,
                          exit_code=os.EX_NOPERM),
        _ExceptionMapping(exception=benji.exception.InternalError,
                          exit_code=os.EX_SOFTWARE),
        _ExceptionMapping(exception=benji.exception.ConfigurationError,
                          exit_code=os.EX_CONFIG),
        _ExceptionMapping(exception=benji.exception.InputDataError,
                          exit_code=os.EX_DATAERR),
        _ExceptionMapping(exception=benji.exception.ScrubbingError,
                          exit_code=os.EX_DATAERR),
        _ExceptionMapping(exception=PermissionError, exit_code=os.EX_NOPERM),
        _ExceptionMapping(exception=FileExistsError,
                          exit_code=os.EX_CANTCREAT),
        _ExceptionMapping(exception=FileNotFoundError,
                          exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=EOFError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=IOError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=OSError, exit_code=os.EX_OSERR),
        _ExceptionMapping(exception=ConnectionError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=LookupError, exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=KeyboardInterrupt,
                          exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=BaseException, exit_code=os.EX_SOFTWARE),
    ]

    try:
        logger.debug('commands.{0}(**{1!r})'.format(args.func, func_args))
        func(**func_args)
        sys.exit(os.EX_OK)
    except SystemExit:
        raise
    except BaseException as exception:
        for case in exception_mappings:
            if isinstance(exception, case.exception):
                message = str(exception)
                if message:
                    message = '{}: {}'.format(exception.__class__.__name__,
                                              message)
                else:
                    message = '{} exception occurred.'.format(
                        exception.__class__.__name__)
                logger.debug(message, exc_info=True)
                logger.error(message)
                sys.exit(case.exit_code)
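Example #28 maps exceptions to exit codes through a list of _ExceptionMapping entries that is walked from the most specific exception class to the least specific one. The helper type itself is not part of the example; a minimal sketch of what it could look like, assuming it only needs the two fields used above:

from typing import NamedTuple, Type


class _ExceptionMapping(NamedTuple):
    exception: Type[BaseException]  # exception class matched with isinstance()
    exit_code: int                  # os.EX_* code handed to sys.exit()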
Example #29
def main():
    parser = argparse.ArgumentParser(
        description='Backup and restore for block devices.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbose output')
    parser.add_argument('-m',
                        '--machine-output',
                        action='store_true',
                        default=False,
                        help='Enable machine-readable JSON output')
    parser.add_argument('-V',
                        '--version',
                        action='store_true',
                        help='Show version')
    parser.add_argument('-c',
                        '--configfile',
                        default=None,
                        type=str,
                        help='Specify a non-default configuration file')
    parser.add_argument('--no-color',
                        action='store_true',
                        default=False,
                        help='Disable colourisation of logged messages')

    subparsers = parser.add_subparsers()

    # INITDB
    p = subparsers.add_parser(
        'initdb',
        help=
        'Initialize the database by populating tables; this will not delete tables or data if they exist'
    )
    p.set_defaults(func='initdb')

    # BACKUP
    p = subparsers.add_parser('backup', help="Perform a backup.")
    p.add_argument('source', help='Source (url-like, e.g. file:///dev/sda or rbd://pool/imagename@snapshot)')\
        .completer=ChoicesCompleter(('file://', 'rbd://'))
    p.add_argument('name', help='Backup name (e.g. the hostname)')
    p.add_argument('-s',
                   '--snapshot-name',
                   default='',
                   help='Snapshot name (e.g. the name of the RBD snapshot)')
    p.add_argument('-r',
                   '--rbd',
                   default=None,
                   help='Hints as RBD JSON format')
    p.add_argument('-f',
                   '--from-version',
                   dest='base_version_uid',
                   default=None,
                   help='Use this version as base')
    p.add_argument('-t',
                   '--tag',
                   action='append',
                   dest='tags',
                   metavar='tag',
                   default=None,
                   help='Tag this version with the specified tag(s)')
    p.add_argument('-b',
                   '--block-size',
                   type=int,
                   help='Block size to use for this backup in bytes')
    p.set_defaults(func='backup')

    # RESTORE
    p = subparsers.add_parser('restore',
                              help="Restore a given backup to a given target.")
    p.add_argument('-s',
                   '--sparse',
                   action='store_true',
                   help='Restore only existing blocks. Works only with file ' +
                   'and RBD targets, not with LVM. Faster.')
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Force overwrite of existing files/devices/images')
    p.add_argument(
        '-M',
        '--metadata-backend-less',
        action='store_true',
        help=
        'Restore directly from data backend without requiring the metadata backend.'
    )
    p.add_argument('version_uid')
    p.add_argument('target', help='Target (URL-like, e.g. file:///dev/sda or rbd://pool/imagename)')\
        .completer=ChoicesCompleter(('file://', 'rbd://'))
    p.set_defaults(func='restore')

    # PROTECT
    p = subparsers.add_parser(
        'protect',
        help="Protect a backup version. Protected versions cannot be removed.")
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='protect')

    # UNPROTECT
    p = subparsers.add_parser(
        'unprotect',
        help="Unprotect a backup version. Unprotected versions can be removed."
    )
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='unprotect')

    # RM
    p = subparsers.add_parser(
        'rm',
        help=
        "Remove the given backup versions. This will only remove meta data and you will have to cleanup after this."
    )
    p.add_argument(
        '-f',
        '--force',
        action='store_true',
        help=
        "Force removal of version, even if it's younger than the configured disallow_rm_when_younger_than_days."
    )
    p.add_argument('-k',
                   '--keep-backend-metadata',
                   action='store_true',
                   help='Don\'t delete version\'s metadata in data backend.')
    p.add_argument('version_uids', metavar='version_uid', nargs='+')
    p.set_defaults(func='rm')

    # ENFORCE
    p = subparsers.add_parser(
        'enforce',
        help="Enforce the given retenion policy on each listed version.")
    p.add_argument('--dry-run',
                   action='store_true',
                   help='Dry run: Only show which versions would be removed.')
    p.add_argument('-k',
                   '--keep-backend-metadata',
                   action='store_true',
                   help='Don\'t delete version\'s metadata in data backend.')
    p.add_argument('rules_spec', help='Retention rules specification')
    p.add_argument('version_names', metavar='version_name', nargs='+')
    p.set_defaults(func='enforce_retention_policy')

    # SCRUB
    p = subparsers.add_parser(
        'scrub', help="Scrub a given backup and check for consistency.")
    p.add_argument(
        '-p',
        '--block-percentage',
        default=100,
        help=
        "Only check BLOCK-PERCENTAGE percent of the blocks (value 1..100). Default: 100"
    )
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='scrub')

    # DEEP-SCRUB
    p = subparsers.add_parser(
        'deep-scrub',
        help="Deep scrub a given backup and check for consistency.")
    p.add_argument(
        '-s',
        '--source',
        default=None,
        help=
        'Source, optional. If given, check if source matches backup in addition to checksum tests. URL-like format as in backup.'
    )
    p.add_argument(
        '-p',
        '--block-percentage',
        default=100,
        help=
        "Only check BLOCK-PERCENTAGE percent of the blocks (value 1..100). Default: 100"
    )
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='deep_scrub')

    # BULK-SCRUB
    p = subparsers.add_parser('bulk-scrub',
                              help="Bulk deep scrub all matching versions.")
    p.add_argument(
        '-p',
        '--block-percentage',
        default=100,
        help=
        "Only check BLOCK-PERCENTAGE percent of the blocks (value 1..100). Default: 100"
    )
    p.add_argument(
        '-P',
        '--version-percentage',
        default=100,
        help=
        "Only check VERSION-PERCENTAGE of matching versions(value 1..100). Default: 100"
    )
    p.add_argument('-t',
                   '--tag',
                   action='append',
                   dest='tags',
                   metavar='TAG',
                   default=None,
                   help='Scrub only versions matching this tag.')
    p.add_argument('names', metavar='NAME', nargs='*', help="Version names")
    p.set_defaults(func='bulk_scrub')

    # BULK-DEEP-SCRUB
    p = subparsers.add_parser('bulk-deep-scrub',
                              help="Bulk deep scrub all matching versions.")
    p.add_argument(
        '-p',
        '--block-percentage',
        default=100,
        help=
        "Only check BLOCK-PERCENTAGE percent of the blocks (value 1..100). Default: 100"
    )
    p.add_argument(
        '-P',
        '--version-percentage',
        default=100,
        help=
        "Only check VERSION-PERCENTAGE of matching versions(value 1..100). Default: 100"
    )
    p.add_argument(
        '-t',
        '--tag',
        action='append',
        dest='tags',
        metavar='TAG',
        default=None,
        help=
        'Scrub only versions matching this tag. Multiple use of this option constitutes an OR operation.'
    )
    p.add_argument('names', metavar='NAME', nargs='*', help="Version names")
    p.set_defaults(func='bulk_deep_scrub')

    # Export
    p = subparsers.add_parser(
        'export',
        help=
        'Export the metadata of one or more versions to a file or standard out.'
    )
    p.add_argument('version_uids',
                   metavar='VERSION_UID',
                   nargs='+',
                   help="Version UID")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Force overwrite of existing output file')
    p.add_argument(
        '-o',
        '--output-file',
        help=
        'Write export into this file (stdout is used if this option isn\'t specified)'
    )
    p.set_defaults(func='export')

    # Import
    p = subparsers.add_parser(
        'import',
        help=
        'Import the metadata of one or more versions from a file or standard input.'
    )
    p.add_argument(
        '-i',
        '--input-file',
        help=
        'Read from this file (stdin is used if this option isn\'t specified)')
    p.set_defaults(func='import_')

    # Export to data backend
    p = subparsers.add_parser(
        'export-to-backend',
        help='Export metadata of one or more versions to the data backend')
    p.add_argument('version_uids',
                   metavar='VERSION_UID',
                   nargs='+',
                   help="Version UID")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Force overwrite of existing metadata in data backend')
    p.set_defaults(func='export_to_backend')

    # Import from data backend
    p = subparsers.add_parser(
        'import-from-backend',
        help="Import metadata of one ore more versions from the data backend")
    p.add_argument('version_uids',
                   metavar='VERSION_UID',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='import_from_backend')

    # CLEANUP
    p = subparsers.add_parser('cleanup', help="Clean unreferenced blobs.")
    p.add_argument(
        '-f',
        '--full',
        action='store_true',
        default=False,
        help=
        'Do a full cleanup. This will read the full metadata from the data backend (i.e. backup storage) '
        'and compare it to the metadata in the metadata backend. Unused data will then be deleted. '
        'This is a slow, but complete process. A full cleanup must not run in parallel to ANY other jobs.'
    )
    p.set_defaults(func='cleanup')

    # LS
    p = subparsers.add_parser('ls', help="List existing backups.")
    p.add_argument('name',
                   nargs='?',
                   default=None,
                   help='Show versions for this name only')
    p.add_argument('-s',
                   '--snapshot-name',
                   default=None,
                   help="Limit output to this SNAPSHOT_NAME")
    p.add_argument(
        '-t',
        '--tag',
        action='append',
        dest='tags',
        metavar='TAG',
        default=None,
        help=
        'Limit output to this TAG. Multiple use constitutes an OR operation.')
    p.add_argument('--include-blocks',
                   default=False,
                   action='store_true',
                   help='Include blocks in output')
    p.set_defaults(func='ls')

    # STATS
    p = subparsers.add_parser('stats', help="Show statistics")
    p.add_argument('version_uid',
                   nargs='?',
                   default=None,
                   help='Show statistics for this version')
    p.add_argument('-l',
                   '--limit',
                   default=None,
                   help="Limit output to this number (default: unlimited)")
    p.set_defaults(func='stats')

    # diff-meta
    p = subparsers.add_parser('diff-meta',
                              help="Output a diff between two versions")
    p.add_argument('version_uid1', help='Left version')
    p.add_argument('version_uid2', help='Right version')
    p.set_defaults(func='diff_meta')

    # NBD
    p = subparsers.add_parser('nbd', help="Start an nbd server")
    p.add_argument('-a',
                   '--bind-address',
                   default='127.0.0.1',
                   help="Bind to this ip address (default: 127.0.0.1)")
    p.add_argument('-p',
                   '--bind-port',
                   default=10809,
                   help="Bind to this port (default: 10809)")
    p.add_argument(
        '-r',
        '--read-only',
        action='store_true',
        default=False,
        help='Read-only if set, otherwise a copy-on-write backup is created.')
    p.set_defaults(func='nbd')

    # ADD TAG
    p = subparsers.add_parser('add-tag',
                              help="Add a named tag to a backup version.")
    p.add_argument('version_uid')
    p.add_argument('names', metavar='NAME', nargs='+')
    p.set_defaults(func='add_tag')

    # REMOVE TAG
    p = subparsers.add_parser('rm-tag',
                              help="Remove a named tag from a backup version.")
    p.add_argument('version_uid')
    p.add_argument('names', metavar='NAME', nargs='+')
    p.set_defaults(func='rm_tag')

    argcomplete.autocomplete(parser)
    args = parser.parse_args()

    if args.version:
        print(__version__)
        exit(os.EX_OK)

    if not hasattr(args, 'func'):
        parser.print_usage()
        exit(os.EX_USAGE)

    if args.verbose:
        console_level = logging.DEBUG
    else:
        console_level = logging.INFO

    if args.configfile is not None and args.configfile != '':
        try:
            cfg = open(args.configfile, 'r', encoding='utf-8').read()
        except FileNotFoundError:
            logger.error('File {} not found.'.format(args.configfile))
            exit(os.EX_USAGE)
        config = Config(cfg=cfg)
    else:
        config = Config()

    # logging ERROR only when machine output is selected
    if args.machine_output:
        init_logging(config.get('logFile', types=(str, type(None))),
                     logging.ERROR,
                     no_color=args.no_color)
    else:
        init_logging(config.get('logFile', types=(str, type(None))),
                     console_level,
                     no_color=args.no_color)

    commands = Commands(args.machine_output, config)
    func = getattr(commands, args.func)

    # Pass over to function
    func_args = dict(args._get_kwargs())
    del func_args['configfile']
    del func_args['func']
    del func_args['verbose']
    del func_args['version']
    del func_args['machine_output']
    del func_args['no_color']

    # From most specific to least specific
    exit_code_list = [
        {
            'exception': benji.exception.UsageError,
            'msg': 'Usage error',
            'exit_code': os.EX_USAGE
        },
        {
            'exception': benji.exception.AlreadyLocked,
            'msg': 'Already locked error',
            'exit_code': os.EX_NOPERM
        },
        {
            'exception': benji.exception.InternalError,
            'msg': 'Internal error',
            'exit_code': os.EX_SOFTWARE
        },
        {
            'exception': benji.exception.ConfigurationError,
            'msg': 'Configuration error',
            'exit_code': os.EX_CONFIG
        },
        {
            'exception': benji.exception.InputDataError,
            'msg': 'Input data error',
            'exit_code': os.EX_DATAERR
        },
        {
            'exception': benji.exception.ScrubbingError,
            'msg': 'Scrubbing error',
            'exit_code': os.EX_DATAERR
        },
        {
            'exception': PermissionError,
            'msg': 'Already locked error',
            'exit_code': os.EX_NOPERM
        },
        {
            'exception': FileExistsError,
            'msg': 'Already exists',
            'exit_code': os.EX_CANTCREAT
        },
        {
            'exception': FileNotFoundError,
            'msg': 'Not found',
            'exit_code': os.EX_NOINPUT
        },
        {
            'exception': EOFError,
            'msg': 'I/O error',
            'exit_code': os.EX_IOERR
        },
        {
            'exception': IOError,
            'msg': 'I/O error',
            'exit_code': os.EX_IOERR
        },
        {
            'exception': OSError,
            'msg': 'Not found',
            'exit_code': os.EX_OSERR
        },
        {
            'exception': ConnectionError,
            'msg': 'I/O error',
            'exit_code': os.EX_IOERR
        },
        {
            'exception': LookupError,
            'msg': 'Not found',
            'exit_code': os.EX_NOINPUT
        },
        {
            'exception': BaseException,
            'msg': 'Other exception',
            'exit_code': os.EX_SOFTWARE
        },
    ]

    try:
        logger.debug('backup.{0}(**{1!r})'.format(args.func, func_args))
        func(**func_args)
        exit(0)
    except SystemExit:
        raise
    except BaseException as exception:
        for case in exit_code_list:
            if isinstance(exception, case['exception']):
                logger.debug(case['msg'], exc_info=True)
                logger.error(str(exception))
                exit(case['exit_code'])