Example #1
 def add_schema(cls, *, module: str, version: semantic_version.Version, file: str) -> None:
     name = cls._schema_name(module, version)
     try:
         with open(file, 'r') as f:
             schema = ruamel.yaml.load(f, Loader=ruamel.yaml.SafeLoader)
         cls._schema_registry[name] = schema
     except FileNotFoundError:
         raise InternalError('Schema {} not found or not accessible.'.format(file))
     except SchemaError as exception:
         raise InternalError('Schema {} is invalid.'.format(file)) from exception
Example #2
    def _get(dict_, name, *args, types=None, check_func=None, check_message=None):
        if '__position' in dict_:
            full_name = '{}.{}'.format(dict_['__position'], name)
        else:
            full_name = name

        if len(args) > 1:
            raise InternalError('Called with more than two arguments for key {}.'.format(full_name))

        try:
            value = reduce(operator.getitem, name.split('.'), dict_)
            if types is not None and not isinstance(value, types):
                raise TypeError('Config value {} has wrong type {}, expected {}.'.format(full_name, type(value), types))
            if check_func is not None and not check_func(value):
                if check_message is None:
                    raise ConfigurationError('Config option {} has the right type but the supplied value is invalid.'
                                             .format(full_name))
                else:
                    raise ConfigurationError('Config option {} is invalid: {}.'.format(full_name, check_message))
            if isinstance(value, dict):
                value['__position'] = name
            return value
        except KeyError:
            if len(args) == 1:
                return args[0]
            else:
                if types and isinstance({}, types):
                    raise KeyError('Config section {} is missing.'.format(full_name)) from None
                else:
                    raise KeyError('Config option {} is missing.'.format(full_name)) from None
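
A minimal, standalone sketch of the dotted-key lookup that _get performs; the get_path helper and its sample config below are made up for illustration:

import operator
from functools import reduce

def get_path(config: dict, name: str, default=None):
    # Resolve a dotted key such as 'dataBackend.simultaneousWrites' against nested dicts.
    try:
        return reduce(operator.getitem, name.split('.'), config)
    except KeyError:
        return default

config = {'dataBackend': {'simultaneousWrites': 3}}
assert get_path(config, 'dataBackend.simultaneousWrites') == 3
assert get_path(config, 'dataBackend.missing', 42) == 42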
Example #3
    def verify_digest(self, dict_data) -> None:
        if not isinstance(dict_data, dict):
            raise InternalError(
                f'dict_data must be of type dict, but is of type {type(dict_data)}.'
            )
        if self._hmac_key not in dict_data:
            raise ValueError(
                f'Dictionary is missing required HMAC key {self._hmac_key}.')

        hmac_dict = dict_data[self._hmac_key]

        if not isinstance(hmac_dict, dict):
            raise ValueError(
                f'HMAC key {self._hmac_key} has an invalid type of {type(hmac_dict)}.'
            )

        for required_key in [self._ALGORITHM_KEY, self._DIGEST_KEY]:
            if required_key not in hmac_dict:
                raise ValueError(
                    f'Required key {required_key} is missing in HMAC dictionary.'
                )

        if hmac_dict[self._ALGORITHM_KEY] != self._HASH_NAME:
            raise ValueError(
                f'Unsupported hash algorithm {hmac_dict[self._ALGORITHM_KEY]}.')

        digest_expected = hmac_dict[self._DIGEST_KEY]
        del dict_data[self._hmac_key]
        digest = self._calculate_digest(dict_data)
        if digest != digest_expected:
            raise ValueError(
                f'Dictionary HMAC is invalid (expected {digest_expected}, actual {digest}).'
            )
Example #4
    def lock(self, *, lock_name=GLOBAL_LOCK, reason=None, locked_msg=None):
        if lock_name in self._locks:
            raise InternalError(
                'Attempt to acquire lock "{}" twice'.format(lock_name))

        lock = Lock(
            host=self._host,
            process_id=self._uuid,
            lock_name=lock_name,
            reason=reason,
            date=datetime.datetime.utcnow(),
        )
        try:
            self._session.add(lock)
            self._session.commit()
        except SQLAlchemyError:  # this is actually too broad and will also include other errors
            self._session.rollback()
            if locked_msg is not None:
                raise AlreadyLocked(locked_msg)
            else:
                raise AlreadyLocked(
                    'Lock {} is already taken.'.format(lock_name))
        except:
            self._session.rollback()
            raise
        else:
            self._locks[lock_name] = lock
Example #5
    def _check_write(self, key, metadata_key, data, metadata):
        # Source: https://stackoverflow.com/questions/4527942/comparing-two-dictionaries-in-python
        def dict_compare(d1, d2):
            d1_keys = set(d1.keys())
            d2_keys = set(d2.keys())
            intersect_keys = d1_keys.intersection(d2_keys)
            added = d1_keys - d2_keys
            removed = d2_keys - d1_keys
            modified = {
                o: (d1[o], d2[o])
                for o in intersect_keys if d1[o] != d2[o]
            }
            same = set(o for o in intersect_keys if d1[o] == d2[o])
            return added, removed, modified, same

        rdata = self._read_object(key)
        rmetadata = self._read_object(metadata_key)
        rmetadata = json.loads(rmetadata.decode('utf-8'))

        if metadata:
            added, removed, modified, same = dict_compare(rmetadata, metadata)
            logger.debug(
                'Comparing written and read metadata of {}:'.format(key))
            logger.debug(
                '  added: {}, removed: {}, modified: {}, same: {}'.format(
                    added, removed, modified, same))
            if removed:
                raise InternalError(
                    'Consistency check: Metadata headers are missing in read back data: {}'
                    .format(', '.join(removed)))
            different_for = []
            for name in modified:
                logger.debug('Metadata differences: ')
                logger.debug('  {}: wrote {}, read {}'.format(
                    name, metadata[name], rmetadata[name]))
                if metadata[name] != rmetadata[name]:
                    different_for.append(name)
            if different_for:
                raise InternalError(
                    'Consistency check: Written and read metadata of {} are different for {}.'
                    .format(key, ', '.join(different_for)))
        # Comparing encrypted/compressed data here
        if data != rdata:
            raise InternalError(
                'Consistency check: Written and read data of {} differ.'.
                format(key))
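
A quick standalone illustration of the dict_compare helper's semantics as used in _check_write (the metadata values are made up):

written = {'checksum': 'abc', 'size': 4096, 'block': 1}
read_back = {'checksum': 'abc', 'size': 4095, 'extra': True}

d1_keys, d2_keys = set(read_back), set(written)
added = d1_keys - d2_keys       # only in the read-back metadata: {'extra'}
removed = d2_keys - d1_keys     # missing from the read-back metadata: {'block'}
modified = {k: (read_back[k], written[k])
            for k in d1_keys & d2_keys if read_back[k] != written[k]}  # {'size': (4095, 4096)}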
Example #6
    def _get(root,
             name: str,
             *args,
             types: Any = None,
             check_func: Callable[[object], bool] = None,
             check_message: str = None,
             full_name_override: str = None,
             index: int = None) -> object:
        if full_name_override is not None:
            full_name = full_name_override
        elif hasattr(root, 'full_name') and root.full_name:
            full_name = root.full_name
        else:
            full_name = ''

        if index is not None:
            full_name = '{}{}{}'.format(full_name, '.' if full_name else '',
                                        index)

        full_name = '{}{}{}'.format(full_name, '.' if full_name else '', name)

        if len(args) > 1:
            raise InternalError(
                'Called with more than two arguments for key {}.'.format(
                    full_name))

        try:
            value = reduce(operator.getitem, name.split('.'), root)
            if types is not None and not isinstance(value, types):
                raise TypeError(
                    'Config value {} has wrong type {}, expected {}.'.format(
                        full_name, type(value), types))
            if check_func is not None and not check_func(value):
                if check_message is None:
                    raise ConfigurationError(
                        'Config option {} has the right type but the supplied value is invalid.'
                        .format(full_name))
                else:
                    raise ConfigurationError(
                        'Config option {} is invalid: {}.'.format(
                            full_name, check_message))
            if isinstance(value, dict):
                value = ConfigDict(value)
                value.full_name = full_name
            elif isinstance(value, list):
                value = ConfigList(value)
                value.full_name = full_name
            return value
        except KeyError:
            if len(args) == 1:
                return args[0]
            else:
                if types and isinstance({}, types):
                    raise KeyError('Config section {} is missing.'.format(
                        full_name)) from None
                else:
                    raise KeyError('Config option {} is missing.'.format(
                        full_name)) from None
Example #7
 def process_bind_param(self, value, dialect):
     if value is None:
         return None
     elif isinstance(value, int):
         return value
     elif isinstance(value, str):
         try:
             return int(value)
         except ValueError:
             raise InternalError(
                 'Supplied string value "{}" does not represent an integer in VersionUidType.process_bind_param.'
                 .format(value)) from None
     elif isinstance(value, VersionUid):
         return value.int
     else:
         raise InternalError(
             'Unexpected type {} for value in VersionUidType.process_bind_param'
             .format(type(value)))
Example #8
 def uid(self, uid):
     if isinstance(uid, DereferencedBlockUid):
         self._uid = uid
     elif isinstance(uid, BlockUid):
         self._uid = uid.deref()
     else:
         raise InternalError(
             'Unexpected type {} for uid in DereferencedBlockUid.uid.setter'
             .format(type(uid)))
Example #9
    def add_digest(self, dict_data: dict) -> None:
        if not isinstance(dict_data, dict):
            raise InternalError(
                f'dict_data must be of type dict, but is of type {type(dict_data)}'
            )

        dict_data[self._hmac_key] = {
            self._ALGORITHM_KEY: self._HASH_NAME,
            self._DIGEST_KEY: self._calculate_digest(dict_data)
        }
Example #10
 def _get_validator(self, *, module: str, version: semantic_version.Version) -> Validator:
     name = self._schema_name(module, version)
     schema = self._resolve_schema(name=name)
     try:
         validator = Config._Validator(schema)
     except SchemaError as exception:
         logger.error('Schema {} validation errors:'.format(name))
         self._output_validation_errors(exception.args[0])
         raise InternalError('Schema {} is invalid.'.format(name)) from exception
     return validator
Example #11
    def write(self, block: Union[DereferencedBlock, Block],
              data: bytes) -> None:
        assert self._rbd_image is not None

        if len(self._write_queue) > self._max_write_queue_len:
            raise InternalError(
                'The write queue is full, write_get_completed not called often enough.'
            )

        self._write_queue.appendleft((block.deref(), data))
        self._submit_aio_writes()
Example #12
 def update_lock(self, *, lock_name=GLOBAL_LOCK, reason=None):
     try:
         lock = self._session.query(Lock).filter_by(
             host=self._host, lock_name=lock_name,
             process_id=self._uuid).first()
         if not lock:
             raise InternalError(
                 'Lock {} isn\'t held by this instance or doesn\'t exist.'.
                 format(lock_name))
         lock.reason = reason
         self._session.commit()
     except:
         self._session.rollback()
         raise
Example #13
    def unlock(self, *, lock_name=GLOBAL_LOCK):
        if lock_name not in self._locks:
            raise InternalError(
                'Attempt to release lock "{}" even though it isn\'t held'.
                format(lock_name))

        lock = self._locks[lock_name]
        try:
            self._session.delete(lock)
            self._session.commit()
        except:
            self._session.rollback()
            raise
        else:
            del self._locks[lock_name]
Example #14
 def _resolve_schema(self, *, name: str) -> Dict:
     try:
         child = self._schema_registry[name]
     except KeyError:
         raise InternalError('Schema for module {} is missing.'.format(name))
     result: Dict = {}
     if self._PARENTS_KEY in child:
         parent_names = child[self._PARENTS_KEY]
         for parent_name in parent_names:
             parent = self._resolve_schema(name=parent_name)
             self._merge_dicts(result, parent)
     result = self._merge_dicts(result, child)
     if self._PARENTS_KEY in result:
         del result[self._PARENTS_KEY]
     logger.debug('Resolved schema for {}: {}.'.format(name, result))
     return result
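
_resolve_schema relies on self._merge_dicts, which is not shown in this listing. Below is a minimal sketch of a recursive merge with the semantics the code above seems to expect (child values override parent values, nested dicts are merged in place); the exact override rules are an assumption:

def merge_dicts(result: dict, other: dict) -> dict:
    for key, value in other.items():
        if isinstance(value, dict) and isinstance(result.get(key), dict):
            merge_dicts(result[key], value)  # merge nested sections recursively
        else:
            result[key] = value              # scalars, lists and new keys override
    return result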
Example #15
 def __init__(self) -> None:
     raise InternalError('StorageFactory constructor called.')
Example #16
    def __init__(self, config):
        self.encryption = {}
        self.compression = {}
        self.active_encryption = None
        self.active_compression = None

        encryption_modules = config.get('dataBackend.encryption',
                                        None,
                                        types=list)
        if encryption_modules is not None:
            for encryption_module_dict in encryption_modules:
                type = config.get_from_dict(encryption_module_dict,
                                            'type',
                                            types=str)
                identifier = config.get_from_dict(encryption_module_dict,
                                                  'identifier',
                                                  types=str)
                materials = config.get_from_dict(encryption_module_dict,
                                                 'materials',
                                                 types=dict)
                try:
                    encryption_module = importlib.import_module('{}.{}'.format(
                        self._ENCRYPTION_PACKAGE_PREFIX, type))
                except ImportError:
                    raise ConfigurationError(
                        'Module file {}.{} not found or related import error.'.
                        format(self._ENCRYPTION_PACKAGE_PREFIX, type))
                else:
                    if type != encryption_module.Encryption.NAME:
                        raise InternalError(
                            'Encryption module type and name don\'t agree ({} != {}).'
                            .format(type, encryption_module.Encryption.NAME))

                    self.encryption[identifier] = encryption_module.Encryption(
                        identifier=identifier, materials=materials)

        active_encryption = config.get(
            'dataBackend.{}.activeEncryption'.format(self.NAME),
            None,
            types=str)
        if active_encryption is not None:
            if self.encryption and active_encryption in self.encryption:
                logger.info(
                    'Encryption is enabled for the {} data backend.'.format(
                        self.NAME))
                self.active_encryption = self.encryption[active_encryption]
            else:
                raise ConfigurationError(
                    'Encryption identifier {} is unknown.'.format(
                        active_encryption))

        compression_modules = config.get('dataBackend.compression',
                                         None,
                                         types=list)
        if compression_modules is not None:
            for compression_module_dict in compression_modules:
                type = config.get_from_dict(compression_module_dict,
                                            'type',
                                            types=str)
                materials = config.get_from_dict(compression_module_dict,
                                                 'materials',
                                                 None,
                                                 types=dict)
                try:
                    compression_module = importlib.import_module(
                        '{}.{}'.format(self._COMPRESSION_PACKAGE_PREFIX, type))
                except ImportError:
                    raise ConfigurationError(
                        'Module file {}.{} not found or related import error.'.
                        format(self._COMPRESSION_PACKAGE_PREFIX, type))
                else:
                    if type != compression_module.Compression.NAME:
                        raise InternalError(
                            'Compression module type and name don\'t agree ({} != {}).'
                            .format(type, compression_module.Compression.NAME))

                    self.compression[type] = compression_module.Compression(
                        materials=materials)

        active_compression = config.get(
            'dataBackend.{}.activeCompression'.format(self.NAME),
            None,
            types=str)
        if active_compression is not None:
            if self.compression and active_compression in self.compression:
                logger.info(
                    'Compression is enabled for the {} data backend.'.format(
                        self.NAME))
                self.active_compression = self.compression[active_compression]
            else:
                raise ConfigurationError(
                    'Compression type {} is unknown.'.format(
                        active_compression))

        simultaneous_writes = config.get('dataBackend.simultaneousWrites',
                                         types=int)
        simultaneous_reads = config.get('dataBackend.simultaneousReads',
                                        types=int)
        bandwidth_read = config.get('dataBackend.bandwidthRead', types=int)
        bandwidth_write = config.get('dataBackend.bandwidthWrite', types=int)

        self._consistency_check_writes = config.get(
            'dataBackend.consistencyCheckWrites',
            False,
            types=bool)

        self._compression_statistics = {
            'objects_considered': 0,
            'objects_compressed': 0,
            'data_in': 0,
            'data_out': 0,
            'data_in_compression': 0,
            'data_out_compression': 0
        }

        self.read_throttling = TokenBucket()
        self.read_throttling.set_rate(bandwidth_read)  # 0 disables throttling
        self.write_throttling = TokenBucket()
        self.write_throttling.set_rate(
            bandwidth_write)  # 0 disables throttling

        self._read_executor = ThreadPoolExecutor(
            max_workers=simultaneous_reads,
            thread_name_prefix='DataBackend-Reader')
        self._read_futures = []
        self._read_semaphore = BoundedSemaphore(simultaneous_reads +
                                                self.READ_QUEUE_LENGTH)

        self._write_executor = ThreadPoolExecutor(
            max_workers=simultaneous_writes,
            thread_name_prefix='DataBackend-Writer')
        self._write_futures = []
        self._write_semaphore = BoundedSemaphore(simultaneous_writes +
                                                 self.WRITE_QUEUE_LENGTH)
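
The constructor above loads encryption and compression modules dynamically with importlib.import_module and cross-checks the configured type against the module's NAME attribute. A stripped-down sketch of that pattern; the package prefix, the Encryption.NAME convention and the exception classes are taken from the example, everything else is illustrative:

import importlib

def load_encryption_class(package_prefix: str, type_: str):
    try:
        module = importlib.import_module('{}.{}'.format(package_prefix, type_))
    except ImportError as exception:
        raise ConfigurationError(
            'Module file {}.{} not found or related import error.'.format(
                package_prefix, type_)) from exception
    # The configured type must match the module's self-declared name.
    if type_ != module.Encryption.NAME:
        raise InternalError('Encryption module type and name don\'t agree ({} != {}).'.format(
            type_, module.Encryption.NAME))
    return module.Encryption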
Example #17
def main():
    if sys.hexversion < 0x030600F0:
        raise InternalError('Benji only supports Python 3.6 or above.')

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        allow_abbrev=False)

    parser.add_argument('-c',
                        '--config-file',
                        default=None,
                        type=str,
                        help='Specify a non-default configuration file')
    parser.add_argument('-m',
                        '--machine-output',
                        action='store_true',
                        default=False,
                        help='Enable machine-readable JSON output')
    parser.add_argument(
        '--log-level',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
        default='INFO',
        help='Only log messages of this level or above on the console')
    parser.add_argument('--no-color',
                        action='store_true',
                        default=False,
                        help='Disable colorization of console logging')

    subparsers_root = parser.add_subparsers(title='commands')

    # BACKUP
    p = subparsers_root.add_parser('backup', help='Perform a backup')
    p.add_argument('-s',
                   '--snapshot-name',
                   default='',
                   help='Snapshot name (e.g. the name of the RBD snapshot)')
    p.add_argument('-r',
                   '--rbd-hints',
                   default=None,
                   help='Hints in rbd diff JSON format')
    p.add_argument('-f',
                   '--base-version',
                   dest='base_version_uid',
                   default=None,
                   help='Base version UID')
    p.add_argument('-b',
                   '--block-size',
                   type=int,
                   default=None,
                   help='Block size in bytes')
    p.add_argument('-l',
                   '--label',
                   action='append',
                   dest='labels',
                   metavar='label',
                   default=None,
                   help='Labels for this version (can be repeated)')
    p.add_argument(
        '-S',
        '--storage',
        default='',
        help='Destination storage (if unspecified the default is used)')
    p.add_argument('source', help='Source URL')
    p.add_argument('version_name',
                   help='Backup version name (e.g. the hostname)')
    p.set_defaults(func='backup')

    # BATCH-DEEP-SCRUB
    p = subparsers_root.add_parser(
        'batch-deep-scrub',
        help='Check data and metadata integrity of multiple versions at once',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('-P',
                   '--version-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of versions')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='batch_deep_scrub')

    # BATCH-SCRUB
    p = subparsers_root.add_parser(
        'batch-scrub',
        help=
        'Check block existence and metadata integrity of multiple versions at once',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('-P',
                   '--version-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of versions')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='batch_scrub')

    # CLEANUP
    p = subparsers_root.add_parser('cleanup',
                                   help='Clean up no longer referenced blocks')
    p.add_argument('--override-lock',
                   action='store_true',
                   help='Override and release any held lock (dangerous)')
    p.set_defaults(func='cleanup')

    # COMPLETION
    p = subparsers_root.add_parser('completion',
                                   help='Emit autocompletion script')
    p.add_argument('shell', choices=['bash', 'tcsh'], help='Shell')
    p.set_defaults(func='completion')

    # DATABASE-INIT
    p = subparsers_root.add_parser(
        'database-init',
        help='Initialize the database (will not delete existing tables or data)'
    )
    p.set_defaults(func='database_init')

    # DATABASE-MIGRATE
    p = subparsers_root.add_parser(
        'database-migrate',
        help='Migrate an existing database to a new schema revision')
    p.set_defaults(func='database_migrate')

    # DEEP-SCRUB
    p = subparsers_root.add_parser(
        'deep-scrub',
        help='Check a version\'s data and metadata integrity',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-s',
                   '--source',
                   default=None,
                   help='Additionally compare version against source URL')
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='deep_scrub')

    # ENFORCE
    p = subparsers_root.add_parser('enforce',
                                   help="Enforce a retention policy ")
    p.add_argument('--dry-run',
                   action='store_true',
                   help='Only show which versions would be removed')
    p.add_argument('-k',
                   '--keep-metadata-backup',
                   action='store_true',
                   help='Keep version metadata backup')
    p.add_argument('-g',
                   '--group_label',
                   default=None,
                   help='Label to find related versions to remove')
    p.add_argument('rules_spec', help='Retention rules specification')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.set_defaults(func='enforce_retention_policy')

    # LABEL
    p = subparsers_root.add_parser('label', help='Add labels to a version')
    p.add_argument('version_uid')
    p.add_argument('labels', nargs='+')
    p.set_defaults(func='label')

    # LS
    p = subparsers_root.add_parser('ls', help='List versions')
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help='Version filter expression')
    p.add_argument('-l',
                   '--include-labels',
                   action='store_true',
                   help='Include labels in output')
    p.add_argument('-s',
                   '--include-stats',
                   action='store_true',
                   help='Include statistics in output')
    p.set_defaults(func='ls')

    # METADATA-BACKUP
    p = subparsers_root.add_parser(
        'metadata-backup', help='Back up the metadata of one or more versions')
    p.add_argument('filter_expression', help="Version filter expression")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite existing metadata backups')
    p.set_defaults(func='metadata_backup')

    # METADATA EXPORT
    p = subparsers_root.add_parser(
        'metadata-export',
        help=
        'Export the metadata of one or more versions to a file or standard output'
    )
    p.add_argument('filter_expression',
                   nargs='?',
                   default=None,
                   help="Version filter expression")
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite an existing output file')
    p.add_argument('-o',
                   '--output-file',
                   default=None,
                   help='Output file (standard output if missing)')
    p.set_defaults(func='metadata_export')

    # METADATA-IMPORT
    p = subparsers_root.add_parser(
        'metadata-import',
        help=
        'Import the metadata of one or more versions from a file or standard input'
    )
    p.add_argument('-i',
                   '--input-file',
                   default=None,
                   help='Input file (standard input if missing)')
    p.set_defaults(func='metadata_import')

    # METADATA-LS
    p = subparsers_root.add_parser('metadata-ls',
                                   help='List the version metadata backup')
    p.add_argument('-S',
                   '--storage',
                   default=None,
                   help='Source storage (if unspecified the default is used)')
    p.set_defaults(func='metadata_ls')

    # METADATA-RESTORE
    p = subparsers_root.add_parser(
        'metadata-restore',
        help='Restore the metadata of one or more versions')
    p.add_argument('-S',
                   '--storage',
                   default=None,
                   help='Source storage (if unspecified the default is used)')
    p.add_argument('version_uids',
                   metavar='VERSION_UID',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='metadata_restore')

    # NBD
    p = subparsers_root.add_parser(
        'nbd',
        help='Start an NBD server',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-a',
                   '--bind-address',
                   default='127.0.0.1',
                   help='Bind to the specified IP address')
    p.add_argument('-p',
                   '--bind-port',
                   default=10809,
                   help='Bind to the specified port')
    p.add_argument('-r',
                   '--read-only',
                   action='store_true',
                   default=False,
                   help='NBD device is read-only')
    p.set_defaults(func='nbd')

    # PROTECT
    p = subparsers_root.add_parser('protect',
                                   help='Protect one or more versions')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help="Version UID")
    p.set_defaults(func='protect')

    # RESTORE
    p = subparsers_root.add_parser('restore', help='Restore a backup')
    p.add_argument('-s',
                   '--sparse',
                   action='store_true',
                   help='Restore only existing blocks')
    p.add_argument('-f',
                   '--force',
                   action='store_true',
                   help='Overwrite an existing file, device or image')
    p.add_argument('-d',
                   '--database-backend-less',
                   action='store_true',
                   help='Restore without requiring the database backend')
    p.add_argument('version_uid', help='Version UID to restore')
    p.add_argument('destination', help='Destination URL')
    p.set_defaults(func='restore')

    # RM
    p = subparsers_root.add_parser('rm', help='Remove one or more versions')
    p.add_argument(
        '-f',
        '--force',
        action='store_true',
        help='Force removal (overrides protection of recent versions)')
    p.add_argument('-k',
                   '--keep-metadata-backup',
                   action='store_true',
                   help='Keep version metadata backup')
    p.add_argument('--override-lock',
                   action='store_true',
                   help='Override and release any held locks (dangerous)')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help='Version UID')
    p.set_defaults(func='rm')

    # SCRUB
    p = subparsers_root.add_parser(
        'scrub',
        help='Check a version\'s block existence and metadata integrity',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    p.add_argument('-p',
                   '--block-percentage',
                   type=partial(integer_range, 1, 100),
                   default=100,
                   help='Check only a certain percentage of blocks')
    p.add_argument('version_uid', help='Version UID')
    p.set_defaults(func='scrub')

    # STORAGE-STATS
    p = subparsers_root.add_parser('storage-stats',
                                   help='Show storage statistics')
    p.add_argument('storage_name', nargs='?', default=None, help='Storage')
    p.set_defaults(func='storage_stats')

    # UNPROTECT
    p = subparsers_root.add_parser('unprotect',
                                   help='Unprotect one or more versions')
    p.add_argument('version_uids',
                   metavar='version_uid',
                   nargs='+',
                   help='Version UID')
    p.set_defaults(func='unprotect')

    # VERSION-INFO
    p = subparsers_root.add_parser('version-info',
                                   help='Program version information')
    p.set_defaults(func='version_info')

    argcomplete.autocomplete(parser)
    args = parser.parse_args()

    if not hasattr(args, 'func'):
        parser.print_usage()
        sys.exit(os.EX_USAGE)

    if args.func == 'completion':
        completion(args.shell)
        sys.exit(os.EX_OK)

    from benji.config import Config
    from benji.logging import logger, init_logging
    if args.config_file is not None and args.config_file != '':
        try:
            with open(args.config_file, 'r', encoding='utf-8') as f:
                cfg = f.read()
        except FileNotFoundError:
            logger.error('File {} not found.'.format(args.config_file))
            sys.exit(os.EX_USAGE)
        config = Config(ad_hoc_config=cfg)
    else:
        config = Config()

    init_logging(config.get('logFile', types=(str, type(None))),
                 console_level=args.log_level,
                 console_formatter='console-plain'
                 if args.no_color else 'console-colored')

    if sys.hexversion < 0x030604F0:
        logger.warning(
            'The installed Python version will use excessive amounts of memory when used with Benji. Upgrade Python to at least 3.6.4.'
        )

    import benji.commands
    commands = benji.commands.Commands(args.machine_output, config)
    func = getattr(commands, args.func)

    # Pass over to function
    func_args = dict(args._get_kwargs())
    del func_args['config_file']
    del func_args['func']
    del func_args['log_level']
    del func_args['machine_output']
    del func_args['no_color']

    # From most specific to least specific
    exception_mappings = [
        _ExceptionMapping(exception=benji.exception.UsageError,
                          exit_code=os.EX_USAGE),
        _ExceptionMapping(exception=benji.exception.AlreadyLocked,
                          exit_code=os.EX_NOPERM),
        _ExceptionMapping(exception=benji.exception.InternalError,
                          exit_code=os.EX_SOFTWARE),
        _ExceptionMapping(exception=benji.exception.ConfigurationError,
                          exit_code=os.EX_CONFIG),
        _ExceptionMapping(exception=benji.exception.InputDataError,
                          exit_code=os.EX_DATAERR),
        _ExceptionMapping(exception=benji.exception.ScrubbingError,
                          exit_code=os.EX_DATAERR),
        _ExceptionMapping(exception=PermissionError, exit_code=os.EX_NOPERM),
        _ExceptionMapping(exception=FileExistsError,
                          exit_code=os.EX_CANTCREAT),
        _ExceptionMapping(exception=FileNotFoundError,
                          exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=EOFError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=IOError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=OSError, exit_code=os.EX_OSERR),
        _ExceptionMapping(exception=ConnectionError, exit_code=os.EX_IOERR),
        _ExceptionMapping(exception=LookupError, exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=KeyboardInterrupt,
                          exit_code=os.EX_NOINPUT),
        _ExceptionMapping(exception=BaseException, exit_code=os.EX_SOFTWARE),
    ]

    try:
        logger.debug('commands.{0}(**{1!r})'.format(args.func, func_args))
        func(**func_args)
        sys.exit(os.EX_OK)
    except SystemExit:
        raise
    except BaseException as exception:
        for case in exception_mappings:
            if isinstance(exception, case.exception):
                message = str(exception)
                if message:
                    message = '{}: {}'.format(exception.__class__.__name__,
                                              message)
                else:
                    message = '{} exception occurred.'.format(
                        exception.__class__.__name__)
                logger.debug(message, exc_info=True)
                logger.error(message)
                sys.exit(case.exit_code)
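
main() maps exceptions to exit codes by walking an ordered list from the most specific to the least specific exception class; the first matching isinstance check wins. A tiny standalone sketch of that dispatch (the mapping entries below are illustrative):

import os
from collections import namedtuple

ExceptionMapping = namedtuple('ExceptionMapping', ['exception', 'exit_code'])

mappings = [
    ExceptionMapping(exception=FileNotFoundError, exit_code=os.EX_NOINPUT),
    ExceptionMapping(exception=OSError, exit_code=os.EX_OSERR),            # more general, so listed later
    ExceptionMapping(exception=BaseException, exit_code=os.EX_SOFTWARE),   # catch-all
]

def exit_code_for(exception: BaseException) -> int:
    for case in mappings:
        if isinstance(exception, case.exception):
            return case.exit_code
    return os.EX_SOFTWARE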
Example #18
 def __init__(self) -> None:
     raise InternalError('TransformFactory constructor called.')