Example #1
    def __init__(self, *, identifier, materials):
        master_key = Config.get_from_dict(materials,
                                          'masterKey',
                                          None,
                                          types=bytes)
        if master_key is not None:
            if len(master_key) != 32:
                raise ValueError(
                    'Key masterKey has the wrong length. It must be 32 bytes long.'
                )

            self._master_key = master_key
        else:
            kdfSalt = Config.get_from_dict(materials, 'kdfSalt', types=bytes)
            kdfIterations = Config.get_from_dict(materials,
                                                 'kdfIterations',
                                                 types=int)
            password = Config.get_from_dict(materials, 'password', types=str)

            self._master_key = derive_key(salt=kdfSalt,
                                          iterations=kdfIterations,
                                          key_length=32,
                                          password=password)

        self._identifier = identifier
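
All of the snippets on this page funnel option lookup through Config.get_from_dict. Benji's own implementation is not reproduced here; the following minimal sketch only mirrors the calling convention visible above (a source dict plus a dotted key, an optional positional default, and a types check). It is an illustration of the pattern, not benji's API.

from typing import Any, Tuple, Type, Union

_MISSING = object()


def get_from_dict(source: dict,
                  key: str,
                  default: Any = _MISSING,
                  *,
                  types: Union[Type, Tuple[Type, ...], None] = None) -> Any:
    # Walk the dotted key ('a.b' resolves to source['a']['b']).
    value: Any = source
    for part in key.split('.'):
        if isinstance(value, dict) and part in value:
            value = value[part]
        else:
            if default is _MISSING:
                raise KeyError('Configuration key {} is missing.'.format(key))
            return default
    if types is not None and not isinstance(value, types):
        raise TypeError('Configuration key {} has the wrong type.'.format(key))
    return value


# get_from_dict({'kdf': {'iterations': 65536}}, 'kdf.iterations', types=int)  -> 65536
# get_from_dict({}, 'masterKey', None, types=bytes)                           -> None
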
Example #2
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict) -> None:
        ecc_key_der: str = Config.get_from_dict(module_configuration,
                                                'eccKey',
                                                types=str)
        ecc_curve: Optional[str] = Config.get_from_dict(module_configuration,
                                                        'eccCurve',
                                                        'NIST P-384',
                                                        types=str)

        ecc_key = self._unpack_envelope_key(base64.b64decode(ecc_key_der))

        if ecc_key.curve != ecc_curve:
            raise ValueError(
                f'Key eccKey does not match the eccCurve setting (found: {ecc_key.curve}, expected: {ecc_curve}).'
            )

        self._ecc_key = ecc_key
        self._ecc_curve = ecc_key.curve

        point_q_len = self._ecc_key.pointQ.size_in_bytes()
        if point_q_len < self.AES_KEY_LEN:
            raise ValueError(
                f'Size of point Q is smaller than the AES key length, which reduces security ({point_q_len} < {self.AES_KEY_LEN}).'
            )

        # Note: We don't actually have a "master" AES key, because the key is derived from the ECC key
        # and set before calling the parent's encapsulate/decapsulate method.
        aes_config = module_configuration.copy()
        aes_config['masterKey'] = base64.b64encode(
            b'\x00' * self.AES_KEY_LEN).decode('ascii')
        super().__init__(config=config,
                         name=name,
                         module_configuration=aes_config)
Example #3
File: zstd.py Project: wech71/benji
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration)

        self.level: int = Config.get_from_dict(
            module_configuration,
            'level',
            types=int,
            check_func=lambda v: v >= 1 and v <= zstandard.MAX_COMPRESSION_LEVEL,
            check_message='Option level must be between 1 and {} (inclusive)'.format(
                zstandard.MAX_COMPRESSION_LEVEL))

        dict_data_file: str = Config.get_from_dict(module_configuration,
                                                   'dictDataFile',
                                                   None,
                                                   types=str)
        if dict_data_file:
            with open(dict_data_file, 'rb') as f:
                dict_data_content = f.read()
            self._dict_data = zstandard.ZstdCompressionDict(
                dict_data_content, dict_type=zstandard.DICT_TYPE_FULLDICT)
            self._dict_data.precompute_compress(self.level)
        else:
            self._dict_data = None

        self._local = threading.local()
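
The compress and decompress methods of this module are not part of the snippet. As a hedged sketch, the level and precomputed dictionary configured above would plausibly be used with the python-zstandard API as shown below; the sample dictionary content is made up for illustration.

import zstandard

level = 3
dict_data = zstandard.ZstdCompressionDict(b'shared sample content ' * 64)
dict_data.precompute_compress(level)

compressor = zstandard.ZstdCompressor(level=level, dict_data=dict_data)
decompressor = zstandard.ZstdDecompressor(dict_data=dict_data)

payload = b'block data that benefits from the shared dictionary'
compressed = compressor.compress(payload)
assert decompressor.decompress(compressed) == payload
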
Example #4
File: factory.py Project: wech71/benji
    def _import_modules(cls, config: Config, modules: ConfigList) -> None:
        for index, module_dict in enumerate(modules):
            module = Config.get_from_dict(module_dict,
                                          'module',
                                          types=str,
                                          full_name_override=modules.full_name,
                                          index=index)
            name = Config.get_from_dict(module_dict,
                                        'name',
                                        types=str,
                                        full_name_override=modules.full_name,
                                        index=index)
            configuration = Config.get_from_dict(module_dict,
                                                 'configuration',
                                                 None,
                                                 types=dict,
                                                 full_name_override=modules.full_name,
                                                 index=index)

            if name in cls._modules:
                raise ConfigurationError('Duplicate name "{}" in list {}.'.format(name, modules.full_name))

            module = importlib.import_module('{}.{}'.format(__package__, module))
            try:
                configuration = config.validate(module=module.__name__, config=configuration)
            except ConfigurationError as exception:
                raise ConfigurationError('Configuration for IO {} is invalid.'.format(name)) from exception
            cls._modules[name] = _IOFactoryModule(module=module,
                                                  arguments={
                                                      'config': config,
                                                      'name': name,
                                                      'module_configuration': configuration
                                                  })
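
The factory above only imports the module and records the constructor arguments; instantiation happens later. A generic sketch of the same import-then-instantiate idiom follows; the package, module, and class names are placeholders, not benji's actual layout.

import importlib


def load_plugin(package: str, module_name: str, class_name: str, **arguments):
    # Import <package>.<module_name> and instantiate <class_name> with keyword arguments.
    module = importlib.import_module('{}.{}'.format(package, module_name))
    plugin_class = getattr(module, class_name)
    return plugin_class(**arguments)
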
Example #5
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, path: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         path=path,
                         block_size=block_size)

        ceph_config_file = config.get_from_dict(module_configuration,
                                                'cephConfigFile',
                                                types=str)
        client_identifier = config.get_from_dict(module_configuration,
                                                 'clientIdentifier',
                                                 types=str)
        self._cluster = rados.Rados(conffile=ceph_config_file,
                                    rados_id=client_identifier)
        self._cluster.connect()
        # Build a bitmask of the configured image features by OR'ing them together.
        self._new_image_features = 0
        for feature in config.get_from_dict(module_configuration,
                                            'newImageFeatures',
                                            types=list):
            try:
                self._new_image_features = self._new_image_features | getattr(
                    rbd, feature)
            except AttributeError:
                raise ConfigurationError(
                    '{}: Unknown image feature {}.'.format(
                        module_configuration.full_name, feature))

        self._pool_name = None
        self._image_name = None
        self._snapshot_name = None
Example #6
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration)

        master_key_encoded: Optional[str] = Config.get_from_dict(
            module_configuration, 'masterKey', None, types=str)
        if master_key_encoded is not None:
            master_key = base64.b64decode(master_key_encoded)

            if len(master_key) != 32:
                raise ValueError(
                    'Key masterKey has the wrong length. It must be 32 bytes long and encoded as BASE64.'
                )

            self._master_key = master_key
        else:
            kdf_salt: bytes = base64.b64decode(
                Config.get_from_dict(module_configuration,
                                     'kdfSalt',
                                     types=str))
            kdf_iterations: int = Config.get_from_dict(module_configuration,
                                                       'kdfIterations',
                                                       types=int)
            password: str = Config.get_from_dict(module_configuration,
                                                 'password',
                                                 types=str)

            self._master_key = derive_key(salt=kdf_salt,
                                          iterations=kdf_iterations,
                                          key_length=32,
                                          password=password)
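
derive_key itself is not shown on this page. Below is a minimal sketch of what such a helper could look like, assuming a PBKDF2-HMAC-based derivation; the hash choice is an assumption for illustration and not necessarily what benji uses.

import hashlib


def derive_key(*, salt: bytes, iterations: int, key_length: int, password: str) -> bytes:
    # PBKDF2-HMAC-SHA512 stretched to key_length bytes (hash choice is an assumption).
    return hashlib.pbkdf2_hmac('sha512', password.encode('utf-8'), salt, iterations, dklen=key_length)
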
Example #7
    def __init__(self, *, config: Config, name: str, module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.username or self.parsed_url.password or self.parsed_url.hostname or self.parsed_url.port \
                    or self.parsed_url.params or self.parsed_url.fragment or self.parsed_url.query:
            raise UsageError('The supplied URL {} is invalid.'.format(self.url))

        ceph_config_file = config.get_from_dict(module_configuration, 'cephConfigFile', types=str)
        client_identifier = config.get_from_dict(module_configuration, 'clientIdentifier', types=str)
        self._cluster = rados.Rados(conffile=ceph_config_file, rados_id=client_identifier)
        self._cluster.connect()
        # Build a bitmask of the configured image features by OR'ing them together.
        self._new_image_features = 0
        for feature in config.get_from_dict(module_configuration, 'newImageFeatures', types=list):
            try:
                self._new_image_features = self._new_image_features | getattr(rbd, feature)
            except AttributeError:
                raise ConfigurationError('{}: Unknown image feature {}.'.format(module_configuration.full_name, feature))

        self._pool_name = None
        self._image_name = None
        self._snapshot_name = None

        self._simultaneous_reads = config.get_from_dict(module_configuration, 'simultaneousReads', types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration, 'simultaneousWrites', types=int)
        self._read_executor: Optional[JobExecutor] = None
        self._write_executor: Optional[JobExecutor] = None
Example #8
    def __init__(self, *, config: Config, name: str, storage_id: int, module_configuration: ConfigDict) -> None:
        read_cache_directory = Config.get_from_dict(module_configuration, 'readCache.directory', None, types=str)
        read_cache_maximum_size = Config.get_from_dict(module_configuration, 'readCache.maximumSize', None, types=int)
        read_cache_shards = Config.get_from_dict(module_configuration, 'readCache.shards', None, types=int)

        if read_cache_directory and read_cache_maximum_size:
            os.makedirs(read_cache_directory, exist_ok=True)
            try:
                self._read_cache = FanoutCache(
                    read_cache_directory,
                    size_limit=read_cache_maximum_size,
                    shards=read_cache_shards,
                    eviction_policy='least-frequently-used',
                    statistics=1,
                )
            except Exception:
                logger.warning('Unable to enable disk based read caching. Continuing without it.')
                self._read_cache = None
            else:
                logger.debug('Disk based read caching instantiated (cache size {}, shards {}).'.format(
                    read_cache_maximum_size, read_cache_shards))
        else:
            self._read_cache = None
        self._use_read_cache = True

        # Start the reader and writer threads only after the disk cache has been created, so that they see it.
        super().__init__(config=config, name=name, storage_id=storage_id, module_configuration=module_configuration)
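
FanoutCache comes from the diskcache package. A small usage sketch of the cache configured above follows; the directory, key scheme, and sizes are illustrative only.

from diskcache import FanoutCache

cache = FanoutCache('/tmp/benji-read-cache',
                    shards=4,
                    size_limit=64 * 1024 * 1024,
                    eviction_policy='least-frequently-used',
                    statistics=1)
cache.set('storage-1:object-key', b'block data')    # cache an object after reading it
data = cache.get('storage-1:object-key')            # returns None on a cache miss
hits, misses = cache.stats()                        # available because statistics=1
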
Example #9
def upgrade():
    op.create_table(
        'storages',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_storages')),
        sa.UniqueConstraint('name', name=op.f('uq_storages_name')))

    benji_config = op.get_context().config.attributes.get('benji_config', None)
    if benji_config is not None:
        storages = sa.Table('storages',
                            sa.MetaData(),
                            autoload_with=op.get_bind())
        storages_list = benji_config.get('storages', types=list)
        for index, storage in enumerate(storages_list):
            name = Config.get_from_dict(storage,
                                        'name',
                                        types=str,
                                        index=index)
            storage_id = Config.get_from_dict(storage,
                                              'storageId',
                                              None,
                                              types=int,
                                              index=index)
            op.execute(storages.insert().values(name=name, id=storage_id))

    with op.batch_alter_table('versions', schema=None) as batch_op:
        batch_op.create_foreign_key(
            batch_op.f('fk_versions_storage_id_storages'), 'storages',
            ['storage_id'], ['id'])
    with op.batch_alter_table('deleted_blocks', schema=None) as batch_op:
        batch_op.create_foreign_key(
            batch_op.f('fk_deleted_blocks_storage_id_storages'), 'storages',
            ['storage_id'], ['id'])
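
The migration reads benji_config from Alembic's config.attributes. Passing objects this way is the standard Alembic idiom; here is a hedged sketch of how a caller could provide it. The ini path is illustrative, and benji_config is a placeholder for the application's already loaded Config object.

from alembic import command
from alembic.config import Config as AlembicConfig

benji_config = ...  # placeholder: the application's already loaded benji Config object
alembic_config = AlembicConfig('alembic.ini')  # path is illustrative
alembic_config.attributes['benji_config'] = benji_config
command.upgrade(alembic_config, 'head')
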
Example #10
File: rbdaio.py Project: q3k/benji
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.username or self.parsed_url.password or self.parsed_url.hostname or self.parsed_url.port \
                    or self.parsed_url.params or self.parsed_url.fragment or self.parsed_url.query:
            raise UsageError('The supplied URL {} is invalid.'.format(
                self.url))

        ceph_config_file = config.get_from_dict(module_configuration,
                                                'cephConfigFile',
                                                types=str)
        client_identifier = config.get_from_dict(module_configuration,
                                                 'clientIdentifier',
                                                 types=str)
        self._cluster = rados.Rados(conffile=ceph_config_file,
                                    rados_id=client_identifier)
        self._cluster.connect()
        # Build a bitmask of the configured image features by OR'ing them together.
        self._new_image_features = 0
        for feature in config.get_from_dict(module_configuration,
                                            'newImageFeatures',
                                            types=list):
            try:
                self._new_image_features = self._new_image_features | getattr(
                    rbd, feature)
            except AttributeError:
                raise ConfigurationError(
                    '{}: Unknown image feature {}.'.format(
                        module_configuration.full_name, feature))

        self._pool_name = None
        self._image_name = None
        self._snapshot_name = None
        self._rbd_image = None

        self._simultaneous_reads = config.get_from_dict(module_configuration,
                                                        'simultaneousReads',
                                                        types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration,
                                                         'simultaneousWrites',
                                                         types=int)
        self._read_queue: Deque[DereferencedBlock] = deque()
        self._write_queue: Deque[Tuple[DereferencedBlock, bytes]] = deque()
        self._outstanding_aio_reads = 0
        self._outstanding_aio_writes = 0
        self._submitted_aio_writes = threading.BoundedSemaphore(
            self._simultaneous_writes)
        self._read_completion_queue: queue.Queue[Tuple[rbd.Completion, float,
                                                       float,
                                                       DereferencedBlock,
                                                       bytes]] = queue.Queue()
        self._write_completion_queue: queue.Queue[Tuple[
            rbd.Completion, float, float, DereferencedBlock]] = queue.Queue()
Example #11
    def __init__(self, *, config: Config, name: str, module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(
            config=config, name=name, module_configuration=module_configuration, url=url, block_size=block_size)
        self._simultaneous_reads = config.get_from_dict(module_configuration, 'simultaneousReads', types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration, 'simultaneousWrites', types=int)
        self._read_executor: Optional[JobExecutor] = None
        self._write_executor: Optional[JobExecutor] = None
Example #12
    def _import_modules(cls, config: Config, modules: ConfigList) -> None:
        for index, module_dict in enumerate(modules):
            module = Config.get_from_dict(module_dict,
                                          'module',
                                          types=str,
                                          full_name_override=modules.full_name,
                                          index=index)
            name = Config.get_from_dict(module_dict,
                                        'name',
                                        types=str,
                                        full_name_override=modules.full_name,
                                        index=index)
            storage_id = Config.get_from_dict(
                module_dict,
                'storageId',
                types=int,
                full_name_override=modules.full_name,
                index=index)
            configuration = Config.get_from_dict(
                module_dict,
                'configuration',
                None,
                types=dict,
                full_name_override=modules.full_name,
                index=index)

            if name in cls._name_to_storage_id:
                raise ConfigurationError(
                    'Duplicate name "{}" in list {}.'.format(
                        name, modules.full_name))

            if storage_id in cls._storage_id_to_name:
                raise ConfigurationError('Duplicate id {} in list {}.'.format(
                    storage_id, modules.full_name))

            module = importlib.import_module('{}.{}.{}'.format(
                __package__, cls._MODULE, module))
            try:
                configuration = config.validate(module=module.__name__,
                                                config=configuration)
            except ConfigurationError as exception:
                raise ConfigurationError(
                    'Configuration for storage {} is invalid.'.format(
                        name)) from exception
            cls._modules[storage_id] = _StorageFactoryModule(
                module=module,
                arguments={
                    'config': config,
                    'name': name,
                    'storage_id': storage_id,
                    'module_configuration': configuration
                })
            cls._name_to_storage_id[name] = storage_id
            cls._storage_id_to_name[storage_id] = name
Example #13
    def test_lists(self):
        config = Config(ad_hoc_config=self.CONFIG)
        ios = config.get('ios', types=list)
        self.assertIsInstance(Config.get_from_dict(ios[0], 'configuration.newImageFeatures'), ConfigList)
        self.assertRaises(TypeError,
                          lambda: Config.get_from_dict(ios[0], 'configuration.newImageFeatures', types=int))
        self.assertEqual('RBD_FEATURE_EXCLUSIVE_LOCK',
                         Config.get_from_dict(ios[0], 'configuration.newImageFeatures')[1])
Example #14
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, path: str,
                 block_size: int) -> None:
        self._name = name
        self._path = path
        self._block_size = block_size
        self._simultaneous_reads = config.get_from_dict(module_configuration,
                                                        'simultaneousReads',
                                                        types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration,
                                                         'simultaneousWrites',
                                                         types=int)
        self._read_executor: Optional[JobExecutor] = None
        self._write_executor: Optional[JobExecutor] = None
Example #15
File: file.py Project: wech71/benji
    def __init__(self, *, config: Config, name: str, module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.username or self.parsed_url.password or self.parsed_url.hostname or self.parsed_url.port \
                or self.parsed_url.params or self.parsed_url.fragment or self.parsed_url.query:
            raise UsageError('The supplied URL {} is invalid.'.format(self.url))

        self._simultaneous_reads = config.get_from_dict(module_configuration, 'simultaneousReads', types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration, 'simultaneousWrites', types=int)
        self._read_executor: Optional[JobExecutor] = None
        self._write_executor: Optional[JobExecutor] = None
Example #16
File: zstd.py Project: jubalh/backy2
    def __init__(self, *, materials):
        self.level = Config.get_from_dict(
            materials,
            'level',
            self.DEFAULT_LEVEL,
            types=int,
            check_func=lambda v: v >= 1 and v <= zstandard.MAX_COMPRESSION_LEVEL,
            check_message='Option level must be between 1 and {} (inclusive)'.format(
                zstandard.MAX_COMPRESSION_LEVEL))

        self.compressors = {}
        self.decompressors = {}
Example #17
File: iscsi.py Project: q3k/benji
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.params or self.parsed_url.fragment:
            raise UsageError('The supplied URL {} is invalid.'.format(
                self.url))

        self._read_queue: List[DereferencedBlock] = []
        self._outstanding_write: Optional[Tuple[DereferencedBlock,
                                                bytes]] = None

        self._username = config.get_from_dict(module_configuration,
                                              'username',
                                              None,
                                              types=str)
        self._password = config.get_from_dict(module_configuration,
                                              'password',
                                              None,
                                              types=str)
        self._target_username = config.get_from_dict(module_configuration,
                                                     'targetUsername',
                                                     None,
                                                     types=str)
        self._target_password = config.get_from_dict(module_configuration,
                                                     'targetPassword',
                                                     None,
                                                     types=str)
        header_digest = config.get_from_dict(module_configuration,
                                             'headerDigest',
                                             types=str)
        header_digest_attr_name = 'ISCSI_HEADER_DIGEST_{}'.format(
            header_digest)
        if hasattr(libiscsi, header_digest_attr_name):
            self._header_digest = getattr(libiscsi, header_digest_attr_name)
        else:
            raise ConfigurationError(
                'Unknown header digest setting {}.'.format(header_digest))
        self._initiator_name = config.get_from_dict(module_configuration,
                                                    'initiatorName',
                                                    types=str)
        self._timeout = config.get_from_dict(module_configuration,
                                             'timeout',
                                             None,
                                             types=int)

        self._iscsi_context: Any = None
Example #18
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict):
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration)

        if os.sep != '/':
            raise RuntimeError(
                'This module only works with / as a path separator.')

        self.path = Config.get_from_dict(module_configuration,
                                         'path',
                                         types=str)

        # Ensure that self.path ends in os.path.sep
        if not self.path.endswith(os.path.sep):
            self.path = os.path.join(self.path, '')
Example #19
    def __init__(self, *, config: Config, name: str, storage_id: int,
                 module_configuration: ConfigDict):
        super().__init__(config=config,
                         name=name,
                         storage_id=storage_id,
                         module_configuration=module_configuration)

        if os.sep != '/':
            raise RuntimeError(
                'This module only works with / as a path separator.')

        self.path = Config.get_from_dict(module_configuration,
                                         'path',
                                         types=str)

        # Ensure that self.path ends in a slash
        if not self.path.endswith('/'):
            self.path = self.path + '/'
Example #20
    def test_get_with_dict(self):
        self.assertEqual('Hi there!',
                         Config.get_from_dict({'a': {'b': 'Hi there!'}}, 'a.b', types=str))
Example #21
    def __init__(self, *, config: Config, name: str, storage_id: int, module_configuration: ConfigDict) -> None:
        self._name = name
        self._storage_id = storage_id
        self._active_transforms: List[TransformBase] = []

        active_transforms = Config.get_from_dict(module_configuration, 'activeTransforms', None, types=list)
        if active_transforms is not None:
            for transform in active_transforms:
                self._active_transforms.append(TransformFactory.get_by_name(transform))
            logger.info('Active transforms for storage {}: {}.'.format(
                name, ', '.join(
                    ['{} ({})'.format(transform.name, transform.module) for transform in self._active_transforms])))

        simultaneous_writes = Config.get_from_dict(module_configuration, 'simultaneousWrites', types=int)
        simultaneous_reads = Config.get_from_dict(module_configuration, 'simultaneousReads', types=int)
        simultaneous_removals = Config.get_from_dict(module_configuration, 'simultaneousRemovals', types=int)
        bandwidth_read = Config.get_from_dict(module_configuration, 'bandwidthRead', types=int)
        bandwidth_write = Config.get_from_dict(module_configuration, 'bandwidthWrite', types=int)

        self._consistency_check_writes = Config.get_from_dict(
            module_configuration, 'consistencyCheckWrites', False, types=bool)

        hmac_key_encoded = Config.get_from_dict(module_configuration, 'hmac.key', None, types=str)
        hmac_key: Optional[bytes] = None
        if hmac_key_encoded is None:
            hmac_password = Config.get_from_dict(module_configuration, 'hmac.password', None, types=str)
            if hmac_password is not None:
                hmac_kdf_salt = base64.b64decode(Config.get_from_dict(module_configuration, 'hmac.kdfSalt', types=str))
                hmac_kdf_iterations = Config.get_from_dict(module_configuration, 'hmac.kdfIterations', types=int)
                hmac_key = derive_key(
                    salt=hmac_kdf_salt, iterations=hmac_kdf_iterations, key_length=32, password=hmac_password)
        else:
            hmac_key = base64.b64decode(hmac_key_encoded)
        self._dict_hmac: Optional[DictHMAC] = None
        if hmac_key is not None:
            logger.info('Enabling HMAC object metadata integrity protection for storage {}.'.format(name))
            self._dict_hmac = DictHMAC(hmac_key=self._HMAC_KEY, secret_key=hmac_key)

        self.read_throttling = TokenBucket()
        self.read_throttling.set_rate(bandwidth_read)  # 0 disables throttling
        self.write_throttling = TokenBucket()
        self.write_throttling.set_rate(bandwidth_write)  # 0 disables throttling

        self._read_executor = JobExecutor(name='Storage-Read', workers=simultaneous_reads, blocking_submit=False)
        self._write_executor = JobExecutor(name='Storage-Write', workers=simultaneous_writes, blocking_submit=True)
        self._remove_executor = JobExecutor(name='Storage-Remove', workers=simultaneous_removals, blocking_submit=True)
Example #22
File: s3.py Project: elemental-lf/benji
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict):
        aws_access_key_id = Config.get_from_dict(module_configuration,
                                                 'awsAccessKeyId',
                                                 None,
                                                 types=str)
        if aws_access_key_id is None:
            aws_access_key_id_file = Config.get_from_dict(module_configuration,
                                                          'awsAccessKeyIdFile',
                                                          types=str)
            with open(aws_access_key_id_file, 'r') as f:
                aws_access_key_id = f.read().rstrip()
        aws_secret_access_key = Config.get_from_dict(module_configuration,
                                                     'awsSecretAccessKey',
                                                     None,
                                                     types=str)
        if aws_secret_access_key is None:
            aws_secret_access_key_file = Config.get_from_dict(
                module_configuration, 'awsSecretAccessKeyFile', types=str)
            with open(aws_secret_access_key_file, 'r') as f:
                aws_secret_access_key = f.read().rstrip()
        region_name = Config.get_from_dict(module_configuration,
                                           'regionName',
                                           None,
                                           types=str)
        endpoint_url = Config.get_from_dict(module_configuration,
                                            'endpointUrl',
                                            None,
                                            types=str)
        use_ssl = Config.get_from_dict(module_configuration,
                                       'useSsl',
                                       None,
                                       types=bool)
        addressing_style = Config.get_from_dict(module_configuration,
                                                'addressingStyle',
                                                None,
                                                types=str)
        signature_version = Config.get_from_dict(module_configuration,
                                                 'signatureVersion',
                                                 None,
                                                 types=str)
        connect_timeout = Config.get_from_dict(module_configuration,
                                               'connectTimeout',
                                               types=float)
        read_timeout = Config.get_from_dict(module_configuration,
                                            'readTimeout',
                                            types=float)
        max_attempts = Config.get_from_dict(module_configuration,
                                            'maxAttempts',
                                            types=int)

        self._bucket_name = Config.get_from_dict(module_configuration,
                                                 'bucketName',
                                                 types=str)
        self._storage_class = Config.get_from_dict(module_configuration,
                                                   'storageClass',
                                                   None,
                                                   types=str)
        self._disable_encoding_type = Config.get_from_dict(
            module_configuration, 'disableEncodingType', types=bool)

        self._resource_config = {
            'aws_access_key_id': aws_access_key_id,
            'aws_secret_access_key': aws_secret_access_key,
        }

        if region_name:
            self._resource_config['region_name'] = region_name

        if endpoint_url:
            self._resource_config['endpoint_url'] = endpoint_url

        if use_ssl:
            self._resource_config['use_ssl'] = use_ssl

        resource_config = {}
        if addressing_style:
            resource_config['s3'] = {'addressing_style': addressing_style}

        if signature_version:
            resource_config['signature_version'] = signature_version

        resource_config['connect_timeout'] = connect_timeout
        resource_config['read_timeout'] = read_timeout
        # See https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html
        resource_config['retries'] = {
            'mode': 'standard',
            'max_attempts': max_attempts,
        }

        self._resource_config['config'] = BotoCoreClientConfig(
            **resource_config)
        self._local = threading.local()
        self._init_connection()
        self._local.bucket = self._local.resource.Bucket(self._bucket_name)

        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration)
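
_init_connection() is not included in this snippet. Judging from the threading.local() usage above, it plausibly builds one boto3 resource per thread from self._resource_config; the following is a hedged sketch of that idea, not benji's actual method.

import boto3


def make_s3_resource(resource_config: dict):
    # One boto3 session/resource per worker thread; resource_config mirrors the dictionary
    # assembled above (credentials, region/endpoint settings and the BotoCoreClientConfig).
    session = boto3.session.Session()
    return session.resource('s3', **resource_config)
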
Example #23
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.username or self.parsed_url.password or self.parsed_url.hostname or self.parsed_url.port \
                    or self.parsed_url.params or self.parsed_url.fragment:
            raise UsageError('The supplied URL {} is invalid.'.format(
                self.url))
        if self.parsed_url.query:
            try:
                extra_ceph_conf = parse_qs(self.parsed_url.query,
                                           keep_blank_values=True,
                                           strict_parsing=True,
                                           errors='strict')
            except (ValueError, UnicodeError) as exception:
                raise UsageError('The supplied URL {} is invalid.'.format(
                    self.url)) from exception

            # parse_qs returns the values as lists; only keep the first appearance of each key in the query string.
            extra_ceph_conf = {
                key: value[0]
                for key, value in extra_ceph_conf.items()
            }
        else:
            extra_ceph_conf = {}

        ceph_config_file = config.get_from_dict(module_configuration,
                                                'cephConfigFile',
                                                types=str)
        if 'client_identifier' in extra_ceph_conf:
            client_identifier = extra_ceph_conf['client_identifier']
            del extra_ceph_conf['client_identifier']
        else:
            client_identifier = config.get_from_dict(module_configuration,
                                                     'clientIdentifier',
                                                     types=str)

        self._cluster = rados.Rados(conffile=ceph_config_file,
                                    rados_id=client_identifier,
                                    conf=extra_ceph_conf)
        self._cluster.connect()
        # Build a bitmask of the configured image features by OR'ing them together.
        self._new_image_features = 0
        for feature in config.get_from_dict(module_configuration,
                                            'newImageFeatures',
                                            types=list):
            try:
                self._new_image_features = self._new_image_features | getattr(
                    rbd, feature)
            except AttributeError:
                raise ConfigurationError(
                    '{}: Unknown image feature {}.'.format(
                        module_configuration.full_name, feature))

        self._pool_name = None
        self._image_name = None
        self._snapshot_name = None
        self._rbd_image = None

        self._simultaneous_reads = config.get_from_dict(module_configuration,
                                                        'simultaneousReads',
                                                        types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration,
                                                         'simultaneousWrites',
                                                         types=int)
        self._read_queue: Deque[DereferencedBlock] = deque()
        self._write_queue: Deque[Tuple[DereferencedBlock, bytes]] = deque()
        self._outstanding_aio_reads = 0
        self._outstanding_aio_writes = 0
        self._aio_write_complete = threading.Event()
        # Set the queue limit to two times the number of simultaneous writes plus one to ensure that there are always
        # enough writes available even when all outstanding aio writes finish at the same time.
        self._max_write_queue_len = 2 * self._simultaneous_writes + 1
        self._read_completion_queue: queue.Queue[Tuple[rbd.Completion, float,
                                                       float,
                                                       DereferencedBlock,
                                                       bytes]] = queue.Queue()
        self._write_completion_queue: queue.Queue[Tuple[
            rbd.Completion, float, float, DereferencedBlock]] = queue.Queue()
Example #24
    def __init__(self, *, config: Config, name: str, storage_id: int, module_configuration: ConfigDict):
        aws_access_key_id = Config.get_from_dict(module_configuration, 'awsAccessKeyId', None, types=str)
        if aws_access_key_id is None:
            aws_access_key_id_file = Config.get_from_dict(module_configuration, 'awsAccessKeyIdFile', types=str)
            with open(aws_access_key_id_file, 'r') as f:
                aws_access_key_id = f.read().rstrip()
        aws_secret_access_key = Config.get_from_dict(module_configuration, 'awsSecretAccessKey', None, types=str)
        if aws_secret_access_key is None:
            aws_secret_access_key_file = Config.get_from_dict(module_configuration, 'awsSecretAccessKeyFile', types=str)
            with open(aws_secret_access_key_file, 'r') as f:
                aws_secret_access_key = f.read().rstrip()
        region_name = Config.get_from_dict(module_configuration, 'regionName', None, types=str)
        endpoint_url = Config.get_from_dict(module_configuration, 'endpointUrl', None, types=str)
        use_ssl = Config.get_from_dict(module_configuration, 'useSsl', None, types=bool)
        addressing_style = Config.get_from_dict(module_configuration, 'addressingStyle', None, types=str)
        signature_version = Config.get_from_dict(module_configuration, 'signatureVersion', None, types=str)

        self._bucket_name = Config.get_from_dict(module_configuration, 'bucketName', types=str)
        self._disable_encoding_type = Config.get_from_dict(module_configuration, 'disableEncodingType', types=bool)

        self._resource_config = {
            'aws_access_key_id': aws_access_key_id,
            'aws_secret_access_key': aws_secret_access_key,
        }

        if region_name:
            self._resource_config['region_name'] = region_name

        if endpoint_url:
            self._resource_config['endpoint_url'] = endpoint_url

        if use_ssl:
            self._resource_config['use_ssl'] = use_ssl

        resource_config = {}
        if addressing_style:
            resource_config['s3'] = {'addressing_style': addressing_style}

        if signature_version:
            resource_config['signature_version'] = signature_version

        self._resource_config['config'] = BotoCoreClientConfig(**resource_config)
        self._local = threading.local()
        self._init_connection()
        self._local.bucket = self._local.resource.Bucket(self._bucket_name)

        super().__init__(config=config, name=name, storage_id=storage_id, module_configuration=module_configuration)
Example #25
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict):
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration)

        account_id = Config.get_from_dict(module_configuration,
                                          'accountId',
                                          None,
                                          types=str)
        if account_id is None:
            account_id_file = Config.get_from_dict(module_configuration,
                                                   'accountIdFile',
                                                   types=str)
            with open(account_id_file, 'r') as f:
                account_id = f.read().rstrip()
        application_key = Config.get_from_dict(module_configuration,
                                               'applicationKey',
                                               None,
                                               types=str)
        if application_key is None:
            application_key_file = Config.get_from_dict(module_configuration,
                                                        'applicationKeyFile',
                                                        types=str)
            with open(application_key_file, 'r') as f:
                application_key = f.read().rstrip()

        bucket_name = Config.get_from_dict(module_configuration,
                                           'bucketName',
                                           types=str)

        account_info_file = Config.get_from_dict(module_configuration,
                                                 'accountInfoFile',
                                                 None,
                                                 types=str)
        if account_info_file is not None:
            account_info = SqliteAccountInfo(file_name=account_info_file)
        else:
            account_info = InMemoryAccountInfo()

        b2.bucket.Bucket.MAX_UPLOAD_ATTEMPTS = Config.get_from_dict(
            module_configuration, 'uploadAttempts', types=int)

        self._write_object_attempts = Config.get_from_dict(
            module_configuration, 'writeObjectAttempts', types=int)

        self._read_object_attempts = Config.get_from_dict(module_configuration,
                                                          'readObjectAttempts',
                                                          types=int)

        self.service = b2.api.B2Api(account_info)
        if account_info_file is not None:
            try:
                # This temporarily disables all logging as the b2 library does some very verbose logging
                # of the exception we're trying to catch here...
                logging.disable(logging.ERROR)
                _ = self.service.get_account_id()
                logging.disable(logging.NOTSET)
            except MissingAccountData:
                self.service.authorize_account('production', account_id,
                                               application_key)
        else:
            self.service.authorize_account('production', account_id,
                                           application_key)

        self.bucket = self.service.get_bucket_by_name(bucket_name)
Example #26
    def __init__(self, *, config: Config, name: str,
                 module_configuration: ConfigDict, url: str,
                 block_size: int) -> None:
        super().__init__(config=config,
                         name=name,
                         module_configuration=module_configuration,
                         url=url,
                         block_size=block_size)

        if self.parsed_url.username or self.parsed_url.password or self.parsed_url.hostname or self.parsed_url.port \
                    or self.parsed_url.params or self.parsed_url.fragment:
            raise UsageError('The supplied URL {} is invalid.'.format(
                self.url))
        if self.parsed_url.query:
            try:
                extra_ceph_conf = parse_qs(self.parsed_url.query,
                                           keep_blank_values=True,
                                           strict_parsing=True,
                                           errors='strict')
            except (ValueError, UnicodeError) as exception:
                raise UsageError('The supplied URL {} is invalid.'.format(
                    self.url)) from exception

            # parse_qs returns the values as lists; only keep the first appearance of each key in the query string.
            extra_ceph_conf = {
                key: value[0]
                for key, value in extra_ceph_conf.items()
            }
        else:
            extra_ceph_conf = {}

        ceph_config_file = config.get_from_dict(module_configuration,
                                                'cephConfigFile',
                                                types=str)
        if 'client_identifier' in extra_ceph_conf:
            client_identifier = extra_ceph_conf['client_identifier']
            del extra_ceph_conf['client_identifier']
        else:
            client_identifier = config.get_from_dict(module_configuration,
                                                     'clientIdentifier',
                                                     types=str)

        self._cluster = rados.Rados(conffile=ceph_config_file,
                                    rados_id=client_identifier,
                                    conf=extra_ceph_conf)
        self._cluster.connect()
        # Build a bitmask of the configured image features by OR'ing them together.
        self._new_image_features = 0
        for feature in config.get_from_dict(module_configuration,
                                            'newImageFeatures',
                                            types=list):
            try:
                self._new_image_features = self._new_image_features | getattr(
                    rbd, feature)
            except AttributeError:
                raise ConfigurationError(
                    '{}: Unknown image feature {}.'.format(
                        module_configuration.full_name, feature))

        self._pool_name = None
        self._image_name = None
        self._snapshot_name = None

        self._simultaneous_reads = config.get_from_dict(module_configuration,
                                                        'simultaneousReads',
                                                        types=int)
        self._simultaneous_writes = config.get_from_dict(module_configuration,
                                                         'simultaneousWrites',
                                                         types=int)
        self._read_executor: Optional[JobExecutor] = None
        self._write_executor: Optional[JobExecutor] = None