async def store(self, obj: BaseModel):
    """
    Store object into Storage

    :param Model obj: Arbitrary base object for storing into DB
    """
    try:
        # Validate the object and perform necessary type conversions.
        obj.validate()
    except (ValidationError, ConversionError) as e:
        raise DataAccessInternalError(f"{e}")

    scheme_keys = self._model_scheme.keys()
    object_keys = obj.fields.keys()

    missing_keys = scheme_keys - object_keys
    if missing_keys:
        raise DataAccessInternalError(
            f"Store object doesn't have necessary model properties:"
            f"{','.join(missing_keys)}")

    extra_keys = object_keys - scheme_keys
    if extra_keys:
        raise DataAccessInternalError(
            f"Object to store has new model properties:"
            f"{','.join(extra_keys)}")
async def get_by_id(self, obj_id: Any) -> Union[BaseModel, None]:
    """
    Simple implementation of get function.

    Important note: in terms of this API 'id' means BaseModel.primary_key reference.
    If model contains 'id' field please use ordinary get call. For example,

        await db(YourBaseModel).get(Query().filter_by(Compare(YourBaseModel.id, "=", obj_id)))

    This API call is equivalent to

        await db(YourBaseModel).get(Query().filter_by(
            Compare(YourBaseModel.primary_key, "=", obj_id)))

    :param Any obj_id:
    :return: BaseModel if object was found by its id and None otherwise
    """
    # Convert the raw id to the primary key's native type before querying.
    primary_key_field = getattr(self._model, self._model.primary_key)
    try:
        native_id = primary_key_field.to_native(obj_id)
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")

    query = Query().filter_by(Compare(self._model.primary_key, "=", native_id))
    results = await self.get(query)
    return results.pop() if results else None
def __init__(self, consul_client: Consul, model: Type[BaseModel], collection: str,
             process_pool: ThreadPoolExecutor,
             loop: asyncio.AbstractEventLoop = None):
    """
    :param Consul consul_client: consul client
    :param Type[BaseModel] model: model (class object) to associate it with consul storage
    :param str collection: string represented collection for `model`
    :param ThreadPoolExecutor process_pool: thread pool executor
    :param AbstractEventLoop loop: asyncio event loop
    """
    # Bug fix: validate the model BEFORE it is used to construct the query
    # converter below; previously an invalid model failed inside
    # ConsulQueryConverterWithData instead of raising the intended error.
    if not isinstance(model, type) or not issubclass(model, BaseModel):
        raise DataAccessInternalError(
            "Model parameter is not a Class object or not inherited "
            "from cortx.utils.data.access.BaseModel")

    self._consul_client = consul_client
    self._collection = collection.lower()
    self._query_converter = ConsulQueryConverterWithData(model)
    self._model = model  # Needed to build returning objects
    # self._query_service = ConsulQueryService(self._collection, self._consul_client,
    #                                          self._query_converter)
    # TODO: there is problems with process pool switched to thread pool
    self._process_pool = process_pool
    self._loop = loop
    self._templates = ConsulKeyTemplate()
    self._templates.set_object_type(self._collection)
    self._model_scheme = dict()
async def delete(self, filter_obj: IFilter) -> int:
    """
    Delete objects in DB by Query

    :param IFilter filter_obj: filter object to perform delete operation
    :return: number of deleted entries
    """
    raw_data = await self._get_all_raw()
    if not raw_data:
        return 0

    # NOTE: use processes for parallel data calculations and make true asynchronous work
    matched = list(await self._loop.run_in_executor(
        self._process_pool, query_converter_build, self._model, filter_obj, raw_data))
    if not matched:
        return 0  # No models are deleted

    delete_tasks = [
        asyncio.ensure_future(self._consul_client.kv.delete(entry[ConsulWords.KEY]))
        for entry in matched
    ]
    done, _ = await asyncio.wait(delete_tasks)
    for finished in done:
        if not finished.result():
            raise DataAccessInternalError(f"Error happens during object deleting")
    return len(matched)
def __init__(self, ldap_client: ldap.ldapobject.LDAPObject, model: Type[BaseModel],
             collection: str, thread_pool: ThreadPoolExecutor,
             loop: asyncio.AbstractEventLoop = None) -> None:
    """
    Initialize openldap client.

    :param ldap_client: ldap client.
    :param model: model (class object) to be stored in ldap.
    :param collection: base DN for stored object.
    :param thread_pool: thread pool executor.
    :param loop: asyncio event loop.
    :returns: None
    """
    # Bug fix: validate the model BEFORE it is used to construct the query
    # converter below; previously an invalid model failed inside
    # OpenLdapQueryConverter instead of raising the intended error.
    if not isinstance(model, type) or not issubclass(model, BaseModel):
        raise DataAccessInternalError(
            "Model parameter is not a Class object or not inherited "
            "from cortx.utils.data.access.BaseModel")

    self._client = ldap_client
    self._collection = collection
    self._query_converter = OpenLdapQueryConverter(model)
    self._model = model
    self._thread_pool = thread_pool
    self._loop = loop
async def get(self, query: Query) -> List[BaseModel]:
    """
    Get object from Storage by Query.

    :param query:
    :return: empty list or list with objects which satisfy the passed query condition.
    """
    def _sorted_key_func(_by_field, _field_type):
        """
        Generates key function for built-in sorted function to perform correct sorting
        of get results.

        :param _by_field: field which will be used for sorting (ordering by)
        :param _field_type: type of the field which will be used for sorting
        :return:
        """
        # TODO: for other types we can define other wrapper-functions
        # String fields sort case-insensitively; other types compare as-is.
        wrapper = str.lower if _field_type is StringType else lambda x: x
        return lambda x: wrapper(getattr(x, _by_field))

    query = query.data
    suitable_models = await self._get_all_raw()
    if not suitable_models:
        return list()
    # NOTE: use processes for parallel data calculations and make true asynchronous work
    if query.filter_by is not None:
        suitable_models = await self._loop.run_in_executor(self._process_pool,
                                                           query_converter_build,
                                                           self._model,
                                                           query.filter_by,
                                                           suitable_models)

    # Build model objects from the stored JSON payloads of the matching entries.
    base_models = [self._model(json.loads(entry[ConsulWords.VALUE]))
                   for entry in suitable_models]
    # NOTE: if offset parameter is set in Query then order_by option is enabled automatically
    if any((query.order_by, query.offset)):
        # Fall back to the primary key when no explicit order_by is given.
        field = query.order_by.field if query.order_by else getattr(self._model,
                                                                    self._model.primary_key)
        field_str = field_to_str(field)
        field_type = type(getattr(self._model, field_str))
        reverse = SortOrder.DESC == query.order_by.order if query.order_by else False
        key = _sorted_key_func(field_str, field_type)
        base_models = sorted(base_models, key=key, reverse=reverse)

    offset = query.offset or 0
    limit = offset + query.limit if query.limit is not None else len(base_models)
    # NOTE: if query.limit is None then slice will be from offset to the end of array
    # slice(0, None) means that start is 0 and stop is not specified
    if offset < 0 or limit < 0:
        raise DataAccessInternalError("Wrong offset and limit parameters of Query object: "
                                      f"offset={query.offset}, limit={query.limit}")
    model_slice = slice(offset, limit)
    return base_models[model_slice]
def handle_compare(self, entry: FilterOperationCompare) -> str:
    """
    Handle all kinds of comparison filters: '<', '<=', '>', '>=', '==', '!=', 'LIKE'

    :param entry: filter object.
    :returns: compiled filter as a string.
    """
    field = entry.get_left_operand()
    field_str = field_to_str(field)
    op = entry.get_operation()

    # Convert the right operand to the field's native type before rendering.
    converter = getattr(self._model, field_str) if isinstance(field, str) else field
    try:
        right_operand = converter.to_native(entry.get_right_operand())
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")

    field_str = OpenLdapSyntaxTools.generalize_field_name(field_str)
    right_operand = OpenLdapSyntaxTools.generalize_field_value(right_operand, to_bytes=False)
    if op == ComparisonOperation.OPERATION_LIKE:
        # LDAP filters have no LIKE; emit an '=' match with a wildcard-wrapped value.
        op = ComparisonOperation.OPERATION_EQ
        right_operand = f'*{right_operand}*'
    return f'{field_str}{op.value}{right_operand}'
def __init__(self, es_client: Elasticsearch, model: Type[BaseModel], collection: str,
             thread_pool_exec: ThreadPoolExecutor,
             loop: asyncio.AbstractEventLoop = None):
    """
    :param Elasticsearch es_client: elasticsearch client
    :param Type[BaseModel] model: model (class object) to associate it with
                                  elasticsearch storage
    :param str collection: string represented collection for `model`
    :param ThreadPoolExecutor thread_pool_exec: thread pool executor
    :param BaseEventLoop loop: asyncio event loop
    """
    # Bug fix: validate the model BEFORE it is used to construct the query
    # converter below; previously an invalid model failed inside
    # ElasticSearchQueryConverter instead of raising the intended error.
    if not isinstance(model, type) or not issubclass(model, BaseModel):
        raise DataAccessInternalError(
            "Model parameter is not a Class object or not inherited "
            "from cortx.utils.data.access.BaseModel")

    self._es_client = es_client
    # NOTE(review): attribute name has a typo ("tread"); kept as-is because other
    # methods of this class may reference it — confirm before renaming.
    self._tread_pool_exec = thread_pool_exec
    self._loop = loop or asyncio.get_event_loop()
    self._collection = collection
    self._query_converter = ElasticSearchQueryConverter(model)
    # We are associating index name in ElasticSearch with given collection
    self._index = self._collection
    self._model = model  # Needed to build returning objects
    self._index_info = None
    self._model_scheme = None
    self._query_service = ElasticSearchQueryService(self._index, self._es_client,
                                                    self._query_converter)
def _render_template(self, template: Union[Template, str], object_type: str = None, **kwargs): if not self._object_type_is_set and object_type is None: raise DataAccessInternalError("Need to set object type") elif object_type is not None: template.substitute(OBJECT_TYPE=object_type, **kwargs) return template.substitute(**kwargs)
async def update(self, filter_obj: IFilter, to_update: dict) -> int:
    """
    Update object in Storage by filter

    :param IFilter filter_obj: filter which specifies what objects need to update
    :param dict to_update: dictionary with fields and values which should be updated
    :return: number of entries updated
    """
    # Generic code for update method of particular storage
    unnecessary_fields = set(to_update.keys()) - set(self._model_scheme.keys())
    if unnecessary_fields:
        raise DataAccessInternalError(f"to_update dictionary contains fields which are not "
                                      f"presented in model:{unnecessary_fields}")

    # Convert every value to the native type of its model field (in place).
    try:
        for field_name, value in to_update.items():
            to_update[field_name] = getattr(self._model, field_name).to_native(value)
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")
def field_to_str(field: Union[str, BaseType]) -> str:
    """
    Convert model field to its string representation

    :param Union[str, BaseType] field:
    :return: model field string representation
    """
    # Guard-clause form: plain strings pass through, model fields expose .name.
    if isinstance(field, str):
        return field
    if isinstance(field, BaseType):
        return field.name
    raise DataAccessInternalError("Failed to convert field to string representation")
async def get(self, query: Query) -> List[BaseModel]:
    """
    Get object from OpenLdap by query.

    :param query: query object.
    :returns: list with objects that satisfy the query condition.
    """
    def _sorted_key_func(_by_field, _field_type):
        """
        Generates key function for built-in sorted function to perform correct sorting
        of get results

        :param _by_field: field which will be used for sorting (ordering by)
        :param _field_type: type of the field which will be used for sorting
        :return:
        """
        from schematics.types import StringType

        # String fields sort case-insensitively; other types compare as-is.
        wrapper = str.lower if _field_type is StringType else lambda x: x
        return lambda x: wrapper(getattr(x, _by_field))

    query = query.data
    models = await self._get_ldap(query.filter_by)

    # Sort the result
    # An offset implies ordering; falls back to the primary key when no explicit
    # order_by is given.
    if any((query.order_by, query.offset)):
        field = query.order_by.field if query.order_by else getattr(self._model,
                                                                    self._model.primary_key)
        field_str = field_to_str(field)
        field_type = type(getattr(self._model, field_str))
        reverse = SortOrder.DESC == query.order_by.order if query.order_by else False
        key = _sorted_key_func(field_str, field_type)
        models = sorted(models, key=key, reverse=reverse)

    # Paginate the result
    offset = query.offset or 0
    limit = offset + query.limit if query.limit is not None else len(models)
    # NOTE: if query.limit is None then slice will be from offset to the end of array
    # slice(0, None) means that start is 0 and stop is not specified
    if offset < 0 or limit < 0:
        raise DataAccessInternalError("Wrong offset and limit parameters of Query object: "
                                      f"offset={query.offset}, limit={query.limit}")
    model_slice = slice(offset, limit)
    return models[model_slice]
def __init__(self, model: Type[BaseModel], mapping_type: str):
    """
    :param Type[BaseModel] model: model for constructing data mapping
                                  for index in ElasticSearch
    :param str mapping_type: name of the ElasticSearch mapping type for the index
    :raises DataAccessInternalError: if mapping_type is None
    """
    # Validate the parameter before any state is initialized from it.
    if mapping_type is None:
        raise DataAccessInternalError("Mapping type is not specified")

    self._model = model
    self._mapping_type = mapping_type
    self._mapping = {
        ESWords.MAPPINGS: {
            mapping_type: {
                ESWords.PROPERTIES: {}
            }
        }
    }
def handle_compare(self, entry: FilterOperationCompare):
    """
    Handle a comparison filter entry and convert it to the backend representation.

    :param entry: comparison filter object.
    :returns: backend-specific comparison built by `comparison_conversion`.
    """
    # Run the shared validation from the base converter first.
    super().handle_compare(entry)

    field = entry.get_left_operand()
    field_str = field_to_str(field)
    op = entry.get_operation()

    # Convert the right operand to the field's native type.
    converter = getattr(self._model, field_str) if isinstance(field, str) else field
    try:
        right_operand = converter.to_native(entry.get_right_operand())
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")
    return self.comparison_conversion[op](field_str, right_operand)
async def delete_by_id(self, obj_id: Any) -> bool:
    """
    Delete base model by its id.

    :param Any obj_id: id of the object to be deleted
    :return: `True` if object was deleted successfully and `False` otherwise
    """
    # Generic implementation of delete by id functionality
    primary_key_field = getattr(self._model, self._model.primary_key)
    try:
        native_id = primary_key_field.to_native(obj_id)
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")

    deleted_count = await self.delete(Compare(self._model.primary_key, "=", native_id))
    return deleted_count > 0
async def update_by_id(self, obj_id: Any, to_update: dict) -> bool:
    """
    Update base model in db by id (primary key).

    :param Any obj_id: id-value of the object which should be updated (primary key value)
    :param dict to_update: dictionary with fields and values which should be updated
    :return: `True` if object was updated and `False` otherwise
    """
    # Generic code for update_by_id method of particular method
    primary_key_field = getattr(self._model, self._model.primary_key)
    try:
        native_id = primary_key_field.to_native(obj_id)
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")

    updated_count = await self.update(
        Compare(self._model.primary_key, "=", native_id), to_update)
    return updated_count > 0
async def async_wrapper():
    # NOTE(review): this is a closure — `self`, `args` and `kwargs` are captured from
    # the enclosing scope; `self` appears to be a proxy holding `_async_storage`,
    # `_attr_name` and `_event` — confirm against the enclosing definition.
    async def _wait_db_creation():
        # Drive database creation forward until the storage reports READY.
        while self._async_storage.storage_status != ServiceStatus.READY:
            if self._async_storage.storage_status == ServiceStatus.NOT_CREATED:
                # Note: Call create_database one time per Model
                await self._async_storage.create_database()
            elif self._async_storage.storage_status == ServiceStatus.IN_PROGRESS:
                # Another caller is creating the database; wait to be signalled.
                await self._event.wait()
                self._event.clear()

    await _wait_db_creation()  # Wait until db will be created

    database = self._async_storage.get_database()
    if database is None:
        raise DataAccessInternalError("Database is not created")

    # Forward the attribute access to the concrete database object.
    attr = database.__getattribute__(self._attr_name)
    if callable(attr):
        # may be, first call the function and then check whether we need to await it
        # DD: I think, we assume that all storage API are async
        return await attr(*args, **kwargs)
    else:
        return attr
def handle_compare(self, entry: FilterOperationCompare):
    """
    Handle a comparison filter against the locally cached object data.

    :param entry: comparison filter object.
    :returns: set of keys of objects that satisfy the comparison.
    """
    super().handle_compare(entry)  # Call the generic code

    field = entry.get_left_operand()
    field_str = field_to_str(field)
    op = entry.get_operation()
    try:
        right_operand = getattr(self._model, field_str).to_native(
            entry.get_right_operand())
    except ConversionError as e:
        raise DataAccessInternalError(f"{e}")

    compare = self._operator[op]
    # Clearer equivalent of the original `not op != OPERATION_LIKE` double negative.
    if op == ComparisonOperation.OPERATION_LIKE:
        matches = (key for key in self._object_data
                   if compare(getattr(self._object_data[key], field_str), right_operand))
    else:
        # NOTE(review): operand order is swapped relative to the LIKE branch
        # (operator receives (right_operand, field_value)); preserved as-is —
        # confirm this matches the `_operator` table's expected argument order.
        matches = (key for key in self._object_data
                   if compare(right_operand, getattr(self._object_data[key], field_str)))
    return set(matches)