Example 1
    async def attach_to_index(self, replication: int) -> None:
        """
        Provides async method to connect storage to index bound to provided model and collection
        :return:
        """
        def _get_alias(_index):
            return self._es_client.indices.get_alias(
                _index, ignore_unavailable=True)

        def _create(_index, _body):
            self._es_client.indices.create(index=_index, body=_body)

        def _get(_index):
            return self._es_client.indices.get(_index)

        try:
            indices = await self._loop.run_in_executor(self._tread_pool_exec,
                                                       _get_alias, self._index)
        except ConnectionError as e:
            raise DataAccessExternalError(
                f"Failed to establish connection to ElasticSearch: {e}")

        # self._obj_index = self._es_client.indices.get_alias("*")
        if indices.get(self._index, None) is None:
            data_mappings = ElasticSearchDataMapper(self._model,
                                                    self._mapping_type)
            mappings_dict = data_mappings.build_index_mappings(replication)
            # self._es_client.indices.create(index=model.__name__, ignore=400, body=mappings_dict)

            # NOTE: for newly created indexes ElasticSearch mapping type and index name coincide
            await self._loop.run_in_executor(self._tread_pool_exec, _create,
                                             self._index, mappings_dict)

        self._index_info = await self._loop.run_in_executor(
            self._tread_pool_exec, _get, self._index)

        # NOTE: if the ElasticSearch index was created outside of the CSM Agent, there
        #  is no guarantee that the index name and mapping type coincide
        self._mapping_type = next(
            iter(self._index_info[self._index][ESWords.MAPPINGS].keys()), None)
        if self._mapping_type is None:
            raise DataAccessExternalError(
                f"There is no mapping type for ElasticSearch index {self._index}"
            )
        self._model_scheme = self._index_info[self._index][ESWords.MAPPINGS][
            self._mapping_type][ESWords.PROPERTIES]
        self._model_scheme = {
            k.lower(): v
            for k, v in self._model_scheme.items()
        }
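The body handed to _create follows the standard Elasticsearch create-index format. A minimal sketch of such a body, assuming a 6.x-style mapping type (which the code above reads back) and purely illustrative field names; the real layout comes from ElasticSearchDataMapper.build_index_mappings():

import json

replication = 1  # hypothetical replication factor passed into attach_to_index()
index_body = {
    "settings": {"number_of_replicas": replication},
    "mappings": {
        # for newly created indexes the mapping type coincides with the index name
        "example_index": {
            "properties": {
                "id": {"type": "keyword"},    # illustrative fields only
                "created": {"type": "date"},
            }
        }
    },
}
print(json.dumps(index_body, indent=2))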
Example 2
    async def delete(self, filter_obj: IFilter) -> int:
        """
        Delete objects in DB by Query

        :param IFilter filter_obj: filter object to perform delete operation
        :return: number of deleted entries
        """
        def _delete(_by_filter):
            search = Search(index=self._index,
                            doc_type=self._mapping_type,
                            using=self._es_client)
            search = search.query(_by_filter)
            return search.delete()

        filter_by = self._query_converter.build(filter_obj)
        # NOTE: Needed to avoid elasticsearch.ConflictError when we perform delete quickly
        #       after store operation
        await self._refresh_index()
        try:
            result = await self._loop.run_in_executor(self._tread_pool_exec,
                                                      _delete, filter_by)
        except ConflictError as e:
            raise DataAccessExternalError(f"{e}")

        return result[ESWords.DELETED]
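The _delete helper relies on the standard elasticsearch_dsl delete-by-query pattern. A minimal standalone sketch, assuming a reachable cluster on localhost:9200 and an illustrative index, field and value:

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

es = Elasticsearch(hosts=[{"host": "localhost", "port": 9200}])  # assumed local cluster
search = Search(index="example_index", using=es)   # illustrative index name
search = search.query("match", status="obsolete")  # illustrative filter
response = search.delete()                         # translates to a delete-by-query request
print(response["deleted"])                         # number of documents removed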
Example 3
 async def delete_by_id(self, obj_id: Union[int, str]) -> None:
     obj_path = self._templates.get_object_path(str(obj_id))
     obj_path = obj_path.lower()
     response = await self._consul_client.kv.delete(obj_path)
     if not response:
         raise DataAccessExternalError(
             f"Error happens during object deleting with id={obj_id}")
Example 4
    async def create_database(cls, config, collection,
                              model: Type[BaseModel]) -> IDataBase:
        """
        Creates new instance of ElasticSearch DB and performs necessary initializations

        :param DBSettings config: configuration for elasticsearch server
        :param str collection: collection for storing model onto db
        :param Type[BaseModel] model: model which instances will be stored in DB
        :return: ElasticSearch database instance
        """
        # NOTE: avoid calling this method more than once for the same model
        if not all((cls.elastic_instance, cls.thread_pool, cls.loop)):
            auth = None
            if config.login:
                auth = (config.login, config.password)

            node = {"host": config.host, "port": config.port}
            cls.elastic_instance = Elasticsearch(hosts=[node], http_auth=auth)
            cls.thread_pool = ThreadPoolExecutor(
                max_workers=multiprocessing.cpu_count())
            cls.loop = asyncio.get_event_loop()

        es_db = cls(cls.elastic_instance, model, collection, cls.thread_pool,
                    cls.loop)

        try:
            await es_db.attach_to_index(config.replication)
        except DataAccessExternalError:
            raise  # forward error to upper caller
        except Exception as e:
            raise DataAccessExternalError("Some unknown exception occurred in "
                                          f"ElasticSearch module: {e}")

        return es_db
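create_database wires a synchronous elasticsearch-py client into asyncio through a thread pool; every blocking call is then dispatched with run_in_executor, as in attach_to_index above. A minimal standalone sketch of that pattern, assuming a 7.x-style client and an illustrative localhost node:

import asyncio
import multiprocessing
from concurrent.futures import ThreadPoolExecutor

from elasticsearch import Elasticsearch


async def main():
    es = Elasticsearch(hosts=[{"host": "localhost", "port": 9200}], http_auth=None)
    pool = ThreadPoolExecutor(max_workers=multiprocessing.cpu_count())
    loop = asyncio.get_event_loop()
    # Blocking client calls run on the pool so the event loop stays responsive.
    alive = await loop.run_in_executor(pool, es.ping)  # ping() returns False when unreachable
    print("cluster reachable:", alive)

asyncio.run(main())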
Example 5
    async def _update_ldap(self, model: BaseModel, to_update: Dict) -> None:
        """
        Update the existing object in LDAP.

        :param model: object's model.
        :param to_update: fields to update.
        :returns: None.
        """

        obj_attrs = model.to_native()
        key_generalizer = OpenLdapSyntaxTools.generalize_field_name
        val_generalizer = OpenLdapSyntaxTools.generalize_field_value
        to_update_distinct = {
            key_generalizer(key): val_generalizer(val)
            for key, val in to_update.items() if val != obj_attrs[key]
        }
        old_attrs_distinct = {
            key_generalizer(key): val_generalizer(val)
            for key, val in obj_attrs.items()
            if key_generalizer(key) in to_update_distinct
        }

        if to_update_distinct:
            dn = self._get_object_dn(model)
            ldif = ldap.modlist.modifyModlist(old_attrs_distinct,
                                              to_update_distinct)
            try:
                await self._loop.run_in_executor(self._thread_pool,
                                                 self._client.modify_s, dn,
                                                 ldif)
            except ldap.LDAPError as le:
                err = (
                    f'Failed to execute update operation with OpenLdap server: {le}'
                )
                raise DataAccessExternalError(err) from None
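The ldif handed to modify_s comes from python-ldap's ldap.modlist.modifyModlist, which diffs the old and new attribute dictionaries and emits only the operations needed to get from one to the other. A standalone sketch with illustrative attribute names and values (python-ldap expects lists of byte strings):

import ldap.modlist

old_attrs = {"mail": [b"old@example.com"], "cn": [b"Example User"]}
new_attrs = {"mail": [b"new@example.com"], "cn": [b"Example User"]}

# Only "mail" changed, so only "mail" operations are generated.
ldif = ldap.modlist.modifyModlist(old_attrs, new_attrs)
print(ldif)  # e.g. [(1, 'mail', None), (0, 'mail', [b'new@example.com'])]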
Example 6
    async def create_database(cls,
                              config: Dict,
                              collection: str,
                              model: Type[BaseModel],
                              create_schema: bool = False) -> IDataBase:
        """
        Creates new instance of OpenLdap client and performs necessary initializations.

        :param DBSettings config: configuration for OpenLdap server.
        :param str collection: base DN for stored model.
        :param Type[BaseModel] model: model which instances will be stored.
        :param bool create_schema: if the flag is True, the base DN will be created.
        :returns: OpenLdap client instance.
        """

        if not all((cls._client, cls._thread_pool, cls._loop)):
            cls._client = OpenLdap._ldap_init(config.hosts[0], config.port)
            try:
                cls._client.simple_bind_s(config.login, config.password)
            except ldap.LDAPError as le:
                err = f'Failed to bind to OpenLdap server: {le}'
                raise DataAccessExternalError(err) from None
            cls._loop = asyncio.get_event_loop()
            cls._thread_pool = ThreadPoolExecutor(
                max_workers=multiprocessing.cpu_count())

        ldap_client = cls(cls._client, model, collection, cls._thread_pool,
                          cls._loop)
        if create_schema:
            # Current OpenLdap implementation in the framework does not support schema creation.
            # Make sure the schema and collections are created before using the framework.
            pass

        return ldap_client
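The bind performed in create_database uses the plain synchronous python-ldap API. A minimal standalone sketch of that step, with an illustrative URI and credentials; the real _ldap_init may set additional options:

import ldap

client = ldap.initialize("ldap://localhost:389")  # illustrative URI
client.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
try:
    client.simple_bind_s("cn=admin,dc=example,dc=com", "secret")  # illustrative credentials
except ldap.LDAPError as le:
    print(f"Failed to bind to OpenLdap server: {le}")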
Example 7
    async def _get_ldap(self,
                        filter_obj: Optional[IFilter]) -> List[Dict[str, str]]:
        """
        Gets filtered objects from OpenLdap.

        Includes the Get logic that is empowered by OpenLdap itself.

        :param filter_obj: filter.
        :returns: list of dictionaries.
        """

        # Prepare the filter string
        ldap_filter = self._query_converter.build(
            filter_obj) if filter_obj else None
        # Query the OpenLdap
        base_dn = self._collection
        base_scope = ldap.SCOPE_ONELEVEL
        try:
            raw_attributes = await self._loop.run_in_executor(
                self._thread_pool, self._client.search_s, base_dn, base_scope,
                ldap_filter)
        except ldap.LDAPError as le:
            err = f'Failed to execute search operation with OpenLdap server: {le}'
            raise DataAccessExternalError(err) from None
        models = [self._ldif_to_model(attrs) for _, attrs in raw_attributes]
        return models
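The query converter ultimately has to produce a plain RFC 4515 filter string for search_s. A standalone sketch of such a filter, built with python-ldap's filter_format helper so values are escaped; attribute names and values are illustrative, and the real converter output may differ:

import ldap.filter

flt = ldap.filter.filter_format("(&(objectClass=%s)(uid=%s))",
                                ["inetOrgPerson", "jdoe"])
print(flt)  # (&(objectClass=inetOrgPerson)(uid=jdoe))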
Example 8
    async def create_database(cls,
                              config,
                              collection: str,
                              model: Type[BaseModel],
                              create_schema: bool = True) -> IDataBase:
        """
        Creates new instance of Consul KV DB and performs necessary initializations.

        :param DBSettings config: configuration for consul kv server
        :param str collection: collection for storing model onto db
        :param Type[BaseModel] model: model which instances will be stored in DB
        :param bool create_schema: if the flag is true, the collection will be created.
        :return: Consul KV database instance
        """
        # NOTE: avoid calling this method more than once for the same model
        if not all((cls.consul_client, cls.thread_pool, cls.loop)):
            cls.loop = asyncio.get_event_loop()
            try:
                cls.consul_client = Consul(host=config.hosts[0],
                                           port=config.port,
                                           loop=cls.loop)
            except ConnectionRefusedError as e:
                raise DataAccessExternalError(f"{e}")
            # needed to perform tree traversal in non-blocking mode
            cls.thread_pool = ThreadPoolExecutor(
                max_workers=multiprocessing.cpu_count())

        consul_db = cls(cls.consul_client, model, collection, cls.thread_pool,
                        cls.loop)

        try:
            if create_schema:
                await consul_db.create_object_root()
        except ClientConnectorError as e:
            raise DataAccessExternalError(f"{e}")
        except Exception as e:
            raise DataAccessError(
                f"Some unknown exception occurred in Consul module: {e}")

        return consul_db
Example 9
 async def _create_obj_dir():
     """
     Create obj dir if it does not exist and load model_scheme
     :return:
     """
     _index, _data = await self._consul_client.kv.get(obj_dir)
     if _data is None:
         self._model_scheme = dict.fromkeys(self._model.fields.keys())
         _response = await self._consul_client.kv.put(
             obj_dir, json.dumps(self._model_scheme))
         if not _response:
             raise DataAccessExternalError(
                 f"Can't put key={obj_root} and "
                 f"value={str(creation_time)}")
     else:
         self._model_scheme = json.loads(_data[ConsulWords.VALUE])
Example 10
    async def create_database(cls,
                              config,
                              collection: str,
                              model: Type[BaseModel],
                              create_schema=True) -> IDataBase:
        """
        Creates new instance of ElasticSearch DB and performs necessary initializations

        :param DBSettings config: configuration for elasticsearch server
        :param str collection: collection for storing model onto db
        :param Type[BaseModel] model: model which instances will be stored in DB
        :param bool create_schema: if the flag is true, the collection will be created.
        :return: ElasticSearch database instance
        """
        # NOTE: avoid calling this method more than once for the same model
        if not all((cls.elastic_instance, cls.thread_pool, cls.loop)):
            auth = None
            if config.login:
                auth = (config.login, config.password)

            nodes = [{"host": x, "port": config.port} for x in config.hosts]
            cls.elastic_instance = Elasticsearch(
                hosts=nodes,
                http_auth=auth,
                sniff_on_start=True,            # sniff before doing anything
                sniff_on_connection_fail=True,  # refresh nodes after a node fails to respond
                sniff_timeout=10,
                sniffer_timeout=60)             # and also refresh every 60 seconds
            cls.thread_pool = ThreadPoolExecutor(
                max_workers=multiprocessing.cpu_count())
            cls.loop = asyncio.get_event_loop()

        es_db = cls(cls.elastic_instance, model, collection, cls.thread_pool,
                    cls.loop)

        try:
            if create_schema:
                await es_db.attach_to_index(config.replication)
        except DataAccessExternalError:
            raise  # forward error to upper caller
        except Exception as e:
            raise DataAccessExternalError("Some unknown exception occurred in "
                                          f"ElasticSearch module: {e}")

        return es_db
Example 11
    async def store(self, obj: BaseModel):
        """
        Store object into Storage

        :param Model obj: Arbitrary base object for storing into DB

        """
        await super().store(obj)  # Call the generic code

        obj_path = self._templates.get_object_path(obj.primary_key_val)
        obj_path = obj_path.lower()

        obj_val = json.dumps(obj.to_primitive())
        response = await self._consul_client.kv.put(obj_path, obj_val)
        if not response:
            raise DataAccessExternalError(
                f"Can't put key={obj_path} and value={obj_val}")
Example 12
    async def delete(self, filter_obj: IFilter) -> int:
        """
        Delete objects from OpenLdap by filter.

        :param filter_obj: filter object.
        :returns: number of deleted entries.
        """
        models = await self._get_ldap(filter_obj)
        for model in models:
            dn = self._get_object_dn(model)
            try:
                await self._loop.run_in_executor(self._thread_pool,
                                                 self._client.delete_s, dn)
            except ldap.LDAPError as le:
                err = (
                    f'Failed to execute delete operation with OpenLdap server: {le}'
                )
                raise DataAccessExternalError(err) from None
        return len(models)
Example 13
    async def store(self, obj: BaseModel) -> None:
        """
        Stores the provided object in OpenLdap.

        :param obj: object to store.
        :returns: None.
        """
        # Current OpenLdap implementation in the framework does not retrieve schema details
        # from the server.
        # Make sure the model corresponds to the pre-created collection before using the framework.
        # await super().store(obj)
        dn = self._get_object_dn(obj)
        ldif = OpenLdap._model_to_ldif(obj)
        try:
            await self._loop.run_in_executor(self._thread_pool,
                                             self._client.add_s, dn, ldif)
        except ldap.ALREADY_EXISTS:
            await self.update_by_id(obj.primary_key_val, obj.to_native())
        except ldap.LDAPError as le:
            err = f'Failed to execute add operation with OpenLdap server: {le}'
            raise DataAccessExternalError(err) from None
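The add-side counterpart of modifyModlist is ldap.modlist.addModlist, which turns a plain attribute dictionary into the (attribute, values) pairs that add_s expects; _model_to_ldif presumably produces something equivalent. A standalone sketch with illustrative attributes:

import ldap.modlist

entry = {
    "objectClass": [b"inetOrgPerson"],
    "cn": [b"Example User"],
    "sn": [b"User"],
    "mail": [b"user@example.com"],
}
ldif = ldap.modlist.addModlist(entry)
print(ldif)  # [('objectClass', [b'inetOrgPerson']), ('cn', [b'Example User']), ...]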
Example 14
    async def attach_to_index(self, replication: int) -> None:
        """
        Provides async method to connect storage to index bound to provided model and collection
        :return:
        """
        def _get_alias(_index):
            return self._es_client.indices.get_alias(
                _index, ignore_unavailable=True)

        def _create(_index, _body):
            self._es_client.indices.create(index=_index, body=_body)

        def _get(_index):
            return self._es_client.indices.get(_index)

        try:
            indices = await self._loop.run_in_executor(self._tread_pool_exec,
                                                       _get_alias, self._index)
        except ConnectionError as e:
            raise DataAccessExternalError(
                f"Failed to establish connection to ElasticSearch: {e}")

        # self._obj_index = self._es_client.indices.get_alias("*")
        if indices.get(self._index, None) is None:
            data_mappings = ElasticSearchDataMapper(self._model)
            mappings_dict = data_mappings.build_index_mappings(replication)
            # self._es_client.indices.create(index=model.__name__, ignore=400, body=mappings_dict)

            await self._loop.run_in_executor(self._tread_pool_exec, _create,
                                             self._index, mappings_dict)

        self._index_info = await self._loop.run_in_executor(
            self._tread_pool_exec, _get, self._index)

        self._model_scheme = self._index_info[self._index][ESWords.MAPPINGS][
            ESWords.PROPERTIES]
        self._model_scheme = {
            k.lower(): v
            for k, v in self._model_scheme.items()
        }
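Unlike Example 1, this variant reads mappings.properties directly, i.e. it expects a typeless (7.x-style) index. A sketch of the corresponding create-index body, with illustrative field names only:

replication = 1  # hypothetical replication factor passed into attach_to_index()
index_body = {
    "settings": {"number_of_replicas": replication},
    "mappings": {
        "properties": {
            "id": {"type": "keyword"},
            "created": {"type": "date"},
        }
    },
}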
Example 15
    async def create_object_root(self) -> None:
        """
        Provides async method to initialize key structure for given object type.

        :return:
        """
        async def _create_obj_dir():
            """
            Create obj dir if it does not exist and load model_scheme.

            :return:
            """
            _index, _data = await self._consul_client.kv.get(obj_dir)
            if _data is None:
                self._model_scheme = dict.fromkeys(self._model.fields.keys())
                _response = await self._consul_client.kv.put(
                    obj_dir, json.dumps(self._model_scheme))
                if not _response:
                    raise DataAccessExternalError(
                        f"Can't put key={obj_root} and "
                        f"value={str(creation_time)}")
            else:
                self._model_scheme = json.loads(_data[ConsulWords.VALUE])

        obj_root = self._templates.get_object_root()
        obj_dir = self._templates.get_object_dir()
        _, data = await self._consul_client.kv.get(obj_root)
        if data is None:
            # maybe need to post creation time
            creation_time = datetime.now()
            response = await self._consul_client.kv.put(
                obj_root, str(creation_time))
            if not response:
                raise DataAccessExternalError(f"Can't put key={obj_root} and "
                                              f"value={str(creation_time)}")

        await _create_obj_dir()  # create the object dir if it does not exist
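After create_object_root (and a subsequent store, see Example 11), the Consul KV tree roughly holds three kinds of keys: the object root with a creation timestamp, the object dir with the JSON model scheme, and one lower-cased key per stored object. The concrete paths come from self._templates, so the ones below are purely illustrative assumptions:

import json
from datetime import datetime

kv_layout = {
    "examplemodel": str(datetime.now()),                          # object root: creation time
    "examplemodel/obj": json.dumps({"id": None, "name": None}),   # object dir: model scheme skeleton
    "examplemodel/obj/1": json.dumps({"id": 1, "name": "demo"}),  # stored object (see store() above)
}
print(json.dumps(kv_layout, indent=2))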