Example #1
from configparser import ConfigParser

from configuration import ProtonConfig
from nucleus.generics.log_utilities import LogUtilities


class ConnectionDialects(ProtonConfig, LogUtilities):
    """
    One-stop shop to get connectivity to any RDBMS for models.
    Current implementation covers:
    1. SQLite
    2. Postgres
    3. MySql
    4. SQL Server

    To add support for another database, add the corresponding config parameters to the databaseConfig.ini file.

    NOTE: ConnectionDialects relies on databaseConfig.ini to establish a valid connection. Please ensure
    you don't delete any existing config parameters.
    """
    connection_dialects_logger = LogUtilities().get_logger(log_file_name='connection_dialects_logs',
                                                           log_file_path='{}/trace/connection_dialects_logs.log'.format(
                                                               ProtonConfig.ROOT_DIR))

    @classmethod
    def dialect_store(cls):
        """
        Parse config file and prepare dialects for db supported by PROTON.
        By default PROTON ships with support for sqlite, postgresql, mysql and sqlserver.
        :return: dict of db dialects keyed by database name.
        """
        supported_databases = ['postgresql', ]
        parser = ConfigParser()
        config_file = '{}/databaseConfig.ini'.format(ProtonConfig.ROOT_DIR)
        db = {}
        parser.read(config_file)

        def get_parsed_parameters(db_dialect, section):
            """
            Parser for databaseConfig.ini
            :param db_dialect: supported db dialects by PROTON.
            :param section: supported db name.
            :return: db dialect
            """

            if parser.has_section(section):
                db_dialect[section] = {}
                params = parser.items(section)
                for param in params:
                    db_dialect[section][param[0]] = param[1]
            else:
                cls.connection_dialects_logger.exception(
                    '[ConnectionDialects]: Section {} is not found in "databaseConfig.ini" '
                    'file.'.format(section))
                raise Exception('[ConnectionDialects]: Section {} is not found in "databaseConfig.ini" '
                                'file.'.format(section))
            return db_dialect

        list(map(lambda sdb: get_parsed_parameters(db, sdb), supported_databases))
        return db
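
A minimal usage sketch, assuming a databaseConfig.ini with a [postgresql] section sits at ProtonConfig.ROOT_DIR; the keys and values shown are illustrative placeholders, not the actual shipped config:

# Hypothetical databaseConfig.ini layout expected by dialect_store():
#
# [postgresql]
# host = localhost
# port = 5432
# user = proton
# password = proton
# database = proton_db

dialects = ConnectionDialects.dialect_store()
# dialects is a dict keyed by section name, e.g.
# {'postgresql': {'host': 'localhost', 'port': '5432', 'user': 'proton', ...}}
print(dialects['postgresql']['host'])
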
Example #2
class TestLogUtilities(TestCase):
    log_utilities_object = LogUtilities()

    def test_log_utilities(self):
        assert isinstance(self.log_utilities_object, LogUtilities)

    def test_logger(self):
        return_logger = self.log_utilities_object.get_logger(
            log_file_name='test_log_utillities_logs',
            log_file_path='{}/trace/test_log_utillities_logs.log'.format(
                ProtonConfig.ROOT_DIR))
        print(str(return_logger))
        assert str(type(return_logger)) == "<class 'logging.Logger'>"
        assert str(
            return_logger) == '<Logger test_log_utillities_logs.base (DEBUG)>'
Example #3
class ConnectionManager(ConnectionDialects, metaclass=Singleton):
    """
    ConnectionManager manages connectivity pool for all databases supported by PROTON.

    Based on the C3 MRO, ConnectionManager has access to all parents of ConnectionDialects.
    ConnectionDialects inherits from ProtonConfig and LogUtilities, so those methods can also be used in ConnectionManager.
    """

    __connection_dialects = ConnectionDialects.dialect_store()
    __alchemy_connection_strings = {}
    __alchemy_engine_store = {}
    __pg_connection_pool = None
    __sqlite_connection = {}

    connection_manager_logger = LogUtilities().get_logger(
        log_file_name='connection_manager_logs',
        log_file_path='{}/trace/connection_manager_logs.log'.format(
            ProtonConfig.ROOT_DIR))

    def __init__(self):
        super(ConnectionManager, self).__init__()
        self.pg_cursor_generator = self.__pg_cursor_generator

    @classmethod
    def sqlite_connection_generator(cls):
        if not bool(cls.__sqlite_connection):
            try:
                with open('{}/proton_vars/proton_sqlite_config.txt'.format(
                        ProtonConfig.ROOT_DIR)) as file:
                    dialect = file.read().replace('\n', '')
                    cls.__sqlite_connection = sqlite3.connect(dialect)
                    cls.connection_manager_logger.info(
                        '[connection_manager]: SQLITE connection generator invoked for the first time. '
                        'Connection successfully generated and maintained at class level.'
                    )
            except Exception as e:
                cls.connection_manager_logger.exception(
                    '[connection_manager]: SQLite connection could not be established. Stack trace to follow.'
                )
                cls.connection_manager_logger.error(str(e))
        else:
            cls.connection_manager_logger.info(
                '[connection_manager]: SQLITE connection manager is called subsequently. '
                'Connection previously generated will be reused.')
        return cls.__sqlite_connection

    @classmethod
    def __pg_pool(cls):
        """
        ConnectionPool for Postgres governed by psycopg2.
        :return: psycopg2 SimpleConnectionPool for Postgres.
        """
        if cls.__pg_connection_pool is None:
            try:
                connection_dialect = cls.__connection_dialects['postgresql']
                dsn = "dbname='{}' user='******' host='{}' password='******' port='{}'".format(
                    connection_dialect['database'], connection_dialect['user'],
                    connection_dialect['host'], connection_dialect['password'],
                    connection_dialect['port'])
                # Connection pool with up to 25 live connections. Tweak this as needed.
                cls.__pg_connection_pool = SimpleConnectionPool(1, 25, dsn=dsn)
                cls.connection_manager_logger.info(
                    '[connection_manager]: PG Pool class method is invoked for first time. '
                    'PG Pool will be initialized for Postgres engine of PROTON.'
                )
            except Exception as e:
                cls.connection_manager_logger.info(
                    '[connection_manager]: Error creating a PG Pool. Stack trace to follow.'
                )
                cls.connection_manager_logger.exception(str(e))
        else:
            cls.connection_manager_logger.info(
                '[connection_manager]: Request for PG Pool method is invoked subsequently. '
                'PG Pool previously initialized for all PROTON supported engines is returned.'
            )

        return cls.__pg_connection_pool

    @classmethod
    @contextmanager
    def __pg_cursor(cls, connection_pool):
        connection = connection_pool.getconn()
        try:
            yield connection.cursor()
        finally:
            connection_pool.putconn(connection)

    @classmethod
    def alchemy_engine(cls):
        """
        Returns the engine store required by the SQLAlchemy ORM.
        :return: dict of SQLAlchemy engines keyed by dialect.
        """
        if not bool(cls.__alchemy_engine_store):
            import logging
            logging.basicConfig(
                level=logging.DEBUG,
                format=
                '[%(asctime)s] <---> [%(name)s] <---> [%(levelname)s] <---> [%(message)s]',
                handlers=[
                    logging.FileHandler(
                        '{}/trace/sqlalchemy_engine.log'.format(
                            ProtonConfig.ROOT_DIR))
                ])
            logging.getLogger('sqlalchemy.pool').setLevel(logging.DEBUG)

            cls.connection_manager_logger.info(
                '[connection_manager]: Alchemy engine class method is invoked for first time. '
                'Alchemy engine will be initialized for all PROTON supported engines.'
            )

            from sqlalchemy_utils import database_exists, create_database

            with open('{}/proton_vars/proton_sqlite_config.txt'.format(
                    ProtonConfig.ROOT_DIR)) as file:
                sqlite_dialect = file.read().replace('\n', '')
                cls.__alchemy_connection_strings['sqlite'] = '{}:///{}'.format(
                    'sqlite', sqlite_dialect)

            for dialect in cls.__connection_dialects:
                cls.__alchemy_connection_strings[
                    dialect] = '{}://{}:{}@{}:{}/{}'.format(
                        dialect, cls.__connection_dialects[dialect]['user'],
                        cls.__connection_dialects[dialect]['password'],
                        cls.__connection_dialects[dialect]['host'],
                        cls.__connection_dialects[dialect]['port'],
                        cls.__connection_dialects[dialect]['database'])

            for connection in cls.__alchemy_connection_strings:
                cls.__alchemy_engine_store[connection] = create_engine(
                    cls.__alchemy_connection_strings[connection],
                    pool_size=25,
                    max_overflow=5,
                    pool_timeout=30,
                    pool_recycle=3600,
                    poolclass=QueuePool)

                # Create the database if it doesn't exist, as defined in databaseConfig.ini.
                if not database_exists(
                        cls.__alchemy_engine_store[connection].url):
                    create_database(cls.__alchemy_engine_store[connection].url)
                    cls.connection_manager_logger.info(
                        '[connection_manager]: Proton has created target database in {} as defined in '
                        'databaseConfig.ini'.format(connection))

        else:
            cls.connection_manager_logger.info(
                '[connection_manager]: Alchemy engine class method is invoked subsequently. '
                'Alchemy engine previously initialized for all PROTON supported engines is returned.'
            )

        return cls.__alchemy_engine_store

    @classmethod
    def connection_store(cls):

        connection_manager = {
            'sqlite': {
                'getConnection': cls.sqlite_connection_generator
            }
        }

        try:
            pg_connection_pool = cls.__pg_pool()
            cls.connection_manager_logger.info(
                '[connection_manager]: Postgres operational. PROTON will successfully include PG!'
            )
            connection_manager.update({
                'postgresql': {
                    'getCursor': cls.__pg_cursor,
                    'pool': pg_connection_pool
                }
            })

        except Exception as e:
            connection_manager.update({'postgresql': None})
            cls.connection_manager_logger.exception(
                '[connection_manager]: Postgres is either not installed or not configured on the port provided '
                'within the ini file. PROTON will not include Postgres. Stack trace to follow.'
            )
            cls.connection_manager_logger.error(str(e))

        # TODO: Add support for mysql and sqlserver
        connection_manager.update({'mysql': None, 'sqlServer': None})
        return connection_manager

    @classmethod
    def pg_schema_generator(cls, engine_copy, schema_name):
        try:
            if not engine_copy.dialect.has_schema(engine_copy, schema_name):
                engine_copy.execute(schema.CreateSchema(schema_name))
                cls.connection_manager_logger.info(
                    '[connection_manager]: Successfully generated schema: {} in respective database of '
                    'postgresql'.format(schema_name))
                return True
            cls.connection_manager_logger.info(
                '[connection_manager]: Schema: {} already exists in respective database of '
                'postgresql'.format(schema_name))
            return True
        except Exception as e:
            cls.connection_manager_logger.exception(
                '[connection_manager]: Error generating schema {} in Postgres. Stack trace to follow.'
                .format(schema_name))
            cls.connection_manager_logger.error(str(e))

    @staticmethod
    def __pg_cursor_generator(connection_store):
        """
        A simple wrapper on top of connection_store to help users generate cursors without much typing.
        :return: postgresql cursor context manager.
        """
        if 'postgresql' in connection_store:
            return connection_store['postgresql']['getCursor'](
                connection_store['postgresql']['pool'])
        else:
            raise Exception(
                '[ConnectionManager]: Connection store does not contain an entry for postgresql. '
                'Check/debug connection_store in ConnectionManager.')
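
A minimal usage sketch, assuming the imports this excerpt omits (sqlite3, psycopg2, SQLAlchemy and the PROTON-internal Singleton metaclass) are in place and Postgres is reachable as configured:

manager = ConnectionManager()
store = manager.connection_store()

# SQLite: a plain sqlite3 connection, cached at class level after first use.
sqlite_conn = store['sqlite']['getConnection']()

# Postgres: pg_cursor_generator wraps the pooled cursor context manager.
if store.get('postgresql') is not None:
    with manager.pg_cursor_generator(store) as cursor:
        cursor.execute('SELECT 1')
        print(cursor.fetchone())

# SQLAlchemy engines keyed by dialect ('sqlite', 'postgresql', ...).
engines = ConnectionManager.alchemy_engine()
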
Example #4
import datetime

import jwt


class JWTManager:
    from configuration import ProtonConfig
    from nucleus.generics.log_utilities import LogUtilities
    from nucleus.iam.password_manager import PasswordManager
    lg = LogUtilities()
    pm = PasswordManager()

    token_authenticator_logger = lg.get_logger(
        log_file_name='token_authenticator',
        log_file_path='{}/trace/token_authenticator.log'.format(
            ProtonConfig.ROOT_DIR))

    # In a PROTON remote deployment, this must be a k8s secret.
    with open(
            '{}/nucleus/iam/secrets/PROTON_JWT_SECRET.txt'.format(
                ProtonConfig.ROOT_DIR), 'r') as proton_secret:
        app_secret = proton_secret.read().replace('\n', '')
        encoded_app_secret = pm.hash_password(app_secret)

    @classmethod
    def generate_token(cls, encode_value):
        """
        Generate JWT Token which PROTON auth middleware will use to authenticate all protected routes.
        :param encode_value: value that must be considered for JWT encode
        :return: JWT Token
        """

        token = jwt.encode(
            {
                'encode_value': encode_value,
                'exp':
                datetime.datetime.utcnow() + datetime.timedelta(minutes=30)
            }, cls.app_secret)  # Encode with the plain secret; encoding with the hashed secret makes decode fail.
        return token.decode('UTF-8')

    @classmethod
    def authenticate(cls, jwt_token):
        """
        Validates if JWT Token still stands True.
        :param jwt_token: JWT Token issued by generate_token method
        :return: A dict containing status and payload on success
        """

        if jwt_token:
            try:
                payload = jwt.decode(jwt_token, cls.app_secret)
            except (jwt.DecodeError, jwt.ExpiredSignatureError) as e:
                cls.token_authenticator_logger.exception(
                    '[JWT Manager]: Authentication failed due to : {}'.format(
                        str(e)))
                return {
                    'status': False,
                    'message': 'Token invalid {}'.format(str(e)),
                    'encode_value': None
                }
            cls.token_authenticator_logger.info(
                '[JWT Manager]: Authentication succeeded.')
            return {
                'status': True,
                'message': 'Token valid',
                'encode_value': payload['encode_value']
            }
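
A minimal usage sketch; the encode value is illustrative:

token = JWTManager.generate_token({'user_id': 42})
result = JWTManager.authenticate(token)
if result['status']:
    print('Token valid for:', result['encode_value'])
else:
    print('Token rejected:', result['message'])
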
Example #5
    def concurrency_wrapper(self, type, target_function, *args):
        """
         Multi-threading functionality available should this MIC stack need it.
         self.generate_multiple_threads: Method to generate multiple threads and create a thread pool.
         self.execute_multiple_threads: Method to concurrently execute threads in a threadpool.

        :param type: Valid options: 'http' or 'non-http'.
        :param target_function: Target function that threads should execute (This function should be in scope)
        :param args: Arguments expected by the target function. If type == 'http', args[0] must be a list of URLs.
        :return: Results respective to type of operation specified.
        """
        from configuration import ProtonConfig
        from nucleus.generics.log_utilities import LogUtilities

        logger = LogUtilities().get_logger(
            log_file_name='parallel_programming_logs',
            log_file_path='{}/trace/parallel_programming_logs.log'.format(
                ProtonConfig.ROOT_DIR))

        def __http_calls_resolver(target_function, args):
            """

            :param target_function: Target function that threads should execute (This function should be in scope)
            :param args:[List] Arguments expected by target function. For HTTP Calls, args[0] = A list of urls to
            perform HTTP Operation.
            :return:[List] Thread Pool Results
            """
            # TODO: Validate input parameters to contain expected; fail gracefully if not.

            try:
                _args = list(args)
                urls = _args[0]
                _args.pop(0)

                # Step 1: Create number of threads required.
                threads_pool = list(
                    map(
                        lambda url: self.__generate_multiple_threads(
                            target_function, url, _args), urls))
                time_after_pool = time.time()
                logger.info(
                    '[Parallel Programming] - Thread pool created with {} threads to resolve {} '
                    'concurrently.'.format(len(urls), target_function))

                # Step 2: Execute threads concurrently.
                thread_pool_results = self.__execute_multiple_threads(
                    threads_pool, time_after_pool)
                logger.info(
                    '[Parallel Programming] - {} threads executed concurrently. Operation was completed in '
                    '{} seconds and took {} seconds since thread pool was '
                    'spawned.'.format(
                        len(urls), thread_pool_results['execution_time'],
                        thread_pool_results['execution_time_since_pool_gen']))

                return thread_pool_results

            except Exception as e:
                logger.exception(
                    '[Parallel Programming] - Error completing HTTP call resolver. Stack trace to follow'
                )
                logger.exception(str(e))

        def __non_http_resolver(target_function, args):
            """

            :param target_function: Target function that threads should execute (This function should be in scope)
            :param args:[List] Arguments expected by target function.
            :return:[List] Thread Pool Results.
            """

            try:
                # Step 1: Create number of threads required.
                threads_pool = list(
                    map(
                        lambda arg: self.__generate_multiple_threads(
                            target_function, arg), args))
                time_after_pool = time.time()
                logger.info(
                    '[Parallel Programming] - Thread pool created with {} threads to resolve {} '
                    'concurrently.'.format(len(args), target_function))

                # Step 2: Execute threads concurrently.
                thread_pool_results = self.__execute_multiple_threads(
                    threads_pool, time_after_pool)
                logger.info(
                    '[Parallel Programming] - {} threads executed concurrently. Operation was completed in '
                    '{} seconds and took {} seconds since thread pool was '
                    'spawned.'.format(
                        len(args), thread_pool_results['execution_time'],
                        thread_pool_results['execution_time_since_pool_gen']))

                return thread_pool_results

            except Exception as e:
                logger.exception(
                    '[Parallel Programming] - Error completing Non-HTTP resolver. Stack trace to follow'
                )
                logger.exception(str(e))

        __map_type = {
            'http': __http_calls_resolver,
            'non-http': __non_http_resolver
        }

        return __map_type[type](target_function, args)
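
A minimal usage sketch; fetch_url, the URLs, and the mic_stack instance are hypothetical, and the exact target-function signature depends on __generate_multiple_threads, which is not shown in this excerpt:

import requests  # hypothetical HTTP client used by the target function

def fetch_url(url):
    """Target function executed by each thread; must be in scope."""
    return requests.get(url).status_code

# mic_stack is a hypothetical instance of the class defining concurrency_wrapper.
results = mic_stack.concurrency_wrapper(
    'http', fetch_url, ['https://example.com', 'https://example.org'])
print(results)
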
Example #6
import os

from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail

from configuration import ProtonConfig
from nucleus.generics.log_utilities import LogUtilities


class ProtonEmail(object):
    """
    PROTON's email client.
    """
    email_logger = LogUtilities().get_logger(log_file_name='emailer_logs',
                                             log_file_path='{}/trace/emailer_logs.log'.format(ProtonConfig.ROOT_DIR))
    __sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))

    def __init__(self):
        super(ProtonEmail, self).__init__()

    @staticmethod
    def __email_decorator(html_content):
        """
        Decorates email with disclaimer, logo and other good formatting.
        :param html_content: The content that user desires
        :return: Formatted HTML content.
        """
        dont_reply_warning_text = '<strong>PS: Please do not reply to this email. This email may not be monitored. ' \
                                  'For any queries, please contact support ' \
                                  'for {} at {}.</strong>'.format(os.environ.get('APP_NAME'),
                                                                  os.environ.get('APP_SUPPORT_EMAIL'))

        proton_promotion_text = '<span style="font-size:8pt; font-family:Arial, sans-serif; color:#6a737d;"> ' \
                                'This email & the underlying software for {} is powered by the ' \
                                '<a href="https://github.com/PruthviKumarBK/PROTON" target="_blank">' \
                                'PROTON framework</a> - ' \
                                'Ⓒ <a href="https://adroitcorp.com.au" target="_blank">' \
                                'Adroit Software Corporation</a>' \
                                '</span>'.format(os.environ.get('APP_NAME'))

        disclaimer_text = '<span style="font-size:8pt; font-family:Arial, sans-serif; color:#9b9b9b;"> ' \
                          'The content of this email is confidential and intended for the recipient specified in ' \
                          'message only. It is strictly forbidden to share any part of this message with any ' \
                          'third party, without a written consent of the sender. If you received this message by ' \
                          'mistake, please forward to {} and follow with its deletion, ' \
                          'so that we can ensure such a mistake does not occur in the future.' \
                          '</span>'.format(os.environ.get('APP_SUPPORT_EMAIL'))

        formatted_content = '{}' \
                            '<br />' \
                            '<hr />' \
                            '{}' \
                            '<br />' \
                            '<br />' \
                            '{}' \
                            '<br />' \
                            '<br />' \
                            '{}'.format(html_content, dont_reply_warning_text, disclaimer_text, proton_promotion_text)

        return formatted_content

    @classmethod
    def send_email(cls, to_email, subject, html_content, from_email='*****@*****.**'):
        """
        PROTON's postman.

        :param to_email: valid email address to which the email needs to be sent.
        :param subject: Email subject
        :param html_content: Email content.(Can include HTML markups)
        :param from_email: valid email address from which the email is sent. (default: [email protected])
        :return: A dictionary containing email status code, email body and email headers.
        """
        try:

            message = Mail(
                from_email=from_email,
                to_emails=to_email,
                subject=subject,
                html_content=ProtonEmail.__email_decorator(html_content))
            response = cls.__sg.send(message)
            return {
                'email_status_code': response.status_code,
                'email_body': response.body,
                'email_headers': response.headers
            }

        except Exception as e:
            cls.email_logger.exception('[Email]: Unable to send email. Details to follow.')
            cls.email_logger.exception(str(e))
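
A minimal usage sketch; the recipient, subject, and content are illustrative, and SENDGRID_API_KEY, APP_NAME, and APP_SUPPORT_EMAIL are assumed to be set in the environment:

status = ProtonEmail.send_email(
    to_email='recipient@example.com',
    subject='Welcome aboard',
    html_content='<h1>Hello!</h1><p>Thanks for signing up.</p>')
if status is not None:
    print(status['email_status_code'])
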
Example #7
import redis

from configuration import ProtonConfig
from nucleus.generics.log_utilities import LogUtilities


class CacheManager(ProtonConfig, LogUtilities):
    """
    CacheManager facilitates redis to support underlying databases supported
    by PROTON. All redis activities are controlled within CacheManager.
    """
    __redisConfig = {'host': 'redis', 'port': 6379, 'db': 0}
    cache_manager_logger = LogUtilities().get_logger(
        log_file_name='cache_manager_logs',
        log_file_path='{}/trace/cache_manager_logs.log'.format(
            ProtonConfig.ROOT_DIR))

    @classmethod
    def cache_processor(cls):
        """
        Closure for CacheManager

        :return: A dictionary of all methods processed by CacheManager.
        """
        def instantiate_cache():
            """
            Instantiates redis instance.
            :return: redis_instance object.
            """
            try:
                if not hasattr(cls, 'redis_instance'):
                    pool = redis.ConnectionPool(host=cls.__redisConfig['host'],
                                                port=cls.__redisConfig['port'],
                                                db=cls.__redisConfig['db'])
                    setattr(cls, 'redis_instance',
                            redis.StrictRedis(connection_pool=pool))
                    cls.cache_manager_logger.info(
                        'Successfully generated new cache instance via pool!')
                else:
                    cls.cache_manager_logger.info(
                        'Redis instance from pool is available. The same will be returned.'
                    )
                return cls.redis_instance

            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Exception while instantiating cache. Details: {}'.format(
                        str(e)))

        def set_to_cache(redis_instance, key, value):
            """
            Set value to cache.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache.
            :param key: The key
            :param value: The value
            :return: void
            """
            try:
                redis_instance.set(key, value)
                cls.cache_manager_logger.info(
                    'Cache set for key: {}'.format(key))
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Exception while setting value to cache. Details: {}'.
                    format(str(e)))

        def get_from_cache(redis_instance, key):
            """
            Getter function to extract data from cache.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache
            :param key: A valid key
            :return: Data from cache.
            """
            try:
                data_from_cache = redis_instance.get(key)
                cls.cache_manager_logger.info(
                    'Data from cache successful for key: {}'.format(key))
                return data_from_cache
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Data from cache for key: {} is unsuccessful. Details: {}'.
                    format(key, str(e)))

        def ping_cache(redis_instance):
            """
            Function to check if redis is available.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache
            :return: Bool
            """
            try:
                redis_instance.ping()
                cls.cache_manager_logger.info('Redis instance is available!')
                return True
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Redis instance is unavailable on ping! Details: {}'.
                    format(str(e)))
                return False

        def delete_from_cache(redis_instance, key):
            """
            Delete an entry from Cache.
            :param redis_instance: A valid redis instance as provided by instantiate_cache.
            :param key: A valid key.
            :return: Bool
            """
            try:
                redis_instance.delete(key)
                cls.cache_manager_logger.info(
                    '{} deleted from Redis cache!'.format(key))
                return True
            except Exception as e:
                cls.cache_manager_logger.exception(
                    ('Redis instance is unavailable to delete key: {}. '
                     'Details: {}'.format(key, str(e))))
                return False

        def delete_all_containing_key(redis_instance, key_substring):
            """
            Delete all entries from cache containing given key_substring
            :param redis_instance:  A valid redis instance as provided by instantiate_cache.
            :param key_substring: substring of redis keys
            :return: List of deleted keys.
            """
            try:
                deleted_keys = []
                for key in redis_instance.scan_iter(
                        '*{}*'.format(key_substring)):
                    redis_instance.delete(key)
                    deleted_keys.append(key)
                cls.cache_manager_logger.info(
                    'Deleted all cache entries containing key - {}\nDeleted '
                    'entries are: {}'.format(
                        key_substring,
                        ' ,'.join(str(x) for x in deleted_keys)))
                return deleted_keys
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Unable to delete cache entries containing given substring - {}. '
                    'Details: {}'.format(key_substring, str(e)))

        return {
            'init_cache': instantiate_cache,
            'set_to_cache': set_to_cache,
            'get_from_cache': get_from_cache,
            'ping_cache': ping_cache,
            'delete_from_cache': delete_from_cache,
            'delete_all_containing_key': delete_all_containing_key
        }
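
A minimal usage sketch of the closure returned by cache_processor; the key and value are illustrative, and a Redis server is assumed to be reachable at the configured host:

cache = CacheManager.cache_processor()
redis_instance = cache['init_cache']()

if cache['ping_cache'](redis_instance):
    cache['set_to_cache'](redis_instance, 'greeting', 'hello proton')
    print(cache['get_from_cache'](redis_instance, 'greeting'))
    cache['delete_from_cache'](redis_instance, 'greeting')
    cache['delete_all_containing_key'](redis_instance, 'greet')
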
Example #8
import redis

from configuration import ProtonConfig
from nucleus.generics.log_utilities import LogUtilities


class CacheManager(ProtonConfig, LogUtilities):
    """
    CacheManager facilitates redis to support underlying databases supported
    by PROTON. All redis activities are controlled within CacheManager.
    """
    __redisConfig = {'host': 'redis', 'port': 6379, 'db': 0}
    cache_manager_logger = LogUtilities().get_logger(
        log_file_name='cache_manager_logs',
        log_file_path='{}/trace/cache_manager_logs.log'.format(
            ProtonConfig.ROOT_DIR))

    @classmethod
    def cache_processor(cls):
        """
        Closure for CacheManager

        :return: A dictionary of all methods processed by CacheManager.
        """
        def instantiate_cache():
            """
            Instantiates redis instance.
            :return: redis_instance object.
            """
            try:
                redis_instance = redis.StrictRedis(
                    host=cls.__redisConfig['host'],
                    port=cls.__redisConfig['port'],
                    db=cls.__redisConfig['db'])
                cls.cache_manager_logger.info(
                    'Successfully instantiated cache!')
                return redis_instance
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Exception while instantiating cache. Details: {}'.format(
                        str(e)))

        def set_to_cache(redis_instance, key, value):
            """
            Set value to cache.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache.
            :param key: The key
            :param value: The value
            :return: void
            """
            try:
                redis_instance.set(key, value)
                cls.cache_manager_logger.info(
                    'Cache set for key: {}'.format(key))
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Exception while setting value to cache. Details: {}'.
                    format(str(e)))

        def get_from_cache(redis_instance, key):
            """
            Getter function to extract data from cache.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache
            :param key: A valid key
            :return: Data from cache.
            """
            try:
                data_from_cache = redis_instance.get(key)
                cls.cache_manager_logger.info(
                    'Data from cache successful for key: {}'.format(key))
                return data_from_cache
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Data from cache for key: {} is unsuccessful. Details: {}'.
                    format(key, str(e)))

        def ping_cache(redis_instance):
            """
            Function to check if redis is available.
            :param redis_instance: A valid redis_instance as provided by instantiate_cache
            :return: Bool
            """
            try:
                redis_instance.ping()
                cls.cache_manager_logger.info('Redis instance is available!')
                return True
            except Exception as e:
                cls.cache_manager_logger.exception(
                    'Redis instance is unavailable on ping! Details: {}'.
                    format(str(e)))
                return False

        def delete_from_cache(redis_instance, key):
            """
            Delete an entry from Cache.
            :param redis_instance: A valid redis instance as provided by instantiate_cache.
            :param key: A valid key.
            :return: Bool
            """
            try:
                redis_instance.delete(key)
                cls.cache_manager_logger.info(
                    '{} deleted from Redis cache!'.format(key))
                return True
            except Exception as e:
                cls.cache_manager_logger.exception(
                    ('Redis instance is unavailable to delete key: {}. '
                     'Details: {}'.format(key, str(e))))
                return False

        return {
            'init_cache': instantiate_cache,
            'set_to_cache': set_to_cache,
            'get_from_cache': get_from_cache,
            'ping_cache': ping_cache,
            'delete_from_cache': delete_from_cache
        }