Example #1
0
    def test_explicit_mode(self):
        """An explicit TransactionManager refuses every transaction-scoped
        operation outside an explicit begin()/commit()-or-abort() cycle."""
        from .. import TransactionManager
        from ..interfaces import AlreadyInTransaction, NoTransaction

        # The default manager is implicit.
        manager = TransactionManager()
        self.assertFalse(manager.explicit)

        manager = TransactionManager(explicit=True)
        self.assertTrue(manager.explicit)

        guarded = ('get', 'commit', 'abort', 'doom', 'isDoomed', 'savepoint')

        def assert_all_raise_no_transaction():
            # Outside a transaction, every transaction-scoped API must raise.
            for method_name in guarded:
                with self.assertRaises(NoTransaction):
                    getattr(manager, method_name)()

        assert_all_raise_no_transaction()

        txn = manager.begin()
        # A second begin() while a transaction is open is an error.
        with self.assertRaises(AlreadyInTransaction):
            manager.begin()

        self.assertTrue(txn is manager.get())

        self.assertFalse(manager.isDoomed())
        manager.doom()
        self.assertTrue(manager.isDoomed())
        manager.abort()

        # After abort() the manager is back outside any transaction.
        assert_all_raise_no_transaction()

        txn = manager.begin()
        # Dooming does not leak into the next transaction.
        self.assertFalse(manager.isDoomed())
        with self.assertRaises(AlreadyInTransaction):
            manager.begin()
        manager.savepoint()
        manager.commit()
Example #2
0
    def test_explicit_mode(self):
        """Explicit-mode managers only operate between begin() and
        commit()/abort(); implicit mode is the default."""
        from .. import TransactionManager
        from ..interfaces import AlreadyInTransaction, NoTransaction

        tm = TransactionManager()
        self.assertFalse(tm.explicit)

        tm = TransactionManager(explicit=True)
        self.assertTrue(tm.explicit)

        transactional_ops = ('get', 'commit', 'abort', 'doom', 'isDoomed',
                             'savepoint')
        # No transaction has been begun yet: every operation must raise.
        for op in transactional_ops:
            with self.assertRaises(NoTransaction):
                getattr(tm, op)()

        started = tm.begin()
        with self.assertRaises(AlreadyInTransaction):
            tm.begin()

        # get() hands back the very transaction begin() returned.
        self.assertTrue(started is tm.get())

        self.assertFalse(tm.isDoomed())
        tm.doom()
        self.assertTrue(tm.isDoomed())
        tm.abort()

        # abort() ended the transaction, so the guards apply again.
        for op in transactional_ops:
            with self.assertRaises(NoTransaction):
                getattr(tm, op)()

        started = tm.begin()
        self.assertFalse(tm.isDoomed())  # doomed state was reset by abort()
        with self.assertRaises(AlreadyInTransaction):
            tm.begin()
        tm.savepoint()
        tm.commit()
Example #3
0
class QueryCacheBackend(object):
    """This class is the engine behind the query cache. It reads the queries
    going through the django Query and returns from the cache using
    the generation keys, or on a miss from the database and caches the results.
    Each time a model is updated the table keys for that model are re-created,
    invalidating all cached querysets for that model.

    There are different QueryCacheBackend's for different versions of django;
    call ``johnny.cache.get_backend`` to automatically get the proper class.
    """
    # Borg pattern: every instance shares this dict (and thus all state),
    # so the monkey-patch is applied at most once per process.
    __shared_state = {}

    def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
        """Bind (or re-use, via the shared state) the cache backend and the
        key generator / key handler classes.

        All three arguments are optional; falsy/None values fall back to
        previously shared values, and finally to the module defaults.
        """
        self.__dict__ = self.__shared_state
        self.prefix = settings.MIDDLEWARE_KEY_PREFIX
        if keyhandler:
            self.kh_class = keyhandler
        if keygen:
            self.kg_class = keygen
        if not cache_backend and not hasattr(self, 'cache_backend'):
            cache_backend = settings._get_backend()

        if not keygen and not hasattr(self, 'kg_class'):
            self.kg_class = KeyGen
        if keyhandler is None and not hasattr(self, 'kh_class'):
            self.kh_class = KeyHandler

        if cache_backend:
            self.cache_backend = TransactionManager(cache_backend,
                                                    self.kg_class)
            self.keyhandler = self.kh_class(self.cache_backend,
                                            self.kg_class, self.prefix)
        self._patched = getattr(self, '_patched', False)

    def _monkey_select(self, original):
        """Return a wrapper for a read compiler's ``execute_sql`` that
        serves results from the cache and stores misses back into it."""
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            # Write compilers inherit from SQLCompiler too; never cache them.
            if any(isinstance(cls, c) for c in self._write_compilers):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, NotInCache()
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            # if the tables are blacklisted, send a qc_skip signal
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                signals.qc_skip.send(sender=cls, tables=tables,
                    query=(sql, params, cls.ordering_aliases),
                    key=key)
            if tables and not blacklisted:
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(),
                                              result_type, db)
                val = self.cache_backend.get(key, NotInCache(), db)

            if not isinstance(val, NotInCache):
                # Cache hit; unwrap the empty-result sentinel.
                if val == no_result_sentinel:
                    val = []

                signals.qc_hit.send(sender=cls, tables=tables,
                        query=(sql, params, cls.ordering_aliases),
                        size=len(val), key=key)
                return val

            if not blacklisted:
                signals.qc_miss.send(sender=cls, tables=tables,
                    query=(sql, params, cls.ordering_aliases),
                    key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                # Can't permanently cache lazy iterables without creating
                # a cacheable data structure. Note that this makes them
                # no longer lazy...
                # todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                # Store a sentinel for empty results so a cached "empty"
                # can be told apart from an actual cache miss.
                if not val:
                    self.cache_backend.set(key, no_result_sentinel,
                                           settings.MIDDLEWARE_SECONDS, db)
                else:
                    self.cache_backend.set(key, val,
                                           settings.MIDDLEWARE_SECONDS, db)
            return val
        return newfun

    def _monkey_write(self, original):
        """Return a wrapper for a write compiler's ``execute_sql`` that
        invalidates the generation keys of every touched table."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            db = getattr(cls, 'using', 'default')
            from django.db.models.sql import compiler
            # we have to do this before we check the tables, since the tables
            # are actually being set in the original function
            ret = original(cls, *args, **kwargs)

            if isinstance(cls, compiler.SQLInsertCompiler):
                # Inserts are a special case where cls.tables
                # are not populated.
                tables = [cls.query.model._meta.db_table]
            else:
                tables = cls.query.tables
            for table in tables:
                if not disallowed_table(table):
                    self.keyhandler.invalidate_table(table, db)
            return ret
        return newfun

    def patch(self):
        """
        monkey patches django.db.models.sql.compiler.SQL*Compiler series
        """
        from django.db.models.sql import compiler

        self._read_compilers = (
            compiler.SQLCompiler,
            compiler.SQLAggregateCompiler,
            compiler.SQLDateCompiler,
        )
        self._write_compilers = (
            compiler.SQLInsertCompiler,
            compiler.SQLDeleteCompiler,
            compiler.SQLUpdateCompiler,
        )
        if not self._patched:
            # Keep the originals around so unpatch() can restore them.
            self._original = {}
            for reader in self._read_compilers:
                self._original[reader] = reader.execute_sql
                reader.execute_sql = self._monkey_select(reader.execute_sql)
            for updater in self._write_compilers:
                self._original[updater] = updater.execute_sql
                updater.execute_sql = self._monkey_write(updater.execute_sql)
            self._patched = True
            self.cache_backend.patch()
            self._handle_signals()

    def unpatch(self):
        """un-applies this patch."""
        if not self._patched:
            return
        for func in self._read_compilers + self._write_compilers:
            func.execute_sql = self._original[func]
        self.cache_backend.unpatch()
        self._patched = False

    def invalidate_m2m(self, instance, **kwargs):
        """Signal receiver for m2m changes: invalidate the affected table.

        Bug fix: invalidate the resolved ``table`` (the value actually
        checked against the blacklist) rather than the raw ``instance``,
        matching :meth:`invalidate`.
        """
        if self._patched:
            table = resolve_table(instance)
            if not disallowed_table(table):
                self.keyhandler.invalidate_table(table)

    def invalidate(self, instance, **kwargs):
        """Signal receiver for post_save/post_delete: invalidate the
        instance's own table and every related model's table."""
        if self._patched:
            table = resolve_table(instance)
            if not disallowed_table(table):
                self.keyhandler.invalidate_table(table)

            tables = set()
            tables.add(table)

            # Fill django's related-objects cache lazily if needed.
            try:
                instance._meta._related_objects_cache
            except AttributeError:
                instance._meta._fill_related_objects_cache()

            for obj in instance._meta._related_objects_cache.keys():
                obj_table = obj.model._meta.db_table
                if obj_table not in tables:
                    tables.add(obj_table)
                    if not disallowed_table(obj_table):
                        self.keyhandler.invalidate_table(obj_table)

    def _handle_signals(self):
        """Connect model-change signals to the invalidation receivers."""
        post_save.connect(self.invalidate, sender=None)
        post_delete.connect(self.invalidate, sender=None)
        # FIXME: only needed in 1.1?
        signals.qc_m2m_change.connect(self.invalidate_m2m, sender=None)

    def flush_query_cache(self):
        """Invalidate every table in the current database connection."""
        from django.db import connection
        tables = connection.introspection.table_names()
        for table in tables:
            # we want this to just work, so invalidate even things in blacklist
            self.keyhandler.invalidate_table(table)
Example #4
0
class QueryCacheBackend(object):
    """This class is the engine behind the query cache. It reads the queries
    going through the django Query and returns from the cache using
    the generation keys, or on a miss from the database and caches the results.
    Each time a model is updated the table keys for that model are re-created,
    invalidating all cached querysets for that model.

    There are different QueryCacheBackend's for different versions of django;
    call ``johnny.cache.get_backend`` to automatically get the proper class.
    """
    # Borg pattern: every instance shares this dict (and thus all state),
    # so the monkey-patch is applied at most once per process.
    __shared_state = {}

    def __init__(self, cache_backend=None, keyhandler=None, keygen=None):
        """Bind (or re-use, via the shared state) the cache backend and the
        key generator / key handler classes.

        All three arguments are optional; falsy/None values fall back to
        previously shared values, and finally to the module defaults.
        """
        self.__dict__ = self.__shared_state
        self.prefix = settings.MIDDLEWARE_KEY_PREFIX
        if keyhandler:
            self.kh_class = keyhandler
        if keygen:
            self.kg_class = keygen
        if not cache_backend and not hasattr(self, 'cache_backend'):
            cache_backend = settings._get_backend()

        if not keygen and not hasattr(self, 'kg_class'):
            self.kg_class = KeyGen
        if keyhandler is None and not hasattr(self, 'kh_class'):
            self.kh_class = KeyHandler

        if cache_backend:
            self.cache_backend = TransactionManager(cache_backend,
                                                    self.kg_class)
            self.keyhandler = self.kh_class(self.cache_backend, self.kg_class,
                                            self.prefix)
        self._patched = getattr(self, '_patched', False)

    def _monkey_select(self, original):
        """Return a wrapper for a read compiler's ``execute_sql`` that
        serves results from the cache and stores misses back into it."""
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            # Write compilers inherit from SQLCompiler too; never cache them.
            if any(isinstance(cls, c) for c in self._write_compilers):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, NotInCache()
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            # if the tables are blacklisted, send a qc_skip signal
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                signals.qc_skip.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params,
                                            cls.query.ordering_aliases),
                                     key=key)
            # Non-deterministic queries (e.g. ORDER BY RANDOM()) are never
            # cached, since a cached result would freeze the randomness.
            if tables and not blacklisted and not is_query_random(
                    cls.as_sql()[0]):
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(), result_type,
                                              db)
                val = self.cache_backend.get(key, NotInCache(), db)

            if not isinstance(val, NotInCache):
                # Cache hit; unwrap the empty-result sentinel.
                if val == no_result_sentinel:
                    val = []

                signals.qc_hit.send(sender=cls,
                                    tables=tables,
                                    query=(sql, params,
                                           cls.query.ordering_aliases),
                                    size=len(val),
                                    key=key)
                return val

            if not blacklisted:
                signals.qc_miss.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params,
                                            cls.query.ordering_aliases),
                                     key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                # Can't permanently cache lazy iterables without creating
                # a cacheable data structure. Note that this makes them
                # no longer lazy...
                # todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                # Store a sentinel for empty results so a cached "empty"
                # can be told apart from an actual cache miss.
                if not val:
                    self.cache_backend.set(key, no_result_sentinel,
                                           settings.MIDDLEWARE_SECONDS, db)
                else:
                    self.cache_backend.set(key, val,
                                           settings.MIDDLEWARE_SECONDS, db)
            return val

        return newfun

    def _monkey_write(self, original):
        """Return a wrapper for a write compiler's ``execute_sql`` that
        invalidates the generation keys of every touched table."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            db = getattr(cls, 'using', 'default')
            from django.db.models.sql import compiler
            # we have to do this before we check the tables, since the tables
            # are actually being set in the original function
            ret = original(cls, *args, **kwargs)

            if isinstance(cls, compiler.SQLInsertCompiler):
                # Inserts are a special case where cls.tables
                # are not populated.
                tables = [cls.query.model._meta.db_table]
            else:
                tables = cls.query.tables
            for table in tables:
                if not disallowed_table(table):
                    self.keyhandler.invalidate_table(table, db)
            return ret

        return newfun

    def patch(self):
        """
        monkey patches django.db.models.sql.compiler.SQL*Compiler series
        """
        from django.db.models.sql import compiler

        self._read_compilers = (
            compiler.SQLCompiler,
            compiler.SQLAggregateCompiler,
            compiler.SQLDateCompiler,
        )
        self._write_compilers = (
            compiler.SQLInsertCompiler,
            compiler.SQLDeleteCompiler,
            compiler.SQLUpdateCompiler,
        )
        if not self._patched:
            # Keep the originals around so unpatch() can restore them.
            self._original = {}
            for reader in self._read_compilers:
                self._original[reader] = reader.execute_sql
                reader.execute_sql = self._monkey_select(reader.execute_sql)
            for updater in self._write_compilers:
                self._original[updater] = updater.execute_sql
                updater.execute_sql = self._monkey_write(updater.execute_sql)
            self._patched = True
            self.cache_backend.patch()
            self._handle_signals()

    def unpatch(self):
        """un-applies this patch."""
        if not self._patched:
            return
        for func in self._read_compilers + self._write_compilers:
            func.execute_sql = self._original[func]
        self.cache_backend.unpatch()
        self._patched = False

    def invalidate_m2m(self, instance, **kwargs):
        """Signal receiver for m2m changes: invalidate the affected table.

        Bug fix: invalidate the resolved ``table`` (the value actually
        checked against the blacklist) rather than the raw ``instance``,
        matching :meth:`invalidate`.
        """
        if self._patched:
            table = resolve_table(instance)
            if not disallowed_table(table):
                self.keyhandler.invalidate_table(table)

    def invalidate(self, instance, **kwargs):
        """Signal receiver for post_save/post_delete: invalidate the
        instance's own table and every related model's table."""
        if self._patched:
            table = resolve_table(instance)
            if not disallowed_table(table):
                self.keyhandler.invalidate_table(table)

            tables = set()
            tables.add(table)

            # Fill django's related-objects cache lazily if needed.
            try:
                instance._meta._related_objects_cache
            except AttributeError:
                instance._meta._fill_related_objects_cache()

            for obj in instance._meta._related_objects_cache.keys():
                obj_table = obj.model._meta.db_table
                if obj_table not in tables:
                    tables.add(obj_table)
                    if not disallowed_table(obj_table):
                        self.keyhandler.invalidate_table(obj_table)

    def _handle_signals(self):
        """Connect model-change signals to the invalidation receivers."""
        post_save.connect(self.invalidate, sender=None)
        post_delete.connect(self.invalidate, sender=None)
        # FIXME: only needed in 1.1?
        signals.qc_m2m_change.connect(self.invalidate_m2m, sender=None)

    def flush_query_cache(self):
        """Invalidate every table in the current database connection."""
        from django.db import connection
        tables = connection.introspection.table_names()
        for table in tables:
            # we want this to just work, so invalidate even things in blacklist
            self.keyhandler.invalidate_table(table)
Example #5
0
class Context:
    """
    Base class for Contexts of a ConfiguredCtxModule. Do not use this class
    directly, use the :attr:`Context <.ConfiguredCtxModule.Context>` member of a
    ConfiguredCtxModule instead.

    Every Context object needs to be destroyed manually by calling its
    :meth:`.destroy` method. Although this method will be called in the
    destructor of this class, that might already be too late. This is the reason
    why the preferred way of using this class is within a `with` statement:

    >>> with ctx_conf.Context() as ctx:
    ...     ctx.logout_user()
    ...
    """

    def __init__(self):
        # _conf is attached by the configuring module; a Context that was
        # never bound to a ConfiguredCtxModule is unusable.
        if not hasattr(self, '_conf'):
            raise Exception('Unconfigured Context')
        self.log = self._conf.log
        self.log.debug('Initializing')
        # Maps constructed member name -> cached value (or None for
        # uncached members); insertion order drives destruction order.
        self._constructed_attrs = OrderedDict()
        self._active = True
        self.tx_manager = TransactionManager()
        for callback in self._conf._create_callbacks:
            callback(self)

    def __del__(self):
        # Last-resort cleanup; as the class docstring notes, relying on the
        # destructor may already be too late.
        if self._active:
            self.destroy()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Pass the in-flight exception (if any) on to the member destructors
        # and to the transaction commit/abort decision.
        self.destroy(value)

    def __hasattr__(self, attr):
        # NOTE(review): Python's hasattr()/getattr() machinery never calls a
        # __hasattr__ hook, so this only runs if invoked explicitly —
        # confirm whether it is still needed.
        if self._active:
            return attr in self._conf.registrations
        return attr in self.__dict__

    def __getattr__(self, attr):
        """
        Lazily constructs registered context members on first access.

        Cached members are stored in ``__dict__`` so later reads bypass
        this hook; uncached members are re-constructed on every access.
        Raises AttributeError for unknown attributes or once the context
        is no longer active.
        """
        if '_active' in self.__dict__ and '_conf' in self.__dict__ and \
                self._active and attr in self._conf.registrations:
            value = self._conf.registrations[attr].constructor(self)
            if self._conf.registrations[attr].cached:
                self._constructed_attrs[attr] = value
                self.__dict__[attr] = value
            else:
                # Record that the member was constructed (so its destructor
                # runs later) without caching a value.
                self._constructed_attrs[attr] = None
            self.log.debug('Creating member %s' % attr)
            return value
        raise AttributeError(attr)

    def __setattr__(self, attr, value):
        try:
            # call registered contructor, if there is one, so the destructor
            # gets called with this new value
            getattr(self, attr)
        except AttributeError:
            pass
        self.__dict__[attr] = value

    def __delattr__(self, attr):
        # Registered members that were actually constructed go through the
        # full destructor protocol; everything else is a plain delete.
        if attr in self._conf.registrations:
            if attr in self._constructed_attrs:
                self.__delattr(attr, None)
            elif attr in self.__dict__:
                del self.__dict__[attr]
        else:
            del self.__dict__[attr]

    def __delattr(self, attr, exception):
        """
        Deletes a previously constructed *attr*. Its destructor will receive the
        given *exception*.

        Note: this function assumes that the *attr* is indeed a registered
        context member. It will behave unexpectedly when called with an *attr*
        that has no registration.
        """
        constructor_value = self._constructed_attrs[attr]
        del self._constructed_attrs[attr]
        self.log.debug('Deleting member %s' % attr)
        destructor = self._conf.registrations[attr].destructor
        if destructor:
            self.log.debug('Calling destructor of %s' % attr)
            # Cached members hand their stored value to the destructor;
            # uncached members never stored one, so only the exception is
            # passed along.
            if self._conf.registrations[attr].cached:
                destructor(self, constructor_value, None)
            else:
                destructor(self, None)
        try:
            del self.__dict__[attr]
        except KeyError:
            # destructor might have deleted self.attr already
            pass

    def destroy(self, exception=None):
        """
        Cleans up this context and makes it unusable.

        After calling this function, this object will lose all its magic and
        behave like an empty class.

        The optional *exception*, that is the cause of this method call, will be
        passed to the destructors of every :term:`context member`.
        """
        if not self._active:
            return
        self.log.debug('Destroying')
        self._active = False
        # Tear members down in reverse construction order, so later members
        # can still use earlier ones in their destructors.
        for attr in reversed(list(self._constructed_attrs.keys())):
            self.__delattr(attr, exception)
        for callback in self._conf._destroy_callbacks:
            callback(self, exception)
        # Abort the transaction on error or doom; commit otherwise.
        tx = self.tx_manager.get()
        if exception or tx.isDoomed():
            tx.abort()
        else:
            tx.commit()