Exemplo n.º 1
0
def celery_task_wrapper(f):
    """
    Provides a task wrapper for celery that sets up cache and ensures
    that the local store is cleared after completion.

    The backend is patched for the duration of the task; if it was not
    already patched before the call, it is unpatched afterwards — even
    when the task raises (previously an exception left it patched).
    """
    from celery.utils import fun_takes_kwargs

    @wraps(f, assigned=available_attrs(f))
    def newf(*args, **kwargs):
        backend = get_backend()
        was_patched = backend._patched
        backend.patch()
        # since this function takes all keyword arguments,
        # we will pass only the ones the function below accepts,
        # just as celery does
        supported_keys = fun_takes_kwargs(f, kwargs)
        new_kwargs = dict((key, val) for key, val in kwargs.items()
                          if key in supported_keys)

        try:
            ret = f(*args, **new_kwargs)
        finally:
            local.clear()
            # restore the pre-call patch state unconditionally, so an
            # exception inside the task cannot leave the backend patched
            if not was_patched:
                backend.unpatch()
        return ret
    return newf
Exemplo n.º 2
0
    def _monkey_execute_sql(self, original):
        """Return a caching wrapper for ``execute_sql``.

        The wrapper consults the cache backend before running a query,
        emits qc_hit/qc_miss signals, stores cacheable results, and
        invalidates table generations on UPDATE statements.

        :param original: the unpatched ``execute_sql`` callable.
        """
        from django.db.models.sql import query
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, result_type=MULTI):
            # NOTE(review): despite the name, `cls` appears to be the
            # query instance being executed (it has as_sql(),
            # ordering_aliases) — confirm against the patch site.
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                # mirror Django's own behavior for empty result sets
                if result_type == MULTI:
                    return query.empty_iter()
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.ordering_aliases,
                                              result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    # cache hit: announce it and short-circuit the query
                    signals.qc_hit.send(sender=cls,
                                        tables=tables,
                                        query=(sql, params,
                                               cls.ordering_aliases),
                                        size=len(val),
                                        key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE')
                    and not sql.startswith('DELETE')):
                # I think we should always be sending a signal here if we
                # miss..
                signals.qc_miss.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params, cls.ordering_aliases),
                                     key=key)
                if hasattr(result, '__iter__'):
                    # materialize lazy iterables so they can be cached
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    self.keyhandler.invalidate_table(table)
            return result

        return newfun
Exemplo n.º 3
0
    def _monkey_select(self, original):
        """Return a caching wrapper for a select compiler's ``execute_sql``.

        Write compilers are passed straight through; for reads the wrapper
        checks the table blacklist, looks the result up in the cache,
        emits qc_hit/qc_miss signals, and stores fresh results with a
        ``settings.MIDDLEWARE_SECONDS`` timeout.

        :param original: the unpatched ``execute_sql`` callable.
        """
        from django.db.models.sql import compiler
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            # result_type may arrive positionally or as a keyword
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            # never intercept writes here; they are handled elsewhere
            if any([isinstance(cls, c) for c in self._write_compilers]):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return compiler.empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, None
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables,
                                                         **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(),
                                              result_type, db)
                val = self.cache_backend.get(key, None, db)

            if val is not None:
                signals.qc_hit.send(sender=cls, tables=tables,
                        query=(sql, params, cls.query.ordering_aliases),
                        size=len(val), key=key)
                return val

            signals.qc_miss.send(sender=cls, tables=tables,
                    query=(sql, params, cls.query.ordering_aliases),
                    key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                #Can't permanently cache lazy iterables without creating
                #a cacheable data structure. Note that this makes them
                #no longer lazy...
                #todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
            return val
        return newfun
Exemplo n.º 4
0
    def _monkey_execute_sql(self, original):
        """Return a caching wrapper for ``execute_sql`` (blacklist-aware
        variant).

        Adds qc_skip signalling for blacklisted tables, skips caching of
        queries detected as random by ``is_query_random``, and avoids
        invalidating blacklisted tables on UPDATE.

        :param original: the unpatched ``execute_sql`` callable.
        """
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, result_type=MULTI):
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                # mirror Django's empty-result behavior
                if result_type == MULTI:
                    return iter([])
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                # announce the skip; key is still None at this point
                signals.qc_skip.send(sender=cls, tables=tables,
                    query=(sql, params, cls.ordering_aliases),
                    key=key)

            if tables and not blacklisted and not is_query_random(cls.as_sql()[0]):
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                        cls.ordering_aliases, result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    signals.qc_hit.send(sender=cls, tables=tables,
                            query=(sql, params, cls.ordering_aliases),
                            size=len(val), key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE') and
                    not sql.startswith('DELETE')):

                if not blacklisted:
                    # don't send a miss out on blacklist hits, since we never
                    # looked in the first place, so it wasn't a "miss"
                    signals.qc_miss.send(sender=cls, tables=tables,
                        query=(sql, params, cls.ordering_aliases),
                        key=key)
                if hasattr(result, '__iter__'):
                    # materialize lazy iterables so they can be cached
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    if not disallowed_table(table):
                        self.keyhandler.invalidate_table(table)
            return result
        return newfun
Exemplo n.º 5
0
    def _monkey_write(self, original):
        """Wrap a write compiler's ``execute_sql`` so that every
        non-blacklisted table touched by the write has its cache
        generation invalidated after the write runs."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            from django.db.models.sql import compiler

            db = getattr(cls, "using", "default")
            # we have to do this before we check the tables, since the tables
            # are actually being set in the original function
            result = original(cls, *args, **kwargs)

            if isinstance(cls, compiler.SQLInsertCompiler):
                # Inserts are a special case where cls.tables
                # are not populated.
                affected = [cls.query.model._meta.db_table]
            else:
                affected = cls.query.tables

            for name in (t for t in affected if not disallowed_table(t)):
                self.keyhandler.invalidate_table(name, db)
            return result

        return newfun
Exemplo n.º 6
0
    def _patched(self, original, commit=True):
        """Return a Django 1.2-style wrapper around *original* that
        flushes local state once the wrapped call has run."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(using=None):
            # 1.2 signature: transaction functions accept `using`
            original(using=using)
            self._flush(using=using, commit=commit)
        return newfun
Exemplo n.º 7
0
    def _patched(self, original, commit=True):
        """Wrap a transaction function (Django 1.2 ``using`` signature)
        so the pending queue is flushed after the original has run."""
        @wraps(original, assigned=available_attrs(original))
        def wrapper(using=None):
            original(using=using)
            # flush only after the wrapped call completes
            self._flush(commit=commit, using=using)

        return wrapper
Exemplo n.º 8
0
    def _patched(self, original, commit=True):
        """Pick a version-appropriate flush-after-call wrapper for
        *original*; Django older than 1.1 gets the original back
        untouched."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(using=None):
            # Django >= 1.2: transaction functions accept `using`
            original(using=using)
            self._flush(commit=commit, using=using)

        @wraps(original, assigned=available_attrs(original))
        def newfun11():
            # Django 1.1: no `using` argument
            original()
            self._flush(commit=commit)

        version = django.VERSION[:2]
        if version == (1, 1):
            return newfun11
        if version > (1, 1):
            return newfun
        return original
Exemplo n.º 9
0
    def _patched(self, original, commit=True):
        """Return a flush-after-call wrapper matched to the running
        Django version (1.1 transaction functions take no ``using``
        parameter); pre-1.1 returns *original* unchanged."""
        @wraps(original, assigned=available_attrs(original))
        def modern(using=None):
            # 1.2+ signature
            original(using=using)
            self._flush(commit=commit, using=using)

        @wraps(original, assigned=available_attrs(original))
        def legacy():
            # 1.1 signature
            original()
            self._flush(commit=commit)

        if django.VERSION[:2] == (1, 1):
            return legacy
        elif django.VERSION[:2] > (1, 1):
            return modern
        return original
Exemplo n.º 10
0
    def _patched(self, original, commit=True, unless_managed=False):
        """Wrap *original* to flush afterwards; with *unless_managed*
        the flush is skipped while a managed transaction is active,
        mirroring Django's ``*_unless_managed`` functions."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(using=None):
            original(using=using)
            # suppress the flush only when both conditions hold: we were
            # asked to respect managed transactions AND one is active
            # (short-circuit keeps is_managed() uncalled otherwise)
            if not (unless_managed and self.is_managed(using=using)):
                self._flush(commit=commit, using=using)

        return newfun
Exemplo n.º 11
0
    def _patched(self, original, commit=True, unless_managed=False):
        """Flush-after-call wrapper; the ``unless_managed`` flavor does
        nothing further while a managed transaction is in progress."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(using=None):
            original(using=using)
            if unless_managed and self.is_managed(using=using):
                # copying the behavior of the original function: a
                # managed transaction suppresses the flush
                return
            self._flush(commit=commit, using=using)

        return newfun
Exemplo n.º 12
0
 def _savepoint(self, original):
     """Wrap a savepoint entry point, recording the returned savepoint
     id locally when the backend supports savepoints.

     ``using`` is only forwarded when supplied, since older Django
     savepoint functions do not accept it.
     """
     @wraps(original, assigned=available_attrs(original))
     def newfun(using=None):
         # identity comparison is the correct/idiomatic None check
         if using is not None:
             sid = original(using=using)
         else:
             sid = original()
         if self._uses_savepoints():
             self._create_savepoint(sid, using)
         return sid
     return newfun
Exemplo n.º 13
0
 def _savepoint(self, original):
     """Wrap a savepoint entry point; the savepoint id it returns is
     also recorded locally when the backend supports savepoints.

     ``using`` is only forwarded when supplied (older Django savepoint
     functions take no arguments).
     """
     @wraps(original, assigned=available_attrs(original))
     def newfun(using=None):
         # `is not None` rather than `!= None`: identity, not equality
         if using is not None:
             sid = original(using=using)
         else:
             sid = original()
         if self._uses_savepoints():
             self._create_savepoint(sid, using)
         return sid
     return newfun
Exemplo n.º 14
0
    def _monkey_execute_sql(self, original):
        """Return a caching wrapper for ``execute_sql`` (Django 1.1
        query-level variant).

        Checks the table blacklist, serves results from the cache when
        possible (emitting qc_hit/qc_miss signals), stores fresh results,
        and invalidates table generations on UPDATE statements.

        :param original: the unpatched ``execute_sql`` callable.
        """
        from django.db.models.sql import query
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, result_type=MULTI):
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                # mirror Django's own behavior for empty result sets
                if result_type == MULTI:
                    return query.empty_iter()
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                        cls.ordering_aliases, result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    # cache hit: short-circuit the database entirely
                    signals.qc_hit.send(sender=cls, tables=tables,
                            query=(sql, params, cls.ordering_aliases),
                            size=len(val), key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE') and
                    not sql.startswith('DELETE')):
                # I think we should always be sending a signal here if we
                # miss..
                signals.qc_miss.send(sender=cls, tables=tables,
                        query=(sql, params, cls.ordering_aliases),
                        key=key)
                if hasattr(result, '__iter__'):
                    # materialize lazy iterables so they can be cached
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    self.keyhandler.invalidate_table(table)
            return result
        return newfun
Exemplo n.º 15
0
def timer(func):
    """Decorator that times every call to *func* and prints the running
    call count and mean duration after each invocation."""
    samples = []

    @wraps(func, assigned=available_attrs(func))
    def timed(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        samples.append(time.time() - start)
        avg = sum(samples) / float(len(samples))
        print ("%d runs, %0.6f avg" % (len(samples), avg))
        return result
    return timed
Exemplo n.º 16
0
def timer(func):
    """Wrap *func* so each invocation is timed; after every call the
    total run count and average duration so far are printed."""
    durations = []

    @wraps(func, assigned=available_attrs(func))
    def inner(*args, **kwargs):
        began = time.time()
        value = func(*args, **kwargs)
        durations.append(time.time() - began)
        count = len(durations)
        print ("%d runs, %0.6f avg" % (count, sum(durations) / float(count)))
        return value
    return inner
Exemplo n.º 17
0
 def deco(func):
     """Run *func* with the qc_hit/qc_miss signal handlers connected,
     guaranteeing they are disconnected again afterwards."""
     @wraps(func, assigned=available_attrs(func))
     def wrapped(*args, **kwargs):
         qc_hit.connect(hit)
         qc_miss.connect(miss)
         try:
             return func(*args, **kwargs)
         finally:
             # always detach, even when func raises
             qc_hit.disconnect(hit)
             qc_miss.disconnect(miss)
     return wrapped
Exemplo n.º 18
0
    def deco(func):
        """Attach the hit/miss signal listeners around one call of
        *func*, detaching them again no matter how the call exits."""
        pairs = ((qc_hit, hit), (qc_miss, miss))

        @wraps(func, assigned=available_attrs(func))
        def wrapped(*args, **kwargs):
            for signal, listener in pairs:
                signal.connect(listener)
            try:
                result = func(*args, **kwargs)
            finally:
                for signal, listener in pairs:
                    signal.disconnect(listener)
            return result

        return wrapped
Exemplo n.º 19
0
    def _monkey_write(self, original):
        """Wrap a write compiler's ``execute_sql`` so that the cache
        generation of every table touched by the write is invalidated
        once the write has run."""
        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            from django.db.models.sql import compiler

            db = getattr(cls, 'using', 'default')
            # we have to do this before we check the tables, since the tables
            # are actually being set in the original function
            result = original(cls, *args, **kwargs)

            if isinstance(cls, compiler.SQLInsertCompiler):
                # inserts never populate cls.query.tables, so fall back
                # to the model's own table
                touched = [cls.query.model._meta.db_table]
            else:
                touched = cls.query.tables

            for name in touched:
                self.keyhandler.invalidate_table(name, db)
            return result

        return newfun
Exemplo n.º 20
0
    def _monkey_select(self, original):
        """Return a caching wrapper for a select compiler's
        ``execute_sql`` (sentinel-based variant).

        Uses a ``NotInCache`` instance to distinguish a cache miss from
        a cached falsy value, and ``no_result_sentinel`` to cache empty
        result sets explicitly.  Blacklisted tables produce a qc_skip
        signal and bypass the cache entirely; queries detected as random
        by ``is_query_random`` are never cached.

        :param original: the unpatched ``execute_sql`` callable.
        """
        from django.db.models.sql.constants import MULTI
        from django.db.models.sql.datastructures import EmptyResultSet

        @wraps(original, assigned=available_attrs(original))
        def newfun(cls, *args, **kwargs):
            # result_type may arrive positionally or as a keyword
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            # writes are never intercepted here
            if any([isinstance(cls, c) for c in self._write_compilers]):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, NotInCache()
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            # if the tables are blacklisted, send a qc_skip signal
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                signals.qc_skip.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params,
                                            cls.query.ordering_aliases),
                                     key=key)
            if tables and not blacklisted and not is_query_random(
                    cls.as_sql()[0]):
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(), result_type,
                                              db)
                val = self.cache_backend.get(key, NotInCache(), db)

            # the NotInCache default makes a genuine miss distinguishable
            # from a cached falsy value
            if not isinstance(val, NotInCache):
                if val == no_result_sentinel:
                    # an empty result set was cached explicitly
                    val = []

                signals.qc_hit.send(sender=cls,
                                    tables=tables,
                                    query=(sql, params,
                                           cls.query.ordering_aliases),
                                    size=len(val),
                                    key=key)
                return val

            if not blacklisted:
                # no miss signal for blacklisted tables: we never looked
                signals.qc_miss.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params,
                                            cls.query.ordering_aliases),
                                     key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                #Can't permanently cache lazy iterables without creating
                #a cacheable data structure. Note that this makes them
                #no longer lazy...
                #todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                if not val:
                    # cache empties via the sentinel so a later lookup
                    # still counts as a hit
                    self.cache_backend.set(key, no_result_sentinel,
                                           settings.MIDDLEWARE_SECONDS, db)
                else:
                    self.cache_backend.set(key, val,
                                           settings.MIDDLEWARE_SECONDS, db)
            return val

        return newfun