Example #1
        def newfun(cls, result_type=MULTI):
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    return query.empty_iter()
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            # if any involved table is blacklisted, emit a skip signal and
            # bypass the cache lookup entirely
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                signals.qc_skip.send(sender=cls, tables=tables,
                    query=(sql, params, cls.ordering_aliases),
                    key=key)

            if tables and not blacklisted:
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                        cls.ordering_aliases, result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    signals.qc_hit.send(sender=cls, tables=tables,
                            query=(sql, params, cls.ordering_aliases),
                            size=len(val), key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE') and
                    not sql.startswith('DELETE')):

                if not blacklisted:
                    # don't send a miss out on blacklist hits, since we never
                    # looked in the first place, so it wasn't a "miss"
                    signals.qc_miss.send(sender=cls, tables=tables,
                        query=(sql, params, cls.ordering_aliases),
                        key=key)
                if hasattr(result, '__iter__'):
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    if not disallowed_table(table):
                        self.keyhandler.invalidate_table(table)
            return result
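
All three cached variants above share the same wrap-and-delegate shape: hold a reference to the original execute method, consult the cache first, and fall back to the original on a miss. Below is a minimal, self-contained sketch of that pattern; patch_execute_sql and InMemoryBackend are hypothetical stand-ins, not the project's real key handler or cache backend.

    MULTI = 'multi'

    class InMemoryBackend(object):
        """Hypothetical stand-in for self.cache_backend."""
        def __init__(self):
            self._store = {}

        def get(self, key, default=None):
            return self._store.get(key, default)

        def set(self, key, value):
            self._store[key] = value

    def patch_execute_sql(query_cls, cache_backend):
        """Replace query_cls.execute_sql with a caching wrapper, keeping a
        reference to the original so misses still hit the database."""
        original = query_cls.execute_sql

        def newfun(cls, result_type=MULTI):
            key = repr((cls.as_sql(), result_type))   # crude stand-in for sql_key()
            val = cache_backend.get(key, None)
            if val is not None:
                return val                            # cache hit
            result = original(cls, result_type)
            if hasattr(result, '__iter__'):
                result = list(result)                 # materialize so it can be cached
            cache_backend.set(key, result)
            return result

        query_cls.execute_sql = newfun
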
Example #2
        def newfun(cls, result_type=MULTI):
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    return query.empty_iter()
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            # check the blacklist for the involved tables; if none of them are
            # blacklisted, then look for the value in the cache.
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.ordering_aliases,
                                              result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    signals.qc_hit.send(sender=cls,
                                        tables=tables,
                                        query=(sql, params,
                                               cls.ordering_aliases),
                                        size=len(val),
                                        key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE')
                    and not sql.startswith('DELETE')):
                # I think we should always be sending a signal here if we
                # miss..
                signals.qc_miss.send(sender=cls,
                                     tables=tables,
                                     query=(sql, params, cls.ordering_aliases),
                                     key=key)
                if hasattr(result, '__iter__'):
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    self.keyhandler.invalidate_table(table)
            return result
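
The keyhandler calls in these examples (get_generation, sql_key, invalidate_table) point at generation-based invalidation: each cached key embeds a per-table generation token, and invalidating a table simply bumps its generation so stale keys are never produced again. A rough sketch of that idea, not the project's actual key handler:

    import hashlib

    def _md5(text):
        return hashlib.md5(text.encode('utf-8')).hexdigest()

    class GenerationalKeyHandler(object):
        """Illustrative key handler: bumping a table's generation implicitly
        invalidates every key derived from it, without deleting anything."""
        def __init__(self):
            self._generations = {}

        def get_generation(self, *tables):
            parts = ['%s:%s' % (t, self._generations.get(t, 0)) for t in sorted(tables)]
            return _md5('.'.join(parts))

        def sql_key(self, gen_key, sql, params, ordering_aliases, result_type):
            return 'qc:' + _md5('%s:%s:%r:%r:%s' % (
                gen_key, sql, params, ordering_aliases, result_type))

        def invalidate_table(self, table):
            # old cache entries are left behind; they simply become unreachable
            self._generations[table] = self._generations.get(table, 0) + 1
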
Example #3
        def newfun(cls, result_type=MULTI):
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    return query.empty_iter()
                else:
                    return

            val, key = None, None
            tables = get_tables_for_query11(cls)
            # check the blacklist for the involved tables; if none of them are
            # blacklisted, then look for the value in the cache.
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables)
                key = self.keyhandler.sql_key(gen_key, sql, params,
                        cls.ordering_aliases, result_type)
                val = self.cache_backend.get(key, None)

                if val is not None:
                    signals.qc_hit.send(sender=cls, tables=tables,
                            query=(sql, params, cls.ordering_aliases),
                            size=len(val), key=key)
                    return val

            # we didn't find the value in the cache, so execute the query
            result = original(cls, result_type)
            if (tables and not sql.startswith('UPDATE') and
                    not sql.startswith('DELETE')):
                # I think we should always be sending a signal here if we
                # miss..
                signals.qc_miss.send(sender=cls, tables=tables,
                        query=(sql, params, cls.ordering_aliases),
                        key=key)
                if hasattr(result, '__iter__'):
                    result = list(result)
                # 'key' will be None here if any of these tables were
                # blacklisted, in which case we just don't care.
                if key is not None:
                    self.cache_backend.set(key, result)
            elif tables and sql.startswith('UPDATE'):
                # issue #1 in bitbucket, not invalidating on update
                for table in tables:
                    self.keyhandler.invalidate_table(table)
            return result
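
The qc_hit, qc_miss, and qc_skip signals are sent like ordinary django.dispatch signals. Here is a hedged sketch of how a consumer might declare and subscribe to signals with those names; the real module's definitions may differ.

    from django.dispatch import Signal

    # Hypothetical declarations matching the names used above.
    qc_hit = Signal()    # a query was served from the cache
    qc_miss = Signal()   # a cacheable query had to hit the database
    qc_skip = Signal()   # a blacklisted table bypassed the cache entirely

    def log_cache_event(sender, tables=None, query=None, key=None, **kwargs):
        print('cache event from %r: tables=%r key=%r' % (sender, tables, key))

    qc_hit.connect(log_cache_event)
    qc_miss.connect(log_cache_event)
    qc_skip.connect(log_cache_event)
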
Example #4
        def execute_sql(self, result_type='multi'):
            """
            Runs the query against the database and returns the result(s). The
            return value is a single data item if result_type is SINGLE, or an
            iterator over the results if result_type is MULTI.

            result_type is either MULTI (use fetchmany() to retrieve all rows),
            SINGLE (only retrieve a single row), or None (no results expected, but
            the cursor is returned, since it's used by subclasses such as
            InsertQuery).
            """
            from django.db.models.sql.constants import MULTI, SINGLE, GET_ITERATOR_CHUNK_SIZE

            if self.high_mark and self.high_mark <= self.low_mark:
                # Short circuit if we're slicing nothing
                return []

            # Pull these in from the main Django DB code base; we can't import
            # them at the top level or we'd get circular imports.
            from django.db.models.sql.datastructures import EmptyResultSet
            from django.db.models.sql.query import empty_iter
            try:
                sql, params = self.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    return empty_iter()
                else:
                    return

            cursor = self.connection.cursor()
            if isinstance(sql, PyStatement):
                # We sometimes need to override with a PyStatement because
                # it's the only way to implement paginated results
                pycur = cursor
                while not isinstance(pycur, PyCursor):
                    pycur = pycur.cursor
                sql.execute(pycur, None, None)
            else:
                if DEBUG:
                    print sql, params
                cursor.execute(sql, params)

            if not result_type:
                return cursor
            if result_type == SINGLE:
                if self.ordering_aliases:
                    return cursor.fetchone()[:-len(self.ordering_aliases)]
                return cursor.fetchone()

            # The MULTI case.
            if self.ordering_aliases:
                from django.db.models.sql.query import order_modified_iter
                result = order_modified_iter(cursor, len(self.ordering_aliases),
                        self.connection.features.empty_fetchmany_value)
            else:
                result = iter((lambda: cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)),
                        self.connection.features.empty_fetchmany_value)

            # Need to serialize all the results because we don't want to maintain
            # state between requests
            result = list(result)

            # Force the PyStatement to close if we ever created one
            if isinstance(sql, PyStatement):
                sql.close()
                # Drop the temp table
                cur = self.connection.cursor()
                cur.execute("drop table #temp_table")
                cur.close()

            return result
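
The MULTI branch relies on the two-argument form of iter(), which keeps calling fetchmany() until it returns the backend's empty_fetchmany_value sentinel. A tiny self-contained illustration of that idiom:

    # iter(callable, sentinel) calls the callable until it returns the sentinel.
    chunks = iter([('a', 'b'), ('c',), ()])   # pretend fetchmany() results

    def fetchmany():
        return next(chunks)

    for chunk in iter(fetchmany, ()):         # () plays empty_fetchmany_value
        print(chunk)
    # prints ('a', 'b') then ('c',), then stops at the empty sentinel
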