Example #1
def execute_sql(self, *args, **kwargs):
    # Wrap the compiler's execute_sql so every query is timed and recorded
    # by the profiling client; bypass instrumentation when no client is set.
    client = get_client()
    if client is None:
        return self.__execute_sql(*args, **kwargs)
    try:
        q, params = self.as_sql()
        if not q:
            raise EmptyResultSet
    except EmptyResultSet:
        if kwargs.get('result_type', MULTI) == MULTI:
            return empty_iter()
        else:
            return
    start = datetime.now()
    try:
        return self.__execute_sql(*args, **kwargs)
    finally:
        d = (datetime.now() - start)
        client.insert({
            'query': q,
            'view': _get_current_view(),
            'type': 'sql'
        }, {
            'time': 0.0 + d.seconds * 1000 + d.microseconds / 1000,
            'count': 1
        })
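The elapsed-time expression above (`d.seconds * 1000 + d.microseconds / 1000`) ignores `d.days` and, under Python 2, truncates the sub-millisecond part through integer division. Below is a minimal standalone sketch of the same start/try/finally timing pattern using `timedelta.total_seconds()`; the helper name is illustrative and not part of the original code.

from datetime import datetime

def timed_call(fn, *args, **kwargs):
    # Run fn and report wall-clock time in milliseconds, mirroring the
    # start/try/finally shape of the wrapper above.
    start = datetime.now()
    try:
        return fn(*args, **kwargs)
    finally:
        elapsed = datetime.now() - start
        # total_seconds() folds days, seconds and microseconds together.
        print('took %.3f ms' % (elapsed.total_seconds() * 1000))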
Example #2
        def newfun(cls, *args, **kwargs):
            # Replacement for the compiler's execute_sql: consult the query
            # cache first, fall back to the original method on a miss, and
            # store the (materialized) result afterwards.
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            if any([isinstance(cls, c) for c in self._write_compilers]):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return compiler.empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, None
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            # if the tables are blacklisted, send a qc_skip signal
            blacklisted = disallowed_table(*tables)
            if blacklisted:
                signals.qc_skip.send(sender=cls, tables=tables,
                    query=(sql, params, cls.query.ordering_aliases),
                    key=key)
            if tables and not blacklisted:
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(),
                                              result_type, db)
                val = self.cache_backend.get(key, None, db)

            if val is not None:
                signals.qc_hit.send(sender=cls, tables=tables,
                        query=(sql, params, cls.query.ordering_aliases),
                        size=len(val), key=key)
                return val

            if not blacklisted:
                signals.qc_miss.send(sender=cls, tables=tables,
                    query=(sql, params, cls.query.ordering_aliases),
                    key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                #Can't permanently cache lazy iterables without creating
                #a cacheable data structure. Note that this makes them
                #no longer lazy...
                #todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS, db)
            return val
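newfun is a closure over original (the unwrapped execute_sql) and self (the caching object doing the patching), which implies it is installed by reassigning the compiler class's method. Here is a self-contained sketch of that wrap-and-replace pattern on a stand-in class; FakeCompiler and patch are illustrative names, not the project's actual patch code.

class FakeCompiler(object):
    def execute_sql(self, result_type='multi'):
        return ['row1', 'row2']

def patch(cls):
    original = cls.execute_sql

    def newfun(compiler, *args, **kwargs):
        # Cache lookups, signals, etc. would happen around this call.
        return original(compiler, *args, **kwargs)

    cls.execute_sql = newfun
    return original          # keep a handle so the patch can be undone

unpatched = patch(FakeCompiler)
print(FakeCompiler().execute_sql())   # routed through newfun
FakeCompiler.execute_sql = unpatched  # restore the original method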
Example #3
def execute_sql(self, result_type=MULTI):
    try:
        return super(SQLCompiler, self).execute_sql(result_type)
    except InvalidGaeKey:
        if result_type == MULTI:
            return empty_iter()
        else:
            return
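This override catches a backend-specific error (InvalidGaeKey, presumably raised by the App Engine backend for keys it cannot query) and degrades to an empty result instead of letting the error propagate. The same try/except shape is shown below on placeholder classes so it runs without any backend installed.

MULTI = 'multi'

class InvalidKey(Exception):
    # Stand-in for a backend-specific lookup error.
    pass

class BaseCompiler(object):
    def execute_sql(self, result_type=MULTI):
        raise InvalidKey('key not usable by this backend')

class SafeCompiler(BaseCompiler):
    def execute_sql(self, result_type=MULTI):
        try:
            return super(SafeCompiler, self).execute_sql(result_type)
        except InvalidKey:
            # Empty iterator for MULTI, None for single-row result types.
            return iter([]) if result_type == MULTI else None

print(list(SafeCompiler().execute_sql()))   # []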
Example #4
        def newfun(cls, *args, **kwargs):
            if args:
                result_type = args[0]
            else:
                result_type = kwargs.get('result_type', MULTI)

            if any([isinstance(cls, c) for c in self._write_compilers]):
                return original(cls, *args, **kwargs)
            try:
                sql, params = cls.as_sql()
                if not sql:
                    raise EmptyResultSet
            except EmptyResultSet:
                if result_type == MULTI:
                    # this was moved in 1.2 to compiler
                    return compiler.empty_iter()
                else:
                    return

            db = getattr(cls, 'using', 'default')
            key, val = None, None
            # check the blacklist for any of the involved tables;  if it's not
            # there, then look for the value in the cache.
            tables = get_tables_for_query(cls.query)
            if tables and not disallowed_table(*tables):
                gen_key = self.keyhandler.get_generation(*tables, **{'db': db})
                key = self.keyhandler.sql_key(gen_key, sql, params,
                                              cls.get_ordering(), result_type,
                                              db)
                val = self.cache_backend.get(key, None, db)

            if val is not None:
                signals.qc_hit.send(sender=cls,
                                    tables=tables,
                                    query=(sql, params,
                                           cls.query.ordering_aliases),
                                    size=len(val),
                                    key=key)
                return val

            signals.qc_miss.send(sender=cls,
                                 tables=tables,
                                 query=(sql, params,
                                        cls.query.ordering_aliases),
                                 key=key)

            val = original(cls, *args, **kwargs)

            if hasattr(val, '__iter__'):
                #Can't permanently cache lazy iterables without creating
                #a cacheable data structure. Note that this makes them
                #no longer lazy...
                #todo - create a smart iterable wrapper
                val = list(val)
            if key is not None:
                self.cache_backend.set(key, val, settings.MIDDLEWARE_SECONDS,
                                       db)
            return val
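The hasattr(val, '__iter__') branch exists because execute_sql can hand back a lazy result iterator, and caching the iterator object itself would store something that can only be consumed once. Below is a tiny standalone illustration of why the result is forced into a list before being cached; the code is generic and not tied to any particular cache backend.

def rows():
    # Simulates a lazy result iterator such as a database cursor.
    for i in range(3):
        yield i

cache = {}

cache['lazy'] = rows()                # caches the generator object itself
print(list(cache['lazy']))            # [0, 1, 2] -- the first read drains it
print(list(cache['lazy']))            # [] -- exhausted, later hits get nothing

cache['materialized'] = list(rows())  # force evaluation before caching
print(list(cache['materialized']))    # [0, 1, 2] on every read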
Example #5
def empty_iter():
    # Wrapped in a helper because empty_iter changed between Django 1.4 and 1.5.
    if django.VERSION[:2] >= (1, 5):
        return iter([])
    else:
        return compiler.empty_iter()
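Per the comments in the examples above, empty_iter moved into the compiler module around Django 1.2 and changed again in 1.5, so the helper branches on django.VERSION. The sketch below shows the same version-gating idea in generic form, using a fake version tuple so it runs without Django installed; FAKE_VERSION and legacy_empty_iter are placeholders.

FAKE_VERSION = (1, 5, 0)   # stand-in for django.VERSION

def legacy_empty_iter():
    # Placeholder for compiler.empty_iter() on older releases.
    return iter(())

def empty_iter():
    # On 1.5+ a plain empty iterator suffices; older versions delegate
    # to the helper the compiler module used to provide.
    if FAKE_VERSION[:2] >= (1, 5):
        return iter([])
    return legacy_empty_iter()

print(list(empty_iter()))   # []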
Example #6
def empty_iter():
    if django.VERSION[:2] >= (1, 5):
        return iter([])
    return compiler.empty_iter()