def clone(self, klass=None, memo=None):  # pylint: disable=arguments-differ
    """Clone the query, carrying the ``is_query_all`` flag onto the copy.

    Django >= 2.0 dropped the ``klass``/``memo`` parameters from
    ``Query.clone``, so dispatch on the version flag.
    """
    if DJANGO_20_PLUS:
        cloned = Query.clone(self)
    else:
        cloned = Query.clone(self, klass, memo)  # pylint: disable=too-many-function-args
    cloned.is_query_all = self.is_query_all
    return cloned
def test_repr(self):
    """repr() of a SQLCompiler names the model, connection and alias."""
    compiler = Query(Item).get_compiler(DEFAULT_DB_ALIAS, connection)
    expected = (
        f"<SQLCompiler model=Item connection="
        f"<DatabaseWrapper vendor={connection.vendor!r} alias='default'> "
        f"using='default'>"
    )
    self.assertEqual(repr(compiler), expected)
def q_mentioned_fields(q, model):
    """Return a sorted list of field names mentioned in a Q object.

    Example: Q(a__isnull=True, b=F('c')) -> ['a', 'b', 'c']
    """
    query = Query(model)
    # _add_q returns a (where_clause, joins) pair; only the clause matters here.
    where = query._add_q(q, used_aliases=set(), allow_joins=False)[0]
    # sorted() already returns a list, so the original list(sorted(...))
    # wrapper was redundant.
    return sorted(set(expression_mentioned_fields(where)))
def _get_condition_sql(self, model, schema_editor):
    """Render ``self.condition`` as fully interpolated SQL, or None if unset."""
    if self.condition is None:
        return None
    query = Query(model=model, alias_cols=False)
    where_node = query.build_where(self.condition)
    compiler = query.get_compiler(connection=schema_editor.connection)
    sql, params = where_node.as_sql(compiler, schema_editor.connection)
    quoted = tuple(schema_editor.quote_value(p) for p in params)
    return sql % quoted
def clone(self, klass=None, memo=None) -> 'SalesforceQuery[_T]':  # pylint: disable=arguments-differ
    """Clone the query, preserving the Salesforce-specific parameters."""
    if DJANGO_20_PLUS:
        cloned = cast(SalesforceQuery, Query.clone(self))
    else:
        # pylint: disable=too-many-function-args
        cloned = cast(SalesforceQuery, Query.clone(self, klass, memo))  # type: ignore[call-arg] # noqa
    cloned.sf_params = self.sf_params
    return cloned
def check(self, against, using=DEFAULT_DB_ALIAS):
    """
    Do a database query to check if the expressions of the Q instance
    matches against the expressions.
    """
    # Avoid circular imports.
    from django.db.models import Value
    from django.db.models.sql import Query
    from django.db.models.sql.constants import SINGLE

    probe = Query(None)
    for name, raw in against.items():
        # Wrap plain values so everything is a resolvable expression.
        expr = raw if hasattr(raw, "resolve_expression") else Value(raw)
        probe.add_annotation(expr, name, select=False)
    probe.add_annotation(Value(1), "_check")
    # This will raise a FieldError if a field is missing in "against".
    probe.add_q(self)
    compiler = probe.get_compiler(using=using)
    try:
        return compiler.execute_sql(SINGLE) is not None
    except DatabaseError as e:
        logger.warning("Got a database error calling check() on %r: %s", self, e)
        # Best-effort: treat a failed probe as a match rather than crashing.
        return True
def constraint_sql(self, model, schema_editor):
    """Render the full SQL for this constraint by filling in the template."""
    query = Query(model)
    compiler = query.get_compiler(connection=schema_editor.connection)
    rendered_exprs = self._get_expression_sql(compiler, schema_editor.connection, query)
    condition = self._get_condition_sql(compiler, schema_editor, query)
    where_clause = ' WHERE (%s)' % condition if condition else ''
    return self.template % {
        'name': schema_editor.quote_name(self.name),
        'index_type': self.index_type,
        'expressions': ', '.join(rendered_exprs),
        'where': where_clause,
    }
def _get_condition_sql(self, model, schema_editor):
    """Return the ``' WHERE ...'`` fragment for a partial index ('' if none)."""
    if self.condition is None:
        return ''
    query = Query(model=model)
    query.add_q(self.condition)
    compiler = query.get_compiler(connection=schema_editor.connection)
    # Only the WhereNode is of interest for the partial index.
    sql, params = query.where.as_sql(compiler=compiler, connection=schema_editor.connection)
    # BaseDatabaseSchemaEditor does the same map on the params, but since
    # it's handled outside of that class, the work is done here.
    quoted = tuple(schema_editor.quote_value(p) for p in params)
    return ' WHERE ' + (sql % quoted)
def constraint_sql(self, model, schema_editor):
    """Build this constraint's SQL from the class template."""
    query = Query(model)
    compiler = query.get_compiler(connection=schema_editor.connection)
    expression_sql = self._get_expression_sql(compiler, schema_editor.connection, query)
    condition = self._get_condition_sql(compiler, schema_editor, query)
    parts = {
        "name": schema_editor.quote_name(self.name),
        "index_type": self.index_type,
        "expressions": ", ".join(expression_sql),
        "where": " WHERE (%s)" % condition if condition else "",
    }
    return self.template % parts
def q_to_sql(q, model, schema_editor):
    """Compile a Q object into a standalone, fully quoted SQL WHERE expression."""
    # Q -> SQL conversion based on code from Ian Foote's Check Constraints
    # pull request: https://github.com/django/django/pull/7615/
    query = Query(model)
    where = query._add_q(q, used_aliases=set(), allow_joins=False)[0]
    connection = schema_editor.connection
    compiler = connection.ops.compiler('SQLCompiler')(query, connection, 'default')
    sql, params = where.as_sql(compiler, connection)
    quoted_params = tuple(schema_editor.quote_value(p) for p in params)
    return sql % quoted_params
def constraint_sql(self, model, schema_editor):
    """Render the constraint SQL including INCLUDE and DEFERRABLE clauses."""
    query = Query(model, alias_cols=False)
    compiler = query.get_compiler(connection=schema_editor.connection)
    expressions = self._get_expression_sql(compiler, schema_editor.connection, query)
    condition = self._get_condition_sql(compiler, schema_editor, query)
    opts = model._meta
    include_columns = [opts.get_field(field_name).column for field_name in self.include]
    return self.template % {
        'name': schema_editor.quote_name(self.name),
        'index_type': self.index_type,
        'expressions': ', '.join(expressions),
        'include': schema_editor._index_include_sql(model, include_columns),
        'where': ' WHERE (%s)' % condition if condition else '',
        'deferrable': schema_editor._deferrable_constraint_sql(self.deferrable),
    }
def create_sql(self, model, schema_editor, using='', **kwargs):
    """Build the CREATE INDEX statement for this index.

    Functional indexes (``self.expressions``) and plain column indexes
    (``self.fields_orders``) are mutually exclusive paths.
    """
    include = [model._meta.get_field(field_name).column for field_name in self.include]
    condition = self._get_condition_sql(model, schema_editor)
    if self.expressions:
        # Functional index: wrap each expression for index compilation.
        wrapped = []
        for expression in self.expressions:
            index_expression = IndexExpression(expression)
            index_expression.set_wrapper_classes(schema_editor.connection)
            wrapped.append(index_expression)
        expressions = ExpressionList(*wrapped).resolve_expression(
            Query(model, alias_cols=False),
        )
        fields = None
        col_suffixes = None
    else:
        # Column index: resolve the concrete fields and their sort suffixes.
        fields = [model._meta.get_field(field_name) for field_name, _ in self.fields_orders]
        col_suffixes = [suffix for _, suffix in self.fields_orders]
        expressions = None
    return schema_editor._create_index_sql(
        model,
        fields=fields,
        name=self.name,
        using=using,
        db_tablespace=self.db_tablespace,
        col_suffixes=col_suffixes,
        opclasses=self.opclasses,
        condition=condition,
        include=include,
        expressions=expressions,
        **kwargs,
    )
def format_field_value(self, field, val, connection, cast_type=False, **kwargs):
    """Render *val* as SQL; HllField values need their own compilation path.

    Raises ValueError if an HLL field receives a non-HLL value.
    """
    if not isinstance(field, HllField):
        # Non-HLL fields follow the normal formatting path.
        return super(HllConcatFunction, self).format_field_value(
            field, val, connection, cast_type=cast_type, **kwargs)
    if not isinstance(val, (HllValue, HllCombinedExpression)):
        raise ValueError('val should be HllValue instance')
    compiler = Query(field.model).get_compiler(connection=connection)
    sql, params = val.as_sql(compiler, connection)
    if cast_type:
        db_type = get_field_db_type(field, connection)
        sql = 'CAST(%s AS %s)' % (sql, db_type)
    return sql, tuple(params)
def constraint_sql(self, model, schema_editor):
    """Assemble a Statement rendering this constraint's template."""
    query = Query(model, alias_cols=False)
    compiler = query.get_compiler(connection=schema_editor.connection)
    expressions = self._get_expressions(schema_editor, query)
    table = model._meta.db_table
    condition = self._get_condition_sql(compiler, schema_editor, query)
    include = [model._meta.get_field(field_name).column for field_name in self.include]
    return Statement(
        self.template,
        table=Table(table, schema_editor.quote_name),
        name=schema_editor.quote_name(self.name),
        index_type=self.index_type,
        expressions=Expressions(table, expressions, compiler, schema_editor.quote_value),
        where=" WHERE (%s)" % condition if condition else "",
        include=schema_editor._index_include_sql(model, include),
        deferrable=schema_editor._deferrable_constraint_sql(self.deferrable),
    )
def paginate(db_model: Type['models.Model'],
             serializer: Type['serializers.Serializer'],
             request: Request,
             query: Query = None,
             query_params: dict = None):
    """Filter, order and paginate rows of *db_model*, serializing one page.

    Pagination controls (``page_number``, ``page_size``, ``order_by``) are read
    from *query_params* (defaulting to the request's query string); everything
    else is passed to ``filter()``. Returns a Response with the page payload.
    """
    if query is None:
        # BUG FIX: was `if not query`, which also discarded an explicitly
        # provided queryset whenever it was empty (empty querysets are falsy).
        query = db_model.objects
    if query_params is None:
        query_params = {k: v[0] for k, v in dict(request.query_params).items()}
    else:
        # Work on a shallow copy so the caller's dict is not mutated by the
        # pops and list rewrites below.
        query_params = dict(query_params)
    for key, val in query_params.items():
        # Comma-separated lists for __in lookups and ordering.
        if key.endswith('__in') or key == 'order_by':
            query_params[key] = [s.strip() for s in val.split(',')]
    page_number = int(query_params.pop('page_number', 1))
    page_size = int(query_params.pop('page_size', 10))
    order_by = query_params.pop('order_by', [])
    query = query.filter(**query_params)
    query = query.order_by(*order_by)
    count = query.count()
    start = (page_number - 1) * page_size
    result_models = query[start:start + page_size]
    items = [
        serializer(obj, context={'request': request}).data
        for obj in result_models
    ]
    return Response(
        data={
            'total_size': count,
            'page_number': page_number,
            'page_size': len(items),
            'items': items,
        })
def constraint_sql(self, model, schema_editor):
    """Fill the constraint template with name, expressions and options."""
    query = Query(model, alias_cols=False)
    compiler = query.get_compiler(connection=schema_editor.connection)
    rendered = self._get_expression_sql(compiler, schema_editor, query)
    condition = self._get_condition_sql(compiler, schema_editor, query)
    opts = model._meta
    include = [opts.get_field(field_name).column for field_name in self.include]
    return self.template % {
        "name": schema_editor.quote_name(self.name),
        "index_type": self.index_type,
        "expressions": ", ".join(rendered),
        "include": schema_editor._index_include_sql(model, include),
        "where": " WHERE (%s)" % condition if condition else "",
        "deferrable": schema_editor._deferrable_constraint_sql(self.deferrable),
    }
def replace_media_urls(old='stories', new='news/old', app_label='news',
                       model='newsitem', field='full_text', new_path='/media/'):
    """Rewrite legacy absolute image URLs stored in *field* to local media paths.

    Returns the total number of substitutions made across matching rows.
    NOTE(review): relies on pre-1.7 Django APIs (get_model, Query.add_filter).
    """
    from django.db.models import get_model
    from django.db.models.query import QuerySet
    from django.db.models.sql import Query

    BASE_NEW_PATH = new_path
    BASE_OLD_RE = r'http:\/\/w{3}\.otdohniomsk\.ru\/images\/'
    BASE_OLD_PATH = 'http://www.otdohniomsk.ru/images/'
    old_path = BASE_OLD_PATH + old
    new_path = BASE_NEW_PATH + new
    pattern = re.compile(BASE_OLD_RE + old)
    total_count = 0
    model_cls = get_model(app_label, model)
    sql_query = Query(model_cls)
    # Only visit rows that can possibly contain the legacy URL.
    sql_query.add_filter(('%s__contains' % field, old_path))
    for item in QuerySet(model_cls, sql_query):
        item.__dict__[field], count = pattern.subn(new_path, item.__dict__[field])
        total_count += count
        item.save()
    return total_count
def get_model_siblings(Model, instance, queryset):
    """Locate *instance* inside *queryset*'s ordering and return its siblings.

    Recompiles the queryset's joins/where/ordering into raw SQL fragments and
    delegates row lookup to get_row_number / get_row_siblings.
    """
    def map_ordering(x):
        # Translate '-field' / 'field' into 'alias.field DESC/ASC'.
        # NOTE: closes over `alias`, which is assigned later in the join loop
        # below (late binding is intentional — do not hoist this call).
        asc = x[0:1] != '-'
        name = x if asc else x[1:]
        operator = 'ASC' if asc else 'DESC'
        return '{}{} {}'.format(alias, name, operator)

    pk = Model._meta.pk.name
    ordering = queryset.query.order_by
    alias = ''
    # Fall back to the model's default ordering when the queryset has none.
    if len(ordering) == 0 and len(Model._meta.ordering):
        ordering = Model._meta.ordering
    # Guarantee a deterministic total order by appending '-pk' if absent.
    if not any(map(lambda x: x == pk or x == '-{}'.format(pk), ordering)):
        ordering = list(ordering) + ['-{}'.format(pk)]
    compiler = Query(Model).get_compiler(connection=connection)
    join_queries = []
    join_args = []
    # BaseTable entries define the table alias; everything else is a JOIN.
    for key, value in queryset.query.alias_map.items():
        if isinstance(value, BaseTable):
            alias = '{}.'.format(key)
        else:
            query, args = value.as_sql(compiler, connection)
            join_queries.append(query)
            join_args.extend(args)
    join_sql = ' '.join(join_queries)
    where_query, where_args = queryset.query.where.as_sql(compiler, connection)
    where_sql = 'WHERE {}'.format(where_query) if where_query != '' else ''
    # `alias` is now set (if a BaseTable was seen), so ordering can be mapped.
    order_by = list(map(map_ordering, ordering))
    order_by_sql = 'ORDER BY {}'.format(', '.join(order_by)) if len(order_by) else ''
    row_number = get_row_number(Model, instance, join_sql, join_args, where_sql, where_args, order_by_sql)
    if not row_number:
        # Instance not found in the (filtered) queryset.
        return {}
    return get_row_siblings(Model, row_number, join_sql, join_args, where_sql, where_args, order_by_sql)
def _get_check_sql(self, model, schema_editor):
    """Compile the check condition (with its annotations) into raw SQL."""
    query = Query(model=model)
    # Register the annotations the check expression may reference.
    for name, annotation in self.annotations.items():
        query.add_annotation(annotation, name)
    where = query.build_where(self.check)
    compiler = query.get_compiler(connection=schema_editor.connection)
    sql, params = where.as_sql(compiler, schema_editor.connection)
    quoted = tuple(map(schema_editor.quote_value, params))
    return sql % quoted
def test_rename_table_references_without_alias(self):
    """Renaming the table updates references when alias_cols is disabled."""
    compiler = Query(Person, alias_cols=False).get_compiler(connection=connection)
    table = Person._meta.db_table
    resolved = ExpressionList(
        IndexExpression(Upper('last_name')),
        IndexExpression(F('first_name')),
    ).resolve_expression(compiler.query)
    expressions = Expressions(
        table=table,
        expressions=resolved,
        compiler=compiler,
        quote_value=self.editor.quote_value,
    )
    expressions.rename_table_references(table, 'other')
    self.assertIs(expressions.references_table(table), False)
    self.assertIs(expressions.references_table('other'), True)
    expected_str = '(UPPER(%s)), %s' % (
        self.editor.quote_name('last_name'),
        self.editor.quote_name('first_name'),
    )
    self.assertEqual(str(expressions), expected_str)
def get_queryset(self):
    """Return a fresh QuerySet bound to this manager's model, db and hints."""
    base_query = Query(self.model)
    return QuerySet(self.model, base_query, self._db, self._hints)
def _stringify_query():
    """
    Serializes query object, so that it can be used to create cache key.

    We can't just do pickle because order of keys in dicts is arbitrary, we can
    use str(query) which compiles it to SQL, but it's too slow, so we use
    json.dumps with sort_keys=True and object hooks.

    NOTE: I like this function no more than you, it's messy and pretty hard
    linked to django internals. I just don't have nicer solution for now.
    Probably the best way out of it is optimizing SQL generation, which would
    be valuable by itself. The problem with it is that any significant
    optimization will most likely require a major refactor of sql.Query class,
    which is a substantial part of ORM.
    """
    import simplejson as json
    from datetime import datetime, date
    from django.db.models.fields import Field
    from django.db.models.sql.where import Constraint, WhereNode, ExtraWhere
    from django.db.models.sql import Query
    from django.db.models.sql.aggregates import Aggregate
    from django.db.models.sql.datastructures import RawValue, Date
    from django.db.models.sql.expressions import SQLEvaluator

    # Per-class tuples of attributes that uniquely describe an instance.
    attrs = {}
    attrs[WhereNode] = ('connector', 'negated', 'children', 'subtree_parents')
    attrs[ExtraWhere] = ('sqls', 'params')
    attrs[Aggregate] = ('source', 'is_summary', 'col', 'extra')
    # NOTE(review): ('value') is a plain string, not a 1-tuple — iterating it
    # yields characters; likely intended ('value',). Left unchanged here.
    attrs[RawValue] = ('value')
    attrs[Date] = ('col', 'lookup_type')

    # Derive Query's attribute list from a blank instance, minus caches.
    q = Query(None)
    q_keys = q.__dict__.keys()
    q_ignored = ['join_map', 'dupe_avoidance', '_extra_select_cache', '_aggregate_select_cache']
    attrs[Query] = tuple(sorted(set(q_keys) - set(q_ignored)))

    # Intern attribute names for faster repeated getattr/dict use (Python 2).
    for k, v in attrs.items():
        attrs[k] = map(intern, v)

    def encode_object(obj):
        # json.dumps `default` hook: map ORM internals to stable primitives.
        if isinstance(obj, set):
            return sorted(obj)
        elif isinstance(obj, type):
            return '%s.%s' % (obj.__module__, obj.__name__)
        elif hasattr(obj, '__uniq_key__'):
            return (obj.__class__, obj.__uniq_key__())
        elif isinstance(obj, (datetime, date)):
            return str(obj)
        elif isinstance(obj, Constraint):
            return (obj.alias, obj.col)
        elif isinstance(obj, Field):
            return (obj.model, obj.name)
        elif isinstance(obj, QuerySet):
            return (obj.__class__, obj.query)
        elif obj.__class__ in attrs:
            return (obj.__class__, [getattr(obj, attr) for attr in attrs[obj.__class__]])
        elif isinstance(obj, Aggregate):
            return (obj.__class__, [getattr(obj, attr) for attr in attrs[Aggregate]])
        elif isinstance(obj, Query):
            # for custom subclasses of Query
            return (obj.__class__, [getattr(obj, attr) for attr in attrs[Query]])
        elif isinstance(obj, SQLEvaluator):
            return (obj.__class__, obj.expression.__dict__.items())
        else:
            raise TypeError("Can't encode %s" % repr(obj))

    def stringify_query(query):
        # HACK: Catch TypeError and reraise it as ValueError
        # since django hides it and behave weird when gets a TypeError in Queryset.iterator()
        try:
            return json.dumps(query, default=encode_object, skipkeys=True, sort_keys=True, separators=(',', ':'))
        except TypeError as e:
            raise ValueError(*e.args)

    return stringify_query
def get_related_where(self, fk_name, using, type):
    """Build WHERE fragments (and params) counting matching related m2m rows.

    NOTE(review): relies on pre-1.6 Query APIs (add_filter/add_extra/
    add_count_column) and Python 2 dict.iteritems().
    """
    pk_column = self.model._meta.pk.get_attname_column()[1]
    related_where = ["%s=%s.%s" % (pk_column, type, fk_name)]
    related_query = Query(self.manager.related.model)
    # Apply the configured filter to the related model's query.
    for name, value in self.filter.iteritems():
        related_query.add_filter((name, value))
    m2m_column = self.manager.related.field.m2m_column_name()
    related_query.add_extra(
        None, None,
        ["%s=%s.%s" % (pk_column, type, m2m_column)],
        None, None, None)
    # Turn the query into a correlated COUNT(*) subquery.
    related_query.add_count_column()
    related_query.clear_ordering(force_empty=True)
    related_query.default_cols = False
    compiler = related_query.get_compiler(using=using, connection=connection)
    related_filter_where, related_where_params = compiler.as_sql()
    if related_filter_where is not None:
        related_where.append('(' + related_filter_where + ') > 0')
    return related_where, related_where_params
def constraint_sql(self, model, schema_editor):
    # NOTE(review): this body appears truncated — only the Query construction
    # is visible; as written the method implicitly returns None and never uses
    # `schema_editor`. Confirm against the original source.
    query = Query(model)
def get_related_where(self, fk_name, using, type):
    """Return WHERE clauses and params restricting rows by a related-row count."""
    pk_col = self.model._meta.pk.get_attname_column()[1]
    related_where = ["%s=%s.%s" % (pk_col, type, fk_name)]
    related_query = Query(self.manager.related.model)
    # Narrow the related query with the configured filter.
    for name, value in self.filter.iteritems():
        related_query.add_filter((name, value))
    related_query.add_extra(
        None,
        None,
        ["%s=%s.%s" % (pk_col, type, self.manager.related.field.m2m_column_name())],
        None,
        None,
        None,
    )
    related_query.add_count_column()
    related_query.clear_ordering(force_empty=True)
    related_query.default_cols = False
    related_filter_where, related_where_params = related_query.get_compiler(
        using=using, connection=connection).as_sql()
    if related_filter_where is not None:
        related_where.append('(' + related_filter_where + ') > 0')
    return related_where, related_where_params
def _make_where(self, *args, **kwargs):
    """Build a WhereNode from positional Q objects plus keyword lookups."""
    query = Query(TestModel)
    for condition in args:
        query.add_q(condition)
    query.add_q(Q(**kwargs))
    return query.where
def clone(self, klass=None, memo=None, **kwargs):
    """Clone the query and propagate the ``is_query_all`` flag to the copy."""
    cloned = Query.clone(self, klass, memo, **kwargs)
    cloned.is_query_all = self.is_query_all
    return cloned
def setUp(self):
    """Create a compiler for TestModel bound to the default connection."""
    query = Query(TestModel)
    self.compiler = query.get_compiler(connection=connection)
def clone(self, klass=None, memo=None, **kwargs):
    """Delegate cloning to the base Query implementation unchanged."""
    cloned = Query.clone(self, klass, memo, **kwargs)
    return cloned
def _stringify_query():
    """
    Serializes query object, so that it can be used to create cache key.

    We can't just do pickle because order of keys in dicts is arbitrary, we can
    use str(query) which compiles it to SQL, but it's too slow, so we use
    json.dumps with sort_keys=True and object hooks.

    NOTE: I like this function no more than you, it's messy and pretty hard
    linked to django internals. I just don't have nicer solution for now.
    Probably the best way out of it is optimizing SQL generation, which would
    be valuable by itself. The problem with it is that any significant
    optimization will most likely require a major refactor of sql.Query class,
    which is a substantial part of ORM.
    """
    from datetime import datetime, date, time, timedelta
    from decimal import Decimal
    from django.db.models.expressions import ExpressionNode, F
    from django.db.models.fields import Field
    from django.db.models.fields.related import ManyToOneRel, OneToOneRel
    from django.db.models.sql.where import Constraint, WhereNode, ExtraWhere, \
        EverythingNode, NothingNode
    from django.db.models.sql import Query
    from django.db.models.sql.aggregates import Aggregate
    from django.db.models.sql.datastructures import Date
    from django.db.models.sql.expressions import SQLEvaluator

    # Per-class tuples of attributes that uniquely describe an instance.
    attrs = {}

    # Try to not require geo libs
    try:
        from django.contrib.gis.db.models.sql.where import GeoWhereNode
    except:  # either ImportError or GEOSException — deliberate best-effort
        GeoWhereNode = WhereNode

    # A new things in Django 1.6
    try:
        from django.db.models.sql.where import EmptyWhere, SubqueryConstraint
        attrs[EmptyWhere] = ()
        attrs[SubqueryConstraint] = ('alias', 'columns', 'targets', 'query_object')
    except ImportError:
        pass

    # RawValue removed in Django 1.7
    try:
        from django.db.models.sql.datastructures import RawValue
        attrs[RawValue] = ('value',)
    except ImportError:
        pass

    # Moved in Django 1.7
    try:
        from django.contrib.contenttypes.fields import GenericRel
    except ImportError:
        from django.contrib.contenttypes.generic import GenericRel

    # New things in Django 1.7
    try:
        from django.db.models.lookups import Lookup
        from django.db.models.sql.datastructures import Col
        attrs[Lookup] = ('lhs', 'rhs')
        attrs[Col] = ('alias', 'target', 'source')
    except ImportError:
        # Placeholder so the isinstance(obj, Lookup) check below stays valid.
        class Lookup(object):
            pass

    attrs[WhereNode] = attrs[GeoWhereNode] = attrs[ExpressionNode] \
        = ('connector', 'negated', 'children')
    attrs[SQLEvaluator] = ('expression',)
    attrs[ExtraWhere] = ('sqls', 'params')
    attrs[Aggregate] = ('source', 'is_summary', 'col', 'extra')
    attrs[Date] = ('col', 'lookup_type')
    attrs[F] = ('name',)
    attrs[ManyToOneRel] = attrs[OneToOneRel] = attrs[GenericRel] = ('field',)
    attrs[EverythingNode] = attrs[NothingNode] = ()

    # Derive Query's attribute list from a blank instance, minus caches.
    q = Query(None)
    q_keys = q.__dict__.keys()
    q_ignored = ['join_map', 'dupe_avoidance', '_extra_select_cache', '_aggregate_select_cache', 'used_aliases']
    attrs[Query] = tuple(sorted(set(q_keys) - set(q_ignored)))

    try:
        for k, v in attrs.items():
            attrs[k] = map(intern, v)
    except NameError:
        # No intern() in Python 3
        pass

    def encode_attrs(obj, cls=None):
        # Encode an object as (class, [attr values]) using the attrs table.
        return (obj.__class__, [getattr(obj, attr) for attr in attrs[cls or obj.__class__]])

    def encode_object(obj):
        # json.dumps `default` hook: map ORM internals to stable primitives.
        # NOTE(review): json, QuerySet and STRICT_STRINGIFY are expected to be
        # defined at module level — confirm against the enclosing module.
        if isinstance(obj, set):
            return sorted(obj)
        elif isinstance(obj, type):
            return '%s.%s' % (obj.__module__, obj.__name__)
        elif hasattr(obj, '__uniq_key__'):
            return (obj.__class__, obj.__uniq_key__())
        elif isinstance(obj, (datetime, date, time, timedelta, Decimal)):
            return str(obj)
        elif isinstance(obj, Constraint):
            return (obj.alias, obj.col)
        elif isinstance(obj, Field):
            return (obj.model, obj.name)
        elif obj.__class__ in attrs:
            return encode_attrs(obj)
        elif isinstance(obj, QuerySet):
            return (obj.__class__, obj.query)
        elif isinstance(obj, Aggregate):
            return encode_attrs(obj, Aggregate)
        elif isinstance(obj, Query):
            return encode_attrs(obj, Query)  # for custom subclasses of Query
        elif isinstance(obj, Lookup):
            return encode_attrs(obj, Lookup)
        # Fall back for unknown objects
        elif not STRICT_STRINGIFY and hasattr(obj, '__dict__'):
            return (obj.__class__, obj.__dict__)
        else:
            raise TypeError("Can't stringify %s" % repr(obj))

    def stringify_query(query):
        # HACK: Catch TypeError and reraise it as ValueError
        # since django hides it and behave weird when gets a TypeError in Queryset.iterator()
        try:
            return json.dumps(query, default=encode_object, skipkeys=True, sort_keys=True, separators=(',', ':'))
        except TypeError as e:
            raise ValueError(*e.args)

    return stringify_query