def add_special_index(model_class, field_name, indexer, operator, value=None):
    """Record a special (djangae) index for ``field_name`` on ``model_class``.

    The concrete index type is derived by ``indexer`` from ``operator`` and
    ``value``. If the index is already registered this is a no-op. In
    production (or during tests, unless GENERATE_SPECIAL_INDEXES_DURING_TESTING
    is set) a missing index is an error, because djangaeidx.yaml must be
    generated ahead of deployment.
    """
    from djangae.utils import in_testing
    from django.conf import settings

    index_type = indexer.prepare_index_type(operator, value)
    field_name = field_name.encode("utf-8")  # Make sure we are working with strings

    load_special_indexes()

    if special_index_exists(model_class, field_name, index_type):
        # Nothing to do - the index is already declared.
        return

    table = _get_table_from_model(model_class)
    generate_in_tests = getattr(settings, "GENERATE_SPECIAL_INDEXES_DURING_TESTING", False)
    if environment.is_production_environment() or (in_testing() and not generate_in_tests):
        raise RuntimeError(
            "There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                table, field_name, index_type
            )
        )

    # Register the new index type and persist the updated index file.
    field_index_types = _project_special_indexes.setdefault(table, {}).setdefault(field_name, [])
    field_index_types.append(str(index_type))
    write_special_indexes()
def add_special_index(model_class, field_name, indexer, operator, value=None):
    """Ensure a special index exists for ``field_name`` of ``model_class``.

    Asks ``indexer`` to turn ``operator``/``value`` into an index type, then
    registers it in the project index registry and writes it out. Raises
    ``RuntimeError`` when running in production, or in tests without
    GENERATE_SPECIAL_INDEXES_DURING_TESTING, since indexes cannot be added
    at that point.
    """
    from djangae.utils import in_testing
    from django.conf import settings

    index_kind = indexer.prepare_index_type(operator, value)
    field_name = field_name.encode("utf-8")  # Make sure we are working with strings

    load_special_indexes()
    if special_index_exists(model_class, field_name, index_kind):
        return  # Already registered - nothing to do.

    testing_without_generation = in_testing() and not getattr(
        settings, "GENERATE_SPECIAL_INDEXES_DURING_TESTING", False
    )
    if environment.is_production_environment() or testing_without_generation:
        raise RuntimeError(
            "There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                _get_table_from_model(model_class), field_name, index_kind
            )
        )

    # Append the stringified index type under table -> field in the registry.
    _project_special_indexes.setdefault(
        _get_table_from_model(model_class), {}
    ).setdefault(field_name, []).append(str(index_kind))
    write_special_indexes()
def add_special_index(model_class, field_name, index_type):
    """Register ``index_type`` as a special index for ``field_name``.

    No-op if the index already exists. Raises ``RuntimeError`` on production,
    or during tests unless GENERATE_SPECIAL_INDEXES_DURING_TESTING is enabled,
    because djangaeidx.yaml must be generated in advance.
    """
    from djangae.utils import on_production, in_testing
    from django.conf import settings

    load_special_indexes()

    if special_index_exists(model_class, field_name, index_type):
        # Index already known; nothing to record.
        return

    indexes_are_frozen = on_production() or (
        in_testing() and not getattr(settings, "GENERATE_SPECIAL_INDEXES_DURING_TESTING", False)
    )
    if indexes_are_frozen:
        raise RuntimeError(
            "There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                _get_table_from_model(model_class), field_name, index_type
            )
        )

    # Store the index under table -> field and persist the index file.
    table_entry = _special_indexes.setdefault(_get_table_from_model(model_class), {})
    table_entry.setdefault(field_name, []).append(str(index_type))
    write_special_indexes()
def add_special_index(model_class, field_name, index_type):
    """Add a special datastore index for ``field_name`` on ``model_class``.

    Does nothing if already present. Outside of local development (production,
    or tests without GENERATE_SPECIAL_INDEXES_DURING_TESTING) a missing index
    raises ``RuntimeError`` telling the developer what to add to
    djangaeidx.yaml.
    """
    from djangae.utils import on_production, in_testing
    from django.conf import settings

    load_special_indexes()
    if special_index_exists(model_class, field_name, index_type):
        return

    may_not_generate = in_testing() and not getattr(
        settings, "GENERATE_SPECIAL_INDEXES_DURING_TESTING", False
    )
    if on_production() or may_not_generate:
        raise RuntimeError(
            "There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                _get_table_from_model(model_class), field_name, index_type
            )
        )

    # Record the index type for this table/field pairing, then flush to disk.
    _special_indexes.setdefault(
        _get_table_from_model(model_class), {}
    ).setdefault(field_name, []).append(str(index_type))
    write_special_indexes()
def __init__(self, connection, query, keys_only=False):
    """Translate a Django SQL ``query`` into datastore query state.

    Extracts limits, ordering, queried columns, distinct/date-transform
    handling, projection eligibility and the WHERE tree (via ``parse_dnf``)
    from the Django query object.

    :param connection: the djangae database connection wrapper.
    :param query: a django.db.models.sql.Query instance (Django 1.5/1.6 era
        internals are assumed - see the SelectInfo comments below).
    :param keys_only: force a keys-only datastore query.
    :raises EmptyResultSet: if the WHERE clause is provably empty.
    :raises CouldBeSupportedError: for select shapes we don't handle yet.
    """
    self.original_query = query
    self.connection = connection
    self.limits = (query.low_mark, query.high_mark)

    opts = query.get_meta()

    self.distinct = query.distinct
    self.distinct_values = set()
    self.distinct_on_field = None
    self.distinct_field_convertor = None
    self.queried_fields = []
    self.model = query.model
    self.pk_col = opts.pk.column
    self.is_count = query.aggregates
    self.extra_select = query.extra_select
    self._set_db_table()
    self._validate_query_is_possible(query)

    if not query.default_ordering:
        self.ordering = query.order_by
    else:
        self.ordering = query.order_by or opts.ordering

    if self.ordering:
        # Cross-table ("__") orderings aren't supported on the datastore, so drop them.
        ordering = [x for x in self.ordering if not (isinstance(x, basestring) and "__" in x)]
        if len(ordering) < len(self.ordering):
            if not on_production() and not in_testing():
                diff = set(self.ordering) - set(ordering)
                log_once(
                    DJANGAE_LOG.warning,
                    "The following orderings were ignored as cross-table orderings are not supported on the datastore: %s",
                    diff
                )
        self.ordering = ordering

    # If the query uses defer()/only() then we need to process deferred. We have to get all
    # deferred columns for all (concrete) inherited models and then only include columns if
    # they appear in that list
    deferred_columns = {}
    query.deferred_to_data(deferred_columns, query.deferred_to_columns_cb)
    inherited_db_tables = [x._meta.db_table for x in get_concrete_parents(self.model)]
    only_load = list(chain(*[list(deferred_columns.get(x, [])) for x in inherited_db_tables]))

    if query.select:
        for x in query.select:
            if hasattr(x, "field"):
                # In Django 1.6+ 'x' above is a SelectInfo (which is a tuple subclass),
                # whereas in 1.5 it's a tuple; in 1.6 x[1] == Field, but 1.5 x[1] == unicode
                # (column name)
                if x.field is None:
                    column = x.col.col[1]  # This is the column we are getting
                    lookup_type = x.col.lookup_type
                    self.distinct_on_field = column

                    # This whole section of code is weird, and is probably better implemented
                    # as a custom Query type (like QueryByKeys). Basically, appengine gives
                    # back dates as a time since the epoch, we convert it to a date, then
                    # floor it, then convert it back in our transform function. The transform
                    # is applied when the results are read back so that only distinct values
                    # are returned. This is very hacky...
                    if lookup_type in DATE_TRANSFORMS:
                        # FIX: bind lookup_type as a default argument so the closure captures
                        # the value at definition time, not the loop variable at call time.
                        self.distinct_field_convertor = (
                            lambda value, lt=lookup_type: DATE_TRANSFORMS[lt](self.connection, value)
                        )
                    else:
                        raise CouldBeSupportedError("Unhandled lookup_type %s" % lookup_type)
                else:
                    column = x.field.column
            else:
                column = x[1]

            # Respect defer()/only(): skip columns that were deferred away.
            if only_load and column not in only_load:
                continue

            self.queried_fields.append(column)
    else:
        self.queried_fields = [
            x.column for x in opts.fields if (not only_load) or (x.column in only_load)
        ]

    self.keys_only = keys_only or self.queried_fields == [opts.pk.column]

    assert self.queried_fields

    # Projection queries don't return results unless all projected fields are indexed on the
    # model. This means if you add a field, and all fields on the model are projectable, you
    # will never get any results until you've resaved all of them. Because it's not possible
    # to detect this situation, we only try a projection query if a subset of fields was
    # specified (e.g. values_list('bananas')) which makes the behaviour a bit more
    # predictable. It would be nice at some point to add some kind of force_projection()
    # thing on a queryset that would do this whenever possible, but that's for the future,
    # maybe.
    try_projection = (self.keys_only is False) and bool(self.queried_fields)

    if not self.queried_fields:
        self.queried_fields = [x.column for x in opts.fields]

    self.excluded_pks = set()
    self.has_inequality_filter = False
    self.all_filters = []
    self.results = None
    self.gae_query = None

    projection_fields = []

    if try_projection:
        for field in self.queried_fields:
            # We don't include the primary key in projection queries...
            if field == self.pk_col:
                order_fields = set([x.strip("-") for x in self.ordering])
                if self.pk_col in order_fields or "pk" in order_fields:
                    # If we were ordering on __key__ we can't do a projection at all.
                    # BUGFIX: this previously assigned to `self.projection_fields` (an
                    # attribute that is never read), leaving any already-collected fields
                    # in the local `projection_fields` list, so the projection was NOT
                    # disabled as intended. Clear the local accumulator instead.
                    projection_fields = []
                    break
                continue

            # Text and byte fields aren't indexed, so we can't do a projection query
            f = get_field_from_column(self.model, field)
            if not f:
                raise CouldBeSupportedError("Attemping a cross-table select or dates query, or something?!")
            assert f  # If this happens, we have a cross-table select going on!  #FIXME
            db_type = f.db_type(connection)

            if db_type in ("bytes", "text"):
                projection_fields = []
                break

            projection_fields.append(field)

    self.projection = list(set(projection_fields)) or None
    if opts.parents:
        # Multi-table inheritance: entities span parent tables, so no projection.
        self.projection = None

    if isinstance(query.where, EmptyWhere):
        # Empty where means return nothing!
        raise EmptyResultSet()
    else:
        from dnf import parse_dnf
        self.where, columns = parse_dnf(query.where, self.connection)

    # DISABLE PROJECTION IF WE ARE FILTERING ON ONE OF THE PROJECTION_FIELDS
    for field in self.projection or []:
        if field in columns:
            self.projection = None
            break

    try:
        # If the PK was queried, we switch it in our queried fields store with __key__
        pk_index = self.queried_fields.index(self.pk_col)
        self.queried_fields[pk_index] = "__key__"
    except ValueError:
        pass
def __init__(self, connection, query, keys_only=False, all_fields=False):
    """Build datastore query state from a Django SQL ``query``.

    Captures limits, ordering, queried columns, date-part distinct handling
    (year/month/day), projection eligibility and the parsed WHERE tree.

    :param connection: the djangae database connection wrapper.
    :param query: a django.db.models.sql.Query instance (Django 1.5/1.6 era
        internals are assumed - see the tuple/SelectInfo handling below).
    :param keys_only: if True, only the primary key column is queried.
    :param all_fields: if True, skip inspecting query.select and fall through
        to querying all model fields.
    :raises NotSupportedError: for select/lookup shapes we don't handle.
    """
    self.original_query = query
    self.connection = connection
    self.limits = (query.low_mark, query.high_mark)

    opts = query.get_meta()

    if not query.default_ordering:
        self.ordering = query.order_by
    else:
        self.ordering = query.order_by or opts.ordering

    if self.ordering:
        # Cross-table ("__") orderings aren't supported on the datastore, so drop them
        # and warn once in local development.
        ordering = [x for x in self.ordering if not (isinstance(x, basestring) and "__" in x)]
        if len(ordering) < len(self.ordering):
            if not on_production() and not in_testing():
                diff = set(self.ordering) - set(ordering)
                log_once(DJANGAE_LOG.warning, "The following orderings were ignored as cross-table orderings are not supported on the datastore: %s", diff)
        self.ordering = ordering

    self.distinct = query.distinct
    self.distinct_values = set()
    self.distinct_on_field = None
    self.distinct_field_convertor = None

    self.queried_fields = []
    if keys_only:
        self.queried_fields = [opts.pk.column]
    elif not all_fields:
        for x in query.select:
            if isinstance(x, tuple):
                # Django < 1.6 compatibility: x is (table, column)
                self.queried_fields.append(x[1])
            else:
                self.queried_fields.append(x.col[1])

                # Date-part selects (dates() queries): record which field we are
                # distinct-ing on and how to convert raw values back to date parts.
                if x.lookup_type == 'year':
                    assert self.distinct_on_field is None
                    self.distinct_on_field = x.col[1]
                    self.distinct_field_convertor = field_conv_year_only
                elif x.lookup_type == 'month':
                    assert self.distinct_on_field is None
                    self.distinct_on_field = x.col[1]
                    self.distinct_field_convertor = field_conv_month_only
                elif x.lookup_type == 'day':
                    assert self.distinct_on_field is None
                    self.distinct_on_field = x.col[1]
                    self.distinct_field_convertor = field_conv_day_only
                else:
                    raise NotSupportedError("Unhandled lookup type: {0}".format(x.lookup_type))

    # Projection queries don't return results unless all projected fields are
    # indexed on the model. This means if you add a field, and all fields on the model
    # are projectable, you will never get any results until you've resaved all of them.
    # Because it's not possible to detect this situation, we only try a projection query if a
    # subset of fields was specified (e.g. values_list('bananas')) which makes the behaviour a
    # bit more predictable. It would be nice at some point to add some kind of force_projection()
    # thing on a queryset that would do this whenever possible, but that's for the future, maybe.
    try_projection = bool(self.queried_fields)

    if not self.queried_fields:
        self.queried_fields = [x.column for x in opts.fields]

    self.connection = connection
    self.pk_col = opts.pk.column
    self.model = query.model
    self.is_count = query.aggregates
    self.keys_only = False  # FIXME: This should be used where possible
    self.exact_pk = None
    self.included_pks = []
    self.excluded_pks = set()
    self.has_inequality_filter = False
    self.all_filters = []
    self.results = None
    self.extra_select = query.extra_select
    self.gae_query = None

    self._set_db_table()
    self._validate_query_is_possible(query)

    projection_fields = []

    if try_projection:
        for field in self.queried_fields:
            # We don't include the primary key in projection queries...
            if field == self.pk_col:
                continue

            # Text and byte fields aren't indexed, so we can't do a
            # projection query
            f = get_field_from_column(self.model, field)
            if not f:
                raise NotSupportedError("Attemping a cross-table select. Maybe? #FIXME")
            assert f  # If this happens, we have a cross-table select going on!  #FIXME
            db_type = f.db_type(connection)

            if db_type in ("bytes", "text"):
                # One unprojectable field disables projection entirely.
                projection_fields = []
                break

            projection_fields.append(field)

    self.projection = list(set(projection_fields)) or None
    if opts.parents:
        # Multi-table inheritance: no projection possible.
        self.projection = None

    self.where = self.parse_where_and_check_projection(query.where)

    try:
        # If the PK was queried, we switch it in our queried
        # fields store with __key__
        pk_index = self.queried_fields.index(self.pk_col)
        self.queried_fields[pk_index] = "__key__"

        # If the only field queried was the key, then we can do a keys_only
        # query
        self.keys_only = len(self.queried_fields) == 1
    except ValueError:
        pass
def __init__(self, connection, query, keys_only=False):
    """Translate a Django SQL ``query`` into datastore query state.

    Captures limits, ordering, deferred/only column handling, queried columns,
    date-transform distinct handling, projection eligibility and the normalized
    WHERE tree (via ``normalize_query``).

    :param connection: the djangae database connection wrapper.
    :param query: a django.db.models.sql.Query instance (Django 1.5/1.6 era
        internals are assumed - see the SelectInfo comments below).
    :param keys_only: force a keys-only datastore query.
    :raises EmptyResultSet: if the WHERE clause is provably empty.
    :raises CouldBeSupportedError: for select shapes we don't handle yet.
    """
    self.original_query = query
    self.connection = connection
    self.limits = (query.low_mark, query.high_mark)

    opts = query.get_meta()

    self.distinct = query.distinct
    self.distinct_values = set()
    self.distinct_on_field = None
    self.distinct_field_convertor = None
    self.queried_fields = []
    self.model = query.model
    self.pk_col = opts.pk.column
    self.is_count = query.aggregates
    self.extra_select = query.extra_select
    self._set_db_table()
    self._validate_query_is_possible(query)

    if not query.default_ordering:
        self.ordering = query.order_by
    else:
        self.ordering = query.order_by or opts.ordering

    if self.ordering:
        # Cross-table ("__") orderings aren't supported on the datastore, so drop them
        # and warn once in local development.
        ordering = [x for x in self.ordering if not (isinstance(x, basestring) and "__" in x)]
        if len(ordering) < len(self.ordering):
            if not on_production() and not in_testing():
                diff = set(self.ordering) - set(ordering)
                log_once(DJANGAE_LOG.warning, "The following orderings were ignored as cross-table orderings are not supported on the datastore: %s", diff)
        self.ordering = ordering

    # If the query uses defer()/only() then we need to process deferred. We have to get all
    # deferred columns for all (concrete) inherited models and then only include columns if
    # they appear in that list
    deferred_columns = {}
    query.deferred_to_data(deferred_columns, query.deferred_to_columns_cb)
    inherited_db_tables = [x._meta.db_table for x in get_concrete_parents(self.model)]
    only_load = list(chain(*[list(deferred_columns.get(x, [])) for x in inherited_db_tables]))

    if query.select:
        for x in query.select:
            if hasattr(x, "field"):
                # In Django 1.6+ 'x' above is a SelectInfo (which is a tuple subclass),
                # whereas in 1.5 it's a tuple
                # in 1.6 x[1] == Field, but 1.5 x[1] == unicode (column name)
                if x.field is None:
                    column = x.col.col[1]  # This is the column we are getting
                    lookup_type = x.col.lookup_type
                    self.distinct_on_field = column

                    # This whole section of code is weird, and is probably better implemented
                    # as a custom Query type (like QueryByKeys). Basically, appengine gives
                    # back dates as a time since the epoch, we convert it to a date, then
                    # floor it, then convert it back in our transform function. The transform
                    # is applied when the results are read back so that only distinct values
                    # are returned. This is very hacky...
                    if lookup_type in DATE_TRANSFORMS:
                        self.distinct_field_convertor = lambda value: DATE_TRANSFORMS[lookup_type](self.connection, value)
                    else:
                        raise CouldBeSupportedError("Unhandled lookup_type %s" % lookup_type)
                else:
                    column = x.field.column
            else:
                column = x[1]

            # Respect defer()/only(): skip columns that were deferred away.
            if only_load and column not in only_load:
                continue

            self.queried_fields.append(column)
    else:
        self.queried_fields = [x.column for x in opts.fields if (not only_load) or (x.column in only_load)]

    self.keys_only = keys_only or self.queried_fields == [opts.pk.column]

    assert self.queried_fields

    # Projection queries don't return results unless all projected fields are
    # indexed on the model. This means if you add a field, and all fields on the model
    # are projectable, you will never get any results until you've resaved all of them.
    # Because it's not possible to detect this situation, we only try a projection query if a
    # subset of fields was specified (e.g. values_list('bananas')) which makes the behaviour a
    # bit more predictable. It would be nice at some point to add some kind of force_projection()
    # thing on a queryset that would do this whenever possible, but that's for the future, maybe.
    try_projection = (self.keys_only is False) and bool(self.queried_fields)

    if not self.queried_fields:
        self.queried_fields = [x.column for x in opts.fields]

    self.excluded_pks = set()
    self.has_inequality_filter = False
    self.all_filters = []
    self.results = None
    self.gae_query = None

    projection_fields = []

    if try_projection:
        for field in self.queried_fields:
            # We don't include the primary key in projection queries...
            if field == self.pk_col:
                continue

            # Text and byte fields aren't indexed, so we can't do a
            # projection query
            f = get_field_from_column(self.model, field)
            if not f:
                raise CouldBeSupportedError("Attemping a cross-table select or dates query, or something?!")
            assert f  # If this happens, we have a cross-table select going on!  #FIXME
            db_type = f.db_type(connection)

            if db_type in ("bytes", "text"):
                # One unprojectable field disables projection entirely.
                projection_fields = []
                break

            projection_fields.append(field)

    self.projection = list(set(projection_fields)) or None
    if opts.parents:
        # Multi-table inheritance: no projection possible.
        self.projection = None

    # normalize_query fills `columns` with the set of filtered column names.
    columns = set()

    if isinstance(query.where, EmptyWhere):
        # Empty where means return nothing!
        raise EmptyResultSet()
    else:
        self.where = normalize_query(query.where, self.connection, filtered_columns=columns)

    # DISABLE PROJECTION IF WE ARE FILTERING ON ONE OF THE PROJECTION_FIELDS
    for field in self.projection or []:
        if field in columns:
            self.projection = None
            break

    try:
        # If the PK was queried, we switch it in our queried
        # fields store with __key__
        pk_index = self.queried_fields.index(self.pk_col)
        self.queried_fields[pk_index] = "__key__"
    except ValueError:
        pass