def _IndexXmlFromIndexes(self, indexes, counts):
  """Create <datastore-indexes> XML for the given indexes and query counts.

  Args:
    indexes: a list of datastore_index.Index objects that are the
      required indexes.
    counts: a list of integers that are the corresponding counts.

  Returns:
    the corresponding XML, with root node <datastore-indexes>.
  """
  pieces = ['<datastore-indexes>']
  for index, count in zip(indexes, counts):
    # Pluralize "time" only when the count is not exactly 1.
    suffix = 's' if count != 1 else ''
    pieces.append(' <!-- Used %d time%s in query history -->' % (count, suffix))
    kind, ancestor, props = datastore_index.IndexToKey(index)
    pieces.append(datastore_index.IndexXmlForQuery(kind, ancestor, props))
  pieces.append('</datastore-indexes>')
  # Join with newlines and keep a trailing newline at end of document.
  return '\n'.join(pieces) + '\n'
def _Dynamic_RunQuery(self, query, query_result):
  """Fake RunQuery RPC: filter, order and slice the in-memory entities.

  Args:
    query: a datastore_pb.Query (grounded by the CopyFrom into a
      datastore_pb.Query clone below).
    query_result: the response protobuf; this method fills in its cursor
      and more_results fields (presumably a datastore_pb.QueryResult —
      TODO confirm from the service definition).
  """
  # Non-blocking probe of the transaction lock: if it is currently held,
  # a transaction is in flight and queries are rejected; otherwise release
  # the lock we just acquired.
  if not self.__tx_lock.acquire(False):
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Can\'t query inside a transaction.')
  else:
    self.__tx_lock.release()

  # Enforce the production limits on offset size and total query components.
  if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

  num_components = len(query.filter_list()) + len(query.order_list())
  if query.has_ancestor():
    num_components += 1
  if num_components > _MAX_QUERY_COMPONENTS:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        ('query is too large. may not have more than %s filters'
         ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

  app = query.app()

  if self.__require_indexes:
    # When index enforcement is on, verify a matching composite index
    # definition exists for this query.
    required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
    if required:
      required_key = kind, ancestor, props
      indexes = self.__indexes.get(app)
      if not indexes:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.NEED_INDEX,
            "This query requires a composite index, but none are defined. "
            "You must create an index.yaml file in your application root.")
      # The leading num_eq_filters properties are equality filters, whose
      # relative order does not matter, so compare them as a set.
      eq_filters_set = set(props[:num_eq_filters])
      remaining_filters = props[num_eq_filters:]
      for index in indexes:
        definition = datastore_admin.ProtoToIndexDefinition(index)
        index_key = datastore_index.IndexToKey(definition)
        if required_key == index_key:
          break
        # Accept an index whose equality prefix is a permutation of ours,
        # as long as kind, ancestor and the trailing properties match.
        if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
          this_props = index_key[2]
          this_eq_filters_set = set(this_props[:num_eq_filters])
          this_remaining_filters = this_props[num_eq_filters:]
          if (eq_filters_set == this_eq_filters_set and
              remaining_filters == this_remaining_filters):
            break
      else:
        # for/else: no index in the loop matched.
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.NEED_INDEX,
            "This query requires a composite index that is not defined. "
            "You must update the index.yaml file in your application root.")

  try:
    query.set_app(app)
    # Entities are keyed by (app, kind); a missing bucket means no results.
    results = self.__entities[app, query.kind()].values()
    results = [entity.native for entity in results]
  except KeyError:
    results = []

  if query.has_ancestor():
    ancestor_path = query.ancestor().path().element_list()
    def is_descendant(entity):
      # Reaches into the private key reference to compare path prefixes:
      # a descendant's path starts with the ancestor's full path.
      path = entity.key()._Key__reference.path().element_list()
      return path[:len(ancestor_path)] == ancestor_path
    results = filter(is_descendant, results)

  # Map protobuf filter operators to Python comparison operator strings,
  # used to build eval()able comparison expressions below.
  operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
               datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
               datastore_pb.Query_Filter.GREATER_THAN:          '>',
               datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
               datastore_pb.Query_Filter.EQUAL:                 '==',
              }

  for filt in query.filter_list():
    # IN filters are expected to have been rewritten by higher layers.
    assert filt.op() != datastore_pb.Query_Filter.IN

    prop = filt.property(0).name().decode('utf-8')
    op = operators[filt.op()]

    filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                       for filter_prop in filt.property_list()]

    def passes(entity):
      """Returns True if the entity passes the filter, False otherwise."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        entity_vals = self.__GetSpecialPropertyValue(entity, prop)
      else:
        entity_vals = entity.get(prop, [])

      if not isinstance(entity_vals, list):
        entity_vals = [entity_vals]

      for fixed_entity_val in entity_vals:
        # Raw (unindexed) property types never match a filter.
        if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
          continue

        for filter_val in filter_val_list:
          fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
          filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
          if fixed_entity_type == filter_type:
            # Same type: compare the actual values via their reprs.
            comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
          elif op != '==':
            # Different types: inequality ordering uses the type tags,
            # mirroring the real datastore's cross-type ordering.
            comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
          else:
            # Different types can never be ==; try the next filter value.
            continue

          logging.log(logging.DEBUG - 1,
                      'Evaling filter expression "%s"', comp)

          try:
            # NOTE(review): eval of repr'd values — only internal,
            # already-decoded property values reach this, not raw
            # user strings.
            ret = eval(comp)
            if ret and ret != NotImplementedError:
              return True
          except TypeError:
            pass

      return False

    results = filter(passes, results)

  def has_prop_indexed(entity, prop):
    """Returns True if prop is in the entity and is not a raw property, or
    is a special property.
    """
    if prop in datastore_types._SPECIAL_PROPERTIES:
      return True
    values = entity.get(prop, [])
    if not isinstance(values, (tuple, list)):
      values = [values]
    for value in values:
      if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
        return True
    return False

  # Entities missing (or with only raw values for) an ordered property are
  # dropped, matching the real datastore's index-based behavior.
  for order in query.order_list():
    prop = order.property().decode('utf-8')
    results = [entity for entity in results if has_prop_indexed(entity, prop)]

  def order_compare_entities(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings.
    """
    cmped = 0
    for o in query.order_list():
      prop = o.property().decode('utf-8')
      reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
      if prop in datastore_types._SPECIAL_PROPERTIES:
        a_val = self.__GetSpecialPropertyValue(a, prop)
        b_val = self.__GetSpecialPropertyValue(b, prop)
      else:
        # Multi-valued properties sort by their extreme element in the
        # order's direction (Py2 sorted(iterable, cmp, reverse=...)).
        a_val = a[prop]
        if isinstance(a_val, list):
          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
        b_val = b[prop]
        if isinstance(b_val, list):
          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

      cmped = order_compare_properties(a_val, b_val)

      if o.direction() is datastore_pb.Query_Order.DESCENDING:
        cmped = -cmped

      if cmped != 0:
        return cmped

    # All orderings tied (or none given): fall back to key order for a
    # deterministic total order.
    if cmped == 0:
      return cmp(a.key(), b.key())

  def order_compare_properties(x, y):
    """Return a negative, zero or positive number depending on whether
    property value x is considered smaller than, equal to, or larger than
    property value y. If x and y are different types, they're compared
    based on the type ordering used in the real datastore, which is based
    on the tag numbers in the PropertyValue PB.
    """
    if isinstance(x, datetime.datetime):
      x = datastore_types.DatetimeToTimestamp(x)
    if isinstance(y, datetime.datetime):
      y = datastore_types.DatetimeToTimestamp(y)

    x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
    y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

    if x_type == y_type:
      try:
        return cmp(x, y)
      except TypeError:
        # Incomparable same-tagged values are treated as equal.
        return 0
    else:
      return cmp(x_type, y_type)

  # Python 2 cmp-function sort.
  results.sort(order_compare_entities)

  # Apply offset/limit, capping the limit at the stub's maximum page size.
  offset = 0
  limit = len(results)
  if query.has_offset():
    offset = query.offset()
  if query.has_limit():
    limit = query.limit()
  if limit > _MAXIMUM_RESULTS:
    limit = _MAXIMUM_RESULTS
  results = results[offset:limit + offset]

  # Record this query (sans hint) in the history used for index suggestions,
  # then persist the history.
  clone = datastore_pb.Query()
  clone.CopyFrom(query)
  clone.clear_hint()
  if clone in self.__query_history:
    self.__query_history[clone] += 1
  else:
    self.__query_history[clone] = 1
  self.__WriteHistory()

  # Allocate a cursor id under the cursor lock and stash the result set.
  self.__cursor_lock.acquire()
  cursor = self.__next_cursor
  self.__next_cursor += 1
  self.__cursor_lock.release()
  self.__queries[cursor] = (results, len(results))

  query_result.mutable_cursor().set_cursor(cursor)
  query_result.set_more_results(len(results) > 0)
def _Dynamic_RunQuery(self, query, query_result):
  """Fake RunQuery RPC with namespace and transaction-snapshot support.

  Args:
    query: a datastore_pb.Query (grounded by the CopyFrom into a
      datastore_pb.Query clone below).
    query_result: the response protobuf; populated via a _Cursor's
      PopulateQueryResult and, when compile is requested, a compiled
      query stub (presumably datastore_pb.QueryResult — TODO confirm).
  """
  if query.has_transaction():
    self.__ValidateTransaction(query.transaction())
    if not query.has_ancestor():
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Only ancestor queries are allowed inside transactions.')
    # Inside a transaction, read from the snapshot taken at tx start so
    # the query does not observe concurrent writes.
    entities = self.__tx_snapshot
  else:
    entities = self.__entities

  app_id = query.app()
  namespace = query.name_space()
  self.__ValidateAppId(app_id)

  # Enforce the production limits on offset size and total query components.
  if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

  num_components = len(query.filter_list()) + len(query.order_list())
  if query.has_ancestor():
    num_components += 1
  if num_components > _MAX_QUERY_COMPONENTS:
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        ('query is too large. may not have more than %s filters'
         ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

  # Normalize filters/orders into canonical form before matching indexes.
  (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                query.order_list())

  if self.__require_indexes:
    # When index enforcement is on, verify a matching composite index
    # definition exists for this query.
    required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
    if required:
      required_key = kind, ancestor, props
      indexes = self.__indexes.get(app_id)
      if not indexes:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.NEED_INDEX,
            "This query requires a composite index, but none are defined. "
            "You must create an index.yaml file in your application root.")
      # The leading num_eq_filters properties are equality filters, whose
      # relative order does not matter, so compare them as a set.
      eq_filters_set = set(props[:num_eq_filters])
      remaining_filters = props[num_eq_filters:]
      for index in indexes:
        definition = datastore_index.ProtoToIndexDefinition(index)
        index_key = datastore_index.IndexToKey(definition)
        if required_key == index_key:
          break
        # Accept an index whose equality prefix is a permutation of ours,
        # as long as kind, ancestor and the trailing properties match.
        if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
          this_props = index_key[2]
          this_eq_filters_set = set(this_props[:num_eq_filters])
          this_remaining_filters = this_props[num_eq_filters:]
          if (eq_filters_set == this_eq_filters_set and
              remaining_filters == this_remaining_filters):
            break
      else:
        # for/else: no index in the loop matched.
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.NEED_INDEX,
            "This query requires a composite index that is not defined. "
            "You must update the index.yaml file in your application root.")

  try:
    query.set_app(app_id)
    datastore_types.SetNamespace(query, namespace)
    # Entity buckets are keyed by (encoded app/namespace, kind).
    encoded = datastore_types.EncodeAppIdNamespace(app_id, namespace)
    if query.has_kind():
      results = entities[encoded, query.kind()].values()
      results = [entity.native for entity in results]
    else:
      # Kindless query: gather every entity in this app/namespace.
      results = []
      for key in entities:
        if key[0] == encoded:
          results += [entity.native for entity in entities[key].values()]
  except KeyError:
    results = []

  if query.has_ancestor():
    ancestor_path = query.ancestor().path().element_list()
    def is_descendant(entity):
      # Reaches into the private key reference to compare path prefixes:
      # a descendant's path starts with the ancestor's full path.
      path = entity.key()._Key__reference.path().element_list()
      return path[:len(ancestor_path)] == ancestor_path
    results = filter(is_descendant, results)

  # Map protobuf filter operators to Python comparison operator strings,
  # used to build eval()able comparison expressions below.
  operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
               datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
               datastore_pb.Query_Filter.GREATER_THAN:          '>',
               datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
               datastore_pb.Query_Filter.EQUAL:                 '==',
              }

  def has_prop_indexed(entity, prop):
    """Returns True if prop is in the entity and is indexed."""
    if prop in datastore_types._SPECIAL_PROPERTIES:
      return True
    elif prop in entity.unindexed_properties():
      return False
    values = entity.get(prop, [])
    if not isinstance(values, (tuple, list)):
      values = [values]
    for value in values:
      if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
        return True
    return False

  for filt in filters:
    # IN filters are expected to have been rewritten by higher layers.
    assert filt.op() != datastore_pb.Query_Filter.IN

    prop = filt.property(0).name().decode('utf-8')
    op = operators[filt.op()]

    filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                       for filter_prop in filt.property_list()]

    def passes_filter(entity):
      """Returns True if the entity passes the filter, False otherwise.

      The filter being evaluated is filt, the current filter that we're on
      in the list of filters in the query.
      """
      if not has_prop_indexed(entity, prop):
        return False

      try:
        entity_vals = datastore._GetPropertyValue(entity, prop)
      except KeyError:
        entity_vals = []

      if not isinstance(entity_vals, list):
        entity_vals = [entity_vals]

      for fixed_entity_val in entity_vals:
        for filter_val in filter_val_list:
          fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
          filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
          if fixed_entity_type == filter_type:
            # Same type: compare the actual values via their reprs.
            comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
          elif op != '==':
            # Different types: inequality ordering uses the type tags,
            # mirroring the real datastore's cross-type ordering.
            comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
          else:
            # Different types can never be ==; try the next filter value.
            continue

          logging.log(logging.DEBUG - 1,
                      'Evaling filter expression "%s"', comp)

          try:
            # NOTE(review): eval of repr'd values — only internal,
            # already-decoded property values reach this, not raw
            # user strings.
            ret = eval(comp)
            if ret and ret != NotImplementedError:
              return True
          except TypeError:
            pass

      return False

    results = filter(passes_filter, results)

  # Entities without an indexed value for an ordered property are dropped,
  # matching the real datastore's index-based behavior.
  for order in orders:
    prop = order.property().decode('utf-8')
    results = [entity for entity in results if has_prop_indexed(entity, prop)]

  def order_compare_entities(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings.
    """
    cmped = 0
    for o in orders:
      prop = o.property().decode('utf-8')
      reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)

      # Multi-valued properties sort by their extreme element in the
      # order's direction (Py2 sorted(iterable, cmp, reverse=...)).
      a_val = datastore._GetPropertyValue(a, prop)
      if isinstance(a_val, list):
        a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
      b_val = datastore._GetPropertyValue(b, prop)
      if isinstance(b_val, list):
        b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

      cmped = order_compare_properties(a_val, b_val)

      if o.direction() is datastore_pb.Query_Order.DESCENDING:
        cmped = -cmped

      if cmped != 0:
        return cmped

    # All orderings tied (or none given): fall back to key order for a
    # deterministic total order.
    if cmped == 0:
      return cmp(a.key(), b.key())

  def order_compare_properties(x, y):
    """Return a negative, zero or positive number depending on whether
    property value x is considered smaller than, equal to, or larger than
    property value y. If x and y are different types, they're compared
    based on the type ordering used in the real datastore, which is based
    on the tag numbers in the PropertyValue PB.
    """
    if isinstance(x, datetime.datetime):
      x = datastore_types.DatetimeToTimestamp(x)
    if isinstance(y, datetime.datetime):
      y = datastore_types.DatetimeToTimestamp(y)

    x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
    y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

    if x_type == y_type:
      try:
        return cmp(x, y)
      except TypeError:
        # Incomparable same-tagged values are treated as equal.
        return 0
    else:
      return cmp(x_type, y_type)

  # Python 2 cmp-function sort.
  results.sort(order_compare_entities)

  # Record this query (sans hint) in the history used for index suggestions.
  clone = datastore_pb.Query()
  clone.CopyFrom(query)
  clone.clear_hint()
  if clone in self.__query_history:
    self.__query_history[clone] += 1
  else:
    self.__query_history[clone] = 1

  # Hand the full result list to a _Cursor, which handles paging.
  cursor = _Cursor(query, results, order_compare_entities)
  self.__queries[cursor.cursor] = cursor

  # Batch size precedence: explicit count, then limit, then the default.
  if query.has_count():
    count = query.count()
  elif query.has_limit():
    count = query.limit()
  else:
    count = _BATCH_SIZE

  cursor.PopulateQueryResult(query_result, count, compile=query.compile())
  if query.compile():
    # The stub fakes compiled queries by stashing the encoded query in
    # the primary-scan index name.
    compiled_query = query_result.mutable_compiled_query()
    compiled_query.set_keys_only(query.keys_only())
    compiled_query.mutable_primaryscan().set_index_name(query.Encode())
def _Dynamic_RunQuery(self, query, query_result):
  """Fake RunQuery RPC (older variant): filter, order and limit entities.

  Args:
    query: a datastore_pb.Query (grounded by the CopyFrom into a
      datastore_pb.Query clone below).
    query_result: the response protobuf; this method fills in its cursor
      and more_results fields (presumably a datastore_pb.QueryResult —
      TODO confirm from the service definition).
  """
  # Non-blocking probe of the transaction lock: if it is currently held,
  # a transaction is in flight and queries are rejected; otherwise release
  # the lock we just acquired.
  if not self.__tx_lock.acquire(False):
    raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, "Can't query inside a transaction.")
  else:
    self.__tx_lock.release()

  app = self.ResolveAppId(query.app())

  if self.__require_indexes:
    # When index enforcement is on, verify a matching composite index
    # definition exists for this query. This variant returns None when
    # no composite index is needed.
    required_index = datastore_index.CompositeIndexForQuery(query)
    if required_index is not None:
      kind, ancestor, props, num_eq_filters = required_index
      required_key = kind, ancestor, props
      indexes = self.__indexes.get(app)
      if not indexes:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            "This query requires a composite index, but none are defined. "
            "You must create an index.yaml file in your application root."
            )
      # The leading num_eq_filters properties are equality filters, whose
      # relative order does not matter, so compare them as a set.
      eq_filters_set = set(props[:num_eq_filters])
      remaining_filters = props[num_eq_filters:]
      for index in indexes:
        definition = datastore_admin.ProtoToIndexDefinition(index)
        index_key = datastore_index.IndexToKey(definition)
        if required_key == index_key:
          break
        # Accept an index whose equality prefix is a permutation of ours,
        # as long as kind, ancestor and the trailing properties match.
        if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
          this_props = index_key[2]
          this_eq_filters_set = set(this_props[:num_eq_filters])
          this_remaining_filters = this_props[num_eq_filters:]
          if (eq_filters_set == this_eq_filters_set and
              remaining_filters == this_remaining_filters):
            break
      else:
        # for/else: no index in the loop matched.
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            "This query requires a composite index that is not defined. "
            "You must update the index.yaml file in your application root."
            )

  try:
    query.set_app(app)
    # Entities are stored as protobufs keyed by (app, kind); convert to
    # datastore.Entity objects for filtering and sorting.
    results = self.__entities[app, query.kind()].values()
    results = [datastore.Entity._FromPb(pb) for pb in results]
  except KeyError:
    results = []

  if query.has_ancestor():
    ancestor_path = query.ancestor().path().element_list()
    def is_descendant(entity):
      # Reaches into the private key reference to compare path prefixes:
      # a descendant's path starts with the ancestor's full path.
      path = entity.key()._Key__reference.path().element_list()
      return path[:len(ancestor_path)] == ancestor_path
    results = filter(is_descendant, results)

  # Map protobuf filter operators to Python comparison operator strings,
  # used to build eval()able comparison expressions below.
  operators = {
      datastore_pb.Query_Filter.LESS_THAN: '<',
      datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
      datastore_pb.Query_Filter.GREATER_THAN: '>',
      datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
      datastore_pb.Query_Filter.EQUAL: '==',
      }

  for filt in query.filter_list():
    # IN filters are expected to have been rewritten by higher layers.
    assert filt.op() != datastore_pb.Query_Filter.IN

    prop = filt.property(0).name().decode('utf-8')
    op = operators[filt.op()]

    def passes(entity):
      """Returns True if the entity passes the filter, False otherwise."""
      entity_vals = entity.get(prop, [])
      if type(entity_vals) is not types.ListType:
        entity_vals = [entity_vals]

      # Round-trip each value through a property PB so it is compared in
      # the same normalized form as the filter values.
      entity_property_list = [
          datastore_types.ToPropertyPb(prop, value) for value in entity_vals
          ]

      for entity_prop in entity_property_list:
        fixed_entity_val = datastore_types.FromPropertyPb(entity_prop)

        for filter_prop in filt.property_list():
          filter_val = datastore_types.FromPropertyPb(filter_prop)

          comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)

          logging.log(logging.DEBUG - 1,
                      'Evaling filter expression "%s"', comp)

          # NOTE(review): eval of repr'd values — only internal,
          # already-decoded property values reach this, not raw
          # user strings.
          if eval(comp):
            return True

      return False

    results = filter(passes, results)

  # Entities missing an ordered property are dropped.
  for order in query.order_list():
    prop = order.property().decode('utf-8')
    results = [entity for entity in results if prop in entity]

  def order_compare(a, b):
    """Return a negative, zero or positive number depending on whether
    entity a is considered smaller than, equal to, or larger than b,
    according to the query's orderings.
    """
    for o in query.order_list():
      prop = o.property().decode('utf-8')

      # Multi-valued properties compare by their minimum element.
      a_values = a[prop]
      if not isinstance(a_values, types.ListType):
        a_values = [a_values]

      b_values = b[prop]
      if not isinstance(b_values, types.ListType):
        b_values = [b_values]

      cmped = cmp(min(a_values), min(b_values))

      if o.direction() is datastore_pb.Query_Order.DESCENDING:
        cmped = -cmped

      if cmped != 0:
        return cmped

    return 0

  # Python 2 cmp-function sort.
  results.sort(order_compare)

  if query.has_limit():
    results = results[:query.limit()]

  # Record this query (sans hint) in the history used for index suggestions,
  # then persist the history.
  clone = datastore_pb.Query()
  clone.CopyFrom(query)
  clone.clear_hint()
  if clone in self.__query_history:
    self.__query_history[clone] += 1
  else:
    self.__query_history[clone] = 1
  self.__WriteHistory()

  # Convert back to protobufs for storage in the cursor table.
  results = [e._ToPb() for e in results]

  # Allocate a cursor id under the cursor lock and stash the result set.
  self.__cursor_lock.acquire()
  cursor = self.__next_cursor
  self.__next_cursor += 1
  self.__cursor_lock.release()
  self.__queries[cursor] = (results, len(results))

  query_result.mutable_cursor().set_cursor(cursor)
  query_result.set_more_results(len(results) > 0)