def GenerateIndexFromHistory(query_history, all_indexes=None, manual_indexes=None):
  """Generate most of the text for index.yaml from the query history.

  Args:
    query_history: Query history, a dict mapping datastore_pb.Query to a count
      of the number of times that query has been issued.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file.  May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output.  May be None.

  Returns:
    A string representation that can safely be appended to an existing
    index.yaml file.  Returns the empty string if it would generate no output.
  """
  indexes = GenerateIndexDictFromHistory(
      query_history, all_indexes, manual_indexes)

  if not indexes:
    return ''

  res = []
  # The counts are not used in the output, so sort and iterate the
  # (kind, ancestor, props) keys directly; keys are unique, so this yields
  # the same order as sorting the (key, count) pairs.
  for kind, ancestor, props in sorted(indexes):
    # Prefix each stanza with a blank line so the output can be appended
    # directly to an existing index.yaml file.
    res.append('')
    res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))

  # Trailing blank line terminates the generated section.
  res.append('')
  return '\n'.join(res)
def __query_result_hook(self, rpc):
  """Internal method used as get_result_hook for RunQuery/Next operation."""
  try:
    self.__conn.check_rpc_success(rpc)
  except datastore_errors.NeedIndexError as exc:
    # Re-raise with the YAML stanza for the missing composite index appended,
    # so the developer can paste it straight into index.yaml.
    # CompositeIndexForQuery's first and last return values are not part of
    # the (kind, ancestor, props) triple that IndexYamlForQuery expects.
    missing_index_yaml = datastore_index.IndexYamlForQuery(
        *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
    raise datastore_errors.NeedIndexError(
        str(exc) + '\nThis query needs this index:\n' + missing_index_yaml)
def run_rpc_handler(rpc):
  """Check an RPC for success, enriching NeedIndexError with index YAML."""
  try:
    rpc.check_success()
  except apiproxy_errors.ApplicationError as err:
    try:
      # NOTE(review): _ToDatastoreError is called, not raised, here —
      # presumably it raises the translated error itself; verify against
      # its definition.
      _ToDatastoreError(err)
    except datastore_errors.NeedIndexError as exc:
      # Append the YAML for the missing composite index to the error text.
      index_yaml = datastore_index.IndexYamlForQuery(
          *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
      raise datastore_errors.NeedIndexError(
          str(exc) + '\nThis query needs this index:\n' + index_yaml)
def rpc_callback(self, rpc):
  """Check an RPC for success, enriching NeedIndexError with index YAML."""
  try:
    rpc.check_success()
  except ApplicationError as err:
    try:
      # Translate the low-level ApplicationError into a datastore error and
      # raise it; NeedIndexError gets special handling below.
      raise _ToDatastoreError(err)
    except datastore_errors.NeedIndexError as exc:
      # Append the YAML for the missing composite index to the error text.
      index_yaml = datastore_index.IndexYamlForQuery(
          *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
      raise datastore_errors.NeedIndexError(
          str(exc) + '\nThis query needs this index:\n' + index_yaml)
def GenerateIndexFromHistory(query_history, all_indexes=None, manual_indexes=None):
  """Generate most of the text for index.yaml from the query history.

  Args:
    query_history: Query history, a dict mapping datastore_pb.Query to a count
      of the number of times that query has been issued.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file.  May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output.  May be None.

  Returns:
    A string representation that can safely be appended to an existing
    index.yaml file.  Returns the empty string if it would generate no output.
  """
  all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
  manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

  # Seed with the indexes already present in the input file (count 0),
  # excluding the manually maintained ones that must not be echoed back.
  indexes = dict((key, 0) for key in all_keys - manual_keys)

  # Accumulate query counts for every query that requires a composite index,
  # again skipping manually maintained indexes.  The final element returned
  # by CompositeIndexForQuery (number of equality filters) is unused here.
  for query, count in query_history.items():
    required, kind, ancestor, props, _ = (
        datastore_index.CompositeIndexForQuery(query))
    if required:
      key = (kind, ancestor, props)
      if key not in manual_keys:
        indexes[key] = indexes.get(key, 0) + count

  if not indexes:
    return ''

  res = []
  for (kind, ancestor, props), _ in sorted(indexes.items()):
    # Prefix each stanza with a blank line so the output can be appended
    # directly to an existing index.yaml file.
    res.append('')
    res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))

  # Trailing blank line terminates the generated section.
  res.append('')
  return '\n'.join(res)
def _CustomQueryRun(original, query, conn, query_options=None):
  """Patched datastore_query.Query.run() method.

  Records the composite index a query would need, and — when possible —
  widens the query so it can be served without that index, applying the
  original filters/ordering and offset/limit in memory instead.

  Args:
    original: The unpatched datastore_query.Query.run implementation.
    query: The datastore_query.Query being run.
    conn: The datastore connection to run against.
    query_options: Optional query options; may be None.

  Returns:
    A batcher from the original implementation, or a _FakeBatcher of
    in-memory results when the query was widened.
  """
  query_pb = query._to_pb(conn, query_options)  # pylint: disable-msg=W0212

  # Check if a composite index is required.
  req, kind, ancestor, props = datastore_index.CompositeIndexForQuery(query_pb)
  if req:
    # Keep track of the composite index for generation of index.yaml text.
    props = datastore_index.GetRecommendedIndexProperties(props)
    index_yaml = datastore_index.IndexYamlForQuery(kind, ancestor, props)
    _RecordIndex(index_yaml)

    wide_pb = _WidenQueryProto(query_pb)
    if wide_pb is not None:
      # pylint: disable-msg=W0212
      wide_query = datastore_query.Query._from_pb(wide_pb)

      # TODO: query_options are ignored here since we pass None.
      # It might be possible to pass query_options through - future
      # investigation is required.
      batcher = original(wide_query, conn, None)
      results = []
      for batch in batcher:
        results.extend([entity.ToPb() for entity in batch.results])

      # Apply the original query and slice.
      results = datastore_query.apply_query(query, results)
      # Guard against query_options being None (it is an optional argument);
      # previously this raised AttributeError on the widened path.
      if query_options is not None:
        offset = query_options.offset or 0
        limit = query_options.limit
      else:
        offset = 0
        limit = None
      if limit is None:
        limit = len(results)
      results = results[offset:offset + limit]

      # Convert protos to entities or keys.
      if query_pb.keys_only():
        results = [datastore.Entity.FromPb(pb).key() for pb in results]
      else:
        results = [datastore.Entity.FromPb(pb) for pb in results]

      return _FakeBatcher(results)

  # The query is either a simple query or a composite query that cannot be
  # widened - invoke the normal Query.run() implementation and let it fulfill
  # the request or raise an exception.
  return original(query, conn, query_options=query_options)