def __query_result_hook(self, rpc):
   """Internal method used as get_result_hook for RunQuery/Next operation."""
   try:
     self.__conn.check_rpc_success(rpc)
   except datastore_errors.NeedIndexError, exc:
     yaml = datastore_index.IndexYamlForQuery(
         *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
     raise datastore_errors.NeedIndexError(
         str(exc) + '\nThis query needs this index:\n' + yaml)
Example #2
0
def run_rpc_handler(rpc):
  try:
    rpc.check_success()
  except apiproxy_errors.ApplicationError, err:
    try:
      _ToDatastoreError(err)
    except datastore_errors.NeedIndexError, exc:
      yaml = datastore_index.IndexYamlForQuery(
        *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
      raise datastore_errors.NeedIndexError(
        str(exc) + '\nThis query needs this index:\n' + yaml)
def GenerateIndexFromHistory(query_history,
                             all_indexes=None,
                             manual_indexes=None):
    """Generate most of the text for index.yaml from the query history.

    Args:
      query_history: Query history, a dict mapping query to a usage count.
      all_indexes: Optional datastore_index.IndexDefinitions instance
        representing all the indexes found in the input file.  May be None.
      manual_indexes: Optional datastore_index.IndexDefinitions instance
        containing indexes for which we should not generate output.  May be
        None.

    Returns:
      A string representation that can safely be appended to an
      existing index.yaml file.
    """
    all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
    manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

    # Seed every automatic (non-manual) index with a zero count so unused
    # indexes from the input file are still echoed to the output.
    counts = {}
    for auto_key in all_keys - manual_keys:
        counts[auto_key] = 0

    # Accumulate usage counts, skipping queries covered by manual indexes.
    for query, count in query_history.iteritems():
        composite = datastore_index.CompositeIndexForQuery(query)
        if composite is None:
            continue
        key = composite[:3]
        if key in manual_keys:
            continue
        counts[key] = counts.get(key, 0) + count

    lines = []
    for (kind, ancestor, props), count in sorted(counts.iteritems()):
        lines.append('')
        if count == 0:
            comment = '# Unused in query history -- copied from input.'
        elif count == 1:
            comment = '# Used once in query history.'
        else:
            comment = '# Used %d times in query history.' % count
        lines.append(comment)
        lines.append('- kind: %s' % kind)
        if ancestor:
            lines.append('  ancestor: yes')
        if props:
            lines.append('  properties:')
            for name, direction in props:
                lines.append('  - name: %s' % name)
                if direction == datastore_index.DESCENDING:
                    lines.append('    direction: desc')

    lines.append('')
    return '\n'.join(lines)
Example #4
0
    def rpc_callback(self, rpc):

        try:
            rpc.check_success()
        except ApplicationError, err:
            try:
                raise _ToDatastoreError(err)
            except datastore_errors.NeedIndexError, exc:
                yaml = datastore_index.IndexYamlForQuery(
                    *datastore_index.CompositeIndexForQuery(rpc.request)[1:-1])
                raise datastore_errors.NeedIndexError(
                    str(exc) + '\nThis query needs this index:\n' + yaml)
Example #5
0
 def hook(service, call, request, response):
     """API-call hook: record one db_log entry per affected datastore kind.

     Put logs the kind of each entity written; Get/Delete log the kind of
     each key touched; RunQuery logs the queried kind; any other call is
     logged without a kind.
     """
     assert service == 'datastore_v3'

     def log_key(key):
         # Resolve the model kind from a key proto and record the call.
         db_log(model_name_from_key(key), call)

     if call == 'Put':
         for entity in request.entity_list():
             log_key(entity.key())
     elif call in ('Get', 'Delete'):
         for key in request.key_list():
             log_key(key)
     elif call == 'RunQuery':
         # Element 1 of CompositeIndexForQuery's result tuple is the kind.
         db_log(datastore_index.CompositeIndexForQuery(request)[1], call)
     else:
         db_log(None, call)
def GenerateIndexFromHistory(query_history,
                             all_indexes=None, manual_indexes=None):
  """Generate most of the text for index.yaml from the query history.

  Args:
    query_history: Query history, a dict mapping query to a usage count.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file.  May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output.  May be None.

  Returns:
    A string representation that can safely be appended to an existing
    index.yaml file. Returns the empty string if it would generate no output.
  """
  all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
  manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

  # Start with every automatic index from the input file, at a zero count,
  # so indexes unused by the recorded queries are still preserved in output.
  usage = dict.fromkeys(all_keys - manual_keys, 0)

  for query, count in query_history.iteritems():
    (required, kind, ancestor,
     props, _num_eq_filters) = datastore_index.CompositeIndexForQuery(query)
    if not required:
      continue
    key = (kind, ancestor, props)
    if key in manual_keys:
      continue  # Covered by a manually maintained index; emit nothing.
    usage[key] = usage.get(key, 0) + count

  if not usage:
    return ''

  # One YAML stanza per index, each preceded by a blank separator line.
  res = []
  for (kind, ancestor, props), _count in sorted(usage.iteritems()):
    res.append('')
    res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))
  res.append('')
  return '\n'.join(res)
def GenerateIndexDictFromHistory(query_history,
                                 all_indexes=None, manual_indexes=None):
  """Generate a dict of automatic index entries from the query history.

  Args:
    query_history: Query history, a dict mapping datastore_pb.Query to a count
      of the number of times that query has been issued.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file.  May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output.  May be None.

  Returns:
    A dict where each key is a tuple (kind, ancestor, properties) and the
      corresponding value is a count of the number of times that query has been
      issued. The dict contains no entries for keys that appear in manual_keys.
      In the tuple, "properties" is itself a tuple of tuples, where each
      contained tuple is (name, direction), with "name" being a string and
      "direction" being datastore_index.ASCENDING or .DESCENDING.
  """
  all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
  manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

  # Every automatic index from the input starts at zero so that it survives
  # even if no recorded query used it.
  index_counts = dict.fromkeys(all_keys - manual_keys, 0)

  for query, count in six.iteritems(query_history):
    required, kind, ancestor, props = (
        datastore_index.CompositeIndexForQuery(query))
    if not required:
      continue
    # Normalize to the recommended property ordering before keying.
    props = datastore_index.GetRecommendedIndexProperties(props)
    key = (kind, ancestor, props)
    if key in manual_keys:
      continue  # Manually maintained; excluded from the result.
    index_counts[key] = index_counts.get(key, 0) + count

  return index_counts
Example #8
0
def _CustomQueryRun(original, query, conn, query_options=None):
    """Patched datastore_query.Query.run() method.

    Records the composite index (if any) that the query would require and,
    when the query can be "widened" into one that needs no composite index,
    runs the widened query and re-applies the original query's filters,
    orders, offset and limit locally.

    Args:
        original: The unpatched Query.run callable to delegate to.
        query: The datastore_query.Query instance being run.
        conn: Datastore connection used to serialize the query.
        query_options: Optional datastore_query.QueryOptions.

    Returns:
        A batcher (real, or _FakeBatcher when the query was widened)
        yielding the query results.
    """
    query_pb = query._to_pb(conn, query_options)  # pylint: disable-msg=W0212
    # Check if composite index is required.
    req, kind, ancestor, props = datastore_index.CompositeIndexForQuery(
        query_pb)
    if req:
        # Keep track of the composite index for generation of index.yaml text.
        props = datastore_index.GetRecommendedIndexProperties(props)
        index_yaml = datastore_index.IndexYamlForQuery(kind, ancestor, props)
        _RecordIndex(index_yaml)

        wide_pb = _WidenQueryProto(query_pb)
        if wide_pb is not None:
            # pylint: disable-msg=W0212
            wide_query = datastore_query.Query._from_pb(wide_pb)
            # TODO: query_options are ignored here since we pass None.
            # It might be possible to pass query_options through - future
            # investigation is required.
            batcher = original(wide_query, conn, None)
            results = []
            for batch in batcher:
                results.extend([entity.ToPb() for entity in batch.results])
            # Apply the original query and slice.
            results = datastore_query.apply_query(query, results)
            # NOTE(review): this assumes query_options is not None when a
            # widened query is run -- confirm against callers.
            offset = query_options.offset or 0
            limit = query_options.limit
            if limit is None:
                limit = len(results)
            results = results[offset:offset + limit]
            # Convert protos to entities or keys.
            if query_pb.keys_only():
                results = [datastore.Entity.FromPb(pb).key() for pb in results]
            else:
                results = [datastore.Entity.FromPb(pb) for pb in results]
            return _FakeBatcher(results)

    # The query is either a simple query or a composite query that cannot be
    # widened - invoke the normal Query.run() implementation and let it fulfill
    # the request or raise an exception.
    return original(query, conn, query_options=query_options)
Example #9
0
def hook(service, call, request, response):
    """API-call hook that accumulates per-kind datastore usage statistics.

    Updates the DSStatsHolder stats dict with per-kind call counts plus
    read/write cost figures taken from the response. Best-effort: any
    failure is logged and swallowed so stats collection never breaks the
    request being served.

    Args:
        service: API service name; only 'datastore_v3' is expected.
        call: RPC method name ('Put', 'Get', 'Delete', 'RunQuery', ...).
        request: The request proto for the call.
        response: The response proto (used for cost accounting).
    """
    try:
        assert service == 'datastore_v3'
        ds_stats = DSStatsHolder.get()
        if ds_stats is None:
            return
        if call == 'Put':
            for entity in request.entity_list():
                ds_stats[model_name_from_key(entity.key())][call] += 1
            # Per-kind write costs are only attributable for single-entity
            # puts; batched puts report one aggregate cost.
            if len(request.entity_list()) == 1:
                model_type = model_name_from_key(request.entity_list()[0].key())
                ds_stats[model_type]['entity_writes'] += response.cost().entity_writes()
                ds_stats[model_type]['index_writes'] += response.cost().index_writes()
        elif call in ('Get', 'Delete'):
            for key in request.key_list():
                ds_stats[model_name_from_key(key)][call] += 1
            if call == 'Get':
                ds_stats['_cost']['reads'] += len(request.key_list())
            elif len(request.key_list()) == 1:
                model_type = model_name_from_key(request.key_list()[0])
                ds_stats[model_type]['entity_writes'] += response.cost().entity_writes()
                ds_stats[model_type]['index_writes'] += response.cost().index_writes()
        elif call in ('RunQuery',):  # 'Next'):
            kind = datastore_index.CompositeIndexForQuery(request)[1]
            ds_stats[kind][call] += 1
            if call == 'RunQuery':
                ds_stats[kind]['reads'] += 1
                ds_stats['_cost']['reads'] += 1
            cost_type = 'small_reads' if response.keys_only() else 'reads'
            num_results = len(response.result_list()) + response.skipped_results()
            ds_stats[kind][cost_type] += num_results
            ds_stats['_cost'][cost_type] += num_results
        else:
            ds_stats['?'][call] += 1
        if hasattr(response, 'cost'):
            ds_stats['_cost']['entity_writes'] += response.cost().entity_writes()
            ds_stats['_cost']['index_writes'] += response.cost().index_writes()
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior
        # without masking interpreter-exit signals.
        logging.exception('Exception occurred during datastore stats')
def __IndexListForQuery(query):
  """Get the composite index definition used by the query, if any, as a list.

  Args:
    query: the datastore_pb.Query to compute the index list for

  Returns:
    A singleton list of the composite index definition pb used by the query,
    or an empty list when the query requires no composite index.
  """
  required, kind, ancestor, props = (
      datastore_index.CompositeIndexForQuery(query))
  if not required:
    return []

  # Build the Index proto describing the (kind, ancestor, properties) triple.
  index_pb = entity_pb.Index()
  index_pb.set_entity_type(kind)
  index_pb.set_ancestor(bool(ancestor))
  recommended = datastore_index.GetRecommendedIndexProperties(props)
  for prop_name, prop_direction in recommended:
    prop_pb = entity_pb.Index_Property()
    prop_pb.set_name(prop_name)
    prop_pb.set_direction(prop_direction)
    index_pb.property_list().append(prop_pb)
  return [index_pb]
  def _Dynamic_RunQuery(self, query, query_result):
    """Execute a RunQuery RPC against this in-memory datastore stub.

    Validates the request (no open transaction, offset and component
    limits, required composite indexes when index checking is enabled),
    then filters, orders and slices the stored entities in Python, records
    the query in the persisted query history, and allocates a cursor id
    that is returned through query_result.

    Args:
      query: datastore_pb.Query request proto.
      query_result: datastore_pb.QueryResult proto to populate.

    Raises:
      apiproxy_errors.ApplicationError: for invalid requests or when a
        required composite index is missing.
    """
    # Probe the transaction lock without blocking: queries are rejected
    # while a transaction is in progress.
    if not self.__tx_lock.acquire(False):
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Can\'t query inside a transaction.')
    else:
      self.__tx_lock.release()

    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          ('query is too large. may not have more than %s filters'
           ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

    app = query.app()

    if self.__require_indexes:
      # Accept either an exact index match or, when there are multiple
      # equality filters, an index whose leading properties are any
      # permutation of those equality filters.
      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_admin.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          # for/else: the loop finished without finding a matching index.
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")

    try:
      query.set_app(app)
      results = self.__entities[app, query.kind()].values()
      results = [entity.native for entity in results]
    except KeyError:
      # No entities stored for this (app, kind) pair.
      results = []

    if query.has_ancestor():
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        # An entity is a descendant iff its key path starts with the
        # ancestor's full path.
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)

    # Map filter-op enum values to Python comparison operator strings.
    operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                 datastore_pb.Query_Filter.GREATER_THAN:          '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL:                 '==',
                 }

    for filt in query.filter_list():
      assert filt.op() != datastore_pb.Query_Filter.IN

      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]

      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]

      def passes(entity):
        """ Returns True if the entity passes the filter, False otherwise. """
        if prop in datastore_types._SPECIAL_PROPERTIES:
          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
        else:
          entity_vals = entity.get(prop, [])

        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]

        for fixed_entity_val in entity_vals:
          # Raw (unindexed) property types never match filters.
          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
            continue

          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            if fixed_entity_type == filter_type:
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              # Different types: compare by type tag, mimicking the real
              # datastore's cross-type ordering.
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue

            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)

            try:
              # NOTE(review): eval() of repr()'d property values; acceptable
              # only because inputs come from the local stub, not from
              # untrusted user input.
              ret = eval(comp)
              if ret and ret != NotImplementedError:
                return True
            except TypeError:
              pass

        return False

      results = filter(passes, results)

    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is not a raw property, or
      is a special property."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True

      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]

      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False

    # Entities missing an indexed value for an order property are dropped,
    # matching real datastore behavior for sort orders.
    for order in query.order_list():
      prop = order.property().decode('utf-8')
      results = [entity for entity in results if has_prop_indexed(entity, prop)]

    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in query.order_list():
        prop = o.property().decode('utf-8')

        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)

        if prop in datastore_types._SPECIAL_PROPERTIES:
          a_val = self.__GetSpecialPropertyValue(a, prop)
          b_val = self.__GetSpecialPropertyValue(b, prop)
        else:
          a_val = a[prop]
          if isinstance(a_val, list):
            # Multi-valued property: sort on its extreme value in the
            # requested direction.
            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]

          b_val = b[prop]
          if isinstance(b_val, list):
            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

        cmped = order_compare_properties(a_val, b_val)

        if o.direction() is datastore_pb.Query_Order.DESCENDING:
          cmped = -cmped

        if cmped != 0:
          return cmped

      if cmped == 0:
        # Tie-break on the key for a stable, total ordering.
        return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)

      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)

    results.sort(order_compare_entities)

    # Apply offset/limit, clamping the limit to the stub's maximum.
    offset = 0
    limit = len(results)
    if query.has_offset():
      offset = query.offset()
    if query.has_limit():
      limit = query.limit()
    if limit > _MAXIMUM_RESULTS:
      limit = _MAXIMUM_RESULTS
    results = results[offset:limit + offset]

    # Record the query (minus its hint) in the persisted query history.
    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1
    self.__WriteHistory()

    # Allocate a new cursor id under the lock and stash the result set.
    self.__cursor_lock.acquire()
    cursor = self.__next_cursor
    self.__next_cursor += 1
    self.__cursor_lock.release()
    self.__queries[cursor] = (results, len(results))

    query_result.mutable_cursor().set_cursor(cursor)
    query_result.set_more_results(len(results) > 0)
Example #12
0
def FindIndexToUse(query, indexes):
    """ Matches the query with one of the composite indexes.

  Args:
    query: A datastore_pb.Query.
    indexes: A list of entity_pb.CompositeIndex.
  Returns:
    The composite index of the list for which the composite index matches
    the query. Returns None if there is no match.
  Raises:
    dbconstants.NeedsIndex: if the query requires an index that is not in
      `indexes`, or uses group-by/unordered properties (unsupported here).
  """
    if not query.has_kind():
        return None

    index_list = IndexListForQuery(query)
    if not index_list:
        return None

    # First try an exact definition match.
    index_match = index_list[0]
    for index in indexes:
        if index_match.Equals(index.definition()):
            return index

    _, kind, ancestor, (prefix, (ordered, group_by, unordered)) = (
        datastore_index.CompositeIndexForQuery(query))
    # TODO: Support group_by and unordered.
    if group_by or unordered:
        raise dbconstants.NeedsIndex(u'Query requires an index')

    # Fall back to a structural match: the equality-filter properties
    # (prefix) may appear in any order, followed by the ordered properties
    # in exactly the given order.
    prefix = sorted(prefix)
    for index in indexes:
        if index.definition().entity_type() != kind:
            continue

        if index.definition().ancestor() != ancestor:
            continue

        # Property count must match exactly: prefix block + ordered block.
        if index.definition().property_size() != len(prefix) + len(ordered):
            continue

        index_prefix = sorted([
            prop.name()
            for prop in index.definition().property_list()[:len(prefix)]
        ])
        if index_prefix != prefix:
            continue

        index_matches = True
        for offset, (prop_name, direction) in enumerate(ordered):
            index_prop = index.definition().property(len(prefix) + offset)
            if index_prop.name() != prop_name:
                index_matches = False
                break

            # A direction of None is treated as "either direction matches".
            if direction is not None and direction != index_prop.direction():
                index_matches = False
                break

        if index_matches:
            return index

    raise dbconstants.NeedsIndex(u'Query requires an index')
  def _Dynamic_RunQuery(self, query, query_result):
    """Execute a RunQuery RPC against this in-memory datastore stub.

    Transactional queries must be ancestor queries and are served from the
    transaction snapshot. The method validates offset/component limits and
    (when enabled) composite index requirements, filters and orders the
    entities in Python, records the query history, creates a _Cursor, and
    populates query_result with the first batch.

    Args:
      query: datastore_pb.Query request proto.
      query_result: datastore_pb.QueryResult proto to populate.

    Raises:
      apiproxy_errors.ApplicationError: for invalid requests or when a
        required composite index is missing.
    """
    if query.has_transaction():
      self.__ValidateTransaction(query.transaction())
      if not query.has_ancestor():
        raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Only ancestor queries are allowed inside transactions.')
      # Transactional reads see the snapshot taken at transaction start.
      entities = self.__tx_snapshot
    else:
      entities = self.__entities

    app_id = query.app()
    namespace = query.name_space()
    self.__ValidateAppId(app_id)

    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          ('query is too large. may not have more than %s filters'
           ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

    # Canonicalize filters and orders before matching against indexes.
    (filters, orders) = datastore_index.Normalize(query.filter_list(),
                                                  query.order_list())

    if self.__require_indexes:
      # Accept either an exact index match or, with multiple equality
      # filters, an index whose leading properties are any permutation of
      # those equality filters.
      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app_id)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_index.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          # for/else: the loop finished without finding a matching index.
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")

    try:
      query.set_app(app_id)
      datastore_types.SetNamespace(query, namespace)
      encoded = datastore_types.EncodeAppIdNamespace(app_id, namespace)
      if query.has_kind():
        results = entities[encoded, query.kind()].values()
        results = [entity.native for entity in results]
      else:
        # Kindless query: scan every kind stored under this app/namespace.
        results = []
        for key in entities:
          if key[0] == encoded:
            results += [entity.native for entity in entities[key].values()]
    except KeyError:
      results = []

    if query.has_ancestor():
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        # An entity is a descendant iff its key path starts with the
        # ancestor's full path.
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)

    # Map filter-op enum values to Python comparison operator strings.
    operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                 datastore_pb.Query_Filter.GREATER_THAN:          '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL:                 '==',
                 }

    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is indexed."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True
      elif prop in entity.unindexed_properties():
        return False

      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]

      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False

    for filt in filters:
      assert filt.op() != datastore_pb.Query_Filter.IN

      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]

      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]

      def passes_filter(entity):
        """Returns True if the entity passes the filter, False otherwise.

        The filter being evaluated is filt, the current filter that we're on
        in the list of filters in the query.
        """
        if not has_prop_indexed(entity, prop):
          return False

        try:
          entity_vals = datastore._GetPropertyValue(entity, prop)
        except KeyError:
          entity_vals = []

        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]

        for fixed_entity_val in entity_vals:
          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            if fixed_entity_type == filter_type:
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              # Different types: compare by type tag, mimicking the real
              # datastore's cross-type ordering.
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue

            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)

            try:
              # NOTE(review): eval() of repr()'d property values; acceptable
              # only because inputs come from the local stub, not from
              # untrusted user input.
              ret = eval(comp)
              if ret and ret != NotImplementedError:
                return True
            except TypeError:
              pass

        return False

      results = filter(passes_filter, results)

    # Entities lacking an indexed value for an order property are dropped,
    # matching real datastore behavior for sort orders.
    for order in orders:
      prop = order.property().decode('utf-8')
      results = [entity for entity in results if has_prop_indexed(entity, prop)]

    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in orders:
        prop = o.property().decode('utf-8')

        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)

        a_val = datastore._GetPropertyValue(a, prop)
        if isinstance(a_val, list):
          # Multi-valued property: sort on its extreme value in the
          # requested direction.
          a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]

        b_val = datastore._GetPropertyValue(b, prop)
        if isinstance(b_val, list):
          b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

        cmped = order_compare_properties(a_val, b_val)

        if o.direction() is datastore_pb.Query_Order.DESCENDING:
          cmped = -cmped

        if cmped != 0:
          return cmped

      if cmped == 0:
        # Tie-break on the key for a stable, total ordering.
        return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)

      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)

    results.sort(order_compare_entities)

    # Record the query (minus its hint) in the query history.
    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1

    cursor = _Cursor(query, results, order_compare_entities)
    self.__queries[cursor.cursor] = cursor

    # First-batch size: explicit count, else limit, else the default batch.
    if query.has_count():
      count = query.count()
    elif query.has_limit():
      count = query.limit()
    else:
      count = _BATCH_SIZE

    cursor.PopulateQueryResult(query_result, count, compile=query.compile())

    if query.compile():
      # Echo back a compiled query so the client can construct cursors.
      compiled_query = query_result.mutable_compiled_query()
      compiled_query.set_keys_only(query.keys_only())
      compiled_query.mutable_primaryscan().set_index_name(query.Encode())
Example #14
0
    def _Dynamic_RunQuery(self, query, query_result):
        """Executes a datastore query against the backing MongoDB collection.

        Translates the datastore_pb.Query protocol buffer into a pymongo
        find() spec, applies ordering/offset/limit, registers the resulting
        pymongo cursor under a new cursor id, and writes the first batch of
        results plus a compiled-cursor position into query_result.

        Args:
          query: datastore_pb.Query protocol buffer to execute.
          query_result: datastore_pb.QueryResult populated in place.

        Raises:
          apiproxy_errors.ApplicationError: if the query has too many
            components, or a required composite index is missing.
          datastore_errors.BadRequestError: if the query has no kind.
        """
        if query.keys_only():
            query_result.set_keys_only(True)

        # Enforce the datastore limit on total filters + orders (+ ancestor).
        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        # Defaults: no cursor, no more results; overwritten on success below.
        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        # Optionally verify a matching composite index has been defined.
        if self.__require_indexes:
            (required, kind, ancestor, props,
             num_eq_filters) = (datastore_index.CompositeIndexForQuery(query))
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        # Collection name is the kind, optionally prefixed by the namespace.
        collection = query.kind()
        if query.has_name_space():
            collection = query.name_space(
            ) + _NAMESPACE_CONCAT_STR + collection

        # Record the query (hint stripped) in the per-stub query history.
        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        try:
            prototype = self.__db[collection].find_one()
        except pymongo.errors.InvalidName:
            raise datastore_errors.BadRequestError('query without kind')
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        # Mongo query spec built up from the ancestor and filter list.
        spec = {}

        # Ancestor queries match by key-id prefix via a regex on _id.
        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            # IN filters are expected to be expanded before reaching here.
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            # Merge multiple filters on the same key into one spec entry;
            # contradictory exact-value filters yield an empty result (return).
            if key in spec:
                if (not isinstance(spec[key], types.DictType)
                        and not isinstance(value, types.DictType)):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        offset = 0
        # Cursor magic: resume from a previously issued compiled cursor.
        if query.has_compiled_cursor():
            offset, query_pb, unused_spec, incl = self._DecodeCompiledCursor(
                query.compiled_cursor())

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        # Sentinel offset value means "no offset, unbounded limit".
        if query.offset() == datastore._MAX_INT_32:
            query.set_offset(0)
            query.set_limit(datastore._MAX_INT_32)

        if offset:
            cursor = cursor.skip(int(offset))
        # NOTE(review): skips only when offset != _MAX_QUERY_OFFSET -- confirm
        # this sentinel comparison is intended (vs. a <= bound check).
        elif query.has_offset() and query.offset() != _MAX_QUERY_OFFSET:
            cursor = cursor.skip(int(query.offset()))
        if query.has_limit():
            cursor = cursor.limit(int(query.limit()))

        # Allocate a cursor id under the lock and register the live cursor.
        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        # Cursor magic: encode a resumable position from a cloned cursor so
        # the registered cursor itself is not consumed.
        compiled_cursor = query_result.mutable_compiled_cursor()
        position = compiled_cursor.add_position()
        query_info = self._MinimalQueryInfo(query)
        cloned_cursor = cursor.clone()
        results = list(cloned_cursor)
        if results:
            # start_key = "<absolute offset>!<query info>!<last entity>".
            start_key = _CURSOR_CONCAT_STR.join(
                (str(len(results) + offset), query_info.Encode(),
                 self.__entity_for_mongo_document(results[-1]).Encode()))
            # Populate query result
            result_list = query_result.result_list()
            for doc in results:
                result_list.append(self.__entity_for_mongo_document(doc))
            # NOTE(review): skipped_results is set to the number of *returned*
            # docs, not skipped ones -- confirm intended semantics.
            query_result.set_skipped_results(len(results))
            position.set_start_key(str(start_key))
            position.set_start_inclusive(False)
        del cloned_cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(False)
# Example #15
# 0
    def _Dynamic_RunQuery(self, query, query_result):
        """Runs a query against the in-memory entity store.

        Applies ancestor, property-filter and ordering constraints to the
        locally held entities, records the query in the query history, and
        stashes the full materialized result list under a freshly allocated
        cursor id for later Next() calls.

        Args:
          query: datastore_pb.Query protocol buffer to execute.
          query_result: datastore_pb.QueryResult populated in place.

        Raises:
          apiproxy_errors.ApplicationError: if called while a transaction is
            in progress, or if a required composite index is not defined.
        """
        # Queries are disallowed while a transaction holds the tx lock; a
        # successful non-blocking acquire means no transaction is active.
        if not self.__tx_lock.acquire(False):
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                "Can't query inside a transaction.")
        else:
            self.__tx_lock.release()

        app = self.ResolveAppId(query.app())

        # Optionally require a matching composite index definition. A match
        # is exact, or (for multiple equality filters) equal up to reordering
        # of the equality-filter prefix.
        if self.__require_indexes:
            required_index = datastore_index.CompositeIndexForQuery(query)
            if required_index is not None:
                kind, ancestor, props, num_eq_filters = required_index
                required_key = kind, ancestor, props
                indexes = self.__indexes.get(app)
                if not indexes:
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.BAD_REQUEST,
                        "This query requires a composite index, but none are defined. "
                        "You must create an index.yaml file in your application root."
                    )
                eq_filters_set = set(props[:num_eq_filters])
                remaining_filters = props[num_eq_filters:]
                for index in indexes:
                    definition = datastore_admin.ProtoToIndexDefinition(index)
                    index_key = datastore_index.IndexToKey(definition)
                    if required_key == index_key:
                        break
                    # Equality-filter prefixes are order-insensitive: compare
                    # them as sets, and the rest positionally.
                    if num_eq_filters > 1 and (kind,
                                               ancestor) == index_key[:2]:
                        this_props = index_key[2]
                        this_eq_filters_set = set(this_props[:num_eq_filters])
                        this_remaining_filters = this_props[num_eq_filters:]
                        if (eq_filters_set == this_eq_filters_set and
                                remaining_filters == this_remaining_filters):
                            break
                else:
                    # for/else: no index matched.
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.BAD_REQUEST,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        # Start from every stored entity of this (app, kind).
        try:
            query.set_app(app)
            results = self.__entities[app, query.kind()].values()
            results = [datastore.Entity._FromPb(pb) for pb in results]
        except KeyError:
            results = []

        # Ancestor filter: keep entities whose key path starts with the
        # ancestor's path elements.
        if query.has_ancestor():
            ancestor_path = query.ancestor().path().element_list()

            def is_descendant(entity):
                path = entity.key()._Key__reference.path().element_list()
                return path[:len(ancestor_path)] == ancestor_path

            results = filter(is_descendant, results)

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            # IN filters are expected to be expanded before reaching here.
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            def passes(entity):
                """ Returns True if the entity passes the filter, False otherwise. """
                entity_vals = entity.get(prop, [])
                if type(entity_vals) is not types.ListType:
                    entity_vals = [entity_vals]

                # Round-trip each value through the property protobuf so the
                # comparison sees datastore-normalized values.
                entity_property_list = [
                    datastore_types.ToPropertyPb(prop, value)
                    for value in entity_vals
                ]

                for entity_prop in entity_property_list:
                    fixed_entity_val = datastore_types.FromPropertyPb(
                        entity_prop)

                    for filter_prop in filt.property_list():
                        filter_val = datastore_types.FromPropertyPb(
                            filter_prop)

                        comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)

                        logging.log(logging.DEBUG - 1,
                                    'Evaling filter expression "%s"', comp)

                        # NOTE(review): eval of a repr-built expression; only
                        # acceptable because values come from the local dev
                        # store -- never use this pattern on untrusted input.
                        if eval(comp):
                            return True

                return False

            results = filter(passes, results)

        # Entities missing a sort property are dropped (datastore semantics).
        for order in query.order_list():
            prop = order.property().decode('utf-8')
            results = [entity for entity in results if prop in entity]

        def order_compare(a, b):
            """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
            for o in query.order_list():
                prop = o.property().decode('utf-8')

                a_values = a[prop]
                if not isinstance(a_values, types.ListType):
                    a_values = [a_values]

                b_values = b[prop]
                if not isinstance(b_values, types.ListType):
                    b_values = [b_values]

                # Multi-valued properties sort by their smallest value.
                cmped = cmp(min(a_values), min(b_values))

                if o.direction() is datastore_pb.Query_Order.DESCENDING:
                    cmped = -cmped

                if cmped != 0:
                    return cmped

            return 0

        results.sort(order_compare)

        if query.has_limit():
            results = results[:query.limit()]

        # Record the query (hint stripped) in the persisted query history.
        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1
        self.__WriteHistory()

        # Materialize results as protobufs and register them under a new
        # cursor id allocated under the lock.
        results = [e._ToPb() for e in results]
        self.__cursor_lock.acquire()
        cursor = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor] = (results, len(results))

        query_result.mutable_cursor().set_cursor(cursor)
        query_result.set_more_results(len(results) > 0)
    def _Dynamic_RunQuery(self, query, query_result):
        """Executes a datastore query against the backing MongoDB collection.

        Simpler variant without namespace or compiled-cursor handling:
        translates the query protobuf into a pymongo find() spec, applies
        ordering/offset/limit, and registers the live cursor under a new
        cursor id; results are fetched by subsequent Next() calls.

        Args:
          query: datastore_pb.Query protocol buffer to execute.
          query_result: datastore_pb.QueryResult populated in place.

        Raises:
          apiproxy_errors.ApplicationError: if the offset is too large, the
            query has too many components, or a required composite index is
            missing.
        """
        if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

        if query.keys_only():
            query_result.set_keys_only(True)

        # Enforce the datastore limit on total filters + orders (+ ancestor).
        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        # Defaults: no cursor, no more results; overwritten on success below.
        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        # Optionally verify a matching composite index has been defined.
        if self.__require_indexes:
            required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(
                query)
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()

        # Record the query (hint stripped) in the per-stub query history.
        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        prototype = self.__db[collection].find_one()
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        # Mongo query spec built up from the ancestor and filter list.
        spec = {}

        # Ancestor queries match by key-id prefix via a regex on _id.
        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            # IN filters are expected to be expanded before reaching here.
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            # Merge multiple filters on the same key into one spec entry;
            # contradictory exact-value filters yield an empty result (return).
            if key in spec:
                if not isinstance(spec[key],
                                  types.DictType) and not isinstance(
                                      value, types.DictType):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.has_offset():
            cursor = cursor.skip(query.offset())
        if query.has_limit():
            cursor = cursor.limit(query.limit())

        # Allocate a cursor id under the lock and register the live cursor.
        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        # NOTE(review): more_results is unconditionally True here even when
        # the cursor may be empty -- confirm Next() handles exhaustion.
        query_result.set_more_results(True)