Example No. 1
  def test_insert_composite_indexes(self):
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(123)
    composite_index.set_app_id("appid")

    definition = composite_index.mutable_definition()
    definition.set_entity_type("kind")

    prop1 = definition.add_property()
    prop1.set_name("prop1")
    prop1.set_direction(1) # ascending
    prop2 = definition.add_property()
    prop2.set_name("prop2")
    prop2.set_direction(1) # ascending

    ent = self.get_new_entity_proto(
      "appid", "kind", "entity_name", "prop1", "value", ns="")

    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    db_batch.should_receive("batch_put_entity").and_return(ASYNC_NONE).once()
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager,
                              self.get_zookeeper())
    yield dd.insert_composite_indexes([ent], [composite_index])
Example No. 2
  def load_composite_cache(self, app_id):
    """ Load the composite index cache for an application ID.

    Args:
      app_id: A str, the application ID.
    Returns:
      True if the application has composites. False otherwise.
    """
    start_key = dbconstants.KEY_DELIMITER.join([app_id, 'index', ''])
    end_key = dbconstants.KEY_DELIMITER.join(
      [app_id, 'index', dbconstants.TERMINATING_STRING])

    results = self.db_access.range_query(dbconstants.METADATA_TABLE,
      dbconstants.METADATA_TABLE, start_key, end_key,
      dbconstants.MAX_NUMBER_OF_COMPOSITE_INDEXES)
    list_result = []
    for list_item in results:
      for _, value in list_item.iteritems():
        list_result.append(value['data'])

    self.composite_index_cache[app_id] = self.NO_COMPOSITES
    kind_index_dictionary = {}
    for index in list_result:
      new_index = entity_pb.CompositeIndex()
      new_index.ParseFromString(index)
      kind = new_index.definition().entity_type()
      if kind in kind_index_dictionary:
        kind_index_dictionary[kind].append(new_index)
      else:
        kind_index_dictionary[kind] = [new_index]
    if kind_index_dictionary:
      self.composite_index_cache[app_id] = kind_index_dictionary
      return True

    return False
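
A minimal sketch (not part of the original source) of how a caller might consult the cache that load_composite_cache builds, assuming the layout shown above ({app_id: {kind: [CompositeIndex, ...]}} or the NO_COMPOSITES sentinel); the helper name and the `datastore` argument are illustrative:

def composite_indexes_for_kind(datastore, app_id, kind):
  # Populate the cache on first use for this application ID.
  if app_id not in datastore.composite_index_cache:
    datastore.load_composite_cache(app_id)
  cached = datastore.composite_index_cache.get(app_id)
  # NO_COMPOSITES is the sentinel stored above when an app has no indexes.
  if not cached or cached == datastore.NO_COMPOSITES:
    return []
  return cached.get(kind, [])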
Example No. 3
  def create_index_request(self, app_id, http_request_data):
    """ High level function for creating composite indexes.

    Args:
       app_id: Name of the application.
       http_request_data: Stores the protocol buffer request from the 
               AppServer.
    Returns:
       A tuple of an encoded response, error code, and error details.
    """
    global datastore_access
    request = entity_pb.CompositeIndex(http_request_data)
    response = api_base_pb.Integer64Proto()

    if READ_ONLY:
      logger.warning('Unable to create in read-only mode: {}'.format(request))
      raise gen.Return(
        ('', datastore_pb.Error.CAPABILITY_DISABLED,
         'Datastore is in read-only mode.'))

    try:
      index_id = yield datastore_access.create_composite_index(app_id, request)
      response.set_value(index_id)
    except dbconstants.AppScaleDBConnectionError as error:
      logger.exception('DB connection error during index creation')
      raise gen.Return(('', datastore_pb.Error.INTERNAL_ERROR, str(error)))
    raise gen.Return((response.Encode(), 0, ''))
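
The handler reports failures through the (payload, error code, error detail) tuple rather than raising. A hedged sketch of a coroutine caller decoding that tuple; `handler` stands in for the object that owns create_index_request, and the proto1-style constructor taking serialized bytes mirrors how the handlers above parse http_request_data:

@gen.coroutine
def create_index(handler, app_id, http_request_data):
  # Invoke the handler above and unpack its (payload, code, detail) tuple.
  encoded, error_code, error_detail = yield handler.create_index_request(
    app_id, http_request_data)
  if error_code != 0:
    raise gen.Return((None, error_detail))
  # The payload is a serialized api_base_pb.Integer64Proto holding the new ID.
  raise gen.Return((api_base_pb.Integer64Proto(encoded).value(), ''))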
Example No. 4
  def delete_index_request(self, app_id, http_request_data):
    """ Deletes a composite index for a given application.
  
    Args:
      app_id: Name of the application.
      http_request_data: A serialized CompositeIndex item.
    Returns:
      A tuple of an encoded entity_pb.VoidProto, error code, and
      error explanation.
    """
    global datastore_access
    request = entity_pb.CompositeIndex(http_request_data)
    response = api_base_pb.VoidProto()

    if READ_ONLY:
      logger.warning('Unable to delete in read-only mode: {}'.format(request))
      raise gen.Return(
        ('', datastore_pb.Error.CAPABILITY_DISABLED,
         'Datastore is in read-only mode.'))

    try:
      yield datastore_access.delete_composite_index_metadata(app_id, request)
    except (dbconstants.AppScaleDBConnectionError,
            dbconstants.InternalError) as error:
      logger.exception('DB connection error during index deletion')
      raise gen.Return(('', datastore_pb.Error.INTERNAL_ERROR, str(error)))
    except dbconstants.BadRequest as error:
      raise gen.Return(('', datastore_pb.Error.BAD_REQUEST, str(error)))

    raise gen.Return((response.Encode(), 0, ''))
Example No. 5
    def test_delete_composite_indexes(self):
        db_batch = flexmock()
        db_batch.should_receive("batch_delete").and_return(None)
        dd = DatastoreDistributed(db_batch, self.get_zookeeper())
        dd = flexmock(dd)
        dd.should_receive("get_composite_index_key").and_return("somekey")
        dd.should_receive("get_entity_kind").and_return("kind")
        item1 = self.get_new_entity_proto("appid",
                                          "kind",
                                          "ent_name",
                                          "prop1",
                                          "propvalue",
                                          ns="")
        item2 = self.get_new_entity_proto("appid",
                                          "kind",
                                          "ent_name1",
                                          "prop1",
                                          "propvalue",
                                          ns="")
        composite_index = entity_pb.CompositeIndex()
        composite_index.set_id(123)
        composite_index.set_app_id("appid")

        definition = composite_index.mutable_definition()
        definition.set_entity_type("kind")
        dd.delete_composite_indexes([item1, item2], [composite_index])
Example No. 6
    def test_get_composite_index_key(self):
        db_batch = flexmock()
        dd = DatastoreDistributed(db_batch, self.get_zookeeper())
        dd = flexmock(dd)

        composite_index = entity_pb.CompositeIndex()
        composite_index.set_id(123)
        composite_index.set_app_id("appid")

        definition = composite_index.mutable_definition()
        definition.set_entity_type("kind")

        prop1 = definition.add_property()
        prop1.set_name("prop1")
        prop1.set_direction(1)  # ascending
        prop2 = definition.add_property()
        prop2.set_name("prop2")
        prop2.set_direction(1)  # ascending

        ent = self.get_new_entity_proto("appid",
                                        "kind",
                                        "entity_name",
                                        "prop1",
                                        "value",
                                        ns="")

        self.assertEquals(
            dd.get_composite_index_key(composite_index, ent),
            "appid\x00\x00123\x00\x9avalue\x00\x00kind:entity_name!")
Example No. 7
  def update_index_request(self, app_id, http_request_data):
    """ High level function for updating a composite index.

    Args:
      app_id: A string containing the application ID.
      http_request_data: A string containing the protocol buffer request
        from the AppServer.
    Returns:
       A tuple containing an encoded response, error code, and error details.
    """
    global datastore_access
    index = entity_pb.CompositeIndex(http_request_data)
    response = api_base_pb.VoidProto()

    if READ_ONLY:
      logger.warning('Unable to update in read-only mode: {}'.format(index))
      return ('', datastore_pb.Error.CAPABILITY_DISABLED,
              'Datastore is in read-only mode.')

    state = index.state()
    if state not in [index.READ_WRITE, index.WRITE_ONLY]:
      state_name = entity_pb.CompositeIndex.State_Name(state)
      error_message = 'Unable to update index because state is {}. '\
        'Index: {}'.format(state_name, index)
      logger.error(error_message)
      return '', datastore_pb.Error.PERMISSION_DENIED, error_message
    else:
      # Updating index in background so we can return a response quickly.
      IOLoop.current().spawn_callback(
        datastore_access.update_composite_index, app_id, index)

    return response.Encode(), 0, ''
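
For reference, a minimal sketch (not from the original source) of the serialized CompositeIndex payload this handler expects; the field values are illustrative and mirror the protos built in the tests above, and only READ_WRITE or WRITE_ONLY states pass the state check:

index = entity_pb.CompositeIndex()
index.set_app_id('appid')            # illustrative values
index.set_id(123)
index.set_state(entity_pb.CompositeIndex.READ_WRITE)
definition = index.mutable_definition()
definition.set_entity_type('kind')
definition.set_ancestor(0)
prop = definition.add_property()
prop.set_name('prop1')
prop.set_direction(entity_pb.Index_Property.ASCENDING)
http_request_data = index.Encode()   # payload passed to update_index_request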
Example No. 8
  def _Dynamic_CreateIndex(self, index, id_response):
    if index.id() != 0:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'New index id must be 0.')
    elif self.__FindIndex(index):
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Index already exists.')

    self.__index_id_lock.acquire()
    index.set_id(self.__next_index_id)
    id_response.set_value(self.__next_index_id)
    self.__next_index_id += 1
    self.__index_id_lock.release()

    clone = entity_pb.CompositeIndex()
    clone.CopyFrom(index)
    app = index.app_id()
    clone.set_app_id(app)

    self.__indexes_lock.acquire()
    try:
      if app not in self.__indexes:
        self.__indexes[app] = []
      self.__indexes[app].append(clone)
    finally:
      self.__indexes_lock.release()
Example No. 9
    def _Dynamic_GetIndices(self, app_str, composite_indices):
        if app_str.value() != self.__db.name():
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                'Getting indexes for a different '
                'app unsupported.')

        def from_index_name(name):
            elements = name.split("_")
            index = []
            while len(elements):
                if not elements[0]:
                    elements = elements[1:]
                    elements[0] = "_" + elements[0]
                index.append((elements[0], int(elements[1])))
                elements = elements[2:]
            return index

        for collection in self.__db.collection_names():
            info = self.__db[collection].index_information()
            for index in info.keys():
                index_pb = entity_pb.CompositeIndex()
                index_pb.set_app_id(self.__db.name())
                index_pb.mutable_definition().set_entity_type(collection)
                index_pb.mutable_definition().set_ancestor(False)
                index_pb.set_state(2)  # READ_WRITE
                index_pb.set_id(1)  # bogus id
                for (k, v) in from_index_name(index):
                    if k == "_id":
                        k = "__key__"
                    p = index_pb.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v == pymongo.ASCENDING and 1 or 2)
                composite_indices.index_list().append(index_pb)
Example No. 10
def IndexDefinitionToProto(app_id, index_definition):
    """Transform individual Index definition to protocol buffer.

  Args:
    app_id: Application id for new protocol buffer CompositeIndex.
    index_definition: datastore_index.Index object to transform.

  Returns:
    New entity_pb.CompositeIndex with default values set and index
    information filled in.
  """
    proto = entity_pb.CompositeIndex()

    proto.set_app_id(app_id)
    proto.set_id(0)
    proto.set_state(entity_pb.CompositeIndex.WRITE_ONLY)

    definition_proto = proto.mutable_definition()
    definition_proto.set_entity_type(index_definition.kind)
    definition_proto.set_ancestor(index_definition.ancestor)

    if index_definition.properties is not None:
        for prop in index_definition.properties:
            prop_proto = definition_proto.add_property()
            prop_proto.set_name(prop.name)
            prop_proto.set_direction(_DIRECTION_MAP[prop.direction])

    return proto
Example No. 11
def migrate_composite_index_metadata(cluster, session, zk_client):
  """  Moves any existing datastore index metadata to ZooKeeper.

  Args:
    cluster: A cassandra.cluster.Cluster object.
    session: A cassandra.cluster.Session object.
    zk_client: A kazoo.client.KazooClient object.
  """
  keyspace_metadata = cluster.metadata.keyspaces[KEYSPACE]
  if dbconstants.METADATA_TABLE not in keyspace_metadata.tables:
    return

  logging.info('Fetching previously-defined index definitions')
  results = session.execute(
    'SELECT * FROM "{}"'.format(dbconstants.METADATA_TABLE))
  indexes_by_project = defaultdict(list)
  for result in results:
    try:
      index_pb = entity_pb.CompositeIndex(result.value)
    except ProtocolBufferDecodeError:
      logging.warning('Invalid composite index: {}'.format(result.value))
      continue

    index = DatastoreIndex.from_pb(index_pb)
    # Assume the index is complete.
    index.ready = True
    indexes_by_project[index.project_id].append(index)

  for project_id, indexes in indexes_by_project.items():
    logging.info('Adding indexes for {}'.format(project_id))
    merge_indexes(zk_client, project_id, indexes)

  logging.info('Removing previously-defined index definitions from Cassandra')
  session.execute('DROP TABLE "{}"'.format(dbconstants.METADATA_TABLE),
                  timeout=SCHEMA_CHANGE_TIMEOUT)
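
A hypothetical wiring sketch for calling this migration. The host addresses are placeholders and KEYSPACE is the constant the function itself reads; only the client types need to match the documented arguments:

from cassandra.cluster import Cluster
from kazoo.client import KazooClient

cluster = Cluster(['127.0.0.1'])
session = cluster.connect(KEYSPACE)              # keyspace checked above
zk_client = KazooClient(hosts='127.0.0.1:2181')
zk_client.start()
try:
  migrate_composite_index_metadata(cluster, session, zk_client)
finally:
  zk_client.stop()
  cluster.shutdown()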
Example No. 12
def IndexDefinitionToProto(app_id, index_definition):
  """Transform individual Index definition to protocol buffer.

  Args:
    app_id: Application id for new protocol buffer CompositeIndex.
    index_definition: datastore_index.Index object to transform.

  Returns:
    New entity_pb.CompositeIndex with default values set and index
    information filled in.
  """
  proto = entity_pb.CompositeIndex()

  proto.set_app_id(app_id)
  proto.set_id(0)
  proto.set_state(entity_pb.CompositeIndex.WRITE_ONLY)

  definition_proto = proto.mutable_definition()
  definition_proto.set_entity_type(index_definition.kind)
  definition_proto.set_ancestor(index_definition.ancestor)

  if index_definition.properties is not None:
    for prop in index_definition.properties:
      prop_proto = definition_proto.add_property()
      prop_proto.set_name(prop.name)

      if prop.mode == 'geospatial':
        prop_proto.set_mode(entity_pb.Index_Property.GEOSPATIAL)
      elif prop.IsAscending():
        prop_proto.set_direction(entity_pb.Index_Property.ASCENDING)
      else:
        prop_proto.set_direction(entity_pb.Index_Property.DESCENDING)

  return proto
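
An illustrative use of this helper: parse a small index.yaml snippet with datastore_index.ParseIndexDefinitions (the same routine the _SetupIndexes and SetupIndexes examples further down rely on) and convert each definition. The YAML content and 'appid' value are made up for the sketch:

index_yaml_data = """
indexes:
- kind: Greeting
  ancestor: no
  properties:
  - name: content
  - name: author
    direction: desc
"""
index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
# Convert each parsed Index definition into a CompositeIndex proto.
protos = [IndexDefinitionToProto('appid', index_def)
          for index_def in index_defs.indexes]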
Example No. 13
  def test_get_composite_index_key(self):
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager,
                              self.get_zookeeper())
    dd = flexmock(dd)

    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(123)
    composite_index.set_app_id("appid")

    definition = composite_index.mutable_definition()
    definition.set_entity_type("kind")

    prop1 = definition.add_property()
    prop1.set_name("prop1")
    prop1.set_direction(1) # ascending
    prop2 = definition.add_property()
    prop2.set_name("prop2")
    prop2.set_direction(1) # ascending

    ent = self.get_new_entity_proto("appid", "kind", "entity_name", "prop1", "value", ns="")

    self.assertEquals(
      dd.get_composite_index_key(composite_index, ent),
      "appid\x00\x00123\x00\x9avalue\x01\x01\x00\x00kind:entity_name\x01")
Example No. 14
  def test_create_composite_index(self):
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    db_batch.should_receive("batch_put_entity").and_return(ASYNC_NONE)
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager,
                              self.get_zookeeper())
    zk_handle = flexmock(ensure_path=lambda path: None,
                         get=lambda path: (None, flexmock(version=None)),
                         set=lambda path, value, version: None)
    dd.zookeeper.handle = zk_handle
    dd = flexmock(dd)
    index = entity_pb.CompositeIndex()
    index.set_app_id("appid")
    index.set_state(2)
    definition = index.mutable_definition()
    definition.set_entity_type("kind")
    definition.set_ancestor(0)
    prop1 = definition.add_property()
    prop1.set_name("prop1")
    prop1.set_direction(1) # ascending
    prop2 = definition.add_property()
    prop2.set_name("prop2")
    prop2.set_direction(1) # ascending

    index_id = yield dd.create_composite_index("appid", index)
    assert index_id > 0
Example No. 15
  def test_delete_composite_index_metadata(self):
    db_batch = flexmock()
    db_batch.should_receive("batch_delete").and_return(None)
    dd = DatastoreDistributed(db_batch, self.get_zookeeper())
    dd = flexmock(dd)
    dd.should_receive("get_meta_data_key").and_return("somekey")
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(1)
    dd.delete_composite_index_metadata("appid", composite_index)
Example No. 16
  def test_delete_composite_index_metadata(self):
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    db_batch.should_receive("batch_delete").and_return(ASYNC_NONE)
    transaction_manager = flexmock()
    dd = DatastoreDistributed(db_batch, transaction_manager,
                              self.get_zookeeper())
    dd = flexmock(dd)
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(1)
    yield dd.delete_composite_index_metadata("appid", composite_index)
Example No. 17
  def test_index_deletions(self):
    old_entity = self.get_new_entity_proto(*self.BASIC_ENTITY)

    # No deletions should occur when the entity doesn't change.
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version').and_return(True)
    dd = DatastoreDistributed(db_batch, None)
    self.assertListEqual([], index_deletions(old_entity, old_entity))

    # When a property changes, the previous index entries should be deleted.
    new_entity = entity_pb.EntityProto()
    new_entity.MergeFrom(old_entity)
    new_entity.property_list()[0].value().set_stringvalue('updated content')

    deletions = index_deletions(old_entity, new_entity)
    self.assertEqual(len(deletions), 2)
    self.assertEqual(deletions[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(deletions[1]['table'], dbconstants.DSC_PROPERTY_TABLE)

    prop = old_entity.add_property()
    prop.set_name('author')
    value = prop.mutable_value()
    value.set_stringvalue('author1')

    prop = new_entity.add_property()
    prop.set_name('author')
    value = prop.mutable_value()
    value.set_stringvalue('author1')

    # When given an index, an entry should be removed from the composite table.
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(123)
    composite_index.set_app_id('guestbook')
    definition = composite_index.mutable_definition()
    definition.set_entity_type('Greeting')
    prop1 = definition.add_property()
    prop1.set_name('content')
    prop1.set_direction(datastore_pb.Query_Order.ASCENDING)
    prop2 = definition.add_property()
    prop2.set_name('author')
    prop2.set_direction(datastore_pb.Query_Order.ASCENDING)
    deletions = index_deletions(old_entity, new_entity, (composite_index,))
    self.assertEqual(len(deletions), 3)
    self.assertEqual(deletions[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(deletions[1]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(deletions[2]['table'], dbconstants.COMPOSITE_TABLE)

    # No composite deletions should occur when the entity type differs.
    definition.set_entity_type('TestEntity')
    deletions = index_deletions(old_entity, new_entity, (composite_index,))
    self.assertEqual(len(deletions), 2)
Example No. 18
    def test_delete_composite_indexes(self):
        db_batch = flexmock()
        db_batch.should_receive("batch_delete").and_return(None)
        dd = DatastoreDistributed(db_batch, self.get_zookeeper())
        dd = flexmock(dd)
        dd.should_receive("get_composite_index_key").and_return("somekey")
        dd.should_receive("get_entity_kind").and_return("kind")
        item1 = Item(key_name="Bob", name="Bob", _app="hello")
        item2 = Item(key_name="Sally", name="Sally", _app="hello")
        composite_index = entity_pb.CompositeIndex()
        composite_index.set_id(123)
        composite_index.set_app_id("appid")

        definition = composite_index.mutable_definition()
        definition.set_entity_type("kind")

        dd.delete_composite_indexes([item1, item2], [composite_index])
Example No. 19
  def test_deletions_for_entity(self):
    entity = self.get_new_entity_proto(*self.BASIC_ENTITY)

    # Deleting an entity with one property should remove four entries.
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version').and_return(True)
    dd = DatastoreDistributed(db_batch, None)
    deletions = deletions_for_entity(entity)
    self.assertEqual(len(deletions), 4)
    self.assertEqual(deletions[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(deletions[1]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(deletions[2]['table'], dbconstants.APP_ENTITY_TABLE)
    self.assertEqual(deletions[3]['table'], dbconstants.APP_KIND_TABLE)

    prop = entity.add_property()
    prop.set_name('author')
    value = prop.mutable_value()
    value.set_stringvalue('author1')

    # Deleting an entity with two properties and one composite index should
    # remove seven entries.
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(123)
    composite_index.set_app_id('guestbook')
    definition = composite_index.mutable_definition()
    definition.set_entity_type('Greeting')
    prop1 = definition.add_property()
    prop1.set_name('content')
    prop1.set_direction(datastore_pb.Query_Order.ASCENDING)
    prop2 = definition.add_property()
    prop2.set_name('author')
    prop2.set_direction(datastore_pb.Query_Order.ASCENDING)
    deletions = deletions_for_entity(entity, (composite_index,))
    self.assertEqual(len(deletions), 7)
    self.assertEqual(deletions[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(deletions[1]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(deletions[2]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(deletions[3]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(deletions[4]['table'], dbconstants.COMPOSITE_TABLE)
    self.assertEqual(deletions[5]['table'], dbconstants.APP_ENTITY_TABLE)
    self.assertEqual(deletions[6]['table'], dbconstants.APP_KIND_TABLE)
Example No. 20
    def test_create_composite_index(self):
        db_batch = flexmock()
        db_batch.should_receive("batch_put_entity").and_return(None)
        dd = DatastoreDistributed(db_batch, self.get_zookeeper())
        dd = flexmock(dd)
        dd.should_receive("get_meta_data_key").and_return("somekey")
        index = entity_pb.CompositeIndex()
        index.set_app_id("appid")
        index.set_state(2)
        definition = index.mutable_definition()
        definition.set_entity_type("kind")
        definition.set_ancestor(0)
        prop1 = definition.add_property()
        prop1.set_name("prop1")
        prop1.set_direction(1)  # ascending
        prop2 = definition.add_property()
        prop2.set_name("prop2")
        prop2.set_direction(1)  # ascending

        dd.create_composite_index("appid", index)
        assert index.id() > 0
Example No. 21
    def _SetupIndexes(self, _open=open):
        """Ensure that the set of existing composite indexes matches index.yaml.
    
    Create any new indexes, and delete indexes which are no longer required.
   
    Args:
      _open: Function used to open a file.
    """
        if not self.__root_path:
            logging.warning("No index.yaml was loaded.")
            return
        index_yaml_file = os.path.join(self.__root_path, 'index.yaml')
        if (self.__cached_yaml[0] == index_yaml_file
                and os.path.exists(index_yaml_file) and
                os.path.getmtime(index_yaml_file) == self.__cached_yaml[1]):
            requested_indexes = self.__cached_yaml[2]
        else:
            try:
                index_yaml_mtime = os.path.getmtime(index_yaml_file)
                fh = _open(index_yaml_file, 'r')
            except (OSError, IOError):
                logging.info("Error reading file")
                index_yaml_data = None
            else:
                try:
                    index_yaml_data = fh.read()
                finally:
                    fh.close()
            requested_indexes = []
            if index_yaml_data is not None:
                index_defs = datastore_index.ParseIndexDefinitions(
                    index_yaml_data)
                if index_defs is not None and index_defs.indexes is not None:
                    requested_indexes = datastore_index.IndexDefinitionsToProtos(
                        self.__app_id, index_defs.indexes)
                    self.__cached_yaml = (index_yaml_file, index_yaml_mtime,
                                          requested_indexes)

        existing_indexes = datastore_pb.CompositeIndices()
        app_str = api_base_pb.StringProto()
        app_str.set_value(self.__app_id)
        self._Dynamic_GetIndices(app_str, existing_indexes)

        requested = dict(
            (x.definition().Encode(), x) for x in requested_indexes)
        existing = dict((x.definition().Encode(), x)
                        for x in existing_indexes.index_list())

        # Delete any indexes that are no longer requested.
        deleted = 0
        for key, index in existing.iteritems():
            if key not in requested:
                self._Dynamic_DeleteIndex(index, api_base_pb.VoidProto())
                deleted += 1

        # Add existing indexes in the index cache.
        for key, index in existing.iteritems():
            new_index = entity_pb.CompositeIndex()
            new_index.CopyFrom(index)
            ent_kind = new_index.definition().entity_type()
            if ent_kind in self.__index_cache:
                new_indexes = self.__index_cache[ent_kind]
                new_indexes.append(new_index)
                self.__index_cache[ent_kind] = new_indexes
            else:
                self.__index_cache[ent_kind] = [new_index]

        # Compared the existing indexes to the requested ones and create any
        # new indexes requested.
        created = 0
        for key, index in requested.iteritems():
            if key not in existing:
                new_index = entity_pb.CompositeIndex()
                new_index.CopyFrom(index)
                new_index.set_id(
                    self._Dynamic_CreateIndex(
                        new_index, api_base_pb.Integer64Proto()).value())
                new_index.set_state(entity_pb.CompositeIndex.READ_WRITE)
                self._Dynamic_UpdateIndex(new_index, api_base_pb.VoidProto())
                created += 1

                ent_kind = new_index.definition().entity_type()
                if ent_kind in self.__index_cache:
                    new_indexes = self.__index_cache[ent_kind]

                    new_indexes.append(new_index)
                    self.__index_cache[ent_kind] = new_indexes
                else:
                    self.__index_cache[ent_kind] = [new_index]

        if created or deleted:
            logging.info('Created %d and deleted %d index(es); total %d',
                         created, deleted, len(requested))
Example No. 22
  def test_mutations_for_entity(self):
    entity = self.get_new_entity_proto(*self.BASIC_ENTITY)
    txn = 1

    # Adding an entity with one property should add four entries.
    db_batch = flexmock()
    db_batch.should_receive('valid_data_version_sync').and_return(True)
    mutations = mutations_for_entity(entity, txn)
    self.assertEqual(len(mutations), 4)
    self.assertEqual(mutations[0]['table'], dbconstants.APP_ENTITY_TABLE)
    self.assertEqual(mutations[1]['table'], dbconstants.APP_KIND_TABLE)
    self.assertEqual(mutations[2]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[3]['table'], dbconstants.DSC_PROPERTY_TABLE)

    # Updating an entity with one property should delete two entries and add
    # four more.
    new_entity = entity_pb.EntityProto()
    new_entity.MergeFrom(entity)
    new_entity.property_list()[0].value().set_stringvalue('updated content')
    mutations = mutations_for_entity(entity, txn, new_entity)
    self.assertEqual(len(mutations), 6)
    self.assertEqual(mutations[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[0]['operation'], dbconstants.Operations.DELETE)
    self.assertEqual(mutations[1]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(mutations[1]['operation'], dbconstants.Operations.DELETE)
    self.assertEqual(mutations[2]['table'], dbconstants.APP_ENTITY_TABLE)
    self.assertEqual(mutations[3]['table'], dbconstants.APP_KIND_TABLE)
    self.assertEqual(mutations[4]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[5]['table'], dbconstants.DSC_PROPERTY_TABLE)

    prop = entity.add_property()
    prop.set_name('author')
    prop.set_multiple(0)
    value = prop.mutable_value()
    value.set_stringvalue('author1')

    prop = new_entity.add_property()
    prop.set_name('author')
    prop.set_multiple(0)
    value = prop.mutable_value()
    value.set_stringvalue('author1')

    # Updating one property of an entity with two properties and one composite
    # index should remove three entries and add seven more.
    composite_index = entity_pb.CompositeIndex()
    composite_index.set_id(123)
    composite_index.set_app_id('guestbook')
    definition = composite_index.mutable_definition()
    definition.set_entity_type('Greeting')
    prop1 = definition.add_property()
    prop1.set_name('content')
    prop1.set_direction(datastore_pb.Query_Order.ASCENDING)
    prop2 = definition.add_property()
    prop2.set_name('author')
    prop2.set_direction(datastore_pb.Query_Order.ASCENDING)

    mutations = mutations_for_entity(entity, txn, new_entity,
                                     (composite_index,))
    self.assertEqual(len(mutations), 10)
    self.assertEqual(mutations[0]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[0]['operation'], dbconstants.Operations.DELETE)
    self.assertEqual(mutations[1]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(mutations[1]['operation'], dbconstants.Operations.DELETE)
    self.assertEqual(mutations[2]['table'], dbconstants.COMPOSITE_TABLE)
    self.assertEqual(mutations[2]['operation'], dbconstants.Operations.DELETE)
    self.assertEqual(mutations[3]['table'], dbconstants.APP_ENTITY_TABLE)
    self.assertEqual(mutations[4]['table'], dbconstants.APP_KIND_TABLE)
    self.assertEqual(mutations[5]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[6]['table'], dbconstants.ASC_PROPERTY_TABLE)
    self.assertEqual(mutations[7]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(mutations[8]['table'], dbconstants.DSC_PROPERTY_TABLE)
    self.assertEqual(mutations[9]['table'], dbconstants.COMPOSITE_TABLE)
Example No. 23
    def _Dynamic_RunQuery(self, query, query_result):
        if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

        if query.keys_only():
            query_result.set_keys_only(True)

        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        if self.__require_indexes:
            (required, kind, ancestor, props,
             num_eq_filters) = datastore_index.CompositeIndexForQuery(query)
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        prototype = self.__db[collection].find_one()
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        spec = {}

        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            if key in spec:
                if not isinstance(spec[key],
                                  types.DictType) and not isinstance(
                                      value, types.DictType):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.has_offset():
            cursor = cursor.skip(query.offset())
        if query.has_limit():
            cursor = cursor.limit(query.limit())

        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(True)
Example No. 24
def SetupIndexes(app_id, root_path):
    """Ensure that the set of existing composite indexes matches index.yaml.

  Note: this is similar to the algorithm used by the admin console for
  the same purpose.

  Args:
    app_id: Application ID being served.
    root_path: Path to the root of the application.
  """
    index_yaml_file = os.path.join(root_path, 'index.yaml')
    global _cached_yaml
    if _cached_yaml[0] == index_yaml_file and os.path.exists(
            index_yaml_file) and os.path.getmtime(
                index_yaml_file) == _cached_yaml[1]:
        requested_indexes = _cached_yaml[2]
    else:
        try:
            index_yaml_mtime = os.path.getmtime(index_yaml_file)
            fh = open(index_yaml_file, 'r')
        except (OSError, IOError):
            index_yaml_data = None
        else:
            try:
                index_yaml_data = fh.read()
            finally:
                fh.close()

        requested_indexes = []
        if index_yaml_data is not None:

            index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
            if index_defs is not None and index_defs.indexes is not None:

                requested_indexes = datastore_index.IndexDefinitionsToProtos(
                    app_id, index_defs.indexes)
                _cached_yaml = (index_yaml_file, index_yaml_mtime,
                                requested_indexes)

    existing_indexes = datastore_admin.GetIndices(app_id)

    requested = dict((x.definition().Encode(), x) for x in requested_indexes)
    existing = dict((x.definition().Encode(), x) for x in existing_indexes)

    created = 0
    for key, index in requested.iteritems():
        if key not in existing:
            new_index = entity_pb.CompositeIndex()
            new_index.CopyFrom(index)
            id = datastore_admin.CreateIndex(new_index)
            new_index.set_id(id)
            new_index.set_state(entity_pb.CompositeIndex.READ_WRITE)
            datastore_admin.UpdateIndex(new_index)
            created += 1

    deleted = 0
    for key, index in existing.iteritems():
        if key not in requested:
            datastore_admin.DeleteIndex(index)
            deleted += 1

    if created or deleted:
        logging.info("Created %d and deleted %d index(es); total %d", created,
                     deleted, len(requested))
Example No. 25
    def _Dynamic_RunQuery(self, query, query_result):
        if query.keys_only():
            query_result.set_keys_only(True)

        num_components = len(query.filter_list()) + len(query.order_list())
        if query.has_ancestor():
            num_components += 1
        if num_components > _MAX_QUERY_COMPONENTS:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.BAD_REQUEST,
                ('query is too large. may not have more than %s filters'
                 ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

        app = query.app()

        query_result.mutable_cursor().set_cursor(0)
        query_result.set_more_results(False)

        if self.__require_indexes:
            (required, kind, ancestor, props,
             num_eq_filters) = (datastore_index.CompositeIndexForQuery(query))
            if required:
                index = entity_pb.CompositeIndex()
                index.mutable_definition().set_entity_type(kind)
                index.mutable_definition().set_ancestor(ancestor)
                for (k, v) in props:
                    p = index.mutable_definition().add_property()
                    p.set_name(k)
                    p.set_direction(v)

                if props and not self.__has_index(index):
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.NEED_INDEX,
                        "This query requires a composite index that is not defined. "
                        "You must update the index.yaml file in your application root."
                    )

        collection = query.kind()
        if query.has_name_space():
            collection = (query.name_space() + _NAMESPACE_CONCAT_STR +
                          collection)

        clone = datastore_pb.Query()
        clone.CopyFrom(query)
        clone.clear_hint()
        if clone in self.__query_history:
            self.__query_history[clone] += 1
        else:
            self.__query_history[clone] = 1

        # HACK we need to get one Entity from this collection so we know what the
        # property types are (because we need to construct queries that depend on
        # the types of the properties)...
        try:
            prototype = self.__db[collection].find_one()
        except pymongo.errors.InvalidName:
            raise datastore_errors.BadRequestError('query without kind')
        if prototype is None:
            return
        prototype = datastore.Entity._FromPb(
            self.__entity_for_mongo_document(prototype))

        spec = {}

        if query.has_ancestor():
            spec["_id"] = re.compile("^%s.*$" %
                                     self.__id_for_key(query.ancestor()))

        operators = {
            datastore_pb.Query_Filter.LESS_THAN: '<',
            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
            datastore_pb.Query_Filter.GREATER_THAN: '>',
            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
            datastore_pb.Query_Filter.EQUAL: '==',
        }

        for filt in query.filter_list():
            assert filt.op() != datastore_pb.Query_Filter.IN

            prop = filt.property(0).name().decode('utf-8')
            op = operators[filt.op()]

            filter_val_list = [
                datastore_types.FromPropertyPb(filter_prop)
                for filter_prop in filt.property_list()
            ]

            (key, value) = self.__filter_binding(prop, filter_val_list[0], op,
                                                 prototype)

            if key in spec:
                if (not isinstance(spec[key], types.DictType)
                        and not isinstance(value, types.DictType)):
                    if spec[key] != value:
                        return
                elif not isinstance(spec[key], types.DictType):
                    value["$in"] = [spec[key]]
                    spec[key] = value
                elif not isinstance(value, types.DictType):
                    spec[key]["$in"] = [value]
                else:
                    spec[key].update(value)
            else:
                spec[key] = value

        offset = 0
        # Cursor magic
        if query.has_compiled_cursor():
            offset, query_pb, unused_spec, incl = self._DecodeCompiledCursor(
                query.compiled_cursor())

        cursor = self.__db[collection].find(spec)

        order = self.__translate_order_for_mongo(query.order_list(), prototype)
        if order is None:
            return
        if order:
            cursor = cursor.sort(order)

        if query.offset() == datastore._MAX_INT_32:
            query.set_offset(0)
            query.set_limit(datastore._MAX_INT_32)

        if offset:
            cursor = cursor.skip(int(offset))
        elif query.has_offset() and query.offset() != _MAX_QUERY_OFFSET:
            cursor = cursor.skip(int(query.offset()))
        if query.has_limit():
            cursor = cursor.limit(int(query.limit()))

        self.__cursor_lock.acquire()
        cursor_index = self.__next_cursor
        self.__next_cursor += 1
        self.__cursor_lock.release()
        self.__queries[cursor_index] = cursor

        # Cursor magic
        compiled_cursor = query_result.mutable_compiled_cursor()
        position = compiled_cursor.add_position()
        query_info = self._MinimalQueryInfo(query)
        cloned_cursor = cursor.clone()
        results = list(cloned_cursor)
        if results:
            start_key = _CURSOR_CONCAT_STR.join(
                (str(len(results) + offset), query_info.Encode(),
                 self.__entity_for_mongo_document(results[-1]).Encode()))
            # Populate query result
            result_list = query_result.result_list()
            for doc in results:
                result_list.append(self.__entity_for_mongo_document(doc))
            query_result.set_skipped_results(len(results))
            position.set_start_key(str(start_key))
            position.set_start_inclusive(False)
        del cloned_cursor

        query_result.mutable_cursor().set_cursor(cursor_index)
        query_result.set_more_results(False)