def setUp(self):
        """Sets up test environment and regisers stub."""

        # Set required environment variables
        os.environ['APPLICATION_ID'] = 'test'
        os.environ['AUTH_DOMAIN'] = 'mydomain.local'
        os.environ['USER_EMAIL'] = '*****@*****.**'
        os.environ['USER_IS_ADMIN'] = '1'

        # Read index definitions.
        index_yaml = open(
            os.path.join(os.path.dirname(__file__), 'index.yaml'), 'r')

        try:
            self.indices = datastore_index.IndexDefinitionsToProtos(
                'test',
                datastore_index.ParseIndexDefinitions(index_yaml).indexes)
        except TypeError:
            self.indices = []

        index_yaml.close()

        # Register API proxy stub.
        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()

        datastore = typhoonae.mongodb.datastore_mongo_stub.DatastoreMongoStub(
            'test', '', require_indexes=True)

        try:
            apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
        except apiproxy_errors.ApplicationError as e:
            raise RuntimeError('These tests require a running MongoDB server '
                               '(%s)' % e)
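# Usage sketch (not part of the original snippet): once the MongoDB stub above
# is registered, a test method can exercise the datastore API through the
# App Engine SDK's low-level datastore module. The 'Greeting' kind and the
# assertions below are illustrative assumptions.
from google.appengine.api import datastore

def testPutAndGet(self):
    """Stores an entity through the registered stub and reads it back."""
    entity = datastore.Entity('Greeting')
    entity['author'] = 'alice'
    key = datastore.Put(entity)
    fetched = datastore.Get(key)
    self.assertEqual('alice', fetched['author'])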
def SetupIndexes(app_id, root_path):
  """Ensure that the set of existing composite indexes matches index.yaml.

  Note: this is similar to the algorithm used by the admin console for
  the same purpose.

  Args:
    app_id: Application ID being served.
    root_path: Path to the root of the application.
  """
  index_yaml_file = os.path.join(root_path, 'index.yaml')
  try:
    fh = open(index_yaml_file, 'r')
  except IOError:
    index_yaml_data = None
  else:
    try:
      index_yaml_data = fh.read()
    finally:
      fh.close()

  indexes = []
  if index_yaml_data is not None:
    index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
    if index_defs is not None:
      indexes = index_defs.indexes
      if indexes is None:
        indexes = []

  # Convert the parsed definitions into index protos for this application.
  requested_indexes = datastore_index.IndexDefinitionsToProtos(app_id, indexes)

  # Fetch the composite indexes the datastore already knows about.
  existing_indexes = datastore_admin.GetIndices(app_id)

  # Key both sets by the encoded index definition so they can be diffed.
  requested = dict((x.definition().Encode(), x) for x in requested_indexes)
  existing = dict((x.definition().Encode(), x) for x in existing_indexes)

  # Create any requested indexes that do not exist yet.
  created = 0
  for key, index in requested.iteritems():
    if key not in existing:
      datastore_admin.CreateIndex(index)
      created += 1

  # Delete any existing indexes that are no longer requested.
  deleted = 0
  for key, index in existing.iteritems():
    if key not in requested:
      datastore_admin.DeleteIndex(index)
      deleted += 1

  if created or deleted:
    logging.info("Created %d and deleted %d index(es); total %d",
                 created, deleted, len(requested))
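# For reference, the index.yaml consumed by SetupIndexes follows the standard
# App Engine composite-index format. A minimal sketch of the parsing step,
# using a hypothetical inline definition instead of a file on disk:
from google.appengine.datastore import datastore_index

SAMPLE_INDEX_YAML = """\
indexes:
- kind: Greeting
  properties:
  - name: author
  - name: date
    direction: desc
"""

index_defs = datastore_index.ParseIndexDefinitions(SAMPLE_INDEX_YAML)
sample_protos = datastore_index.IndexDefinitionsToProtos('test',
                                                         index_defs.indexes)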
    def setUp(self):
        """Sets up test environment and regisers stub."""

        # Set required environment variables
        os.environ['APPLICATION_ID'] = 'test'
        os.environ['AUTH_DOMAIN'] = 'mydomain.local'
        os.environ['USER_EMAIL'] = '*****@*****.**'
        os.environ['USER_IS_ADMIN'] = '1'

        # Read index definitions.
        index_yaml = open(
            os.path.join(os.path.dirname(__file__), 'index.yaml'), 'r')

        try:
            self.indices = datastore_index.IndexDefinitionsToProtos(
                'test',
                datastore_index.ParseIndexDefinitions(index_yaml).indexes)
        except TypeError:
            self.indices = []

        index_yaml.close()

        # Register API proxy stub.
        apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()

        database_info = {
            "host": "127.0.0.1",
            "user": "******",
            "passwd": "",
            "db": "testdb"
        }

        # Suppress warning messages from MySQL.
        filterwarnings('ignore', category=MySQLdb.Warning)

        datastore = typhoonae.mysql.datastore_mysql_stub.DatastoreMySQLStub(
            'test', database_info)

        try:
            apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
        except apiproxy_errors.ApplicationError as e:
            raise RuntimeError('These tests require a running MySQL server '
                               '(%s)' % e)
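# Note: this MySQL variant assumes a few module-level imports that the snippet
# itself does not show; the names below are inferred directly from the calls
# it makes (MySQLdb.Warning, filterwarnings and the TyphoonAE MySQL stub).
import MySQLdb
import typhoonae.mysql.datastore_mysql_stub
from warnings import filterwarnings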
    def _SetupIndexes(self, _open=open):
        """Ensure that the set of existing composite indexes matches index.yaml.
    
    Create any new indexes, and delete indexes which are no longer required.
   
    Args:
      _open: Function used to open a file.
    """
        if not self.__root_path:
            logging.warning("No index.yaml was loaded.")
            return
        index_yaml_file = os.path.join(self.__root_path, 'index.yaml')
        if (self.__cached_yaml[0] == index_yaml_file
                and os.path.exists(index_yaml_file) and
                os.path.getmtime(index_yaml_file) == self.__cached_yaml[1]):
            requested_indexes = self.__cached_yaml[2]
        else:
            try:
                index_yaml_mtime = os.path.getmtime(index_yaml_file)
                fh = _open(index_yaml_file, 'r')
            except (OSError, IOError):
                logging.info("Error reading file")
                index_yaml_data = None
            else:
                try:
                    index_yaml_data = fh.read()
                finally:
                    fh.close()
            requested_indexes = []
            if index_yaml_data is not None:
                index_defs = datastore_index.ParseIndexDefinitions(
                    index_yaml_data)
                if index_defs is not None and index_defs.indexes is not None:
                    requested_indexes = datastore_index.IndexDefinitionsToProtos(
                        self.__app_id, index_defs.indexes)
                    self.__cached_yaml = (index_yaml_file, index_yaml_mtime,
                                          requested_indexes)

        existing_indexes = datastore_pb.CompositeIndices()
        app_str = api_base_pb.StringProto()
        app_str.set_value(self.__app_id)
        self._Dynamic_GetIndices(app_str, existing_indexes)

        requested = dict(
            (x.definition().Encode(), x) for x in requested_indexes)
        existing = dict((x.definition().Encode(), x)
                        for x in existing_indexes.index_list())

        # Delete any indexes that are no longer requested.
        deleted = 0
        for key, index in existing.iteritems():
            if key not in requested:
                self._Dynamic_DeleteIndex(index, api_base_pb.VoidProto())
                deleted += 1

        # Add the existing indexes to the index cache.
        for key, index in existing.iteritems():
            new_index = entity_pb.CompositeIndex()
            new_index.CopyFrom(index)
            ent_kind = new_index.definition().entity_type()
            if ent_kind in self.__index_cache:
                new_indexes = self.__index_cache[ent_kind]
                new_indexes.append(new_index)
                self.__index_cache[ent_kind] = new_indexes
            else:
                self.__index_cache[ent_kind] = [new_index]

        # Compare the existing indexes to the requested ones and create any
        # newly requested indexes.
        created = 0
        for key, index in requested.iteritems():
            if key not in existing:
                new_index = entity_pb.CompositeIndex()
                new_index.CopyFrom(index)
                new_index.set_id(
                    self._Dynamic_CreateIndex(
                        new_index, api_base_pb.Integer64Proto()).value())
                new_index.set_state(entity_pb.CompositeIndex.READ_WRITE)
                self._Dynamic_UpdateIndex(new_index, api_base_pb.VoidProto())
                created += 1

                ent_kind = new_index.definition().entity_type()
                if ent_kind in self.__index_cache:
                    new_indexes = self.__index_cache[ent_kind]
                    new_indexes.append(new_index)
                    self.__index_cache[ent_kind] = new_indexes
                else:
                    self.__index_cache[ent_kind] = [new_index]

        if created or deleted:
            logging.info('Created %d and deleted %d index(es); total %d',
                         created, deleted, len(requested))
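    # The method above relies on two instance attributes for caching. Their
    # shape is inferred here from how _SetupIndexes reads them; this
    # initializer is a sketch, not taken from the original class.
    def __init__(self, app_id, root_path):
        self.__app_id = app_id
        self.__root_path = root_path
        # (index.yaml path, its mtime when parsed, resulting index protos).
        self.__cached_yaml = (None, None, None)
        # Maps entity kind -> list of entity_pb.CompositeIndex protos.
        self.__index_cache = {}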
def SetupIndexes(app_id, root_path):
    """Ensure that the set of existing composite indexes matches index.yaml.

  Note: this is similar to the algorithm used by the admin console for
  the same purpose.

  Args:
    app_id: Application ID being served.
    root_path: Path to the root of the application.
  """
    index_yaml_file = os.path.join(root_path, 'index.yaml')
    global _cached_yaml
    if _cached_yaml[0] == index_yaml_file and os.path.exists(
            index_yaml_file) and os.path.getmtime(
                index_yaml_file) == _cached_yaml[1]:
        requested_indexes = _cached_yaml[2]
    else:
        try:
            index_yaml_mtime = os.path.getmtime(index_yaml_file)
            fh = open(index_yaml_file, 'r')
        except (OSError, IOError):
            index_yaml_data = None
        else:
            try:
                index_yaml_data = fh.read()
            finally:
                fh.close()

        requested_indexes = []
        if index_yaml_data is not None:
            index_defs = datastore_index.ParseIndexDefinitions(index_yaml_data)
            if index_defs is not None and index_defs.indexes is not None:
                requested_indexes = datastore_index.IndexDefinitionsToProtos(
                    app_id, index_defs.indexes)
                _cached_yaml = (index_yaml_file, index_yaml_mtime,
                                requested_indexes)

    existing_indexes = datastore_admin.GetIndices(app_id)

    requested = dict((x.definition().Encode(), x) for x in requested_indexes)
    existing = dict((x.definition().Encode(), x) for x in existing_indexes)

    created = 0
    for key, index in requested.iteritems():
        if key not in existing:
            new_index = entity_pb.CompositeIndex()
            new_index.CopyFrom(index)
            index_id = datastore_admin.CreateIndex(new_index)
            new_index.set_id(index_id)
            new_index.set_state(entity_pb.CompositeIndex.READ_WRITE)
            datastore_admin.UpdateIndex(new_index)
            created += 1

    deleted = 0
    for key, index in existing.iteritems():
        if key not in requested:
            datastore_admin.DeleteIndex(index)
            deleted += 1

    if created or deleted:
        logging.info("Created %d and deleted %d index(es); total %d", created,
                     deleted, len(requested))
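# The diff at the heart of every SetupIndexes variant above is a plain set
# comparison keyed on the encoded index definition. The same pattern in
# isolation, with hypothetical string keys standing in for encoded protos:
requested = {'Greeting:author,date': 'index-a', 'Tag:name': 'index-b'}
existing = {'Tag:name': 'index-b', 'Old:field': 'index-c'}

to_create = [idx for key, idx in requested.iteritems() if key not in existing]
to_delete = [idx for key, idx in existing.iteritems() if key not in requested]

# Would create 1 index ('Greeting:author,date') and delete 1 ('Old:field').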