def _ProjectionPartialEntityGenerator(cursor):
  """Yield partial entity records built from a projection-query cursor.

  Each cursor row carries an encoded storage entity (row[1]) followed by
  alternating (property name, encoded property value) pairs. Only the
  projected values are copied into the emitted entity, so several yielded
  entities may share a key.

  Args:
    cursor: a SQLite3.Cursor or subclass.

  Yields:
    Partial entities resulting from the projection.
  """
  for row in cursor:
    stored = entity_pb.EntityProto(row[1])
    source_entity = datastore_stub_util._FromStorageEntity(stored).entity

    partial = entity_pb.EntityProto()
    partial.mutable_key().MergeFrom(source_entity.key())
    partial.mutable_entity_group().MergeFrom(source_entity.entity_group())

    prop_names = row[2::2]
    prop_values = row[3::2]
    for prop_name, encoded_value in zip(prop_names, prop_values):
      new_prop = partial.add_property()
      new_prop.set_name(ToUtf8(prop_name))
      # Values were stored in the sortable encoding; decode from raw bytes.
      decoder = sortable_pb_encoder.Decoder(
          array.array('B', str(encoded_value)))
      new_prop.mutable_value().Merge(decoder)
      new_prop.set_multiple(False)

    datastore_stub_util.PrepareSpecialPropertiesForLoad(partial)
    yield datastore_stub_util.EntityRecord(partial)
  def _GetEntitiesInEntityGroup(self, entity_group):
    """Fetch all entities belonging to the given entity group.

    Builds an ancestor query over the group, runs the corresponding kind
    query against SQLite, and reads every matching row in one pass.

    Args:
      entity_group: reference identifying the entity group root.

    Returns:
      A dict mapping ReferenceToKeyValue(entity key) to the stored record.
    """
    ancestor_query = datastore_pb.Query()
    ancestor_query.set_app(entity_group.app())
    if entity_group.name_space():
      ancestor_query.set_name_space(entity_group.name_space())
    ancestor_query.mutable_ancestor().CopyFrom(entity_group)

    filter_info = self.__GenerateFilterInfo(ancestor_query.filter_list(),
                                            ancestor_query)
    order_info = self.__GenerateOrderInfo(ancestor_query.order_list())
    sql_stmt, params = self.__KindQuery(ancestor_query, filter_info,
                                        order_info)

    conn = self._GetConnection()
    try:
      result = {}
      for row in conn.execute(sql_stmt, params).fetchall():
        stored = entity_pb.EntityProto(row[1])
        ref_key = datastore_types.ReferenceToKeyValue(stored.key())
        result[ref_key] = datastore_stub_util._FromStorageEntity(stored)
      return result
    finally:
      self._ReleaseConnection(conn)
Example #3
0
def _ProjectionPartialEntityGenerator(cursor):
  """Yield partial entity records built from a projection-query cursor.

  Each cursor row carries an encoded storage entity (row[1]) followed by
  alternating (property name, encoded property value) pairs. Only the
  projected values are copied into the emitted entity, so several yielded
  entities may share a key.

  Args:
    cursor: a SQLite3.Cursor or subclass.

  Yields:
    Partial entities resulting from the projection.
  """
  for row in cursor:
    stored = entity_pb.EntityProto(row[1])
    source_entity = datastore_stub_util._FromStorageEntity(stored).entity

    partial = entity_pb.EntityProto()
    partial.mutable_key().MergeFrom(source_entity.key())
    partial.mutable_entity_group().MergeFrom(source_entity.entity_group())

    prop_names = row[2::2]
    prop_values = row[3::2]
    for prop_name, encoded_value in zip(prop_names, prop_values):
      new_prop = partial.add_property()
      new_prop.set_name(ToUtf8(prop_name))
      # Values were stored in the sortable encoding; decode from raw bytes.
      decoder = sortable_pb_encoder.Decoder(
          array.array('B', str(encoded_value)))
      new_prop.mutable_value().Merge(decoder)
      new_prop.set_multiple(False)

    datastore_stub_util.PrepareSpecialPropertiesForLoad(partial)
    yield datastore_stub_util.EntityRecord(partial)
Example #4
0
  def _GetEntitiesInEntityGroup(self, entity_group):
    """Fetch all entities belonging to the given entity group.

    Builds an ancestor query over the group, runs the corresponding kind
    query against SQLite, and reads every matching row in one pass.

    Args:
      entity_group: reference identifying the entity group root.

    Returns:
      A dict mapping ReferenceToKeyValue(entity key) to the stored record.
    """
    ancestor_query = datastore_pb.Query()
    ancestor_query.set_app(entity_group.app())
    if entity_group.name_space():
      ancestor_query.set_name_space(entity_group.name_space())
    ancestor_query.mutable_ancestor().CopyFrom(entity_group)

    filter_info = self.__GenerateFilterInfo(ancestor_query.filter_list(),
                                            ancestor_query)
    order_info = self.__GenerateOrderInfo(ancestor_query.order_list())
    sql_stmt, params = self.__KindQuery(ancestor_query, filter_info,
                                        order_info)

    conn = self._GetConnection()
    try:
      result = {}
      for row in conn.execute(sql_stmt, params).fetchall():
        stored = entity_pb.EntityProto(row[1])
        ref_key = datastore_types.ReferenceToKeyValue(stored.key())
        result[ref_key] = datastore_stub_util._FromStorageEntity(stored)
      return result
    finally:
      self._ReleaseConnection(conn)
Example #5
0
    def Read(self):
        """Reads the datastore file into memory.

        The in-memory query history is cleared, but the datastore is *not*
        cleared; the entities in the files are merged into the entities in
        memory. If you want them to overwrite the in-memory datastore, call
        Clear() before calling Read().

        If the datastore file contains an entity with the same app name,
        kind, and key as an entity already in the datastore, the entity from
        the file overwrites the entity in the datastore.

        Also sets each ID counter to one greater than the highest ID
        allocated so far in that counter's ID space.

        Raises:
          apiproxy_errors.ApplicationError: with INTERNAL_ERROR when an
              entity in the file cannot be decoded.
        """
        # Nothing to read when no file is configured or it is /dev/null.
        if self.__datastore_file and self.__datastore_file != '/dev/null':
            entities = self.__ReadPickled(self.__datastore_file)
            # A trailing int/long in the pickled list is the saved commit
            # timestamp, not an encoded entity.
            if entities and isinstance(entities[-1], (int, long)):
                self._commit_timestamp = int(entities.pop())
            for encoded_entity in entities:
                try:
                    entity = entity_pb.EntityProto(encoded_entity)
                    record = datastore_stub_util._FromStorageEntity(entity)
                except self.READ_PB_EXCEPTIONS, e:
                    # Wrap protobuf decoding failures in a datastore
                    # INTERNAL_ERROR with the file path for context.
                    raise apiproxy_errors.ApplicationError(
                        datastore_pb.Error.INTERNAL_ERROR,
                        self.READ_ERROR_MSG % (self.__datastore_file, e))
                except struct.error, e:
                    # struct decoding errors are propagated unchanged.
                    raise

                self._StoreRecord(record)

                # Remember the highest numeric id seen so the id allocator
                # starts above it.
                last_path = _FinalElement(entity.key())
                if last_path.id():
                    self._SetMaxId(last_path.id())
 def _Get(self, key):
   """Look up a single entity by key in the SQLite store.

   Args:
     key: entity Reference to fetch.

   Returns:
     The loaded record for the key, or None when no row matches.
   """
   conn = self._GetConnection()
   try:
     table = self._GetTablePrefix(key)
     sql = 'SELECT entity FROM "%s!Entities" WHERE __path__ = ?' % (table,)
     matched = conn.execute(
         sql, (self.__EncodeIndexPB(key.path()),)).fetchone()
     if matched:
       stored = entity_pb.EntityProto()
       stored.ParseFromString(matched[0])
       return datastore_stub_util.LoadRecord(
           datastore_stub_util._FromStorageEntity(stored))
   finally:
     self._ReleaseConnection(conn)
Example #7
0
 def _Get(self, key):
   """Look up a single entity by key in the SQLite store.

   Args:
     key: entity Reference to fetch.

   Returns:
     The loaded record for the key, or None when no row matches.
   """
   conn = self._GetConnection()
   try:
     table = self._GetTablePrefix(key)
     sql = 'SELECT entity FROM "%s!Entities" WHERE __path__ = ?' % (table,)
     matched = conn.execute(
         sql, (self.__EncodeIndexPB(key.path()),)).fetchone()
     if matched:
       stored = entity_pb.EntityProto()
       stored.ParseFromString(matched[0])
       return datastore_stub_util.LoadRecord(
           datastore_stub_util._FromStorageEntity(stored))
   finally:
     self._ReleaseConnection(conn)
  def Read(self):
    """Reads the datastore file into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in memory.
    If you want them to overwrite the in-memory datastore, call Clear() before
    calling Read().

    If the datastore file contains an entity with the same app name, kind, and
    key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets each ID counter to one greater than the highest ID allocated so
    far in that counter's ID space.

    Raises:
      apiproxy_errors.ApplicationError: with INTERNAL_ERROR when an entity
          in the file cannot be decoded.
    """
    # Nothing to read when no file is configured or it is /dev/null.
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      entities = self.__ReadPickled(self.__datastore_file)
      # A trailing int/long in the pickled list is the saved commit
      # timestamp, not an encoded entity.
      if entities and isinstance(entities[-1], (int, long)):
        self._commit_timestamp = int(entities.pop())
      for encoded_entity in entities:
        try:
          entity = entity_pb.EntityProto(encoded_entity)
          record = datastore_stub_util._FromStorageEntity(entity)
        except self.READ_PB_EXCEPTIONS, e:
          # Wrap protobuf decoding failures in a datastore INTERNAL_ERROR
          # with the file path for context.
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.INTERNAL_ERROR,
              self.READ_ERROR_MSG % (self.__datastore_file, e))
        except struct.error, e:
          # On exactly Python 2.5.0, this particular struct.error message is
          # re-raised with an extra explanatory prefix (READ_PY250_MSG);
          # every other struct.error is propagated unchanged.
          if (sys.version_info[0:3] == (2, 5, 0)
              and e.message.startswith('unpack requires a string argument')):

            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.INTERNAL_ERROR,
                self.READ_PY250_MSG + self.READ_ERROR_MSG %
                (self.__datastore_file, e))
          else:
            raise

        self._StoreRecord(record)

        # Remember the highest numeric id seen so the id allocator starts
        # above it.
        last_path = _FinalElement(entity.key())
        if last_path.id():
          self._SetMaxId(last_path.id())
def _DedupingEntityGenerator(cursor):
  """Yield entity records from a cursor, emitting each key at most once.

  Rows whose encoded key (row[0]) was already seen are skipped, so the
  generated records never share a key.

  Args:
    cursor: a SQLite3.Cursor or subclass.

  Yields:
    Entities that do not share a key.
  """
  emitted_keys = set()
  for row in cursor:
    encoded_key = str(row[0])
    if encoded_key not in emitted_keys:
      emitted_keys.add(encoded_key)
      stored = entity_pb.EntityProto(row[1])
      yield datastore_stub_util.LoadRecord(
          datastore_stub_util._FromStorageEntity(stored))
Example #10
0
def _DedupingEntityGenerator(cursor):
  """Yield entity records from a cursor, emitting each key at most once.

  Rows whose encoded key (row[0]) was already seen are skipped, so the
  generated records never share a key.

  Args:
    cursor: a SQLite3.Cursor or subclass.

  Yields:
    Entities that do not share a key.
  """
  emitted_keys = set()
  for row in cursor:
    encoded_key = str(row[0])
    if encoded_key not in emitted_keys:
      emitted_keys.add(encoded_key)
      stored = entity_pb.EntityProto(row[1])
      yield datastore_stub_util.LoadRecord(
          datastore_stub_util._FromStorageEntity(stored))