Example 1
 def result(foo, bar=0, baz=""):
     """Build a ``_Result`` for an entity with ``foo``/``bar``/``baz`` properties.

     The result is ordered by ``foo`` ascending and ``bar`` descending.
     """
     properties = {
         "foo": entity_pb2.Value(string_value=foo),
         "bar": entity_pb2.Value(integer_value=bar),
         "baz": entity_pb2.Value(string_value=baz),
     }
     ordering = [
         query_module.PropertyOrder("foo"),
         query_module.PropertyOrder("bar", reverse=True),
     ]
     entity = entity_pb2.Entity(properties=properties)
     return _datastore_query._Result(
         result_type=None,
         result_pb=query_pb2.EntityResult(entity=entity),
         order_by=ordering,
     )
    def test_meaning_with_change(self):
        """A stored meaning is dropped when the value no longer matches it."""
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore.entity import Entity
        from google.cloud.datastore.helpers import _new_value_pb

        prop_name = 'foo'
        prop_value = 42
        entity = Entity()
        entity[prop_name] = prop_value
        # Record a meaning whose cached value (1337) differs from the
        # value actually stored on the entity (42).
        entity._meanings[prop_name] = (9, 1337)

        entity_pb = self._call_fut(entity)

        expected_pb = entity_pb2.Entity()
        _new_value_pb(expected_pb, prop_name).integer_value = prop_value
        # NOTE: No meaning is expected since the value differs from the
        #       value stored alongside the meaning.
        self._compare_entity_proto(entity_pb, expected_pb)
Example 3
    def test_index_mismatch_ignores_empty_list(self):
        """An empty array value round-trips to an empty Python list."""
        from google.cloud.datastore_v1.proto import entity_pb2

        project, kind, entity_id = "PROJECT", "KIND", 1234

        empty_array_pb = entity_pb2.Value(
            array_value=entity_pb2.ArrayValue(values=[])
        )
        entity_pb = entity_pb2.Entity(properties={"baz": empty_array_pb})
        entity_pb.key.partition_id.project_id = project
        entity_pb.key.path.add(kind=kind, id=entity_id)

        converted = self._call_fut(entity_pb)
        self.assertEqual(dict(converted)["baz"], [])
Example 4
    def test_key_only(self):
        """An entity holding only a key converts to a pb with just that key."""
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore.entity import Entity
        from google.cloud.datastore.key import Key

        kind = 'PATH'
        name = 'NAME'
        project = 'PROJECT'
        entity = Entity(key=Key(kind, name, project=project))

        entity_pb = self._call_fut(entity)

        expected_pb = entity_pb2.Entity()
        expected_pb.key.partition_id.project_id = project
        element = expected_pb.key.path.add()
        element.kind = kind
        element.name = name

        self._compare_entity_proto(entity_pb, expected_pb)
Example 5
    def test_run_query_w_namespace_nonempty_result(self):
        """run_query with a namespaced partition returns the backend batch."""
        from google.cloud.datastore_v1.proto import datastore_pb2
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore_v1.proto import query_pb2

        project = "PROJECT"
        namespace = "NS"
        query_pb = self._make_query_pb("Kind")
        partition_id = entity_pb2.PartitionId(
            project_id=project, namespace_id=namespace
        )
        read_options = datastore_pb2.ReadOptions()
        batch = query_pb2.QueryResultBatch(
            entity_result_type=query_pb2.EntityResult.FULL,
            entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())],
            more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
        )
        rsp_pb = datastore_pb2.RunQueryResponse(batch=batch)

        # Mock HTTP transport that serves the canned serialized response.
        http = _make_requests_session(
            [_make_response(content=rsp_pb.SerializeToString())]
        )
        client = mock.Mock(
            _http=http,
            _base_url="test.invalid",
            _client_info=_make_client_info(),
            spec=["_http", "_base_url", "_client_info"],
        )

        # Issue the request.
        ds_api = self._make_one(client)
        response = ds_api.run_query(
            project, partition_id, read_options, query=query_pb
        )

        # Verify the response and the outgoing call.
        self.assertEqual(response, rsp_pb)

        uri = _build_expected_url(client._base_url, project, "runQuery")
        request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
        self.assertEqual(request.partition_id, partition_id)
        self.assertEqual(request.query, query_pb)
Example 6
    def test_simple_fields(self):
        """Integer and string properties convert to matching value pbs."""
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore.entity import Entity
        from google.cloud.datastore.helpers import _new_value_pb

        entity = Entity()
        entity['foo'] = 42
        entity['bar'] = u'some-string'

        entity_pb = self._call_fut(entity)

        expected_pb = entity_pb2.Entity()
        _new_value_pb(expected_pb, 'foo').integer_value = 42
        _new_value_pb(expected_pb, 'bar').string_value = u'some-string'

        self._compare_entity_proto(entity_pb, expected_pb)
Example 7
    def test_lookup_single_key_nonempty_response(self):
        """lookup() for one key surfaces the matching found entity."""
        from google.cloud.datastore_v1.proto import datastore_pb2
        from google.cloud.datastore_v1.proto import entity_pb2

        project = "PROJECT"
        key_pb = _make_key_pb(project)
        read_options = datastore_pb2.ReadOptions()

        entity = entity_pb2.Entity()
        entity.key.CopyFrom(key_pb)
        rsp_pb = datastore_pb2.LookupResponse()
        rsp_pb.found.add(entity=entity)

        # Mock HTTP transport that serves the canned serialized response.
        http = _make_requests_session(
            [_make_response(content=rsp_pb.SerializeToString())]
        )
        client = mock.Mock(
            _http=http,
            _base_url="test.invalid",
            _client_info=_make_client_info(),
            spec=["_http", "_base_url", "_client_info"],
        )

        # Issue the request.
        ds_api = self._make_one(client)
        response = ds_api.lookup(project, [key_pb], read_options=read_options)

        # Verify the response payload.
        self.assertEqual(response, rsp_pb)
        uri = _build_expected_url(client._base_url, project, "lookup")
        self.assertEqual(len(response.found), 1)
        self.assertEqual(len(response.missing), 0)
        self.assertEqual(len(response.deferred), 0)
        found = response.found[0].entity
        self.assertEqual(found.key.path[0].kind, "Kind")
        self.assertEqual(found.key.path[0].id, 1234)

        # Verify the outgoing call.
        request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
        self.assertEqual(list(request.keys), [key_pb])
        self.assertEqual(request.read_options, read_options)
Example 8
def entity_to_protobuf(entity):
    """Converts an entity into a protobuf.

    :type entity: :class:`google.cloud.datastore.entity.Entity`
    :param entity: The entity to be turned into a protobuf.

    :rtype: :class:`.entity_pb2.Entity`
    :returns: The protobuf representing the entity.
    """
    entity_pb = entity_pb2.Entity()
    if entity.key is not None:
        entity_pb.key.CopyFrom(entity.key.to_protobuf())

    for name, value in entity.items():
        is_list = isinstance(value, list)
        # Empty lists are skipped entirely.
        if is_list and not value:
            continue

        value_pb = _new_value_pb(entity_pb, name)
        # Copy the Python value onto the protobuf value.
        _set_protobuf_value(value_pb, value)

        # Apply indexing exclusions: scalars carry the flag themselves,
        # while list values flag each array member individually.
        if name in entity.exclude_from_indexes:
            if not is_list:
                value_pb.exclude_from_indexes = True

            for item_pb in value_pb.array_value.values:
                item_pb.exclude_from_indexes = True

        # Carry over any meaning recorded for this property.
        _set_pb_meaning_from_entity(
            entity, name, value, value_pb, is_list=is_list)

    return entity_pb
Example 9
def get_entity_pb_for_value(value):
    # type: (Any) -> entity_pb2.Entity
    """
    Return Entity protobuf object for the provided Python value.

    :param value: Dict-like value (e.g. a Struct / map field) whose items are
                  copied onto the returned Entity protobuf.
    :raises ValueError: If the value is not a supported (dict-like) type.
    """
    entity_pb = entity_pb2.Entity()

    attr_type = get_pb_attr_type(value)

    if attr_type == 'dict_value':
        if six.PY2:
            value = dict(value)

        # NOTE(fix): use distinct loop names so the "value" argument is not
        # rebound while it is being iterated over.
        for item_key, item_value in six.iteritems(value):
            value_pb = datastore.helpers._new_value_pb(entity_pb, item_key)
            value_pb = set_value_pb_item_value(value_pb=value_pb,
                                               value=item_value,
                                               is_struct=True)
    else:
        # NOTE(fix): "(attr_type)" was not a tuple; use a 1-tuple for "%".
        raise ValueError('Unsupported attribute type: %s' % (attr_type,))

    return entity_pb
Example 10
    def test_mismatched_value_indexed(self):
        """Array members that disagree on indexing raise ValueError."""
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore.helpers import _new_value_pb

        project, kind, entity_id = 'PROJECT', 'KIND', 1234
        entity_pb = entity_pb2.Entity()
        entity_pb.key.partition_id.project_id = project
        entity_pb.key.path.add(kind=kind, id=entity_id)

        values = _new_value_pb(entity_pb, 'baz').array_value.values

        # First member is excluded from indexes ...
        excluded_pb = values.add()
        excluded_pb.integer_value = 10
        excluded_pb.exclude_from_indexes = True

        # ... the second one is indexed, which is inconsistent.
        indexed_pb = values.add()
        indexed_pb.integer_value = 11

        with self.assertRaises(ValueError):
            self._call_fut(entity_pb)
Example 11
    def test_get_multi_miss_w_missing(self):
        """get_multi() records backend misses in the ``missing`` list."""
        from google.cloud.datastore_v1.proto import entity_pb2
        from google.cloud.datastore_v1.proto import datastore_pb2
        from google.cloud.datastore.key import Key

        kind = 'Kind'
        entity_id = 1234

        # Missing-entity pb the mock backend will report.
        missed = entity_pb2.Entity()
        missed.key.partition_id.project_id = self.PROJECT
        element = missed.key.path.add()
        element.kind = kind
        element.id = entity_id

        creds = _make_credentials()
        client = self._make_one(credentials=creds)
        # Install the canned lookup response on the mock API.
        lookup_response = _make_lookup_response(missing=[missed])
        ds_api = _make_datastore_api(lookup_response=lookup_response)
        client._datastore_api_internal = ds_api

        key = Key(kind, entity_id, project=self.PROJECT)
        missing = []
        entities = client.get_multi([key], missing=missing)
        self.assertEqual(entities, [])
        key_pb = key.to_protobuf()
        self.assertEqual(
            [entry.key.to_protobuf() for entry in missing], [key_pb])

        read_options = datastore_pb2.ReadOptions()
        ds_api.lookup.assert_called_once_with(
            self.PROJECT,
            [key_pb],
            read_options=read_options,
        )
Example 12
 def Mutation():
     """Return an upsert mutation for an entity keyed under "SomeKind"."""
     key = entity_pb2.Key(path=[entity_pb2.Key.PathElement(kind="SomeKind")])
     return datastore_pb2.Mutation(upsert=entity_pb2.Entity(key=key))
Example 13
    def test_complex_model2(self):
        """Round-trip a rich ndb model from a captured wire-format blob.

        Parses a hexlified Entity protobuf (captured from App Engine
        standard — see the commented construction code below) and checks
        every recovered property value.
        """
        import binascii
        import datetime
        from protorpc import messages
        from ndb_orm import msgprop

        # Nested model used both as a StructuredProperty and a
        # LocalStructuredProperty below.
        class Items(ndb.Model):
            has_hat = ndb.BooleanProperty("hh")
            number_of_socks = ndb.IntegerProperty("ns")

        class Gender(messages.Enum):
            male = 1
            female = 2
            neutral = 3

        # Model exercising (nearly) every ndb property type.
        class Human(ndb.Model):
            name = ndb.StringProperty("na", indexed=True)
            gender = msgprop.EnumProperty(Gender,
                                          "g",
                                          required=True,
                                          indexed=True)
            age = ndb.IntegerProperty("ag", indexed=False)
            items = ndb.StructuredProperty(Items, "i", required=True)
            numbers = ndb.JsonProperty('json', indexed=False)
            description = ndb.TextProperty("t", indexed=False)
            description2 = ndb.TextProperty("t2",
                                            compressed=True,
                                            indexed=False)
            meters_tall = ndb.FloatProperty("mtrs", indexed=False)
            datetime_of_birth = ndb.DateTimeProperty("dtb", indexed=False)
            date_of_birth = ndb.DateProperty("db", indexed=False)
            time_of_birth = ndb.TimeProperty("tb", indexed=False)
            hobbies = ndb.StringProperty('hob', repeated=True, indexed=False)
            pickle = ndb.PickleProperty('pi', indexed=False)
            binary = ndb.BlobProperty("bi", indexed=False)
            home = ndb.GeoPtProperty("ho", indexed=False)
            generic = ndb.GenericProperty("gen", indexed=False)
            model = ndb.LocalStructuredProperty(Items, "mo", indexed=False)

            number_of_hobbies = ndb.ComputedProperty(
                name="num_hob",
                func=lambda self: len(self.hobbies),
                indexed=False)
            default_info = ndb.StringProperty("di",
                                              indexed=False,
                                              default='unknown')
            update = ndb.DateTimeProperty("up", indexed=False, auto_now=True)

        # The entity was created from within App Engine standard with the
        # code shown (commented out) below; the protocol buffer was then
        # fetched with google-cloud-datastore and hexlified.
        pb_binary_string = binascii.unhexlify(
            '0a240a10120e6b756e7374616b726f626174656e12100a0548756d616e108080808080e4910a1a4b0a027069124592013f80025d71017d7102550b666f6f7462616c6c5f61747103636461746574696d650a6461746574696d650a7104550a07e1081a0f0a2a01e2408552710573612e9801011a1f0a026d6f121932141a080a026e73120210031a080a026868120208019801011a100a026269120a920104616263009801011a140a027462120e520908f2aa03108094ef3a9801011a140a026e61120e8a010b4172746875722044656e741a1a0a046a736f6e121292010c5b31322c2031332c2031345d9801011a100a076e756d5f686f62120510029801011a0a0a04692e6e73120210031a0c0a0367656e120510079801011a0a0a04692e6868120208011a150a026469120f700f8a0107756e6b6e6f776e9801011a110a026462120b52060880f482cd059801011a270a03686f6212204a1e0a10700f8a0108666f6f7462616c6c9801010a0a700f8a010274769801011a0b0a0261671205102a9801011a1d0a02686f12174212098fc2f5285c2f4a401185eb51b81e8513409801011a070a0167120210011a290a0274321223701692011b789c4b54284a4dcc51c84dccd351c8cf50a84c4dcc00003f14066c9801011a140a046d747273120c191f85eb51b81efd3f9801011a170a036474621210520b08f29e86cd05108094ef3a9801011a170a0275701211520c08abc791cd051080abfd8e039801011a170a01741212700f8a010a61207265616c206d616e980101'
        )  # pylint:disable=line-too-long

        #     human = Human(
        #       name='Arthur Dent',
        #       gender=Gender.male,
        #       age=42,
        #       items=Items(has_hat=True, number_of_socks=3), #namespace=namespace),
        #       numbers=[12, 13, 14],
        #       description="a real man",
        #       description2="a real man, oh yeah",
        #       meters_tall=1.82,
        #       datetime_of_birth=datetime.datetime(2017, 8, 26, 15, 10, 42, 123456),
        #       date_of_birth=datetime.datetime(2017, 8, 26, 15, 10, 42, 123456).date(),
        #       time_of_birth=datetime.datetime(2017, 8, 26, 15, 10, 42, 123456).time(),
        #       hobbies=[u"football", u"tv"],
        #       pickle=[{"football_at": datetime.datetime(2017, 8, 26, 15, 10, 42, 123456)}],
        #       binary=binascii.unhexlify("61626300"),
        #       home=ndb.GeoPt("52.37, 4.88"),
        #       generic=7,
        #       model=Items(has_hat=True, number_of_socks=3), #namespace=namespace),
        #     )
        #     human.put()

        # Deserialize the captured blob and rebuild the model instance.
        pb = entity_pb2.Entity()
        pb.ParseFromString(pb_binary_string)
        human_recovered = ndb.helpers.model_from_protobuf(pb)

        # now do the tests
        self.assertEqual(human_recovered.name, 'Arthur Dent')
        self.assertEqual(human_recovered.gender, Gender.male)
        self.assertEqual(human_recovered.age, 42)
        self.assertEqual(human_recovered.items.has_hat, True)
        self.assertEqual(human_recovered.items.number_of_socks, 3)
        self.assertEqual(human_recovered.numbers, [12, 13, 14])
        self.assertEqual(human_recovered.description, "a real man")
        self.assertEqual(human_recovered.description2, "a real man, oh yeah")
        self.assertAlmostEqual(human_recovered.meters_tall, 1.82)
        # DateTime always have a timezone attached
        self.assertEqual(
            human_recovered.datetime_of_birth.replace(tzinfo=None),
            datetime.datetime(2017, 8, 26, 15, 10, 42, 123456))
        self.assertEqual(
            human_recovered.date_of_birth,
            datetime.datetime(2017, 8, 26, 15, 10, 42, 123456).date())
        self.assertEqual(human_recovered.hobbies, [u"football", u"tv"])
        # following is a PickleProperty and thus not recoverable from python3
        #self.assertEqual(human_recovered.pickle[0]["football_at"], datetime.datetime(2017, 8, 26, 15, 10, 42, 123456))
        self.assertEqual(human_recovered.binary,
                         binascii.unhexlify("61626300"))
        self.assertAlmostEqual(human_recovered.home.lat, 52.37)
        self.assertAlmostEqual(human_recovered.home.lon, 4.88)
        self.assertEqual(human_recovered.generic, 7)
        self.assertEqual(human_recovered.model.has_hat, True)
        self.assertEqual(human_recovered.model.number_of_socks, 3)

        # these were set automatically
        self.assertEqual(human_recovered.number_of_hobbies, 2)
        self.assertEqual(human_recovered.default_info, "unknown")
        self.assertEqual(isinstance(human_recovered.update, datetime.date),
                         True)
Example 14
def model_pb_to_entity_pb(model_pb,
                          exclude_falsy_values=False,
                          exclude_from_index=None):
    # type: (message.Message, bool, Optional[List[str]]) -> entity_pb2.Entity
    """
    Translate Protobuf based database model object to Entity object which can be used with Google
    Datastore client library.

    :param model_pb: Instance of a custom Protobuf object to translate.

    :param exclude_falsy_values: True to exclude field values which are falsy (e.g. None, False,
                                 '', 0, etc.) and match the default values.

                                 NOTE: Due to the design of protobuf v3, there is no way to
                                 distinguish between a user explicitly providing a value which is
                                 the same as a default value (e.g. 0 for an integer field) and
                                 user not providing a value and default value being used instead.

    :param exclude_from_index: Optional list of field names which should not be indexed. By
                               default, all the simple fields are indexed.

                               NOTE: If provided, this value has high precedence over
                               "exclude_from_index" message option defined on the model.

    :raises ValueError: If ``model_pb`` is not a Protobuf message, or a field
                        uses an unsupported protobuf type.
    """
    exclude_from_index = exclude_from_index or []

    if not isinstance(model_pb, message.Message):
        raise ValueError(
            'model_pb argument is not a valid Protobuf class instance')

    fields = list(iter(model_pb.DESCRIPTOR.fields))
    # NOTE(fix): compare descriptor *names* against 'key'. The previous
    # expression ("field not in ['key']") compared FieldDescriptor objects
    # with a string, which never matched, so the "key" field was never
    # actually excluded from the translation.
    fields = [field for field in fields if field.name not in ['key']]

    entity_pb = entity_pb2.Entity()

    exclude_from_index = cast(list, exclude_from_index)

    for field_descriptor in fields:
        field_type = field_descriptor.type
        field_name = field_descriptor.name
        field_value = getattr(model_pb, field_name, None)

        if field_value is None:
            # Value not set or it uses a default value, skip it
            # NOTE: proto3 syntax doesn't support HasField() anymore so there is no way for us to
            # determine if a value is set / provided so we just use and return default values.
            continue

        if exclude_falsy_values and not field_value:
            continue

        attr_type = get_pb_attr_type(field_value)

        value_pb = None
        if attr_type == 'array_value':
            # Both the empty and non-empty path need a fresh value pb.
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)

            if len(field_value) == 0:
                # Preserve an explicitly-empty array on the entity.
                array_value = entity_pb2.ArrayValue(values=[])
                value_pb.array_value.CopyFrom(array_value)
            else:
                for value in field_value:
                    if field_type == descriptor.FieldDescriptor.TYPE_MESSAGE:
                        # Nested message type
                        entity_pb_item = model_pb_to_entity_pb(value)
                        value_pb_item = entity_pb2.Value()

                        # pylint: disable=no-member
                        value_pb_item.entity_value.CopyFrom(entity_pb_item)
                        # pylint: enable=no-member
                    else:
                        # Simple type
                        value_pb_item = entity_pb2.Value()
                        value_pb_item = set_value_pb_item_value(
                            value_pb=value_pb_item, value=value)

                    value_pb.array_value.values.append(value_pb_item)
        elif field_type == descriptor.FieldDescriptor.TYPE_STRING:
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)
            value_pb.string_value = field_value
        elif field_type in [
                descriptor.FieldDescriptor.TYPE_DOUBLE,
                descriptor.FieldDescriptor.TYPE_FLOAT
        ]:
            # NOTE: Datastore only supports double type so we map float to double
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)
            value_pb.double_value = field_value
        elif field_type in [
                descriptor.FieldDescriptor.TYPE_INT32,
                descriptor.FieldDescriptor.TYPE_INT64
        ]:
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)
            value_pb.integer_value = field_value
        elif field_type == descriptor.FieldDescriptor.TYPE_ENUM:
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)

            if field_descriptor.enum_type.name == 'NullValue':
                # NULL value
                value_pb.null_value = struct_pb2.NULL_VALUE
            else:
                # Regular ENUM
                value_pb.integer_value = field_value
        elif field_type == descriptor.FieldDescriptor.TYPE_BOOL:
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)
            value_pb.boolean_value = field_value
        elif field_type == descriptor.FieldDescriptor.TYPE_BYTES:
            value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)

            if isinstance(field_value, six.string_types):
                field_value = field_value.encode('utf-8')

            value_pb.blob_value = field_value
        elif field_type == descriptor.FieldDescriptor.TYPE_MESSAGE:
            # Complex type, convert to entity
            # NOTE: use a distinct name so the scalar "field_type" (the enum
            # constant) is not clobbered by the descriptor object.
            nested_descriptor = model_pb.DESCRIPTOR.fields_by_name[field_name]

            if nested_descriptor.message_type.full_name == 'google.protobuf.Timestamp':
                if str(field_value) == '':
                    # Value not set
                    # TODO: Include default empty value?
                    # value_pb = datastore.helpers._new_value_pb(entity_pb, field_name)
                    # value_pb.timestamp_value.CopyFrom(field_value)
                    continue

                value_pb = datastore.helpers._new_value_pb(
                    entity_pb, field_name)
                value_pb.timestamp_value.CopyFrom(field_value)
            elif nested_descriptor.message_type.full_name == 'google.type.LatLng':
                if str(field_value) == '':
                    # Value not set
                    continue
                value_pb = datastore.helpers._new_value_pb(
                    entity_pb, field_name)
                value_pb.geo_point_value.CopyFrom(field_value)
            elif isinstance(field_value, MessageMapContainer):
                # Nested dictionary on a struct, set a value directory on a passed in pb object
                # which is a parent Struct entity
                entity_pb_item = get_entity_pb_for_value(value=field_value)
                entity_pb.CopyFrom(entity_pb_item)
            elif isinstance(field_value, ScalarMapContainer):
                # Custom user defined type, recurse into it
                value_pb = datastore.helpers._new_value_pb(
                    entity_pb, field_name)
                entity_pb_item = get_entity_pb_for_value(value=field_value)
                value_pb.entity_value.CopyFrom(entity_pb_item)
            elif nested_descriptor.message_type.full_name == 'google.protobuf.Struct':
                if not dict(field_value):
                    # Value not set, skip it
                    continue

                value_pb = datastore.helpers._new_value_pb(
                    entity_pb, field_name)
                entity_pb_item = get_entity_pb_for_value(value=field_value)
                value_pb.entity_value.CopyFrom(entity_pb_item)
            else:
                # Nested type, potentially referenced from another Protobuf definition file
                value_pb = datastore.helpers._new_value_pb(
                    entity_pb, field_name)
                entity_pb_item = model_pb_to_entity_pb(field_value)
                value_pb.entity_value.CopyFrom(entity_pb_item)
        else:
            # NOTE(fix): "(field_name)" was not a tuple; use a 1-tuple.
            raise ValueError('Unsupported field type for field "%s"' %
                             (field_name,))

        if not value_pb:
            continue

        value_pb = cast(Value, value_pb)

        # Determine if field should be excluded from index
        exclude_field_from_indexes = exclude_field_from_index(
            model=model_pb,
            field_descriptor=field_descriptor,
            exclude_from_index=exclude_from_index)

        if exclude_field_from_indexes:
            # Field should be excluded from the index, mark that on the Entity Value
            value_pb.exclude_from_indexes = True

    return entity_pb
Example 15
def lookup(key, options):
    """Look up a Datastore entity.

    Gets an entity from Datastore, asynchronously. Checks the global cache,
    first, if appropriate. Uses batching.

    Args:
        key (~datastore.Key): The key for the entity to retrieve.
        options (_options.ReadOptions): The options for the request. For
            example, ``{"read_consistency": EVENTUAL}``.

    Returns:
        :class:`~tasklets.Future`: If not an exception, future's result will be
            either an entity protocol buffer or _NOT_FOUND.
    """
    context = context_module.get_context()
    use_datastore = context._use_datastore(key, options)
    # Inside a transaction the global cache is never consulted.
    if use_datastore and options.transaction:
        use_global_cache = False
    else:
        use_global_cache = context._use_global_cache(key, options)

    if not (use_global_cache or use_datastore):
        raise TypeError("use_global_cache and use_datastore can't both be False")

    # _NOT_FOUND acts as the "no entity obtained yet" sentinel below.
    entity_pb = _NOT_FOUND
    key_locked = False

    if use_global_cache:
        cache_key = _cache.global_cache_key(key)
        result = yield _cache.global_get(cache_key)
        # A lock marker means another operation owns this key; in that case
        # we fall through to Datastore but must not write the cache below.
        key_locked = _cache.is_locked_value(result)
        if not key_locked:
            if result:
                # Cache hit: deserialize the stored entity bytes.
                entity_pb = entity_pb2.Entity()
                entity_pb.MergeFromString(result)

            elif use_datastore:
                # Cache miss: take a read lock and watch the key so the
                # compare-and-swap below only succeeds if nobody else
                # touched it in the meantime.
                lock = yield _cache.global_lock_for_read(cache_key, result)
                if lock:
                    yield _cache.global_watch(cache_key, lock)

                else:
                    # Another thread locked or wrote to this key after the call to
                    # _cache.global_get above. Behave as though the key was locked by
                    # another thread and don't attempt to write our value below
                    key_locked = True

    if entity_pb is _NOT_FOUND and use_datastore:
        # No usable cached value; fetch from Datastore via a lookup batch.
        batch = _batch.get_batch(_LookupBatch, options)
        entity_pb = yield batch.add(key)

        # Do not cache misses
        if use_global_cache and not key_locked:
            if entity_pb is not _NOT_FOUND:
                expires = context._global_cache_timeout(key, options)
                serialized = entity_pb.SerializeToString()
                yield _cache.global_compare_and_swap(
                    cache_key, serialized, expires=expires
                )
            else:
                # Drop the watch taken above since there is nothing to store.
                yield _cache.global_unwatch(cache_key)

    # Generator-based tasklet: the result is delivered by raising Return.
    raise tasklets.Return(entity_pb)