def test_nested_entity_no_key(self):
    """An entity value nested inside a property converts to a keyless Entity."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    DATASET_ID = 's~FOO'
    KIND = 'KIND'
    INSIDE_NAME = 'IFOO'
    OUTSIDE_NAME = 'OBAR'
    INSIDE_VALUE = 1337

    # Inner entity: a single integer property and no key.
    inner_pb = datastore_pb.Entity()
    inner_prop = inner_pb.property.add()
    inner_prop.name = INSIDE_NAME
    inner_prop.value.integer_value = INSIDE_VALUE

    # Outer entity: partial key (kind only) plus the inner entity as a value.
    outer_pb = datastore_pb.Entity()
    outer_pb.key.partition_id.dataset_id = DATASET_ID
    outer_pb.key.path_element.add().kind = KIND
    outer_prop = outer_pb.property.add()
    outer_prop.name = OUTSIDE_NAME
    outer_prop.value.entity_value.CopyFrom(inner_pb)

    entity = self._callFUT(outer_pb)

    self.assertEqual(entity.key.dataset_id, DATASET_ID)
    self.assertEqual(entity.key.flat_path, (KIND,))
    self.assertEqual(len(entity), 1)
    nested = entity[OUTSIDE_NAME]
    self.assertEqual(nested.key, None)
    self.assertEqual(len(nested), 1)
    self.assertEqual(nested[INSIDE_NAME], INSIDE_VALUE)
def test_get_multi_w_deferred_from_backend_but_not_passed(self):
    """get_multi follows up on keys the backend reports as deferred."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.key import Key

    key1 = Key('Kind', dataset_id=self.DATASET_ID)
    key2 = Key('Kind', 2345, dataset_id=self.DATASET_ID)
    key1_pb = key1.to_protobuf()
    key2_pb = key2.to_protobuf()

    entity1_pb = datastore_pb.Entity()
    entity1_pb.key.CopyFrom(key1_pb)
    entity2_pb = datastore_pb.Entity()
    entity2_pb.key.CopyFrom(key2_pb)

    creds = object()
    client = self._makeOne(credentials=creds)
    # First lookup returns entity1 and defers key2; second returns entity2.
    client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb])
    client.connection._add_lookup_result([entity2_pb])

    missing = []
    found = client.get_multi([key1, key2], missing=missing)

    self.assertEqual(len(found), 2)
    self.assertEqual(len(missing), 0)

    # Both entities come back, in request order.
    for result, key in zip(found, (key1, key2)):
        self.assertTrue(isinstance(result, Entity))
        self.assertEqual(result.key.path, key.path)
        self.assertEqual(result.key.dataset_id, key.dataset_id)

    cw = client.connection._lookup_cw
    self.assertEqual(len(cw), 2)

    # First lookup request asked for both keys.
    ds_id, k_pbs, eventual, tid = cw[0]
    self.assertEqual(ds_id, self.DATASET_ID)
    self.assertEqual(len(k_pbs), 2)
    self.assertEqual(key1_pb, k_pbs[0])
    self.assertEqual(key2_pb, k_pbs[1])
    self.assertFalse(eventual)
    self.assertTrue(tid is None)

    # Second lookup request retried only the deferred key.
    ds_id, k_pbs, eventual, tid = cw[1]
    self.assertEqual(ds_id, self.DATASET_ID)
    self.assertEqual(len(k_pbs), 1)
    self.assertEqual(key2_pb, k_pbs[0])
    self.assertFalse(eventual)
    self.assertTrue(tid is None)
def test_get_multi_miss_w_missing(self):
    """get_multi records backend-reported missing entities in ``missing``."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    KIND = 'Kind'
    ID = 1234

    # Missing-entity protobuf the mock backend will hand back.
    missed_pb = datastore_pb.Entity()
    missed_pb.key.partition_id.dataset_id = self.DATASET_ID
    elem = missed_pb.key.path_element.add()
    elem.kind = KIND
    elem.id = ID

    connection = _Connection()
    connection._missing = [missed_pb]
    client = self._makeOne(connection=connection)

    key = Key(KIND, ID, dataset_id=self.DATASET_ID)
    missing = []
    entities = client.get_multi([key], missing=missing)

    # Nothing found; the missing list holds the entity for our key.
    self.assertEqual(entities, [])
    self.assertEqual([entry.key.to_protobuf() for entry in missing],
                     [key.to_protobuf()])
def test_entity_no_key(self):
    """An empty entity protobuf converts to a keyless, property-less Entity."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    empty_pb = datastore_pb.Entity()
    converted = self._callFUT(empty_pb)
    self.assertEqual(converted.key, None)
    self.assertEqual(dict(converted), {})
def test_it(self):
    """Key, properties, and per-property/per-item index flags all round-trip."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    _DATASET_ID = 'DATASET'
    _KIND = 'KIND'
    _ID = 1234

    entity_pb = datastore_pb.Entity()
    entity_pb.key.partition_id.dataset_id = _DATASET_ID
    entity_pb.key.path_element.add(kind=_KIND, id=_ID)

    # Indexed scalar property.
    foo_pb = entity_pb.property.add()
    foo_pb.name = 'foo'
    foo_pb.value.string_value = 'Foo'

    # Unindexed scalar property.
    bar_pb = entity_pb.property.add()
    bar_pb.name = 'bar'
    bar_pb.value.integer_value = 10
    bar_pb.value.indexed = False

    # List property whose single item is unindexed.
    baz_pb = entity_pb.property.add()
    baz_pb.name = 'baz'
    baz_item = baz_pb.value.list_value.add()
    baz_item.integer_value = 11
    baz_item.indexed = False

    # List property whose single item is indexed.
    qux_pb = entity_pb.property.add()
    qux_pb.name = 'qux'
    qux_item = qux_pb.value.list_value.add()
    qux_item.integer_value = 12
    qux_item.indexed = True

    entity = self._callFUT(entity_pb)

    self.assertEqual(entity.kind, _KIND)
    # Unindexed properties ('bar', and 'baz' via its items) are excluded.
    self.assertEqual(entity.exclude_from_indexes, frozenset(['bar', 'baz']))
    self.assertEqual(dict(entity), {
        'foo': 'Foo',
        'bar': 10,
        'baz': [11],
        'qux': [12],
    })

    # Also check the key.
    key = entity.key
    self.assertEqual(key.dataset_id, _DATASET_ID)
    self.assertEqual(key.namespace, None)
    self.assertEqual(key.kind, _KIND)
    self.assertEqual(key.id, _ID)
def _make_entity_pb(dataset_id, kind, integer_id, name=None, str_val=None):
    """Build an Entity protobuf with a complete key and optional string property.

    :param dataset_id: dataset ID for the key's partition.
    :param kind: kind of the key's single path element.
    :param integer_id: numeric ID of the key's single path element.
    :param name: optional property name; added only when ``str_val`` is also given.
    :param str_val: optional string value for the named property.
    :returns: the populated ``Entity`` protobuf.
    """
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    pb = datastore_pb.Entity()
    pb.key.partition_id.dataset_id = dataset_id
    elem = pb.key.path_element.add()
    elem.kind = kind
    elem.id = integer_id

    # Attach a string property only if both pieces were supplied.
    if name is not None and str_val is not None:
        prop_pb = pb.property.add()
        prop_pb.name = name
        prop_pb.value.string_value = str_val

    return pb
def _addQueryResults(self, connection, cursor=_END, more=False):
    """Queue a single-entity query result batch on the mock ``connection``.

    :param connection: mock connection whose ``_results`` list is appended to.
    :param cursor: end cursor reported for the batch.
    :param more: if true, report NOT_FINISHED so iteration continues.
    """
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    MORE = datastore_pb.QueryResultBatch.NOT_FINISHED
    NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
    _ID = 123

    # One entity with a complete key and a single string property.
    entity_pb = datastore_pb.Entity()
    entity_pb.key.partition_id.dataset_id = self._DATASET
    elem = entity_pb.key.path_element.add()
    elem.kind = self._KIND
    elem.id = _ID
    prop_pb = entity_pb.property.add()
    prop_pb.name = 'foo'
    prop_pb.value.string_value = u'Foo'

    end_state = MORE if more else NO_MORE
    connection._results.append(([entity_pb], cursor, end_state))
def lookup(self, dataset_id, key_pbs):
    """Mock lookup: echo the single requested key back as an entity.

    Records the call arguments on the instance.  When ``self.from_missing``
    is set, the entity is reported via the missing list instead of found.
    :returns: ``(found, missing, deferred)`` triple.
    """
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    # Store the arguments called with.
    self._called_dataset_id = dataset_id
    self._called_key_pbs = key_pbs

    key_pb, = key_pbs  # exactly one key expected
    entity_pb = datastore_pb.Entity()
    entity_pb.key.CopyFrom(key_pb)
    entity_pb.key.partition_id.dataset_id = self.prefix + dataset_id

    missing = []
    deferred = []
    if self.from_missing:
        missing[:] = [entity_pb]
        self._lookup_result = []
    else:
        self._lookup_result = [entity_pb]
    return self._lookup_result, missing, deferred
def test_mismatched_value_indexed(self):
    """A list whose items disagree on ``indexed`` raises ValueError."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

    _DATASET_ID = 'DATASET'
    _KIND = 'KIND'
    _ID = 1234

    entity_pb = datastore_pb.Entity()
    entity_pb.key.partition_id.dataset_id = _DATASET_ID
    entity_pb.key.path_element.add(kind=_KIND, id=_ID)

    list_prop = entity_pb.property.add()
    list_prop.name = 'baz'
    # First item unindexed, second indexed: an inconsistent mix.
    first = list_prop.value.list_value.add()
    first.integer_value = 10
    first.indexed = False
    second = list_prop.value.list_value.add()
    second.integer_value = 11
    second.indexed = True

    with self.assertRaises(ValueError):
        self._callFUT(entity_pb)
def test_get_multi_w_deferred_from_backend_but_not_passed(self):
    """get_multi retries deferred keys via a second HTTP lookup request."""
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.connection import Connection
    from gcloud.datastore.key import Key
    from gcloud.datastore import test_connection

    # Shortening name, import line above was too long.
    cmp_key_after_req = test_connection._compare_key_pb_after_request

    key1 = Key('Kind', dataset_id=self.DATASET_ID)
    key2 = Key('Kind', 2345, dataset_id=self.DATASET_ID)
    key_pb1 = key1.to_protobuf()
    key_pb2 = key2.to_protobuf()

    # First mock response: key1 found, key2 deferred.
    rsp_pb1 = datastore_pb.LookupResponse()
    entity1 = datastore_pb.Entity()
    entity1.key.CopyFrom(key_pb1)
    rsp_pb1.found.add(entity=entity1)
    rsp_pb1.deferred.add().CopyFrom(key_pb2)

    # Second mock response: the previously-deferred key2 found.
    rsp_pb2 = datastore_pb.LookupResponse()
    entity2 = datastore_pb.Entity()
    entity2.key.CopyFrom(key_pb2)
    rsp_pb2.found.add(entity=entity2)

    connection = Connection()
    client = self._makeOne(connection=connection)
    # Mock http object replays the two canned responses in order.
    http = connection._http = _HttpMultiple(
        ({'status': '200'}, rsp_pb1.SerializeToString()),
        ({'status': '200'}, rsp_pb2.SerializeToString()),
    )

    missing = []
    found = client.get_multi([key1, key2], missing=missing)
    self.assertEqual(len(found), 2)
    self.assertEqual(len(missing), 0)

    # Check the actual contents on the response.
    self.assertEqual(found[0].key.path, key1.path)
    self.assertEqual(found[0].key.dataset_id, key1.dataset_id)
    self.assertEqual(found[1].key.path, key2.path)
    self.assertEqual(found[1].key.dataset_id, key2.dataset_id)

    # Check that our http object was called correctly.
    cw = http._called_with
    request = datastore_pb.LookupRequest()
    self.assertEqual(len(cw), 2)

    # URI both requests should have been posted to.
    URI = '/'.join([
        connection.api_base_url,
        'datastore',
        connection.API_VERSION,
        'datasets',
        self.DATASET_ID,
        'lookup',
    ])

    # First request carried both keys.
    self._verifyProtobufCall(cw[0], URI, connection)
    request.ParseFromString(cw[0]['body'])
    keys = list(request.key)
    self.assertEqual(len(keys), 2)
    cmp_key_after_req(self, key_pb1, keys[0])
    cmp_key_after_req(self, key_pb2, keys[1])

    # Second request carried only the deferred key.
    self._verifyProtobufCall(cw[1], URI, connection)
    request.ParseFromString(cw[1]['body'])
    keys = list(request.key)
    self.assertEqual(len(keys), 1)
    cmp_key_after_req(self, key_pb2, keys[0])