def test_get_multi_miss_w_missing(self):
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore.key import Key

    KIND = 'Kind'
    ID = 1234

    # Make a missing entity pb to be returned from mock backend.
    missed = entity_pb2.Entity()
    missed.key.partition_id.dataset_id = self.PROJECT
    path_element = missed.key.path_element.add()
    path_element.kind = KIND
    path_element.id = ID

    creds = object()
    client = self._makeOne(credentials=creds)
    # Set missing entity on mock connection.
    client.connection._add_lookup_result(missing=[missed])

    key = Key(KIND, ID, project=self.PROJECT)
    missing = []
    entities = client.get_multi([key], missing=missing)
    self.assertEqual(entities, [])
    self.assertEqual([missed.key.to_protobuf() for missed in missing],
                     [key.to_protobuf()])

def test_get_multi_miss_w_missing(self):
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    KIND = 'Kind'
    ID = 1234

    # Make a missing entity pb to be returned from mock backend.
    missed = datastore_pb.Entity()
    missed.key.partition_id.dataset_id = self.DATASET_ID
    path_element = missed.key.path_element.add()
    path_element.kind = KIND
    path_element.id = ID

    # Set missing entity on mock connection.
    connection = _Connection()
    connection._missing = [missed]
    client = self._makeOne(connection=connection)

    key = Key(KIND, ID, dataset_id=self.DATASET_ID)
    missing = []
    entities = client.get_multi([key], missing=missing)
    self.assertEqual(entities, [])
    self.assertEqual([missed.key.to_protobuf() for missed in missing],
                     [key.to_protobuf()])

def test_key(self):
    from gcloud.datastore.key import Key

    key = Key('PATH', 1234, project='PROJECT')
    name, value = self._callFUT(key)
    self.assertEqual(name, 'key_value')
    self.assertEqual(value, key.to_protobuf())

def test_key(self):
    from gcloud.datastore.key import Key

    pb = self._makePB()
    key = Key('KIND', 1234, project='PROJECT')
    self._callFUT(pb, key)
    value = pb.key_value
    self.assertEqual(value, key.to_protobuf())

def test_key(self):
    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    _KIND = 'KIND'
    _ID = 1234
    _PATH = [{'kind': _KIND, 'id': _ID}]
    key = Key(dataset=Dataset(_DATASET), path=_PATH)
    name, value = self._callFUT(key)
    self.assertEqual(name, 'key_value')
    self.assertEqual(value, key.to_protobuf())

def test_from_key_wo_dataset(self):
    from gcloud.datastore.key import Key

    klass = self._getTargetClass()
    key = Key().kind(_KIND).id(_ID)
    entity = klass.from_key(key)
    self.assertTrue(entity.dataset() is None)
    self.assertEqual(entity.kind(), _KIND)
    key = entity.key()
    self.assertEqual(key.kind(), _KIND)
    self.assertEqual(key.id(), _ID)

def test_get_multi_w_deferred_from_backend_but_not_passed(self):
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.key import Key

    key1 = Key("Kind", project=self.PROJECT)
    key1_pb = key1.to_protobuf()
    key2 = Key("Kind", 2345, project=self.PROJECT)
    key2_pb = key2.to_protobuf()

    entity1_pb = entity_pb2.Entity()
    entity1_pb.key.CopyFrom(key1_pb)
    entity2_pb = entity_pb2.Entity()
    entity2_pb.key.CopyFrom(key2_pb)

    creds = object()
    client = self._makeOne(credentials=creds)
    # Mock up two separate requests.
    client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb])
    client.connection._add_lookup_result([entity2_pb])

    missing = []
    found = client.get_multi([key1, key2], missing=missing)
    self.assertEqual(len(found), 2)
    self.assertEqual(len(missing), 0)

    # Check the actual contents on the response.
    self.assertTrue(isinstance(found[0], Entity))
    self.assertEqual(found[0].key.path, key1.path)
    self.assertEqual(found[0].key.project, key1.project)

    self.assertTrue(isinstance(found[1], Entity))
    self.assertEqual(found[1].key.path, key2.path)
    self.assertEqual(found[1].key.project, key2.project)

    cw = client.connection._lookup_cw
    self.assertEqual(len(cw), 2)

    ds_id, k_pbs, eventual, tid = cw[0]
    self.assertEqual(ds_id, self.PROJECT)
    self.assertEqual(len(k_pbs), 2)
    self.assertEqual(key1_pb, k_pbs[0])
    self.assertEqual(key2_pb, k_pbs[1])
    self.assertFalse(eventual)
    self.assertTrue(tid is None)

    ds_id, k_pbs, eventual, tid = cw[1]
    self.assertEqual(ds_id, self.PROJECT)
    self.assertEqual(len(k_pbs), 1)
    self.assertEqual(key2_pb, k_pbs[0])
    self.assertFalse(eventual)
    self.assertTrue(tid is None)

def test_from_key(self):
    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.key import Key

    klass = self._getTargetClass()
    dataset = Dataset(_DATASET_ID)
    key = Key(dataset=dataset).kind(_KIND).id(_ID)
    entity = klass.from_key(key)
    self.assertTrue(entity.dataset() is dataset)
    self.assertEqual(entity.kind(), _KIND)
    key = entity.key()
    self.assertEqual(key.kind(), _KIND)
    self.assertEqual(key.id(), _ID)

def test_ancestor(self):
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.key import Key
    from gcloud.datastore.helpers import _prepare_key_for_request

    ancestor = Key('Ancestor', 123, dataset_id='DATASET')
    pb = self._callFUT(_Query(ancestor=ancestor))
    cfilter = pb.filter.composite_filter
    self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
    self.assertEqual(len(cfilter.filter), 1)
    pfilter = cfilter.filter[0].property_filter
    self.assertEqual(pfilter.property.name, '__key__')
    ancestor_pb = _prepare_key_for_request(ancestor.to_protobuf())
    self.assertEqual(pfilter.value.key_value, ancestor_pb)

def test_ancestor(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore._generated import query_pb2

    ancestor = Key('Ancestor', 123, project='PROJECT')
    pb = self._callFUT(_Query(ancestor=ancestor))
    cfilter = pb.filter.composite_filter
    self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
    self.assertEqual(len(cfilter.filters), 1)
    pfilter = cfilter.filters[0].property_filter
    self.assertEqual(pfilter.property.name, '__key__')
    ancestor_pb = ancestor.to_protobuf()
    self.assertEqual(pfilter.value.key_value, ancestor_pb)

def test_key(self):
    from gcloud.datastore.dataset import Dataset
    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    _KIND = 'KIND'
    _ID = 1234
    _PATH = [{'kind': _KIND, 'id': _ID}]
    pb = self._makePB()
    key = Key(dataset=Dataset(_DATASET), path=_PATH)
    self._callFUT(pb, key)
    value = pb.key_value
    self.assertEqual(value, key.to_protobuf())

def test_get_multi_miss_w_deferred(self):
    from gcloud.datastore.key import Key

    key = Key("Kind", 1234, project=self.PROJECT)

    # Set deferred entity on mock connection.
    creds = object()
    client = self._makeOne(credentials=creds)
    client.connection._add_lookup_result(deferred=[key.to_protobuf()])

    deferred = []
    entities = client.get_multi([key], deferred=deferred)
    self.assertEqual(entities, [])
    self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                     [key.to_protobuf()])

def test_entity_w_key(self):
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.key import Key

    pb = self._makePB()
    key = Key(path=[{'kind': 'KIND', 'id': 123}])
    entity = Entity().key(key)
    entity['foo'] = 'Foo'
    self._callFUT(pb, entity)
    value = pb.entity_value
    self.assertEqual(value.key, key.to_protobuf())
    props = list(value.property)
    self.assertEqual(len(props), 1)
    self.assertEqual(props[0].name, 'foo')
    self.assertEqual(props[0].value.string_value, 'Foo')

def test_get_entities_miss_w_deferred(self):
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]
    connection = _Connection()
    dataset = self._makeOne(DATASET_ID, connection)
    key = Key(path=PATH, dataset_id=DATASET_ID)
    connection._deferred = [key.to_protobuf()]
    deferred = []
    entities = dataset.get_entities([key], deferred=deferred)
    self.assertEqual(entities, [])
    self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                     [key.to_protobuf()])

def test_ancester_wo_existing_ancestor_query_w_list(self):
    from gcloud.datastore.key import Key

    _KIND = 'KIND'
    _ID = 123
    key = Key(path=[{'kind': _KIND, 'id': _ID}])
    query = self._makeOne()
    after = query.ancestor([_KIND, _ID])
    self.assertFalse(after is query)
    self.assertTrue(isinstance(after, self._getTargetClass()))
    q_pb = after.to_protobuf()
    self.assertEqual(q_pb.filter.composite_filter.operator, 1)  # AND
    f_pb, = list(q_pb.filter.composite_filter.filter)
    p_pb = f_pb.property_filter
    self.assertEqual(p_pb.property.name, '__key__')
    self.assertEqual(p_pb.value.key_value, key.to_protobuf())

def test_lookup_single_key_empty_response_w_transaction(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    TRANSACTION = 'TRANSACTION'
    key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.LookupResponse()
    conn = self._makeOne()
    conn.transaction(Transaction(TRANSACTION))
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.lookup(DATASET_ID, key_pb), None)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.key)
    self.assertEqual(len(keys), 1)
    self.assertEqual(keys[0], key_pb)
    self.assertEqual(request.read_options.transaction, TRANSACTION)

def test_delete_entities_wo_transaction(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.CommitResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'commit',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    result = conn.delete_entities(DATASET_ID, [key_pb])
    self.assertEqual(result, True)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb.CommitRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, '')
    mutation = request.mutation
    self.assertEqual(len(mutation.insert_auto_id), 0)
    self.assertEqual(len(mutation.upsert), 0)
    deletes = list(mutation.delete)
    self.assertEqual(len(deletes), 1)
    delete = deletes[0]
    self.assertEqual(delete, key_pb)
    self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)

def test_commit_wo_transaction(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.CommitResponse()
    mutation = datastore_pb.Mutation()
    insert = mutation.upsert.add()
    insert.key.CopyFrom(key_pb)
    prop = insert.property.add()
    prop.name = 'foo'
    prop.value.string_value = u'Foo'
    conn = self._makeOne()
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'commit',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    result = conn.commit(DATASET_ID, mutation)
    self.assertEqual(result.index_updates, 0)
    self.assertEqual(list(result.insert_auto_id_key), [])
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb.CommitRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, '')
    self.assertEqual(request.mutation, mutation)
    self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)

def test_lookup_single_key_nonempty_response(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.LookupResponse()
    entity = datastore_pb.Entity()
    entity.key.CopyFrom(key_pb)
    rsp_pb.found.add(entity=entity)
    conn = self._makeOne()
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    found = conn.lookup(DATASET_ID, key_pb)
    self.assertEqual(found.key.path_element[0].kind, 'Kind')
    self.assertEqual(found.key.path_element[0].id, 1234)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.key)
    self.assertEqual(len(keys), 1)
    self.assertEqual(keys[0], key_pb)

def __init__(self, dataset=None, kind=None):
    super(Entity, self).__init__()
    self._dataset = dataset
    if kind:
        self._key = Key().kind(kind)
    else:
        self._key = None

def save(self):
    """Save the entity in the Cloud Datastore.

    :rtype: :class:`gcloud.datastore.entity.Entity`
    :returns: The entity with a possibly updated Key.
    """
    # pylint: disable=maybe-no-member
    key_pb = self.dataset().connection().save_entity(
        dataset_id=self.dataset().id(),
        key_pb=self.key().to_protobuf(),
        properties=dict(self))
    # pylint: enable=maybe-no-member

    # If we are in a transaction and the current entity needs an
    # automatically assigned ID, tell the transaction where to put that.
    transaction = self.dataset().connection().transaction()
    # pylint: disable=maybe-no-member
    if transaction and self.key().is_partial():
        transaction.add_auto_id_entity(self)
    # pylint: enable=maybe-no-member

    if isinstance(key_pb, datastore_pb.Key):
        updated_key = Key.from_protobuf(key_pb)
        # Update the path (which may have been altered).
        # pylint: disable=maybe-no-member
        key = self.key().path(updated_key.path())
        # pylint: enable=maybe-no-member
        self.key(key)

    return self

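# Hedged usage sketch for save() above (not part of the original module).
# It assumes `dataset` is a Dataset already wired to a live connection, and
# relies only on the jQuery-style accessors shown in the surrounding snippets
# (Entity(dataset=..., kind=...), dict-style properties, save()).
from gcloud.datastore.entity import Entity

entity = Entity(dataset=dataset, kind='Person')
entity['name'] = u'Alice'
saved = entity.save()      # sends the properties via connection.save_entity()
assert saved is entity     # save() returns the entity, possibly with a new key
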
def test_get_multi_hit(self):
    from gcloud.datastore.key import Key

    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    creds = object()
    client = self._makeOne(credentials=creds)
    client.connection._add_lookup_result([entity_pb])

    key = Key(KIND, ID, project=self.PROJECT)
    result, = client.get_multi([key])
    new_key = result.key

    # Check the returned value is as expected.
    self.assertFalse(new_key is key)
    self.assertEqual(new_key.project, self.PROJECT)
    self.assertEqual(new_key.path, PATH)
    self.assertEqual(list(result), ['foo'])
    self.assertEqual(result['foo'], 'Foo')

def test_from_key(self):
    key = Key(dataset=Dataset('test-dataset')).kind('TestKind').id(1234)
    entity = Entity.from_key(key)

    self.assertEqual('test-dataset', entity.dataset().id())
    self.assertEqual('TestKind', entity.key().kind())
    self.assertEqual(entity.key().kind(), entity.kind())
    self.assertEqual(1234, entity.key().id())

def test_lookup_single_key_empty_response(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.LookupResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.lookup(DATASET_ID, key_pb), None)
    cw = http._called_with
    self.assertEqual(cw['uri'], URI)
    self.assertEqual(cw['method'], 'POST')
    self.assertEqual(cw['headers']['Content-Type'],
                     'application/x-protobuf')
    self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
    rq_class = datastore_pb.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.key)
    self.assertEqual(len(keys), 1)
    self.assertEqual(keys[0], key_pb)

def commit(self):
    """Commits the transaction.

    This is called automatically upon exiting a with statement; however,
    it can be called explicitly if you don't want to use a context manager.

    This method has necessary side-effects:

    - Sets the current connection's transaction reference to None.
    - Sets the current transaction's ID to None.
    - Updates paths for any keys that needed an automatically generated ID.
    """
    # It's possible that they called commit() already, in which case
    # we shouldn't do any committing of our own.
    if self.connection().transaction():
        result = self.connection().commit(self.dataset().id(),
                                          self.mutation())
        # For any of the auto-id entities, make sure we update their keys.
        for i, entity in enumerate(self._auto_id_entities):
            key_pb = result.insert_auto_id_key[i]
            key = Key.from_protobuf(key_pb)
            entity.key(entity.key().path(key.path()))

    # Tell the connection that the transaction is over.
    self.connection().transaction(None)

    # Clear our own ID in case this gets accidentally reused.
    self._id = None

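# Hedged usage sketch for commit() above (not part of the original module).
# It assumes this era's Dataset exposes a transaction() factory and that the
# Transaction doubles as a context manager, as the docstring above implies;
# begin() is assumed from the usual begin/commit/rollback trio.
with dataset.transaction():
    entity['counter'] = 42
    entity.save()          # commit() runs automatically on exiting the block

xact = dataset.transaction()
xact.begin()
entity.save()
xact.commit()              # explicit commit, no context manager
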
def test_get_multi_max_loops(self):
    from gcloud._testing import _Monkey
    from gcloud.datastore import client as _MUT
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    KIND = 'Kind'
    ID = 1234

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.DATASET_ID, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    connection = _Connection(entity_pb)
    client = self._makeOne(connection=connection)

    key = Key(KIND, ID, dataset_id=self.DATASET_ID)
    deferred = []
    missing = []
    with _Monkey(_MUT, _MAX_LOOPS=-1):
        result = client.get_multi([key], missing=missing,
                                  deferred=deferred)

    # Make sure we have no results, even though the connection has been
    # set up as in `test_hit` to return a single result.
    self.assertEqual(result, [])
    self.assertEqual(missing, [])
    self.assertEqual(deferred, [])

def test_ancestor_deleter_w_key(self):
    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    key = Key('KIND', 123, dataset_id='DATASET')
    query = self._makeOne(_DATASET, ancestor=key)
    del query.ancestor
    self.assertTrue(query.ancestor is None)

def test_get_multi_max_loops(self):
    from gcloud._testing import _Monkey
    from gcloud.datastore import client as _MUT
    from gcloud.datastore.key import Key

    KIND = 'Kind'
    ID = 1234

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    creds = object()
    client = self._makeOne(credentials=creds)
    client.connection._add_lookup_result([entity_pb])

    key = Key(KIND, ID, project=self.PROJECT)
    deferred = []
    missing = []
    with _Monkey(_MUT, _MAX_LOOPS=-1):
        result = client.get_multi([key], missing=missing,
                                  deferred=deferred)

    # Make sure we have no results, even though the connection has been
    # set up as in `test_hit` to return a single result.
    self.assertEqual(result, [])
    self.assertEqual(missing, [])
    self.assertEqual(deferred, [])

def test_get_multi_miss_w_deferred(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    key = Key('Kind', 1234, dataset_id=self.DATASET_ID)

    # Set deferred entity on mock connection.
    connection = _Connection()
    connection._deferred = [key.to_protobuf()]
    client = self._makeOne(connection=connection)

    deferred = []
    entities = client.get_multi([key], deferred=deferred)
    self.assertEqual(entities, [])
    self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                     [key.to_protobuf()])

def test_get_entity_miss(self):
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    connection = _Connection()
    dataset = self._makeOne(DATASET_ID, connection)
    key = Key(path=[{'kind': 'Kind', 'id': 1234}])
    self.assertEqual(dataset.get_entity(key), None)

def key_from_protobuf(pb):
    """Factory method for creating a key based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`gcloud.datastore._datastore_v1_pb2.Key`
    :param pb: The Protobuf representing the key.

    :rtype: :class:`gcloud.datastore.key.Key`
    :returns: a new `Key` instance
    """
    path_args = []
    for element in pb.path_element:
        path_args.append(element.kind)
        if element.HasField('id'):
            path_args.append(element.id)
        # This is safe: we expect proto objects returned will only have
        # one of `name` or `id` set.
        if element.HasField('name'):
            path_args.append(element.name)

    dataset_id = None
    if pb.partition_id.HasField('dataset_id'):
        dataset_id = pb.partition_id.dataset_id
    namespace = None
    if pb.partition_id.HasField('namespace'):
        namespace = pb.partition_id.namespace

    return Key(*path_args, namespace=namespace, dataset_id=dataset_id)

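# Hedged usage sketch for key_from_protobuf() above (not part of the original
# module). It builds a v1beta2-style Key protobuf with the same fields the
# function reads (path_element, partition_id.dataset_id) and converts it back.
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

pb = datastore_pb.Key()
pb.partition_id.dataset_id = 'DATASET'
elem = pb.path_element.add()
elem.kind = 'Kind'
elem.id = 1234

key = key_from_protobuf(pb)
# Expected (roughly, depending on which Key surface this era exposes):
# kind 'Kind', id 1234, and the dataset_id carried over from the partition.
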
def test_add_filter___key__valid_key(self):
    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    query = self._makeOne(_DATASET)
    key = Key('Foo', dataset_id='DATASET')
    query.add_filter('__key__', '=', key)
    self.assertEqual(query.filters, [('__key__', '=', key)])

def test_wo_connection(self):
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key = Key('Kind', 1234, dataset_id=DATASET_ID)
    self.assertRaises(EnvironmentError,
                      self._callFUT, [key], dataset_id=DATASET_ID)

def test_ctor_explicit(self):
    from gcloud.datastore.key import Key

    _DATASET = 'DATASET'
    _KIND = 'KIND'
    _NAMESPACE = 'NAMESPACE'
    ancestor = Key('ANCESTOR', 123, dataset_id=_DATASET)
    FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
    PROJECTION = ['foo', 'bar', 'baz']
    ORDER = ['foo', 'bar']
    GROUP_BY = ['foo']
    query = self._makeOne(
        dataset_id=_DATASET,
        kind=_KIND,
        namespace=_NAMESPACE,
        ancestor=ancestor,
        filters=FILTERS,
        projection=PROJECTION,
        order=ORDER,
        group_by=GROUP_BY,
    )
    self.assertEqual(query.dataset_id, _DATASET)
    self.assertEqual(query.kind, _KIND)
    self.assertEqual(query.namespace, _NAMESPACE)
    self.assertEqual(query.ancestor.path, ancestor.path)
    self.assertEqual(query.filters, FILTERS)
    self.assertEqual(query.projection, PROJECTION)
    self.assertEqual(query.order, ORDER)
    self.assertEqual(query.group_by, GROUP_BY)

def test_miss_wo_dataset_id(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    DATASET_ID = 'DATASET'
    connection = _Connection()
    key = Key('Kind', 1234, dataset_id=DATASET_ID)
    results = self._callFUT([key], connection=connection)
    self.assertEqual(results, [])
    expected = {
        'dataset_id': DATASET_ID,
        'key_pbs': [key.to_protobuf()],
        'transaction_id': None,
        'eventual': False,
    }
    self.assertEqual(connection._called_with, expected)

def key_from_protobuf(pb):
    """Factory method for creating a key based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
    :param pb: The Protobuf representing the key.

    :rtype: :class:`gcloud.datastore.key.Key`
    :returns: a new `Key` instance
    """
    path_args = []
    for element in pb.path:
        path_args.append(element.kind)
        if element.id:  # Simple field (int64)
            path_args.append(element.id)
        # This is safe: we expect proto objects returned will only have
        # one of `name` or `id` set.
        if element.name:  # Simple field (string)
            path_args.append(element.name)

    project = None
    if pb.partition_id.project_id:  # Simple field (string)
        project = pb.partition_id.project_id
    namespace = None
    if pb.partition_id.namespace_id:  # Simple field (string)
        namespace = pb.partition_id.namespace_id

    return Key(*path_args, namespace=namespace, project=project)

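# Hedged usage sketch for the newer key_from_protobuf() above (not part of the
# original module). It fills in the same simple fields the function reads
# (path, partition_id.project_id) and converts them back into a Key.
from gcloud.datastore._generated import entity_pb2

pb = entity_pb2.Key()
pb.partition_id.project_id = 'PROJECT'
elem = pb.path.add()
elem.kind = 'Kind'
elem.id = 1234

key = key_from_protobuf(pb)
# Expected: key.path == [{'kind': 'Kind', 'id': 1234}] and
# key.project == 'PROJECT', mirroring the Key(..., project=...) constructor
# used throughout the tests in this collection.
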
def test_get_multi_hit(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]

    # Make a found entity pb to be returned from mock backend.
    entity_pb = _make_entity_pb(self.DATASET_ID, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    connection = _Connection(entity_pb)
    client = self._makeOne(connection=connection)

    key = Key(KIND, ID, dataset_id=self.DATASET_ID)
    result, = client.get_multi([key])
    new_key = result.key

    # Check the returned value is as expected.
    self.assertFalse(new_key is key)
    self.assertEqual(new_key.dataset_id, self.DATASET_ID)
    self.assertEqual(new_key.path, PATH)
    self.assertEqual(list(result), ['foo'])
    self.assertEqual(result['foo'], 'Foo')

def test_ctor_explicit(self):
    from gcloud.datastore.key import Key

    _PROJECT = 'OTHER_PROJECT'
    _KIND = 'KIND'
    _NAMESPACE = 'OTHER_NAMESPACE'
    client = self._makeClient()
    ancestor = Key('ANCESTOR', 123, project=_PROJECT)
    FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
    PROJECTION = ['foo', 'bar', 'baz']
    ORDER = ['foo', 'bar']
    GROUP_BY = ['foo']
    query = self._makeOne(
        client,
        kind=_KIND,
        project=_PROJECT,
        namespace=_NAMESPACE,
        ancestor=ancestor,
        filters=FILTERS,
        projection=PROJECTION,
        order=ORDER,
        group_by=GROUP_BY,
    )
    self.assertTrue(query._client is client)
    self.assertEqual(query.project, _PROJECT)
    self.assertEqual(query.kind, _KIND)
    self.assertEqual(query.namespace, _NAMESPACE)
    self.assertEqual(query.ancestor.path, ancestor.path)
    self.assertEqual(query.filters, FILTERS)
    self.assertEqual(query.projection, PROJECTION)
    self.assertEqual(query.order, ORDER)
    self.assertEqual(query.group_by, GROUP_BY)

def test_save_entity_wo_transaction_w_auto_id(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    key_pb = Key(path=[{'kind': 'Kind'}]).to_protobuf()
    updated_key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf()
    rsp_pb = datastore_pb.CommitResponse()
    mr_pb = rsp_pb.mutation_result
    mr_pb.index_updates = 0
    iaik_pb = mr_pb.insert_auto_id_key.add()
    iaik_pb.CopyFrom(updated_key_pb)
    conn = self._makeOne()
    URI = '/'.join([
        conn.API_BASE_URL,
        'datastore',
        conn.API_VERSION,
        'datasets',
        DATASET_ID,
        'commit',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    result = conn.save_entity(DATASET_ID, key_pb, {'foo': u'Foo'})
    self.assertEqual(result, updated_key_pb)
    cw = http._called_with
    self.assertEqual(cw['uri'], URI)
    self.assertEqual(cw['method'], 'POST')
    self.assertEqual(cw['headers']['Content-Type'],
                     'application/x-protobuf')
    self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
    rq_class = datastore_pb.CommitRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, '')
    mutation = request.mutation
    inserts = list(mutation.insert_auto_id)
    insert = inserts[0]
    self.assertEqual(insert.key, key_pb)
    props = list(insert.property)
    self.assertEqual(len(props), 1)
    self.assertEqual(props[0].name, 'foo')
    self.assertEqual(props[0].value.string_value, u'Foo')
    self.assertEqual(len(inserts), 1)
    upserts = list(mutation.upsert)
    self.assertEqual(len(upserts), 0)
    self.assertEqual(len(mutation.delete), 0)
    self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)

def test_filter_key(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore._generated import query_pb2

    key = Key('Kind', 123, project='PROJECT')
    query = _Query(filters=[('__key__', '=', key)])
    query.OPERATORS = {
        '=': query_pb2.PropertyFilter.EQUAL,
    }
    pb = self._callFUT(query)
    cfilter = pb.filter.composite_filter
    self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
    self.assertEqual(len(cfilter.filters), 1)
    pfilter = cfilter.filters[0].property_filter
    self.assertEqual(pfilter.property.name, '__key__')
    key_pb = key.to_protobuf()
    self.assertEqual(pfilter.value.key_value, key_pb)

def test_filter_key(self):
    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
    from gcloud.datastore.key import Key
    from gcloud.datastore.helpers import _prepare_key_for_request

    key = Key('Kind', 123, dataset_id='DATASET')
    query = _Query(filters=[('__key__', '=', key)])
    query.OPERATORS = {
        '=': datastore_pb.PropertyFilter.EQUAL,
    }
    pb = self._callFUT(query)
    cfilter = pb.filter.composite_filter
    self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
    self.assertEqual(len(cfilter.filter), 1)
    pfilter = cfilter.filter[0].property_filter
    self.assertEqual(pfilter.property.name, '__key__')
    key_pb = _prepare_key_for_request(key.to_protobuf())
    self.assertEqual(pfilter.value.key_value, key_pb)

def test_miss_w_deferred(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    DATASET_ID = 'DATASET'
    key = Key('Kind', 1234, dataset_id=DATASET_ID)

    # Set deferred entity on mock connection.
    connection = _Connection()
    connection._deferred = [key.to_protobuf()]

    deferred = []
    entities = self._callFUT([key], connection=connection,
                             deferred=deferred)
    self.assertEqual(entities, [])
    self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                     [key.to_protobuf()])

def test_miss_w_deferred(self):
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection

    DATASET_ID = 'DATASET'
    key = Key('Kind', 1234, dataset_id=DATASET_ID)

    # Set deferred entity on mock connection.
    connection = _Connection()
    connection._deferred = [key.to_protobuf()]

    deferred = []
    entities = self._callFUT([key], connection=connection,
                             deferred=deferred, dataset_id=DATASET_ID)
    self.assertEqual(entities, [])
    self.assertEqual([def_key.to_protobuf() for def_key in deferred],
                     [key.to_protobuf()])

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
    super(Entity, self).__init__()
    self._dataset = dataset
    if kind:
        self._key = Key().kind(kind)
    else:
        self._key = None
    self._exclude_from_indexes = set(exclude_from_indexes)

def test_ancestor_setter_w_key(self):
    from gcloud.datastore.key import Key

    _NAME = u'NAME'
    key = Key('KIND', 123, project=self._PROJECT)
    query = self._makeOne(self._makeClient())
    query.add_filter('name', '=', _NAME)
    query.ancestor = key
    self.assertEqual(query.ancestor.path, key.path)

def test_get_multi_hit_multiple_keys_different_project(self):
    from gcloud.datastore.key import Key

    PROJECT1 = 'PROJECT'
    PROJECT2 = 'PROJECT-ALT'

    # Make sure our IDs are actually different.
    self.assertNotEqual(PROJECT1, PROJECT2)

    key1 = Key('KIND', 1234, project=PROJECT1)
    key2 = Key('KIND', 1234, project=PROJECT2)

    creds = object()
    client = self._makeOne(credentials=creds)

    with self.assertRaises(ValueError):
        client.get_multi([key1, key2])

def test_get_multi_hit_multiple_keys_different_dataset(self):
    from gcloud.datastore.key import Key

    DATASET_ID1 = 'DATASET'
    DATASET_ID2 = 'DATASET-ALT'

    # Make sure our IDs are actually different.
    self.assertNotEqual(DATASET_ID1, DATASET_ID2)

    key1 = Key('KIND', 1234, dataset_id=DATASET_ID1)
    key2 = Key('KIND', 1234, dataset_id=DATASET_ID2)

    creds = object()
    client = self._makeOne(credentials=creds)

    with self.assertRaises(ValueError):
        client.get_multi([key1, key2])

def key(self, *path_args, **kwargs):
    """Proxy to :class:`gcloud.datastore.key.Key`.

    Passes our ``dataset_id``.
    """
    if 'dataset_id' in kwargs:
        raise TypeError('Cannot pass dataset_id')
    kwargs['dataset_id'] = self.dataset_id
    return Key(*path_args, **kwargs)

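# Hedged usage sketch for the key() proxy above (not part of the original
# module). `dataset` stands in for an object exposing this method with a
# `dataset_id` attribute; the positional args follow the Key('Kind', id, ...)
# convention used in the surrounding tests.
key = dataset.key('Kind', 1234)
# Equivalent to Key('Kind', 1234, dataset_id=dataset.dataset_id);
# passing dataset_id explicitly raises TypeError.
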
def test_key(self):
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore.key import Key

    pb = entity_pb2.Value()
    expected = Key('KIND', 1234, project='PROJECT').to_protobuf()
    pb.key_value.CopyFrom(expected)
    found = self._callFUT(pb)
    self.assertEqual(found.to_protobuf(), expected)

def test_entity_w_key(self):
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.helpers import _property_tuples
    from gcloud.datastore.key import Key

    name = 'foo'
    value = u'Foo'
    pb = self._makePB()
    key = Key('KIND', 123, project='PROJECT')
    entity = Entity(key=key)
    entity[name] = value
    self._callFUT(pb, entity)

    entity_pb = pb.entity_value
    self.assertEqual(entity_pb.key, key.to_protobuf())

    prop_dict = dict(_property_tuples(entity_pb))
    self.assertEqual(len(prop_dict), 1)
    self.assertEqual(list(prop_dict.keys()), [name])
    self.assertEqual(prop_dict[name].string_value, value)

def test_get_entities_miss_w_missing(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]
    missed = datastore_pb.Entity()
    missed.key.partition_id.dataset_id = DATASET_ID
    path_element = missed.key.path_element.add()
    path_element.kind = KIND
    path_element.id = ID
    connection = _Connection()
    connection._missing = [missed]
    dataset = self._makeOne(DATASET_ID, connection)
    key = Key(path=PATH, dataset_id=DATASET_ID)
    missing = []
    entities = dataset.get_entities([key], missing=missing)
    self.assertEqual(entities, [])
    self.assertEqual([missed.key().to_protobuf() for missed in missing],
                     [key.to_protobuf()])

def test_implicit_wo_transaction(self):
    from gcloud.datastore import _implicit_environ
    from gcloud.datastore.key import Key
    from gcloud.datastore.test_connection import _Connection
    from gcloud._testing import _Monkey

    DATASET_ID = 'DATASET'
    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]

    # Make a found entity pb to be returned from mock backend.
    entity_pb = self._make_entity_pb(DATASET_ID, KIND, ID, 'foo', 'Foo')

    # Make a connection to return the entity pb.
    CUSTOM_CONNECTION = _Connection(entity_pb)

    key = Key(KIND, ID, dataset_id=DATASET_ID)
    with _Monkey(_implicit_environ,
                 CONNECTION=CUSTOM_CONNECTION,
                 DATASET_ID=DATASET_ID):
        result, = self._callFUT([key])

    expected_called_with = {
        'dataset_id': DATASET_ID,
        'key_pbs': [key.to_protobuf()],
        'transaction_id': None,
        'eventual': False,
    }
    self.assertEqual(CUSTOM_CONNECTION._called_with, expected_called_with)

    new_key = result.key
    # Check the returned value is as expected.
    self.assertFalse(new_key is key)
    self.assertEqual(new_key.dataset_id, DATASET_ID)
    self.assertEqual(new_key.path, PATH)
    self.assertEqual(list(result), ['foo'])
    self.assertEqual(result['foo'], 'Foo')

def test_get_entity_hit(self):
    from gcloud.datastore.connection import datastore_pb
    from gcloud.datastore.key import Key

    DATASET_ID = 'DATASET'
    KIND = 'Kind'
    ID = 1234
    PATH = [{'kind': KIND, 'id': ID}]
    entity_pb = datastore_pb.Entity()
    path_element = entity_pb.key.path_element.add()
    path_element.kind = KIND
    path_element.id = ID
    prop = entity_pb.property.add()
    prop.name = 'foo'
    prop.value.string_value = 'Foo'
    connection = _Connection(entity_pb)
    dataset = self._makeOne(DATASET_ID, connection)
    key = Key(dataset=dataset, path=PATH)
    result = dataset.get_entity(key)
    key = result.key()
    self.assertTrue(key.dataset() is dataset)
    self.assertEqual(key.path(), PATH)
    self.assertEqual(list(result), ['foo'])
    self.assertEqual(result['foo'], 'Foo')

def _get_value_from_protobuf(pb):
    """Given a protobuf for a Property, get the correct value.

    The Cloud Datastore Protobuf API returns a Property Protobuf which
    has one value set and the rest blank.  This function retrieves the
    one value provided.

    Some work is done to coerce the return value into a more useful type
    (particularly in the case of a timestamp value, or a key value).

    :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Property`
    :param pb: The Property Protobuf.

    :returns: The value provided by the Protobuf.
    """
    if pb.value.HasField('timestamp_microseconds_value'):
        microseconds = pb.value.timestamp_microseconds_value
        naive = (datetime.utcfromtimestamp(0) +
                 timedelta(microseconds=microseconds))
        return naive.replace(tzinfo=pytz.utc)

    elif pb.value.HasField('key_value'):
        return Key.from_protobuf(pb.value.key_value)

    elif pb.value.HasField('boolean_value'):
        return pb.value.boolean_value

    elif pb.value.HasField('double_value'):
        return pb.value.double_value

    elif pb.value.HasField('integer_value'):
        return pb.value.integer_value

    elif pb.value.HasField('string_value'):
        return pb.value.string_value

    elif pb.value.HasField('entity_value'):
        return Entity.from_protobuf(pb.value.entity_value)

    else:
        return None

def get_value_from_protobuf(pb):
    """Given a protobuf for a Property, get the correct value.

    The Cloud Datastore Protobuf API returns a Property Protobuf which
    has one value set and the rest blank.  This method retrieves the
    one value provided.

    Some work is done to coerce the return value into a more useful type
    (particularly in the case of a timestamp value, or a key value).

    :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Property`
    :param pb: The Property Protobuf.

    :returns: The value provided by the Protobuf.
    """
    if pb.value.HasField('timestamp_microseconds_value'):
        microseconds = pb.value.timestamp_microseconds_value
        return (datetime.utcfromtimestamp(0) +
                timedelta(microseconds=microseconds))

    elif pb.value.HasField('key_value'):
        return Key.from_protobuf(pb.value.key_value)

    elif pb.value.HasField('boolean_value'):
        return pb.value.boolean_value

    elif pb.value.HasField('double_value'):
        return pb.value.double_value

    elif pb.value.HasField('integer_value'):
        return pb.value.integer_value

    elif pb.value.HasField('string_value'):
        return pb.value.string_value

    else:
        # TODO(jjg): Should we raise a ValueError here?
        return None

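# Hedged usage sketch for the value helpers above (not part of the original
# module). It builds a Property protobuf with exactly one value field set,
# which is the shape both helpers expect, and reads it back. The import path
# follows the docstring above; other snapshots in this collection name the
# module _datastore_v1_pb2 instead.
from gcloud.datastore import datastore_v1_pb2 as datastore_pb

prop = datastore_pb.Property()
prop.name = 'greeting'
prop.value.string_value = u'Hello'

assert get_value_from_protobuf(prop) == u'Hello'
# A key_value or timestamp_microseconds_value field would instead be coerced
# to a Key or a datetime, as the branches above show.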