# Imports assumed by the snippets below; the module paths follow the
# gcloud-aio-* packages these smoke tests exercise.
import asyncio
import uuid
from asyncio import sleep

import aiohttp

from gcloud.aio.bigquery import SourceFormat, Table
from gcloud.aio.datastore import (Datastore, Filter, GQLCursor, GQLQuery, Key,
                                  LatLng, Operation, PathElement, Projection,
                                  PropertyFilter, PropertyFilterOperator,
                                  Query, Value)
from gcloud.aio.storage import Storage

# `Session` below is taken to be aiohttp's ClientSession; the sync
# `gcloud-rest` build of these libraries would use requests.Session instead.
from aiohttp import ClientSession as Session


async def test_query(creds: str, kind: str, project: str) -> None:
    async with aiohttp.ClientSession(conn_timeout=10, read_timeout=10) as s:
        ds = Datastore(project, creds, session=s)
        query = GQLQuery(f'SELECT * FROM {kind} WHERE value = @value',
                         named_bindings={'value': 42})

        before = await ds.runQuery(query, session=s)
        num_results = len(before.entity_results)

        transaction = await ds.beginTransaction(session=s)
        mutations = [
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties={'value': 42}),
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties={'value': 42}),
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties={'value': 42}),
        ]
        await ds.commit(transaction, mutations=mutations, session=s)

        after = await ds.runQuery(query, session=s)
        assert len(after.entity_results) == num_results + 3


async def test_query(creds: str, kind: str, project: str) -> None:
    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        property_filter = PropertyFilter(
            prop='value', operator=PropertyFilterOperator.EQUAL,
            value=Value(42))
        query = Query(kind=kind, query_filter=Filter(property_filter))

        before = await ds.runQuery(query, session=s)
        num_results = len(before.entity_results)

        transaction = await ds.beginTransaction(session=s)
        mutations = [
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties={'value': 42}),
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties={'value': 42}),
        ]
        await ds.commit(mutations, transaction=transaction, session=s)

        after = await ds.runQuery(query, session=s)
        assert len(after.entity_results) == num_results + 2


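# A minimal sketch, assuming CompositeFilter/CompositeFilterOperator are
# exported by gcloud.aio.datastore (the names mirror the Datastore REST
# API's CompositeFilter message): several property filters can be AND-ed
# into a single Filter for Query.query_filter, following the
# PropertyFilter usage above.
from gcloud.aio.datastore import CompositeFilter, CompositeFilterOperator


def make_range_filter(low: int, high: int) -> Filter:
    # matches entities with low <= value < high
    return Filter(CompositeFilter(
        operator=CompositeFilterOperator.AND,
        filters=[
            Filter(PropertyFilter(
                prop='value',
                operator=PropertyFilterOperator.GREATER_THAN_OR_EQUAL,
                value=Value(low))),
            Filter(PropertyFilter(
                prop='value',
                operator=PropertyFilterOperator.LESS_THAN,
                value=Value(high))),
        ]))

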
async def test_item_lifecycle(creds: str, kind: str, project: str) -> None:
    key = Key(project, [PathElement(kind)])

    async with aiohttp.ClientSession(conn_timeout=10, read_timeout=10) as s:
        ds = Datastore(project, creds, session=s)

        allocatedKeys = await ds.allocateIds([key], session=s)
        assert len(allocatedKeys) == 1
        key.path[-1].id = allocatedKeys[0].path[-1].id
        assert key == allocatedKeys[0]

        await ds.reserveIds(allocatedKeys, session=s)

        props_insert = {'is_this_bad_data': True}
        await ds.insert(allocatedKeys[0], props_insert, session=s)
        actual = await ds.lookup([allocatedKeys[0]], session=s)
        assert actual['found'][0].entity.properties == props_insert

        props_update = {'animal': 'aardvark', 'overwrote_bad_data': True}
        await ds.update(allocatedKeys[0], props_update, session=s)
        actual = await ds.lookup([allocatedKeys[0]], session=s)
        assert actual['found'][0].entity.properties == props_update

        props_upsert = {'meaning_of_life': 42}
        await ds.upsert(allocatedKeys[0], props_upsert, session=s)
        actual = await ds.lookup([allocatedKeys[0]], session=s)
        assert actual['found'][0].entity.properties == props_upsert

        await ds.delete(allocatedKeys[0], session=s)
        actual = await ds.lookup([allocatedKeys[0]], session=s)
        assert len(actual['missing']) == 1


async def test_datastore_export(creds: str, project: str,
                                export_bucket_name: str) -> None:
    # N.B. when modifying this test, please also see `test_table_load_copy`
    # in `gcloud-aio-bigquery`.
    kind = 'PublicTestDatastoreExportModel'

    rand_uuid = str(uuid.uuid4())

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        await ds.insert(Key(project, [PathElement(kind)]),
                        properties={'rand_str': rand_uuid})

        operation = await ds.export(export_bucket_name, kinds=[kind])

        count = 0
        while (count < 10 and operation
               and operation.metadata['common']['state'] == 'PROCESSING'):
            await sleep(10)
            operation = await ds.get_datastore_operation(operation.name)
            count += 1
        assert operation.metadata['common']['state'] == 'SUCCESSFUL'

        prefix_len = len(f'gs://{export_bucket_name}/')
        export_path = operation.metadata['outputUrlPrefix'][prefix_len:]

        storage = Storage(service_file=creds, session=s)
        files = await storage.list_objects(export_bucket_name,
                                           params={'prefix': export_path})
        for file in files['items']:
            await storage.delete(export_bucket_name, file['name'])


async def test_gql_query_pagination(creds: str, kind: str,
                                    project: str) -> None:
    async with Session() as s:
        query_string = (f'SELECT __key__ FROM {kind} '
                        'WHERE value = @value LIMIT @limit OFFSET @offset')
        named_bindings = {'value': 42, 'limit': 2**31 - 1, 'offset': 0}

        ds = Datastore(project=project, service_file=creds, session=s)
        before = await ds.runQuery(
            GQLQuery(query_string, named_bindings=named_bindings), session=s)

        insertion_count = 8
        transaction = await ds.beginTransaction(session=s)
        mutations = [
            ds.make_mutation(Operation.INSERT,
                             Key(project, [PathElement(kind)]),
                             properties=named_bindings)
        ] * insertion_count
        await ds.commit(mutations, transaction=transaction, session=s)

        page_size = 5
        named_bindings['limit'] = page_size
        named_bindings['offset'] = GQLCursor(before.end_cursor)
        first_page = await ds.runQuery(
            GQLQuery(query_string, named_bindings=named_bindings), session=s)
        assert len(first_page.entity_results) == page_size

        named_bindings['offset'] = GQLCursor(first_page.end_cursor)
        second_page = await ds.runQuery(
            GQLQuery(query_string, named_bindings=named_bindings), session=s)
        assert len(second_page.entity_results) == insertion_count - page_size


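# A minimal sketch factoring the cursor-based pagination pattern above into
# a reusable helper; `fetch_page` is a hypothetical name, while the
# GQLQuery/GQLCursor usage is exactly as in the test.
async def fetch_page(ds: Datastore, query_string: str, value: int,
                     page_size: int, cursor: str):
    # bind the previous batch's end_cursor as the OFFSET for the next page
    named_bindings = {'value': value, 'limit': page_size,
                      'offset': GQLCursor(cursor)}
    return await ds.runQuery(GQLQuery(query_string,
                                      named_bindings=named_bindings))

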
async def test_datastore_export(creds: str, project: str,
                                export_bucket_name: str) -> None:
    kind = 'PublicTestDatastoreExportModel'

    rand_uuid = str(uuid.uuid4())

    async with aiohttp.ClientSession(conn_timeout=10, read_timeout=10) as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        await ds.insert(Key(project, [PathElement(kind)]),
                        properties={'rand_str': rand_uuid})

        operation = await ds.export(export_bucket_name, kinds=[kind])

        count = 0
        while (count < 10 and operation
               and operation.metadata['common']['state'] == 'PROCESSING'):
            await asyncio.sleep(10)
            operation = await ds.get_datastore_operation(operation.name)
            count += 1
        assert operation.metadata['common']['state'] == 'SUCCESSFUL'

        prefix_len = len(f'gs://{export_bucket_name}/')
        export_path = operation.metadata['outputUrlPrefix'][prefix_len:]

        storage = Storage(service_file=creds, session=s)
        files = await storage.list_objects(export_bucket_name,
                                           params={'prefix': export_path})
        for file in files['items']:
            await storage.delete(export_bucket_name, file['name'])


async def test_insert_value_object(creds: str, kind: str,
                                   project: str) -> None:
    key = Key(project, [PathElement(kind)])

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        properties = {'value': Value(30, exclude_from_indexes=True)}
        insert_result = await ds.insert(key, properties)
        assert len(insert_result['mutationResults']) == 1


async def test_query_with_distinct_on(creds: str, kind: str,
                                      project: str) -> None:
    keys1 = [Key(project, [PathElement(kind)]) for _ in range(3)]
    keys2 = [Key(project, [PathElement(kind)]) for _ in range(3)]

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)

        # setup test data
        allocatedKeys1 = await ds.allocateIds(keys1, session=s)
        allocatedKeys2 = await ds.allocateIds(keys2, session=s)
        for key1 in allocatedKeys1:
            await ds.insert(key1, {'dist_value': 11}, s)
        for key2 in allocatedKeys2:
            await ds.insert(key2, {'dist_value': 22}, s)

        query = Query(kind=kind, limit=10, distinct_on=['dist_value'])
        result = await ds.runQuery(query, session=s)
        assert len(result.entity_results) == 2

        # clean up test data
        for key1 in allocatedKeys1:
            await ds.delete(key1, s)
        for key2 in allocatedKeys2:
            await ds.delete(key2, s)


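# A minimal sketch, assuming PropertyOrder and Direction are exported by
# gcloud.aio.datastore and that Query accepts an `order` argument (as in
# the Datastore REST API's Query message); `make_ordered_query` is a
# hypothetical helper.
from gcloud.aio.datastore import Direction, PropertyOrder


def make_ordered_query(kind: str) -> Query:
    # highest dist_value first, alongside the distinct_on usage above
    return Query(kind=kind, limit=10,
                 order=[PropertyOrder('dist_value',
                                      direction=Direction.DESCENDING)])

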
async def test_query_with_value_projection(creds: str, kind: str,
                                           project: str) -> None:
    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)

        # setup test data
        await ds.insert(Key(project, [PathElement(kind)]), {'value': 30}, s)

        projection = [Projection.from_repr({'property': {'name': 'value'}})]
        query = Query(kind=kind, limit=1, projection=projection)
        result = await ds.runQuery(query, session=s)
        assert result.entity_result_type.value == 'PROJECTION'

        # clean up test data
        await ds.delete(result.entity_results[0].entity.key, s)


async def test_geo_point_value(creds: str, kind: str, project: str) -> None:
    key = Key(project, [PathElement(kind)])

    async with Session(timeout=10) as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        allocatedKeys = await ds.allocateIds([key], session=s)
        await ds.reserveIds(allocatedKeys, session=s)

        props_insert = {'location': LatLng(49.2827, 123.1207)}
        await ds.insert(allocatedKeys[0], props_insert, session=s)
        actual = await ds.lookup([allocatedKeys[0]], session=s)
        assert actual['found'][0].entity.properties == props_insert


async def test_mutation_result(creds: str, kind: str, project: str) -> None:
    key = Key(project, [PathElement(kind)])

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)

        insert_result = await ds.insert(key, {'value': 12})
        assert len(insert_result['mutationResults']) == 1
        saved_key = insert_result['mutationResults'][0].key
        assert saved_key is not None

        update_result = await ds.update(saved_key, {'value': 83})
        assert len(update_result['mutationResults']) == 1
        assert update_result['mutationResults'][0].key is None

        delete_result = await ds.delete(saved_key)
        assert len(delete_result['mutationResults']) == 1
        assert delete_result['mutationResults'][0].key is None


async def test_query_with_key_projection(creds: str, kind: str,
                                         project: str) -> None:
    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)

        # setup test data
        await ds.insert(Key(project, [PathElement(kind)]), {'value': 30}, s)

        property_filter = PropertyFilter(
            prop='value', operator=PropertyFilterOperator.EQUAL,
            value=Value(30))
        projection = [Projection.from_repr({'property': {'name': '__key__'}})]
        query = Query(kind=kind, query_filter=Filter(property_filter),
                      limit=1, projection=projection)
        result = await ds.runQuery(query, session=s)
        assert result.entity_results[0].entity.properties == {}
        assert result.entity_result_type.value == 'KEY_ONLY'

        # clean up test data
        await ds.delete(result.entity_results[0].entity.key, s)


async def test_transaction(creds: str, kind: str, project: str) -> None:
    key = Key(project, [PathElement(kind, name=f'test_record_{uuid.uuid4()}')])

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)

        transaction = await ds.beginTransaction(session=s)
        actual = await ds.lookup([key], transaction=transaction, session=s)
        assert len(actual['missing']) == 1

        mutations = [
            ds.make_mutation(Operation.INSERT, key,
                             properties={'animal': 'three-toed sloth'}),
            ds.make_mutation(Operation.UPDATE, key,
                             properties={'animal': 'aardvark'}),
        ]
        await ds.commit(mutations, transaction=transaction, session=s)

        actual = await ds.lookup([key], session=s)
        assert actual['found'][0].entity.properties == {'animal': 'aardvark'}


def key() -> Key:
    path = PathElement(kind='my-kind', name='path-name')
    return Key(project='my-project', path=[path], namespace='my-namespace')


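# A brief usage sketch for the helper above: a hypothetical check that a
# Key survives the round trip through its REST representation, assuming
# gcloud-aio-datastore's Key.to_repr()/Key.from_repr().
def test_key_round_trip() -> None:
    k = key()
    assert k == Key.from_repr(k.to_repr())

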
async def test_table_load_copy(creds: str, dataset: str, project: str,
                               export_bucket_name: str) -> None:
    # pylint: disable=too-many-locals
    # N.B. this test relies on Datastore.export -- see `test_datastore_export`
    # in the `gcloud-aio-datastore` smoke tests.
    kind = 'PublicTestDatastoreExportModel'

    rand_uuid = str(uuid.uuid4())

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        await ds.insert(Key(project, [PathElement(kind)]),
                        properties={'rand_str': rand_uuid})

        operation = await ds.export(export_bucket_name, kinds=[kind])

        count = 0
        while (count < 10 and operation
               and operation.metadata['common']['state'] == 'PROCESSING'):
            await sleep(10)
            operation = await ds.get_datastore_operation(operation.name)
            count += 1
        assert operation.metadata['common']['state'] == 'SUCCESSFUL'
        # END: copy from `test_datastore_export`

        uuid_ = str(uuid.uuid4()).replace('-', '_')
        backup_entity_table = f'public_test_backup_entity_{uuid_}'
        copy_entity_table = f'{backup_entity_table}_copy'

        t = Table(dataset, backup_entity_table, project=project,
                  service_file=creds, session=s)
        gs_prefix = operation.metadata['outputUrlPrefix']
        gs_file = (f'{gs_prefix}/all_namespaces/kind_{kind}/'
                   f'all_namespaces_kind_{kind}.export_metadata')
        await t.insert_via_load([gs_file],
                                source_format=SourceFormat.DATASTORE_BACKUP)
        await sleep(10)

        source_table = await t.get()
        assert int(source_table['numRows']) > 0

        await t.insert_via_copy(project, dataset, copy_entity_table)
        await sleep(10)

        t1 = Table(dataset, copy_entity_table, project=project,
                   service_file=creds, session=s)
        copy_table = await t1.get()
        assert copy_table['numRows'] == source_table['numRows']

        # delete the backup and copy tables
        await t.delete()
        await t1.delete()

        # delete the export files in Google Storage
        # TODO: configure the bucket with auto-deletion
        prefix_len = len(f'gs://{export_bucket_name}/')
        export_path = operation.metadata['outputUrlPrefix'][prefix_len:]
        storage = Storage(service_file=creds, session=s)
        files = await storage.list_objects(export_bucket_name,
                                           params={'prefix': export_path})
        for file in files['items']:
            await storage.delete(export_bucket_name, file['name'])


async def test_table_load_copy(  # pylint: disable=too-many-locals
        creds: str, dataset: str, project: str, export_bucket_name: str,
        backup_entity_table: str, copy_entity_table: str) -> None:
    kind = 'PublicTestDatastoreExportModel'

    rand_uuid = str(uuid.uuid4())

    async with Session() as s:
        ds = Datastore(project=project, service_file=creds, session=s)
        await ds.insert(Key(project, [PathElement(kind)]),
                        properties={'rand_str': rand_uuid})

        operation = await ds.export(export_bucket_name, kinds=[kind])

        count = 0
        while (count < 10 and operation
               and operation.metadata['common']['state'] == 'PROCESSING'):
            await sleep(10)
            operation = await ds.get_datastore_operation(operation.name)
            count += 1
        assert operation.metadata['common']['state'] == 'SUCCESSFUL'

        t = Table(dataset, backup_entity_table, project=project,
                  service_file=creds, session=s)
        gs_prefix = operation.metadata['outputUrlPrefix']
        gs_file = (f'{gs_prefix}/all_namespaces/kind_{kind}/'
                   f'all_namespaces_kind_{kind}.export_metadata')
        await t.load([gs_file])
        await sleep(10)

        source_table = await t.get()
        assert int(source_table['numRows']) > 0

        await t.copy(project, dataset, copy_entity_table)
        await sleep(10)

        t1 = Table(dataset, copy_entity_table, project=project,
                   service_file=creds, session=s)
        copy_table = await t1.get()
        assert copy_table['numRows'] == source_table['numRows']

        # delete the backup and copy tables
        await t.delete()
        await t1.delete()

        # delete the export files in Google Storage
        prefix_len = len(f'gs://{export_bucket_name}/')
        export_path = operation.metadata['outputUrlPrefix'][prefix_len:]
        storage = Storage(service_file=creds, session=s)
        files = await storage.list_objects(export_bucket_name,
                                           params={'prefix': export_path})
        for file in files['items']:
            await storage.delete(export_bucket_name, file['name'])
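# A minimal sketch factoring out the export-polling loop repeated in the
# export tests above; `wait_for_export` is a hypothetical helper built on
# the same Datastore.get_datastore_operation call and the
# metadata['common']['state'] shape the tests already assert on.
async def wait_for_export(ds: Datastore, operation, max_polls: int = 10,
                          poll_interval: float = 10.0):
    # poll until the operation leaves the PROCESSING state or we give up
    count = 0
    while (count < max_polls and operation
           and operation.metadata['common']['state'] == 'PROCESSING'):
        await sleep(poll_interval)
        operation = await ds.get_datastore_operation(operation.name)
        count += 1
    return operation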