def _benchmark_READ_SINGLE_ROW(model_class):
    """Benchmark fetching one row by key via model_class.get().

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        def read_once():
            model_class.get([key])

        elapsed = benchmark_fn(read_once, CRUD_ITERATIONS)
    return CrudTestGroups.READ_SINGLE_ROW, elapsed, CRUD_ITERATIONS, 1
def _benchmark_READ_BULK(model_class):
    """Benchmark one bulk get() over INSTANCES_TO_CREATE keys per iteration.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        def read_bulk():
            model_class.get(keys)

        elapsed = benchmark_fn(read_bulk, CRUD_ITERATIONS)
    return (CrudTestGroups.READ_BULK, elapsed,
            CRUD_ITERATIONS, INSTANCES_TO_CREATE)
def _benchmark_READ_MULTI_ROW(model_class):
    """Benchmark one get() over a CRUD_BATCH_SIZE batch of keys per iteration.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, CRUD_BATCH_SIZE) as (rows, keys):
        def read_batch():
            model_class.get(keys)

        elapsed = benchmark_fn(read_batch, CRUD_ITERATIONS)
    return (CrudTestGroups.READ_MULTI_ROW, elapsed,
            CRUD_ITERATIONS, CRUD_BATCH_SIZE)
def _benchmark_MODEL_TO_PROTOBUF_STRING(model_class):
    """Benchmark converting a model row to a proto and serializing it.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        def serialize_model():
            row.convert_to_proto().SerializeToString()

        elapsed = benchmark_fn(serialize_model, SERIALIZATION_ITERATIONS)
    return (SerializationTestGroups.MODEL_TO_PROTOBUF_STRING,
            elapsed, SERIALIZATION_ITERATIONS, 1)
def _benchmark_DELETE_SINGLE_ROW(model_class):
    """Benchmark deleting one pre-created row per iteration.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, CRUD_ITERATIONS) as (rows, keys):
        def delete_one():
            # Consume a distinct pre-created row on every call so each
            # iteration performs a real delete.
            victim = rows.pop(0)
            model_class.delete([victim])

        elapsed = benchmark_fn(delete_one, CRUD_ITERATIONS)
    return (CrudTestGroups.DELETE_SINGLE_ROW, elapsed,
            CRUD_ITERATIONS, CRUD_ITERATIONS)
def _benchmark_LAZY_READ_SINGLE_ROW(model_class):
    """Benchmark a lazy single-key read via model_class.get_lazy().

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        # For ndb models the key is converted to an old-style (db) key;
        # presumably get_lazy() only accepts the old key type — confirm.
        if issubclass(model_class, ndb.Model):
            key = key.to_old_key()

        def lazy_read():
            model_class.get_lazy([key])

        elapsed = benchmark_fn(lazy_read, CRUD_ITERATIONS)
    return CrudTestGroups.LAZY_READ_SINGLE_ROW, elapsed, CRUD_ITERATIONS, 1
def _benchmark_DELETE_BULK_ROW(model_class):
    """Benchmark deleting CRUD_ITERATIONS rows in a single bulk call.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    # Pre-create enough rows for every iteration to delete a full batch.
    with create_rows(model_class,
                     CRUD_ITERATIONS * CRUD_ITERATIONS) as (rows, keys):
        def delete_batch():
            batch = [rows.pop(0) for _ in range(CRUD_ITERATIONS)]
            model_class.delete(batch)

        elapsed = benchmark_fn(delete_batch, CRUD_ITERATIONS)
    return (CrudTestGroups.DELETE_BULK_ROW, elapsed,
            CRUD_ITERATIONS, CRUD_ITERATIONS)
def _benchmark_LAZY_READ_BULK(model_class):
    """Benchmark a lazy bulk read over INSTANCES_TO_CREATE keys.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        # ndb keys are converted to old-style (db) keys before get_lazy().
        if issubclass(model_class, ndb.Model):
            keys = [key.to_old_key() for key in keys]

        def lazy_read_bulk():
            model_class.get_lazy(keys)

        elapsed = benchmark_fn(lazy_read_bulk, CRUD_ITERATIONS)
    return (CrudTestGroups.LAZY_READ_BULK, elapsed,
            CRUD_ITERATIONS, INSTANCES_TO_CREATE)
def _benchmark_UPDATE_SINGLE_ROW(model_class):
    """Benchmark rewriting every property of one row and re-putting it.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        def fn():
            # `prop` instead of `property` to avoid shadowing the builtin.
            for prop in row._properties.keys():
                setattr(row, prop, str(uuid.uuid4()))
            model_class.put([row])

        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    return CrudTestGroups.UPDATE_SINGLE_ROW, seconds, CRUD_ITERATIONS, 1
def _benchmark_LAZY_READ_MULTI_ROW(model_class):
    """Benchmark a lazy read over a CRUD_BATCH_SIZE batch of keys.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, CRUD_BATCH_SIZE) as (rows, keys):
        # ndb keys are converted to old-style (db) keys before get_lazy().
        if issubclass(model_class, ndb.Model):
            keys = [key.to_old_key() for key in keys]

        def lazy_read_batch():
            model_class.get_lazy(keys)

        elapsed = benchmark_fn(lazy_read_batch, CRUD_ITERATIONS)
    return (CrudTestGroups.LAZY_READ_MULTI_ROW, elapsed,
            CRUD_ITERATIONS, CRUD_BATCH_SIZE)
def _benchmark_UPDATE_MULTI_ROW(model_class):
    """Benchmark rewriting every property of a batch and bulk-putting it.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, CRUD_BATCH_SIZE) as (rows, keys):
        def fn():
            for row in rows:
                # `prop` instead of `property` to avoid shadowing the builtin.
                for prop in row._properties.keys():
                    setattr(row, prop, str(uuid.uuid4()))
            model_class.put(rows)

        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    return (CrudTestGroups.UPDATE_MULTI_ROW, seconds,
            CRUD_ITERATIONS, CRUD_BATCH_SIZE)
def _benchmark_PROTOBUF_STRING_TO_ENTITY_PROTO(model_class):
    """Benchmark parsing a serialized proto and converting it to an Entity.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        # Serialize once up front; only the parse/convert path is timed.
        serialized = row.convert_to_proto().SerializeToString()

        def parse_once():
            proto = entity_pb.EntityProto(serialized)
            datastore.Entity.FromPb(proto)

        elapsed = benchmark_fn(parse_once, SERIALIZATION_ITERATIONS)
    return (SerializationTestGroups.PROTOBUF_STRING_TO_ENTITY_PROTO,
            elapsed, SERIALIZATION_ITERATIONS, 1)
def _benchmark_READ_MISSING_BULK(model_class):
    """Benchmark bulk get() calls where every key misses (rows pre-deleted).

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        # Delete the rows to force a read-miss
        model_class.delete(rows)
        # any() replaces len(filter(lambda x: x, ...)) > 0: same truthiness
        # test, short-circuits on the first hit, and works on Python 2 and 3
        # (filter() returns an iterator with no len() on Python 3).
        while any(model_class.get(keys)):
            time.sleep(0.1)

        def fn():
            model_class.get(keys)

        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    return (CrudTestGroups.READ_MISSING_BULK, seconds,
            CRUD_ITERATIONS, INSTANCES_TO_CREATE)
def _benchmark_ENTITY_TO_PROTOBUF_STRING(model_class):
    """Benchmark serializing a datastore Entity to a protobuf string.

    Returns (test group, elapsed seconds, iterations, rows per iteration);
    the last three are None when the model cannot convert to an Entity.
    """
    with create_row(model_class) as (row, key):
        try:
            entity = row.convert_to_entity()
        except NotImplementedError:
            # Model does not support Entity conversion; report no result.
            return (SerializationTestGroups.ENTITY_TO_PROTOBUF_STRING,
                    None, None, None)

        def serialize_once():
            entity.ToPb().SerializeToString()

        elapsed = benchmark_fn(serialize_once, SERIALIZATION_ITERATIONS)
    return (SerializationTestGroups.ENTITY_TO_PROTOBUF_STRING,
            elapsed, SERIALIZATION_ITERATIONS, 1)
def _benchmark_SINGLE_LAZY_PROPERTY_ACCESS_TIMES_PROTOBUF_TO_MODEL(
        model_class):
    """Benchmark lazy deserialization plus a single property access.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        # Serialize once up front; only parse + lazy access is timed.
        serialized = row.convert_to_proto().SerializeToString()

        def lazy_access():
            proto = entity_pb.EntityProto(serialized)
            lazy_entity = datastore_lazy.LazyEntity(proto)
            # Touch exactly one property so the lazy wrapper does real work.
            len(lazy_entity.prop_0)

        elapsed = benchmark_fn(lazy_access, SERIALIZATION_ITERATIONS)
    return (SerializationTestGroups
            .SINGLE_LAZY_PROPERTY_ACCESS_TIMES_PROTOBUF_TO_MODEL,
            elapsed, SERIALIZATION_ITERATIONS, 1)
def _benchmark_LAZY_READ_MISSING_BULK(model_class):
    """Benchmark lazy bulk reads where every key misses (rows pre-deleted).

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        # Delete the rows to force a read-miss
        model_class.delete(rows)
        # any() replaces len(filter(lambda x: x, ...)) > 0: same truthiness
        # test, short-circuits on the first hit, and works on Python 2 and 3
        # (filter() returns an iterator with no len() on Python 3).
        while any(model_class.get(keys)):
            time.sleep(0.1)
        # ndb keys are converted to old-style (db) keys before get_lazy().
        if issubclass(model_class, ndb.Model):
            keys = [key.to_old_key() for key in keys]

        def fn():
            model_class.get_lazy(keys)

        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    return (CrudTestGroups.LAZY_READ_MISSING_BULK, seconds,
            CRUD_ITERATIONS, INSTANCES_TO_CREATE)
def _benchmark_CREATE_SINGLE_ROW(model_class):
    """Benchmark creating and putting one fully-populated row per iteration.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    rows = []

    def fn():
        row = model_class()
        # `prop` instead of `property` to avoid shadowing the builtin.
        for prop in row._properties.keys():
            setattr(row, prop, str(uuid.uuid4()))
        model_class.put([row])
        # Track created rows so they can be deleted after the benchmark.
        rows.append(row)

    seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    # Cleanup after the test
    model_class.delete(rows)
    return CrudTestGroups.CREATE_SINGLE_ROW, seconds, CRUD_ITERATIONS, 1
def _benchmark_CREATE_BULK_ROW(model_class):
    """Benchmark creating CRUD_ITERATIONS rows and putting them in one call.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    rows = []

    def fn():
        local_rows = []
        for _ in range(CRUD_ITERATIONS):
            row = model_class()
            # `prop` instead of `property` to avoid shadowing the builtin.
            for prop in row._properties.keys():
                setattr(row, prop, str(uuid.uuid4()))
            local_rows.append(row)
        model_class.put(local_rows)
        # Track created rows so they can be deleted after the benchmark.
        rows.extend(local_rows)

    seconds = benchmark_fn(fn, CRUD_ITERATIONS)
    # Cleanup after the test
    model_class.delete(rows)
    return (CrudTestGroups.CREATE_BULK_ROW, seconds,
            CRUD_ITERATIONS, CRUD_ITERATIONS)
def _benchmark_MULTI_PROPERTY_ACCESS_TIMES_PROTOBUF_TO_MODEL(model_class):
    """Benchmark binary deserialization plus access to all ten properties.

    Returns (test group, elapsed seconds, iterations, rows per iteration).
    """
    with create_row(model_class) as (row, key):
        # Serialize once up front; only deserialization + access is timed.
        entity_proto = row.convert_to_proto()
        serialized = entity_proto.SerializeToString()

        def fn():
            deserialized = model_class.convert_from_binary(serialized)
            # Touch every property explicitly (kept unrolled so the timed
            # function's per-access cost is not distorted by getattr or
            # string formatting overhead).
            len(deserialized.prop_0)
            len(deserialized.prop_1)
            len(deserialized.prop_2)
            len(deserialized.prop_3)
            len(deserialized.prop_4)
            len(deserialized.prop_5)
            len(deserialized.prop_6)
            len(deserialized.prop_7)
            len(deserialized.prop_8)
            len(deserialized.prop_9)

        seconds = benchmark_fn(fn, SERIALIZATION_ITERATIONS)
    return (SerializationTestGroups.MULTI_PROPERTY_ACCESS_TIMES_PROTOBUF_TO_MODEL,
            seconds, SERIALIZATION_ITERATIONS, 1)