def _benchmark_READ_BULK(model_class):
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        def fn():
            model_class.get(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.READ_BULK, seconds, CRUD_ITERATIONS, INSTANCES_TO_CREATE


def _benchmark_READ_MULTI_ROW(model_class):
    with create_rows(model_class, CRUD_BATCH_SIZE) as (rows, keys):
        def fn():
            model_class.get(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.READ_MULTI_ROW, seconds, CRUD_ITERATIONS, CRUD_BATCH_SIZE


def _benchmark_DELETE_SINGLE_ROW(model_class):
    with create_rows(model_class, CRUD_ITERATIONS) as (rows, keys):
        def fn():
            # Each iteration deletes one freshly-created row.
            row = rows.pop(0)
            model_class.delete([row])
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.DELETE_SINGLE_ROW, seconds, CRUD_ITERATIONS, CRUD_ITERATIONS


def _benchmark_DELETE_BULK_ROW(model_class):
    with create_rows(model_class, CRUD_ITERATIONS * CRUD_ITERATIONS) as (rows, keys):
        def fn():
            # Each iteration deletes a batch of CRUD_ITERATIONS rows.
            rows_slice = [rows.pop(0) for _ in range(CRUD_ITERATIONS)]
            model_class.delete(rows_slice)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.DELETE_BULK_ROW, seconds, CRUD_ITERATIONS, CRUD_ITERATIONS


def _benchmark_LAZY_READ_BULK(model_class):
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        if issubclass(model_class, ndb.Model):
            # Convert ndb keys to old-style db keys before the lazy read.
            keys = [key.to_old_key() for key in keys]

        def fn():
            model_class.get_lazy(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.LAZY_READ_BULK, seconds, CRUD_ITERATIONS, INSTANCES_TO_CREATE


def _benchmark_LAZY_READ_MULTI_ROW(model_class):
    with create_rows(model_class, CRUD_BATCH_SIZE) as (rows, keys):
        if issubclass(model_class, ndb.Model):
            # Convert ndb keys to old-style db keys before the lazy read.
            keys = [key.to_old_key() for key in keys]

        def fn():
            model_class.get_lazy(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.LAZY_READ_MULTI_ROW, seconds, CRUD_ITERATIONS, CRUD_BATCH_SIZE


def _benchmark_UPDATE_BULK_ROW(model_class):
    with create_rows(model_class, CRUD_ITERATIONS) as (rows, keys):
        def fn():
            # Overwrite every property on every row, then write them back in bulk.
            for row in rows:
                for prop in row._properties.keys():
                    setattr(row, prop, str(uuid.uuid4()))
            model_class.put(rows)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.UPDATE_BULK_ROW, seconds, CRUD_ITERATIONS, CRUD_ITERATIONS


def _benchmark_READ_MISSING_BULK(model_class):
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        # Delete the rows, then wait until the deletes are visible so every
        # subsequent get() is a read-miss.
        model_class.delete(rows)
        while any(model_class.get(keys)):
            time.sleep(0.1)

        def fn():
            model_class.get(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.READ_MISSING_BULK, seconds, CRUD_ITERATIONS, INSTANCES_TO_CREATE


def _benchmark_LAZY_READ_MISSING_BULK(model_class):
    with create_rows(model_class, INSTANCES_TO_CREATE) as (rows, keys):
        # Delete the rows, then wait until the deletes are visible so every
        # subsequent lazy read is a read-miss.
        model_class.delete(rows)
        while any(model_class.get(keys)):
            time.sleep(0.1)

        if issubclass(model_class, ndb.Model):
            # Convert ndb keys to old-style db keys before the lazy read.
            keys = [key.to_old_key() for key in keys]

        def fn():
            model_class.get_lazy(keys)
        seconds = benchmark_fn(fn, CRUD_ITERATIONS)
        return CrudTestGroups.LAZY_READ_MISSING_BULK, seconds, CRUD_ITERATIONS, INSTANCES_TO_CREATE
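

# For reference, the helpers above all assume the same timing contract:
# benchmark_fn(fn, iterations) calls the zero-argument callable `fn`
# `iterations` times and returns the total elapsed wall-clock seconds.
# The project's real benchmark_fn is defined elsewhere in this module;
# `_example_benchmark_fn` below is only an illustrative, hedged sketch of
# that assumed signature, not the actual implementation.
def _example_benchmark_fn(fn, iterations):
    import timeit
    # timeit.timeit invokes `fn` `iterations` times and returns seconds.
    return timeit.timeit(fn, number=iterations)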