def main():
    """Benchmark append, iterate, getitem and setitem operations on an
    ``IndexedDataContainerTable`` and print the timing of each.

    Relies on module-level fixtures defined elsewhere in this script:
    ``filename``, ``temp_dir``, ``n``, ``data_container``,
    ``data_container_half``, the ``bench`` timing helper and the
    per-operation helpers (``append``, ``iteration``, ``getitem_access``,
    ``setitem``, ``create_table``).
    """
    try:
        # FIX: banner previously said "IndexDataContainerTable"; the class
        # under test is IndexedDataContainerTable.
        print("""
        Benchmarking various operations on the IndexedDataContainerTable.
        """)
        # Append fully populated containers into a fresh file.
        with closing(tables.open_file(filename, mode='w')) as handle:
            root = handle.root
            table = IndexedDataContainerTable(root, 'my_data_table')
            print(
                "Append {}:".format(n),
                bench(lambda: append(handle, 1000, data_container)))
        # Append half-masked containers; table pre-sized to n rows.
        with closing(tables.open_file(filename, mode='w')) as handle:
            root = handle.root
            table = IndexedDataContainerTable(
                root, 'my_data_table', expected_number=n)
            print(
                "Append {} masked:".format(n),
                bench(lambda: append(handle, 1000, data_container_half)))
        uids = create_table(filename)
        # Random sample of 300 uids for the item-access benchmarks.
        sample = random.sample(uids, 300)
        # Read-only benchmarks against the populated file.
        with closing(tables.open_file(filename, mode='r')) as handle:
            root = handle.root
            table = IndexedDataContainerTable(
                root, 'my_data_table', expected_number=n)
            print("Iterate {}:".format(n), bench(lambda: iteration(table)))
            print(
                'Getitem sample of 300:',
                bench(lambda: getitem_access(table, sample)))
        # In-place update benchmark (file reopened for appending).
        with closing(tables.open_file(filename, mode='a')) as handle:
            root = handle.root
            table = IndexedDataContainerTable(
                root, 'my_data_table', expected_number=n)
            print(
                "Update item of 300 sample:",
                bench(lambda: setitem(table, data_container_half, sample)))
    finally:
        # Always remove the scratch directory, even if a benchmark fails.
        shutil.rmtree(temp_dir)
def main():
    """Compare a plain ``dict`` against a ``DataContainer`` on construction,
    iteration, getitem access and setitem with CUBA keys, printing the
    timing of each operation and an equality cross-check per section.
    """
    def report(label, operation):
        # Time *operation* with the module-level ``bench`` helper and
        # print the result next to its label.
        print(label, bench(operation))

    print("""
    Benchmarking various operations between different data containers

    .. note: Only the relative time taken for each type of
             container within a section is comparable.
    """)
    print('Initialization:')
    report("dict:", lambda: dict(dict_data))
    report("DataContainer:", lambda: DataContainer(dict_data))
    print(
        "dict == DataContainer",
        dict(dict_data) == DataContainer(dict_data))
    print()
    print('Iterations:')
    report("dict:", lambda: iteration(dict_data))
    report("DataContainer:", lambda: iteration(data_container))
    print()
    print('getitem access:')
    report("dict:", lambda: getitem_access(dict_data, indices))
    report("DataContainer:", lambda: getitem_access(data_container, indices))
    print(
        "dict == DataContainer",
        getitem_access(dict_data, indices) == getitem_access(data_container, indices))  # noqa
    print()
    print('setitem with CUBA keys:')
    report("dict:", lambda: setitem_with_CUBA_keys(dict_data))
    report("DataContainer:", lambda: setitem_with_CUBA_keys(data_container))
    print(
        "dict == DataContainer",
        setitem_with_CUBA_keys(dict_data) == setitem_with_CUBA_keys(data_container))  # noqa
def main():
    """Benchmark append, set_item, iterate, getitem, setitem and delitem
    operations on a ``DataContainerTable`` and print the timing of each.

    Relies on module-level fixtures defined elsewhere in this script:
    ``filename``, ``temp_dir``, ``n``, ``data_container``,
    ``data_container_half``, the ``bench`` timing helper and the
    per-operation helpers (``append``, ``set_item``, ``iteration``,
    ``iteration_with_sequence``, ``getitem_access``, ``setitem``,
    ``delitem``, ``create_table``).
    """
    try:
        print("""
        Benchmarking various operations on the DataContainerTable.
        """)
        # Append fully populated containers into a fresh file.
        with closing(tables.open_file(filename, mode='w')) as handle:
            root = handle.root
            table = DataContainerTable(root, 'my_data_table')
            print(
                "Append {}:".format(n),
                bench(lambda: append(handle, 1000, data_container)))
        # Append half-masked containers into a fresh file.
        with closing(tables.open_file(filename, mode='w')) as handle:
            root = handle.root
            table = DataContainerTable(root, 'my_data_table')
            print(
                "Append {} masked:".format(n),
                bench(lambda: append(handle, 1000, data_container_half)))
        uids = [uuid.uuid4() for _ in range(n)]
        # Set-item benchmarks are destructive, so run once (repeat=1).
        with closing(tables.open_file(filename, mode='w')) as handle:
            print(
                "Set item {}:".format(n),
                bench(
                    lambda: set_item(handle, uids, data_container),
                    repeat=1, adjust_runs=False))
        with closing(tables.open_file(filename, mode='w')) as handle:
            root = handle.root
            table = DataContainerTable(root, 'my_data_table')
            print(
                "Set item {} masked:".format(n),
                bench(
                    # BUG FIX: this benchmark previously timed the fully
                    # populated ``data_container``, duplicating the
                    # unmasked measurement above; the masked variant must
                    # use ``data_container_half`` (as the append
                    # benchmarks do).
                    lambda: set_item(handle, uids, data_container_half),
                    repeat=1, adjust_runs=False))
        uids = create_table(filename)
        # Random sample of 300 uids for the item-access benchmarks.
        sample = random.sample(uids, 300)
        # Read-only benchmarks against the populated file.
        with closing(tables.open_file(filename, mode='r')) as handle:
            root = handle.root
            table = DataContainerTable(root, 'my_data_table')
            print("Iterate {}:".format(n), bench(lambda: iteration(table)))
            print(
                "IterSequence of 300:",
                bench(lambda: iteration_with_sequence(table, sample)))
            print(
                'Getitem sample of 300:',
                bench(lambda: getitem_access(table, sample)))
        # Mutating benchmarks (file reopened for appending).
        with closing(tables.open_file(filename, mode='a')) as handle:
            root = handle.root
            table = DataContainerTable(root, 'my_data_table')
            print(
                "Update item of 300 sample:",
                bench(lambda: setitem(table, data_container_half, sample)))
            # Deletion is destructive, so run once (repeat=1).
            print(
                "Delitem of 300 sample:",
                bench(
                    lambda: delitem(table, sample),
                    repeat=1, adjust_runs=False))
    finally:
        # Always remove the scratch directory, even if a benchmark fails.
        shutil.rmtree(temp_dir)
# NOTE(review): fragment — the enclosing function's header precedes this view
# and the final bench(...) call is truncated; its closing arguments live
# outside this chunk.
particles, state_data = get_particles(y_range)
# Total particle count across all containers, used for reporting only.
number_particles = sum(p.count_of(
    CUBA.PARTICLE) for p in particles)
number_time_steps = 10
SD = "DUMMY - TODO"  # placeholder value — TODO confirm intended state data
for test in run_wrapper_tests:
    # Fresh wrapper per test so runs don't share state.
    lammps_wrapper = lammps.LammpsWrapper(
        use_internal_interface=is_internal)
    configure_wrapper(lammps_wrapper, state_data, particles,
                      number_time_steps=number_time_steps)
    # repeat=1 / adjust_runs=False: each wrapper run is expensive and
    # stateful, so time a single execution.
    results = bench(lambda: run_test(test.method, lammps_wrapper),
                    repeat=1, adjust_runs=False)
    print(describe(test.name, number_particles, number_time_steps,
                   is_internal), results)

# test configuration
lammps_wrapper = lammps.LammpsWrapper(
    use_internal_interface=is_internal)
results = bench(lambda: configure_wrapper(lammps_wrapper, state_data,
                                          particles, number_time_steps),
# NOTE(review): these two methods belong to a context-manager class whose
# header precedes this view; the trailing __main__ block's last print(...)
# is truncated and continues outside this chunk.
    def __enter__(self):
        # Open the HDF5 CUDS file, add an empty "test" particle dataset and
        # hand that dataset to the ``with`` body.
        self._file = H5CUDS.open(self._filename)
        self._file.add_dataset(Particles("test"))
        return self._file.get_dataset("test")

    def __exit__(self, type, value, tb):
        # Close the file only if it was actually created on disk, then
        # remove the whole scratch directory.
        if os.path.exists(self._filename):
            self._file.close()
        shutil.rmtree(self.temp_dir)


if __name__ == '__main__':
    print(
        "create_file_with_particles:",
        bench(lambda: create_file_with_particles(), repeat=3))
    print(
        "create_file_with_id_particles:",
        bench(lambda: create_file_with_id_particles(), repeat=3))
    with Container() as pc:
        add_particles_to_container(pc)
        print(
            "iter_particles_in_container",
            bench(lambda: iter_particles_in_container(pc)))
    with Container() as pc:
        add_particles_to_container(pc)
        print(
            "update_coordinates_of_particles_in_container_using_iter",