def test_open_and_discover_file(self):
    """Round-trip a group hierarchy: write groups/datasets through the readdy
    io module, then re-open the same file and discover them via the io API."""
    fname = os.path.join(self.dir, "test_open_and_discover_file.h5")
    data = np.array([[2.222, 3, 4, 5], [3.3, 3, 3, 3]], dtype=np.float64)
    # Create a group with one double dataset and a nested subgroup with one
    # string dataset.
    with closing(io.File(fname, io.FileAction.CREATE)) as f:
        g = f.create_group("/my_super_group")
        subg = g.create_group("my_super_subgroup")
        g.write_double("doubleds", data)
        subg.write_string("stringds", u"jap")
    # Re-open read-write and walk the hierarchy starting from the root group,
    # checking that exactly the written groups/datasets are discovered.
    with closing(
            io.File(fname, io.FileAction.OPEN,
                    flag=io.FileFlag.READ_WRITE)) as f:
        root_group = f.get_root_group()
        sg = root_group.subgroups()
        np.testing.assert_equal(len(sg), 1)
        np.testing.assert_equal(sg[0], u"my_super_group")
        sub_group = root_group.get_subgroup(sg[0])
        np.testing.assert_equal(len(sub_group.data_sets()), 1)
        np.testing.assert_equal(sub_group.data_sets()[0], "doubleds")
        subsub_groups = sub_group.subgroups()
        np.testing.assert_equal(len(subsub_groups), 1)
        np.testing.assert_equal(subsub_groups[0], "my_super_subgroup")
        super_subgroup = sub_group.get_subgroup("my_super_subgroup")
        np.testing.assert_equal(len(super_subgroup.data_sets()), 1)
        np.testing.assert_equal(super_subgroup.data_sets()[0], "stringds")
def test_write_flat_trajectory(self):
    """Record a flat trajectory to file while one "A" particle is added per
    time step, then read it back with TrajectoryReader and check counts,
    times, and positions."""
    common.set_logging_level("error")
    traj_fname = os.path.join(self.dir, "flat_traj.h5")
    simulation = Simulation()
    simulation.set_kernel("SingleCPU")
    simulation.box_size = common.Vec(5, 5, 5)
    simulation.register_particle_type("A", 0.0, 0.0)

    def callback(_):
        # Invoked every time step by the n_particles observable: grows the
        # particle count by one "A" at the origin per step.
        simulation.add_particle("A", common.Vec(0, 0, 0))

    simulation.register_observable_n_particles(1, ["A"], callback)
    traj_handle = simulation.register_observable_flat_trajectory(1)
    with closing(
            io.File(traj_fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        traj_handle.enable_write_to_file(f, u"", int(3))
        simulation.run_scheme_readdy(True).configure(1).run(20)
    r = TrajectoryReader(traj_fname)
    trajectory_items = r[:]
    for idx, items in enumerate(trajectory_items):
        # One particle was added per step, so frame idx holds idx+1 entries.
        np.testing.assert_equal(len(items), idx + 1)
        for item in items:
            np.testing.assert_equal(item.t, idx)
            np.testing.assert_equal(item.position, np.array([.0, .0, .0]))
    common.set_logging_level("debug")
def test_particle_positions_observable(self):
    """Write the particle-positions observable to file and verify against the
    known particle-addition pattern via h5py."""
    fname = os.path.join(self.dir, "test_observables_particle_positions.h5")
    sim = Simulation()
    sim.set_kernel("SingleCPU")
    sim.box_size = common.Vec(13, 13, 13)
    sim.register_particle_type("A", .1, .1)
    sim.add_particle("A", common.Vec(0, 0, 0))
    # every time step, add one particle
    sim.register_observable_n_particles(
        1, ["A"],
        lambda n: sim.add_particle("A", common.Vec(1.5, 2.5, 3.5)))
    handle = sim.register_observable_particle_positions(1, [])
    n_timesteps = 19
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"particle_positions", int(3))
        sim.run_scheme_readdy(True).configure(0).run(n_timesteps)
        handle.flush()
    with h5py.File(fname, "r") as f2:
        data = f2["readdy/observables/particle_positions/data"][:]
        np.testing.assert_equal(len(data), n_timesteps + 1)
        for t, positions in enumerate(data):
            # we begin with two particles
            np.testing.assert_equal(len(positions), t + 2)
            # the initial particle stays at the origin ...
            np.testing.assert_equal(positions[0]["x"], 0)
            np.testing.assert_equal(positions[0]["y"], 0)
            np.testing.assert_equal(positions[0]["z"], 0)
            # ... and every later particle was added at (1.5, 2.5, 3.5)
            for i in range(1, len(positions)):
                np.testing.assert_equal(positions[i]["x"], 1.5)
                np.testing.assert_equal(positions[i]["y"], 2.5)
                np.testing.assert_equal(positions[i]["z"], 3.5)
    common.set_logging_level("warn")
def test_n_particles_observable(self):
    """Register two n_particles observables (typed and untyped), write both to
    the same file with different flush strides, and check the written counts
    against the values collected by the Python callbacks."""
    common.set_logging_level("warn")
    fname = os.path.join(self.dir, "test_observables_n_particles.h5")
    simulation = Simulation()
    simulation.set_kernel("SingleCPU")
    box_size = common.Vec(10, 10, 10)
    simulation.kbt = 2
    simulation.periodic_boundary = [True, True, True]
    simulation.box_size = box_size
    simulation.register_particle_type("A", .2, 1.)
    simulation.register_particle_type("B", .2, 1.)
    simulation.add_particle("A", common.Vec(-2.5, 0, 0))
    simulation.add_particle("B", common.Vec(0, 0, 0))
    n_time_steps = 50
    callback_n_particles_a_b = []
    callback_n_particles_all = []

    def callback_ab(value):
        # Records the (n_A, n_B) counts and adds one more "A" each step.
        callback_n_particles_a_b.append(value)
        simulation.add_particle("A", common.Vec(-1, -1, -1))

    def callback_all(hist):
        # Records the total count and adds one "A" and one "B" each step.
        callback_n_particles_all.append(hist)
        simulation.add_particle("A", common.Vec(-1, -1, -1))
        simulation.add_particle("B", common.Vec(-1, -1, -1))

    handle_a_b_particles = simulation.register_observable_n_particles(
        1, ["A", "B"], callback_ab)
    handle_all = simulation.register_observable_n_particles(
        1, [], callback_all)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle_a_b_particles.enable_write_to_file(f, u"n_a_b_particles",
                                                  int(3))
        handle_all.enable_write_to_file(f, u"n_particles", int(5))
        simulation.run(n_time_steps, 0.02)
        handle_all.flush()
        handle_a_b_particles.flush()
    with h5py.File(fname, "r") as f2:
        n_a_b_particles = f2["readdy/observables/n_a_b_particles/data"][:]
        n_particles = f2["readdy/observables/n_particles/data"][:]
        time_series = f2["readdy/observables/n_a_b_particles/time"]
        np.testing.assert_equal(time_series,
                                np.array(range(0, n_time_steps + 1)))
        for t in range(n_time_steps):
            np.testing.assert_equal(n_a_b_particles[t][0],
                                    callback_n_particles_a_b[t][0])
            np.testing.assert_equal(n_a_b_particles[t][1],
                                    callback_n_particles_a_b[t][1])
            np.testing.assert_equal(n_particles[t][0],
                                    callback_n_particles_all[t][0])
def test_readwrite_double_and_string(self):
    """Write a double dataset and a string dataset through the readdy io
    module and verify both independently via h5py."""
    fname = os.path.join(self.dir, "test_readwrite_double_and_string.h5")
    data = np.array([[2.222, 3, 4, 5], [3.3, 3, 3, 3]], dtype=np.float64)
    with closing(io.File(fname, io.FileAction.CREATE)) as f:
        f.write_double("/sowas", data)
        f.write_string("/maeh", u"hierstehtwas")
    with h5py.File(fname, "r") as f2:
        np.testing.assert_equal(f2.get('/sowas'), data)
        # ds[()] reads a scalar dataset; Dataset.value was deprecated and
        # removed in h5py 3.0. The stored string comes back as bytes.
        np.testing.assert_equal(
            f2.get("/maeh")[()].decode(), u"hierstehtwas")
def run(self, time_steps, out_file):
    """Set up a linear 50-bead "Topology A" chain with bond/angle potentials
    plus decay and split topology reactions, run it while recording a flat
    trajectory to out_file, and print the resulting topology count.

    :param time_steps: number of integration steps to run
    :param out_file: path of the HDF5 trajectory file to create (overwritten)
    """
    sim = Simulation()
    sim.set_kernel(self.kernel)
    sim.box_size = common.Vec(60, 20, 20)
    sim.periodic_boundary = [True, True, True]
    # typeid_b is needed later to parameterize the decay reaction.
    typeid_b = sim.register_particle_type("B", 1.0, 1.0,
                                          ParticleTypeFlavor.NORMAL)
    sim.register_particle_type("Topology A", .5, .5,
                               ParticleTypeFlavor.TOPOLOGY)
    sim.register_potential_harmonic_repulsion("Topology A", "Topology A", 10)
    sim.register_potential_harmonic_repulsion("Topology A", "B", 10)
    sim.register_potential_harmonic_repulsion("B", "B", 10)
    sim.configure_topology_bond_potential("Topology A", "Topology A", 10, 1.)
    sim.configure_topology_angle_potential("Topology A", "Topology A",
                                           "Topology A", 10, np.pi)
    # sim.configure_topology_dihedral_potential("Topology A", "Topology A", "Topology A", "Topology A", 1, 1, -np.pi)
    n_elements = 50.
    # Place the chain beads 1 unit apart along x, centered around the origin.
    particles = [
        sim.create_topology_particle("Topology A",
                                     common.Vec(-25. + i, 0, 0))
        for i in range(int(n_elements))
    ]
    topology = sim.add_topology(particles)
    # Connect consecutive beads into a linear chain.
    for i in range(int(n_elements - 1)):
        topology.get_graph().add_edge(i, i + 1)
    topology.add_reaction(self._get_decay_reaction(typeid_b))
    topology.add_reaction(self._get_split_reaction())
    traj_handle = sim.register_observable_flat_trajectory(1)
    with closing(
            io.File(out_file, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        traj_handle.enable_write_to_file(f, u"", 50)
        sim.run_scheme_readdy(True)\
            .evaluate_topology_reactions()\
            .write_config_to_file(f)\
            .configure_and_run(time_steps, self.time_step)
    print("currently %s topologies" % len(sim.current_topologies()))
def test_radial_distribution_observable(self):
    """Write the radial-distribution observable to file and compare the
    stored bin centers and per-step distributions with the values captured
    by the Python callback."""
    common.set_logging_level("warn")
    fname = os.path.join(self.dir,
                         "test_observables_radial_distribution.h5")
    simulation = Simulation()
    simulation.set_kernel("SingleCPU")
    box_size = common.Vec(10, 10, 10)
    simulation.kbt = 2
    simulation.periodic_boundary = [True, True, True]
    simulation.box_size = box_size
    simulation.register_particle_type("A", .2, 1.)
    simulation.register_particle_type("B", .2, 1.)
    simulation.register_potential_harmonic_repulsion("A", "B", 10)
    simulation.add_particle("A", common.Vec(-2.5, 0, 0))
    simulation.add_particle("B", common.Vec(0, 0, 0))
    bin_borders = np.arange(0, 5, .01)
    # Normalization density: one particle per box volume.
    density = 1. / (box_size[0] * box_size[1] * box_size[2])
    n_time_steps = 50
    callback_centers = []
    callback_rdf = []

    def rdf_callback(pair):
        # pair is (bin_centers, distribution) for the current time step.
        callback_centers.append(pair[0])
        callback_rdf.append(pair[1])

    handle = simulation.register_observable_radial_distribution(
        1, bin_borders, ["A"], ["B"], density, rdf_callback)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"radial_distribution", int(3))
        simulation.run(n_time_steps, 0.02)
        handle.flush()
    with h5py.File(fname, "r") as f2:
        bin_centers = f2[
            "readdy/observables/radial_distribution/bin_centers"][:]
        distribution = f2[
            "readdy/observables/radial_distribution/distribution"][:]
        for t in range(n_time_steps):
            np.testing.assert_equal(bin_centers,
                                    np.array(callback_centers[t]))
            np.testing.assert_equal(distribution[t],
                                    np.array(callback_rdf[t]))
def test_groups_readwrite(self):
    """Write a group/subgroup with one dataset each through the readdy io
    module and verify the contents independently via h5py, addressing the
    subgroup both by chained group lookup and by absolute path."""
    fname = os.path.join(self.dir, "test_groups_readwrite.h5")
    data = np.array([[2.222, 3, 4, 5], [3.3, 3, 3, 3]], dtype=np.float64)
    with closing(io.File(fname, io.FileAction.CREATE)) as f:
        g = f.create_group("/my_super_group")
        subg = g.create_group("my_super_subgroup")
        g.write_double("doubleds", data)
        subg.write_string("stringds", u"jap")
    with h5py.File(fname, "r") as f2:
        np.testing.assert_equal(
            f2.get("/my_super_group")["doubleds"], data)
        # ds[()] reads a scalar dataset; Dataset.value was deprecated and
        # removed in h5py 3.0. The stored string comes back as bytes.
        np.testing.assert_equal(
            f2.get("/my_super_group").get("my_super_subgroup")
            ["stringds"][()].decode(), u"jap")
        np.testing.assert_equal(
            f2.get("/my_super_group/my_super_subgroup")
            ["stringds"][()].decode(), u"jap")
def setUpClass(cls):
    """One-time fixture: run a minimal one-step CPU simulation with three
    particle types and three reactions, writing its configuration to
    cls.fname so the tests can inspect the recorded config."""
    cls.kernel_provider = api.KernelProvider.get()
    cls.kernel_provider.load_from_dir(putils.get_readdy_plugin_dir())
    # Temporary directory/file; shared by all tests of this class.
    cls.dir = tempfile.mkdtemp("test-config-io")
    cls.fname = os.path.join(cls.dir, "test_io_utils.h5")
    sim = api.Simulation()
    sim.set_kernel("CPU")
    sim.register_particle_type("A", 1., 0.)
    sim.register_particle_type("B", 2., 0.)
    sim.register_particle_type("C", 3., 0.)
    sim.register_reaction_conversion("mylabel", "A", "B", .00001)
    sim.register_reaction_conversion("A->B", "A", "B", 1.)
    fusion_rate = 0.4
    educt_distance = 0.2
    sim.register_reaction_fusion("B+C->A", "B", "C", "A", fusion_rate,
                                 educt_distance, .5, .5)
    with contextlib.closing(
            io.File(cls.fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        # write_config_to_file records types/reactions into the file.
        sim.run_scheme_readdy(True).write_config_to_file(
            f).configure_and_run(1, 0.1)
def test_particles_observable(self):
    """Write the particles observable (types, ids, positions) to file while
    one "A" particle is added per step, then verify the stored records
    against the known addition pattern via h5py."""
    fname = os.path.join(self.dir, "test_observables_particles.h5")
    sim = Simulation()
    sim.set_kernel("SingleCPU")
    sim.box_size = common.Vec(13, 13, 13)
    typeid_A = sim.register_particle_type("A", .1, .1)
    typeid_B = sim.register_particle_type("B", .1, .1)
    sim.add_particle("A", common.Vec(0, 0, 0))
    sim.add_particle("B", common.Vec(0, 0, 0))
    # every time step, add one particle
    sim.register_observable_n_particles(
        1, ["A"],
        lambda n: sim.add_particle("A", common.Vec(1.5, 2.5, 3.5)))
    handle = sim.register_observable_particles(1)
    n_timesteps = 19
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"particles", int(3))
        sim.run_scheme_readdy(True).configure(0).run(n_timesteps)
        handle.flush()
    with h5py.File(fname, "r") as f2:
        types = f2["readdy/observables/particles/types"][:]
        ids = f2["readdy/observables/particles/ids"][:]
        positions = f2["readdy/observables/particles/positions"][:]
        for t in range(n_timesteps):
            # Two initial particles plus one added per step (first add
            # happens before the first recorded frame): t + 3 entries.
            np.testing.assert_equal(len(types[t]), t + 3)
            np.testing.assert_equal(len(ids[t]), t + 3)
            np.testing.assert_equal(len(positions[t]), t + 3)
            # The two initial particles sit at the origin.
            np.testing.assert_equal(types[t][0], typeid_A)
            np.testing.assert_equal(positions[t][0][0], 0)
            np.testing.assert_equal(positions[t][0][1], 0)
            np.testing.assert_equal(positions[t][0][2], 0)
            np.testing.assert_equal(positions[t][1][0], 0)
            np.testing.assert_equal(positions[t][1][1], 0)
            np.testing.assert_equal(positions[t][1][2], 0)
            np.testing.assert_equal(types[t][1], typeid_B)
            # All later particles are "A" at (1.5, 2.5, 3.5).
            for others in range(2, len(types[t])):
                np.testing.assert_equal(types[t][others], typeid_A)
                np.testing.assert_equal(positions[t][others][0], 1.5)
                np.testing.assert_equal(positions[t][others][1], 2.5)
                np.testing.assert_equal(positions[t][others][2], 3.5)
def test_histogram_along_axis_observable(self):
    """Write the histogram-along-axis observable (stride 2) to file and
    compare the stored histograms and the strided time series with the
    values captured by the Python callback."""
    common.set_logging_level("warn")
    fname = os.path.join(self.dir, "test_observables_hist_along_axis.h5")
    simulation = Simulation()
    simulation.set_kernel("SingleCPU")
    box_size = common.Vec(10, 10, 10)
    simulation.kbt = 2
    simulation.periodic_boundary = [True, True, True]
    simulation.box_size = box_size
    simulation.register_particle_type("A", .2, 1.)
    simulation.register_particle_type("B", .2, 1.)
    simulation.register_potential_harmonic_repulsion("A", "B", 10)
    simulation.add_particle("A", common.Vec(-2.5, 0, 0))
    simulation.add_particle("B", common.Vec(0, 0, 0))
    bin_borders = np.arange(0, 5, .01)
    n_time_steps = 50
    callback_hist = []

    def hist_callback(hist):
        callback_hist.append(hist)

    # Histogram along axis 0 (x), evaluated every 2nd time step.
    handle = simulation.register_observable_histogram_along_axis(
        2, bin_borders, 0, ["A", "B"], hist_callback)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"hist_along_x_axis", int(3))
        simulation.run(n_time_steps, 0.02)
        handle.flush()
    with h5py.File(fname, "r") as f2:
        histogram = f2["readdy/observables/hist_along_x_axis/data"][:]
        time_series = f2["readdy/observables/hist_along_x_axis/time"]
        # Stride 2 -> only every other time step is recorded.
        np.testing.assert_equal(time_series,
                                np.array(range(0, n_time_steps + 1))[::2])
        for t in range(n_time_steps // 2):
            np.testing.assert_equal(histogram[t],
                                    np.array(callback_hist[t]))
def test_center_of_mass_observable(self):
    """Write the center-of-mass observable to file and check that the stored
    per-step (x, y, z) values agree with those seen by the Python callback."""
    common.set_logging_level("warn")
    fname = os.path.join(self.dir, "test_observables_com.h5")
    sim = Simulation()
    sim.set_kernel("SingleCPU")
    box_size = common.Vec(10, 10, 10)
    sim.kbt = 2
    sim.periodic_boundary = [True, True, True]
    sim.box_size = box_size
    sim.register_particle_type("A", .2, 1.)
    sim.register_particle_type("B", .2, 1.)
    sim.add_particle("A", common.Vec(-2.5, 0, 0))
    sim.add_particle("B", common.Vec(0, 0, 0))
    n_time_steps = 50
    observed_coms = []

    def record_com(vec):
        # Collect each center-of-mass vector for later comparison.
        observed_coms.append(vec)

    handle = sim.register_observable_center_of_mass(
        1, ["A", "B"], record_com)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"com", 3)
        sim.run(n_time_steps, 0.02)
        handle.flush()
    with h5py.File(fname, "r") as f2:
        written = f2["readdy/observables/com/data"][:]
        for t in range(n_time_steps):
            # Compare component-wise: file stores named fields, the
            # callback saw an indexable vector.
            for component, field in enumerate(("x", "y", "z")):
                np.testing.assert_equal(written[t][field],
                                        observed_coms[t][component])
def test_reaction_counts_observable(self):
    """Write the reaction-counts observable for one time step and verify the
    per-reaction counts: at t=0 nothing has fired; at t=1 the fast "A->B"
    conversion and the "B+C->A" fusion each fired once."""
    common.set_logging_level("warn")
    fname = os.path.join(self.dir,
                         "test_observables_particle_reaction_counts.h5")
    sim = Simulation()
    sim.set_kernel("CPU")
    sim.box_size = common.Vec(10, 10, 10)
    sim.register_particle_type("A", .0, 5.0)
    sim.register_particle_type("B", .0, 6.0)
    sim.register_particle_type("C", .0, 6.0)
    # "mylabel" has a negligible rate and should never fire; "A->B" fires
    # with rate 1; B and C start within the fusion educt distance.
    sim.register_reaction_conversion("mylabel", "A", "B", .00001)
    sim.register_reaction_conversion("A->B", "A", "B", 1.)
    sim.register_reaction_fusion("B+C->A", "B", "C", "A", 1.0, 1.0, .5, .5)
    sim.add_particle("A", common.Vec(0, 0, 0))
    sim.add_particle("B", common.Vec(1.0, 1.0, 1.0))
    sim.add_particle("C", common.Vec(1.1, 1.0, 1.0))
    n_timesteps = 1
    handle = sim.register_observable_reaction_counts(1)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"reactions", int(3))
        sim.run_scheme_readdy(True).write_config_to_file(
            f).with_reaction_scheduler(
                "GillespieParallel").configure_and_run(n_timesteps, 1)
    with h5py.File(fname, "r") as f2:
        data = f2["readdy/observables/reactions"]
        time_series = f2["readdy/observables/reactions/time"]
        np.testing.assert_equal(time_series,
                                np.array(range(0, n_timesteps + 1)))

        def get_item(name, collection):
            # Look up a registered reaction record by its name field.
            return next(x for x in collection if x["name"] == name)

        order_1_reactions = data["registered_reactions/order1_reactions"]
        order_2_reactions = data["registered_reactions/order2_reactions"]
        mylabel_reaction = get_item("mylabel", order_1_reactions)
        reaction_idx_mylabel = mylabel_reaction["index"]
        atob_reaction = get_item("A->B", order_1_reactions)
        reaction_idx_atob = atob_reaction["index"]
        # counts of first time step, time is first index
        np.testing.assert_equal(
            data["counts/order1/A[id=0]"][0, reaction_idx_mylabel],
            np.array([0]))
        np.testing.assert_equal(
            data["counts/order1/A[id=0]"][0, reaction_idx_atob],
            np.array([0]))
        np.testing.assert_equal(
            data["counts/order2/B[id=1] + C[id=2]"][0, 0], np.array([0]))
        # counts of second time step
        np.testing.assert_equal(
            data["counts/order1/A[id=0]"][1, reaction_idx_mylabel],
            np.array([0]))
        np.testing.assert_equal(
            data["counts/order1/A[id=0]"][1, reaction_idx_atob],
            np.array([1]))
        np.testing.assert_equal(
            data["counts/order2/B[id=1] + C[id=2]"][1, 0], np.array([1]))
    common.set_logging_level("warn")
def test_reactions_observable(self):
    """Write the reactions observable for one time step and verify both the
    registered-reaction metadata and the per-step reaction records.

    Fixes a copy-paste defect: the educt/product-type assertions for the
    "A->B" reaction previously re-checked ``mylabel_reaction`` instead of
    ``atob_reaction``, so the A->B types were never actually verified.
    """
    common.set_logging_level("warn")
    fname = os.path.join(self.dir, "test_observables_particle_reactions.h5")
    sim = Simulation()
    sim.set_kernel("CPU")
    sim.box_size = common.Vec(10, 10, 10)
    sim.register_particle_type("A", .0, 5.0)
    sim.register_particle_type("B", .0, 6.0)
    sim.register_particle_type("C", .0, 6.0)
    # "mylabel" has a negligible rate and should never fire; "A->B" fires
    # with rate 1; B and C start within the fusion educt distance.
    sim.register_reaction_conversion("mylabel", "A", "B", .00001)
    sim.register_reaction_conversion("A->B", "A", "B", 1.)
    sim.register_reaction_fusion("B+C->A", "B", "C", "A", 1.0, 1.0, .5, .5)
    sim.add_particle("A", common.Vec(0, 0, 0))
    sim.add_particle("B", common.Vec(1.0, 1.0, 1.0))
    sim.add_particle("C", common.Vec(1.1, 1.0, 1.0))
    n_timesteps = 1
    handle = sim.register_observable_reactions(1)
    with closing(
            io.File(fname, io.FileAction.CREATE,
                    io.FileFlag.OVERWRITE)) as f:
        handle.enable_write_to_file(f, u"reactions", int(3))
        sim.run_scheme_readdy(True).write_config_to_file(
            f).configure_and_run(n_timesteps, 1)
    # Map particle-type names to the numeric ids stored in the file.
    type_str_to_id = ioutils.get_particle_types(fname)
    with h5py.File(fname, "r") as f2:
        data = f2["readdy/observables/reactions"]
        time_series = f2["readdy/observables/reactions/time"]
        np.testing.assert_equal(time_series,
                                np.array(range(0, n_timesteps + 1)))

        def get_item(name, collection):
            # Look up a registered reaction record by its name field.
            return next(x for x in collection if x["name"] == name)

        order_1_reactions = data["registered_reactions/order1_reactions"]
        mylabel_reaction = get_item("mylabel", order_1_reactions)
        np.testing.assert_equal(mylabel_reaction["rate"], .00001)
        np.testing.assert_equal(mylabel_reaction["n_educts"], 1)
        np.testing.assert_equal(mylabel_reaction["n_products"], 1)
        np.testing.assert_equal(mylabel_reaction["educt_types"],
                                [type_str_to_id["A"], 0])
        np.testing.assert_equal(mylabel_reaction["product_types"],
                                [type_str_to_id["B"], 0])
        atob_reaction = get_item("A->B", order_1_reactions)
        np.testing.assert_equal(atob_reaction["rate"], 1.)
        np.testing.assert_equal(atob_reaction["n_educts"], 1)
        np.testing.assert_equal(atob_reaction["n_products"], 1)
        # Fixed: check the A->B reaction's own types (previously these two
        # asserts re-checked mylabel_reaction).
        np.testing.assert_equal(atob_reaction["educt_types"],
                                [type_str_to_id["A"], 0])
        np.testing.assert_equal(atob_reaction["product_types"],
                                [type_str_to_id["B"], 0])
        order_2_reactions = data["registered_reactions/order2_reactions"]
        fusion_reaction = get_item("B+C->A", order_2_reactions)
        np.testing.assert_equal(fusion_reaction["rate"], 1.)
        np.testing.assert_equal(fusion_reaction["educt_distance"], 1.)
        np.testing.assert_equal(fusion_reaction["n_educts"], 2)
        np.testing.assert_equal(fusion_reaction["n_products"], 1)
        np.testing.assert_equal(
            fusion_reaction["educt_types"],
            [type_str_to_id["B"], type_str_to_id["C"]])
        np.testing.assert_equal(fusion_reaction["product_types"],
                                [type_str_to_id["A"], 0])
        records = data["records"][:]
        np.testing.assert_equal(len(records), 2)
        # records of 1st time step
        for record in records[1]:
            np.testing.assert_equal(
                record["reaction_type"] == 0
                or record["reaction_type"] == 1, True)
            if record["reaction_type"] == 0:
                # conversion A->B at the origin
                np.testing.assert_equal(record["position"],
                                        np.array([.0, .0, .0]))
                np.testing.assert_equal(record["reaction_index"], 1)
            elif record["reaction_type"] == 1:
                # fusion
                np.testing.assert_equal(record["position"],
                                        np.array([1.05, 1.0, 1.0]))
                np.testing.assert_equal(record["reaction_index"], 0)
    common.set_logging_level("warn")