def setUp(self):
    self.mdtraj = md.load(data_filename("ala_small_traj.pdb"))
    self.traj = peng.trajectory_from_mdtraj(
        self.mdtraj, simple_topology=True)
    self.filename = data_filename("storage_test.nc")
    self.filename_clone = data_filename("storage_test_clone.nc")
    self.simplifier = ObjectJSON()
    self.template_snapshot = self.traj[0]
    self.solute_indices = range(22)

    self.toy_topology = toys.Topology(
        n_spatial=2,
        masses=[1.0, 1.0],
        pes=None
    )
    self.engine = toys.Engine({}, self.toy_topology)
    self.toy_template = toys.Snapshot(
        coordinates=np.array([[-0.5, -0.5]]),
        velocities=np.array([[0.0, 0.0]]),
        engine=self.engine
    )
def setUp():
    class Object():
        pass

    # Use the standard alanine system to generate snapshots to store for
    # further testing
    global this
    this = Object()

    this.options = {
        'temperature': 300.0 * u.kelvin,
        'collision_rate': 1.0 / u.picoseconds,
        'timestep': 1.0 * u.femtoseconds,
        'nsteps_per_frame': 1,
        'n_frames_max': 5,
        'start_time': time.time(),
        'fn_initial_pdb': data_filename("ala_small_traj.pdb"),
        'platform': 'fastest',
        'solute_indices': range(22),
        'forcefield_solute': 'amber96.xml',
        'forcefield_solvent': 'tip3p.xml'
    }

    # create a template snapshot
    this.template_snapshot = paths.snapshot_from_pdb(
        data_filename("ala_small_traj.pdb"))

    # and an openmm engine
    this.engine = paths.OpenMMEngine(
        options=this.options,
        template=this.template_snapshot)
    this.engine.initialized = True

    # run a small trajectory of a few steps that can be used to save, etc...
    this.traj = this.engine.generate(
        this.template_snapshot,
        running=[paths.LengthEnsemble(2).can_append])

    this.filename = data_filename("storage_test.nc")
    this.filename_clone = data_filename("storage_test_clone.nc")
def setUp(self):
    self.mdtraj = md.load(data_filename("ala_small_traj.pdb"))
    self.traj = paths.tools.trajectory_from_mdtraj(self.mdtraj)
    self.filename = data_filename("storage_test.nc")
    self.filename_clone = data_filename("storage_test_clone.nc")
    self.simplifier = ObjectJSON()
    self.template_snapshot = self.traj[0]
    self.solute_indices = range(22)
def setUp(self):
    # setUp is just reading in some alanine dipeptide frames: this is an
    # ugly hack
    self.storage = Storage(
        filename=data_filename("ala_small_traj.nc"),
        mode="a"
    )

    topol = md.load(data_filename("ala_small_traj.pdb")).top.to_openmm()
    self.simulation = SimulationDuckPunch(topol, None)

    trajectory.Trajectory.simulator = self
    trajectory.Trajectory.storage = self.storage
def setUp():
    global topology, template, system, nan_causing_template
    template = peng.snapshot_from_pdb(data_filename("ala_small_traj.pdb"))
    topology = peng.to_openmm_topology(template)

    # Generated using OpenMM Script Builder
    # http://builder.openmm.org
    forcefield = app.ForceField(
        'amber96.xml',  # solute FF
        'tip3p.xml'     # solvent FF
    )

    # OpenMM System
    system = forcefield.createSystem(
        topology,
        nonbondedMethod=app.PME,
        nonbondedCutoff=1.0 * u.nanometers,
        constraints=app.HBonds,
        ewaldErrorTolerance=0.0005
    )

    # this is crude but does the trick
    nan_causing_template = template.copy()
    kinetics = template.kinetics.copy()
    # this is crude but does the trick
    kinetics.velocities = kinetics.velocities.copy()
    kinetics.velocities[0] = \
        (np.zeros(template.velocities.shape[1]) + 1000000.) * \
        u.nanometers / u.picoseconds
    nan_causing_template.kinetics = kinetics
def setup(self):
    # As a test system, let's use 1D motion on a flat potential. If the
    # velocity is positive, you hit the state on the right. If it is
    # negative, you hit the state on the left.
    pes = toys.LinearSlope(m=[0.0], c=[0.0])  # flat line
    topology = toys.Topology(n_spatial=1, masses=[1.0], pes=pes)
    integrator = toys.LeapfrogVerletIntegrator(0.1)
    options = {
        'integ': integrator,
        'n_frames_max': 100000,
        'n_steps_per_frame': 5
    }
    self.engine = toys.Engine(options=options, topology=topology)
    self.snap0 = toys.Snapshot(coordinates=np.array([[0.0]]),
                               velocities=np.array([[1.0]]),
                               engine=self.engine)
    cv = paths.FunctionCV("Id", lambda snap: snap.coordinates[0][0])
    starting_volume = paths.CVDefinedVolume(cv, -0.01, 0.01)
    forward_ensemble = paths.LengthEnsemble(5)
    backward_ensemble = paths.LengthEnsemble(3)
    randomizer = paths.NoModification()

    self.filename = data_filename("shoot_from_snaps.nc")
    self.storage = paths.Storage(self.filename, 'w')

    self.simulation = ShootFromSnapshotsSimulation(
        storage=self.storage,
        engine=self.engine,
        starting_volume=starting_volume,
        forward_ensemble=forward_ensemble,
        backward_ensemble=backward_ensemble,
        randomizer=randomizer,
        initial_snapshots=self.snap0
    )
    self.simulation.output_stream = open(os.devnull, "w")
def test_storage(self):
    import os
    fname = data_filename("tps_network_storage_test.nc")
    if os.path.isfile(fname):
        os.remove(fname)

    topol = peng.Topology(n_spatial=1, masses=[1.0], pes=None)
    engine = peng.Engine({}, topol)
    self.template = peng.Snapshot(coordinates=np.array([[0.0]]),
                                  velocities=np.array([[0.0]]),
                                  engine=engine)

    states = [self.stateA, self.stateB, self.stateC]
    network_a = TPSNetwork(initial_states=states, final_states=states)
    assert_equal(len(network_a.sampling_transitions), 1)
    assert_equal(len(network_a.transitions), 6)

    storage_w = paths.storage.Storage(fname, "w")
    storage_w.snapshots.save(self.template)
    storage_w.save(network_a)
    storage_w.sync_all()

    storage_r = paths.storage.AnalysisStorage(fname)
    network_b = storage_r.networks[0]
    assert_equal(len(network_b.sampling_transitions), 1)
    assert_equal(len(network_b.transitions), 6)

    if os.path.isfile(fname):
        os.remove(fname)
def setup_package():
    # this should generate the trajectory.nc file which we'll use for
    # everything else
    mdtrajectory = md.load(data_filename("ala_small_traj.pdb"))
    snapshot = peng.snapshot_from_pdb(data_filename("ala_small_traj.pdb"))

    storage = Storage(
        filename=data_filename("ala_small_traj.nc"),
        template=snapshot,
        mode='w'
    )
    mytraj = peng.trajectory_from_mdtraj(mdtrajectory, simple_topology=True)
    storage.trajectories.save(mytraj)
    storage.close()
def setup_package():
    # this should generate the trajectory.nc file which we'll use for
    # everything else
    mdtrajectory = md.load(data_filename("ala_small_traj.pdb"))
    snapshot = paths.snapshot_from_pdb(data_filename("ala_small_traj.pdb"))

    # once we have a template configuration (the coordinates do not really
    # matter) we can create a storage. We might move this logic out of the
    # dynamics engine and keep storage and engine generation completely
    # separate!
    storage = Storage(filename=data_filename("ala_small_traj.nc"),
                      template=snapshot,
                      mode='w')
    mytraj = paths.tools.trajectory_from_mdtraj(mdtrajectory)
    storage.trajectories.save(mytraj)
    storage.close()
def setup(self):
    self.mdtraj = md.load(data_filename("ala_small_traj.pdb"))
    self.traj_topology = peng.trajectory_from_mdtraj(self.mdtraj)
    self.traj_simple = peng.trajectory_from_mdtraj(self.mdtraj,
                                                   simple_topology=True)
    self.topology = self.traj_topology[0].engine.topology

    if os.path.isfile("myfile.nc"):
        os.remove("myfile.nc")
def test_storage(self):
    self.storage.tag['simulation'] = self.simulation
    self.storage.close()
    read_store = paths.Storage(self.filename, 'r')
    sim = read_store.tag['simulation']
    new_filename = data_filename("test2.nc")
    sim.storage = paths.Storage(new_filename, 'w')
    sim.output_stream = open(os.devnull, 'w')
    sim.run(n_per_snapshot=2)
    if os.path.isfile(new_filename):
        os.remove(new_filename)
def setup_package():
    # this should generate the trajectory.nc file which we'll use for
    # everything else
    mdtrajectory = md.load(data_filename("ala_small_traj.pdb"))
    snapshot = paths.snapshot_from_pdb(data_filename("ala_small_traj.pdb"))

    # once we have a template configuration (the coordinates do not really
    # matter) we can create a storage. We might move this logic out of the
    # dynamics engine and keep storage and engine generation completely
    # separate!
    storage = Storage(
        filename=data_filename("ala_small_traj.nc"),
        template=snapshot,
        mode='w'
    )
    mytraj = paths.Trajectory.from_mdtraj(mdtrajectory)
    storage.trajectories.save(mytraj)
    storage.close()
def setUp(self):
    template = paths.tools.snapshot_from_pdb(
        data_filename("ala_small_traj.pdb"))
    topology = paths.tools.to_openmm_topology(template)

    # Generated using OpenMM Script Builder
    # http://builder.openmm.org
    forcefield = app.ForceField(
        'amber96.xml',  # solute FF
        'tip3p.xml'     # solvent FF
    )

    # OpenMM System
    system = forcefield.createSystem(
        topology,
        nonbondedMethod=app.PME,
        nonbondedCutoff=1.0 * unit.nanometers,
        constraints=app.HBonds,
        rigidWater=True,
        ewaldErrorTolerance=0.0005
    )

    # OpenMM Integrator
    integrator = mm.LangevinIntegrator(
        300 * unit.kelvin,
        1.0 / unit.picoseconds,
        2.0 * unit.femtoseconds
    )
    integrator.setConstraintTolerance(0.00001)

    # Engine options
    options = {
        'nsteps_per_frame': 10,
        'platform': 'fastest',
        'solute_indices': range(22),
        'n_frames_max': 5,
        'timestep': 2.0 * unit.femtoseconds
    }
    self.engine = paths.OpenMMEngine(
        template,
        system,
        integrator,
        options
    )
    self.engine.initialize()

    context = self.engine.simulation.context
    zero_array = np.zeros((self.engine.n_atoms, 3))
    context.setPositions(self.engine.template.coordinates)
    context.setVelocities(
        u.Quantity(zero_array, u.nanometers / u.picoseconds))
def setUp(self):
    options = {
        'temperature': 300.0 * u.kelvin,
        'collision_rate': 1.0 / u.picoseconds,
        'timestep': 2.0 * u.femtoseconds,
        'nsteps_per_frame': 10,
        'n_frames_max': 5,
        'start_time': time.time(),
        'fn_initial_pdb': data_filename("ala_small_traj.pdb"),
        'platform': 'fastest',
        'solute_indices': range(22),
        'forcefield_solute': 'amber96.xml',
        'forcefield_solvent': 'tip3p.xml'
    }
    self.engine = OpenMMEngine.auto(
        filename=data_filename("openmmengine_test.nc"),
        template=data_filename("ala_small_traj.pdb"),
        options=options,
        mode='create'
    )

    context = self.engine.simulation.context
    zero_array = np.zeros((self.engine.n_atoms, 3))
    context.setPositions(self.engine.template.coordinates)
    context.setVelocities(
        u.Quantity(zero_array, u.nanometers / u.picoseconds))
def setup(self):
    # taken from the testCommittorSimulation
    import openpathsampling.engines.toy as toys
    pes = toys.LinearSlope(m=[0.0], c=[0.0])  # flat line
    topology = toys.Topology(n_spatial=1, masses=[1.0], pes=pes)
    descriptor = peng.SnapshotDescriptor.construct(
        toys.Snapshot,
        {
            'n_atoms': 1,
            'n_spatial': 1
        }
    )
    engine = peng.NoEngine(descriptor)
    self.snap0 = toys.Snapshot(coordinates=np.array([[0.0]]),
                               velocities=np.array([[1.0]]),
                               engine=engine)
    self.snap1 = toys.Snapshot(coordinates=np.array([[0.1]]),
                               velocities=np.array([[1.0]]),
                               engine=engine)
    integrator = toys.LeapfrogVerletIntegrator(0.1)
    options = {
        'integ': integrator,
        'n_frames_max': 10000,
        'n_steps_per_frame': 5
    }
    self.engine = toys.Engine(options=options, topology=topology)
    cv = paths.FunctionCV("Id", lambda snap: snap.coordinates[0][0])
    self.left = paths.CVDefinedVolume(cv, float("-inf"), -1.0)
    self.right = paths.CVDefinedVolume(cv, 1.0, float("inf"))
    randomizer = paths.NoModification()

    self.filename = data_filename("shooting_analysis.nc")
    self.storage = paths.Storage(self.filename, mode="w")

    self.simulation = paths.CommittorSimulation(
        storage=self.storage,
        engine=self.engine,
        states=[self.left, self.right],
        randomizer=randomizer,
        initial_snapshots=[self.snap0, self.snap1]
    )
    self.simulation.output_stream = open(os.devnull, 'w')
    self.simulation.run(20)

    # set up the analysis object
    self.analyzer = ShootingPointAnalysis(self.storage.steps,
                                          [self.left, self.right])
def test_storage(self):
    import os
    filename = data_filename("transitions.nc")
    if os.path.isfile(filename):
        os.remove(filename)

    storage = paths.Storage(filename, "w")
    storage.snapshots.save(self.good_traj[0])
    storage.save(self.transition)
    assert_equal(len(storage.transitions), 1)
    storage.sync_all()

    storage_r = paths.storage.AnalysisStorage(filename)
    reloaded = storage_r.transitions[0]
    assert_equal(self.transition.ensembles[0], reloaded.ensembles[0])

    if os.path.isfile(filename):
        os.remove(filename)
def test_sim_with_storage(self):
    tmpfile = data_filename("direct_sim_test.nc")
    if os.path.isfile(tmpfile):
        os.remove(tmpfile)

    storage = paths.Storage(tmpfile, "w", self.snap0)
    sim = DirectSimulation(storage=storage,
                           engine=self.engine,
                           states=[self.center, self.outside],
                           initial_snapshot=self.snap0)
    sim.run(200)
    storage.close()

    read_store = paths.AnalysisStorage(tmpfile)
    assert_equal(len(read_store.trajectories), 1)
    traj = read_store.trajectories[0]
    assert_equal(len(traj), 201)
    read_store.close()
    os.remove(tmpfile)
def test_storage(self):
    import os
    fname = data_filename("mistis_storage_test.nc")
    if os.path.isfile(fname):
        os.remove(fname)

    template = self.traj['AA'][0]
    storage_w = paths.Storage(fname, "w")
    storage_w.snapshots.save(template)
    storage_w.save(self.mistis)
    storage_w.sync_all()

    storage_r = paths.AnalysisStorage(fname)
    reloaded = storage_r.networks[0]
    assert_equal(reloaded.strict_sampling, False)
    assert_equal(reloaded.sampling_transitions[0].ensembles[0],
                 self.mistis.sampling_transitions[0].ensembles[0])

    if os.path.isfile(fname):
        os.remove(fname)
def test_storage(self):
    import os
    fname = data_filename("interface_set_storage_test.nc")
    if os.path.isfile(fname):
        os.remove(fname)

    template = make_1d_traj([0.0])[0]
    storage_w = paths.Storage(fname, "w", template)
    storage_w.save(self.increasing_set)
    storage_w.sync_all()

    storage_r = paths.AnalysisStorage(fname)
    reloaded = storage_r.interfacesets[0]

    assert_items_equal(reloaded.lambdas, self.increasing_set.lambdas)
    for (truth, beauty) in zip(self.increasing_set, reloaded):
        assert_equal(truth, beauty)

    for (v, l) in zip(reloaded.volumes, reloaded.lambdas):
        assert_equal(reloaded.get_lambda(v), l)

    if os.path.isfile(fname):
        os.remove(fname)
def setup(self):
    # As a test system, let's use 1D motion on a flat potential. If the
    # velocity is positive, you hit the state on the right. If it is
    # negative, you hit the state on the left.
    pes = toys.LinearSlope(m=[0.0], c=[0.0])  # flat line
    topology = toys.Topology(n_spatial=1, masses=[1.0], pes=pes)
    integrator = toys.LeapfrogVerletIntegrator(0.1)
    options = {
        'integ': integrator,
        'n_frames_max': 100000,
        'n_steps_per_frame': 5
    }
    self.engine = toys.Engine(options=options, topology=topology)
    self.snap0 = toys.Snapshot(coordinates=np.array([[0.0]]),
                               velocities=np.array([[1.0]]),
                               engine=self.engine)
    cv = paths.FunctionCV("Id", lambda snap: snap.coordinates[0][0])
    self.left = paths.CVDefinedVolume(cv, float("-inf"), -1.0)
    self.right = paths.CVDefinedVolume(cv, 1.0, float("inf"))
    self.state_labels = {
        "Left": self.left,
        "Right": self.right,
        "None": ~(self.left | self.right)
    }
    randomizer = paths.NoModification()

    self.filename = data_filename("committor_test.nc")
    self.storage = paths.Storage(self.filename, mode="w")
    self.storage.save(self.snap0)

    self.simulation = CommittorSimulation(storage=self.storage,
                                          engine=self.engine,
                                          states=[self.left, self.right],
                                          randomizer=randomizer,
                                          initial_snapshots=self.snap0)
    self.simulation.output_stream = open(os.devnull, 'w')
def test_storage_sync(self):
    import os

    # test all combinations of (1) with and without UUIDs,
    # (2) with partial (allow_incomplete) caching; all of these must work
    allow_incomplete = True

    for use_uuid in [True, False]:
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj_simple))
        template = traj[0]

        storage_w = paths.Storage(fname, "w", use_uuid=use_uuid)
        storage_w.snapshots.save(template)

        cv1 = paths.CoordinateFunctionCV(
            'f1',
            lambda snapshot: snapshot.coordinates[0]
        ).with_diskcache(
            allow_incomplete=allow_incomplete)

        # let's mess up the order in which we save and
        # include reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        storage_w.trajectories.save(traj[6:])
        assert (len(storage_w.snapshots) == 10)
        storage_w.snapshots.save(traj[1].reversed)
        assert (len(storage_w.snapshots) == 12)

        storage_w.save(cv1)
        store = storage_w.cvs.cache_store(cv1)
        assert (len(store.vars['value']) == 0)

        storage_w.snapshots.sync_cv(cv1)
        # nothing added to the cache so no changes
        assert (len(store.vars['value']) == 1)

        # fill the cache
        _ = cv1(traj)

        storage_w.snapshots.sync_cv(cv1)
        # should match the number of stored snapshots
        assert (len(store.vars['value']) == 6)

        # save the rest
        storage_w.trajectories.save(traj.reversed)
        assert (len(storage_w.snapshots) == 20)

        # should still be unchanged
        assert (len(store.vars['value']) == 6)

        # this should store the remaining CV values
        storage_w.snapshots.sync_cv(cv1)
        assert (len(store.vars['value']) == 10)

        # check if the values match
        for idx, value in zip(
                store.variables['index'][:],
                store.vars['value']):
            if use_uuid:
                snap = storage_w.snapshots[
                    storage_w.snapshots.vars['uuid'][idx]]
            else:
                # * 2 because we use a symmetric cv
                snap = storage_w.snapshots[int(idx) * 2]
            assert_close_unit(cv1(snap), value)

        storage_w.close()

        if os.path.isfile(fname):
            os.remove(fname)
def test_sanity(self):
    assert_equal(os.path.isfile(data_filename("openmmengine_test.nc")),
                 True)
def test_store_snapshots(self):
    fname = data_filename("cv_storage_test.nc")
    if os.path.isfile(fname):
        os.remove(fname)

    traj = paths.Trajectory(list(self.traj))
    template = traj[0]

    for use_uuid, use_cache in [(True, True), (False, True),
                                (True, False), (False, False)]:
        storage_w = paths.Storage(fname, "w", use_uuid=use_uuid)
        storage_w.snapshots.save(template)

        # let's mess up the order in which we save and include
        # reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        assert (len(storage_w.trajectories) == 0)
        assert (len(storage_w.stores['snapshot0']) == 2)

        storage_w.snapshots.save(traj[8].reversed)
        assert (len(storage_w.snapshots) == 4)
        assert (len(storage_w.trajectories) == 0)
        assert (len(storage_w.stores['snapshot0']) == 4)

        # this will store traj[6:] under pos IDX #0
        storage_w.trajectories.save(traj[6:])
        assert (len(storage_w.snapshots) == 10)
        assert (len(storage_w.trajectories) == 1)
        assert (len(storage_w.stores['snapshot0']) == 10)

        traj_rev = traj.reversed

        # this will store traj_rev under pos IDX #1
        storage_w.trajectories.mention(traj_rev)
        assert (len(storage_w.snapshots) == 20)
        assert (len(storage_w.trajectories) == 2)
        assert (len(storage_w.stores['snapshot0']) == 10)

        # this will not do anything since traj is already saved
        storage_w.trajectories.save(traj_rev)
        assert (len(storage_w.snapshots) == 20)
        assert (len(storage_w.trajectories) == 2)
        assert (len(storage_w.stores['snapshot0']) == 10)

        # this will store traj under pos IDX #2
        storage_w.trajectories.save(traj)
        assert (len(storage_w.snapshots) == 20)
        assert (len(storage_w.trajectories) == 3)
        assert (len(storage_w.stores['snapshot0']) == 20)

        # this will not store since traj is already stored
        storage_w.trajectories.save(traj)
        assert (len(storage_w.snapshots) == 20)
        assert (len(storage_w.trajectories) == 3)
        assert (len(storage_w.stores['snapshot0']) == 20)

        # we saved in this order [0f, 8r, 6f, 7f, 9f, 5r, 4r, 3r, 2r, 1r]
        # these are indices      [ 0, 17, 12, 14, 18,  3,  5,  7,  9, 11]

        storage_w.close()

        if use_cache:
            storage_r = paths.AnalysisStorage(fname)
        else:
            storage_r = paths.Storage(fname, 'r')
            storage_r.snapshots.set_caching(False)
            storage_r.stores['snapshot0'].set_caching(False)

        # check if the loaded trajectory is reproduced
        for s1, s2 in zip(traj, storage_r.trajectories[2]):
            compare_snapshot(s1, s2, True)

        # this is the expected order in which it is saved
        eff_traj = [
            traj[0],
            traj[8].reversed,
            traj[6],
            traj[7],
            traj[9],
            traj[5].reversed,
            traj[4].reversed,
            traj[3].reversed,
            traj[2].reversed,
            traj[1].reversed,
        ]

        # load from hidden and see if the hidden store looks as expected
        # we open every second snapshot from the hidden store because the
        # ones in between correspond to the reversed ones
        hidden_snapshots = storage_r.stores['snapshot0'][:]
        for idx in range(10):
            s1 = eff_traj[idx]
            s1r = s1.reversed
            s2 = hidden_snapshots[2 * idx]
            s2r = hidden_snapshots[2 * idx + 1]
            compare_snapshot(s1, s2, True)
            compare_snapshot(s1r, s2r, True)

        storage_r.close()
def test_create_template(self):
    store = Storage(filename=self.filename,
                    template=self.template_snapshot,
                    mode='w')
    assert(os.path.isfile(data_filename("storage_test.nc")))
    store.close()
def teardown_package():
    if os.path.isfile(data_filename("ala_small_traj.nc")):
        os.remove(data_filename("ala_small_traj.nc"))
def test_storage_cv_function(self):
    import os

    # test with and without partial (allow_incomplete) caching;
    # both of these must work
    for allow_incomplete in (True, False):
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj_simple))
        template = traj[0]

        storage_w = paths.Storage(fname, "w")
        storage_w.snapshots.save(template)

        cv1 = paths.CoordinateFunctionCV(
            'f1',
            lambda x: x.coordinates[0]).with_diskcache(
                allow_incomplete=allow_incomplete)
        storage_w.save(cv1)

        # let's mess up the order in which we save and
        # include reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        storage_w.trajectories.save(traj[3:])
        assert (len(storage_w.snapshots) == 16)
        storage_w.snapshots.save(traj[1].reversed)
        assert (len(storage_w.snapshots) == 18)
        storage_w.trajectories.save(traj.reversed)
        assert (len(storage_w.snapshots) == 20)

        # this should be ignored for all is saved already
        storage_w.trajectories.save(traj)
        storage_w.close()

        storage_r = paths.AnalysisStorage(fname)
        rcv1 = storage_r.cvs['f1']

        assert (rcv1._store_dict)

        cv_cache = rcv1._store_dict.value_store

        assert (cv_cache.allow_incomplete == allow_incomplete)

        for idx, snap in enumerate(storage_r.trajectories[1]):
            if not allow_incomplete or cv_cache[snap] is not None:
                assert_close_unit(cv_cache[snap], cv1(snap))
                assert_close_unit(cv_cache[snap.reversed],
                                  cv1(snap.reversed))

        storage_r.close()

        if os.path.isfile(fname):
            os.remove(fname)
def test_storage_sync(self):
    import os

    # test storage with UUIDs and partial (allow_incomplete) caching;
    # all of these must work
    allow_incomplete = True

    for use_uuid in [True]:
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj_simple))
        template = traj[0]

        storage_w = paths.Storage(fname, "w")
        storage_w.snapshots.save(template)

        cv1 = paths.CoordinateFunctionCV(
            'f1',
            lambda snapshot: snapshot.coordinates[0]).with_diskcache(
                allow_incomplete=allow_incomplete)

        # let's mess up the order in which we save and
        # include reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        storage_w.trajectories.save(traj[6:])
        assert (len(storage_w.snapshots) == 10)
        storage_w.snapshots.save(traj[1].reversed)
        assert (len(storage_w.snapshots) == 12)

        storage_w.save(cv1)
        store = storage_w.cvs.cache_store(cv1)
        assert (len(store.vars['value']) == 0)

        storage_w.snapshots.sync_cv(cv1)
        # nothing added to the cache so no changes
        assert (len(store.vars['value']) == 1)

        # fill the cache
        _ = cv1(traj)

        storage_w.snapshots.sync_cv(cv1)
        # should match the number of stored snapshots
        assert (len(store.vars['value']) == 6)

        # save the rest
        storage_w.trajectories.save(traj.reversed)
        assert (len(storage_w.snapshots) == 20)

        # should still be unchanged
        assert (len(store.vars['value']) == 6)

        # this should store the remaining CV values
        storage_w.snapshots.sync_cv(cv1)
        assert (len(store.vars['value']) == 10)

        # check if the values match
        for idx, value in zip(store.variables['index'][:],
                              store.vars['value']):
            snap = storage_w.snapshots[
                storage_w.snapshots.vars['uuid'][idx]]
            assert_close_unit(cv1(snap), value)

        storage_w.close()

        if os.path.isfile(fname):
            os.remove(fname)
def setUp(self):
    self.md_trajectory = md.load(data_filename("ala_small_traj.pdb"))
    self.ops_trajectory = trajectory_from_mdtraj(self.md_trajectory)
    self.md_topology = self.ops_trajectory.topology.mdtraj
def test_storage_sync_and_complete(self):
    import os

    # test storage with UUIDs and partial (allow_incomplete) caching;
    # all of these must work
    allow_incomplete = True

    for use_uuid in [True]:
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj_simple))
        template = traj[0]

        storage_w = paths.Storage(fname, "w")
        storage_w.snapshots.save(template)

        cv1 = paths.CoordinateFunctionCV(
            'f1',
            lambda snapshot: snapshot.coordinates[0]).with_diskcache(
                allow_incomplete=allow_incomplete)

        # let's mess up the order in which we save and include
        # reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        storage_w.trajectories.save(traj[3:])
        assert (len(storage_w.snapshots) == 16)
        storage_w.snapshots.save(traj[1].reversed)
        assert (len(storage_w.snapshots) == 18)
        storage_w.trajectories.save(traj.reversed)
        assert (len(storage_w.snapshots) == 20)

        storage_w.save(cv1)
        store = storage_w.cvs.cache_store(cv1)
        assert (len(store.vars['value']) == 0)

        storage_w.snapshots.complete_cv(cv1)
        assert (len(store.vars['value']) == 10)

        # check if stored values match computed ones
        for idx, value in zip(store.variables['index'][:],
                              store.vars['value']):
            snap = storage_w.snapshots[
                storage_w.snapshots.vars['uuid'][idx]]
            assert_close_unit(cv1(snap), value)

        storage_w.close()

        if os.path.isfile(fname):
            os.remove(fname)
def test_ops_load_trajectory_pdb(self):
    pdb_file = data_filename("ala_small_traj.pdb")
    ops_trajectory = ops_load_trajectory(pdb_file)
def test_storage_cv_function(self):
    import os

    # test all combinations of (1) with and without UUIDs,
    # (2) with and without partial (allow_incomplete) caching;
    # all of these must work
    for use_uuid, allow_incomplete in [(True, True), (False, True),
                                       (True, False), (False, False)]:
        fname = data_filename("cv_storage_test.nc")
        if os.path.isfile(fname):
            os.remove(fname)

        traj = paths.Trajectory(list(self.traj_simple))
        template = traj[0]

        storage_w = paths.Storage(fname, "w", use_uuid=use_uuid)
        storage_w.snapshots.save(template)

        cv1 = paths.CoordinateFunctionCV(
            'f1',
            lambda x: x.coordinates[0]
        ).with_diskcache(
            allow_incomplete=allow_incomplete
        )
        storage_w.save(cv1)

        # let's mess up the order in which we save and
        # include reversed ones as well
        assert (len(storage_w.snapshots) == 2)
        storage_w.trajectories.save(traj[3:])
        assert (len(storage_w.snapshots) == 16)
        storage_w.snapshots.save(traj[1].reversed)
        assert (len(storage_w.snapshots) == 18)
        storage_w.trajectories.save(traj.reversed)
        assert (len(storage_w.snapshots) == 20)

        # this should be ignored for all is saved already
        storage_w.trajectories.save(traj)
        storage_w.close()

        storage_r = paths.AnalysisStorage(fname)
        rcv1 = storage_r.cvs['f1']

        assert(rcv1._store_dict)

        cv_cache = rcv1._store_dict.value_store

        assert (cv_cache.allow_incomplete == allow_incomplete)

        for idx, snap in enumerate(storage_r.trajectories[1]):
            if not allow_incomplete or cv_cache[snap] is not None:
                assert_close_unit(cv_cache[snap], cv1(snap))
                assert_close_unit(
                    cv_cache[snap.reversed], cv1(snap.reversed))

        storage_r.close()

        if os.path.isfile(fname):
            os.remove(fname)
def teardown(self):
    if os.path.isfile(data_filename("openmmengine_test.nc")):
        os.remove(data_filename("openmmengine_test.nc"))
def setUp(self):
    self.mdtraj = md.load(data_filename("ala_small_traj.pdb"))
    self.traj = paths.tools.trajectory_from_mdtraj(self.mdtraj)