Example #1
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_coordinates_reshape():
    coordinates = np.random.randn(10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates)

    with HDF5TrajectoryFile(temp) as f:
        yield lambda: eq(f.root.coordinates[:], coordinates.reshape(1, 10, 3))
        yield lambda: eq(str(f.root.coordinates.attrs['units']), 'nanometers')
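
These snippets are nose-style test generators lifted out of their module, so they rely on module-level names such as `temp`, `eq`, `get_fn`, `assert_raises`, and the `units` alias. A minimal sketch of the setup they assume is below; the helper names come from mdtraj's test suite, and the exact import paths may differ between mdtraj versions.

# Sketch of the module-level fixtures the examples above and below assume.
# `temp` is a scratch HDF5 path shared by the tests; `eq` is mdtraj's
# equality helper for arrays and objects.
import os
import tempfile

import numpy as np
import mdtraj as md
from mdtraj.formats import HDF5TrajectoryFile  # import path may vary by version
from mdtraj.testing import get_fn, eq
from nose.tools import assert_raises
from simtk import unit as units                # used by the unit-handling tests

fd, temp = tempfile.mkstemp(suffix='.h5')
os.close(fd)


def teardown_module(module):
    # remove the scratch trajectory once the module's tests have run
    if os.path.exists(temp):
        os.unlink(temp)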
Example #2
File: test_hdf5.py Project: gkiss/mdtraj
def test_topology():
    top = md.load_pdb(get_fn('native.pdb')).topology

    with HDF5TrajectoryFile(temp, 'w') as f:
        f.topology = top

    with HDF5TrajectoryFile(temp) as f:
        assert f.topology == top
Example #3
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_multiple():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates)
        f.write(coordinates)

    with HDF5TrajectoryFile(temp) as f:
        yield lambda: eq(f.root.coordinates[:],
                         np.vstack((coordinates, coordinates)))
Example #4
File: test_hdf5.py Project: gkiss/mdtraj
def test_read_slice_3():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, alchemicalLambda=np.arange(4))

    with HDF5TrajectoryFile(temp) as f:
        got = f.read(stride=2, atom_indices=np.array([0, 1]))
        yield lambda: eq(got.coordinates, coordinates[::2, [0, 1], :])
        yield lambda: eq(got.alchemicalLambda, np.arange(4)[::2])
Example #5
File: test_hdf5.py Project: gkiss/mdtraj
def test_read_slice_0():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, alchemicalLambda=np.array([1, 2, 3, 4]))

    with HDF5TrajectoryFile(temp) as f:
        got = f.read(n_frames=2)
        yield lambda: eq(got.coordinates, coordinates[:2])
        yield lambda: eq(got.velocities, None)
        yield lambda: eq(got.alchemicalLambda, np.array([1, 2]))
Example #6
File: test_hdf5.py Project: gkiss/mdtraj
def test_constraints():
    c = np.array([(1, 2, 3.5)],
                 dtype=np.dtype([('atom1', np.int32), ('atom2', np.int32),
                                 ('distance', np.float32)]))

    with HDF5TrajectoryFile(temp, 'w') as f:
        f.constraints = c

    with HDF5TrajectoryFile(temp) as f:
        yield lambda: eq(f.constraints, c)
Example #7
File: test_hdf5.py Project: gkiss/mdtraj
def test_append():
    x1 = np.random.randn(10, 5, 3)
    x2 = np.random.randn(8, 5, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(x1)
    with HDF5TrajectoryFile(temp, 'a') as f:
        f.write(x2)

    with HDF5TrajectoryFile(temp) as f:
        eq(f.root.coordinates[:], np.concatenate((x1, x2)))
Example #8
File: test_hdf5.py Project: gkiss/mdtraj
def test_read_slice_1():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates)

    with HDF5TrajectoryFile(temp) as f:
        got = f.read(n_frames=2)
        yield lambda: eq(got.coordinates, coordinates[:2])
        yield lambda: eq(got.velocities, None)

        # the file's read pointer has advanced past the first two frames,
        # so a second read returns the remaining ones
        got = f.read(n_frames=2)
        yield lambda: eq(got.coordinates, coordinates[2:])
        yield lambda: eq(got.velocities, None)
Example #9
File: test_hdf5.py Project: gkiss/mdtraj
def test_read_1():
    coordinates = units.Quantity(np.random.randn(4, 10, 3), units.angstroms)
    velocities = units.Quantity(np.random.randn(4, 10, 3),
                                units.angstroms / units.years)

    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, velocities=velocities)

    with HDF5TrajectoryFile(temp) as f:
        got = f.read()
        yield lambda: eq(got.coordinates,
                         coordinates.value_in_unit(units.nanometers))
        yield lambda: eq(
            got.velocities,
            velocities.value_in_unit(units.nanometers / units.picoseconds))
Example #10
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_inconsistent_2():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, velocities=coordinates)
        # this second write omits the velocities that the first write
        # included; writing a deficient set of fields should raise
        assert_raises(ValueError, lambda: f.write(coordinates))
Example #11
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_inconsistent():
    coordinates = np.random.randn(4, 10, 3)
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates)
        # since the first frames we saved didn't contain velocities, we
        # can't start adding velocities now
        assert_raises(ValueError,
                      lambda: f.write(coordinates, velocities=coordinates))
Example #12
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_units_mismatch():
    velocities = units.Quantity(np.random.randn(4, 10, 3),
                                units.angstroms / units.picosecond)

    with HDF5TrajectoryFile(temp, 'w') as f:
        # writing coordinates that carry units of the wrong dimension
        # (velocity units here) should raise a TypeError
        assert_raises(TypeError, lambda: f.write(coordinates=velocities))
Example #13
File: test_hdf5.py Project: gkiss/mdtraj
def test_attributes():
    constraints = np.zeros(10,
                           dtype=[('atom1', np.int32), ('atom2', np.int32),
                                  ('distance', np.float32)])
    with HDF5TrajectoryFile(temp, 'w') as f:
        f.title = 'mytitle'
        f.reference = 'myreference'
        f.forcefield = 'amber99'
        f.randomState = 'sdf'
        f.application = 'openmm'
        f.constraints = constraints

    with HDF5TrajectoryFile(temp) as g:
        eq(g.title, 'mytitle')
        eq(g.reference, 'myreference')
        eq(g.forcefield, 'amber99')
        eq(g.randomState, 'sdf')
        eq(g.application, 'openmm')
        eq(g.constraints, constraints)
Example #14
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_units():
    "simtk.units are automatically converted into MD units for storage on disk"
    coordinates = units.Quantity(np.random.randn(4, 10, 3), units.angstroms)
    velocities = units.Quantity(np.random.randn(4, 10, 3),
                                units.angstroms / units.year)

    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, velocities=velocities)

    with HDF5TrajectoryFile(temp) as f:
        yield lambda: eq(f.root.coordinates[:],
                         coordinates.value_in_unit(units.nanometers))
        yield lambda: eq(str(f.root.coordinates.attrs['units']), 'nanometers')

        yield lambda: eq(
            f.root.velocities[:],
            velocities.value_in_unit(units.nanometers / units.picosecond))
        yield lambda: eq(str(f.root.velocities.attrs['units']),
                         'nanometers/picosecond')
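
The write path converts angstrom-based quantities to mdtraj's storage units (nanometers and picoseconds) before writing, which is what the assertions above check. A quick standalone sanity check of the conversion factor, assuming `simtk.unit` is importable:

# 1 angstrom == 0.1 nanometers; value_in_unit converts and strips the units
import numpy as np
from simtk import unit as units

x = units.Quantity(np.ones((2, 5, 3)), units.angstroms)
assert np.allclose(x.value_in_unit(units.nanometers), 0.1)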
Example #15
File: test_hdf5.py Project: gkiss/mdtraj
def test_write_units2():
    from mdtraj.utils import unit as units
    coordinates = units.quantity.Quantity(np.random.randn(4, 10, 3),
                                          units.unit_definitions.angstroms)
    velocities = units.quantity.Quantity(
        np.random.randn(4, 10, 3),
        units.unit_definitions.angstroms / units.unit_definitions.year)

    with HDF5TrajectoryFile(temp, 'w') as f:
        f.write(coordinates, velocities=velocities)

    with HDF5TrajectoryFile(temp) as f:
        yield lambda: eq(
            f.root.coordinates[:],
            coordinates.value_in_unit(units.unit_definitions.nanometers))
        yield lambda: eq(str(f.root.coordinates.attrs['units']), 'nanometers')

        yield lambda: eq(
            f.root.velocities[:],
            velocities.value_in_unit(units.unit_definitions.nanometers /
                                     units.unit_definitions.picosecond))
        yield lambda: eq(str(f.root.velocities.attrs['units']),
                         'nanometers/picosecond')
Example #16
def test_reporter():
    tempdir = os.path.join(dir, 'test1')
    os.makedirs(tempdir)

    pdb = PDBFile(get_fn('native.pdb'))
    forcefield = ForceField('amber99sbildn.xml', 'amber99_obc.xml')
    # NO PERIODIC BOUNDARY CONDITIONS
    system = forcefield.createSystem(pdb.topology, nonbondedMethod=CutoffNonPeriodic,
        nonbondedCutoff=1.0*nanometers, constraints=HBonds, rigidWater=True)
    integrator = LangevinIntegrator(300*kelvin, 1.0/picoseconds, 2.0*femtoseconds)
    integrator.setConstraintTolerance(0.00001)

    platform = Platform.getPlatformByName('Reference')
    simulation = Simulation(pdb.topology, system, integrator, platform)
    simulation.context.setPositions(pdb.positions)

    simulation.context.setVelocitiesToTemperature(300*kelvin)

    hdf5file = os.path.join(tempdir, 'traj.h5')
    ncfile = os.path.join(tempdir, 'traj.nc')
    dcdfile = os.path.join(tempdir, 'traj.dcd')

    reporter = HDF5Reporter(hdf5file, 2, coordinates=True, time=True,
        cell=True, potentialEnergy=True, kineticEnergy=True, temperature=True,
        velocities=True)
    reporter2 = NetCDFReporter(ncfile, 2, coordinates=True, time=True, cell=True)
    reporter3 = DCDReporter(dcdfile, 2)

    simulation.reporters.append(reporter)
    simulation.reporters.append(reporter2)
    simulation.reporters.append(reporter3)
    simulation.step(100)

    reporter.close()
    reporter2.close()

    with HDF5TrajectoryFile(hdf5file) as f:
        got = f.read()
        yield lambda: eq(got.temperature.shape, (50,))
        yield lambda: eq(got.potentialEnergy.shape, (50,))
        yield lambda: eq(got.kineticEnergy.shape, (50,))
        yield lambda: eq(got.coordinates.shape, (50, 22, 3))
        yield lambda: eq(got.velocities.shape, (50, 22, 3))
        yield lambda: eq(got.cell_lengths, None)
        yield lambda: eq(got.cell_angles, None)
        yield lambda: eq(got.time, 0.002*2*(1+np.arange(50)))
        yield lambda: f.topology == md.load(get_fn('native.pdb')).top

    with NetCDFTrajectoryFile(ncfile) as f:
        xyz, time, cell_lengths, cell_angles = f.read()
        yield lambda: eq(cell_lengths, None)
        yield lambda: eq(cell_angles, None)
        yield lambda: eq(time, 0.002*2*(1+np.arange(50)))

    hdf5_traj = md.load(hdf5file)
    dcd_traj = md.load(dcdfile, top=get_fn('native.pdb'))
    netcdf_traj = md.load(ncfile, top=get_fn('native.pdb'))

    # we don't have to convert units here, because md.load already
    # handles that
    assert hdf5_traj.unitcell_vectors is None
    yield lambda: eq(hdf5_traj.xyz, netcdf_traj.xyz)
    yield lambda: eq(hdf5_traj.unitcell_vectors, netcdf_traj.unitcell_vectors)
    yield lambda: eq(hdf5_traj.time, netcdf_traj.time)

    yield lambda: eq(dcd_traj.xyz, hdf5_traj.xyz)
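
The reporter test leaves its imports implicit; a plausible import block for it is sketched below, assuming the simtk-era OpenMM packaging (newer OpenMM releases expose the same names under `openmm` rather than `simtk.openmm`). Module-level names such as `dir` and `get_fn` are test fixtures not shown here.

# Hypothetical imports assumed by test_reporter; adjust to your OpenMM version.
import os
import numpy as np
import mdtraj as md
from mdtraj.formats import HDF5TrajectoryFile, NetCDFTrajectoryFile
from mdtraj.reporters import HDF5Reporter, NetCDFReporter, DCDReporter
from mdtraj.testing import get_fn, eq
from simtk.openmm import LangevinIntegrator, Platform
from simtk.openmm.app import (PDBFile, ForceField, Simulation,
                              CutoffNonPeriodic, HBonds)
from simtk.unit import kelvin, picoseconds, femtoseconds, nanometers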
Example #17
def test_reporter():
    # stolen/modified from MDTraj/tests/test_reporter.py ... thanks rmcgibbo

    tempdir = os.path.join(testdir, 'test_reporter')
    os.makedirs(tempdir)

    pdb = PDBFile(ref_file('ala2.pdb'))
    forcefield = ForceField('amber99sbildn.xml', 'amber99_obc.xml')
    system = forcefield.createSystem(pdb.topology,
                                     nonbondedMethod=CutoffNonPeriodic,
                                     nonbondedCutoff=1.0 * nanometers,
                                     constraints=HBonds,
                                     rigidWater=True)
    integrator = LangevinIntegrator(300 * kelvin, 1.0 / picoseconds,
                                    2.0 * femtoseconds)
    integrator.setConstraintTolerance(0.00001)

    platform = Platform.getPlatformByName('Reference')
    simulation = Simulation(pdb.topology, system, integrator, platform)
    simulation.context.setPositions(pdb.positions)

    simulation.context.setVelocitiesToTemperature(300 * kelvin)

    reffile = os.path.join(tempdir, 'traj.h5')
    testfile = os.path.join(tempdir, 'traj-test.h5')

    ref_reporter = HDF5Reporter(reffile,
                                2,
                                coordinates=True,
                                time=True,
                                cell=True,
                                potentialEnergy=True,
                                kineticEnergy=True,
                                temperature=True,
                                velocities=True)
    test_reporter = sample.MCReporter(testfile,
                                      2,
                                      coordinates=True,
                                      time=True,
                                      cell=True,
                                      potentialEnergy=True,
                                      kineticEnergy=True,
                                      temperature=True,
                                      velocities=True)

    simulation.reporters.append(ref_reporter)
    simulation.reporters.append(test_reporter)
    simulation.step(100)

    ref_reporter.close()
    test_reporter.close()

    with HDF5TrajectoryFile(testfile) as f:
        got = f.read()
        yield lambda: eq(got.temperature.shape, (50, ))
        yield lambda: eq(got.potentialEnergy.shape, (50, ))
        yield lambda: eq(got.kineticEnergy.shape, (50, ))
        yield lambda: eq(got.coordinates.shape, (50, 22, 3))
        yield lambda: eq(got.velocities.shape, (50, 22, 3))
        yield lambda: eq(got.time, 0.002 * 2 * (1 + np.arange(50)))
        yield lambda: f.topology == mdtraj.load(ref_file('ala2.pdb')).top

    ref_traj = mdtraj.load(reffile)
    test_traj = mdtraj.load(testfile)

    yield lambda: eq(ref_traj.xyz, test_traj.xyz)
    yield lambda: eq(ref_traj.unitcell_vectors, test_traj.unitcell_vectors)
    yield lambda: eq(ref_traj.time, test_traj.time)

Example #18
def test(self):
    "Test the read speed of the hdf5 code (10000 frames, 100 atoms)"
    with HDF5TrajectoryFile(self.fn) as f:
        f.read()
Example #19
def setUp(self):
    super(TestHDF5Read, self).setUp()
    with HDF5TrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
        f.write(self.xyz)
Example #20
def test(self):
    "Test the write speed of the hdf5 code (10000 frames, 100 atoms)"
    with HDF5TrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
        f.write(self.xyz)
Example #21
File: test_hdf5.py Project: gkiss/mdtraj
def test_do_overwrite():
    with open(temp, 'w') as f:
        f.write('a')

    with HDF5TrajectoryFile(temp, 'w', force_overwrite=True) as f:
        f.write(np.random.randn(10, 5, 3))
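
Note: the `yield lambda: ...` pattern used throughout these examples is nose's test-generator protocol. pytest dropped support for yield tests in version 4, so running these snippets today requires nose or rewriting the yields as plain assertions.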