def _overlayPoints(points1, points2):
    """Given two sets of points, determine the translation and rotation that matches them as closely as possible.

    This is based on W. Kabsch, Acta Cryst., A34, pp. 828-829 (1978)."""
    if len(points1) == 0:
        return (Vec3(0, 0, 0), np.identity(3), Vec3(0, 0, 0))
    if len(points1) == 1:
        return (points1[0], np.identity(3), -1*points2[0])

    # Compute centroids.
    center1 = unit.sum(points1)/float(len(points1))
    center2 = unit.sum(points2)/float(len(points2))

    # Compute R matrix.
    R = np.zeros((3, 3))
    for p1, p2 in zip(points1, points2):
        x = p1-center1
        y = p2-center2
        for i in range(3):
            for j in range(3):
                R[i][j] += y[i]*x[j]

    # Use an SVD to compute the rotation matrix.
    (u, s, v) = lin.svd(R)
    return (-1*center2, np.dot(u, v).transpose(), center1)
def end_to_end_CA_distance(self, topology, positions):
    residues = list(topology.residues())
    # get the index of the first and last alpha carbons
    i1 = [a.index for a in residues[0].atoms() if a.name == 'CA'][0]
    i2 = [a.index for a in residues[-1].atoms() if a.name == 'CA'][0]
    # get the current distance between the two alpha carbons
    return i1, i2, sqrt(sum((positions[i1] - positions[i2])**2))
def _overlayPoints(points1, points2):
    """Given two sets of points, determine the translation and rotation that matches them as closely as possible.

    Parameters
    ----------
    points1 (numpy array of simtk.unit.Quantity with units compatible with distance) - reference set of coordinates
    points2 (numpy array of simtk.unit.Quantity with units compatible with distance) - set of coordinates to be rotated

    Returns
    -------
    translate2 - vector to translate points2 by in order to center it
    rotate - rotation matrix to apply to centered points2 to map it on to points1
    center1 - center of points1

    Notes
    -----
    This is based on W. Kabsch, Acta Cryst., A34, pp. 828-829 (1978).

    """
    if len(points1) == 0:
        return (mm.Vec3(0, 0, 0), np.identity(3), mm.Vec3(0, 0, 0))
    if len(points1) == 1:
        return (points1[0], np.identity(3), -1*points2[0])

    # Compute centroids.
    center1 = unit.sum(points1)/float(len(points1))
    center2 = unit.sum(points2)/float(len(points2))

    # Compute R matrix.
    R = np.zeros((3, 3))
    for p1, p2 in zip(points1, points2):
        x = p1-center1
        y = p2-center2
        for i in range(3):
            for j in range(3):
                R[i][j] += y[i]*x[j]

    # Use an SVD to compute the rotation matrix.
    (u, s, v) = lin.svd(R)
    return (-1*center2, np.dot(u, v).transpose(), center1)
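# Usage note (a minimal sketch, not from the original source): the triple returned by
# _overlayPoints is typically applied by translating points2 to its centroid, rotating it,
# and then shifting it onto the centroid of points1. The helper name _applyOverlay and the
# nanometer bookkeeping below are illustrative assumptions.
import numpy as np
import simtk.openmm as mm
import simtk.unit as unit

def _applyOverlay(points2, translate2, rotate, center1):
    """Map points2 onto the frame of points1 using the result of _overlayPoints."""
    mapped = []
    for p in points2:
        centered = (p + translate2).value_in_unit(unit.nanometer)                    # center points2
        mapped.append(mm.Vec3(*np.dot(rotate, centered))*unit.nanometer + center1)   # rotate, then shift onto points1
    return mapped

# (translate2, rotate, center1) = _overlayPoints(points1, points2)
# fitted = _applyOverlay(points2, translate2, rotate, center1)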
def __init__(self, structure):
    """Create a new PDBFixer to fix problems in a PDB file.

    Parameters:
     - structure (PdbStructure) the starting PDB structure containing problems to be fixed
    """
    self.structure = structure
    self.pdb = app.PDBFile(structure)
    self.topology = self.pdb.topology
    self.positions = self.pdb.positions
    self.centroid = unit.sum(self.positions)/len(self.positions)
    self.structureChains = list(self.structure.iter_chains())

    # Load the templates.
    self.templates = {}
    templatesPath = os.path.join(os.path.dirname(__file__), 'templates')
    for file in os.listdir(templatesPath):
        templatePdb = app.PDBFile(os.path.join(templatesPath, file))
        name = next(templatePdb.topology.residues()).name
        self.templates[name] = templatePdb
def testUnitMathModule(self):
    """ Tests the unit_math functions on Quantity objects """
    self.assertEqual(u.sqrt(1.0*u.kilogram*u.joule), 1.0*u.kilogram*u.meter/u.second)
    self.assertEqual(u.sqrt(1.0*u.kilogram*u.calorie), math.sqrt(4.184)*u.kilogram*u.meter/u.second)
    self.assertEqual(u.sqrt(9), 3)  # Test on a scalar
    self.assertEqual(u.sin(90*u.degrees), 1)
    self.assertEqual(u.sin(math.pi/2*u.radians), 1)
    self.assertEqual(u.sin(math.pi/2), 1)
    self.assertEqual(u.cos(180*u.degrees), -1)
    self.assertEqual(u.cos(math.pi*u.radians), -1)
    self.assertEqual(u.cos(math.pi), -1)
    self.assertAlmostEqual(u.tan(45*u.degrees), 1)
    self.assertAlmostEqual(u.tan(math.pi/4*u.radians), 1)
    self.assertAlmostEqual(u.tan(math.pi/4), 1)
    acos = u.acos(1.0)
    asin = u.asin(1.0)
    atan = u.atan(1.0)
    self.assertTrue(u.is_quantity(acos))
    self.assertTrue(u.is_quantity(asin))
    self.assertTrue(u.is_quantity(atan))
    self.assertEqual(acos.unit, u.radians)
    self.assertEqual(asin.unit, u.radians)
    self.assertEqual(atan.unit, u.radians)
    self.assertEqual(acos.value_in_unit(u.degrees), 0)
    self.assertEqual(acos / u.radians, 0)
    self.assertEqual(asin.value_in_unit(u.degrees), 90)
    self.assertEqual(asin / u.radians, math.pi/2)
    self.assertAlmostEqual(atan.value_in_unit(u.degrees), 45)
    self.assertAlmostEqual(atan / u.radians, math.pi/4)
    # Check some sequence maths
    seq = [1, 2, 3, 4] * u.meters
    self.assertEqual(u.sum(seq), 10*u.meters)
    self.assertEqual(u.dot(seq, seq), (1+4+9+16)*u.meters**2)
    self.assertEqual(u.norm(seq), math.sqrt(30)*u.meters)
def _computeResidueCenter(self, residue):
    """Compute the centroid of a residue."""
    return unit.sum([self.pdb.positions[atom.index] for atom in residue.atoms()])/len(list(residue.atoms()))
def test_velocity_assignment(mpicomm=None, verbose=True):
    """
    Test that the Maxwell-Boltzmann velocity assignment subroutine produces the correct distribution, raising an exception if this test fails.

    """

    # Stop here if not root node.
    if mpicomm and (mpicomm.rank != 0): return

    if verbose: print "Testing Maxwell-Boltzmann velocity assignment: ",

    # Make a list of all test system constructors.
    import simtk.pyopenmm.extras.testsystems as testsystems

    # Test parameters
    temperature = 298.0 * units.kelvin  # test temperature
    kT = kB * temperature  # thermal energy
    ntrials = 1000  # number of test trials
    systems_to_test = ['HarmonicOscillator', 'HarmonicOscillatorArray', 'AlanineDipeptideImplicit']  # systems to test

    for system_name in systems_to_test:
        #print '*' * 80
        #print system_name

        # Create system.
        constructor = getattr(testsystems, system_name)
        [system, coordinates] = constructor()

        # Create temporary filename.
        import tempfile  # use a temporary file for testing
        file = tempfile.NamedTemporaryFile()
        store_filename = file.name

        # Create repex instance.
        states = [ThermodynamicState(system, temperature=temperature)]
        simulation = repex.ReplicaExchange(states=states, coordinates=coordinates, store_filename=store_filename)

        # Create integrator and context.
        natoms = system.getNumParticles()

        velocity_trials = numpy.zeros([ntrials, natoms, 3])
        kinetic_energy_trials = numpy.zeros([ntrials])

        for trial in range(ntrials):
            velocities = simulation._assign_Maxwell_Boltzmann_velocities(system, temperature)
            kinetic_energy = 0.5 * units.sum(units.sum(system.masses * velocities**2))
            velocity_trials[trial,:,:] = velocities / (units.nanometers / units.picosecond)
            kinetic_energy_trials[trial] = kinetic_energy / units.kilocalories_per_mole

        velocity_mean = velocity_trials.mean(0)
        velocity_stderr = velocity_trials.std(0) / numpy.sqrt(ntrials)

        kinetic_analytical = (3.0/2.0) * natoms * kT / units.kilocalories_per_mole
        kinetic_mean = kinetic_energy_trials.mean()
        kinetic_error = kinetic_mean - kinetic_analytical
        kinetic_stderr = kinetic_energy_trials.std() / numpy.sqrt(ntrials)

        # Test if violations exceed tolerance.
        MAX_SIGMA = 6.0  # maximum number of standard errors allowed
        if numpy.any(numpy.abs(kinetic_error / kinetic_stderr) > MAX_SIGMA):
            print "analytical kinetic energy"
            print kinetic_analytical
            print "mean kinetic energy (kcal/mol)"
            print kinetic_mean
            print "difference (kcal/mol)"
            print kinetic_mean - kinetic_analytical
            print "stderr (kcal/mol)"
            print kinetic_stderr
            print "nsigma"
            print (kinetic_mean - kinetic_analytical) / kinetic_stderr
            raise Exception("Mean kinetic energy exceeds error tolerance of %.1f standard errors." % MAX_SIGMA)
        if numpy.any(numpy.abs(velocity_mean / velocity_stderr) > MAX_SIGMA):
            print "mean velocity (nm/ps)"
            print velocity_mean
            print "stderr (nm/ps)"
            print velocity_stderr
            print "nsigma"
            print velocity_mean / velocity_stderr
            raise Exception("Mean velocity exceeds error tolerance of %.1f standard errors." % MAX_SIGMA)

    if verbose: print "PASSED"
    return
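# For reference, a minimal sketch (not the repex implementation) of what a
# Maxwell-Boltzmann velocity assignment routine does: each Cartesian velocity
# component of particle i is drawn from a zero-mean Gaussian with standard
# deviation sqrt(kT/m_i). The function name and the unit handling below are
# illustrative assumptions.
import numpy as np
import simtk.unit as units

def assign_maxwell_boltzmann_velocities(system, temperature):
    """Return an (natoms, 3) Quantity of velocities sampled at the given temperature."""
    kB = units.BOLTZMANN_CONSTANT_kB * units.AVOGADRO_CONSTANT_NA  # molar Boltzmann constant (gas constant R)
    kT = kB * temperature                                          # thermal energy per mole
    velocity_unit = units.nanometers / units.picosecond
    natoms = system.getNumParticles()
    velocities = np.zeros((natoms, 3))
    for i in range(natoms):
        mass = system.getParticleMass(i)                           # in daltons (g/mol)
        # kT/m has units of velocity squared; its square root is the per-component std dev.
        sigma = units.sqrt((kT / mass).in_units_of(velocity_unit**2)).value_in_unit(velocity_unit)
        velocities[i, :] = sigma * np.random.standard_normal(3)
    return units.Quantity(velocities, velocity_unit)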
def _calculateElongation(self, state):
    positions = state.getPositions(asNumpy=True)
    displacement = positions[self._index1] - positions[self._index2]
    distance = unit.sqrt(unit.sum(displacement**2))
    return distance.value_in_unit(unit.nanometers)
def __init__(self, filename=None, file=None, url=None, pdbid=None):
    """Create a new PDBFixer instance to fix problems in a PDB file.

    Parameters
    ----------
    filename : str, optional, default=None
        A filename specifying the file from which the PDB file is to be read.
    file : file, optional, default=None
        A file-like object from which the PDB file is to be read.
        The file is not closed after reading.
    url : str, optional, default=None
        A URL specifying the internet location from which the PDB file contents should be retrieved.
    pdbid : str, optional, default=None
        A four-letter PDB code specifying the structure to be retrieved from the RCSB.

    Notes
    -----
    Only one of filename, file, url, or pdbid may be specified or an exception will be thrown.

    Examples
    --------

    Start from a file object.

    >>> pdbid = '1VII'
    >>> url = 'http://www.rcsb.org/pdb/files/%s.pdb' % pdbid
    >>> file = urlopen(url)
    >>> fixer = PDBFixer(file=file)

    Start from a filename.

    >>> filename = 'test.pdb'
    >>> file = urlopen(url)
    >>> outfile = open(filename, 'w')
    >>> outfile.write(file.read())
    >>> outfile.close()
    >>> fixer = PDBFixer(filename=filename)

    Start from a URL.

    >>> fixer = PDBFixer(url=url)

    Start from a PDB code.

    >>> fixer = PDBFixer(pdbid=pdbid)

    """

    # Check to make sure only one option has been specified.
    if bool(filename) + bool(file) + bool(url) + bool(pdbid) != 1:
        raise Exception("Exactly one option [filename, file, url, pdbid] must be specified.")

    if filename:
        # A local file has been specified.
        file = open(filename, 'r')
        structure = PdbStructure(file)
        file.close()
    elif file:
        # A file-like object has been specified.
        structure = PdbStructure(file)
    elif url:
        # A URL has been specified.
        file = urlopen(url)
        structure = PdbStructure(file)
        file.close()
    elif pdbid:
        # A PDB id has been specified.
        url = 'http://www.rcsb.org/pdb/files/%s.pdb' % pdbid
        file = urlopen(url)
        # Read contents all at once and split into lines, since urlopen doesn't like it when we read one line at a time over the network.
        contents = file.read()
        lines = contents.split('\n')
        file.close()
        structure = PdbStructure(lines)

    # Check the structure has some atoms in it.
    atoms = list(structure.iter_atoms())
    if len(atoms) == 0:
        raise Exception("Structure contains no atoms.")

    self.structure = structure
    self.pdb = app.PDBFile(structure)
    self.topology = self.pdb.topology
    self.positions = self.pdb.positions
    self.centroid = unit.sum(self.positions)/len(self.positions)
    self.structureChains = list(self.structure.iter_chains())

    # Load the templates.
    self.templates = {}
    templatesPath = os.path.join(os.path.dirname(__file__), 'templates')
    for file in os.listdir(templatesPath):
        templatePdb = app.PDBFile(os.path.join(templatesPath, file))
        name = next(templatePdb.topology.residues()).name
        self.templates[name] = templatePdb

    return
def __init__(self, filename=None, file=None, url=None, pdbid=None):
    """Create a new PDBFixer instance to fix problems in a PDB file.

    Parameters
    ----------
    filename : str, optional, default=None
        A filename specifying the file from which the PDB file is to be read.
    file : file, optional, default=None
        A file-like object from which the PDB file is to be read.
        The file is not closed after reading.
    url : str, optional, default=None
        A URL specifying the internet location from which the PDB file contents should be retrieved.
    pdbid : str, optional, default=None
        A four-letter PDB code specifying the structure to be retrieved from the RCSB.

    Notes
    -----
    Only one of filename, file, url, or pdbid may be specified or an exception will be thrown.

    Examples
    --------

    Start from a file object.

    >>> pdbid = '1VII'
    >>> url = 'http://www.rcsb.org/pdb/files/%s.pdb' % pdbid
    >>> file = urlopen(url)
    >>> fixer = PDBFixer(file=file)

    Start from a filename.

    >>> filename = 'test.pdb'
    >>> file = urlopen(url)
    >>> outfile = open(filename, 'w')
    >>> outfile.write(file.read())
    >>> outfile.close()
    >>> fixer = PDBFixer(filename=filename)

    Start from a URL.

    >>> fixer = PDBFixer(url=url)

    Start from a PDB code.

    >>> fixer = PDBFixer(pdbid=pdbid)

    """

    # Check to make sure only one option has been specified.
    if bool(filename) + bool(file) + bool(url) + bool(pdbid) != 1:
        raise Exception("Exactly one option [filename, file, url, pdbid] must be specified.")

    self.source = None
    if filename:
        self.source = filename
        # A local file has been specified.
        file = open(filename, 'r')
        structure = PdbStructure(file)
        file.close()
    elif file:
        # A file-like object has been specified.
        structure = PdbStructure(file)
    elif url:
        self.source = url
        # A URL has been specified.
        file = urlopen(url)
        structure = PdbStructure(file)
        file.close()
    elif pdbid:
        # A PDB id has been specified.
        url = 'http://www.rcsb.org/pdb/files/%s.pdb' % pdbid
        self.source = url
        file = urlopen(url)
        # Read contents all at once and split into lines, since urlopen doesn't like it when we read one line at a time over the network.
        contents = file.read()
        lines = contents.split('\n')
        file.close()
        structure = PdbStructure(lines)

    # Check the structure has some atoms in it.
    atoms = list(structure.iter_atoms())
    if len(atoms) == 0:
        raise Exception("Structure contains no atoms.")

    self.structure = structure
    self.pdb = app.PDBFile(structure)
    self.topology = self.pdb.topology
    self.positions = self.pdb.positions
    self.centroid = unit.sum(self.positions)/len(self.positions)
    self.structureChains = list(self.structure.iter_chains())

    # Load the templates.
    self.templates = {}
    templatesPath = os.path.join(os.path.dirname(__file__), 'templates')
    for file in os.listdir(templatesPath):
        templatePdb = app.PDBFile(os.path.join(templatesPath, file))
        name = next(templatePdb.topology.residues()).name
        self.templates[name] = templatePdb

    return
def createRigidBodies(system, positions, bodies):
    """Modify a System to turn specified sets of particles into rigid bodies.

    For every rigid body, four particles are selected as "real" particles whose positions are integrated.
    Constraints are added between them to make them move as a rigid body.  All other particles in the body
    are then turned into virtual sites whose positions are computed based on the "real" particles.

    Because virtual sites are massless, the mass properties of the rigid bodies will be slightly different
    from the corresponding sets of particles in the original system.  The masses of the non-virtual particles
    are chosen to guarantee that the total mass and center of mass of each rigid body exactly match those of
    the original particles.  The moment of inertia will be similar to that of the original particles, but
    not identical.

    Care is needed when using constraints, since virtual particles cannot participate in constraints.  If the
    input system includes any constraints, this function will automatically remove ones that connect two
    particles in the same rigid body.  But if there is a constraint between a particle in a rigid body and
    another particle not in that body, it will likely lead to an exception when you try to create a context.

    Parameters:
     - system (System) the System to modify
     - positions (list) the positions of all particles in the system
     - bodies (list) each element of this list defines one rigid body.  Each element should itself be a list
       of the indices of all particles that make up that rigid body.
    """
    # Remove any constraints involving particles in rigid bodies.
    for i in range(system.getNumConstraints()-1, -1, -1):
        p1, p2, distance = system.getConstraintParameters(i)
        if (any(p1 in body and p2 in body for body in bodies)):
            system.removeConstraint(i)

    # Loop over rigid bodies and process them.
    for particles in bodies:
        if len(particles) < 5:
            # All the particles will be "real" particles.
            realParticles = particles
            realParticleMasses = [system.getParticleMass(i) for i in particles]
        else:
            # Select four particles to use as the "real" particles.  All others will be virtual sites.
            pos = [positions[i] for i in particles]
            mass = [system.getParticleMass(i) for i in particles]
            cm = unit.sum([p*m for p, m in zip(pos, mass)]) / unit.sum(mass)
            r = [p-cm for p in pos]
            avgR = unit.sqrt(unit.sum([unit.dot(x, x) for x in r])/len(particles))
            rank = sorted(range(len(particles)), key=lambda i: abs(unit.norm(r[i])-avgR))
            for p in combinations(rank, 4):
                # Select masses for the "real" particles.  If any is negative, reject this set of particles
                # and keep going.
                matrix = np.zeros((4, 4))
                for i in range(4):
                    particleR = r[p[i]].value_in_unit(unit.nanometers)
                    matrix[0][i] = particleR[0]
                    matrix[1][i] = particleR[1]
                    matrix[2][i] = particleR[2]
                    matrix[3][i] = 1.0
                rhs = np.array([0.0, 0.0, 0.0, unit.sum(mass).value_in_unit(unit.amu)])
                weights = lin.solve(matrix, rhs)
                if all(w > 0.0 for w in weights):
                    # We have a good set of particles.
                    realParticles = [particles[i] for i in p]
                    realParticleMasses = [float(w) for w in weights]*unit.amu
                    break

        # Set particle masses.
        for i, m in zip(realParticles, realParticleMasses):
            system.setParticleMass(i, m)

        # Add constraints between the real particles.
        for p1, p2 in combinations(realParticles, 2):
            distance = unit.norm(positions[p1]-positions[p2])
            key = (min(p1, p2), max(p1, p2))
            system.addConstraint(p1, p2, distance)

        # Select which three particles to use for defining virtual sites.
        bestNorm = 0
        for p1, p2, p3 in combinations(realParticles, 3):
            d12 = (positions[p2]-positions[p1]).value_in_unit(unit.nanometer)
            d13 = (positions[p3]-positions[p1]).value_in_unit(unit.nanometer)
            crossNorm = unit.norm((d12[1]*d13[2]-d12[2]*d13[1], d12[2]*d13[0]-d12[0]*d13[2], d12[0]*d13[1]-d12[1]*d13[0]))
            if crossNorm > bestNorm:
                bestNorm = crossNorm
                vsiteParticles = (p1, p2, p3)

        # Create virtual sites.
        d12 = (positions[vsiteParticles[1]]-positions[vsiteParticles[0]]).value_in_unit(unit.nanometer)
        d13 = (positions[vsiteParticles[2]]-positions[vsiteParticles[0]]).value_in_unit(unit.nanometer)
        cross = mm.Vec3(d12[1]*d13[2]-d12[2]*d13[1], d12[2]*d13[0]-d12[0]*d13[2], d12[0]*d13[1]-d12[1]*d13[0])
        matrix = np.zeros((3, 3))
        for i in range(3):
            matrix[i][0] = d12[i]
            matrix[i][1] = d13[i]
            matrix[i][2] = cross[i]
        for i in particles:
            if i not in realParticles:
                system.setParticleMass(i, 0)
                rhs = np.array((positions[i]-positions[vsiteParticles[0]]).value_in_unit(unit.nanometer))
                weights = lin.solve(matrix, rhs)
                system.setVirtualSite(i, mm.OutOfPlaneSite(vsiteParticles[0], vsiteParticles[1], vsiteParticles[2], weights[0], weights[1], weights[2]))
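# A minimal usage sketch (assumed setup, not from the original source): turn every
# water molecule of a System into a rigid body. The input file name 'water_box.pdb'
# and the per-residue grouping are illustrative assumptions.
import simtk.openmm as mm
import simtk.openmm.app as app
import simtk.unit as unit

pdb = app.PDBFile('water_box.pdb')              # hypothetical solvent box
forcefield = app.ForceField('tip3p.xml')
system = forcefield.createSystem(pdb.topology)  # any constraints inside a body are removed by createRigidBodies

# One rigid body per residue (here, one per water molecule).
bodies = [[atom.index for atom in res.atoms()] for res in pdb.topology.residues()]
createRigidBodies(system, pdb.positions, bodies)

integrator = mm.LangevinIntegrator(300*unit.kelvin, 1.0/unit.picosecond, 2.0*unit.femtoseconds)
simulation = app.Simulation(pdb.topology, system, integrator)
simulation.context.setPositions(pdb.positions)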