Example no. 1
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """     

        if self.architecture == 'monoprocessor':
            t = self.trajectory            
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')
            
        comsUniverse = t.universe.__copy__()
        
        comsUniverse.removeObject(comsUniverse.objectList()[:])

        orderedAtoms = sorted(t.universe.atomList(), key = operator.attrgetter('index'))
        groups = [Collection([orderedAtoms[ind] for ind in g]) for g in self.group]

        comp = 1
        for g in groups:
            
            comAtom = Atom('H', name = 'COM'+str(comp))
            comAtom._mass = g.mass()
            comsUniverse.addObject(comAtom)
            comp += 1
                                                            
        # outputFile is the output COM trajectory.
        outputFile = Trajectory(comsUniverse, self.output, "w") 
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime()
        
        # Each time |snapshot| is called, the universe contents is flushed into the output file.
        snapshot = SnapshotGenerator(comsUniverse,\
                                     actions = [TrajectoryOutput(outputFile, ["configuration","time"], 0, None, 1)])

        # Loop over the output frames.
        for comp in range(self.nFrames):
            
            frameIndex = self.frameIndexes[comp]
            
            t.universe.setFromTrajectory(t, frameIndex)
            
            comsUniverse.setCellParameters(t.universe.cellParameters())
            
            aComp = 0
            for at in comsUniverse.atomList():
                
                at.setPosition(self.comsTrajectory[frameIndex][aComp])
                aComp += 1
                
            snapshot(data = {'time': self.times[comp]})
        
        # The output COM trajectory is closed.
        outputFile.close()
        
        self.toPlot = None
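
The method above follows the generic MMTK write recipe that recurs throughout these examples: open a Trajectory in 'w' mode, attach a SnapshotGenerator with a TrajectoryOutput action, call snapshot() once per frame, then close the file. A minimal self-contained sketch of that recipe (the water molecule, frame count, and file name below are placeholders, not taken from the source):

from MMTK import InfiniteUniverse, Molecule
from MMTK.Trajectory import Trajectory, SnapshotGenerator, TrajectoryOutput

universe = InfiniteUniverse()
universe.addObject(Molecule('water'))          # placeholder system

trajectory = Trajectory(universe, "sketch.nc", "w", "write-pattern sketch")
snapshot = SnapshotGenerator(universe,
                             actions = [TrajectoryOutput(trajectory,
                                                         ["configuration", "time"],
                                                         0, None, 1)])
for frame in range(10):
    # ... update the universe configuration here ...
    snapshot(data = {'time': 1.0*frame})       # flush the current state to the file
trajectory.close()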
Example no. 2
	def __init__(self, netcdfPath, startFrame, endFrame):
		from MMTK.Trajectory import Trajectory
		replyobj.status("Reading NetCDF file\n", blankAfter=0)
		try:
			self.trajectory = Trajectory(None, netcdfPath)
		finally:
			replyobj.status("Done reading NetCDF file\n")

		replyobj.status("Processing trajectory\n", blankAfter=0)

		self.atomNames = []
		self.elements = []
		self.resNames = []
		self.atomIndices = {}
		self.bonds = []
		self.ipres = [1]

		from chimera import Element
		univ = self.trajectory.universe
		for i, a in enumerate(univ.atomList()):
			self.atomIndices[a] = i
			self.atomNames.append(a.name)
			self.elements.append(Element(a.getAtomProperty(a,
								"symbol")))
		for obj in univ:
			self._processObj(obj)
		delattr(self, "atomIndices")
		self.ipres.pop()
		
		self.startFrame = startFrame
		self.endFrame = endFrame

		self.name = os.path.basename(netcdfPath)

		replyobj.status("Done processing trajectory\n")
    def openTrajectory(self, event=None):
        """
        This method is called when the user clicks on the 'Browse' button of the trajectory visualization dialog.
        It opens a file browser. After the file selection, some of the dialog widgets are updated with the information
        coming from the loaded trajectory.
        """

        # Case where the user enters a file name directly in the entry widget without using the browser.
        if event is not None:
            if event.widget == self.fileBrowser.entry:
                filename = self.fileBrowser.getValue()
            else:
                return

        else:
            # The name of the NetCDF file to load.
            filename = askopenfilename(parent = self,\
                                       filetypes = [('NetCDF file','*.nc')],\
                                       initialdir = PREFERENCES['trajfile_path'])

        # The file must exist.
        if filename:
            try:
                # The trajectory is loaded.
                self.trajectory = Trajectory(None, filename, 'r')
            except IOError:
                raise Error('Can not read the trajectory.')
            else:
                # The control variables are updated with the information about the loaded trajectory.
                self.fileBrowser.setValue(filename)
                self.selectedStepEntry.setValue('1')

        return 'break'
Example no. 4
    def writeTrajectory(self, trajectory_name, block_size=1):
        trajectory = Trajectory(self.universe, trajectory_name, 'w',
                                self.title, block_size=block_size)
        actions = [TrajectoryOutput(trajectory, ["all"], 0, None, 1)]
        snapshot = SnapshotGenerator(self.universe, actions=actions)
        conf = self.universe.configuration()
        vel = self.universe.velocities()
        grad = ParticleVector(self.universe)
        try:
            while 1:
                line = self.history.readline()
                if not line:
                    break
                data = FortranLine(line, history_timestep_line)
                step = data[1]
                natoms = data[2]
                nvectors = data[3]+1
                pbc = data[4]
                dt = data[5]
                step_data = {'time': step*dt}
                if nvectors > 2:
                    step_data['gradients'] = grad
                if pbc:
                    data = FortranLine(self.history.readline(), history_pbc_line)
                    box_x = data[0]*Units.Ang
                    #if data[1] != 0. or data[2] != 0.:
                    #    raise ValueError, "box shape not supported"
                    data = FortranLine(self.history.readline(), history_pbc_line)
                    box_y = data[1]*Units.Ang
                    #if data[0] != 0. or data[2] != 0.:
                    #    raise ValueError, "box shape not supported"
                    data = FortranLine(self.history.readline(), history_pbc_line)
                    box_z = data[2]*Units.Ang
                    #if data[0] != 0. or data[1] != 0.:
                    #    raise ValueError, "box shape not supported"
                    self.universe.setSize((box_x, box_y, box_z))
                for i in range(natoms):
                    self.history.readline()
                    conf.array[i] = map(float,
                                        string.split(self.history.readline()))
                    if nvectors > 1:
                        vel.array[i] = map(float,
                                           string.split(self.history.readline()))
                        if nvectors > 2:
                            grad.array[i] = map(float,
                                             string.split(self.history.readline()))
                Numeric.multiply(conf.array, Units.Ang, conf.array)
                if nvectors > 1:
                    Numeric.multiply(vel.array, Units.Ang/Units.ps, vel.array)
                if nvectors > 2:
                    Numeric.multiply(grad.array, -Units.amu*Units.Ang/Units.ps**2,
                                     grad.array)

                snapshot(data=step_data)
        finally:
            trajectory.close()
Example no. 5
 def _writeToTrajectory(self, filename, comment, path):
     trajectory = Trajectory(self.universe, filename, "w", comment)
     snapshot = SnapshotGenerator(self.universe,
                                  actions = [TrajectoryOutput(trajectory,
                                                              ["all"],
                                                              0, None, 1)])
     for step in path:
         self.universe.setConfiguration(step.conf)
         snapshot()
     trajectory.close()
Example no. 6
    def test_snapshot(self):

        initial = self.universe.copyConfiguration()

        transformation = Translation(Vector(0.,0.,0.01)) \
                         * Rotation(Vector(0.,0.,1.), 1.*Units.deg)

        trajectory = Trajectory(self.universe,
                                "test.nc",
                                "w",
                                "trajectory test",
                                double_precision=self.double_precision)
        snapshot = SnapshotGenerator(
            self.universe,
            actions=[TrajectoryOutput(trajectory, ["all"], 0, None, 1)])
        snapshot()
        for i in range(100):
            self.universe.setConfiguration(
                self.universe.contiguousObjectConfiguration())
            self.universe.applyTransformation(transformation)
            self.universe.foldCoordinatesIntoBox()
            snapshot()
        trajectory.close()

        self.universe.setConfiguration(initial)
        trajectory = Trajectory(None, "test.nc")
        t_universe = trajectory.universe
        for i in range(101):
            configuration = self.universe.configuration()
            t_configuration = trajectory[i]['configuration']
            max_diff = N.maximum.reduce(
                N.ravel(N.fabs(configuration.array - t_configuration.array)))
            self.assert_(max_diff < self.tolerance)
            if configuration.cell_parameters is not None:
                max_diff = N.maximum.reduce(
                    N.fabs(configuration.cell_parameters -
                           t_configuration.cell_parameters))
                self.assert_(max_diff < self.tolerance)
            self.universe.setConfiguration(
                self.universe.contiguousObjectConfiguration())
            self.universe.applyTransformation(transformation)
            self.universe.foldCoordinatesIntoBox()
        trajectory.close()
Example no. 7
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """                                    
        
        if self.architecture == 'monoprocessor':
            t = self.trajectory            
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')

        orderedAtoms = sorted(t.universe.atomList(), key = operator.attrgetter('index'))
        selectedAtoms = Collection([orderedAtoms[ind] for ind in self.subset])

        targetAtoms = Collection([orderedAtoms[ind] for ind in self.target])
        
        # outputFile is the filtered output trajectory.
        outputFile = Trajectory(targetAtoms, self.output, "w") 
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime()

        # Create the snapshot generator
        snapshot = SnapshotGenerator(t.universe, actions = [TrajectoryOutput(outputFile, ["configuration"], 0, None, 1)])

        # Loop over the output frames.
        for comp in range(self.nFrames):
            
            frameIndex = self.frameIndexes[comp]
            t.universe.setFromTrajectory(t, frameIndex)
            
            for at in targetAtoms:
                at.setPosition(Vector(self.filteredTrajectory[frameIndex][at.index]))
                
            snapshot(data = {'time': self.times[comp]})

        outputFile.close()

        t.close()
        
        self.toPlot = None
Example no. 8
    def writeTrajectory(self, trajectory_name, block_size=1):
        trajectory = Trajectory(self.universe,
                                trajectory_name,
                                'w',
                                self.title,
                                block_size=block_size)
        actions = [TrajectoryOutput(trajectory, ["all"], 0, None, 1)]
        snapshot = SnapshotGenerator(self.universe, actions=actions)
        conf = self.universe.configuration()
        vel = self.universe.velocities()
        grad = ParticleVector(self.universe)
        nvectors = N_VECTORS
        natoms = N_ATOMS
        self._setSize()

        try:
            while True:
                vline = self.velfile.readline()
                pline = self.posfile.readline()
                if not vline:
                    break

                vdata = vline.split()
                pdata = pline.split()

                if len(pdata) == 2:  # Example: ["10", "0.00120944"]
                    step = int(vdata[0])
                    step_data = {'time': step * DT}

                for i in range(natoms):
                    conf.array[i] = map(float,
                                        string.split(self.posfile.readline()))
                    vel.array[i] = map(float,
                                       string.split(self.velfile.readline()))

                conf.array = Units.Ang * conf.array
                if nvectors > 1:
                    vel.array = Units.Ang / Units.ps * vel.array

                snapshot(data=step_data)
        finally:
            trajectory.close()
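
The parsing loop above implies a simple plain-text layout for the position and velocity files: a short header line per frame, followed by one line of three floats per atom. A hypothetical illustration (the values are invented, one frame shown):

# 10   0.00120944            <- per-frame header line: step number, time-like value
# 0.000  0.000  0.000        <- atom 1 (x y z)
# 0.957  0.000  0.000        <- atom 2 (x y z)
# ...                           one line per atom, natoms lines per frame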
Example no. 9
    def interpreteInputParameters(self):
        """Parse the input parameters for the analysis.
        """

        # Parses the parameters that are common to the different analyses.
        Analysis.interpreteInputParameters(self)
        
        self.buildTimeInfo()
            
        self.subset = self.selectAtoms(self.subsetDefinition)
        self.nSelectedAtoms = len(self.subset)

        orderedAtoms = sorted(self.trajectory.universe.atomList(), key = operator.attrgetter('index'))
        selectedAtoms = Collection([orderedAtoms[ind] for ind in self.subset])

        # self.outputFile is the filtered output trajectory.
        self.outputFile = Trajectory(selectedAtoms, self.output, "w")
                 
        # Create the snapshot generator
        self.snapshot = SnapshotGenerator(self.trajectory.universe, actions = [TrajectoryOutput(self.outputFile, ["all"], 0, None, 1)])
Example no. 10
def do_analysisPerElement(analysis, element, trajname):
    """Performs the analysis element-by-element, the element being either
    an atom (atom-by-atom analysis), a frame index (frame-by-frame analysis),
    a group of atoms (group-by-group analysis), or a set of q vectors.
    
    @param analysis: the selected analysis.
    @type analysis: a subclass of nMOLDYN.Analysis.Analysis.Analysis class
    
    @param element: the element on which the analysis is based.
    @type element: MMTK.Atom|integer|MMTK.Collections.Collection|nMOLDYN.Mathematics.QVectors
    
    @param trajname: a string specifying the name of the trajectory.
    @type trajname: string
    
    @return: the results of the analysis performed on one element.
    @rtype: depends on the analysis    
    """

    global trajectory
    if trajectory is None:
        trajectory = Trajectory(None, trajname)
        hierarchizeUniverse(trajectory.universe)
    return analysis.calc(element, trajectory)
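
A hedged sketch of the calling convention (the analysis class, element list, and file name below are hypothetical placeholders, not taken from the source):

# Hypothetical usage, e.g. from a per-element dispatch loop:
#   analysis = SomeAnalysisSubclass(...)        # any nMOLDYN Analysis subclass
#   for element in elements:                    # atoms, frame indices, groups or q-vector sets
#       partial = do_analysisPerElement(analysis, element, 'traj.nc')
# The module-level 'trajectory' is opened on the first call and reused for later elements.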
Example no. 11
def viewTrajectory(trajectory,
                   first=0,
                   last=None,
                   skip=1,
                   subset=None,
                   label=None):
    """
    Launches an animation based on a trajectory using an external viewer.

    :param trajectory: the trajectory
    :type trajectory: :class:`~MMTK.Trajectory.Trajectory`
    :param first: the first trajectory step to be used
    :type first: int
    :param last: the first trajectory step NOT to be used
    :type last: int
    :param skip: the distance between two consecutive steps shown
    :type skip: int
    :param subset: the subset of the universe that is shown
                   (default: the whole universe)
    :type subset: :class:`~MMTK.Collections.GroupOfAtoms`
    :param label: an optional text string that some interfaces
                  use to pass a description of the object to the
                  visualization system.
    :type label: str
    """
    if type(trajectory) == type(''):
        from MMTK.Trajectory import Trajectory
        trajectory = Trajectory(None, trajectory, 'r')
    if last is None:
        last = len(trajectory)
    elif last < 0:
        last = len(trajectory) + last
    universe = trajectory.universe
    if subset is None:
        subset = universe
    viewSequence(subset, trajectory.configuration[first:last:skip], label)
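
A hedged usage sketch (the file name and frame range are placeholders):

# View every fifth configuration of the first 100 steps of a stored trajectory:
# viewTrajectory('lysozyme.nc', first=0, last=100, skip=5)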
Example no. 12
        dv = -0.5*delta_t*gradients*inv_masses
        velocities += dv
        universe.setVelocities(velocities)
        time += delta_t
        snapshot(data={'time': time,
                       'potential_energy': energy})
        if equilibration_temperature is not None \
           and step % equilibration_frequency == 0:
            universe.scaleVelocitiesToTemperature(equilibration_temperature)

# Define system
universe = InfiniteUniverse(Amber99ForceField())
universe.protein = Protein('bala1')

# Create trajectory and snapshot generator
trajectory = Trajectory(universe, "md_trajectory.nc", "w",
                        "Generated by a Python integrator")
snapshot = SnapshotGenerator(universe,
                             actions = [TrajectoryOutput(trajectory,
                                                         ["all"], 0, None, 1)])

# Initialize velocities
universe.initializeVelocitiesToTemperature(50.*Units.K)
# Heat and equilibrate
for temperature in [50., 100., 200., 300.]:
    doVelocityVerletSteps(delta_t = 1.*Units.fs, nsteps = 500,
                          equilibration_temperature = temperature*Units.K,
                          equilibration_frequency = 1)
doVelocityVerletSteps(delta_t = 1.*Units.fs, nsteps = 500,
                      equilibration_temperature = 300*Units.K,
                      equilibration_frequency = 10)
# Production run
Example no. 13
universe.environmentObjectList(
    Environment.PathIntegrals)[0].include_spring_terms = False
universe._changed(True)

universe.initializeVelocitiesToTemperature(temperature)
#re-create integrator to use calculated friction value
integrator = PIGSLangevinNormalModeIntegrator(universe,
                                              delta_t=dt,
                                              centroid_friction=friction)

print "Number of Steps : " + str(nvt_time / skip)

# NOW WE BUILD A TRAJECTORY, AND RUN OUR ACTUAL DYNAMICS!
trajectoryNVE = Trajectory(
    universe, "/scratch/mdgschmi/" + label + ".nc", "w",
    "PIMD simulation NVE using Langevin Cartesian Integrator")

# NOTE THERE IS A BRIEF INITIALIZATION OF 0.05/dt time-steps!

integrator(
    steps=nvt_time,
    actions=[
        TrajectoryOutput(trajectoryNVE,
                         ('configuration', 'energy', 'time', 'velocities'), 0,
                         None, skip)
    ])

trajectoryNVE.close()
os.system("mv /scratch/mdgschmi/" + label +
          ".nc /warehouse/mdgschmi/MBpolDimer/.")
Example no. 14
maxP = P / 2 - dist

c = zeros((3, maxP), float)
c2 = zeros((3, maxP), float)
counter = 0

label = "P-" + str(P) + "-" + str(start)

c0file = open("/warehouse/mdgschmi/MBpolMonomer/corr-sym-" + label, "w")
c1file = open("/warehouse/mdgschmi/MBpolMonomer/corr-bend-" + label, "w")
c2file = open("/warehouse/mdgschmi/MBpolMonomer/corr-asym-" + label, "w")

nmodes = 3 * natoms - 6

r = zeros((natoms, P, 3), float)
trajlength = len(Trajectory(universe, traj))
mean0 = 0.
mean1 = 0.
mean2 = 0.
for i in range(trajlength):

    universe.setFromTrajectory(Trajectory(universe, traj), i)

    r[0] = asarray(universe.atomList()[0].beadPositions())  #Hydrogen 1
    r[1] = asarray(universe.atomList()[1].beadPositions())  #Hydrogen 2
    r[2] = asarray(universe.atomList()[2].beadPositions())  #Oxygen

    bond_down = zeros((3, maxP), float)
    bond_up = zeros((3, maxP), float)

    for p in range(maxP):
Example no. 15
lattice_spacing = float(traj[traj.find("R")+1:traj.find("-local")])

universe.addObject(PathIntegrals(temperature))

for i in range(nH2O):
    universe.addObject(Molecule('water', position = Vector(0., 0., i*lattice_spacing)))

for atom in universe.atomList():
    atom.setNumberOfBeads(P)

natoms = len(universe.atomList())

universe.setForceField(mbpolForceField(universe))
##################################################

trajectory = Trajectory(universe, traj)

filename = traj[0:traj.find(".nc")]

universe = trajectory.universe
natoms = universe.numberOfAtoms()
np = universe.numberOfPoints()
P = np/natoms

#oufile = open("oufile-"+filename,"w")
ozfile = open("ozfile-"+filename+".dat","w")

ozA = 0.0
stp = -1

for step in trajectory:
Example no. 16
def process(cdffile, file_output, All_Prop, SF):
    dirname, filepath = os.path.split(cdffile)
    filename, ext = os.path.splitext(filepath)

    traj = Trajectory(None, cdffile)
    universe = traj.universe
    forces = traj.gradients
    box_posvec = traj.box_coordinates
    chains = universe.objectList(AtomCluster)
    chains_indices = [[atom.index for atom in chain.atomList()]
                      for chain in chains]
    chains_ns = [chain.numberOfAtoms() for chain in chains]
    print('In postprocessing %s' % filename)
    print 'ASPeriodicity = ', periodicity
    print 'Lja2 = ', Lja2
    # Number of sample points
    Ns = len(traj)
    # Number of chains
    Nc = len(chains)
    print(Ns, Nc, chains_ns[0])
    #NAs = (int(chains_ns[0]/periodicity) + 1)*Nc # total no. of associating beads for all the chains for telechelic system
    NAs = (
        int(chains_ns[0] / periodicity)
    ) * Nc  # total no. of associating beads for all the chains for multi-sticker system
    print NAs

    if All_Prop:
        # center of mass
        r_coms = np.zeros((Ns, Nc, 3))
        # end to end distance
        q_sqs = np.zeros((Ns, Nc))
        # Radius of gyration
        rg_sqs = np.zeros((Ns, Nc))
        # tau xy
        tauxys = np.zeros(Ns)

        ## Neighbour matrix Mij(for association dynamics), added by Aritra
        #Mij = np.zeros((Ns, chains_ns[0], chains_ns[0])) # for single chain system
        Mij = np.zeros((Ns, NAs, NAs))  # for multi-chain system
        # local time correlation function
        Ft = np.zeros(Ns)

        # Connectivity matrix for cluster
        #Cij = Mij.astype(int)
        Cij = Mij.astype(int)  # for multi-chain system
        Rank = np.zeros(1000)  # Rank of the connectivity matrix
        clustsize = np.zeros(1000)  # cluster size
        closedsticker = np.zeros(1000)  # fraction of closed stickers
        opensticker = np.zeros(1000)  # fraction of open stickers
        Pn = np.zeros(
            NAs)  # binning to calculate probability dist of cluster size
        ''' 
        for tid, conf in enumerate(traj.configuration):
            tauxy = 0.0
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                r_com = positions.sum(axis = 0) / chains_ns[cid]
                q_sq = ((positions[0] - positions[-1])**2).sum()
                rel_pos = positions - r_com
                rg_sq = (rel_pos**2).sum() / chains_ns[cid]
                tauxy += (rel_pos[:, 0] * forces[tid].array[chain_indices][:, 1]).sum()
                r_coms[tid, cid] = r_com
                q_sqs[tid, cid] = q_sq
                rg_sqs[tid, cid] = rg_sq
            tauxys[tid] = tauxy

        # computation of Neighbour matrix for single chain
        for tid, conf in enumerate(traj.configuration):
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                for i in range(chains_ns[cid]-2):
                    for j in range(i+2, chains_ns[cid]):
                        rijsq = ((positions[i] - positions[j])**2).sum()
                        rij = sqrt(rijsq)
                        if rij < 1.5:
                           Mij[tid, i, j] = 1
        '''
        ''' 
        # computation of Neighbour matrix for multi-chain systems
        for tid, conf in enumerate(traj.box_coordinates):
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                if cid == 0:
                    beadpositions = positions
                else:
                    beadpositions = np.append(beadpositions, positions, axis=0) # writing position vectors of all the beads for all chains. 
            p = 0 # bead index for associating bead i
            q = 0 # bead index for associating bead j
            for i in range(chains_ns[0]*Nc - 1):
                nc_i = int(i/chains_ns[0]) # chain number for bead i
                if ((i - nc_i) % periodicity == 0 and i > 0):
                    p += 1
                    if (p > (NAs-1)):
                        print 'p is greater than NAs: ', p+1
                for j in range(i+1, chains_ns[0]*Nc):
                    nc_j = int(j/chains_ns[0]) # chain number for bead j 
                    if ((j - nc_j) % periodicity == 0 and j > 0):
                        q = p + 1
                        if (q > (NAs-1)):
                            print 'q is greater than NAs: ', q+1
                    if (((i - nc_i) % periodicity) == 0 and ((j - nc_j) % periodicity) == 0):
                        rijsq = ((beadpositions[i] - beadpositions[j])**2).sum()
                        rij = sqrt(rijsq)
                        if (rij < 1.5*Lja2):
                            Mij[tid, p, q] = 1
                q = 0
            if (tid % 2000 == 0):
                print 'tid = ', tid    
  
        ## compution of number of cluster and cluster size for single chain system
        for tid, conf in enumerate(traj.configuration):
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                for i in range(chains_ns[cid]):
                    for j in range(chains_ns[cid]):
                        rijsq = ((positions[i] - positions[j])**2).sum()
                        rij = sqrt(rijsq)
                        if rij < 2.8:
                           Cij[tid, i, j] = 1  
        # computation of rank of connectivity matrix to find out number of cluster
        for t in range(Ns):
            if ((t+1) % 1000 == 0):
                tid = t+1 
                N0 = chains_ns[0]
                N = chains_ns[0]
                #print Cij[tid,:,:]
                #print "****************"
                for i in range(N0):
                    if i < N:
                        switch = 1
                        while (switch == 1):
                            for j in range(i, N):
                                switch = 0
                                c = np.bitwise_and(Cij[tid,:,i], Cij[tid,:,j]) 
                                if ((c**2).sum() != 0 and j!=i):
                                    switch = 1
                                    Cij[tid,:,i] =  np.bitwise_or(Cij[tid,:,i], Cij[tid,:,j]) 
                                    N1 = j
                                    N = N-1
                                    for k in range(N1+1, N+1):
                                        Cij[tid,:,k-1] = Cij[tid,:,k]
                                    break     
                    else: break 
                Rank[tid/1000 - 1] = N
                clustsize[tid/1000 - 1] = float(chains_ns[0])/N
                #print Cij[tid,:,:]
                #print "#######################"
        '''
        ## computation of number of cluster and cluster size for multi-chain system
        for tid, conf in enumerate(traj.box_coordinates):
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                if cid == 0:
                    beadpositions = positions
                else:
                    beadpositions = np.append(
                        beadpositions, positions, axis=0
                    )  # writing position vectors of all the beads for all chains.
            p = 0  # bead index for associating bead i
            q = 0  # bead index for associating bead j
            for i in range(chains_ns[0] * Nc):
                nc_i = int(i / chains_ns[0])  # chain number for bead i
                # for telechelic system
                '''
                if ((i - nc_i) % periodicity == 0 and i > 0):
                    p += 1
                    if (p > (NAs-1)):
                        print 'p is greater than NAs: ', p+1
                '''
                # for multi-sticker system (4 : periodicity-1)
                if (((i - 4) + nc_i) % periodicity == 0 and i > 4):
                    p += 1
                    if (p > (NAs - 1)):
                        print 'p is greater than NAs: ', p + 1
                for j in range(chains_ns[0] * Nc):
                    nc_j = int(j / chains_ns[0])  # chain number for bead j
                    # for telechelic system
                    '''
                    if ((j - nc_j) % periodicity == 0 and j > 0):
                        q += 1
                        if (q > (NAs-1)):
                            print 'q is greater than NAs: ', q+1
                    '''
                    # for multi-sticker system (4: periodicity-1)
                    if (((j - 4) + nc_j) % periodicity == 0 and j > 4):
                        q += 1
                        if (q > (NAs - 1)):
                            print 'q is greater than NAs: ', q + 1
                    # for telechelic system
                    '''
                    if (((i - nc_i) % periodicity) == 0 and ((j - nc_j) % periodicity) == 0):
                        rijsq = ((beadpositions[i] - beadpositions[j])**2).sum()
                        rij = sqrt(rijsq)
                        if (rij < 1.5*Lja2):
                            Cij[tid, p, q] = 1
                    '''
                    # for multi-sticker system
                    if ((((i - 4) + nc_i) % periodicity) == 0
                            and (((j - 4) + nc_j) % periodicity) == 0):
                        rijsq = ((beadpositions[i] -
                                  beadpositions[j])**2).sum()
                        rij = sqrt(rijsq)
                        if (rij < 1.82 * Lja2):
                            Cij[tid, p, q] = 1
                q = 0
            if (tid % 1000 == 0):
                print 'tid = ', tid

        #computation of rank of connectivity matrix to find out number of cluster
        for t in range(Ns):
            if ((t + 1) % 5 == 0):
                tid = t + 1
                N0 = NAs
                N = NAs
                #print Cij[tid,:,:]
                #print "****************"
                for i in range(N0):
                    if i < N:
                        switch = 1
                        while (switch == 1):
                            for j in range(i, N):
                                switch = 0
                                c = np.bitwise_and(Cij[tid, :, i], Cij[tid, :,
                                                                       j])
                                if ((c**2).sum() != 0 and j != i):
                                    switch = 1
                                    Cij[tid, :, i] = np.bitwise_or(
                                        Cij[tid, :, i], Cij[tid, :, j])
                                    N1 = j
                                    N = N - 1
                                    for k in range(N1 + 1, N + 1):
                                        Cij[tid, :, k - 1] = Cij[tid, :, k]
                                    break
                    else:
                        break
                Rank[tid / 5 - 1] = N
                clustsize[tid / 5 - 1] = float(NAs) / N
                for nc in range(N):
                    if (tid == 5):
                        print Cij[tid, :, nc].sum()
                    if (Cij[tid, :, nc].sum() > 1):
                        closedsticker[tid / 5 - 1] += Cij[tid, :, nc].sum()
                        if (Cij[tid, :, nc].sum() < NAs):
                            Pn[Cij[tid, :, nc].sum() - 1] += 1
                        else:
                            Pn[NAs - 1] += 1
                    else:
                        opensticker[tid / 5 - 1] += 1
                        Pn[0] += 1
                #print Cij[tid,:,:]
                #print "#######################"
        '''                       
        # mean square displacement
        msds = np.zeros((Ns, Nc))
        for dtime in range(1, Ns):
            for cid in range(Nc): 
                com = r_coms[:, cid]
                msd = ((com[dtime:, :] - com[:-dtime, :])**2).sum()
                msd /= Ns - dtime 
                msds[dtime, cid] = msd
       
        # stress autocorrelation 
        gt = np.zeros(Ns)
        gt[0] = (tauxys**2).sum() / (Nc * Ns)
        for dtime in range(1, Ns):
            gt[dtime] = (tauxys[dtime:] * tauxys[:-dtime]).sum() / (Nc * (Ns - dtime))
            if (dtime % 2000 == 0):
                print 'dtime = ', dtime
        
        # local time correlation function for association dynamics
        Ft[0] = (Mij**2).sum()/(Nc*Ns*chains_ns[0]) 
        print 'dtime = 0' 
        for dtime in range(1, Ns):
            Ft[dtime] = (Mij[dtime:]*Mij[:-dtime]).sum()/(Nc*(Ns - dtime)*chains_ns[0])
            if (dtime % 2000 == 0):
                print 'dtime = ', dtime
        '''
        #data_out = np.column_stack( (traj.time, np.mean(q_sqs, axis = 1), np.mean(rg_sqs, axis = 1), np.mean(msds, axis = 1), gt, tauxys, Ft) )
        '''
        Rg2 = np.mean(rg_sqs, axis = 1)
        Rg2mean = np.mean(Rg2)
        #print Rg2mean
        with open('Rg2mean.txt','a+') as fRg2:
              fRg2.write("%lf\n" % Rg2mean)
        '''
        if file_output:
            #np.savetxt("dynamic%s.txt" % filename, data_out)
            #np.savetxt("Rg2.txt", Rg2)
            #np.savetxt("Ft%s.txt" % filename, Ft)
            #np.savetxt("Gt%s.txt" % filename, gt)
            np.savetxt("clustnum%s.txt" % filename, Rank)
            np.savetxt("clustsize%s.txt" % filename, clustsize)
            np.savetxt("closedstick%s.txt" % filename, closedsticker)
            np.savetxt("openstick%s.txt" % filename, opensticker)
            np.savetxt("clustdistb%s.txt" % filename, Pn)
        else:
            #print Ft
            #print gt
            print Rank

    if SF:
        # structure factor
        from math import sin
        structure_kmin, structure_kmax, structure_nks = 0.1, 8, 100
        ks = np.logspace(np.log10(structure_kmin), np.log10(structure_kmax),
                         structure_nks)
        structure_factors = np.zeros((structure_nks, Ns, Nc))
        Npair = ((Nbpc * Nbpc) - Nbpc) / 2
        r_mag = np.zeros((Ns, Nc, Npair))
        Ns = len(traj)
        for tid, conf in enumerate(traj.configuration):
            for cid, chain_indices in enumerate(chains_indices):
                positions = conf.array[chain_indices]
                n = len(positions)
                pairid = 0
                for i, position1 in enumerate(positions):
                    for j in range(i + 1, n):
                        position2 = positions[j]
                        rij = position2 - position1
                        r_mag[tid, cid, pairid] = ((rij * rij).sum())**.5
                        pairid = pairid + 1


#########################################################################
        for kid, k in enumerate(ks):
            print 'kid', kid
            for tid in range(Ns):
                for cid in range(Nc):
                    struct_sum = 0.0
                    pairid = 0
                    for i in range(Nbpc):
                        for j in range(i + 1, Nbpc):
                            struct_sum += sin(k * r_mag[tid, cid, pairid]) / (
                                k * r_mag[tid, cid, pairid])
                            pairid = pairid + 1
                    structure_factors[kid, tid, cid] = 1.0 + (
                        2 * struct_sum / Nbpc
                    )  # multiplied by two because of symmetry (each i<j pair counted once); the added 1 accounts for the rij = 0 terms
        print 'kid', kid
        structure_factors = np.mean(structure_factors, axis=2)
        data_structure_factor = np.column_stack(
            (ks, np.mean(structure_factors,
                         axis=1), np.std(structure_factors, ddof=1, axis=1)))
        if file_output:
            np.savetxt("structure%s.txt" % filename, data_structure_factor)

    traj.close()
Example no. 17
# Equilibration
integrator(
    steps=10000,
    actions=[  # Scale velocities every 50 steps.
        VelocityScaler(temperature, 0.1 * temperature, 0, None, 50),
        # Remove global translation every 50 steps.
        TranslationRemover(0, None, 50),
        # Remove global rotation every 50 steps.
        RotationRemover(0, None, 50),
        # Log output to screen every 500 steps.
        StandardLogOutput(500)
    ])

# "Production" run
trajectory = Trajectory(universe, "langevin.nc", "w", "Langevin test")
integrator(
    steps=10000,
    actions=[  # Remove global translation every 50 steps.
        TranslationRemover(0, None, 50),
        # Remove global rotation every 50 steps.
        RotationRemover(0, None, 50),
        # Write every tenth step to the trajectory file.
        TrajectoryOutput(trajectory,
                         ("time", "energy", "thermodynamic", "configuration"),
                         0, None, 10),
        # Log output to screen every 100 steps.
        StandardLogOutput(100)
    ])
trajectory.close()
Example no. 18
       if opt in ("-e", "--end"):
           end = int(arg)
       if opt in ("-s", "--step"):
           step = int(arg)
       if opt in ("-h", "--help"):
           usage()
           sys.exit(2)
       if opt in ("-f", "--filename"):
           trajectoryPath = arg
       if opt in ("-t", "--temperature"):
           temperature = float(arg)

   print "Using file %s as input..."%trajectoryPath


   trajectory = Trajectory(None, trajectoryPath, 'r')

   if end == -1:
       end = len(trajectory.time)

   timeinfo =  '%d:%d:%d'%(begin, end, step)
   print 'The complete trajectory size is', len(trajectory.time), ' elements'
   print "\nAnalysing trajectory from position %d to postion %d with step %d:\n"%(begin,end,step)
   print 'Temperature = ',temperature

#    print (trajectory.time[0], trajectory.time[-1], trajectory.time[1] - trajectory.time[0])

   parameters = {
                   'trajectory': trajectory,
                   'timeinfo'  : timeinfo,
                   'differentiation': 0,
Example no. 19
    denrho=denrho,
    denerot=denerot,
    denesq=denesq,
    rotstep=float(Rot_Step),
    rotskipstep=float(Rot_Skip))

integrator(
    steps=3000,
    actions=[TrajectoryOutput(None, ('configuration', 'time'), 0, None, 100)]
)  # relates to the default_options = {'first_step': 0...} section of the main code.

RunSteps = int(numsteps) * Units.fs / dt
SkipSteps = 1.0 * Units.fs / dt

trajectory = Trajectory(
    universe, outdir + "N" + str(nmolecules) + "H20T" + str(temperature) +
    "P" + str(P) + "R" + str(lattice_spacing) + "FilEFVersion" +
    str(numsteps) + "Steps" + label + ".nc", "w", "A simple test case")

Nblocks = 1

############################## BEGIN ROTATION SIMULATION ##############################

# RUN PIMD WITH PIMC ROTATION INCLUDED
print "We're going to run the Langevin integrator for ", RunSteps / SkipSteps, "independent steps of PIMD"
integrator(
    steps=RunSteps,
    # Remove global translation every 50 steps.
    actions=[
        TrajectoryOutput(
            trajectory,
            ("time", "thermodynamic", "energy", "configuration", "auxiliary"),
Example no. 20
# for all atoms, and then the average over the C-alpha atoms is determined.
# Note that calculating the fluctuations for only the C-alpha atoms is
# more complicated and no faster.
#
# This example illustrates:
# 1) Reading trajectory files
# 2) Selecting parts of a system
# 3) Calculating trajectory averages
#

from MMTK import *
from MMTK.Trajectory import Trajectory
from MMTK.Proteins import Protein

# Open the trajectory, use every tenth step
trajectory = Trajectory(None, 'lysozyme.nc')[::10]
universe = trajectory.universe

# Calculate the average conformation
average = ParticleVector(universe)
for step in trajectory:
    average += step['configuration']
average /= len(trajectory)

# Calculate the fluctuations for all atoms
fluctuations = ParticleScalar(universe)
for step in trajectory:
    d = step['configuration'] - average
    fluctuations += d * d
fluctuations /= len(trajectory)
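
The header comment promises an average over the C-alpha atoms, but the snippet stops after the all-atom fluctuations. A hedged completion is sketched below; it assumes the stored universe exposes the protein as universe.protein, as in the other examples, and uses the standard residue.peptide.C_alpha access:

# Average the fluctuations over the C-alpha atoms only (hedged sketch)
calpha_fluctuation = 0.
n_calpha = 0
for chain in universe.protein:
    for residue in chain:
        calpha_fluctuation += fluctuations[residue.peptide.C_alpha]
        n_calpha += 1
print(calpha_fluctuation / n_calpha)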
Example no. 21
    actions=[
        Heater(50. * Units.K, 300. * Units.K, 0.5 * Units.K / Units.fs, 0,
               None, 1),
        # Remove global translation every 50 steps.
        TranslationRemover(0, None, 50),
        # Remove global rotation every 50 steps.
        RotationRemover(0, None, 50),
        # Log output to screen every 100 steps.
        StandardLogOutput(100)
    ])

print("Time: " + str(time.time() - start)),
file.write("Time: " + str(time.time() - start))

# "Production" run
trajectory = Trajectory(universe, "insulin.nc", "w", "A simple test case")

universe.protein.writeToFile('folded_protein.pdb')
'''integrator(steps=1000,
                      # Remove global translation every 50 steps.
           actions = [TranslationRemover(0, None, 50),
                      # Remove global rotation every 50 steps.
                      RotationRemover(0, None, 50),
                      # Write every second step to the trajectory file.
                      TrajectoryOutput(trajectory, ("time", "energy",
                                                    "thermodynamic",
                                                    "configuration"),
                                       0, None, 2),
                      # Write restart data every fifth step.
                      RestartTrajectoryOutput("restart.nc", 5),
                      # Log output to screen every 10 steps.
Example no. 22
    rotengmat=roteng,
    rotstep=float(rotstepval))
#integrator = Rot2DOnly_PILangevinNormalModeIntegrator(universe, delta_t=dt, centroid_friction = friction, densmat=rho,rotengmat=roteng, rotstep=float(rotstepval))

#integrator(steps=1000, actions = [ TrajectoryOutput(None,('configuration','time'), 0, None, 100)])
#raise()

RunSteps = 500.0 * Units.ps / dt
print "RunSteps:", RunSteps
SkipSteps = 50.0 * Units.fs / dt
print "SkipSteps:", SkipSteps

#trajectory = Trajectory(universe, outdir+str(nCO2)+"CO2He-P"+str(P)+"-"+"T"+str(temperature)+"-"+label+"-"+testnum+".nc", "w", "A simple test case")
trajectory = Trajectory(
    universe,
    outdir + str(nCO2) + "CO2" + "-" + str(nhelium) + "He-P" + str(P) + "-" +
    "T" + str(temperature) + "-" + label + "-" + testnum + ".nc", "w",
    "A simple test case")
Nblocks = 1

################################################################################################
########################### BEGIN TRANSLATION/ROTATION SIMULATION ##############################
################################################################################################

# RUN PIMD WITH PIMC ROTATION INCLUDED
print "We're going to run the Langevin integrator for ", RunSteps / SkipSteps, "independent steps of PIMD"
integrator(
    steps=RunSteps,
    # Remove global translation every 50 steps.
    actions=[
        TrajectoryOutput(
Example no. 23
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        if self.architecture == 'monoprocessor':
            t = self.trajectory
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')

        orderedAtoms = sorted(t.universe.atomList(),
                              key=operator.attrgetter('index'))
        groups = [
            Collection([orderedAtoms[ind] for ind in g]) for g in self.group
        ]

        # 'frequencies' = 1D Numeric array: the frequencies at which the DOS was computed.
        frequencies = N.arange(self.nFrames) / (2.0 * self.nFrames * self.dt)

        # The NetCDF output file is opened for writing.
        outputFile = NetCDFFile(self.output, 'w')
        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        # Dictionary whose keys are of the form Gi, where i is the group number,
        # and whose entries are the lists of the indices of the atoms building the group.
        comp = 1
        for g in self.group:
            outputFile.jobinfo += 'Group %d: %s\n' % (comp,
                                                      [index for index in g])
            comp += 1

        # Some dimensions are created.
        outputFile.createDimension('NFRAMES', self.nFrames)

        # Creation of the NetCDF output variables.
        # The time.
        TIMES = outputFile.createVariable('time', N.Float, ('NFRAMES', ))
        TIMES[:] = self.times[:]
        TIMES.units = 'ps'

        # The resolution function.
        RESOLUTIONFUNCTION = outputFile.createVariable('resolution_function',
                                                       N.Float, ('NFRAMES', ))
        RESOLUTIONFUNCTION[:] = self.resolutionFunction[:]
        RESOLUTIONFUNCTION.units = 'unitless'

        # Creation of the NetCDF output variables.
        # The frequencies.
        FREQUENCIES = outputFile.createVariable('frequency', N.Float,
                                                ('NFRAMES', ))
        FREQUENCIES[:] = frequencies[:]
        FREQUENCIES.units = 'THz'

        OMEGAS = outputFile.createVariable('angular_frequency', N.Float,
                                           ('NFRAMES', ))
        OMEGAS[:] = 2.0 * N.pi * frequencies[:]
        OMEGAS.units = 'rad ps-1'

        avacfTotal = N.zeros((self.nFrames), typecode=N.Float)
        adosTotal = N.zeros((self.nFrames), typecode=N.Float)

        comp = 1
        totalMass = 0.0
        for g in groups:

            AVACF = outputFile.createVariable('avacf-group%s' % comp, N.Float,
                                              ('NFRAMES', ))
            AVACF[:] = self.AVACF[comp][:]
            AVACF.units = 'rad^2*ps^-2'

            N.add(avacfTotal, self.AVACF[comp], avacfTotal)

            ADOS = outputFile.createVariable('ados-group%s' % comp, N.Float,
                                             ('NFRAMES', ))
            ADOS[:] = self.ADOS[comp][:]
            ADOS.units = 'rad^2*ps^-1'

            N.add(adosTotal, g.mass() * self.ADOS[comp], adosTotal)

            comp += 1
            totalMass += g.mass()

        adosTotal *= 0.5 * self.dt / (self.nGroups * totalMass)

        AVACF = outputFile.createVariable('avacf-total', N.Float,
                                          ('NFRAMES', ))
        AVACF[:] = avacfTotal
        AVACF.units = 'rad^2*ps^-2'

        ADOS = outputFile.createVariable('ados-total', N.Float, ('NFRAMES', ))
        ADOS[:] = adosTotal
        ADOS.units = 'rad^2*ps^-1'

        asciiVar = sorted(outputFile.variables.keys())

        outputFile.close()

        self.toPlot = {
            'netcdf': self.output,
            'xVar': 'angular_frequency',
            'yVar': 'ados-total'
        }

        # Create an ASCII version of the NetCDF output file.
        convertNetCDFToASCII(inputFile = self.output,\
                             outputFile = os.path.splitext(self.output)[0] + '.cdl',\
                             variables = asciiVar)
Example no. 24
    def finalize(self):
        """Finalizes the calculations (e.g. averaging the total term, output files creations ...).
        """

        if self.architecture == 'monoprocessor':
            t = self.trajectory
        else:
            # Load the whole trajectory set.
            t = Trajectory(None, self.trajectoryFilename, 'r')

        selectedAtoms = Collection()
        orderedAtoms = sorted(t.universe.atomList(),
                              key=operator.attrgetter('index'))
        groups = [[
            selectedAtoms.addObject(orderedAtoms[index])
            for index in atomIndexes
        ] for atomIndexes in self.group]

        # Create trajectory
        outputFile = Trajectory(selectedAtoms, self.output, 'w')

        # Create the snapshot generator
        snapshot = SnapshotGenerator(
            t.universe,
            actions=[
                TrajectoryOutput(outputFile, ["configuration", "time"], 0,
                                 None, 1)
            ])

        # The output is written
        for comp in range(self.nFrames):

            frameIndex = self.frameIndexes[comp]
            t.universe.setFromTrajectory(t, frameIndex)

            for atom in selectedAtoms:
                atom.setPosition(self.RBT['trajectory'][atom.index][comp, :])
            snapshot(data={'time': self.times[comp]})

        outputFile.close()

        outputFile = NetCDFFile(self.output, 'a')

        outputFile.title = self.__class__.__name__
        outputFile.jobinfo = self.information + '\nOutput file written on: %s\n\n' % asctime(
        )

        outputFile.jobinfo += 'Input trajectory: %s\n\n' % self.trajectoryFilename

        outputFile.createDimension('NFRAMES', self.nFrames)
        outputFile.createDimension('NGROUPS', self.nGroups)
        outputFile.createDimension('QUATERNIONLENGTH', 4)

        # The NetCDF variable that stores the quaternions.
        QUATERNIONS = outputFile.createVariable(
            'quaternion', N.Float, ('NGROUPS', 'NFRAMES', 'QUATERNIONLENGTH'))

        # The NetCDF variable that stores the centers of mass.
        COM = outputFile.createVariable('com', N.Float,
                                        ('NGROUPS', 'NFRAMES', 'xyz'))

        # The NetCDF variable that stores the rigid-body fit.
        FIT = outputFile.createVariable('fit', N.Float, ('NGROUPS', 'NFRAMES'))

        # Loop over the groups.
        for comp in range(self.nGroups):

            aIndexes = self.group[comp]

            outputFile.jobinfo += 'Group %s: %s\n' % (
                comp + 1, [index for index in aIndexes])

            QUATERNIONS[comp, :, :] = self.RBT[comp]['quaternions'][:, :]
            COM[comp, :, :] = self.RBT[comp]['com'][:, :]
            FIT[comp, :] = self.RBT[comp]['fit'][:]

        outputFile.close()

        self.toPlot = None
Example no. 25
dt = 1.0*Units.fs

# Initialize velocities
universe.initializeVelocitiesToTemperature(temperature)

# USE THE FRICTION PARAMETER FROM BEFORE
friction = 0.0
integrator = RotOnlyWolff_PILangevinNormalModeIntegrator(universe, delta_t=dt, centroid_friction = friction, densmat=rho,rotengmat=roteng, rotstep=float(Rot_Step), rotskipstep=int(Rot_Skip))

integrator(steps=5000, actions = [ TrajectoryOutput(None,('configuration','time'), 0, None, 100)] )

RunSteps = 50.0*Units.ps/dt
SkipSteps = 50.0*Units.fs/dt

trajectory = Trajectory(universe, outdir+str(nmolecules)+"HF-P"+str(P)+"_"+label+".nc", "w", "A simple test case")
Nblocks=1

############################## BEGIN ROTATION SIMULATION ##############################

# RUN PIMD WITH PIMC ROTATION INCLUDED
print "We're going to run the Langevin integrator for ", RunSteps/SkipSteps, "independent steps of PIMD"
integrator(steps=RunSteps,
           # Remove global translation every 50 steps.
	   actions = [
		   TrajectoryOutput(trajectory, ("time", "thermodynamic", "energy",
						 "configuration", "auxiliary"),
                                    0, None, SkipSteps)])

npoints = len(trajectory)
universe = trajectory.universe
Example no. 26
def inputFileRead(filename):
    """
    read and process an input file
    """
    keywords = [
        'trajectory', 'output_files', 'title', 'time_info', 'time_steps',
        'frequency_points', 'q_vector_set', 'deuter', 'projection_vector',
        'reference', 'rotation_coefficients', 'ft_window', 'groups', 'weights',
        'atoms', 'units_length', 'units_frequency', 'log_file', 'groups_code',
        'atoms_code', 'filter_window', 'results_file', 'atoms_pdb',
        'differentiation', 'verbose', 'symbols', 'ar_order', 'ar_precision'
    ]
    newvars = {}
    file_text = Utility.readURL(filename)
    exec file_text in vars(sys.modules['__builtin__']), newvars
    input = Quidam()
    for key in keywords:
        if newvars.has_key(key): setattr(input, key, newvars[key])
        else: setattr(input, key, None)

    import os
    print os.getcwd()
    print input.trajectory
    #
    # general settings
    #
    if input.trajectory:
        if len(input.trajectory) == 1:
            traj = Trajectory(None, input.trajectory[0], 'r')
            if traj.variables().count('quaternion') > 0:
                traj = qTrajectory(None, input.trajectory[0], 'r')
        elif len(input.trajectory) > 1:
            traj = TrajectorySet(None, input.trajectory)

        if not input.units_length: input.units_length = Units.nm
        if not input.units_frequency:
            input.units_frequency = Units.tera * Units.Hz

        types = getTypes(traj.universe)
        if input.q_vector_set is None:
            input.q_vector_set = (N.arange(0., 100., 2.), 1., 50)  # default
        if input.time_info is None: input.time_info = (0, len(traj), 1)
        qVectors = qVectorGenerator(input.q_vector_set, traj)
        #
        # Substitute hydrogen atoms with deuterium?
        #
        if input.deuter:
            collection = Collection()
            for i in input.deuter.keys():
                for ia in input.deuter[i]:
                    gj = getChemicalObjects({i: types[i]}, {i: ia})
                    collection.addObject(gj)
            h2d = collection.atomList()
            print 'number of Deuter atoms: ', len(h2d)
        #
        # Atom-selection-related keywords:
        # atoms selected in different ways are stored together
        # and filtered at the end (if there are repetitions, only
        # the atoms that occur multiple times are kept and passed on
        # to further calculations; otherwise all atoms are passed).
        #
        if input.atoms:
            print 'processing atom selection:\n\t', input.atoms
            collection = []
            for i in input.atoms.keys():
                for ia in input.atoms[i]:
                    typs = {}
                    typs[i] = types[i]
                    vlst = {}
                    vlst[i] = ia
                    gj = getChemicalObjects(typs, vlst)
                    collection = collection + gj.atomList()
            input.atoms = collection
            print '\t...done\n\tstored ', len(input.atoms), ' atoms\n'
        if input.atoms_pdb:
            print 'processing atom selection from a PDB file\n\t(',
            print input.atoms_pdb, '):'
            atoms_add = parsePDBAtomSelection(input.atoms_pdb, traj)
            print '\t...done\n\tstored ', len(atoms_add.atomList()), ' atoms\n'
            if input.atoms: input.atoms = input.atoms + atoms_add.atomList()
            else: input.atoms = atoms_add.atomList()
        if input.atoms_code:
            print 'processing atom selection hardcoded in Python'
            # syntax:
            # def atoms_code(traj,nothing,dummy_a='gj')
            #     # a python code here
            #     return Collection(atom_list)
            #
            # atoms_code is a function object whose first argument is
            # a Trajectory object and which returns a Collection object.
            print '...done\n\tstored ', len(input.atoms_code(traj).atomList()),
            print ' atoms'
            if input.atoms:
                input.atoms = input.atoms + input.atoms_code(traj).atomList()
            else:
                input.atoms = input.atoms_code(traj).atomList()
        if not input.atoms:
            print ' No atom selection found, taking everything... just in case'
            input.atoms = traj.universe
        else:
            input.atoms = Collection(input.atoms)
        input.atoms = ghostBusters(input.atoms)
        print ' # atoms in selection: ', len(input.atoms.atomList())
        #
        # Group selection
        #
        if input.groups:
            input.groups, input.reference = parseGroupSelection(
                types, input.groups, input.reference, verbose=input.verbose)
        if input.groups_code:
            # previous def (if any) overwritten
            # the result returned by groups_code should be consistent
            # with that one from misc.parseGroupSelection
            input.groups, input.reference = input.groups_code(traj)
        #
        # Another piece of general settings
        #
        if input.weights == 'mass':
            weightsList = MassList(traj.universe, input.atoms)
        elif input.weights == 'incoherent':
            if input.deuter:
                weightsList = BincohList(traj.universe, input.atoms, h2d)
            else:
                weightsList = BincohList(traj.universe, input.atoms)
        elif input.weights == 'coherent':
            if input.deuter:
                weightsList = BcohList(traj.universe, input.atoms, h2d)
            else:
                weightsList = BcohList(traj.universe, input.atoms)
        else:
            weightsList = None

        # input.trajectory = (traj, input.trajectory)
        input.trajectory = traj
        input.q_vector_set = qVectors
        input.weights = weightsList

    return input
Example no. 27
from MMTK.NormalModes import VibrationalModes
from MMTK.Trajectory import Trajectory, TrajectoryOutput, \
                            RestartTrajectoryOutput, StandardLogOutput, \
                            trajectoryInfo
from sys import argv, exit
from Scientific.Statistics import mean, standardDeviation
#from nMOLDYN.Mathematics.Analysis import correlation
from Scientific import N
from Scientific.Geometry import Vector
from numpy import *
from numpy.linalg import *

traj = argv[1]
nCO2 = 1
label = "norotskip-" + argv[2]
trajectory = Trajectory(None, traj)
universe = trajectory.universe
natoms = universe.numberOfAtoms()

np = universe.numberOfPoints()
P = np / natoms

stepcount = 0

rval = zeros(len(trajectory) * P, float)
cval = zeros(len(trajectory) * P, float)
#vfile=open("final-pot-"+str(P)+"-"+label,"w")
#rfile=open("hist-r-"+str(P)+"-"+label,"w")
#ctfile=open("hist-cost-"+str(P)+"-"+label,"w")

rfile = open("data-r-" + str(P) + "-" + label, "w")
Example no. 28
from MMTK import *
from MMTK.Proteins import Protein
from MMTK.Trajectory import Trajectory, SnapshotGenerator, TrajectoryOutput
from Scientific import N

# Construct system: lysozyme in vacuum
universe = InfiniteUniverse()
universe.protein = Protein('~/hao/proteins/PDB/193l.pdb')

# Select residues to rotate
# (this particular choice here is completely arbitrary)
residues = [universe.protein[0][i] for i in [11, 35, 68, 110]]

# Create trajectory
trajectory = Trajectory(universe, "rotamers.nc", "w", "Sidechain rotations")

# Create the snapshot generator
snapshot = SnapshotGenerator(
    universe, actions=[TrajectoryOutput(trajectory, ["all"], 0, None, 1)])

# Perform sidechain rotations and write the configurations
snapshot()
for residue in residues:
    print(f"{residue}")
    chi = residue.chiAngle()
    for angle in N.arange(-N.pi, N.pi, N.pi / 10.):
        chi.setValue(angle)
        print(f"{angle}")
        snapshot()
Example no. 29
	atom.setNumberOfBeads(P)

natoms = len(universe.atomList())

universe.setForceField(mbpolForceField(universe))

#This is the conversion factor to Units of K
Kper1overcm=11604.505/8065.54445
conv=Kper1overcm/1.196e-2
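# (Hedged note: 11604.505 K per eV divided by 8065.544 cm^-1 per eV gives the K-per-cm^-1
#  factor above; dividing by 1.196e-2 kJ/mol per cm^-1 then makes 'conv' a K-per-(kJ/mol)
#  factor, i.e. a conversion from MMTK's energy unit to temperature.)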
#print 1./(Units.k_B*0.37*Units.K)

#print traj
#rotskipval=float(argv[2])
mbpol_test=True
if mbpol_test:
    trajectory = Trajectory(universe, traj)
else:
    trajectory = Trajectory(None, traj)
print 'test'
npoints = len(trajectory)
universe = trajectory.universe
natoms = universe.numberOfAtoms()
time = trajectory.time
np = universe.numberOfPoints()
P = np/natoms

#if (rotskipval < 100.0):
#    rotskipratio=1.0
#else:
#    rotskipratio=100.0/rotskipval
Example no. 30
# Scale down the system in small steps
while current_size > real_size:

    scale_factor = max(0.95, real_size / current_size)
    for object in world:
        object.translateTo(scale_factor * object.position())
    current_size = scale_factor * current_size
    world.setSize(current_size)

    print(f'Current size: {current_size}')
    stdout.flush()

    minimizer(steps=100)
    integrator(steps=200)

    save(world, 'water' + str(n_molecules) + '.intermediate.setup')

# Final equilibration
trajectory = Trajectory(world, 'water.nc', 'w', 'Final equilibration')
integrator(
    steps=1000,
    actions=[
        TrajectoryOutput(trajectory,
                         ("time", "energy", "thermodynamic", "configuration"),
                         0, None, 10)
    ])
trajectory.close()

# Save final system
save(world, 'water' + str(n_molecules) + '.setup')