Code example #1
File: zgf_refine.py Project: iwasherefirst2/ZIBMolPy
def main(argv=None):
	if(argv==None):
		argv = sys.argv
	options = options_desc.parse_args(argv)[0]

	assert(not(options.refine_all and options.extend_all)) 
	
	pool = Pool()
	needy_nodes = pool.where("isa_partition and is_sampled").multilock()
	
	# 1. Trying to detect fake convergence
	for n in pool.where("state == 'converged'"):
		means = kmeans(n.trajectory, k=2)
		d = (means[0] - means[1]).norm2()
		if(d > 2.0 and (options.refine_all or userinput("%s has converged but appears to have a bimodal distribution.\nDo you want to refine?"%n.name, "bool"))): #TODO decide upon threshold (per coordinate?)
			refine(n, options)
	
	# 2. Dealing with not-converged nodes
	for n in pool.where("state == 'not-converged'"):
		if(not(options.refine_all or options.extend_all)):
			choice = userchoice("%s has not converged. What do you want to do?"%n.name, ['_refine', '_extend', '_ignore'])
		if(options.refine_all or choice=="r"):
			refine(n, options)
		elif(options.extend_all or choice=="e"):
			extend(n)
		elif(choice=="i"):
			continue
	
	for n in needy_nodes:
		n.save()
		n.unlock()
			
	zgf_setup_nodes.main()
	zgf_grompp.main()
	zgf_cleanup.main()	
Code example #2
def main(argv=None):
    if argv == None:
        argv = sys.argv
    options = options_desc.parse_args(argv)[0]

    pool = Pool()

    found_parents = [n for n in pool if n.name == options.parent_node]
    assert len(found_parents) == 1
    parent = found_parents[0]

    chosen_idx = np.linspace(start=0, stop=parent.trajectory.n_frames - 1, num=options.numnodes).astype(int)

    print "choosen_idx: ", chosen_idx

    for i in chosen_idx:
        n = Node()
        n.parent_frame_num = i
        n.parent = parent
        n.state = "created"
        n.extensions_counter = 0
        n.extensions_max = 0
        n.extensions_length = 0
        n.sampling_length = parent.sampling_length * 3
        n.internals = parent.trajectory.getframe(i)
        pool.append(n)
        n.save()
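
A quick illustration of the equidistant frame selection above, with hypothetical numbers (a parent trajectory of 100 frames, numnodes=5):

import numpy as np

chosen_idx = np.linspace(start=0, stop=99, num=5).astype(int)
print chosen_idx  # -> [ 0 24 49 74 99] (values are truncated, both endpoints included)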
Code example #3
def main(argv=None):
    if (argv == None):
        argv = sys.argv
    options = options_desc.parse_args(argv)[0]

    pool = Pool()

    found_parents = [n for n in pool if n.name == options.parent_node]
    assert (len(found_parents) == 1)
    parent = found_parents[0]

    chosen_idx = np.linspace(start=0,
                             stop=parent.trajectory.n_frames - 1,
                             num=options.numnodes).astype(int)

    print "choosen_idx: ", chosen_idx

    for i in chosen_idx:
        n = Node()
        n.parent_frame_num = i
        n.parent = parent
        n.state = "created"
        n.extensions_counter = 0
        n.extensions_max = 0
        n.extensions_length = 0
        n.sampling_length = parent.sampling_length * 3
        n.internals = parent.trajectory.getframe(i)
        pool.append(n)
        n.save()
Code example #4
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()
	
	choice = "state in ('converged', 'refined')"
	if(options.ignore_convergence):
		choice = "state in ('converged', 'not-converged', 'refined')"	

	needy_nodes = NodeList([n for n in pool.where(choice) if not n == pool.root]) # we won't touch the root

	if not(len(needy_nodes)):
		sys.exit("Nothing to do.")

	if not(userinput("Once the solvent has been removed, further refinement of the pool is not possible. This includes the generation of unrestrained transition nodes! Continue?", "bool")):
		sys.exit("Quit by user.")
		
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes

	try:
		for n in needy_nodes:	
			discard_solvent(n, "pdb")
			discard_solvent(n, "trr")

		for n in needy_nodes:
			n.unlock()
	except:
		traceback.print_exc()
Code example #5
def main():

	options = options_desc.parse_args(sys.argv)[0]
	pool = Pool()

	needy_nodes = pool.where("state == 'merge-able'").multilock()

	if(len(needy_nodes) == 0):
		return
	
	# find out about trr time step
	dt = 0	
	nodeDir = needy_nodes[0].dir.split('/')[-1]
	for fn in os.listdir(needy_nodes[0].dir):
		if re.match("^"+nodeDir+".+run\d+\.trr", fn):
			trr = TrrFile(needy_nodes[0].dir+"/"+fn)			
			dt = trr.first_frame.next().t - trr.first_frame.t
			trr.close()
			break

	# dt is sometimes noisy in the final digits (three digits is femtosecond step = enough)
	dt = np.around(dt, decimals=3)
	for n in needy_nodes:

		if(options.trr):
			# merge sampling trajectories
			trr_fns = sorted([ fn for fn in os.listdir(n.dir) if re.match("[^#].+run\d+.trr", fn) ])
			cmd = ["trjcat", "-f"]
			cmd += trr_fns
			cmd += ["-o", "../../"+n.trr_fn, "-cat"]
			print("Calling: %s"%" ".join(cmd))
			check_call(cmd, cwd=n.dir)

		if(options.edr):
			# merge edr files
			# get list of edr-files
			edr_fnames = sorted([n.dir+"/"+fn for fn in os.listdir(n.dir) if re.match("[^#].+run\d+.edr", fn)])
			assert( len(edr_fnames) ==  n.extensions_counter+1 )
			assert( len(edr_fnames) ==  n.extensions_max+1 )

			time_offset = n.sampling_length+dt

			for edr_fn in edr_fnames[1:]:	
				# adapt edr starting times
				cmd = ["eneconv", "-f", edr_fn, "-o", edr_fn, "-settime"]
				print("Calling: "+(" ".join(cmd)))
				p = Popen(cmd, stdin=PIPE)
				p.communicate(input=(str(time_offset)+"\n"))
				assert(p.wait() == 0)

				time_offset += n.extensions_length+dt

			# concatenate edr files with adapted starting times
			cmd = ["eneconv", "-f"] + edr_fnames + ["-o", n.dir+"/ener.edr"]
			print("Calling: "+(" ".join(cmd)))
			p = Popen(cmd)
			retcode = p.wait()
			assert(retcode == 0)

	needy_nodes.unlock()
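
A worked example of the eneconv time offsets above, with hypothetical values sampling_length = 100 ps, extensions_length = 50 ps and dt = 0.002 ps: run 1 is shifted to start at 100.002 ps, run 2 at 150.004 ps, and every further extension starts another extensions_length + dt later, so the concatenated ener.edr gets a strictly increasing time axis.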
Code example #6
def main():
    options = options_desc.parse_args(sys.argv)[0]

    pool = Pool()

    choice = "state in ('converged', 'refined')"
    if (options.ignore_convergence):
        choice = "state in ('converged', 'not-converged', 'refined')"

    needy_nodes = NodeList([
        n for n in pool.where(choice) if not n == pool.root
    ])  # we won't touch the root

    if not (len(needy_nodes)):
        sys.exit("Nothing to do.")

    if not (userinput(
            "Once the solvent has been removed, further refinement of the pool is not possible. This includes the generation of unrestrained transition nodes! Continue?",
            "bool")):
        sys.exit("Quit by user.")

    assert (len(needy_nodes) == len(needy_nodes.multilock())
            )  # make sure we lock ALL nodes

    try:
        for n in needy_nodes:
            discard_solvent(n, "pdb")
            discard_solvent(n, "trr")

        for n in needy_nodes:
            n.unlock()
    except:
        traceback.print_exc()
Code example #7
File: zgf_grompp.py Project: iwasherefirst2/ZIBMolPy
def main():
	pool = Pool()

	if(len(pool.where("state == 'grompp-able'")) > 0):
		call_grompp(pool)
		
	if(len(pool.where("state == 'em-grompp-able'")) > 0):
		assert(path.exists("em.mdp")) #TODO that's not super nice yet
		call_grompp(pool, mode='em')
Code example #8
def main():
    options = options_desc.parse_args(sys.argv)[0]

    zgf_cleanup.main()

    pool = Pool()

    not_reweightable = "isa_partition and state not in ('converged'"
    if (options.ignore_convergence):
        not_reweightable += ",'not-converged'"
    if (options.ignore_failed):
        not_reweightable += ",'mdrun-failed'"
    not_reweightable += ")"

    if pool.where(not_reweightable):
        print "Pool can not be reweighted due to the following nodes:"
        for bad_guy in pool.where(not_reweightable):
            print "Node %s with state %s." % (bad_guy.name, bad_guy.state)
        sys.exit("Aborting.")

    active_nodes = pool.where("isa_partition and state != 'mdrun-failed'")
    assert (len(active_nodes) == len(active_nodes.multilock())
            )  # make sure we lock ALL nodes

    if (options.check_restraint):
        for n in active_nodes:
            check_restraint_energy(n)

    if (options.method == "direct"):
        reweight_direct(active_nodes, options)
    elif (options.method == "entropy"):
        reweight_entropy(active_nodes, options)
    elif (options.method == "presampling"):
        reweight_presampling(active_nodes, options)
    else:
        raise (Exception("Method unkown: " + options.method))

    weight_sum = np.sum([n.tmp['weight'] for n in active_nodes])

    print "Thermodynamic weights calculated by method '%s':" % options.method
    for n in active_nodes:
        n.obs.weight_direct = n.tmp['weight'] / weight_sum
        if (options.method == "direct"):
            print(
                "  %s with mean_V: %f [kJ/mol], %d refpoints and weight: %f" %
                (n.name, n.obs.mean_V, n.tmp['n_refpoints'],
                 n.obs.weight_direct))
        else:
            print("  %s with A: %f [kJ/mol] and weight: %f" %
                  (n.name, n.obs.A, n.obs.weight_direct))
    print "The above weighting uses bonded energies='%s' and nonbonded energies='%s'." % (
        options.e_bonded, options.e_nonbonded)

    for n in active_nodes:
        n.save()

    active_nodes.unlock()
Code example #9
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	zgf_cleanup.main()
	
	pool = Pool()

	#not_reweightable = "state not in ('refined','converged')"
	not_reweightable = "isa_partition and state!='converged'"
	if options.ignore_convergence:
		not_reweightable = "isa_partition and state not in ('converged','not-converged')"

	if pool.where(not_reweightable):
		print "Pool can not be reweighted due to the following nodes:"		
		for bad_guy in pool.where(not_reweightable):
			print "Node %s with state %s."%(bad_guy.name, bad_guy.state)
		sys.exit("Aborting.")
		
	active_nodes = pool.where("isa_partition")
	assert(len(active_nodes) == len(active_nodes.multilock())) # make sure we lock ALL nodes

	for n in active_nodes:
		check_restraint_energy(n)

	# find out about number of energygrps
	mdp_file = gromacs.read_mdp_file(pool.mdp_fn)
	energygrps = [str(egrp) for egrp in re.findall('[\S]+', mdp_file["energygrps"])]
	moi_energies = True	
	if len(energygrps) < 2:
		moi_energies = False # Gromacs energies are named differently when there are less than two energygrps :(

	if(options.method == "direct"): 
		reweight_direct(active_nodes, moi_energies, options.sol_energy, options.save_refpoints)
	elif(options.method == "entropy"):
		reweight_entropy(active_nodes, moi_energies, options.sol_energy, options.save_refpoints)
	elif(options.method == "presampling"):
		reweight_presampling(active_nodes, options.presamp_temp, moi_energies, options.sol_energy)
	else:
		raise(Exception("Method unkown: "+options.method))
	
	weight_sum = np.sum([n.tmp['weight'] for n in active_nodes])
	
	print "Thermodynamic weights calculated by method '%s' (sol-energy=%s):"%(options.method, options.sol_energy)
	for n in active_nodes:
		n.obs.weight_direct = n.tmp['weight'] / weight_sum
		if(options.method == "direct"):
			print("  %s with mean_V: %f [kJ/mol], %d refpoints and weight: %f" % (n.name, n.obs.mean_V, n.tmp['n_refpoints'], n.obs.weight_direct))
		else:
			print("  %s with A: %f [kJ/mol] and weight: %f" % (n.name, n.obs.A, n.obs.weight_direct))

	for n in active_nodes:
		n.save()

	active_nodes.unlock()
Code example #10
File: zgf_grompp.py Project: pombredanne/ZIBMolPy
def main():
    pool = Pool()
    needy_nodes = pool.where("state == 'grompp-able'").multilock()

    try:
        for n in needy_nodes:
            call_grompp(n)
    except:
        traceback.print_exc()

    for n in needy_nodes:
        n.unlock()
Code example #11
def main():
    options = options_desc.parse_args(sys.argv)[0]

    pool = Pool()
    needy_nodes = pool.where("state == 'grompp-able'")
    assert (len(needy_nodes) == len(needy_nodes.multilock())
            )  # make sure we lock ALL nodes

    if (options.solv_model == "tip3p"):
        solv_box = "spc216.gro"
        solv_fn = "tip3p.itp"
    elif (options.solv_model == "tip4p"):
        solv_box = "tip4p.gro"
        solv_fn = "tip4p.itp"
    elif (options.solv_model == "tip4pew"):
        solv_box = "tip4p.gro"
        solv_fn = "tip4pew.itp"
    elif (options.solv_model == "tip5"):
        solv_box = "tip5p.gro"
        solv_fn = "tip5p.itp"
    elif (options.solv_model == "spc"):
        solv_box = "spc216.gro"
        solv_fn = "spc.itp"
    elif (options.solv_model == "spce"):
        solv_box = "spc216.gro"
        solv_fn = "spce.itp"
    elif (
            options.solv_model == "acetonitrile"
    ):  # TODO one might change this one to "custom" and let user enter name of template box
        solv_box = "acetonitrile.pdb"
        msg = "Topology update for acetonitrile is not supported. Proceed?"
        if not (userinput(msg, "bool")):
            for n in needy_nodes:
                n.unlock()
            return ("Quit by user.")

    # determine maximum length of linears, if any
    max_linear = query_linear_length(pool)

    # make box and fill with solvent
    genbox(pool, max_linear, options.bt,
           (options.box_x, options.box_y, options.box_z), solv_box)

    # update topology files (add solvent model and ions includes)
    if not (options.solv_model == "acetonitrile"):
        update_tops(pool, solv_fn)

    for n in needy_nodes:
        n.state = "em-grompp-able"
        zgf_grompp.call_grompp(
            n, mdp_file=options.grompp, final_state="em-mdrun-able"
        )  # re-grompp to get a tpr for energy minimization
        n.unlock()
Code example #12
File: zgf_grompp.py Project: CMD-at-ZIB/ZIBMolPy
def main():
	pool = Pool()
	needy_nodes = pool.where("state == 'grompp-able'").multilock()
		
	try:
		for n in needy_nodes:
			call_grompp(n)			
	except:
		traceback.print_exc()

	for n in needy_nodes:
		n.unlock()
Code example #13
File: zgf_reweight.py Project: CMD-at-ZIB/ZIBMolPy
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	zgf_cleanup.main()
	
	pool = Pool()

	not_reweightable = "isa_partition and state not in ('converged'"
	if(options.ignore_convergence):
		not_reweightable += ",'not-converged'"
	if(options.ignore_failed):
		not_reweightable += ",'mdrun-failed'"
	not_reweightable += ")"

	if pool.where(not_reweightable):
		print "Pool can not be reweighted due to the following nodes:"		
		for bad_guy in pool.where(not_reweightable):
			print "Node %s with state %s."%(bad_guy.name, bad_guy.state)
		sys.exit("Aborting.")

	active_nodes = pool.where("isa_partition and state != 'mdrun-failed'")
	assert(len(active_nodes) == len(active_nodes.multilock())) # make sure we lock ALL nodes

	if(options.check_restraint):
		for n in active_nodes:
			check_restraint_energy(n)

	if(options.method == "direct"):
		reweight_direct(active_nodes, options)
	elif(options.method == "entropy"):
		reweight_entropy(active_nodes, options)
	elif(options.method == "presampling"):
		reweight_presampling(active_nodes, options)
	else:
		raise(Exception("Method unkown: "+options.method))
	
	weight_sum = np.sum([n.tmp['weight'] for n in active_nodes])
	
	print "Thermodynamic weights calculated by method '%s':"%options.method
	for n in active_nodes:
		n.obs.weight_direct = n.tmp['weight'] / weight_sum
		if(options.method == "direct"):
			print("  %s with mean_V: %f [kJ/mol], %d refpoints and weight: %f" % (n.name, n.obs.mean_V, n.tmp['n_refpoints'], n.obs.weight_direct))
		else:
			print("  %s with A: %f [kJ/mol] and weight: %f" % (n.name, n.obs.A, n.obs.weight_direct))
	print "The above weighting uses bonded energies='%s' and nonbonded energies='%s'."%(options.e_bonded, options.e_nonbonded)

	for n in active_nodes:
		n.save()

	active_nodes.unlock()
Code example #14
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	#TODO put somehow into Options, e.g. min_value=1 or required=True
	if(not options.doomed_nodes):
		sys.exit("Option --doomed_nodes is required.")
		
	pool = Pool()
	old_pool_size = len(pool)
	old_alpha = pool.alpha

	doomed_nodes = NodeList()
	
	#TODO: maybe this code should go into ZIBMolPy.ui 
	for name in options.doomed_nodes.split(","):
		found = [n for n in pool if n.name == name]
		if(len(found) != 1):
			sys.exit("Coult not find node '%s'"%(name))
		doomed_nodes.append(found[0])
	
	for n in doomed_nodes:
		if(n == pool.root):
			sys.exit("Node %s is the root. Removal not allowed."%(n.name))		
		#if(len(n.children) > 0):
		#	sys.exit("Node %s has children. Removal not allowed."%(n.name)) #TODO why should we forbid this?

	if not(userinput("The selected node(s) will be removed permanently. Continue?", "bool")):
		sys.exit("Quit by user.")

	assert(len(doomed_nodes) == len(doomed_nodes.multilock()))
	for n in doomed_nodes:
		print("Removing directory: "+n.dir)
		shutil.rmtree(n.dir)

	pool.reload_nodes()
	
	#TODO: this code-block also exists in zgf_create_node
	if(len(pool.where("isa_partition")) < 2):
		pool.alpha = None
	elif(options.methodalphas == "theta"):
		pool.alpha = zgf_create_nodes.calc_alpha_theta(pool)
	elif(options.methodalphas == "user"):
		pool.alpha = userinput("Please enter a value for alpha", "float")
	else:
		raise(Exception("Method unkown: "+options.methodalphas))

	pool.history.append({'removed_nodes': [(n.name, n.state) for n in doomed_nodes], 'size':old_pool_size, 'alpha':old_alpha, 'timestamp':datetime.now()})
	pool.save()

	#TODO: deal with analysis dir and dependencies
	zgf_cleanup.main()	
Code example #15
def main():
	
	pool = Pool()
	needy_nodes = pool.where("state == 'created'")
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes 
	
	extract_frames(pool)
	generate_topology(pool)
	generate_mdp(pool)
	
	for n in needy_nodes:
		n.state = "grompp-able"
		n.save()
		n.unlock()
Code example #16
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	zgf_cleanup.main()

	pool = Pool()

	needy_nodes = pool.where("state == '%s'"%options.current_state).multilock()

	for n in needy_nodes:
		print "Recovering node %s with state %s to state %s ..."%(n.name, n.state, options.recover_state)
		n.state = options.recover_state
		n.save()		
		n.unlock()
Code example #17
def main():

    pool = Pool()
    needy_nodes = pool.where("state == 'created'")
    assert (len(needy_nodes) == len(needy_nodes.multilock())
            )  # make sure we lock ALL nodes

    extract_frames(pool)
    generate_topology(pool)
    generate_mdp(pool)

    for n in needy_nodes:
        n.state = "grompp-able"
        n.save()
        n.unlock()
Code example #18
File: zgf_cleanup.py Project: pombredanne/ZIBMolPy
def main():
    print("Cleaning up.")

    #check locks
    pool = Pool()
    for n in pool:
        sys.stdout.write("Node %s is %s and " % (n.name, n.state))
        if (not n.is_locked):
            print("not locked.")
        elif (n.is_lock_valid):
            print("is locked and valid.")
        else:
            print("its lock is stale - removing it.")
            os.remove(n.lock_fn)

    #check zgf-dep files
    files = []
    for root, _, names in os.walk(os.getcwd()):
        for n in names:
            if (not n.endswith(".zgf-dep")):
                continue
            dep = path.join(root, n)
            fn = dep2fn(dep)
            if (not path.exists(fn)):
                print("Removing lonely dep-file: " + dep)
                os.remove(dep)
                continue
            files.append(
                fn
            )  #in extra loop - otherwise check_files and remove(dep) collide

    for fn in files:
        check_file(fn)
Code example #19
File: zgf_genion.py Project: CMD-at-ZIB/ZIBMolPy
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()
	needy_nodes = pool.where("state == 'em-mdrun-able'")
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes


	# add ions to simulation boxes
	call_genion(pool, options.np, options.pname, options.nn, options.nname, options.random_seed)

	
	for n in needy_nodes:
		n.state = "em-grompp-able"
		zgf_grompp.call_grompp(n, mdp_file=options.grompp, final_state="em-mdrun-able") # re-grompp to get a tpr for energy minimization
		n.unlock()
Code example #20
File: node.py Project: VedatDurmaz/ZIBMolPy
    def __new__(cls, name=None):
        """ Instanciates a node from a file, and (name=None) creates a new node
		
			Caution: Filenames are always given relative to the root-dir
			When no name is given, a new node is created. """

        if (name != None and cls._instances.has_key(name)):
            return (cls._instances[name])

        if (name == None):  # a new node, lets find a name
            for i in itertools.count(0):
                name = "node%.4d" % i
                if (cls._instances.has_key(name)):
                    continue  # new nodes might not have been saved yet
                if (path.exists("./nodes/" + name)): continue
                break

        self = object.__new__(cls)
        cls._instances[name] = self

        #actual init-code
        from ZIBMolPy.pool import Pool  #avoids circular imports
        self._pool = Pool()  #Pool is a singleton
        self._name = name
        self._tmp = Store()  #for things that need to be stored temporarily
        self._obs = Store()
        self.parent = None

        if (path.exists(self.dir)):
            self.reload()

        #self.pool.append(self) #register with pool
        return (self)
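
The __new__ above implements a name-keyed instance cache: constructing a Node with a known name returns the already existing object. A minimal standalone sketch of the same pattern (nothing ZIBMolPy-specific, pure standard library):

class Interned(object):
    _instances = {}

    def __new__(cls, name):
        # return the cached instance if this name was seen before
        if name in cls._instances:
            return cls._instances[name]
        self = object.__new__(cls)
        cls._instances[name] = self
        return self

assert Interned("node0001") is Interned("node0001")  # same object for the same name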
Code example #21
def main():
	options = options_desc.parse_args(sys.argv)[0]

	outfile = open(options.outfile,"w")
	
	pool = Pool()
	needy_nodes = pool.where("isa_partition and state not in ('refined','mdrun-failed')")
	
	for n in needy_nodes:
		outfile.write("%s, state: '%s':\n"%(n.name,n.state))
		outfile.write(str(n.internals.array)+"\n")
		outfile.write("mean pot.: %f, std pot.: %f, free energy estimate: %f\n"%(n.obs.mean_V,n.obs.std_V,n.obs.A))
		outfile.write("#========================================================================#\n")

	outfile.close()
	print "Pool info was written to %s."%options.outfile
Code example #22
def main():
    options = options_desc.parse_args(sys.argv)[0]

    zgf_cleanup.main()

    pool = Pool()

    needy_nodes = pool.where("state == '%s'" %
                             options.current_state).multilock()

    for n in needy_nodes:
        print "Recovering node %s with state %s to state %s ..." % (
            n.name, n.state, options.recover_state)
        n.state = options.recover_state
        n.save()
        n.unlock()
Code example #23
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()
	needy_nodes = pool.where("state == 'grompp-able'")
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes

	if(options.solv_model == "tip3p"):
		solv_box = "spc216.gro"
		solv_fn = "tip3p.itp"
	elif(options.solv_model == "tip4p"):
		solv_box = "tip4p.gro"
		solv_fn = "tip4p.itp"
	elif(options.solv_model == "tip4pew"):
		solv_box = "tip4p.gro"
		solv_fn = "tip4pew.itp"
	elif(options.solv_model == "tip5"):
		solv_box = "tip5p.gro"
		solv_fn = "tip5p.itp"
	elif(options.solv_model == "spc"):
		solv_box = "spc216.gro"
		solv_fn = "spc.itp"
	elif(options.solv_model == "spce"):
		solv_box = "spc216.gro"
		solv_fn = "spce.itp"
	elif(options.solv_model == "acetonitrile"): # TODO one might change this one to "custom" and let user enter name of template box
		solv_box = "acetonitrile.pdb"
		msg = "Topology update for acetonitrile is not supported. Proceed?"
		if not(userinput(msg, "bool")):
			for n in needy_nodes:
				n.unlock()
			return("Quit by user.")
	
	# determine maximum length of linears, if any
	max_linear = query_linear_length(pool)

	# make box and fill with solvent
	genbox(pool, max_linear, options.bt, (options.box_x, options.box_y, options.box_z), solv_box)

	# update topology files (add solvent model and ions includes)
	if not(options.solv_model == "acetonitrile"):
		update_tops(pool, solv_fn)

	for n in needy_nodes:
		n.state = "em-grompp-able"
		zgf_grompp.call_grompp(n, mdp_file=options.grompp, final_state="em-mdrun-able") # re-grompp to get a tpr for energy minimization
		n.unlock()
Code example #24
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	#TODO put somehow into Options, e.g. min_value=1 or required=True
	if(not options.doomed_nodes):
		sys.exit("Option --doomed_nodes is required.")
		
	pool = Pool()
	old_pool_size = len(pool)
	old_alpha = pool.alpha

	doomed_nodes = NodeList()
	
	#TODO: maybe this code should go into ZIBMolPy.ui 
	for name in options.doomed_nodes.split(","):
		found = [n for n in pool if n.name == name]
		if(len(found) != 1):
			sys.exit("Coult not find node '%s'"%(name))
		doomed_nodes.append(found[0])
	
	for n in doomed_nodes:
		if(n == pool.root):
			sys.exit("Node %s is the root. Removal not allowed."%(n.name))		
		#if(len(n.children) > 0):
		#	sys.exit("Node %s has children. Removal not allowed."%(n.name)) #TODO why should we forbid this?

	if not(userinput("The selected node(s) will be removed permanently. Continue?", "bool")):
		sys.exit("Quit by user.")

	assert(len(doomed_nodes) == len(doomed_nodes.multilock()))
	for n in doomed_nodes:
		print("Removing directory: "+n.dir)
		shutil.rmtree(n.dir)

	pool.reload_nodes()
	
	#TODO: this code-block also exists in zgf_create_node
	if(len(pool.where("isa_partition")) < 2):
		pool.alpha = None
	elif(options.methodalphas == "theta"):
		pool.alpha = zgf_create_nodes.calc_alpha_theta(pool)
	elif(options.methodalphas == "user"):
		pool.alpha = userinput("Please enter a value for alpha", "float")
	else:
		raise(Exception("Method unkown: "+options.methodalphas))

	pool.history.append({'removed_nodes': [(n.name, n.state) for n in doomed_nodes], 'size':old_pool_size, 'alpha':old_alpha, 'timestamp':datetime.now()})
	pool.save()

	#TODO: deal with analysis dir and dependencies
	zgf_cleanup.main()	
Code example #25
def main():
    options = options_desc.parse_args(sys.argv)[0]

    pool = Pool()
    needy_nodes = pool.where("state == 'em-mdrun-able'")
    assert (len(needy_nodes) == len(needy_nodes.multilock())
            )  # make sure we lock ALL nodes

    # add ions to simulation boxes
    call_genion(pool, options.np, options.pname, options.nn, options.nname,
                options.random_seed)

    for n in needy_nodes:
        n.state = "em-grompp-able"
        zgf_grompp.call_grompp(
            n, mdp_file=options.grompp, final_state="em-mdrun-able"
        )  # re-grompp to get a tpr for energy minimization
        n.unlock()
Code example #26
File: zgf_genion.py Project: iwasherefirst2/ZIBMolPy
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()
	needy_nodes = pool.where("state == 'em-mdrun-able'")
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes


	# add ions to simulation boxes
	call_genion(pool, options.np, options.pname, options.nn, options.nname, options.random_seed)

	
	for n in needy_nodes:
		n.state = "em-grompp-able"
		n.save()
		n.unlock()

	zgf_grompp.main()
Code example #27
def main():
    options = options_desc.parse_args(sys.argv)[0]

    outfile = open(options.outfile, "w")

    pool = Pool()
    needy_nodes = pool.where(
        "isa_partition and state not in ('refined','mdrun-failed')")

    for n in needy_nodes:
        outfile.write("%s, state: '%s':\n" % (n.name, n.state))
        outfile.write(str(n.internals.array) + "\n")
        outfile.write(
            "mean pot.: %f, std pot.: %f, free energy estimate: %f\n" %
            (n.obs.mean_V, n.obs.std_V, n.obs.A))
        outfile.write(
            "#========================================================================#\n"
        )

    outfile.close()
    print "Pool info was written to %s." % options.outfile
Code example #28
def main(argv=None):
    if (argv == None):
        argv = sys.argv
    options = options_desc.parse_args(argv)[0]

    assert (not (options.refine_all and options.extend_all))

    pool = Pool()
    needy_nodes = pool.where("isa_partition and is_sampled").multilock()

    # 1. Trying to detect fake convergence
    for n in pool.where("state == 'converged'"):
        means = kmeans(n.trajectory, k=2)
        d = (means[0] - means[1]).norm2()
        if (d > 2.0 and (options.refine_all or userinput(
                "%s has converged but appears to have a bimodal distribution.\nDo you want to refine?"
                % n.name,
                "bool"))):  #TODO decide upon threshold (per coordinate?)
            refine(n, options)

    # 2. Dealing with not-converged nodes
    for n in pool.where("state == 'not-converged'"):
        if (not (options.refine_all or options.extend_all)):
            choice = userchoice(
                "%s has not converged. What do you want to do?" % n.name,
                ['_refine', '_extend', '_ignore'])
        if (options.refine_all or choice == "r"):
            refine(n, options)
        elif (options.extend_all or choice == "e"):
            extend(n)
        elif (choice == "i"):
            continue

    for n in needy_nodes:
        n.save()
        n.unlock()

    zgf_setup_nodes.main()
    zgf_grompp.main()
    zgf_cleanup.main()
Code example #29
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()
	needy_nodes = pool.where("state == 'grompp-able'")
	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes

	if(options.solv_model == "tip3p"):
		solv_box = "spc216.gro"
		solv_fn = "tip3p.itp"
	elif(options.solv_model == "tip4p"):
		solv_box = "tip4p.gro"
		solv_fn = "tip4p.itp"
	elif(options.solv_model == "tip4pew"):
		solv_box = "tip4p.gro"
		solv_fn = "tip4pew.itp"
	elif(options.solv_model == "tip5"):
		solv_box = "tip5p.gro"
		solv_fn = "tip5p.itp"
	elif(options.solv_model == "spc"):
		solv_box = "spc216.gro"
		solv_fn = "spc.itp"
	elif(options.solv_model == "spce"):
		solv_box = "spc216.gro"
		solv_fn = "spce.itp"
	
	# determine maximum length of linears, if any
	max_linear = query_linear_length(pool)

	# make box and fill with solvent
	genbox(pool, max_linear, options.bt, (options.box_x, options.box_y, options.box_z), solv_box)

	# update topology files (add solvent model and ions includes)
	update_tops(pool, solv_fn)

	for n in needy_nodes:
		n.state = "em-grompp-able"
		n.save()
		n.unlock()
Code example #30
File: zgf_mdrun.py Project: pombredanne/ZIBMolPy
def main():
	options = options_desc.parse_args(sys.argv)[0]
	pool = Pool()
	
	if(options.convtest):
		for n in pool.where("state in ('converged', 'not-converged')"):
			print("\n\nRunning Gelman-Rubin on %s"%n)
			conv_check_gelman_rubin(n)
		return # exit

	auto_refines_counter = 0
	while(True):		
		pool.reload()
		pool.reload_nodes()
		for n in pool:
			n.reload()

		active_node = None
		for n in pool.where("state in ('em-mdrun-able', 'mdrun-able', 'rerun-able-converged', 'rerun-able-not-converged')"):
			if(n.lock()):
				active_node = n
				break

		if(active_node == None):
			if(auto_refines_counter < options.auto_refines):
				auto_refines_counter += 1
				print("\n\nRunning 'zgf_refine --refine-all' for the %d time..."%auto_refines_counter)
				zgf_refine.main(["--refine-all"])
				continue
			else:
				break # we're done - exit
	
		try:
			process(active_node, options)
			active_node.save()
			active_node.unlock()
		except:
			print "MDRUN FAILED"
			active_node.state = "mdrun-failed"
			active_node.save()
			active_node.unlock()
			traceback.print_exc()
			continue
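
The while-loop above is a simple cooperative work queue: each invocation scans the pool for a runnable node, tries to lock it, and either processes it or marks it failed. A minimal sketch of that lock-scan-process pattern, independent of ZIBMolPy (the items and their lock()/unlock()/process() methods are hypothetical stand-ins for nodes):

def work(items):
    while True:
        active = None
        for it in items:
            if it.lock():  # returns False if another worker holds the lock
                active = it
                break
        if active is None:
            break  # nothing runnable is left
        try:
            active.process()
        except Exception:
            active.failed = True  # analogous to state = "mdrun-failed"
        finally:
            active.unlock()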
Code example #31
def main():
    pool = Pool()
    for n in pool.where("isa_partition"):
        for cn in n.children.where("is_sampled"):
            print cn.trajectory
Code example #32
def main():
    options = options_desc.parse_args(sys.argv)[0]

    if options.common_filename:
        options.molecule = options.common_filename + ".pdb"
        options.presampling = options.common_filename + ".trr"
        options.internals = options.common_filename + ".int"
        options.grompp = options.common_filename + ".mdp"
        options.topology = options.common_filename + ".top"
        options.index = options.common_filename + ".ndx"

    print("Options:\n%s\n" % pformat(eval(str(options))))

    assert path.exists(options.molecule)
    assert path.exists(options.presampling)
    assert path.exists(options.internals)
    assert path.exists(options.grompp)
    assert path.exists(options.topology)

    # TODO: what if there is no index-file? (make_ndx)
    assert path.exists(options.index)
    assert "MOI" in gromacs.read_index_file(options.index), "group MOI should be defined in index file"

    # checks e.g. if the mdp-file looks good
    mdp_options = gromacs.read_mdp_file(options.grompp)

    # options we cannot fix
    for ref_t in re.findall("[0-9]+", mdp_options["ref_t"]):
        assert int(ref_t) == options.temperature, "temperature in mdp file does not match ZIBgridfree temperature"
        # TODO drop options.temperature and get temperature directly from mdp file... ask again if temperature is above 310K

    # options we can fix
    mdp_options_dirty = False  # if set, a new mdp-file will be written
    required_mdp_options = {"dihre": "yes", "dihre_fc": "1", "disre": "simple", "disre_fc": "1"}
    for (k, v) in required_mdp_options.items():
        if mdp_options.has_key(k):
            assert mdp_options[k] == v  # check, if we would overwrite something
        else:
            mdp_options[k] = v
            mdp_options_dirty = True

    if mdp_options.has_key("energygrps"):
        assert "MOI" in [
            str(egrp) for egrp in re.findall("[\S]+", mdp_options["energygrps"])
        ], "group MOI should be among energygrps in mdp file"
    else:
        mdp_options["energygrps"] = "MOI"
        mdp_options_dirty = True

    a, b = mdp_options.has_key("nstxout"), mdp_options.has_key("nstenergy")
    if a and not b:
        mdp_options["nstenergy"] = mdp_options["nstxout"]
        mdp_options_dirty = True
    elif b and not a:
        mdp_options["nstxout"] = mdp_options["nstenergy"]
        mdp_options_dirty = True
    elif b and a:
        assert mdp_options["nstxout"] == mdp_options["nstenergy"], "nstxout should equal nstenergy"

    if int(mdp_options["nsteps"]) > 1e6:
        msg = "Number of MD-steps?"
        mdp_options["nsteps"] = str(userinput(msg, "int", default=int(mdp_options["nsteps"])))

    # create a fixed mdp-file
    if mdp_options_dirty:
        print("Creating copy of mdp-file and adding missing options.")
        out_fn = options.grompp.rsplit(".", 1)[0] + "_fixed.mdp"
        f = open(out_fn, "w")  # append
        f.write("; Generated by zgf_create_pool\n")
        for i in mdp_options.items():
            f.write("%s = %s\n" % i)
        f.write("; EOF\n")
        f.close()
        options.grompp = out_fn

    # check if subsampling is reasonable
    if os.path.getsize(options.presampling) > 100e6:  # 100MB
        print("Presampling trajectory is large")
        trr = TrrFile(options.presampling)
        dt = trr.first_frame.next().t - trr.first_frame.t
        trr.close()
        print("Presampling timestep is %.2f ps" % dt)
        if dt < 10:  # picoseconds
            # TODO: maybe calculate subsampling factor individually, or ask?
            msg = "Subsample presampling trajectory by a tenth?"
            if userinput(msg, "bool"):
                out_fn = options.presampling.rsplit(".", 1)[0] + "_tenth.trr"
                cmd = ["trjconv", "-f", options.presampling, "-o", out_fn, "-skip", "10"]
                check_call(cmd)
                options.presampling = out_fn

    # balance linears
    if options.balance_linears:
        print("Balance Linears")
        old_converter = Converter(options.internals)
        print("Loading presampling....")
        frames = old_converter.read_trajectory(options.presampling)
        new_coord_list = []
        for c in old_converter:
            if not isinstance(c, LinearCoordinate):
                new_coord_list.append(c)
                continue  # we do not work on other Coordinate-Types
            # TODO: is this a good way to determine new_weight and new_offset???
            new_weight = c.weight / sqrt(2 * frames.var().getcoord(c))
            new_offset = c.offset + frames.mean().getcoord(c)
            new_coord = LinearCoordinate(*c.atoms, label=c.label, weight=new_weight, offset=new_offset)
            new_coord_list.append(new_coord)
        new_converter = Converter(coord_list=new_coord_list)

        assert old_converter.filename.endswith(".int")
        options.internals = old_converter.filename[:-4] + "_balanced.int"
        print("Writing balanced Converter to: " + options.internals)
        f = open(options.internals, "w")
        f.write(new_converter.serialize())
        f.close()
        assert len(Converter(options.internals)) == len(new_coord_list)  # try parsing

    # Finally: Create root-node and pool
    pool = Pool()
    if len(pool) != 0:
        print("ERROR: A pool already exists here.")
        sys.exit(1)

    pool.int_fn = options.internals
    pool.mdp_fn = options.grompp
    pool.top_fn = options.topology
    pool.ndx_fn = options.index
    pool.temperature = options.temperature
    pool.gr_threshold = options.gr_threshold
    pool.gr_chains = options.gr_chains
    pool.alpha = None
    pool.save()  # save pool for the first time...

    # ... then we can save the first node...
    node0 = Node()
    node0.state = "refined"
    node0.save()  # also creates the node directory ... needed for symlink
    os.symlink(os.path.relpath(options.presampling, node0.dir), node0.trr_fn)
    os.symlink(os.path.relpath(options.molecule, node0.dir), node0.pdb_fn)

    pool.root_name = node0.name
    pool.save()  # ... now we have to save the pool again.

    if not path.exists("analysis"):
        os.mkdir("analysis")
Code example #33
def is_applicable():
	pool = Pool()
	return(len(pool.where("state == 'created'")) > 0)
Code example #34
def main(argv=None):
	if(argv==None): 
		argv = sys.argv
	options = options_desc.parse_args(argv)[0]
	
	print("Options:\n%s\n"%pformat(eval(str(options))))

	if(options.random_seed):
		# using numpy-random because python-random differs between 32 and 64 bit
		np.random.seed(hash(options.random_seed))
	
	pool = Pool()
	old_pool_size = len(pool)
	print "pool", pool
	
	if(options.parent_node == "root"):
		parent = pool.root
	else:
		found = [n for n in pool if n.name == options.parent_node]
		assert(len(found) == 1)
		parent = found[0]
	
	
	print "### Generate nodes: %s ###" % options.methodnodes
	if(options.methodnodes == "kmeans"):
		chosen_idx = mknodes_kmeans(parent, options.numnodes)
	elif(options.methodnodes == "equidist"):
		chosen_idx = mknodes_equidist(parent, options.numnodes)
	elif(options.methodnodes == "maxdist"):
		chosen_idx = mknodes_maxdist(parent, options.numnodes)
	elif(options.methodnodes == "all"):
		chosen_idx = mknodes_all(parent)
	else:
		raise(Exception("Method unknown: "+options.methodnodes))

	chosen_idx.sort() # makes preview-trajectory easier to understand 
	if(options.write_preview):
		write_node_preview(pool, parent, chosen_idx)
	
	for i in chosen_idx:
		n = Node()
		n.parent_frame_num = i
		n.parent = parent
		n.state = "creating-a-partition" # will be set to "created" at end of script
		n.extensions_counter = 0
		n.extensions_max = options.ext_max
		n.extensions_length = options.ext_length
		n.sampling_length = options.sampling_length	
		n.internals = parent.trajectory.getframe(i)
		pool.append(n)
		
	print "\n### Obtain alpha: %s ###" % options.methodalphas
	old_alpha = pool.alpha
	if(options.methodalphas == "theta"):
		pool.alpha = calc_alpha_theta(pool)
	elif(options.methodalphas == "user"):
		pool.alpha = userinput("Please enter a value for alpha", "float")
	else:
		raise(Exception("Method unknown: "+options.methodalphas))
	
	pool.history.append({'refined_node': (parent.name, parent.state), 'size':old_pool_size, 'alpha':old_alpha, 'timestamp':datetime.now()})
	
	pool.save() # alpha might have changed
	
	print "\n### Obtain phi fit: %s ###" % options.methodphifit
	if(options.methodphifit == "harmonic"):
		do_phifit_harmonic(pool)
	elif(options.methodphifit == "switch"):
		do_phifit_switch(pool)
	elif(options.methodphifit == "leastsq"):
		do_phifit_leastsq(pool)
	else:
		raise(Exception("Method unkown: "+options.methodphifit))

	for n in pool.where("state == 'creating-a-partition'"):
		n.state = "created"
		n.save()
		print "saving " +str(n)
		
	zgf_cleanup.main()
Code example #35
def main():
    options = options_desc.parse_args(sys.argv)[0]

    zgf_cleanup.main()

    pool = Pool()
    npz_file = np.load(pool.chi_mat_fn)
    chi_matrix = npz_file['matrix']
    node_names = npz_file['node_names']
    n_clusters = npz_file['n_clusters']
    active_nodes = [Node(nn) for nn in node_names]

    # create and open dest_files, initialize counters for statistics
    dest_filenames = [
        pool.analysis_dir + "cluster%d.trr" % (c + 1)
        for c in range(n_clusters)
    ]
    dest_files = [open(fn, "wb") for fn in dest_filenames]
    dest_frame_counters = np.zeros(n_clusters)

    # For each active node...
    for (i, n) in enumerate(active_nodes):
        # ... find the clusters to which it belongs (might be more than one)...
        belonging_clusters = np.argwhere(
            chi_matrix[i] > options.node_threshold)

        # ... and find all typical frames of this node.
        #TODO not an optimal solution... discuss
        # per default, we take every frame with above average weight
        frame_threshold = options.frame_threshold * 2 * np.mean(n.frameweights)
        typical_frame_nums = np.argwhere(n.frameweights > frame_threshold)

        # Go through the node's trajectory ...
        trr_in = TrrFile(n.trr_fn)
        curr_frame = trr_in.first_frame
        for i in typical_frame_nums:
            # ...stop at each typical frame...
            while (i != curr_frame.number):
                curr_frame = curr_frame.next()
            assert (curr_frame.number == i)
            #... and copy it into the dest_file of each belonging cluster.
            for c in belonging_clusters:
                dest_files[c].write(curr_frame.raw_data)
                dest_frame_counters[c] += 1
        trr_in.close()  # close source file

    # close dest_files
    for f in dest_files:
        f.close()
    del (dest_files)

    # desolvate cluster-trajectories 'in-place'
    if (not options.write_sol):
        for dest_fn in dest_filenames:
            tmp_fn = mktemp(suffix='.trr', dir=pool.analysis_dir)
            os.rename(dest_fn, tmp_fn)  # works as both files are in same dir
            cmd = ["trjconv", "-f", tmp_fn, "-o", dest_fn, "-n", pool.ndx_fn]
            p = Popen(cmd, stdin=PIPE)
            p.communicate(input="MOI\n")
            assert (p.wait() == 0)
            os.remove(tmp_fn)

    # register dependencies
    for fn in dest_filenames:
        register_file_dependency(fn, pool.chi_mat_fn)

    # check number of written frames
    sys.stdout.write("Checking lenghts of written trajectories... ")
    for i in range(n_clusters):
        f = TrrFile(dest_filenames[i])
        assert (f.count_frames() == dest_frame_counters[i])
        f.close()
    print("done.")

    #output statistics
    print "\n### Extraction summary ###\nnode threshold: %1.1f, frame threshold: %1.1f" % (
        options.node_threshold, options.frame_threshold)
    print "Cluster trajectories were written to %s:" % pool.analysis_dir
    for (c, f) in enumerate(dest_frame_counters):
        print "cluster%d.trr [%d frames] from node(s):" % (c + 1, f)
        print list(np.argwhere(chi_matrix[:, c] > options.node_threshold).flat)
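
Note on the frame cutoff above: frame_threshold is scaled by twice the mean frame weight, so with a frame_threshold of 0.5 (presumably the default, judging from the "above average weight" comment) the cutoff is exactly the node's mean frame weight.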
Code example #36
def is_applicable():
	pool = Pool()
	return( len(pool.where("'weight_direct' in obs")) > 0 and len(pool.where("isa_partition and 'weight_direct' not in obs")) == 0 )
Code example #37
def is_applicable():
	pool = Pool()
	return( len(pool.where("state in ('converged', 'not-converged', 'refined')")) > 1 )
Code example #38
def is_applicable():
	pool = Pool()
	return(len(pool.where("state=='mdrun-able'")) > 0)
Code example #39
File: zgf_mdrun.py Project: pombredanne/ZIBMolPy
def is_applicable():
	pool = Pool()
	return(len(pool.where("state in ('em-mdrun-able', 'mdrun-able', 'converged', 'not-converged', 'rerun-able-converged', 'rerun-able-not-converged')")) > 0)
Code example #40
File: zgf_rerun.py Project: CMD-at-ZIB/ZIBMolPy
def main():
	options = options_desc.parse_args(sys.argv)[0]
	
	pool = Pool()

	if options.ignore_convergence:
		needy_nodes = pool.where("state in ('converged','not-converged')")
	else:
		needy_nodes = pool.where("state == 'converged'")

	assert(len(needy_nodes) == len(needy_nodes.multilock())) # make sure we lock ALL nodes

	for node in needy_nodes:
		
		if ( path.exists(node.dir+"/rerun_me.trr") and path.exists(node.dir+"/rerun_me.pdb") and path.exists(node.dir+"/rerun_me.top") and path.exists(node.dir+"/rerun_me.tpr") ):
			print("All four rerun files (rerun_me.*) already existing in " + node.dir + ".")
			print("Be sure you want to keep them!")
			continue
			
			
		# if "none", assume that sim is implicit or in vacuum. thus, trjconv not required. 
		if options.pbc_removal != "none":

			# desolvate trr
			if not( path.exists(node.dir+"/rerun_me.trr")):
				cmd = ["trjconv", "-f", node.trr_fn, "-o", node.dir+"/rerun_me.trr", "-s", node.tpr_fn, "-n", node.pool.ndx_fn, "-pbc", options.pbc_removal]			
				print("Calling: "+(" ".join(cmd)))
				p = Popen(cmd, stdin=PIPE)
				p.communicate(input=("MOI\n"))
				assert(p.wait() == 0)
			else:
				print("Rerun trajectory file (rerun_me.trr) already existing in " + node.dir + ".") 
				print("Be sure you want to keep it!")
				#sys.exit(0)
				#continue
	
			# desolvate pdb
			if not( path.exists(node.dir+"/rerun_me.pdb") ):
				cmd = ["trjconv", "-f", node.pdb_fn, "-o", node.dir+"/rerun_me.pdb", "-s", node.tpr_fn, "-n", node.pool.ndx_fn, "-pbc",	options.pbc_removal]			
				print("Calling: "+(" ".join(cmd)))
				p = Popen(cmd, stdin=PIPE)
				p.communicate(input=("MOI\n"))
				assert(p.wait() == 0)
	
			# desolvate topology
			infile = open(node.top_fn, "r").readlines()
			mol_section = False
			out_top = []
		
			for line in infile:
				if( re.match("\s*\[\s*(molecules)\s*\]\s*", line.lower()) ):
					# we are past the "molecules" section
					mol_section = True
				if(mol_section):
					# comment out lines that belong to solvent (SOL, CL, NA)... add more if necessary
					if( re.match("\s*(sol|cl|na|tsl|tcm|mth)\s*\d+", line.lower()) ):
						line = ";"+line
				out_top.append(line)
			outfile = open(node.dir+"/rerun_me.top","w").writelines(out_top)	
		
		else:
			if not( path.exists(node.dir+"/rerun_me.trr") ):
				symlink(node.name+".trr", node.dir+"/rerun_me.trr")
			else:
				print("Rerun trajectory file (rerun_me.trr) already existing in " + node.dir + ".") 
				print("Be sure you want to keep it!")
				#continue
			if not( path.exists(node.dir+"/rerun_me.pdb") ):
				symlink(node.name+"_conf.pdb", node.dir+"/rerun_me.pdb")
			if not( path.exists(node.dir+"/rerun_me.top") ):
				symlink(node.name+".top", node.dir+"/rerun_me.top")
		
	
		grompp2state = "rerun-able-"+node.state
	
		# get rid of old checkpoint file (it might mess up the rerun)
		if( path.exists(node.dir+"/state.cpt") ):
			remove(node.dir+"/state.cpt")
	
		#zgf_grompp.call_grompp(node, mdp_file=options.grompp, final_state=grompp2state)
		#TODO code borrowed from zgf_grompp
		#TODO make the original method fit for grompping reruns

		if not( path.exists(node.dir+"/rerun_me.trr") ):		
			cmd = ["grompp"]
			cmd += ["-f", "../../"+options.grompp]
			cmd += ["-n", "../../"+node.pool.ndx_fn]
			cmd += ["-c", "../../"+node.dir+"/rerun_me.pdb"]
			cmd += ["-p", "../../"+node.dir+"/rerun_me.top"]
			cmd += ["-o", "../../"+node.dir+"/rerun_me.tpr"]			
			print("Calling: %s"%" ".join(cmd))
			p = Popen(cmd, cwd=node.dir)
			retcode = p.wait()
			assert(retcode == 0) # grompp should never fail
		
		node.state = grompp2state
		node.save()
	
		node.unlock()
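
A quick check of the solvent-commenting regex used in the topology desolvation above (the sample topology lines are hypothetical):

import re

for line in ["SOL   7233", "Protein  1", "NA    12"]:
    if re.match("\s*(sol|cl|na|tsl|tcm|mth)\s*\d+", line.lower()):
        line = ";" + line
    print line
# prints ";SOL   7233", "Protein  1" and ";NA    12"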
Code example #41
def is_applicable():
    pool = Pool()
    return (len(pool.where("state == 'created'")) > 0)
Code example #42
def is_applicable():
    pool = Pool()
    return (len(pool) > 1 and len(
        pool.where(
            "isa_partition and state in ('converged','not-converged','mdrun-failed')"
        )) == len(pool.where("isa_partition")))
Code example #43
File: zgf_rerun.py Project: pombredanne/ZIBMolPy
def main():
    options = options_desc.parse_args(sys.argv)[0]

    pool = Pool()

    if options.ignore_convergence:
        needy_nodes = pool.where("state in ('converged','not-converged')")
    else:
        needy_nodes = pool.where("state == 'converged'")

    assert (len(needy_nodes) == len(needy_nodes.multilock())
            )  # make sure we lock ALL nodes

    for node in needy_nodes:

        if (path.exists(node.dir + "/rerun_me.trr")
                and path.exists(node.dir + "/rerun_me.pdb")
                and path.exists(node.dir + "/rerun_me.top")
                and path.exists(node.dir + "/rerun_me.tpr")):
            print("All four rerun files (rerun_me.*) already existing in " +
                  node.dir + ".")
            print("Be sure you want to keep them!")
            continue

        # if "none", assume that sim is implicit or in vacuum. thus, trjconv not required.
        if options.pbc_removal != "none":

            # desolvate trr
            if not (path.exists(node.dir + "/rerun_me.trr")):
                cmd = [
                    "trjconv", "-f", node.trr_fn, "-o",
                    node.dir + "/rerun_me.trr", "-s", node.tpr_fn, "-n",
                    node.pool.ndx_fn, "-pbc", options.pbc_removal
                ]
                print("Calling: " + (" ".join(cmd)))
                p = Popen(cmd, stdin=PIPE)
                p.communicate(input=("MOI\n"))
                assert (p.wait() == 0)
            else:
                print(
                    "Rerun trajectory file (rerun_me.trr) already existing in "
                    + node.dir + ".")
                print("Be sure you want to keep it!")
                #sys.exit(0)
                #continue

            # desolvate pdb
            if not (path.exists(node.dir + "/rerun_me.pdb")):
                cmd = [
                    "trjconv", "-f", node.pdb_fn, "-o",
                    node.dir + "/rerun_me.pdb", "-s", node.tpr_fn, "-n",
                    node.pool.ndx_fn, "-pbc", options.pbc_removal
                ]
                print("Calling: " + (" ".join(cmd)))
                p = Popen(cmd, stdin=PIPE)
                p.communicate(input=("MOI\n"))
                assert (p.wait() == 0)

            # desolvate topology
            infile = open(node.top_fn, "r").readlines()
            mol_section = False
            out_top = []

            for line in infile:
                if (re.match("\s*\[\s*(molecules)\s*\]\s*", line.lower())):
                    # we are past the "molecules" section
                    mol_section = True
                if (mol_section):
                    # comment out lines that belong to solvent (SOL, CL, NA)... add more if necessary
                    if (re.match("\s*(sol|cl|na|tsl|tcm|mth)\s*\d+",
                                 line.lower())):
                        line = ";" + line
                out_top.append(line)
            outfile = open(node.dir + "/rerun_me.top", "w").writelines(out_top)

        else:
            if not (path.exists(node.dir + "/rerun_me.trr")):
                symlink(node.name + ".trr", node.dir + "/rerun_me.trr")
            else:
                print(
                    "Rerun trajectory file (rerun_me.trr) already existing in "
                    + node.dir + ".")
                print("Be sure you want to keep it!")
                #continue
            if not (path.exists(node.dir + "/rerun_me.pdb")):
                symlink(node.name + "_conf.pdb", node.dir + "/rerun_me.pdb")
            if not (path.exists(node.dir + "/rerun_me.top")):
                symlink(node.name + ".top", node.dir + "/rerun_me.top")

        grompp2state = "rerun-able-" + node.state

        # get rid of old checkpoint file (it might mess up the rerun)
        if (path.exists(node.dir + "/state.cpt")):
            remove(node.dir + "/state.cpt")

        #zgf_grompp.call_grompp(node, mdp_file=options.grompp, final_state=grompp2state)
        #TODO code borrowed from zgf_grompp
        #TODO make the original method fit for grompping reruns

        if not (path.exists(node.dir + "/rerun_me.trr")):
            cmd = ["grompp"]
            cmd += ["-f", "../../" + options.grompp]
            cmd += ["-n", "../../" + node.pool.ndx_fn]
            cmd += ["-c", "../../" + node.dir + "/rerun_me.pdb"]
            cmd += ["-p", "../../" + node.dir + "/rerun_me.top"]
            cmd += ["-o", "../../" + node.dir + "/rerun_me.tpr"]
            print("Calling: %s" % " ".join(cmd))
            p = Popen(cmd, cwd=node.dir)
            retcode = p.wait()
            assert (retcode == 0)  # grompp should never fail

        node.state = grompp2state
        node.save()

        node.unlock()
Code example #44
def main():
	options = options_desc.parse_args(sys.argv)[0]
	zgf_cleanup.main()
	
	print("Options:\n%s\n"%pformat(eval(str(options))))
	
	pool = Pool()
	parent = pool.root
	active_nodes = pool.where("isa_partition")
	
	assert(len(active_nodes) == len(active_nodes.multilock())) # make sure we lock ALL nodes

	if active_nodes.where("'weight_direct' not in obs"):
		sys.exit("Q-Matrix calculation not possible: Not all of the nodes have been reweighted.")

	node_weights = np.array([node.obs.weight_direct for node in active_nodes])
	
	print "### Generate bins: equidist ###" 
	result = q_equidist(parent, options.numnodes)
	chosen_idx = result['chosen_idx']
	frames_chosen = result['frames_chosen']
	theta = result['theta']
	chosen_idx.sort() # makes preview-trajectory easier to understand
	dimension = len(chosen_idx)

	print "chosen_idx"
	print chosen_idx

	print "### Generate bin weights ###"
	bin_weights=np.zeros(dimension)
	for (i,n) in enumerate(active_nodes):
		w_denom = np.sum(n.frameweights) 
		for t in range(len(n.trajectory)):
			diffs = (frames_chosen - n.trajectory.getframe(t)).norm()
			j = np.argmin(diffs)
			bin_weights[j] = bin_weights[j] + node_weights[i] * n.frameweights[t] / w_denom
			
	
	print "bin_weights"
	print bin_weights
	
	print "### Generate q_all (entries only for neighboring bins) ###" 
	q_all = np.empty((dimension, dimension), dtype=np.float)
	for i in range(dimension):
		sum_row = 0.0
		diffs = (frames_chosen - frames_chosen.getframe(i)).norm()
		print "diffs"
		print diffs
		for j in range(dimension):
			if (diffs[j] < 2.0 * theta) and (bin_weights[i] > 0.0):
				q_all[i,j] = np.sqrt(bin_weights[j]) / np.sqrt(bin_weights[i])
				sum_row = sum_row + q_all[i , j]
			else:
				q_all[i,j] = 0
		q_all[i, i] = q_all[i, i]- sum_row  
			
	print "Q_All"
	print q_all
	
	if options.export_matlab:
		savemat(pool.analysis_dir+"q_all.mat", {"q_all":q_all})
		
	active_nodes.unlock()
	zgf_cleanup.main()
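
By construction every row of q_all sums to zero: the diagonal entry starts at sqrt(w_i/w_i) = 1 and then the full row sum, which includes that 1, is subtracted. A small numpy check, assuming all bins are mutual neighbors and all weights positive (hypothetical values):

import numpy as np

w = np.array([0.5, 0.3, 0.2])  # hypothetical bin weights
q = np.sqrt(w[np.newaxis, :] / w[:, np.newaxis])  # q[i,j] = sqrt(w_j / w_i)
q[np.diag_indices_from(q)] -= q.sum(axis=1)  # same diagonal correction as above
assert np.allclose(q.sum(axis=1), 0.0)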
Code example #45
def is_applicable():
    pool = Pool()
    return (len(pool.where("state == 'grompp-able'")) > 0)
Code example #46
def is_applicable():
    pool = Pool()
    return (len(pool.where("state == 'em-mdrun-able'")) > 0)
Code example #47
def main():
    options = options_desc.parse_args(sys.argv)[0]

    if (options.common_filename):
        options.molecule = options.common_filename + ".pdb"
        options.presampling = options.common_filename + ".trr"
        options.internals = options.common_filename + ".int"
        options.grompp = options.common_filename + ".mdp"
        options.topology = options.common_filename + ".top"
        options.index = options.common_filename + ".ndx"

    print("Options:\n%s\n" % pformat(eval(str(options))))

    assert (path.exists(options.molecule))
    assert (path.exists(options.presampling))
    assert (path.exists(options.internals))
    assert (path.exists(options.grompp))
    assert (path.exists(options.topology))

    #TODO: what if there is no index-file? (make_ndx)
    assert (path.exists(options.index))
    assert ('moi' in gromacs.read_index_file(
        options.index)), "group 'MOI' should be defined in index file"

    # checks e.g. if the mdp-file looks good
    mdp_options = gromacs.read_mdp_file(options.grompp)

    temperatures = [
        ref_t for ref_t in re.findall("[0-9]+", mdp_options["ref_t"])
    ]
    assert (len(set(temperatures)) == 1
            ), "temperature definition in mdp file is ambiguous"
    temperature = temperatures[0]

    # get sampling temperature from mdp file
    if (int(temperature) > 310):
        if not (userinput(
                "Your sampling temperature is set to %s K. Continue?" %
                temperature, "bool")):
            sys.exit("Quit by user.")

    # options we can fix
    mdp_options_dirty = False  #if set, a new mdp-file will be written

    # the value of the following options need to be fixed
    critical_mdp_options = {
        "dihre": "yes",
        "dihre_fc": "1",
        "disre": "simple",
        "disre_fc": "1",
        "gen_temp": temperature
    }
    for (k, v) in critical_mdp_options.items():
        if (mdp_options.has_key(k) and mdp_options[k].strip() != v):
            print "Error. I do not want to use '%s' for option '%s' ('%s' required). Please fix your mdp file." % (
                mdp_options[k].strip(), k, v)
            sys.exit("Quitting.")
        else:
            mdp_options[k] = v
            mdp_options_dirty = True

    # the value of the following options does not matter, but they should be there
    noncritical_mdp_options = {
        "tcoupl": "no",
        "pcoupl": "no",
        "gen_vel": "no",
        "gen_seed": "-1"
    }
    for (k, v) in noncritical_mdp_options.items():
        if not (mdp_options.has_key(k)):
            mdp_options[k] = v
            mdp_options_dirty = True

    a = mdp_options.has_key("energygrps") and "moi" not in [
        str(egrp) for egrp in re.findall('[\S]+', mdp_options["energygrps"])
    ]
    b = not (mdp_options.has_key("energygrps"))
    if (a or b):
        if not (userinput(
                "'MOI' is not defined as an energy group in your mdp file. Maybe you have forgotten to define proper 'energygrps'. Continue?",
                "bool")):
            sys.exit("Quit by user.")

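    # trajectory output (nstxout) and energy output (nstenergy) must stay in
    # sync, so that every stored frame has a matching energy entry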
    a, b = mdp_options.has_key("nstxout"), mdp_options.has_key("nstenergy")
    if (a and not b):
        mdp_options["nstenergy"] = mdp_options["nstxout"]
        mdp_options_dirty = True
    elif (b and not a):
        mdp_options["nstxout"] = mdp_options["nstenergy"]
        mdp_options_dirty = True
    elif (b and a):
        assert (mdp_options["nstxout"] == mdp_options["nstenergy"]
                ), "nstxout should equal nstenergy"

    if (int(mdp_options["nsteps"]) > 1e6):
        msg = "Number of MD-steps?"
        mdp_options["nsteps"] = str(
            userinput(msg, "int", default=int(mdp_options["nsteps"])))

    # create a fixed mdp-file
    if (mdp_options_dirty):
        print("Creating copy of mdp-file and adding missing options.")
        out_fn = options.grompp.rsplit(".", 1)[0] + "_fixed.mdp"
        f = open(out_fn, "w")  # write a fresh file
        f.write("; Generated by zgf_create_pool\n")
        for i in sorted(mdp_options.items()):
            f.write("%s = %s\n" % i)
        f.write("; EOF\n")
        f.close()
        options.grompp = out_fn

    # check if subsampling is reasonable
    if (os.path.getsize(options.presampling) > 100e6):  # 100MB
        print("Presampling trajectory is large")
        trr = TrrFile(options.presampling)
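        # the time difference between the first two frames gives the output interval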
        dt = trr.first_frame.next().t - trr.first_frame.t
        trr.close()
        print("Presampling timestep is %.2f ps" % dt)
        if (dt < 10):  # picoseconds
            #TODO: maybe calculate subsampling factor individually, or ask?
            msg = "Subsample presampling trajectory by a tenth?"
            if (userinput(msg, "bool")):
                out_fn = options.presampling.rsplit(".", 1)[0] + "_tenth.trr"
                cmd = [
                    "trjconv", "-f", options.presampling, "-o", out_fn,
                    "-skip", "10"
                ]
                check_call(cmd)
                options.presampling = out_fn

    # balance linears
    if (options.balance_linears):
        print("Balance Linears")
        old_converter = Converter(options.internals)
        print("Loading presampling....")
        frames = old_converter.read_trajectory(options.presampling)
        new_coord_list = []
        for c in old_converter:
            if (not isinstance(c, LinearCoordinate)):
                new_coord_list.append(c)
                continue  # we do not work on other Coordinate-Types
            #TODO: is this a good way to determine new_weight and new_offset???
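            # rescale so that coordinates with a large presampling variance get
            # proportionally smaller weights, and center the offset at the mean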
            new_weight = c.weight / sqrt(2 * frames.var().getcoord(c))
            new_offset = c.offset + frames.mean().getcoord(c)
            new_coord = LinearCoordinate(*c.atoms,
                                         label=c.label,
                                         weight=new_weight,
                                         offset=new_offset)
            new_coord_list.append(new_coord)
        new_converter = Converter(coord_list=new_coord_list)

        assert (old_converter.filename.endswith(".int"))
        options.internals = old_converter.filename[:-4] + "_balanced.int"
        print("Writing balanced Converter to: " + options.internals)
        f = open(options.internals, "w")
        f.write(new_converter.serialize())
        f.close()
        assert (len(Converter(options.internals)) == len(new_coord_list)
                )  #try parsing

    # Finally: Create root-node and pool
    pool = Pool()
    if (len(pool) != 0):
        print("ERROR: A pool already exists here.")
        sys.exit(1)

    pool.int_fn = options.internals
    pool.mdp_fn = options.grompp
    pool.top_fn = options.topology
    pool.ndx_fn = options.index
    pool.temperature = int(temperature)
    pool.gr_threshold = options.gr_threshold
    pool.gr_chains = options.gr_chains
    pool.alpha = None
    pool.save()  # save pool for the first time...

    # ... then we can save the first node...
    node0 = Node()
    node0.state = "refined"
    node0.save()  # also creates the node directory ... needed for symlink
    os.symlink(os.path.relpath(options.presampling, node0.dir), node0.trr_fn)
    os.symlink(os.path.relpath(options.molecule, node0.dir), node0.pdb_fn)

    pool.root_name = node0.name
    pool.save()  #... now we have to save the pool again.

    if (not path.exists("analysis")):
        os.mkdir("analysis")
Code example #48
def is_applicable():
    pool = Pool()
    return (path.exists(pool.chi_mat_fn))
Code example #50
def main():
    pool = Pool()
    for n in pool.where("isa_partition"):
        for cn in n.children.where("is_sampled"):
            print cn.trajectory
Code example #51
def is_applicable():
    pool = Pool()
    return (len(pool.where("isa_partition and state != 'mdrun-failed'")) > 0)
Code example #53
def is_applicable():
    pool = Pool()
    return (len(pool) > 0)
Code example #54
def is_applicable():
	pool = Pool()
	return( len(pool.where("'weight_direct' in obs")) > 0 and len(pool.where("isa_partition and 'weight_direct' not in obs")) == 0 )
Code example #55
File: zgf_refine.py Project: iwasherefirst2/ZIBMolPy
def is_applicable():
	pool = Pool()
	return(len(pool.where("isa_partition and is_sampled")) > 0)
Code example #56
File: zgf_rerun.py Project: CMD-at-ZIB/ZIBMolPy
def is_applicable():
	pool = Pool()
	return( len(pool) > 1 and len(pool.where("isa_partition and state in ('converged','not-converged','mdrun-failed')")) == len(pool.where("isa_partition")) )