Example #1
File: GMBI.py Project: saamrenton/GMBI
def run_trajectories_remotely(ts, processors, walltime, memory, output_dir, trj_file, config_file):
    # List to hold the PBS job ids
    ids = []

    # Go through each trajectory
    for i, t in enumerate(ts):
        # Create the directory to hold all the outputs
        ext = str(i + 1)
        trd = output_dir + "/Trajectory" + ext
        os.makedirs(trd)

        # Determine the names for the pbs script and the individual trajectory file
        pbsfile = trd + "/runTrajectory" + str(i + 1) + ".pbs"
        tfile = trd + "/" + trj_file + "." + str(i + 1)

        # Create a file containing the current trajectory, located in the newly created directory
        # (note: ptv is not a parameter of this function; it must be visible from the enclosing scope)
        io.write_trajectories([t], ptv, [], tfile)

        # Create a pbs script to run the trajectory
        write_pbs(pbsfile, processors, walltime, memory, config_file, ext)

        # Submit the script
        ids.append(sp.check_output(["qsub", "-o", trd, "-e", trd, pbsfile]).strip())

    # Wait for each of the submitted jobs to finish
    while ids:
        queue = sp.check_output(["qstat"]).strip()
        print "polling: " + str(len(ids)) + " trajectories remaining"

        # Keep only the job ids that still appear in the qstat output; rebuilding the
        # list avoids removing items from it while iterating over it
        ids = [id_string for id_string in ids if id_string in queue]

        time.sleep(60)
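write_pbs is called above but not shown in these examples. Below is a minimal sketch of what such a helper might look like, assuming the generated script simply re-runs GMBI.py for the one trajectory selected by ext; the PBS directives and command-line flags are assumptions, not the repository's actual implementation.

# Hypothetical sketch only; the real write_pbs in GMBI.py may differ.
def write_pbs(pbsfile, processors, walltime, memory, config_file, ext):
    lines = [
        "#!/bin/bash",
        "#PBS -l nodes=1:ppn=" + str(processors),
        "#PBS -l walltime=" + str(walltime),
        "#PBS -l mem=" + str(memory),
        "cd $PBS_O_WORKDIR",
        "python GMBI.py --config " + config_file + " --task run_trajectories --ext " + ext,
    ]
    with open(pbsfile, "w") as f:
        f.write("\n".join(lines) + "\n")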
Example #2
import sys
import itertools

sys.path.append("./Modules")
import gmbiIO as io

if __name__ == "__main__":
    output_file = sys.argv[-1]

    for (t1, ptv, sentinels), (t2, _, _) in itertools.izip(
            io.read_trajectories_gen(sys.argv[1]),
            io.read_trajectories_gen(sys.argv[2])):
        t1.merge(t2)
        print "merged"
        io.write_trajectories([t1], ptv, sentinels, output_file, append=True)
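A possible invocation of this merge script, with illustrative file names (not taken from the repository): the first two arguments are the per-run trajectory result files to merge pairwise, and the last argument is the output file.

python mergeTrajectories.py Trajectory1/results.txt Trajectory2/results.txt merged_results.txt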
Example #3
			x[index, :] = point.independents()

			#If this row's independents duplicate the previous row's, nudge them
			#by 0.1% and reuse the previous row's outputs
			if np.all(x[index, :] == x[index - 1, :]):
				x[index, :] += x[index, :] / 1000.
				y[index, :] = y[index - 1, :]
			else:
				y[index, :] = point.mean()

	return x, y


if __name__ == "__main__"	:
	parser = ap.ArgumentParser()
	parser.add_argument('--config', default="config.py")
	parser.add_argument('--task', required=True)
	parser.add_argument('--design', default="oat")
	args = parser.parse_args()
	
	#Load the configuration file by executing it in this scope; this defines the
	#config names used below (input_dir, output_dir, organism_file, ptv, ...)
	config = os.path.realpath(args.config)
	execfile(config)

	if "generate" == args.task:
		ps = pms.parameter_set_from_file(input_dir + "/" + organism_file)
		ts = _generate_trajectories(ps, r, levels, jumps, perc, ptv, args.design)
		io.write_trajectories(ts, ptv, [], output_dir + "/" + trajectory_file)
	elif "analyse":
		ts, ptv, sentinels = io.read_all_trajectories(output_dir + "/" + results_file)
		ees = _calculate_elementary_effects(ts, ptv, "oat")
		io.plot_ees(ees, ptv, output_dir)
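The two tasks above could be driven from the command line roughly as follows; the flags match the argparse definition in this example, but the script name sensitivity.py is only illustrative.

python sensitivity.py --task generate --config config.py --design oat
python sensitivity.py --task analyse --config config.py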
Example #4
File: GMBI.py Project: saamrenton/GMBI
    # Construct the parameter set from the organism file specified in config
    ps = params.parameter_set_from_file(input_dir + "/" + organism_file)

    # Construct the landscape from the landscape file specified in config
    landscape = lscp.landscape_from_file(input_dir + "/" + landscape_file)
    ps.landscape_size = landscape.shape

    # If a single run is being performed
    if args.task == "run":
        ps.convert_to_age_classes()
        Ns = run_parrallel_iterations(ps, landscape, config)
    elif args.task == "run_trajectories":
        # Read in the trajectories from file. If ext is set, the original trajectory file has been split into
        # multiple files and each trajectory has been assigned a working sub-directory under output_dir
        if args.ext == "":
            ts, ptv, _ = io.read_all_trajectories(input_dir + "/" + trajectory_file)
        else:
            output_dir += "/Trajectory" + args.ext
            ts, ptv, _ = io.read_all_trajectories(output_dir + "/" + trajectory_file + "." + args.ext)

        if args.remote:
            run_trajectories_remotely(ts, num_processors, walltime, memory, output_dir, trajectory_file, config_file)
            io.cat_trajectories(len(ts), ptv, ps.sentinels, output_dir)
        else:
            # If a specific trajectory is to be run
            if args.trajectory > 0:
                ts = [run_trajectory(ts[int(args.trajectory)], ps, landscape, ptv, num_iterations, pool)]
            else:
                ts = [run_trajectory(t, ps, landscape, ptv, num_iterations, pool) for t in ts]
            io.write_trajectories(ts, ptv, ps.sentinels, output_dir + "/results.txt")
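This excerpt reads args.task, args.ext, args.remote and args.trajectory, but the argparse setup itself is not part of the excerpt. A minimal sketch of a parser consistent with those uses follows; the flag names, defaults and help comments are assumptions, not the repository's actual definitions.

import argparse

# Hypothetical parser; only meant to be consistent with the attribute accesses above.
parser = argparse.ArgumentParser()
parser.add_argument("--config", default="config.py")      # config file executed at startup
parser.add_argument("--task", required=True)               # e.g. "run" or "run_trajectories"
parser.add_argument("--ext", default="")                   # trajectory number when the trajectory file has been split
parser.add_argument("--remote", action="store_true")       # submit each trajectory as a PBS job
parser.add_argument("--trajectory", type=int, default=0)   # run a single trajectory when > 0
args = parser.parse_args()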