Example #1
def main():
	import os
	import sys
	from optparse import OptionParser
	from global_def import SPARXVERSION
	import global_def
	arglist = []
	for arg in sys.argv:
		arglist.append( arg )

	progname = os.path.basename( arglist[0] )
	usage = progname + " stack --params='parm1 parm2 parm3 ...' --zero --one --set=number --randomize --rand_alpha --import=file --export=file --print --backup --suffix --restore --delete"
	parser = OptionParser(usage, version=SPARXVERSION)

	parser.add_option("--params",	   type="string",       default=None,    help="parameter list")
	parser.add_option("--zero",	       action="store_true", default=False,   help="set parameter to zero")
	parser.add_option("--one",	       action="store_true", default=False,   help="set parameter to one")
	parser.add_option("--set",	       type="float",        default=0.0,     help="set parameter to a value (different from 0.0)")
	parser.add_option("--randomize",   action="store_true", default=False,   help="set parameter to randomized value")
	parser.add_option("--rand_alpha",  action="store_true", default=False,   help="set all angles to randomized value")
	parser.add_option("--import",	   type="string",       dest="fimport",  default=None, help="import parameters from file")
	parser.add_option("--export",	   type="string",       dest="fexport",  default=None, help="export parameters to file")
	parser.add_option("--print",	   action="store_true", dest="fprint",   default=False, help="print parameters")
	parser.add_option("--backup",	   action="store_true", default=False,   help="backup parameters")
	parser.add_option("--suffix",	   type="string",       default="_backup",    help="suffix for xform name in backup")
	parser.add_option("--restore",     action="store_true", default=False,   help="restore parameters")
	parser.add_option("--delete",      action="store_true", default=False,   help="delete parameters")
	parser.add_option("--consecutive", action="store_true", default=False,   help="set selected parameter to consecutive integers starting from 0")

	(options,args) = parser.parse_args( arglist[1:] )

	if len(args) != 1 :
		print usage
		sys.exit(-1)

	if options.params == None:
		print "Error: no parameters given"
		exit(-1)

	if global_def.CACHE_DISABLE:
		from utilities import disable_bdb_cache
		disable_bdb_cache()
	from applications import header
	header(args[0], options.params, options.zero, options.one, options.set, options.randomize, options.rand_alpha, options.fimport, options.fexport, \
	options.fprint, options.backup, options.suffix, options.restore, options.delete, options.consecutive)
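The wrapper above only collects command-line options and hands them to applications.header(). A hedged sketch of calling header() directly from Python, using the keyword style that Examples #3 and #4 rely on; the stack name and output file are illustrative placeholders:

# Hedged sketch: "bdb:mystack" and "angles.txt" are illustrative placeholders.
from applications import header
header("bdb:mystack", "xform.projection", fexport="angles.txt")  # export projection parameters to a text file
header("bdb:mystack", "xform.projection", fprint=True)           # or simply print them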
Example #2
def main():
	import os
	import sys
	from optparse import OptionParser
	from global_def import SPARXVERSION
	import global_def
	arglist = []
	for arg in sys.argv:
		arglist.append( arg )

	progname = os.path.basename( arglist[0] )
	usage = progname + " stack --params='parm1 parm2 parm3 ...' --zero --one --set=number --randomize --rand_alpha --import=file --export=file --print --backup --suffix --restore --delete"
	parser = OptionParser(usage, version=SPARXVERSION)

	parser.add_option("--params",	   type="string",       default=None,    help="parameter list")
	parser.add_option("--zero",	       action="store_true", default=False,   help="set parameter to zero")
	parser.add_option("--one",	       action="store_true", default=False,   help="set parameter to one")
	parser.add_option("--set",	       type="float",        default=0.0,     help="set parameter to a value (different from 0.0)")
	parser.add_option("--randomize",   action="store_true", default=False,   help="set parameter to randomized value")
	parser.add_option("--rand_alpha",  action="store_true", default=False,   help="set all angles to randomized value")
	parser.add_option("--import",	   type="string",       dest="fimport",  default=None, help="import parameters from file")
	parser.add_option("--export",	   type="string",       dest="fexport",  default=None, help="export parameters to file")
	parser.add_option("--print",	   action="store_true", dest="fprint",   default=False, help="print parameters")
	parser.add_option("--backup",	   action="store_true", default=False,   help="backup parameters")
	parser.add_option("--suffix",	   type="string",       default="_backup",    help="suffix for xform name in backup")
	parser.add_option("--restore",     action="store_true", default=False,   help="restore parameters")
	parser.add_option("--delete",      action="store_true", default=False,   help="delete parameters")
	parser.add_option("--consecutive", action="store_true", default=False,   help="set selected parameter to consecutive integers starting from 0")

	(options,args) = parser.parse_args( arglist[1:] )

	if len(args) != 1 :
		print(usage)
		sys.exit(-1)

	if options.params == None:
		print("Error: no parameters given")
		exit(-1)

	if global_def.CACHE_DISABLE:
		from utilities import disable_bdb_cache
		disable_bdb_cache()
	from applications import header
	header(args[0], options.params, options.zero, options.one, options.set, options.randomize, options.rand_alpha, options.fimport, options.fexport, \
	options.fprint, options.backup, options.suffix, options.restore, options.delete, options.consecutive)
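The same operations are normally driven through the sxheader.py command line described by the usage string; a hedged sketch of such an invocation, shelling out with os.system() the way Example #3 does for its helper scripts (the stack name is an illustrative placeholder):

# Illustrative only: print, then export, the xform.projection parameters of a stack.
import os
os.system("sxheader.py bdb:mystack --params=xform.projection --print")
os.system("sxheader.py bdb:mystack --params=xform.projection --export=angles.txt")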
Example #3
def runcheck(classavgstack, recon, outdir, inangles=None, selectdoc=None, displayYN=False, 
			 projstack='proj.hdf', outangles='angles.txt', outstack='comp-proj-reproj.hdf', normstack='comp-proj-reproj-norm.hdf'):
	
	print
	
	# Check if inputs exist
	check(classavgstack)
	check(recon)
	
	# Create directory if it doesn't exist
	if not os.path.isdir(outdir):
		os.makedirs(outdir)  # os.mkdir() can only operate one directory deep
		print("mkdir -p %s" % outdir)

	# Expand path for outputs
	projstack = os.path.join(outdir, projstack)
	outangles = os.path.join(outdir, outangles)
	outstack  = os.path.join(outdir, outstack)
	normstack = os.path.join(outdir, normstack)
	
	# Get number of images
	nimg0 = EMUtil.get_image_count(classavgstack)
	#print("nimg0: %s" % nimg0)
	
	# In case class averages include discarded images, apply selection file
	if selectdoc:
		goodavgs, extension = os.path.splitext(classavgstack)
		newclasses = goodavgs + "_kept" + extension
		
		# e2proc2d appends to existing files, so rename existing output
		if os.path.exists(newclasses):
			renamefile = newclasses + '.bak'
			os.rename(newclasses, renamefile)
			print("mv %s %s" % (newclasses, renamefile))
		
		cmd7="e2proc2d.py %s %s --list=%s" % (classavgstack, newclasses, selectdoc)
		print cmd7
		os.system(cmd7)
		
		# Update class-averages
		classavgstack = newclasses
	
	# Import Euler angles
	if inangles:
		cmd6="sxheader.py %s --params=xform.projection --import=%s" % (classavgstack, inangles)
		print cmd6
		header(classavgstack, 'xform.projection', fimport=inangles)
	
	cmd1="sxheader.py %s --params=xform.projection --export=%s" % (classavgstack, outangles) 
	print cmd1
	#os.system(cmd1)
	try:
		header(classavgstack, 'xform.projection', fexport=outangles)
	except RuntimeError:
		print("\nERROR!! No projection angles found in class-average stack header!\n")
		exit()
	
	cmd2="sxproject3d.py %s %s --angles=%s" % (recon, projstack, outangles)
	print cmd2
	#os.system(cmd2)
	project3d(recon, stack=projstack, listagls=outangles)
	
	imgcounter = 0  # the montage will have twice as many images as there are class averages
	result=[]
	
	# Number of images may have changed
	nimg1   = EMUtil.get_image_count(classavgstack)
	
	for imgnum in xrange(nimg1):
		#print imgnum
		classimg = get_im(classavgstack, imgnum)
		ccc1 = classimg.get_attr_default('cross-corr', -1.0)
		prjimg = get_im(projstack,imgnum)
		ccc1 = prjimg.get_attr_default('cross-corr', -1.0)
		cccoeff = ccc(prjimg,classimg)
		#print imgnum, cccoeff
		classimg.set_attr_dict({'cross-corr':cccoeff})
		prjimg.set_attr_dict({'cross-corr':cccoeff})
		prjimg.write_image(outstack,imgcounter)
		imgcounter += 1
		classimg.write_image(outstack, imgcounter)
		imgcounter += 1
		result.append(cccoeff)
	result1 = sum(result)
	#print result1

	nimg2   = EMUtil.get_image_count(outstack)
	meanccc = result1/nimg1
	print("Mean CCC is %s" % meanccc)
	
	for imgnum in xrange(nimg2):
		if (imgnum % 2 ==0):
			prjimg = get_im(outstack,imgnum)
			meanccc1 = prjimg.get_attr_default('mean-cross-corr', -1.0)
			prjimg.set_attr_dict({'mean-cross-corr':meanccc})
			write_header(outstack,prjimg,imgnum)
		if (imgnum % 100) == 0:
			print imgnum
	
	# e2proc2d appends to existing files, so delete existing output
	if os.path.exists(normstack):
		os.remove(normstack)
		print("rm %s" % normstack)
		
	cmd5="e2proc2d.py %s %s --process=normalize" % (outstack, normstack)
	print cmd5
	os.system(cmd5)
	
	# Optionally pop up e2display
	if displayYN:
		cmd8 = "e2display.py %s" % normstack
		print cmd8
		os.system(cmd8)
	
	print("Done!")
Example #4
def main():

	from logger import Logger, BaseLogger_Files
	import user_functions
	from optparse import OptionParser, SUPPRESS_HELP
	from global_def import SPARXVERSION
	from EMAN2 import EMData

	main_node = 0
	mpi_init(0, [])
	mpi_comm = MPI_COMM_WORLD
	myid = mpi_comm_rank(MPI_COMM_WORLD)
	mpi_size = mpi_comm_size(MPI_COMM_WORLD)	# Total number of processes, passed by --np option.

	# mpi_barrier(mpi_comm)
	# from mpi import mpi_finalize
	# mpi_finalize()
	# print "mpi finalize"
	# from sys import exit
	# exit()

	progname = os.path.basename(sys.argv[0])
	usage = progname + " stack  [output_directory] --ir=inner_radius --radius=outer_radius --rs=ring_step --xr=x_range --yr=y_range  --ts=translational_search_step  --delta=angular_step --an=angular_neighborhood  --center=center_type --maxit1=max_iter1 --maxit2=max_iter2 --L2threshold=0.1  --fl --aa --ref_a=S --sym=c1"
	usage += """

stack			2D images in a stack file: (default required string)
output_directory: directory name into which the output files will be written.  If it does not exist, the directory will be created.  If it does exist, the program will continue executing from where it stopped (if it did not already reach the end). The "--use_latest_master_directory" option can be used to choose the most recent directory that starts with "master".
"""

	parser = OptionParser(usage,version=SPARXVERSION)
	parser.add_option("--radius",                type="int",           help="radius of the particle: has to be less than < int(nx/2)-1 (default required int)")

	parser.add_option("--ir",                    type="int",           default=1,          help="inner radius for rotational search: > 0 (default 1)")
	parser.add_option("--rs",                    type="int",           default=1,          help="step between rings in rotational search: >0 (default 1)")
	parser.add_option("--xr",                    type="string",        default='0',        help="range for translation search in x direction: search is +/xr in pixels (default '0')")
	parser.add_option("--yr",                    type="string",        default='0',        help="range for translation search in y direction: if omitted will be set to xr, search is +/yr in pixels (default '0')")
	parser.add_option("--ts",                    type="string",        default='1.0',      help="step size of the translation search in x-y directions: search is -xr, -xr+ts, 0, xr-ts, xr, can be fractional (default '1.0')")
	parser.add_option("--delta",                 type="string",        default='2.0',      help="angular step of reference projections: (default '2.0')")
	#parser.add_option("--an",       type="string", default= "-1",              help="angular neighborhood for local searches (phi and theta)")
	parser.add_option("--center",                type="float",         default=-1.0,       help="centering of 3D template: average shift method; 0: no centering; 1: center of gravity (default -1.0)")
	parser.add_option("--maxit1",                type="int",           default=400,        help="maximum number of iterations performed for the GA part: (default 400)")
	parser.add_option("--maxit2",                type="int",           default=50,         help="maximum number of iterations performed for the finishing up part: (default 50)")
	parser.add_option("--L2threshold",           type="float",         default=0.03,       help="stopping criterion of GA: given as a maximum relative dispersion of volumes' L2 norms: (default 0.03)")
	parser.add_option("--doga",                  type="float",         default=0.1,        help="do GA when fraction of orientation changes less than 1.0 degrees is at least doga: (default 0.1)")
	parser.add_option("--n_shc_runs",            type="int",           default=4,          help="number of quasi-independent shc runs (same as '--nruns' parameter from sxviper.py): (default 4)")
	parser.add_option("--n_rv_runs",             type="int",           default=10,         help="number of rviper iterations: (default 10)")
	parser.add_option("--n_v_runs",              type="int",           default=3,          help="number of viper runs for each r_viper cycle: (default 3)")
	parser.add_option("--outlier_percentile",    type="float",         default=95.0,       help="percentile above which outliers are removed every rviper iteration: (default 95.0)")
	parser.add_option("--iteration_start",       type="int",           default=0,          help="starting iteration for rviper: 0 means go to the most recent one (default 0)")
	#parser.add_option("--CTF",      action="store_true", default=False,        help="NOT IMPLEMENTED Consider CTF correction during the alignment ")
	#parser.add_option("--snr",      type="float",  default= 1.0,               help="Signal-to-Noise Ratio of the data (default 1.0)")
	parser.add_option("--ref_a",                 type="string",        default='S',        help="method for generating the quasi-uniformly distributed projection directions: (default S)")
	parser.add_option("--sym",                   type="string",        default='c1',       help="point-group symmetry of the structure: (default c1)")
	# parser.add_option("--function", type="string", default="ref_ali3d",         help="name of the reference preparation function (ref_ali3d by default)")
	##### XXXXXXXXXXXXXXXXXXXXXX option does not exist in docs XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
	parser.add_option("--function", type="string", default="ref_ali3d",         help=SUPPRESS_HELP)
	parser.add_option("--npad",                  type="int",           default=2,          help="padding size for 3D reconstruction: (default 2)")
	# parser.add_option("--npad", type="int",  default= 2,            help="padding size for 3D reconstruction (default 2)")

	#options introduced for the do_volume function
	parser.add_option("--fl",                    type="float",         default=0.25,       help="cut-off frequency applied to the template volume: using a hyperbolic tangent low-pass filter (default 0.25)")
	parser.add_option("--aa",                    type="float",         default=0.1,        help="fall-off of hyperbolic tangent low-pass filter: (default 0.1)")
	parser.add_option("--pwreference",           type="string",        default='',         help="text file with a reference power spectrum: (default none)")
	parser.add_option("--mask3D",                type="string",        default=None,       help="3D mask file: (default sphere)")
	parser.add_option("--moon_elimination",      type="string",        default='',         help="elimination of disconnected pieces: two arguments: mass in KDa and pixel size in px/A separated by comma, no space (default none)")

	# used for debugging, help is suppressed with SUPPRESS_HELP
	##### XXXXXXXXXXXXXXXXXXXXXX option does not exist in docs XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
	parser.add_option("--my_random_seed",      type="int",  default=123,  help = SUPPRESS_HELP)
	##### XXXXXXXXXXXXXXXXXXXXXX option does not exist in docs XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
	parser.add_option("--run_get_already_processed_viper_runs", action="store_true", dest="run_get_already_processed_viper_runs", default=False, help = SUPPRESS_HELP)
	##### XXXXXXXXXXXXXXXXXXXXXX option does not exist in docs XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
	parser.add_option("--use_latest_master_directory", action="store_true", dest="use_latest_master_directory", default=False, help = SUPPRESS_HELP)
	
	parser.add_option("--criterion_name",        type="string",        default='80th percentile',help="criterion deciding if volumes have a core set of stable projections: '80th percentile', other options:'fastest increase in the last quartile' (default '80th percentile')")
	parser.add_option("--outlier_index_threshold_method",type="string",        default='discontinuity_in_derivative',help="method that decides which images to keep: discontinuity_in_derivative, other options:percentile, angle_measure (default discontinuity_in_derivative)")
	parser.add_option("--angle_threshold",       type="int",           default=30,         help="angle threshold for projection removal if using 'angle_measure': (default 30)")
	

	required_option_list = ['radius']
	(options, args) = parser.parse_args(sys.argv[1:])

	options.CTF = False
	options.snr = 1.0
	options.an = -1

	if options.moon_elimination == "":
		options.moon_elimination = []
	else:
		options.moon_elimination = map(float, options.moon_elimination.split(","))

	# Making sure all required options appeared.
	for required_option in required_option_list:
		if not options.__dict__[required_option]:
			print "\n ==%s== mandatory option is missing.\n"%required_option
			print "Please run '" + progname + " -h' for detailed options"
			return 1

	mpi_barrier(MPI_COMM_WORLD)
	if(myid == main_node):
		print "****************************************************************"
		Util.version()
		print "****************************************************************"
		sys.stdout.flush()
	mpi_barrier(MPI_COMM_WORLD)

	# this is just for benefiting from a user friendly parameter name
	options.ou = options.radius 
	my_random_seed = options.my_random_seed
	criterion_name = options.criterion_name
	outlier_index_threshold_method = options.outlier_index_threshold_method
	use_latest_master_directory = options.use_latest_master_directory
	iteration_start_default = options.iteration_start
	number_of_rrr_viper_runs = options.n_rv_runs
	no_of_viper_runs_analyzed_together_from_user_options = options.n_v_runs
	no_of_shc_runs_analyzed_together = options.n_shc_runs 
	outlier_percentile = options.outlier_percentile 
	angle_threshold = options.angle_threshold 
	
	run_get_already_processed_viper_runs = options.run_get_already_processed_viper_runs
	get_already_processed_viper_runs(run_get_already_processed_viper_runs)

	import random
	random.seed(my_random_seed)

	if len(args) < 1 or len(args) > 3:
		print "usage: " + usage
		print "Please run '" + progname + " -h' for detailed options"
		return 1

	# if len(args) > 2:
	# 	ref_vol = get_im(args[2])
	# else:
	ref_vol = None
	
	# error_status = None
	# if myid == 0:
	# 	number_of_images = EMUtil.get_image_count(args[0])
	# 	if mpi_size > number_of_images:
	# 		error_status = ('Number of processes supplied by --np in mpirun needs to be less than or equal to %d (total number of images) ' % number_of_images, getframeinfo(currentframe()))
	# if_error_then_all_processes_exit_program(error_status)
	
	bdb_stack_location = ""

	masterdir = ""
	if len(args) == 2:
		masterdir = args[1]
		if masterdir[-1] != DIR_DELIM:
			masterdir += DIR_DELIM
	elif len(args) == 1:
		if use_latest_master_directory:
			all_dirs = [d for d in os.listdir(".") if os.path.isdir(d)]
			import re; r = re.compile("^master.*$")
			all_dirs = filter(r.match, all_dirs)
			if len(all_dirs)>0:
				# all_dirs = max(all_dirs, key=os.path.getctime)
				masterdir = max(all_dirs, key=os.path.getmtime)
				masterdir += DIR_DELIM

	log = Logger(BaseLogger_Files())

	error_status = 0	
	if mpi_size % no_of_shc_runs_analyzed_together != 0:
		ERROR('Number of processes needs to be a multiple of the number of quasi-independent runs (shc) within each viper run. '
		'Total quasi-independent runs by default are 3, you can change it by specifying '
		'--n_shc_runs option (in sxviper this option is called --nruns). Also, to improve communication time it is recommended that '
		'the number of processes divided by the number of quasi-independent runs is a power '
		'of 2 (e.g. 2, 4, 8 or 16 depending on how many physical cores each node has).', 'sxviper', 1)
		error_status = 1
	if_error_then_all_processes_exit_program(error_status)

	#Create folder for all results or check if there is one created already
	if(myid == main_node):
		#cmd = "{}".format("Rmycounter ccc")
		#cmdexecute(cmd)

		if( masterdir == ""):
			timestring = strftime("%Y_%m_%d__%H_%M_%S" + DIR_DELIM, localtime())
			masterdir = "master"+timestring

		if not os.path.exists(masterdir):
			cmd = "{} {}".format("mkdir", masterdir)
			cmdexecute(cmd)

		if ':' in args[0]:
			bdb_stack_location = args[0].split(":")[0] + ":" + masterdir + args[0].split(":")[1]
			org_stack_location = args[0]

			if(not os.path.exists(os.path.join(masterdir,"EMAN2DB" + DIR_DELIM))):
				# cmd = "{} {}".format("cp -rp EMAN2DB", masterdir, "EMAN2DB" DIR_DELIM)
				# cmdexecute(cmd)
				cmd = "{} {} {}".format("e2bdb.py", org_stack_location,"--makevstack=" + bdb_stack_location + "_000")
				cmdexecute(cmd)

				from applications import header
				try:
					header(bdb_stack_location + "_000", params='original_image_index', fprint=True)
					print "Images were already indexed!"
				except KeyError:
					print "Indexing images"
					header(bdb_stack_location + "_000", params='original_image_index', consecutive=True)
		else:
			filename = os.path.basename(args[0])
			bdb_stack_location = "bdb:" + masterdir + os.path.splitext(filename)[0]
			if(not os.path.exists(os.path.join(masterdir,"EMAN2DB" + DIR_DELIM))):
				cmd = "{} {} {}".format("sxcpy.py  ", args[0], bdb_stack_location + "_000")
				cmdexecute(cmd)

				from applications import header
				try:
					header(bdb_stack_location + "_000", params='original_image_index', fprint=True)
					print "Images were already indexed!"
				except KeyError:
					print "Indexing images"
					header(bdb_stack_location + "_000", params='original_image_index', consecutive=True)

	# send masterdir to all processes
	dir_len  = len(masterdir)*int(myid == main_node)
	dir_len = mpi_bcast(dir_len,1,MPI_INT,0,MPI_COMM_WORLD)[0]
	masterdir = mpi_bcast(masterdir,dir_len,MPI_CHAR,main_node,MPI_COMM_WORLD)
	masterdir = string.join(masterdir,"")
	if masterdir[-1] != DIR_DELIM:
		masterdir += DIR_DELIM
		
	global_def.LOGFILE =  os.path.join(masterdir, global_def.LOGFILE)
	print_program_start_information()
	

	# mpi_barrier(mpi_comm)
	# from mpi import mpi_finalize
	# mpi_finalize()
	# print "mpi finalize"
	# from sys import exit
	# exit()
		
	
	# send bdb_stack_location to all processes
	dir_len  = len(bdb_stack_location)*int(myid == main_node)
	dir_len = mpi_bcast(dir_len,1,MPI_INT,0,MPI_COMM_WORLD)[0]
	bdb_stack_location = mpi_bcast(bdb_stack_location,dir_len,MPI_CHAR,main_node,MPI_COMM_WORLD)
	bdb_stack_location = string.join(bdb_stack_location,"")

	iteration_start = get_latest_directory_increment_value(masterdir, "main")

	if (myid == main_node):
		if (iteration_start < iteration_start_default):
			ERROR('Starting iteration provided is greater than last iteration performed. Quitting program', 'sxviper', 1)
			error_status = 1
	if iteration_start_default!=0:
		iteration_start = iteration_start_default
	if (myid == main_node):
		if (number_of_rrr_viper_runs < iteration_start):
			ERROR('Please provide number of rviper runs (--n_rv_runs) greater than number of iterations already performed.', 'sxviper', 1)
			error_status = 1

	if_error_then_all_processes_exit_program(error_status)

	for rviper_iter in range(iteration_start, number_of_rrr_viper_runs + 1):
		if(myid == main_node):
			all_projs = EMData.read_images(bdb_stack_location + "_%03d"%(rviper_iter - 1))
			print "XXXXXXXXXXXXXXXXX"
			print "Number of projections (in loop): " + str(len(all_projs))
			print "XXXXXXXXXXXXXXXXX"
			subset = range(len(all_projs))
		else:
			all_projs = None
			subset = None

		runs_iter = get_latest_directory_increment_value(masterdir + NAME_OF_MAIN_DIR + "%03d"%rviper_iter, DIR_DELIM + NAME_OF_RUN_DIR, start_value=0) - 1
		no_of_viper_runs_analyzed_together = max(runs_iter + 2, no_of_viper_runs_analyzed_together_from_user_options)

		first_time_entering_the_loop_need_to_do_full_check_up = True
		while True:
			runs_iter += 1

			if not first_time_entering_the_loop_need_to_do_full_check_up:
				if runs_iter >= no_of_viper_runs_analyzed_together:
					break
			first_time_entering_the_loop_need_to_do_full_check_up = False

			this_run_is_NOT_complete = 0
			if (myid == main_node):
				independent_run_dir = masterdir + DIR_DELIM + NAME_OF_MAIN_DIR + ('%03d' + DIR_DELIM + NAME_OF_RUN_DIR + "%03d" + DIR_DELIM)%(rviper_iter, runs_iter)
				if run_get_already_processed_viper_runs:
					cmd = "{} {}".format("mkdir -p", masterdir + DIR_DELIM + NAME_OF_MAIN_DIR + ('%03d' + DIR_DELIM)%(rviper_iter)); cmdexecute(cmd)
					cmd = "{} {}".format("rm -rf", independent_run_dir); cmdexecute(cmd)
					cmd = "{} {}".format("cp -r", get_already_processed_viper_runs() + " " +  independent_run_dir); cmdexecute(cmd)

				if os.path.exists(independent_run_dir + "log.txt") and (string_found_in_file("Finish VIPER2", independent_run_dir + "log.txt")):
					this_run_is_NOT_complete = 0
				else:
					this_run_is_NOT_complete = 1
					cmd = "{} {}".format("rm -rf", independent_run_dir); cmdexecute(cmd)
					cmd = "{} {}".format("mkdir -p", independent_run_dir); cmdexecute(cmd)

				this_run_is_NOT_complete = mpi_bcast(this_run_is_NOT_complete,1,MPI_INT,main_node,MPI_COMM_WORLD)[0]
				dir_len = len(independent_run_dir)
				dir_len = mpi_bcast(dir_len,1,MPI_INT,main_node,MPI_COMM_WORLD)[0]
				independent_run_dir = mpi_bcast(independent_run_dir,dir_len,MPI_CHAR,main_node,MPI_COMM_WORLD)
				independent_run_dir = string.join(independent_run_dir,"")
			else:
				this_run_is_NOT_complete = mpi_bcast(this_run_is_NOT_complete,1,MPI_INT,main_node,MPI_COMM_WORLD)[0]
				dir_len = 0
				independent_run_dir = ""
				dir_len = mpi_bcast(dir_len,1,MPI_INT,main_node,MPI_COMM_WORLD)[0]
				independent_run_dir = mpi_bcast(independent_run_dir,dir_len,MPI_CHAR,main_node,MPI_COMM_WORLD)
				independent_run_dir = string.join(independent_run_dir,"")

			if this_run_is_NOT_complete:
				mpi_barrier(MPI_COMM_WORLD)

				if independent_run_dir[-1] != DIR_DELIM:
					independent_run_dir += DIR_DELIM

				log.prefix = independent_run_dir

				options.user_func = user_functions.factory[options.function]

				# for debugging purposes
				#if (myid == main_node):
					#cmd = "{} {}".format("cp ~/log.txt ", independent_run_dir)
					#cmdexecute(cmd)
					#cmd = "{} {}{}".format("cp ~/paramdir/params$(mycounter ccc).txt ", independent_run_dir, "param%03d.txt"%runs_iter)
					#cmd = "{} {}{}".format("cp ~/paramdir/params$(mycounter ccc).txt ", independent_run_dir, "params.txt")
					#cmdexecute(cmd)

				if (myid == main_node):
					store_value_of_simple_vars_in_json_file(masterdir + 'program_state_stack.json', locals(), exclude_list_of_vars=["usage"], 
						vars_that_will_show_only_size = ["subset"])
					store_value_of_simple_vars_in_json_file(masterdir + 'program_state_stack.json', options.__dict__, write_or_append='a')

				# mpi_barrier(mpi_comm)
				# from mpi import mpi_finalize
				# mpi_finalize()
				# print "mpi finalize"
				# from sys import exit
				# exit()

				out_params, out_vol, out_peaks = multi_shc(all_projs, subset, no_of_shc_runs_analyzed_together, options,
				mpi_comm=mpi_comm, log=log, ref_vol=ref_vol)

				# end of: if this_run_is_NOT_complete:

			if runs_iter >= (no_of_viper_runs_analyzed_together_from_user_options - 1):
				increment_for_current_iteration = identify_outliers(myid, main_node, rviper_iter,
				no_of_viper_runs_analyzed_together, no_of_viper_runs_analyzed_together_from_user_options, masterdir,
				bdb_stack_location, outlier_percentile, criterion_name, outlier_index_threshold_method, angle_threshold)

				if increment_for_current_iteration == MUST_END_PROGRAM_THIS_ITERATION:
					break

				no_of_viper_runs_analyzed_together += increment_for_current_iteration

		# end of independent viper loop

		calculate_volumes_after_rotation_and_save_them(options, rviper_iter, masterdir, bdb_stack_location, myid,
		mpi_size, no_of_viper_runs_analyzed_together, no_of_viper_runs_analyzed_together_from_user_options)

		if increment_for_current_iteration == MUST_END_PROGRAM_THIS_ITERATION:
			if (myid == main_node):
				print "RVIPER found a core set of stable projections for the current RVIPER iteration (%d), the maximum angle difference between corresponding projections from different VIPER volumes is less than %.2f. Finishing."%(rviper_iter, ANGLE_ERROR_THRESHOLD)
			break
	else:
		if (myid == main_node):
			print "After running the last iteration (%d), RVIPER did not find a set of projections with the maximum angle difference between corresponding projections from different VIPER volumes less than %.2f Finishing."%(rviper_iter, ANGLE_ERROR_THRESHOLD)
		
			
	# end of RVIPER loop

	#mpi_finalize()
	#sys.exit()

	mpi_barrier(MPI_COMM_WORLD)
	mpi_finalize()
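main() above repeatedly uses one idiom to send a string (masterdir, bdb_stack_location, independent_run_dir) from the main process to every rank: broadcast its length, broadcast its characters, then re-join them. A hedged, stand-alone sketch of that pattern, assuming the same sparx mpi wrappers imported above and a launch under mpirun; the directory name is illustrative:

# String-broadcast idiom used above; "master_2020_01_01/" is an illustrative value.
import string
from mpi import mpi_init, mpi_finalize, mpi_bcast, mpi_comm_rank, MPI_CHAR, MPI_INT, MPI_COMM_WORLD

mpi_init(0, [])
myid = mpi_comm_rank(MPI_COMM_WORLD)
main_node = 0
masterdir = "master_2020_01_01/" if myid == main_node else ""

# broadcast the length first, then the characters, then glue them back into a string
dir_len = mpi_bcast(len(masterdir), 1, MPI_INT, main_node, MPI_COMM_WORLD)[0]
masterdir = mpi_bcast(masterdir, dir_len, MPI_CHAR, main_node, MPI_COMM_WORLD)
masterdir = string.join(masterdir, "")
mpi_finalize()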
Example #5
def main():

	def params_3D_2D_NEW(phi, theta, psi, s2x, s2y, mirror):
		# the final ali2d parameters apply the shift operation first and the rotation second for parameters converted from 3D
		if mirror:
			m = 1
			alpha, sx, sy, scalen = compose_transform2(0, s2x, s2y, 1.0, 540.0-psi, 0, 0, 1.0)
		else:
			m = 0
			alpha, sx, sy, scalen = compose_transform2(0, s2x, s2y, 1.0, 360.0-psi, 0, 0, 1.0)
		return  alpha, sx, sy, m
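	# Hedged usage note: given a 3D projection orientation (phi, theta, psi), shifts (s2x, s2y)
	# and a mirror flag, the helper above returns the corresponding 2D alignment parameters
	# (alpha, sx, sy, mirror), e.g. (values are illustrative):
	#   alpha, sx, sy, m = params_3D_2D_NEW(20.0, 75.0, 135.0, 1.5, -2.0, 0)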
	
	progname = os.path.basename(sys.argv[0])
	usage = progname + " prj_stack  --ave2D= --var2D=  --ave3D= --var3D= --img_per_grp= --fl=  --aa=   --sym=symmetry --CTF"
	parser = OptionParser(usage, version=SPARXVERSION)
	
	parser.add_option("--output_dir",   type="string"	   ,	default="./",				    help="Output directory")
	parser.add_option("--ave2D",		type="string"	   ,	default=False,				help="Write to the disk a stack of 2D averages")
	parser.add_option("--var2D",		type="string"	   ,	default=False,				help="Write to the disk a stack of 2D variances")
	parser.add_option("--ave3D",		type="string"	   ,	default=False,				help="Write to the disk reconstructed 3D average")
	parser.add_option("--var3D",		type="string"	   ,	default=False,				help="Compute 3D variability (time consuming!)")
	parser.add_option("--img_per_grp",	type="int"         ,	default=100,	     	    help="Number of neighbouring projections.(Default is 100)")
	parser.add_option("--no_norm",		action="store_true",	default=False,				help="Do not use normalization.(Default is to apply normalization)")
	#parser.add_option("--radius", 	    type="int"         ,	default=-1   ,				help="radius for 3D variability" )
	parser.add_option("--npad",			type="int"         ,	default=2    ,				help="Number of time to pad the original images.(Default is 2 times padding)")
	parser.add_option("--sym" , 		type="string"      ,	default="c1",				help="Symmetry. (Default is no symmetry)")
	parser.add_option("--fl",			type="float"       ,	default=0.0,				help="Low pass filter cutoff in absolute frequency (0.0 - 0.5) and is applied to decimated images. (Default - no filtration)")
	parser.add_option("--aa",			type="float"       ,	default=0.02 ,				help="Fall off of the filter. Use default value if user has no clue about falloff (Default value is 0.02)")
	parser.add_option("--CTF",			action="store_true",	default=False,				help="Use CFT correction.(Default is no CTF correction)")
	#parser.add_option("--MPI" , 		action="store_true",	default=False,				help="use MPI version")
	#parser.add_option("--radiuspca", 	type="int"         ,	default=-1   ,				help="radius for PCA" )
	#parser.add_option("--iter", 		type="int"         ,	default=40   ,				help="maximum number of iterations (stop criterion of reconstruction process)" )
	#parser.add_option("--abs", 		type="float"   ,        default=0.0  ,				help="minimum average absolute change of voxels' values (stop criterion of reconstruction process)" )
	#parser.add_option("--squ", 		type="float"   ,	    default=0.0  ,				help="minimum average squared change of voxels' values (stop criterion of reconstruction process)" )
	parser.add_option("--VAR" , 		action="store_true",	default=False,				help="Stack of input consists of 2D variances (Default False)")
	parser.add_option("--decimate",     type  ="float",         default=0.25,               help="Image decimate rate, a number less than 1. (Default is 0.25)")
	parser.add_option("--window",       type  ="int",           default=0,                  help="Target image size relative to original image size. (Default value is zero.)")
	#parser.add_option("--SND",			action="store_true",	default=False,				help="compute squared normalized differences (Default False)")
	#parser.add_option("--nvec",			type="int"         ,	default=0    ,				help="Number of eigenvectors, (Default = 0 meaning no PCA calculated)")
	parser.add_option("--symmetrize",	action="store_true",	default=False,				help="Prepare input stack for handling symmetry (Default False)")
	parser.add_option("--overhead",     type  ="float",         default=0.5,                help="python overhead per CPU.")

	(options,args) = parser.parse_args()
	#####
	from mpi import mpi_init, mpi_comm_rank, mpi_comm_size, mpi_recv, MPI_COMM_WORLD
	from mpi import mpi_barrier, mpi_reduce, mpi_bcast, mpi_send, MPI_FLOAT, MPI_SUM, MPI_INT, MPI_MAX
	#from mpi import *
	from applications   import MPI_start_end
	from reconstruction import recons3d_em, recons3d_em_MPI
	from reconstruction	import recons3d_4nn_MPI, recons3d_4nn_ctf_MPI
	from utilities      import print_begin_msg, print_end_msg, print_msg
	from utilities      import read_text_row, get_image, get_im, wrap_mpi_send, wrap_mpi_recv
	from utilities      import bcast_EMData_to_all, bcast_number_to_all
	from utilities      import get_symt

	#  This is code for handling symmetries by the above program.  To be incorporated. PAP 01/27/2015

	from EMAN2db import db_open_dict

	# Set up global variables related to bdb cache 
	if global_def.CACHE_DISABLE:
		from utilities import disable_bdb_cache
		disable_bdb_cache()
	
	# Set up global variables related to ERROR function
	global_def.BATCH = True
	
	# detect if program is running under MPI
	RUNNING_UNDER_MPI = "OMPI_COMM_WORLD_SIZE" in os.environ
	if RUNNING_UNDER_MPI: global_def.MPI = True
	if options.output_dir =="./": current_output_dir = os.path.abspath(options.output_dir)
	else: current_output_dir = options.output_dir
	if options.symmetrize :
		if RUNNING_UNDER_MPI:
			try:
				sys.argv = mpi_init(len(sys.argv), sys.argv)
				try:	
					number_of_proc = mpi_comm_size(MPI_COMM_WORLD)
					if( number_of_proc > 1 ):
						ERROR("Cannot use more than one CPU for symmetry preparation","sx3dvariability",1)
				except:
					pass
			except:
				pass
		if not os.path.exists(current_output_dir): os.mkdir(current_output_dir)
		
		#  Input
		#instack = "Clean_NORM_CTF_start_wparams.hdf"
		#instack = "bdb:data"
		
		
		from logger import Logger,BaseLogger_Files
		if os.path.exists(os.path.join(current_output_dir, "log.txt")): os.remove(os.path.join(current_output_dir, "log.txt"))
		log_main=Logger(BaseLogger_Files())
		log_main.prefix = os.path.join(current_output_dir, "./")
		
		instack = args[0]
		sym = options.sym.lower()
		if( sym == "c1" ):
			ERROR("There is no need to symmetrize stack for C1 symmetry","sx3dvariability",1)
		
		line =""
		for a in sys.argv:
			line +=" "+a
		log_main.add(line)
	
		if(instack[:4] !="bdb:"):
			#if output_dir =="./": stack = "bdb:data"
			stack = "bdb:"+current_output_dir+"/data"
			delete_bdb(stack)
			junk = cmdexecute("sxcpy.py  "+instack+"  "+stack)
		else: stack = instack
		
		qt = EMUtil.get_all_attributes(stack,'xform.projection')

		na = len(qt)
		ts = get_symt(sym)
		ks = len(ts)
		angsa = [None]*na
		
		for k in range(ks):
			#Qfile = "Q%1d"%k
			#if options.output_dir!="./": Qfile = os.path.join(options.output_dir,"Q%1d"%k)
			Qfile = os.path.join(current_output_dir, "Q%1d"%k)
			#delete_bdb("bdb:Q%1d"%k)
			delete_bdb("bdb:"+Qfile)
			#junk = cmdexecute("e2bdb.py  "+stack+"  --makevstack=bdb:Q%1d"%k)
			junk = cmdexecute("e2bdb.py  "+stack+"  --makevstack=bdb:"+Qfile)
			#DB = db_open_dict("bdb:Q%1d"%k)
			DB = db_open_dict("bdb:"+Qfile)
			for i in range(na):
				ut = qt[i]*ts[k]
				DB.set_attr(i, "xform.projection", ut)
				#bt = ut.get_params("spider")
				#angsa[i] = [round(bt["phi"],3)%360.0, round(bt["theta"],3)%360.0, bt["psi"], -bt["tx"], -bt["ty"]]
			#write_text_row(angsa, 'ptsma%1d.txt'%k)
			#junk = cmdexecute("e2bdb.py  "+stack+"  --makevstack=bdb:Q%1d"%k)
			#junk = cmdexecute("sxheader.py  bdb:Q%1d  --params=xform.projection  --import=ptsma%1d.txt"%(k,k))
			DB.close()
		#if options.output_dir =="./": delete_bdb("bdb:sdata")
		delete_bdb("bdb:" + current_output_dir + "/"+"sdata")
		#junk = cmdexecute("e2bdb.py . --makevstack=bdb:sdata --filt=Q")
		sdata = "bdb:"+current_output_dir+"/"+"sdata"
		print(sdata)
		junk = cmdexecute("e2bdb.py   " + current_output_dir +"  --makevstack="+sdata +" --filt=Q")
		#junk = cmdexecute("ls  EMAN2DB/sdata*")
		#a = get_im("bdb:sdata")
		a = get_im(sdata)
		a.set_attr("variabilitysymmetry",sym)
		#a.write_image("bdb:sdata")
		a.write_image(sdata)

	else:

		from fundamentals import window2d
		sys.argv       = mpi_init(len(sys.argv), sys.argv)
		myid           = mpi_comm_rank(MPI_COMM_WORLD)
		number_of_proc = mpi_comm_size(MPI_COMM_WORLD)
		main_node      = 0
		shared_comm  = mpi_comm_split_type(MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED,  0, MPI_INFO_NULL)
		myid_on_node = mpi_comm_rank(shared_comm)
		no_of_processes_per_group = mpi_comm_size(shared_comm)
		masters_from_groups_vs_everything_else_comm = mpi_comm_split(MPI_COMM_WORLD, main_node == myid_on_node, myid_on_node)
		color, no_of_groups, balanced_processor_load_on_nodes = get_colors_and_subsets(main_node, MPI_COMM_WORLD, myid, \
		    shared_comm, myid_on_node, masters_from_groups_vs_everything_else_comm)
		overhead_loading = options.overhead*number_of_proc
		#memory_per_node  = options.memory_per_node
		#if memory_per_node == -1.: memory_per_node = 2.*no_of_processes_per_group
		keepgoing = 1
		
		current_window   = options.window
		current_decimate = options.decimate
		
		if len(args) == 1: stack = args[0]
		else:
			print(( "usage: " + usage))
			print(( "Please run '" + progname + " -h' for detailed options"))
			return 1

		t0 = time()	
		# obsolete flags
		options.MPI  = True
		#options.nvec = 0
		options.radiuspca = -1
		options.iter = 40
		options.abs  = 0.0
		options.squ  = 0.0

		if options.fl > 0.0 and options.aa == 0.0:
			ERROR("Fall off has to be given for the low-pass filter", "sx3dvariability", 1, myid)
			
		#if options.VAR and options.SND:
		#	ERROR("Only one of var and SND can be set!", "sx3dvariability", myid)
			
		if options.VAR and (options.ave2D or options.ave3D or options.var2D): 
			ERROR("When VAR is set, the program cannot output ave2D, ave3D or var2D", "sx3dvariability", 1, myid)
			
		#if options.SND and (options.ave2D or options.ave3D):
		#	ERROR("When SND is set, the program cannot output ave2D or ave3D", "sx3dvariability", 1, myid)
		
		#if options.nvec > 0 :
		#	ERROR("PCA option not implemented", "sx3dvariability", 1, myid)
			
		#if options.nvec > 0 and options.ave3D == None:
		#	ERROR("When doing PCA analysis, one must set ave3D", "sx3dvariability", 1, myid)
		
		if current_decimate>1.0 or current_decimate<0.0:
			ERROR("Decimate rate should be a value between 0.0 and 1.0", "sx3dvariability", 1, myid)
		
		if current_window < 0.0:
			ERROR("Target window size should be always larger than zero", "sx3dvariability", 1, myid)
			
		if myid == main_node:
			img  = get_image(stack, 0)
			nx   = img.get_xsize()
			ny   = img.get_ysize()
			if(min(nx, ny) < current_window):   keepgoing = 0
		keepgoing = bcast_number_to_all(keepgoing, main_node, MPI_COMM_WORLD)
		if keepgoing == 0: ERROR("The target window size cannot be larger than the size of decimated image", "sx3dvariability", 1, myid)

		import string
		options.sym = options.sym.lower()
		# if global_def.CACHE_DISABLE:
		# 	from utilities import disable_bdb_cache
		# 	disable_bdb_cache()
		# global_def.BATCH = True
		
		if myid == main_node:
			if not os.path.exists(current_output_dir): os.mkdir(current_output_dir)# Never delete output_dir in the program!
	
		img_per_grp = options.img_per_grp
		#nvec        = options.nvec
		radiuspca   = options.radiuspca
		from logger import Logger,BaseLogger_Files
		#if os.path.exists(os.path.join(options.output_dir, "log.txt")): os.remove(os.path.join(options.output_dir, "log.txt"))
		log_main=Logger(BaseLogger_Files())
		log_main.prefix = os.path.join(current_output_dir, "./")

		if myid == main_node:
			line = ""
			for a in sys.argv: line +=" "+a
			log_main.add(line)
			log_main.add("-------->>>Settings given by all options<<<-------")
			log_main.add("Symmetry             : %s"%options.sym)
			log_main.add("Input stack          : %s"%stack)
			log_main.add("Output_dir           : %s"%current_output_dir)
			
			if options.ave3D: log_main.add("Ave3d                : %s"%options.ave3D)
			if options.var3D: log_main.add("Var3d                : %s"%options.var3D)
			if options.ave2D: log_main.add("Ave2D                : %s"%options.ave2D)
			if options.var2D: log_main.add("Var2D                : %s"%options.var2D)
			if options.VAR:   log_main.add("VAR                  : True")
			else:             log_main.add("VAR                  : False")
			if options.CTF:   log_main.add("CTF correction       : True  ")
			else:             log_main.add("CTF correction       : False ")
			
			log_main.add("Image per group      : %5d"%options.img_per_grp)
			log_main.add("Image decimate rate  : %4.3f"%current_decimate)
			log_main.add("Low pass filter      : %4.3f"%options.fl)
			current_fl = options.fl
			if current_fl == 0.0: current_fl = 0.5
			log_main.add("Current low pass filter is equivalent to cutoff frequency %4.3f for original image size"%round((current_fl*current_decimate),3))
			log_main.add("Window size          : %5d "%current_window)
			log_main.add("sx3dvariability begins")
	
		symbaselen = 0
		if myid == main_node:
			nima = EMUtil.get_image_count(stack)
			img  = get_image(stack)
			nx   = img.get_xsize()
			ny   = img.get_ysize()
			nnxo = nx
			nnyo = ny
			if options.sym != "c1" :
				imgdata = get_im(stack)
				try:
					i = imgdata.get_attr("variabilitysymmetry").lower()
					if(i != options.sym):
						ERROR("The symmetry provided does not agree with the symmetry of the input stack", "sx3dvariability", 1, myid)
				except:
					ERROR("Input stack is not prepared for symmetry, please follow instructions", "sx3dvariability", 1, myid)
				from utilities import get_symt
				i = len(get_symt(options.sym))
				if((nima/i)*i != nima):
					ERROR("The length of the input stack is incorrect for symmetry processing", "sx3dvariability", 1, myid)
				symbaselen = nima/i
			else:  symbaselen = nima
		else:
			nima = 0
			nx = 0
			ny = 0
			nnxo = 0
			nnyo = 0
		nima    = bcast_number_to_all(nima)
		nx      = bcast_number_to_all(nx)
		ny      = bcast_number_to_all(ny)
		nnxo    = bcast_number_to_all(nnxo)
		nnyo    = bcast_number_to_all(nnyo)
		if current_window > max(nx, ny):
			ERROR("Window size is larger than the original image size", "sx3dvariability", 1)
		
		if current_decimate == 1.:
			if current_window !=0:
				nx = current_window
				ny = current_window
		else:
			if current_window == 0:
				nx = int(nx*current_decimate+0.5)
				ny = int(ny*current_decimate+0.5)
			else:
				nx = int(current_window*current_decimate+0.5)
				ny = nx
		symbaselen = bcast_number_to_all(symbaselen)
		
		# check FFT prime number
		from fundamentals import smallprime
		is_fft_friendly = (nx == smallprime(nx))
		
		if not is_fft_friendly:
			if myid == main_node:
				log_main.add("The target image size is not a product of small prime numbers")
				log_main.add("Program adjusts the input settings!")
			### two cases
			if current_decimate == 1.:
				nx = smallprime(nx)
				ny = nx
				current_window = nx # update
				if myid == main_node:
					log_main.add("The window size is updated to %d."%current_window)
			else:
				if current_window == 0:
					nx = smallprime(int(nx*current_decimate+0.5))
					current_decimate = float(nx)/nnxo
					ny = nx
					if (myid == main_node):
						log_main.add("The decimate rate is updated to %f."%current_decimate)
				else:
					nx = smallprime(int(current_window*current_decimate+0.5))
					ny = nx
					current_window = int(nx/current_decimate+0.5)
					if (myid == main_node):
						log_main.add("The window size is updated to %d."%current_window)
						
		if myid == main_node:
			log_main.add("The target image size is %d"%nx)
						
		if radiuspca == -1: radiuspca = nx/2-2
		if myid == main_node: log_main.add("%-70s:  %d\n"%("Number of projections", nima))
		img_begin, img_end = MPI_start_end(nima, number_of_proc, myid)
		
		"""
		if options.SND:
			from projection		import prep_vol, prgs
			from statistics		import im_diff
			from utilities		import get_im, model_circle, get_params_proj, set_params_proj
			from utilities		import get_ctf, generate_ctf
			from filter			import filt_ctf
		
			imgdata = EMData.read_images(stack, range(img_begin, img_end))

			if options.CTF:
				vol = recons3d_4nn_ctf_MPI(myid, imgdata, 1.0, symmetry=options.sym, npad=options.npad, xysize=-1, zsize=-1)
			else:
				vol = recons3d_4nn_MPI(myid, imgdata, symmetry=options.sym, npad=options.npad, xysize=-1, zsize=-1)

			bcast_EMData_to_all(vol, myid)
			volft, kb = prep_vol(vol)

			mask = model_circle(nx/2-2, nx, ny)
			varList = []
			for i in xrange(img_begin, img_end):
				phi, theta, psi, s2x, s2y = get_params_proj(imgdata[i-img_begin])
				ref_prj = prgs(volft, kb, [phi, theta, psi, -s2x, -s2y])
				if options.CTF:
					ctf_params = get_ctf(imgdata[i-img_begin])
					ref_prj = filt_ctf(ref_prj, generate_ctf(ctf_params))
				diff, A, B = im_diff(ref_prj, imgdata[i-img_begin], mask)
				diff2 = diff*diff
				set_params_proj(diff2, [phi, theta, psi, s2x, s2y])
				varList.append(diff2)
			mpi_barrier(MPI_COMM_WORLD)
		"""
		
		if options.VAR: # 2D variance images have no shifts
			#varList   = EMData.read_images(stack, range(img_begin, img_end))
			from EMAN2 import Region
			varList = []
			for index_of_particle in range(img_begin, img_end):
				image = get_im(stack, index_of_particle)
				if current_window > 0: varList.append(fdecimate(window2d(image, current_window, current_window), nx, ny))
				else:                  varList.append(fdecimate(image, nx, ny))
				
		else:
			from utilities		import bcast_number_to_all, bcast_list_to_all, send_EMData, recv_EMData
			from utilities		import set_params_proj, get_params_proj, params_3D_2D, get_params2D, set_params2D, compose_transform2
			from utilities		import model_blank, nearest_proj, model_circle, write_text_row, wrap_mpi_gatherv
			from applications	import pca
			from statistics		import avgvar, avgvar_ctf, ccc
			from filter		    import filt_tanl
			from morphology		import threshold, square_root
			from projection 	import project, prep_vol, prgs
			from sets		    import Set
			from utilities      import wrap_mpi_recv, wrap_mpi_bcast, wrap_mpi_send
			import numpy as np
			if myid == main_node:
				t1          = time()
				proj_angles = []
				aveList     = []
				tab = EMUtil.get_all_attributes(stack, 'xform.projection')	
				for i in range(nima):
					t     = tab[i].get_params('spider')
					phi   = t['phi']
					theta = t['theta']
					psi   = t['psi']
					x     = theta
					if x > 90.0: x = 180.0 - x
					x = x*10000+psi
					proj_angles.append([x, t['phi'], t['theta'], t['psi'], i])
				t2 = time()
				log_main.add( "%-70s:  %d\n"%("Number of neighboring projections", img_per_grp))
				log_main.add("...... Finding neighboring projections\n")
				log_main.add( "Number of images per group: %d"%img_per_grp)
				log_main.add( "Now grouping projections")
				proj_angles.sort()
				proj_angles_list = np.full((nima, 4), 0.0, dtype=np.float32)	
				for i in range(nima):
					proj_angles_list[i][0] = proj_angles[i][1]
					proj_angles_list[i][1] = proj_angles[i][2]
					proj_angles_list[i][2] = proj_angles[i][3]
					proj_angles_list[i][3] = proj_angles[i][4]
			else: proj_angles_list = 0
			proj_angles_list = wrap_mpi_bcast(proj_angles_list, main_node, MPI_COMM_WORLD)
			proj_angles      = []
			for i in range(nima):
				proj_angles.append([proj_angles_list[i][0], proj_angles_list[i][1], proj_angles_list[i][2], int(proj_angles_list[i][3])])
			del proj_angles_list
			proj_list, mirror_list = nearest_proj(proj_angles, img_per_grp, range(img_begin, img_end))
			all_proj = Set()
			for im in proj_list:
				for jm in im:
					all_proj.add(proj_angles[jm][3])
			all_proj = list(all_proj)
			index = {}
			for i in range(len(all_proj)): index[all_proj[i]] = i
			mpi_barrier(MPI_COMM_WORLD)
			if myid == main_node:
				log_main.add("%-70s:  %.2f\n"%("Finding neighboring projections lasted [s]", time()-t2))
				log_main.add("%-70s:  %d\n"%("Number of groups processed on the main node", len(proj_list)))
				log_main.add("Grouping projections took:  %12.1f [m]"%((time()-t2)/60.))
				log_main.add("Number of groups on main node: ", len(proj_list))
			mpi_barrier(MPI_COMM_WORLD)

			if myid == main_node:
				log_main.add("...... Calculating the stack of 2D variances \n")
			# Memory estimation. There are two memory consumption peaks
			# peak 1. Compute ave, var; 
			# peak 2. Var volume reconstruction;
			# proj_params = [0.0]*(nima*5)
			aveList = []
			varList = []				
			#if nvec > 0: eigList = [[] for i in range(nvec)]
			dnumber   = len(all_proj)# all neighborhood sets assigned to myid
			pnumber   = len(proj_list)*2. + img_per_grp # aveList and varList 
			tnumber   = dnumber+pnumber
			vol_size2 = nx**3*4.*8/1.e9
			vol_size1 = 2.*nnxo**3*4.*8/1.e9
			proj_size         = nnxo*nnyo*len(proj_list)*4.*2./1.e9 # both aveList and varList
			orig_data_size    = nnxo*nnyo*4.*tnumber/1.e9
			reduced_data_size = nx*nx*4.*tnumber/1.e9
			full_data         = np.full((number_of_proc, 2), -1., dtype=np.float16)
			full_data[myid]   = orig_data_size, reduced_data_size
			if myid != main_node: wrap_mpi_send(full_data, main_node, MPI_COMM_WORLD)
			if myid == main_node:
				for iproc in range(number_of_proc):
					if iproc != main_node:
						dummy = wrap_mpi_recv(iproc, MPI_COMM_WORLD)
						full_data[np.where(dummy>-1)] = dummy[np.where(dummy>-1)]
				del dummy
			mpi_barrier(MPI_COMM_WORLD)
			full_data = wrap_mpi_bcast(full_data, main_node, MPI_COMM_WORLD)
			# find the CPU with heaviest load
			minindx         = np.argsort(full_data, 0)
			heavy_load_myid = minindx[-1][1]
			total_mem       = sum(full_data)
			if myid == main_node:
				if current_window == 0:
					log_main.add("Nx:   current image size = %d. Decimated by %f from %d"%(nx, current_decimate, nnxo))
				else:
					log_main.add("Nx:   current image size = %d. Windowed to %d, and decimated by %f from %d"%(nx, current_window, current_decimate, nnxo))
				log_main.add("Nproj:       number of particle images.")
				log_main.add("Navg:        number of 2D average images.")
				log_main.add("Nvar:        number of 2D variance images.")
				log_main.add("Img_per_grp: user defined image per group for averaging = %d"%img_per_grp)
				log_main.add("Overhead:    total python overhead memory consumption   = %f"%overhead_loading)
				log_main.add("Total memory) = 4.0*nx^2*(nproj + navg +nvar+ img_per_grp)/1.0e9 + overhead: %12.3f [GB]"%\
				   (total_mem[1] + overhead_loading))
			del full_data
			mpi_barrier(MPI_COMM_WORLD)
			if myid == heavy_load_myid:
				log_main.add("Begin reading and preprocessing images on processor. Wait... ")
				ttt = time()
			#imgdata = EMData.read_images(stack, all_proj)			
			imgdata = [ None for im in range(len(all_proj))]
			for index_of_proj in range(len(all_proj)):
				#image = get_im(stack, all_proj[index_of_proj])
				if( current_window > 0): imgdata[index_of_proj] = fdecimate(window2d(get_im(stack, all_proj[index_of_proj]),current_window,current_window), nx, ny)
				else:                    imgdata[index_of_proj] = fdecimate(get_im(stack, all_proj[index_of_proj]), nx, ny)
				
				if (current_decimate> 0.0 and options.CTF):
					ctf = imgdata[index_of_proj].get_attr("ctf")
					ctf.apix = ctf.apix/current_decimate
					imgdata[index_of_proj].set_attr("ctf", ctf)
					
				if myid == heavy_load_myid and index_of_proj%100 == 0:
					log_main.add(" ...... %6.2f%% "%(index_of_proj/float(len(all_proj))*100.))
			mpi_barrier(MPI_COMM_WORLD)
			if myid == heavy_load_myid:
				log_main.add("All_proj preprocessing cost %7.2f m"%((time()-ttt)/60.))
				log_main.add("Wait untill reading on all CPUs done...")
			'''	
			imgdata2 = EMData.read_images(stack, range(img_begin, img_end))
			if options.fl > 0.0:
				for k in xrange(len(imgdata2)):
					imgdata2[k] = filt_tanl(imgdata2[k], options.fl, options.aa)
			if options.CTF:
				vol = recons3d_4nn_ctf_MPI(myid, imgdata2, 1.0, symmetry=options.sym, npad=options.npad, xysize=-1, zsize=-1)
			else:
				vol = recons3d_4nn_MPI(myid, imgdata2, symmetry=options.sym, npad=options.npad, xysize=-1, zsize=-1)
			if myid == main_node:
				vol.write_image("vol_ctf.hdf")
				print_msg("Writing to the disk volume reconstructed from averages as		:  %s\n"%("vol_ctf.hdf"))
			del vol, imgdata2
			mpi_barrier(MPI_COMM_WORLD)
			'''
			from applications import prepare_2d_forPCA
			from utilities    import model_blank
			from EMAN2        import Transform
			if not options.no_norm: 
				mask = model_circle(nx/2-2, nx, nx)
			if options.CTF: 
				from utilities import pad
				from filter import filt_ctf
			from filter import filt_tanl
			if myid == heavy_load_myid:
				log_main.add("Start computing 2D aveList and varList. Wait...")
				ttt = time()
			inner=nx//2-4
			outer=inner+2
			xform_proj_for_2D = [ None for i in range(len(proj_list))]
			for i in range(len(proj_list)):
				ki = proj_angles[proj_list[i][0]][3]
				if ki >= symbaselen:  continue
				mi = index[ki]
				dpar = Util.get_transform_params(imgdata[mi], "xform.projection", "spider")
				phiM, thetaM, psiM, s2xM, s2yM  = dpar["phi"],dpar["theta"],dpar["psi"],-dpar["tx"]*current_decimate,-dpar["ty"]*current_decimate
				grp_imgdata = []
				for j in range(img_per_grp):
					mj = index[proj_angles[proj_list[i][j]][3]]
					cpar = Util.get_transform_params(imgdata[mj], "xform.projection", "spider")
					alpha, sx, sy, mirror = params_3D_2D_NEW(cpar["phi"], cpar["theta"],cpar["psi"], -cpar["tx"]*current_decimate, -cpar["ty"]*current_decimate, mirror_list[i][j])
					if thetaM <= 90:
						if mirror == 0:  alpha, sx, sy, scale = compose_transform2(alpha, sx, sy, 1.0, phiM - cpar["phi"], 0.0, 0.0, 1.0)
						else:            alpha, sx, sy, scale = compose_transform2(alpha, sx, sy, 1.0, 180-(phiM - cpar["phi"]), 0.0, 0.0, 1.0)
					else:
						if mirror == 0:  alpha, sx, sy, scale = compose_transform2(alpha, sx, sy, 1.0, -(phiM- cpar["phi"]), 0.0, 0.0, 1.0)
						else:            alpha, sx, sy, scale = compose_transform2(alpha, sx, sy, 1.0, -(180-(phiM - cpar["phi"])), 0.0, 0.0, 1.0)
					imgdata[mj].set_attr("xform.align2d", Transform({"type":"2D","alpha":alpha,"tx":sx,"ty":sy,"mirror":mirror,"scale":1.0}))
					grp_imgdata.append(imgdata[mj])
				if not options.no_norm:
					for k in range(img_per_grp):
						ave, std, minn, maxx = Util.infomask(grp_imgdata[k], mask, False)
						grp_imgdata[k] -= ave
						grp_imgdata[k] /= std
				if options.fl > 0.0:
					for k in range(img_per_grp):
						grp_imgdata[k] = filt_tanl(grp_imgdata[k], options.fl, options.aa)

				#  Because of background issues, only linear option works.
				if options.CTF:  ave, var = aves_wiener(grp_imgdata, SNR = 1.0e5, interpolation_method = "linear")
				else:  ave, var = ave_var(grp_imgdata)
				# Switch to std dev
				# threshold is not really needed; it is just in case something turns out negative due to numerical accuracy.
				var = square_root(threshold(var))

				set_params_proj(ave, [phiM, thetaM, 0.0, 0.0, 0.0])
				set_params_proj(var, [phiM, thetaM, 0.0, 0.0, 0.0])

				aveList.append(ave)
				varList.append(var)
				xform_proj_for_2D[i] = [phiM, thetaM, 0.0, 0.0, 0.0]

				'''
				if nvec > 0:
					eig = pca(input_stacks=grp_imgdata, subavg="", mask_radius=radiuspca, nvec=nvec, incore=True, shuffle=False, genbuf=True)
					for k in range(nvec):
						set_params_proj(eig[k], [phiM, thetaM, 0.0, 0.0, 0.0])
						eigList[k].append(eig[k])
					"""
					if myid == 0 and i == 0:
						for k in xrange(nvec):
							eig[k].write_image("eig.hdf", k)
					"""
				'''
				if (myid == heavy_load_myid) and (i%100 == 0):
					log_main.add(" ......%6.2f%%  "%(i/float(len(proj_list))*100.))		
			del imgdata, grp_imgdata, cpar, dpar, all_proj, proj_angles, index
			if not options.no_norm: del mask
			if myid == main_node: del tab
			#  At this point, all averages and variances are computed
			mpi_barrier(MPI_COMM_WORLD)
			
			if (myid == heavy_load_myid):
				log_main.add("Computing aveList and varList took %12.1f [m]"%((time()-ttt)/60.))
			
			xform_proj_for_2D = wrap_mpi_gatherv(xform_proj_for_2D, main_node, MPI_COMM_WORLD)
			if (myid == main_node):
				write_text_row(xform_proj_for_2D, os.path.join(current_output_dir, "params.txt"))
			del xform_proj_for_2D
			mpi_barrier(MPI_COMM_WORLD)
			if options.ave2D:
				from fundamentals import fpol
				from applications import header
				if myid == main_node:
					log_main.add("Compute ave2D ... ")
					km = 0
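					#  Gather the 2D averages from all MPI processes in rank order and write them
					#  into a single output stack on the main node.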
					for i in range(number_of_proc):
						if i == main_node :
							for im in range(len(aveList)):
								aveList[im].write_image(os.path.join(current_output_dir, options.ave2D), km)
								km += 1
						else:
							nl = mpi_recv(1, MPI_INT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
							nl = int(nl[0])
							for im in range(nl):
								ave = recv_EMData(i, im+i+70000)
								"""
								nm = mpi_recv(1, MPI_INT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
								nm = int(nm[0])
								members = mpi_recv(nm, MPI_INT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
								ave.set_attr('members', map(int, members))
								members = mpi_recv(nm, MPI_FLOAT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
								ave.set_attr('pix_err', map(float, members))
								members = mpi_recv(3, MPI_FLOAT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
								ave.set_attr('refprojdir', map(float, members))
								"""
								tmpvol=fpol(ave, nx, nx,1)								
								tmpvol.write_image(os.path.join(current_output_dir, options.ave2D), km)
								km += 1
				else:
					mpi_send(len(aveList), 1, MPI_INT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
					for im in range(len(aveList)):
						send_EMData(aveList[im], main_node,im+myid+70000)
						"""
						members = aveList[im].get_attr('members')
						mpi_send(len(members), 1, MPI_INT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
						mpi_send(members, len(members), MPI_INT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
						members = aveList[im].get_attr('pix_err')
						mpi_send(members, len(members), MPI_FLOAT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
						try:
							members = aveList[im].get_attr('refprojdir')
							mpi_send(members, 3, MPI_FLOAT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
						except:
							mpi_send([-999.0,-999.0,-999.0], 3, MPI_FLOAT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
						"""
				if myid == main_node:
					header(os.path.join(current_output_dir, options.ave2D), params='xform.projection', fimport = os.path.join(current_output_dir, "params.txt"))
				mpi_barrier(MPI_COMM_WORLD)	
			if options.ave3D:
				from fundamentals import fpol
				t5 = time()
				if myid == main_node: log_main.add("Reconstruct ave3D ... ")
				ave3D = recons3d_4nn_MPI(myid, aveList, symmetry=options.sym, npad=options.npad)
				bcast_EMData_to_all(ave3D, myid)
				if myid == main_node:
					if current_decimate != 1.0: ave3D = resample(ave3D, 1./current_decimate)
				ave3D = fpol(ave3D, nnxo, nnxo, nnxo) # always to the original image size
					set_pixel_size(ave3D, 1.0)
					ave3D.write_image(os.path.join(current_output_dir, options.ave3D))
					log_main.add("Ave3D reconstruction took %12.1f [m]"%((time()-t5)/60.0))
					log_main.add("%-70s:  %s\n"%("The reconstructed ave3D is saved as ", options.ave3D))
					
			mpi_barrier(MPI_COMM_WORLD)		
			del ave, var, proj_list, stack, alpha, sx, sy, mirror, aveList
			'''
			if nvec > 0:
				for k in range(nvec):
					if myid == main_node: log_main.add("Reconstruction of eigenvolume %d"%k)
					cont = True
					ITER = 0
					mask2d = model_circle(radiuspca, nx, nx)
					while cont:
						#print "On node %d, iteration %d"%(myid, ITER)
						eig3D = recons3d_4nn_MPI(myid, eigList[k], symmetry=options.sym, npad=options.npad)
						bcast_EMData_to_all(eig3D, myid, main_node)
						if options.fl > 0.0:
							eig3D = filt_tanl(eig3D, options.fl, options.aa)
						if myid == main_node:
							eig3D.write_image(os.path.join(current_output_dir, "eig3d_%03d_%03d.hdf"%(k, ITER)))
						Util.mul_img( eig3D, model_circle(radiuspca, nx, nx, nx) )
						eig3Df, kb = prep_vol(eig3D)
						del eig3D
						cont = False
						icont = 0
						for l in range(len(eigList[k])):
							phi, theta, psi, s2x, s2y = get_params_proj(eigList[k][l])
							proj = prgs(eig3Df, kb, [phi, theta, psi, s2x, s2y])
							cl = ccc(proj, eigList[k][l], mask2d)
							if cl < 0.0:
								icont += 1
								cont = True
								eigList[k][l] *= -1.0
						u = int(cont)
						u = mpi_reduce([u], 1, MPI_INT, MPI_MAX, main_node, MPI_COMM_WORLD)
						icont = mpi_reduce([icont], 1, MPI_INT, MPI_SUM, main_node, MPI_COMM_WORLD)

						if myid == main_node:
							u = int(u[0])
							log_main.add(" Eigenvector: %d  number changed %d"%(k, int(icont[0])))
						else: u = 0
						u = bcast_number_to_all(u, main_node)
						cont = bool(u)
						ITER += 1

					del eig3Df, kb
					mpi_barrier(MPI_COMM_WORLD)
				del eigList, mask2d
			'''
			if options.ave3D: del ave3D
			if options.var2D:
				from fundamentals import fpol 
				from applications import header
				if myid == main_node:
					log_main.add("Compute var2D...")
					km = 0
					for i in range(number_of_proc):
						if i == main_node :
							for im in range(len(varList)):
								tmpvol=fpol(varList[im], nx, nx,1)
								tmpvol.write_image(os.path.join(current_output_dir, options.var2D), km)
								km += 1
						else:
							nl = mpi_recv(1, MPI_INT, i, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
							nl = int(nl[0])
							for im in range(nl):
								ave = recv_EMData(i, im+i+70000)
								tmpvol=fpol(ave, nx, nx,1)
								tmpvol.write_image(os.path.join(current_output_dir, options.var2D), km)
								km += 1
				else:
					mpi_send(len(varList), 1, MPI_INT, main_node, SPARX_MPI_TAG_UNIVERSAL, MPI_COMM_WORLD)
					for im in range(len(varList)):
					send_EMData(varList[im], main_node, im+myid+70000)  #  what about the attributes?
				mpi_barrier(MPI_COMM_WORLD)
				if myid == main_node:
					from applications import header
					header(os.path.join(current_output_dir, options.var2D), params = 'xform.projection',fimport = os.path.join(current_output_dir, "params.txt"))
				mpi_barrier(MPI_COMM_WORLD)
		if options.var3D:
			if myid == main_node: log_main.add("Reconstruct var3D ...")
			t6 = time()
			# radiusvar = options.radius
			# if( radiusvar < 0 ):  radiusvar = nx//2 -3
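			#  Reconstruct the 3D variability volume from the per-group 2D variance images.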
			res = recons3d_4nn_MPI(myid, varList, symmetry = options.sym, npad=options.npad)
			#res = recons3d_em_MPI(varList, vol_stack, options.iter, radiusvar, options.abs, True, options.sym, options.squ)
			if myid == main_node:
				from fundamentals import fpol
				if current_decimate != 1.0: res	= resample(res, 1./current_decimate)
				res = fpol(res, nnxo, nnxo, nnxo)
				set_pixel_size(res, 1.0)
				res.write_image(os.path.join(current_output_dir, options.var3D))
				log_main.add("%-70s:  %s\n"%("The reconstructed var3D is saved as ", options.var3D))
				log_main.add("Var3D reconstruction took %12.1f [m]"%((time()-t6)/60.0))
				log_main.add("Total computation time %12.1f [m]"%((time()-t0)/60.0))
				log_main.add("sx3dvariability finishes")
		from mpi import mpi_finalize
		mpi_finalize()
		
		if RUNNING_UNDER_MPI: global_def.MPI = False

		global_def.BATCH = False
Example #6
0
def runcheck(classavgstack, reconfile, outdir, inangles=None, selectdoc=None, prjmethod='trilinear', displayYN=False, 
			 projstack='proj.hdf', outangles='angles.txt', outstack='comp-proj-reproj.hdf', normstack='comp-proj-reproj-norm.hdf'):
	
	print("\n%s, Modified 2018-12-07\n" % __file__)
	
	# Check if inputs exist
	check(classavgstack)
	check(reconfile)
	
	# Create directory if it doesn't exist
	if not os.path.isdir(outdir):
		os.makedirs(outdir)  # unlike os.mkdir(), this creates intermediate directories as needed
		print("mkdir -p %s" % outdir)

	# Expand path for outputs
	projstack = os.path.join(outdir, projstack)
	outangles = os.path.join(outdir, outangles)
	outstack  = os.path.join(outdir, outstack)
	normstack = os.path.join(outdir, normstack)
	
	# Get number of images
	nimg0 = EMAN2_cppwrap.EMUtil.get_image_count(classavgstack)
	recon = EMAN2_cppwrap.EMData(reconfile)
	nx = recon.get_xsize()
	
	# In case class averages include discarded images, apply selection file
	if selectdoc:
		goodavgs, extension = os.path.splitext(classavgstack)
		newclasses = goodavgs + "_kept" + extension
		
		# e2proc2d appends to existing files, so rename existing output
		if os.path.exists(newclasses):
			renamefile = newclasses + '.bak'
			os.rename(newclasses, renamefile)
			print("mv %s %s" % (newclasses, renamefile))
		
		cmd7="e2proc2d.py %s %s --list=%s" % (classavgstack, newclasses, selectdoc)
		print(cmd7)
		os.system(cmd7)
		
		# Update class-averages
		classavgstack = newclasses
	
	# Import Euler angles
	if inangles:
		cmd6 = "sxheader.py %s --params=xform.projection --import=%s" % (classavgstack, inangles)
		print(cmd6)
		header(classavgstack, 'xform.projection', fimport=inangles)
	
	try:
		header(classavgstack, 'xform.projection', fexport=outangles)
		cmd1 = "sxheader.py %s --params=xform.projection --export=%s" % (classavgstack, outangles) 
		print(cmd1)
	except RuntimeError:
		print("\nERROR!! No projection angles found in class-average stack header!\n")
		print('Usage:', USAGE)
		exit()
	
	#cmd2="sxproject3d.py %s %s --angles=%s" % (recon, projstack, outangles)
	#print(cmd2)
	#os.system(cmd2)
	
	#  If you want to be fancy, there should be an option to choose the projection method;
	#  the mechanism can be copied from sxproject3d.py.  PAP
	if prjmethod=='trilinear':
		method_num = 1
	elif prjmethod=='gridding':
		method_num = -1
	elif prjmethod=='nn':
		method_num = 0
	else:
		print("\nERROR!! Valid projection methods are: trilinear (default), gridding, and nn (nearest neighbor).")
		print('Usage:', USAGE)
		exit()
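
	#  A hypothetical, equivalent way to express the same name -> code mapping (not part of the
	#  original script), kept commented out so the behavior is unchanged:
	# method_num = {'trilinear': 1, 'gridding': -1, 'nn': 0}.get(prjmethod)
	# if method_num is None:
	# 	print('Usage:', USAGE)
	# 	exit()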
	
	#project3d(recon, stack=projstack, listagls=outangles)
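	#  prep_vol pads and Fourier-transforms the map so that prgl() below can re-project it quickly;
	#  note that, as written, the interpolation method is hard-coded to 1 (trilinear) both here and
	#  in the prgl() call below, so the method_num selected above is not actually used.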
	recon = prep_vol(recon, npad = 2, interpolation_method = 1)

	result=[]
	#  Here you need the actual radius to compute proper CCCs, but if you do, you have to deal with translations.  PAP
	mask = model_circle(nx//2-2,nx,nx)
	
	# Number of images may have changed
	nimg1   = EMAN2_cppwrap.EMUtil.get_image_count(classavgstack)
	outangles = read_text_row(outangles)
	for imgnum in range(nimg1):
		# get class average
		classimg = get_im(classavgstack, imgnum)
		
		# compute re-projection
		prjimg = prgl(recon, outangles[imgnum], 1, False)
		
		# calculate 1D power spectra
		rops_dst = rops_table(classimg*mask)  
		rops_src = rops_table(prjimg)
		
		#  Set the power spectrum of the re-projection to that of the data.
		#  Since the data has an envelope, it would make more sense to set the data to the reconstruction,
		#  but to do that one would have to know the actual resolution of the data.
		#  You can check sxprocess.py --adjpw to see how this is done properly.  PAP
		table = [0.0]*len(rops_dst)  # initialize table
		for j in range( len(rops_dst) ):
			table[j] = sqrt( old_div(rops_dst[j],rops_src[j]) )
		prjimg = fft(filt_table(prjimg, table))  # match the Fourier amplitudes of the re-projection to those of the class average

		cccoeff = ccc(prjimg, classimg, mask)
		#print(imgnum, cccoeff)
		classimg.set_attr_dict({'cross-corr':cccoeff})
		prjimg.set_attr_dict({'cross-corr':cccoeff})
		prjimg.write_image(outstack,2*imgnum)
		classimg.write_image(outstack, 2*imgnum+1)
		result.append(cccoeff)
	del outangles
	meanccc = old_div(sum(result),nimg1)
	print("Average CCC is %s" % meanccc)

	nimg2 = EMAN2_cppwrap.EMUtil.get_image_count(outstack)
	
	# Stamp the overall mean CCC into the header of each re-projection (even-numbered image) in the comparison stack.
	for imgnum in range(nimg2):
		if (imgnum % 2 ==0):
			prjimg = get_im(outstack,imgnum)
			meanccc1 = prjimg.get_attr_default('mean-cross-corr', -1.0)
			prjimg.set_attr_dict({'mean-cross-corr':meanccc})
			write_header(outstack,prjimg,imgnum)
		if (imgnum % 100) == 0:
			print(imgnum)
	
	# e2proc2d appends to existing files, so delete existing output
	if os.path.exists(normstack):
		os.remove(normstack)
		print("rm %s" % normstack)
		


	#  Why would you want to do this?  If you do, it should have been done during the CCC calculations;
	#  otherwise what is seen does not correspond to the actual data and is thus misleading.  PAP
	#cmd5="e2proc2d.py %s %s --process=normalize" % (outstack, normstack)
	#print(cmd5)
	#os.system(cmd5)
	
	# Optionally pop up e2display
	if displayYN:
		cmd8 = "e2display.py %s" % outstack
		print(cmd8)
		os.system(cmd8)
	
	print("Done!")
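
# A minimal, hypothetical usage sketch for runcheck(); the file and directory names below are
# placeholders and are not part of the original example.
# if __name__ == "__main__":
# 	runcheck("classavgs.hdf", "recon3d.hdf", "outdir_check",
# 	         inangles="angles.txt", prjmethod="trilinear", displayYN=False)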