Example #1
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """This program requires ICON-GPU and IMOD. Wrapper to run ICON-GPU reconstructions using fewer parameters. The program automatically crops the tiltseries into a square, runs test alignments
	to make sure a good XY size is picked (some sizes make ICON crash), and picks the correct iteration numbers. 
	The reconstructed tomogram is also automatically rotated around x so that z is the shortest size using IMOD's clip rotx function, and can be optionally binned if --shrink is provided.

	Preprocessing and reconstruction normally happen in separate runs, since preprocessing acts on the raw (or, ideally, X-ray corrected) tiltseries (the .st file from IMOD).
	Reconstruction, on the other hand, should be performed on the preprocessed tiltseries AFTER it has been aligned with IMOD (the .ali file) and AFTER gold fiducials have been deleted.
	This program automatically backs up the .st and .ali IMOD files and replaces them with the ICONPreProcessed files prior to reconstruction (gold deletion needs to proceed through IMOD's pipeline).
	The --skipgolderasing option streamlines the entire pipeline into a single run of this program if the sample has no gold fiducials or the user does not want to delete them.

	To preprocess the raw tiltseries, run:

	e2tomo_icongpu.py --tiltseries=<.st file> --iconpreproc --thickness=<value in pixels>

	This MUST be run from the IMOD reconstruction directory, with all alignment files in it, so that the preprocessed tiltseries can be realigned automatically without going through IMOD's ETOMO pipeline again.


	For reconstruction, run:

	e2tomo_icongpu.py --tiltseries=<.ali file> --sizez=<size of output tomogram in z>

	The .tlt file should be located automatically if it has the same name (but with .tlt extension) as the --tiltseries, as should be the case for an intact IMOD directory.
	Otherwise, to supply an alternative .tlt file, add --tltfile=<.tlt file from imod> to the command above.

	(This can be run in any given directory as long as the .ali and .tlt files are there).


	If you don't need or want to delete gold fiducials and want to run BOTH steps at once (ICONPreProcess and reconstruction with ICON-GPU), add --skipgolderasing
	"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--gpus",
        type=str,
        default="-1",
        help=
        """default=-1 (all available GPUs will be used). To select specific GPUs to use, provide them as a comma-separated list of integers."""
    )

    parser.add_argument(
        "--highpasspixels",
        type=int,
        default=4,
        help=
        """Default=4. Number of Fourier pixels to zero out during highpass filtering AFTER reconstruction (postprocessing). Provide 'None' or '0' to disactivate."""
    )

    parser.add_argument(
        "--iconpreproc",
        action='store_true',
        default=False,
        help=
        """default=False. If on, this will trigger a call to ICONPreProcess on the .st file supplied through --tiltseries."""
    )

    parser.add_argument(
        "--lowpassresolution",
        type=float,
        default=100.0,
        help=
        """Default=100. Resolution in angstroms to lowpass filter the tomogram AFTER reconstruction (postprocessing). Type 'None' or '0' to disactivate."""
    )

    parser.add_argument(
        "--path",
        type=str,
        default='icongpu',
        help=
        """Default=icongpu. Name of the directory where to store the output results. Only works when reconstructing the .ali file (preprocessing of the .st file will output the preprocessed tiltseries to the current directory). A numbered series of 'icongpu' directories will be created (i.e., if the program is run more than once, results will be stored in iconpu_01, icongpu_02, etc., directories, to avoid overwriting data)."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        default=-1,
        help=
        "Default=-1. Set the PID of the parent process, used for cross platform PPID"
    )

    parser.add_argument(
        "--shrink",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Shrink factor to provide IMOD's binvol program with to bin/shrink the output tomogram after rotation about the x axis."""
    )
    parser.add_argument(
        "--sizez",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Output size in Z for the reconstructed tomogram. This should be the same as the --thickness value provided during tiltseries preprocessing, or larger (it's good to make sure the entire reconstruction will fit in the reconstruction volume without being too tight). If running a reconstruction of the .ali file and both --thickness and --sizez are provided, the latter will supersede the former."""
    )
    parser.add_argument(
        "--skipgolderasing",
        action='store_true',
        default=False,
        help=
        """default=False. If on, this will call IMOD to generate a new aligned tiltseries after ICONPreProcess, and then ICON-GPU will be automatically called to perform the reconstruction. Typically, one would NOT use this option as it is useful to delete the gold fiducials prior to reconstruction."""
    )

    parser.add_argument(
        "--thickness",
        type=int,
        default=0,
        help=
        """default=0. Thickness of the specimen as seen in a preliminary weighted back projection reconstruction from IMOD (through how many slices in Z are there specimen densities?)."""
    )
    parser.add_argument(
        "--tiltseries",
        type=str,
        default=None,
        help=
        """default=None. .st file from IMOD if --iconpreproc is turned on. Otherwise, supply the .ali file from IMOD *after* X-ray correction, iconpreprocessing, and alignment with IMOD."""
    )
    parser.add_argument("--tltfile",
                        type=str,
                        default=None,
                        help="""default=None. .tlt file from IMOD.""")

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness."
    )

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)

    if not options.tiltseries:
        print("\nERROR: --tiltseries required")
        sys.exit(1)

    filename, extension = os.path.splitext(options.tiltseries)

    alifile = options.tiltseries.replace(extension, '.ali')

    c = os.getcwd()
    findir = os.listdir(c)

    if not options.tltfile:
        anglesfile = os.path.basename(
            options.tiltseries.replace(extension, '.tlt'))
        if anglesfile in findir:
            options.tltfile = anglesfile
        else:
            print(
                "\nERROR: expected tlt file {} (a text file with the list of tilt angles) not found. Supply --tltfile explicitly."
                .format(anglesfile))
            sys.exit(1)

    if options.iconpreproc and alifile not in findir:
        print(
            "\nERROR: the aligned tiltseries must be in the same directory, and should match the name of the raw .st tiltseries, except that the extension should be .ali instead of .st; the expected file is {}"
            .format(alifile))
        sys.exit(1)

    if options.verbose:
        print(
            "\n(e2tomo_icongpu)(icongpufunc) making directory {} to store reconstruction results"
            .format(options.path))

    from EMAN2_utils import makepath
    options = makepath(options)

    cmdsfilepath = options.path + '/cmds.txt'

    if options.iconpreproc:

        if '.st' not in extension:
            print(
                "\nERROR: the extension of the --tiltseries is {} instead of .st; make sure this is the correct tiltseries, and change the extension to .st"
                .format(extension))
            shutil.rmtree(options.path)
            sys.exit(1)

        if not options.thickness:
            print("\nERROR: --thickness required for ICONPreProcess.")
            shutil.rmtree(options.path)
            sys.exit(1)

        iconpreprocfunc(options, alifile, extension, cmdsfilepath)

    elif not options.iconpreproc or options.skipgolderasing:

        if not options.thickness and not options.sizez:
            print("\nERROR: --thickness or --sizez required")
            sys.exit(1)
        elif options.thickness and options.sizez:
            print(
                "\nWARNING: --thickness={} and --sizez={} were both provided; only --sizez={} will be used for reconstruction"
                .format(options.thickness, options.sizez, options.sizez))
            options.thickness = options.sizez

        if not options.tltfile:
            print(
                "\nWARNING: --tltfile not provided. The program will attempt to find it automatically"
            )
            tltfile = options.tiltseries.replace(extension, '.tlt')

            if tltfile not in findir:
                print(
                    "\nERROR: in the absence of --tltfile, the .tlt file with the tilt angles must be in the running (current) directory, and should match the name of the raw .st tiltseries, except that the extension should be .tlt instead of .st; the expected file is {}"
                    .format(tltfile))
                sys.exit(1)

        if not options.iconpreproc and '.ali' not in extension:
            print(
                "\nWARNING: the extension of the --tiltseries is {} instead of .ali; make sure this is the correct tiltseries"
                .format(extension))

        if not options.iconpreproc:
            alifile = options.tiltseries

        icongpufunc(options, alifile, cmdsfilepath)

    E2end(logger)

    return
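
The usage string above mentions that the program backs up the .st and .ali IMOD files before replacing them with the ICONPreProcessed output; that step lives in iconpreprocfunc, which is not shown here. Below is a minimal, hypothetical sketch of such a backup-and-swap helper (the name backup_and_swap and the .bkp suffix are assumptions for illustration, not the script's actual implementation):

import os
import shutil


def backup_and_swap(original, preprocessed):
    """Hypothetical sketch: keep a one-time .bkp copy of an IMOD file, then
    overwrite it with the ICONPreProcessed version so that the rest of IMOD's
    pipeline (e.g., gold erasing) operates on the preprocessed data."""
    backup = original + '.bkp'
    if not os.path.isfile(backup):        # do not clobber an earlier backup
        shutil.copy2(original, backup)    # preserve the raw .st or aligned .ali file
    shutil.copy2(preprocessed, original)  # swap in the preprocessed tiltseries
    return backup

For instance, backup_and_swap('tomo.ali', 'icon_preprocessed.ali') would leave tomo.ali.bkp next to the swapped-in file (filenames are illustrative only).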
Example #2
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """Plot single column or double column datasets in a high-resolution, ready to publish form. Exampek command:
	For a single file with x y values:
	e2plotfig.py --data mydata.txt <options>

	For multiple files with x y data:
	e2plotfig.py mydata*txt <options>

	or

	e2plotfig.py --data mydata1.txt,mydata2.txt,mydata3.txt,...mydataN.txt <options>

	To make one plot using separate source files for x and y data:
	e2plotfig.py --datax xvals.txt --datay yvals.txt <options>

	To make multiple plots using separate source files for x and y data:
	e2plotfig.py --datax xvals1.txt,xvals2.txt,...,xvalsN.txt --datay yvals1.txt,yvals2.txt,...,yvalsN.txt <options>


	"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--binwidth",
        type=float,
        default=0.0,
        help=
        """default=0.0 (not used). requires --histogram. Y axes. Enforce this value for the width of histogram bins (it will be used to calculate --nbins)"""
    )

    parser.add_argument(
        "--data",
        type=str,
        default='',
        help=
        """default=None (not used). Text file(s) with two column of values mean to be plotted on the X and Y axes. If supplying multiple files, separate them by commas."""
    )
    parser.add_argument(
        "--datax",
        type=str,
        default='',
        help=
        """default=None (not used). Text file(s) with a single column of values meant to be plotted on the X axis. If not provided, the X axis will go from 0 to n, where n is the number of values in --datay. If supplying multiple files, separate them by commas (the number of files for --datax and --datay must be the same)."""
    )
    parser.add_argument(
        "--datay",
        type=str,
        default='',
        help=
        """default=None (not used). Text file(s) with a single column of values meant to be plotted on the  Y axis. If not provided, the Y axis will go from 0 to n, where n is the number of values in --datax. If supplying multiple files, separate them by commas (the number of files for --datax and --datay must be the same)."""
    )

    parser.add_argument(
        "--highresolution",
        action='store_true',
        default=False,
        help=
        """default=False. If on, this option will enforce writing high-resolution plots (dpi 300), ready for publication, as opposed to lower resolution plots which take up less space on your computer (dpi 150)."""
    )
    parser.add_argument(
        "--histogram",
        action='store_true',
        default=False,
        help=
        """default=False. If on, this will make the resulting plot a histogram. If --nbins not supplied, the parameter will be automatically calculated."""
    )

    parser.add_argument(
        "--individualplots",
        action='store_true',
        default=False,
        help=
        """default=False. in addition to plotting the motion for all frames in all images in a single plot, generate individual plots per image"""
    )

    parser.add_argument(
        "--labelxaxis",
        type=str,
        default='x',
        help="""Default=x. Label for x axis (specify units through --unitsx."""
    )
    parser.add_argument(
        "--labelyaxis",
        type=str,
        default='y',
        help="""Default=y. Label for y axis (specify units through --unitsy."""
    )
    parser.add_argument("--labeltitle",
                        type=str,
                        default='',
                        help="""Default=None. Title for figure.""")
    parser.add_argument(
        "--legend",
        type=str,
        default='',
        help=
        """"Default=None. If you are plotting only 1 curve, or --individualplots is on, and you desire a specific legend for the data series in each plot, supply it here as a string with no spaces. You can provide any string without spaces; if you need spaces, add an underscore instead and the program will replace the underscore with a space; for exampe 'ribosome_80s' will appear as 'ribosome 80s'."""
    )
    parser.add_argument(
        "--linesoff",
        action='store_true',
        default=False,
        help=
        """Default=False. This requires --markerson and will get rid of the line uniting data points (the plot will be like a scatter plot)."""
    )

    parser.add_argument(
        "--markerson",
        action='store_true',
        default=False,
        help=
        """Default=False. This will enforce markers in the plot for each data point (like a scatter plot, but the points will still be united by a line)."""
    )
    parser.add_argument(
        "--marker",
        type=str,
        default='',
        help=
        """"Default=None. If you are plotting only 1 curve, or --individualplots is on, and you desire a specific marker, supply it here (for example o, or *, or x."""
    )

    parser.add_argument(
        "--maxx",
        type=float,
        default=None,
        help=
        """Default=None. Maximum value to plot in X. Automatically set to the maximum value in the data, per image, if not explicitly set."""
    )
    parser.add_argument(
        "--maxy",
        type=float,
        default=None,
        help=
        """Default=None. Maximum value to plot in Y. Automatically set to the maximum value in the data, per image, if not explicitly set."""
    )
    parser.add_argument(
        "--minx",
        type=float,
        default=None,
        help=
        """Default=None. Minimum value to plot in X. Automatically set to the maximum value in the data, per image, if not explicitly set."""
    )
    parser.add_argument(
        "--miny",
        type=float,
        default=None,
        help=
        """Default=None. Minimum value to plot in Y. Automatically set to the maximum value in the data, per image, if not explicitly set."""
    )

    parser.add_argument(
        "--mult",
        type=float,
        default=None,
        help=
        """Default=None. The data will be multiplied by this factor immediatebly prior to plotting. For example, if the data is in the order of magnitude of 10^6, you might say --mult=0.000001, and in --xunits change, e.g., nm^3 to nm^3x10^6"""
    )

    parser.add_argument(
        "--nbins",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Requires --histogram. Number of bins for histogram. If not provided, the optimal bin number will be automatically calculated based on bin-width, computed using Scott's normal reference rule, width = (3.5*std)/cuberoot(n), where 'std' is the standard deviation of the mean intensity distribution of population and n is the number of mean intensity values considered (this is affected by --removesigma). Then, bins will be nbins = (max(intensities) - min(intensities)) / width."""
    )
    parser.add_argument(
        "--nocolor",
        action='store_true',
        default=False,
        help=
        """Default=False. Plots are colored, by default; don't be cheap; clear communication and representation pays off; or consider publishing in online open source journals that don't charge extra for color figures."""
    )
    parser.add_argument(
        "--normalize",
        action='store_true',
        default=False,
        help=
        """Default=False. This option will normalize all plots to be scaled between 0 and 1."""
    )

    parser.add_argument(
        "--outputtag",
        type=str,
        default='plotfig',
        help=
        """Default=plotfig. String common to all automatically generated output files. For example, --outputtag=myplot will generate myplot1.png, myplot2.png, ..., myplotN.png"""
    )

    parser.add_argument(
        "--path",
        type=str,
        default='plotfig',
        help=
        """Default=de_plots. Name of the directory where to store the output results."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        default=-1,
        help=
        "Default=-1. Set the PID of the parent process, used for cross platform PPID"
    )

    parser.add_argument(
        "--scaleaxes",
        action='store_true',
        default=False,
        help=
        """Default=False. This will force the axes to be on the same scale.""")

    parser.add_argument(
        "--unitsx",
        type=str,
        default='AU',
        help=
        """Default=AU (arbitrary units). Units for the x axis.'microns' or 'mu' and 'angstroms' or 'A' (and '1/angstroms' or '1/A') will be replaced by the appropriate symbol. You can provide any string without spaces; if you need spaces, add an underscore instead and the program will replace the underscore with a space; for exampe 'GPU_h' will appear as 'GPU h'."""
    )
    parser.add_argument(
        "--unitsy",
        type=str,
        default='AU',
        help=
        """Default=AU (arbitrary units). Units for the y axis.'microns' or 'mu' and 'angstroms' or 'A' (and '1/angstroms' or '1/A')  will be replaced by the appropriate symbol. You can provide any string without spaces; if you need spaces, add an underscore instead and the program will replace the underscore with a space; for exampe 'GPU_h' will appear as 'GPU h'."""
    )

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness."
    )

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)

    datafiles = []
    if options.data:
        if options.datax or options.datay:
            print(
                "\n(e2plotfig)(main) ERROR: provide --data OR --datax AND/OR --datay."
            )
            sys.exit(1)
    elif not options.data:
        if not options.datax and not options.datay:
            if args:
                datafiles = args
            elif not args:
                print(
                    "\n(e2plotfig)(main) ERROR: provide at least one of --data, --datax, or --datay."
                )
                sys.exit(1)

    if options.unitsx:
        if options.unitsx == 'angstroms' or options.unitsx == 'Angstroms' or options.unitsx == "A" or options.unitsx == "ANGSTROMS":
            options.unitsx = u"\u212B"
        if options.unitsx == '1/angstroms' or options.unitsx == '1/Angstroms' or options.unitsx == "1/A" or options.unitsx == "1/ANGSTROMS":
            options.unitsx = '1/' + u"\u212B"
        if options.unitsx == 'microns' or options.unitsx == 'mu' or options.unitsx == "Microns" or options.unitsx == "mu" or options.unitsx == "MICRONS" or options.unitsx == "MU":
            options.unitsx = u"\u00B5"

    if options.unitsy:
        if options.unitsy == 'angstroms' or options.unitsy == 'Angstroms' or options.unitsy == "A" or options.unitsy == "ANGSTROMS":
            options.unitsy = u"\u212B"
        if options.unitsy == '1/angstroms' or options.unitsy == '1/Angstroms' or options.unitsy == "1/A" or options.unitsy == "1/ANGSTROMS":
            options.unitsy = '1/' + u"\u212B"
        if options.unitsy == 'microns' or options.unitsy == 'mu' or options.unitsy == "Microns" or options.unitsy == "mu" or options.unitsy == "MICRONS" or options.unitsy == "MU":
            options.unitsy = u"\u00B5"

    xaxes = {}
    yaxes = {}
    datadict = {}

    lines = []
    if options.data or datafiles:
        if options.data:
            datafiles = options.data.split(',')
        k = 0

        if len(datafiles) < 2:
            options.individualplots = True

        for f in datafiles:

            xaxis = []
            yaxis = []
            with open(f) as datafile:
                lines = datafile.readlines()
                if options.verbose:
                    print("\nreading file {}".format(f))
                    if options.verbose > 9:
                        print("\nlines are", lines)

                if lines:
                    lines = fixlines(lines)
                    xaxis = [
                        float(line.replace('\n', '').split()[0])
                        for line in lines
                    ]
                    yaxis = [
                        float(line.replace('\n', '').split()[1])
                        for line in lines
                    ]
                else:
                    print("\nERROR: source file {} seems empty; no lines read".
                          format(f))
                    sys.exit(1)

                if options.normalize:
                    yaxis = normalize(yaxis)

                #if xaxis and yaxis:
                xaxes.update({k: xaxis})
                yaxes.update({k: yaxis})
                datadict.update({k: [xaxis, yaxis]})

                k += 1

    elif not options.data:
        if options.datax:
            dataxfiles = options.datax.split(',')

            if len(dataxfiles) < 2:
                options.individualplots = True

            if options.datay:
                datayfiles = options.datay.split(',')
                if len(dataxfiles) != len(datayfiles):
                    print(
                        "\n(e2plotfig)(main) ERROR: --datax and --datay must contain the same number of files. Now, nx files={}, ny files={}"
                        .format(len(dataxfiles), len(datayfiles)))
                    sys.exit(1)

            k = 0
            for fx in dataxfiles:
                with open(fx) as dataxfile:
                    lines = dataxfile.readlines()
                    xaxis = [
                        float(line.replace('\n', '').split()[0])
                        for line in lines
                    ]
                    xaxes.update({k: xaxis})
                    if not options.datay:
                        yaxis = list(range(len(xaxis)))
                        yaxes.update({k: yaxis})

                        if options.normalize:
                            xaxis = normalize(xaxis)

                        datadict.update({k: [xaxis, yaxis]})
                    k += 1

        lines = []
        if options.datay:
            datayfiles = options.datay.split(',')
            k = 0
            for fy in datayfiles:
                with open(fy) as datayfile:
                    lines = datayfile.readlines()
                    yaxis = [
                        float(line.replace('\n', '').split()[0])
                        for line in lines
                    ]

                    if options.normalize:
                        yaxis = normalize(yaxis)

                    yaxes.update({k: yaxis})

                    if not options.datax:
                        xaxis = list(range(len(yaxis)))
                        xaxes.update({k: xaxis})
                    else:
                        xaxis = xaxes[k]

                    datadict.update({k: [xaxis, yaxis]})
                    k += 1

    from EMAN2_utils import makepath
    options = makepath(options)

    fig, ax = resetplot()

    plotdata(options, datadict)

    E2end(logger)

    return
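
main() above relies on two helpers defined elsewhere in the script, fixlines() and normalize(). Going only by the --normalize help text (scale each curve between 0 and 1), a minimal sketch of what normalize() might look like follows; the body is an assumption, not the script's actual code:

def normalize(values):
    """Hypothetical sketch: rescale a list of values to the [0, 1] range,
    as described by the --normalize option."""
    vmin = min(values)
    shifted = [v - vmin for v in values]  # shift so the minimum becomes 0
    vmax = max(shifted)
    if vmax == 0:                         # flat data: avoid division by zero
        return shifted
    return [v / vmax for v in shifted]    # scale so the maximum becomes 1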
Example #3
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """Calculates, plots and optionally averages the FSC between multiple images and a reference."""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--input",
        type=str,
        default=None,
        help="""Volume or stack of volumes to be compared to --ref""")

    parser.add_argument(
        "--path",
        type=str,
        default='e2sptfscs',
        help="Results directory. If not specified, defaults to e2sptfscs/")

    parser.add_argument(
        "--ref",
        type=str,
        default=None,
        help=
        "Volume that will be 'static' (the 'reference' to which volumes in --input will be aligned to). The format MUST be '.hdf' or '.mrc' "
    )

    parser.add_argument(
        "--nocolor",
        action='store_true',
        default=False,
        help=
        """Turns the ouput png(s) into grey scale figures. Instead of using different colors to distinguish between various curves on the same plot, this option will have the program automatically use different markers in black and white for each curve."""
    )

    parser.add_argument(
        "--sym",
        type=str,
        default='c1',
        help="""Will symmetrize --ref and --input prior to FSC computation.""")

    parser.add_argument(
        "--mask",
        type=str,
        default=None,
        help=
        """Mask processor applied to particles before fsc computation. Default is mask.sharp:outer_radius=-2"""
    )

    parser.add_argument(
        "--mirror",
        action="store_true",
        help=
        """If set, it will generate a mirrored version of --ref and recompute FSCs against it. This will be IN ADDITION to FSC computation of --input against the original, unmirrored --ref.""",
        default=False)

    #parser.add_argument("--preproc",type=str,default='',help="Any processor (as in e2proc3d.py) to be applied to each volumes prior to FSC computation. Typically this would be an automask.")

    #parser.add_argument("--savepreproc",action='store_true',default=False,help="""Default=False. Otherwise, save preprocessed/masked volumes for inspection.""")

    parser.add_argument(
        "--averagefscs",
        action='store_true',
        default=False,
        help=
        """Default=False. Averages FSC curves if --input contains multiple images."""
    )

    parser.add_argument(
        "--parallel",
        help="Parallelism. See http://blake.bcm.edu/emanwiki/EMAN2/Parallel",
        default="thread:2")

    parser.add_argument(
        "--apix",
        type=float,
        help=
        "Provide --apix for automatic FSC calculation if you supply --plotonly and no volumes for --input and --ref, or if the apix of these is wrong.",
        default=1.0)
    #parser.add_argument("--boxsize", type=float, help="(Probably not needed for anything)", default=0)

    parser.add_argument(
        "--maxres",
        type=float,
        help=
        "How far in resolution to extend the FSC curve on the x axis; for example, to see up to 20anstroms, provide --maxres=1.0. Default=15",
        default=1.0)
    parser.add_argument(
        "--cutoff",
        type=str,
        help=
        "Comma separated values of cutoff thresholds to plot as horizontal lines. Default=0.5, to turn of supply 'None'. ",
        default='0.5')

    parser.add_argument(
        "--smooth",
        action="store_true",
        help=
        """Smooth out FSC curves by taking the average of a low value with a subsequent maxima.""",
        default=False)

    parser.add_argument(
        "--smooththresh",
        type=float,
        help=
        """If --smooth is provided the curve will be smoothed only up to this resolution. Default is 100.""",
        default=100)

    #parser.add_argument("--fit",action="store_true", help="Smooth out FSC curves.", default=False)

    parser.add_argument("--polydegree",
                        type=int,
                        help="Degree of the polynomial to fit.",
                        default=None)

    parser.add_argument(
        "--ppid",
        type=int,
        help="Set the PID of the parent process, used for cross platform PPID.",
        default=-1)
    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness")

    parser.add_argument("--plotonly",
                        type=str,
                        help="""FSC curves to plot in separate plots. 
		Skips fsc curve generation. Provide .txt. files separated by commas 
		--plotonly=file1.txt,file2.txt,file3.txt etc...""",
                        default=None)

    parser.add_argument(
        "--singleplot",
        action="store_true",
        help=
        "It --plotonly provided, all FSC curves will be on the same plot/figure",
        default=False)

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)

    if options.mask:
        options.mask = parsemodopt(options.mask)

    #if options.preproc:
    #	options.preproc=parsemodopt(options.preproc)

    if options.cutoff and options.cutoff != 'None' and options.cutoff != 'none':
        options.cutoff = options.cutoff.split(',')
        print("Options.cutoff is", options.cutoff)
    else:
        options.cutoff = None

    from EMAN2_utils import makepath
    options = makepath(options, 'sptres')

    print('\n\nafter making path, options.path is', options.path)

    if options.input:
        hdr = EMData(options.input, 0, True)
        apix = hdr['apix_x']

    if options.apix:
        apix = float(options.apix)

    if options.plotonly:
        curves = options.plotonly
        curves = curves.split(',')

        if not options.singleplot:
            for curve in curves:
                print("Found this curve to plot", curve)
                fscplotter([curve], options, apix)

        elif options.singleplot:
            fscplotter(curves, options, apix)

        print("Done plotting")
        sys.exit()

    else:
        getfscs(options, apix)

        print("Done calculating FSCs and plotting them.")

    E2end(logger)
    return
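
The --smooth help above describes replacing a low value with the average of that value and a subsequent maximum of the FSC curve; the actual smoothing lives in getfscs()/fscplotter(), which are not shown. A minimal, hypothetical sketch of that rule follows (the choice of "highest later value" is an assumed interpretation, and the --smooththresh resolution cutoff is omitted for brevity):

def smooth_fsc(values):
    """Hypothetical sketch: wherever the FSC curve dips below the previous
    point, replace the low value with the average of that value and the
    highest subsequent value."""
    smoothed = list(values)
    for i in range(1, len(smoothed) - 1):
        if smoothed[i] < smoothed[i - 1]:        # a dip in the curve
            nextmax = max(smoothed[i + 1:])      # a subsequent maximum
            smoothed[i] = (smoothed[i] + nextmax) / 2.0
    return smoothed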
Example #4
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """plot motion from DE frames using .txt files from DE_process_frames.py"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument("--apix",
                        type=float,
                        default=0.0,
                        help="""default=0.0. apix of data""")

    parser.add_argument(
        "--bidirectionalfrom",
        type=str,
        default='',
        help=
        """Default=None (not used). Used for tiltseries data. Initial angle for the first half of the tiltseries. For example, a tiltseries from 0 to -55, then 5 to 55, --bidrectionalfrom should be set to 0."""
    )

    parser.add_argument(
        "--highestangle",
        type=str,
        default=55,
        help=
        "Default=55. Most positive tilt angle; e.g., 60 in a [-60,60] tiltseries"
    )
    parser.add_argument(
        "--highresolution",
        action='store_true',
        default=False,
        help=
        """default=False. If on, this option will enforce writing high-resolution plots (dpi 300), ready for publication, as opposed to lower resolution plots which take up less space on your computer (dpi 150)."""
    )

    parser.add_argument(
        "--individualplots",
        action='store_true',
        default=False,
        help=
        """default=False. in addition to plotting the motion for all frames in all images in a single plot, generate individual plots per image."""
    )

    parser.add_argument(
        "--lowestangle",
        type=str,
        default=-55,
        help=
        "Default=-55. Most negative tilt angle; e.g., -60 in a [-60,60] tiltseries"
    )

    #parser.add_argument("--miny",type=float,default=0.0,help="""Default=None. Minimum value to plot in y. Automatically set to the maximum value in the data, per image, if not explicitly set.""")
    #parser.add_argument("--maxy",type=float,default=0.0,help="""Default=None. Maximum value to plot in y. Automatically set to the maximum value in the data, per image, if not explicitly set.""")

    parser.add_argument(
        "--negativetiltseries",
        action='store_true',
        default=False,
        help=
        """Default=False. This means the negative tilt angles in a bidrectional tiltseries were collected first; e.g., 0 to -55, then 5 to 55."""
    )

    parser.add_argument(
        "--outputtag",
        type=str,
        default='deplot',
        help=
        """default=deplot. string common to all automatically generated output files"""
    )

    parser.add_argument(
        "--path",
        type=str,
        default='de_plots',
        help=
        """Default=de_plots. Name of the directory where to store the output results."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        default=-1,
        help=
        "Default=-1. Set the PID of the parent process, used for cross platform PPID"
    )

    parser.add_argument(
        "--savetxts",
        action='store_true',
        default=False,
        help=
        """Default=False. save user-friendly txt files easyily plotable with other software."""
    )

    parser.add_argument(
        "--tiltstep",
        type=int,
        default=0,
        help="Default=None. Angular step size between images in the tiltseries."
    )
    parser.add_argument(
        "--tltfile",
        type=str,
        default='',
        help=
        """Default=None. Name of .tlt or .rawtlt file (typically from IMOD or serialEM) with the tilt angles."""
    )

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higher number means higher level of verboseness."
    )

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)

    if not options.apix:
        print(
            "\n(e2plot_de_motion)(main) ERROR: --apix required to accurately plot motion in Angstroms."
        )
        sys.exit(1)

    anglesin = []
    if not options.tltfile:
        if not options.tiltstep or not options.lowestangle or not options.highestangle:
            print(
                "\n(e2plot_de_motion)(main)ERROR: must provide --tiltstep, --lowestangle, and --highestangle in the absence of --tltfile"
            )
            sys.exit(1)

        options.lowestangle = int(options.lowestangle)
        options.highestangle = int(options.highestangle)
        options.tiltstep = int(options.tiltstep)

    elif options.tltfile:
        if options.highestangle or options.lowestangle or options.tiltstep:
            print(
                "\n(e2plot_de_motion)(main) WARNING: --tltfile was provided; therefore, --highestangle,--lowestangle, and --tiltstep will be ignored."
            )

        with open(options.tltfile, 'r') as tltfile:
            lines = tltfile.readlines()
            anglesin = [
                int(round(float(line.replace('\n', '')))) for line in lines
            ]
            if anglesin[0] < 0:
                options.lowestangle = anglesin[0]
                options.highestangle = anglesin[-1]
            elif anglesin[0] > 0:
                options.lowestangle = anglesin[-1]
                options.highestangle = anglesin[0]
            options.tiltstep = int(
                round(
                    old_div((math.fabs(anglesin[0]) + math.fabs(anglesin[-1])),
                            len(anglesin))))

    from EMAN2_utils import makepath
    options = makepath(options)

    #print "apix is", options.apix
    #print "name is", options.outputtag

    if options.path not in options.outputtag:
        options.outputtag = options.path + '/' + options.outputtag

    c = os.getcwd()

    listdir = os.listdir(c)

    ids = set([])

    for f in listdir:
        if options.verbose:
            print("\nfound file", f)

        if '_y.txt' in f or '_x.txt' in f:
            stem = f.replace('y.txt', '').replace('x.txt', '')
            ids.add(stem)
            if options.verbose:
                print("\nfound stem", stem)

    lowestangle = options.lowestangle
    highestangle = options.highestangle

    print("\nlowestangle {} type={}".format(lowestangle, type(lowestangle)))
    print("\nhighestangle {} type={}".format(highestangle, type(highestangle)))
    #print "\nstartangle {} type={}".format(startangle, type(startangle))

    step = options.tiltstep

    datadict = {}

    lastangle = highestangle

    #if not options.tltfile:
    kk = 0
    if options.bidirectionalfrom:
        startangle = int(options.bidirectionalfrom)
        print("\nstartangle {} type={}".format(startangle, type(startangle)))
        if options.negativetiltseries:
            for angle in range(startangle, lowestangle - 1, -step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1

            for angle in range(startangle + step, highestangle + 1, step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1

        elif not options.negativetiltseries:
            for angle in range(startangle, highestangle + 1, step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1

            for angle in range(startangle - step, lowestangle - 1, -step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1

    elif not options.bidirectionalfrom:
        if options.negativetiltseries:
            startangle = highestangle
            print("\nstartangle {} type={}".format(startangle,
                                                   type(startangle)))
            for angle in range(startangle, lowestangle - 1, -step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1
        elif not options.negativetiltseries:
            startangle = lowestangle
            print("\nstartangle {} type={}".format(startangle,
                                                   type(startangle)))
            for angle in range(startangle, highestangle + 1, step):
                datadict.update({kk: [angle]})
                lastangle = angle
                kk += 1

    #elif options.tltfile:

    if len(datadict) != len(ids):
        dif = len(datadict) - len(ids)
        print(
            "\n(DE_translation_plotter)(main) WARNING: there are {} files but only {} angles given the input parameters (lowestangle={}, highestangle={}, tiltstep={}). Therefore, {} additional angles will be added so that each file is assigned a different angle."
            .format(len(ids), len(datadict), lowestangle, highestangle, step,
                    int(math.fabs(dif))))

        tmpangle = lastangle + step
        for ii in range(int(math.fabs(dif))):

            datadict.update({kk: [tmpangle]})
            kk += 1
            tmpangle += step

    ids = list(ids)
    ids.sort()

    filesdict = {}

    kk = 0

    for iid in ids:
        if options.verbose > 9:
            print("\n(DE_translation_plotter)(main) or kk={} iid is {}".format(
                kk, iid))
            print("therefore datadict[kk]={}, type={}".format(
                datadict[kk], type(datadict[kk])))
        datadict[kk].append(iid)
        kk += 1

    if options.verbose > 9:
        print("\n(DE_translation_plotter)(main) ids before are {} type={} ".
              format(ids, type(ids)))
        print("\ndatadict is {}".format(datadict))

    datadictsorted = sorted(list(datadict.items()), key=lambda e: e[1][0])

    if options.verbose > 9:
        print("\n(DE_translation_plotter)(main) datadictsorted is {}".format(
            datadictsorted))
        print("\nTYPE {}".format(type(datadictsorted)))

    figx_vals = {}
    figy_vals = {}
    figr_vals = {}

    avgslist = []

    xavgs = []
    xtotals = []
    xerrors = []

    yavgs = []
    ytotals = []
    yerrors = []

    ravgs = []
    rtotals = []
    rerrors = []

    angles = []

    nimgs = len(ids)

    k = 0

    for ele in datadictsorted:

        id = ele[-1][-1]
        angletoplot = ele[-1][0]
        angles.append(angletoplot)
        if options.verbose > 9:
            print("\n igm is {} and angle is {}".format(id, angletoplot))

        if k > nimgs:
            break

        figx_values_angstroms = getvaluesfromfile(options, id, 'x')[0]
        figx_vals.update({k: [id, angletoplot, figx_values_angstroms, 'x']})

        figx_values_angstroms_abs = [
            math.fabs(x) for x in figx_values_angstroms
        ]
        #xavg = sum( [math.fabs(x) for x in figx_values_angstroms] )/len(figx_values_angstroms)

        xavg = np.mean(figx_values_angstroms_abs)
        xerror = np.std(figx_values_angstroms_abs)
        xtotal = sum(figx_values_angstroms_abs)
        xavgs.append(xavg)
        xerrors.append(xerror)
        xtotals.append(xtotal)

        figy_values_angstroms = getvaluesfromfile(options, id, 'y')[0]
        figy_vals.update({k: [id, angletoplot, figy_values_angstroms, 'y']})

        figy_values_angstroms_abs = [
            math.fabs(y) for y in figy_values_angstroms
        ]
        #yavg = sum([math.fabs(x) for y in figy_values_angstroms])/len(figy_values_angstroms)

        yavg = np.mean(figy_values_angstroms_abs)
        yerror = np.std(figy_values_angstroms_abs)
        ytotal = sum(figy_values_angstroms_abs)
        yavgs.append(yavg)
        yerrors.append(yerror)
        ytotals.append(ytotal)

        #c: write out x and y values as a single column txt file for easier plotting with other programs, compared to the original format of files from DE
        if options.savetxts:
            with open(options.path + '/' + id + "_x.txt", 'w') as outxfile:
                outxfile.writelines(
                    [str(x) + '\n' for x in figx_values_angstroms])
            with open(options.path + '/' + id + "_y.txt", 'w') as outyfile:
                outyfile.writelines(
                    [str(y) + '\n' for y in figy_values_angstroms])

        #c: compute |r| for each movie/tiltangle, write to a file, and also write x,y motion file
        rs = []
        rlines = []
        xandylines = []

        for i in range(len(figx_values_angstroms)):
            r = round(
                math.sqrt(figx_values_angstroms[i] * figx_values_angstroms[i] +
                          figy_values_angstroms[i] * figy_values_angstroms[i]),
                2)
            rs.append(r)

            line = str(i) + '\t' + str(r) + '\n'
            rlines.append(line)

            xandyline = str(figx_values_angstroms[i]) + '\t' + str(
                figy_values_angstroms[i]) + '\n'
            xandylines.append(xandyline)

        if options.savetxts:
            with open(options.path + '/' + id + '_r.txt', 'w') as rfile:
                rfile.writelines(rlines)
            with open(options.path + '/' + id + '_x_vs_y.txt', 'w') as xyfile:
                xyfile.writelines(xandylines)

        figr_vals.update({k: [id, angletoplot, rs, 'r']})

        #figxy_vals = {k:[id,angletoplot,rs,'r']}

        #compute average |r| and error(s)
        ravg = np.mean(rs)
        rerror = np.std(rs)
        rtotal = sum(rs)
        ravgs.append(ravg)
        rerrors.append(rerror)
        rtotals.append(rtotal)

        avgslist.append([angletoplot, ravg, rerror])

        if options.verbose:
            print("\nfor img %d xavg=%f, yavg=%f, ravg=%f" %
                  (k, xavg, yavg, ravg))

        filesdict.update({
            id: [
                xavg, yavg, ravg, figx_values_angstroms, figy_values_angstroms,
                rs
            ]
        })

        k += 1

    #c: write out the average motion in each direction for each movie in a user-friendly single-column text file
    if options.savetxts:
        with open(options.path + '/' + id + "_x_avgs.txt", 'w') as outxavgfile:
            outxavgfile.writelines([str(x) + '\n' for x in xavgs])
        with open(options.path + '/' + id + "_y_avgs.txt", 'w') as outyavgfile:
            outyavgfile.writelines([str(y) + '\n' for y in yavgs])
        with open(options.path + '/' + id + "_r_avgs.txt", 'w') as outravgfile:
            outravgfile.writelines([str(r) + '\n' for r in ravgs])

        with open(options.path + '/' + id + "_x_total.txt",
                  'w') as outxtotalfile:
            outxtotalfile.writelines([str(x) + '\n' for x in xtotals])
        with open(options.path + '/' + id + "_y_total.txt",
                  'w') as outytotalfile:
            outytotalfile.writelines([str(y) + '\n' for y in ytotals])
        with open(options.path + '/' + id + "_r_total.txt",
                  'w') as outrtotalfile:
            outrtotalfile.writelines([str(r) + '\n' for r in rtotals])

    plotdata(options,
             figx_vals,
             tag='x',
             title='Motion in X',
             xlabel='Frame number',
             ylabel="Translation in X (" + u"\u212B" + ")")
    plotdata(options,
             figy_vals,
             tag='y',
             title='Motion in Y',
             xlabel='Frame number',
             ylabel="Translation in Y (" + u"\u212B" + ")")
    plotdata(options,
             figr_vals,
             tag='r',
             title='Motion |r|',
             xlabel='Frame number',
             ylabel="Translation |r| (" + u"\u212B" + ")")
    plotdata(options,
             figy_vals,
             tag='x_vs_y',
             title='Motion in X vs Y',
             xlabel="Translation in X (" + u"\u212B" + ")",
             ylabel="Translation in Y (" + u"\u212B" + ")",
             altxaxisdata=figx_vals)

    fig, ax = resetplot()

    plotavgdata(options,
                xavgs,
                angles,
                xerrors,
                tag='x_avgs',
                title='Average X motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))
    plotavgdata(options,
                yavgs,
                angles,
                yerrors,
                tag='y_avgs',
                title='Average Y motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))
    plotavgdata(options,
                ravgs,
                angles,
                rerrors,
                tag='r_avgs',
                title='Average |r| motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))

    plotavgdata(options,
                xtotals,
                angles,
                xerrors,
                tag='x_total',
                title='Total X motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))
    plotavgdata(options,
                ytotals,
                angles,
                yerrors,
                tag='y_total',
                title='Total Y motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))
    plotavgdata(options,
                rtotals,
                angles,
                rerrors,
                tag='r_total',
                title='Total |r| motion per image',
                xlabel='Tilt angle (degrees)',
                ylabel="Average translation(" + u"\u212B" + ")",
                figsize=(10, 6))

    E2end(logger)

    return
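
getvaluesfromfile(), called in the loop above, is defined elsewhere in the script. Judging from how it is used (element [0] is a list of per-frame translations in Angstroms, and --apix is described as required "to accurately plot motion in Angstroms"), a minimal sketch might look like the following; the file-naming convention and the pixel-to-Angstrom conversion are assumptions for illustration:

def getvaluesfromfile(options, stem, axis):
    """Hypothetical sketch: read the single-column DE_process_frames output for
    one axis ('x' or 'y') of one movie and convert the per-frame shifts from
    pixels to Angstroms using --apix. Returns a one-element list so the [0]
    indexing used in main() works."""
    with open(stem + axis + '.txt') as f:  # e.g., '<stem>' + 'x' + '.txt'
        shifts_pixels = [float(line.split()[-1]) for line in f if line.strip()]
    shifts_angstroms = [s * options.apix for s in shifts_pixels]
    return [shifts_angstroms]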
Example #5
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """This program allows you to examine density variations along one or more volumes.
				It calculates the mean intensity either for slices (planes) along any of the three cartesian axes (X, Y or Z), 
				or for radial consecutive shells of increasing radius, 
				or for cylindrical shells of varying or fixed height, starting from the center of the volume. 
				All mean density values are saved to .txt files, and the plots produced from them are saved as .png images.
				To compare the density variations across different volumes, you can plot all curves in a single plot.
				To reduce complexity and anomalies induced by the missing wedge, an option is provided to project the volumes onto 2-D images first."""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--classifymaxpeaks",
        type=int,
        default=0,
        help=
        """default=0. Number of highest peaks to consider for classification. Amongst the n peaks provided, --classifymaxpeaks=n, the peak occurring at the largest radius will be used as the classifier. If --classifymaxpeaks=1, the highest peak will be the classifier. To smooth the radial density curve consider low pass filtering through --lowpass. To remove aberrant peaks consider masking with --mask."""
    )

    parser.add_argument(
        "--fitgaussian",
        action="store_true",
        default=False,
        help=
        """default=false. Fits a Gaussian to the radial density plot (only appropriate in certain cases; look at the raw plot first and rerun program if the plot looks like a bell curve)."""
    )
    parser.add_argument(
        "--fixedcylinderheight",
        type=int,
        default=0,
        help=
        """Default=0. Works only if --mode=cylinder, and keeps the height of the cylinder at a constant value, while varying the radius."""
    )

    parser.add_argument(
        "--highpass",
        type=str,
        default=None,
        help=
        """default=none. A highpass filtering processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation."""
    )

    parser.add_argument(
        "--input",
        type=str,
        default=None,
        help=
        """Volume whose radial density plot you want to compute. For multiple volumes, either provide them as an .hdf stack, or separate them by commas --vols=first.hdf,second.hdf,etc..."""
    )

    parser.add_argument(
        "--lowpass",
        type=str,
        default=None,
        help=
        """default=None. A lowpass filtering processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation."""
    )

    parser.add_argument(
        "--mask",
        type=str,
        default=None,
        help=
        """default=none. Masking processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation."""
    )
    parser.add_argument(
        "--mode",
        type=str,
        default='sphere',
        help=
        """default=sphere. provide --mode=x, y, or z to get the average density per slice in the indicated direction. --mode=cylinder for concentric cylindrical shells. For MULTIPLE modes, separate them by commas, for example --mode=x,y,z,cylinder"""
    )

    parser.add_argument(
        "--normproc",
        type=str,
        default=None,
        help=
        """default=none. Normalization processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation. If normalize.mask is used, results of the mask option will be passed in automatically."""
    )
    parser.add_argument(
        "--normalizeplot",
        action="store_true",
        default=False,
        help=
        """default=false. This will make the maximum density in each plot or curve equal to 1."""
    )

    parser.add_argument('--path',
                        type=str,
                        default='spt_radialplot',
                        help="""Directory to save the results.""")
    parser.add_argument(
        "--plotmaxima",
        action="store_true",
        default=False,
        help=
        """default=false. If on, this plots a vertical line at each maxima (peak) for easier visualization."""
    )
    parser.add_argument(
        "--preprocess",
        type=str,
        default=None,
        help=
        """Any processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation."""
    )

    parser.add_argument(
        "--savetxt",
        action="store_true",
        default=False,
        help=
        """default=false. Save plot files as .txt, so that they can be replotted with other software if necessary."""
    )
    parser.add_argument(
        "--shrink",
        type=int,
        default=0,
        help=
        """default=0 (not used). Optionally shrink the input volumes by an integer amount."""
    )
    parser.add_argument(
        "--singlefinalplot",
        action="store_true",
        default=False,
        help=
        """default=false. Plot all the Radial Density Profiles of the volumes provided in all .hdf stacks in one FINAL single 'master' plot."""
    )
    parser.add_argument(
        "--singleplotperfile",
        action="store_true",
        default=False,
        help=
        """default=false. Plot all the Radial Density Profiles of the volumes provided in each .hdf stack in one single plot."""
    )
    parser.add_argument(
        "--subset",
        type=int,
        default=0,
        help="""An n-subset of particles from --input to use.""")
    parser.add_argument(
        "--sym",
        dest="sym",
        default='c1',
        help=
        """default=c1. Symmetry to impose choices are: c<n>, d<n>, h<n>, tet, oct, icos. For this to make any sense in the context of this program, the particles need to be aligned to the symmetry axis first, which can be accomplished by running them through e2symsearch3d.py."""
    )

    parser.add_argument(
        "--threshold",
        type=str,
        default=None,
        help=
        """default=None. A threshold  processor (see e2help.py --verbose=10) applied to each volume prior to radial density plot computation."""
    )

    parser.add_argument(
        "--ppid",
        type=int,
        help=
        """Set the PID of the parent process, used for cross platform PPID""",
        default=-1)

    parser.add_argument(
        "--verbose",
        "-v",
        default=0,
        help=
        """default=0. Verbose level [0-9], higher number means higher level of verboseness""",
        dest="verbose",
        action="store",
        metavar="n",
        type=int)

    (options, args) = parser.parse_args()

    import matplotlib.pyplot as plt

    from EMAN2_utils import makepath, runcmd, sptOptionsParser
    options = makepath(options, 'spt_radialplot')

    if not options.input:
        parser.print_help()
        exit(0)
    elif options.subset:
        subsetStack = options.path + '/subset' + str(options.subset).zfill(
            len(str(options.subset))) + '.hdf'
        print("\nSubset to be written to", subsetStack)

        subsetcmd = 'e2proc3d.py ' + options.input + ' ' + subsetStack + ' --first=0 --last=' + str(
            options.subset - 1)
        print("Subset cmd is", subsetcmd)

        runcmd(subsetcmd)
        #p=subprocess.Popen( subsetcmd, shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE )
        #text=p.communicate()
        #p.stdout.close()

        options.input = subsetStack

    logger = E2init(sys.argv, options.ppid)

    options = sptOptionsParser(options)

    #if options.normproc:
    #	options.normproc=parsemodopt(options.normproc)

    #if options.mask:
    #	options.mask=parsemodopt(options.mask)

    #if options.preprocess:
    #	options.preprocess=parsemodopt(options.preprocess)

    #if options.lowpass:
    #	options.lowpass=parsemodopt(options.lowpass)

    #if options.highpass:
    #	options.highpass=parsemodopt(options.highpass)

    #if options.threshold:
    #	options.threshold=parsemodopt(options.threshold)

    files = options.input
    files = files.split(',')

    for i in range(0, len(files)):
        for j in range(i + 1, len(files)):
            if files[i] == files[j]:
                print(
                    "ERROR: You have supplied a file twice, see file[i]={}, file[j]={}"
                    .format(files[i], files[j]))
                sys.exit(1)

    modes = options.mode.split(',')

    for m in modes:
        options.mode = m
        modetag = '_MODE' + m
        finalvalues = {}
        imgsperstack = 1

        names = []
        finalvalues = []
        maxsall = {}
        minsall = {}

        for i in files:
            n = EMUtil.get_image_count(i)

            print("Stack={} has n={} images in it".format(i, n))

            kk = 0
            stack = {}
            stackvalues = []
            suffix = modetag
            for j in range(n):
                ptcl = EMData(i, j)
                if n > 1:
                    suffix = modetag + str(kk).zfill(len(str(n)))

                values = calcvalues(ptcl, options)

                ret = calcmaxima(values)
                #ret[0] and ret[-1] are lists of [pixel,value] pairs for all maxima and minima, respectively
                maxima = ret[0]
                minima = ret[-1]

                uniquetag = i + '_indextag' + str(j)
                maxsall.update({uniquetag: maxima})
                minsall.update({uniquetag: minima})

                print("For file={} img number={} the max={}".format(
                    i, j, max(values)))

                if options.normalizeplot:

                    minv = min(values)
                    for v in range(len(values)):
                        values[v] = values[v] - minv

                    maxv = max(values)
                    for v in range(len(values)):
                        values[v] = old_div(values[v], maxv)
                    print("Therefore, max={}".format(max(values)))

                id = i.replace('.', suffix + '.')
                stackvalues.append([id, values])
                kk += 1
            stack.update({i: stackvalues})
            finalvalues.append(stack)

        plotname = 'finalplot_MODE' + m + '.png'
        fileid = ''

        if options.classifymaxpeaks:
            classifymax(options, maxsall)

        cc = 0
        for ele in finalvalues:
            thisfile = list(ele.keys())[0]
            key = thisfile

            n = EMUtil.get_image_count(thisfile)

            if options.singleplotperfile:
                fileid = thisfile.split('.')[0]
                plotname = fileid + modetag + '.png'

            kk = 0
            maxvallocs_pixels = []
            maxvallocs_angs = []

            maxslope_pixels = []
            maxslope_postmax_pixels = []
            maxslope_postlastpeak_pixels = []

            minslope_pixels = []
            for index in range(n):

                apix = EMData(thisfile, index, True)['apix_x']

                values = ele[key][index][1]
                id = ele[key][index][0]

                xs = list(range(len(values)))

                for j in range(len(xs)):
                    xs[j] = int(round(xs[j] * apix))

                if options.savetxt:

                    txtname = options.path + '/' + thisfile.split(
                        '.')[0] + modetag + str(kk).zfill(len(str(n))) + '.txt'
                    textwriter(values, options, txtname)
                    #txtf = open(txtname,'w')
                    #lines = []
                    #for v in range(len(values)):
                    #	line = str(v) +  ' ' + str(values[v]) + '\n'
                    #	lines.append(line)
                    #txtf.writelines(lines)
                    #txtf.close()

                maxval = max(values)
                maxvalloc = values.index(maxval)
                maxvallocs_pixels.append(maxvalloc)
                maxvallocs_angs.append(maxvalloc * apix)

                uniquetag = thisfile + '_indextag' + str(
                    index)  #thisfile=filename, index=image index within that file
                maxima = maxsall[uniquetag]

                lastpeakloc = maxima[-1][0]

                ziplist = list(zip(values[:-1], values[1:]))
                ziplistpostmax = list(
                    zip(values[maxvalloc:-1], values[maxvalloc + 1:]))
                ziplistpostlastpeak = list(
                    zip(values[lastpeakloc:-1], values[lastpeakloc + 1:]))

                diflist = [a1 - a2 for a1, a2 in ziplist]
                diflistpostmax = [a1 - a2 for a1, a2 in ziplistpostmax]
                diflistpostlastpeak = [
                    a1 - a2 for a1, a2 in ziplistpostlastpeak
                ]
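                #note: each entry above is values[i] - values[i+1], so positive entries mark radii where the profile falls; max() below therefore locates the steepest drop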
                #print("\nziplist is".format(ziplist))
                #print("\ndiflist is".format(diflist))

                max_slope = max(diflist)
                indexmaxslope = diflist.index(max_slope)
                maxslope_pixels.append(indexmaxslope)

                max_slope_postmax = max(diflistpostmax)
                indexmaxslope_postmax = diflist.index(max_slope_postmax)
                maxslope_postmax_pixels.append(indexmaxslope_postmax)

                max_slope_postlastpeak = indexmaxslope_postlastpeak = None  #predefine so the verbose print below does not fail if the computation in the try block fails

                try:
                    max_slope_postlastpeak = max(diflistpostlastpeak)
                    indexmaxslope_postlastpeak = diflist.index(
                        max_slope_postlastpeak)
                    maxslope_postlastpeak_pixels.append(
                        indexmaxslope_postlastpeak)

                except:
                    if options.verbose:
                        print(
                            '\n\n!!!!!ERROR computing slope after last peak; skipping particle'
                        )

                if options.verbose:
                    print(
                        "\nmaxpeak at pixel={}; maxslope at pixel={}; after maxpeak, maxslope at pixel={}; after lastpeak, maxslope at pixel={}"
                        .format(maxvalloc, indexmaxslope,
                                indexmaxslope_postmax,
                                indexmaxslope_postlastpeak))

                min_slope = min(diflist)
                indexminslope = diflist.index(min_slope)
                print("\nmin slope is at pixel={}".format(indexminslope))

                plt.plot(xs, values, linewidth=2, alpha=0.5)

                if options.plotmaxima:
                    nm = len(maxima)
                    peaklocs = []
                    peakvals = []
                    for ii in range(nm):
                        peakloc = maxima[ii][0] * apix
                        peaklocs.append(peakloc)

                        peakval = maxima[ii][1]
                        peakvals.append(peakval)

                        plt.axvline(x=peakloc,
                                    linewidth=2,
                                    alpha=0.5,
                                    color='k',
                                    linestyle='--')

                    #takes data of the form: textwriter(yvalues,options,txtname,invert=0,xvalues=None)
                    maxtxtname = options.path + '/' + thisfile.split(
                        '.')[0] + modetag + str(kk).zfill(len(
                            str(n))) + '_maxima.txt'

                    textwriter(peakvals, options, maxtxtname, 0, peaklocs)

                if not options.singleplotperfile and not options.singlefinalplot:
                    #plotname=i.split('.')[0]+str(kk).zfill(len(str(n))) + '.png'
                    plotname = id.split('.')[0] + '.png'
                    fileid = plotname.split('.')[0]

                if options.mode == 'sphere':
                    plt.title("Spherical radial density plot " + fileid)
                    plt.xlabel("Radius (angstroms)")
                    plt.ylabel("Density (arbitrary units)")

                if options.mode == 'x':
                    plt.title("Density plot of slices along x-axis " + fileid)
                    plt.xlabel("X (angstroms)")
                    plt.ylabel("Density (arbitrary units)")

                if options.mode == 'y':
                    plt.title("Density plot of slices along y-axis " + fileid)
                    plt.xlabel("Y (angstroms)")
                    plt.ylabel("Density (arbitrary units)")

                if options.mode == 'z':
                    plt.title("Density plot of slices along z-axis " + fileid)
                    plt.xlabel("Z (angstroms)")
                    plt.ylabel("Density (arbitrary units)")

                if options.mode == 'cylinder':
                    plt.title(
                        "Density plot of concentric cylyndrical shells " +
                        fileid)
                    plt.xlabel("Radius (angstroms)")
                    plt.ylabel("Density (arbitrary units)")

                if not options.singleplotperfile and not options.singlefinalplot:
                    if options.path not in plotname:
                        plotname = options.path + '/' + plotname
                    plt.savefig(plotname)
                    plt.clf()
                else:
                    pass
                kk += 1
                cc += 1

            maxtxtname_pixels = options.path + '/' + thisfile.split(
                '.')[0] + modetag + str(kk).zfill(len(
                    str(n))) + '_maxima_pixels.txt'
            textwriter(maxvallocs_pixels, options, maxtxtname_pixels, 0)

            maxtxtname_angs = options.path + '/' + thisfile.split(
                '.')[0] + modetag + str(kk).zfill(len(
                    str(n))) + '_maxima_angstroms.txt'
            textwriter(maxvallocs_angs, options, maxtxtname_angs, 0)

            maxslopetxtname_pixels = options.path + '/' + thisfile.split(
                '.')[0] + modetag + str(kk).zfill(len(
                    str(n))) + '_maxslope_pixels.txt'
            textwriter(maxslope_pixels, options, maxslopetxtname_pixels, 0)

            maxslopetxtname_postmax_pixels = options.path + '/' + thisfile.split(
                '.')[0] + modetag + str(kk).zfill(len(
                    str(n))) + '_maxslope_postmax_pixels.txt'
            textwriter(maxslope_postmax_pixels, options,
                       maxslopetxtname_postmax_pixels, 0)

            maxslopetxtname_postlastpeak_pixels = options.path + '/' + thisfile.split(
                '.')[0] + modetag + str(kk).zfill(len(
                    str(n))) + '_maxslope_postlastpeak_pixels.txt'
            textwriter(maxslope_postlastpeak_pixels, options,
                       maxslopetxtname_postlastpeak_pixels, 0)

            if options.singleplotperfile:
                if options.path not in plotname:
                    plotname = options.path + '/' + plotname
                plt.savefig(plotname)
                plt.clf()

        if options.singlefinalplot:
            if options.path not in plotname:
                plotname = options.path + '/' + plotname
            plt.savefig(plotname)
            plt.clf()
    return
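The loop above min-max normalizes each radial profile (when --normalizeplot is set) and then locates the steepest drop from the list of successive differences. A minimal standalone sketch of those two steps on a made-up profile (the names below are illustrative only, not part of the program):

profile = [0.0, 2.0, 5.0, 4.0, 1.0, 0.5]  #toy radial density profile
minv = min(profile)
shifted = [v - minv for v in profile]
maxv = max(shifted)
normalized = [v / maxv for v in shifted]  #values now span 0..1, as after --normalizeplot
difs = [a1 - a2 for a1, a2 in zip(normalized[:-1], normalized[1:])]
steepest_drop_pixel = difs.index(max(difs))  #largest decrease between neighboring radii
print(normalized, steepest_drop_pixel)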
Example #6
0
def main():

    usage = """e2orthoproject.py <options> . 
			The options should be supplied in "--option=value", replacing "option" for a valid option name, and "value" for an acceptable value for that option. 
			This program produces orthogonal projections of an EM volume.
			"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--angles",
        type=str,
        default='',
        help=
        "A single comma or space separated triplet of az,alt,phi values representing the particle rotation to apply before projecting it."
    )

    parser.add_argument(
        "--input",
        type=str,
        default=None,
        help=
        """The name of the input volume from which you want to generate orthogonal projections. You can supply more than one model either by providing an .hdf stack of models, or by providing multiple files separated by commas."""
    )

    parser.add_argument(
        "--lowpass",
        type=str,
        default=None,
        help=
        "A lowpass filtering processor (as in e2proc3d.py) to be applied to each volume prior to alignment. Not applied to aligned particles before averaging."
    )

    parser.add_argument(
        "--mask",
        type=str,
        default=None,
        help=
        "Mask processor applied to particles before alignment. Default is None"
    )

    parser.add_argument(
        "--normproc",
        type=str,
        default="normalize.edgemean",
        help=
        """Normalization processor applied to particles before alignment. Default is to use normalize.mask. If normalize.mask is used, results of the mask option will be passed in automatically. If you want to turn this option off specify \'None\'"""
    )

    parser.add_argument(
        "--path",
        type=str,
        default=None,
        help=
        """Directory to store results in. The default is a numbered series of directories containing the prefix 'orthoproject'; for example, orthoproject_02 will be the directory by default if 'orthoproject_01' already exists."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        default=-1,
        help="Set the PID of the parent process, used for cross platform PPID")

    parser.add_argument(
        "--onlyx",
        action='store_true',
        default=False,
        help=
        "Only projection of the YZ plane will be generated [a 'side view'].")
    parser.add_argument(
        "--onlyy",
        action='store_true',
        default=False,
        help=
        "Only projection of the XZ plane will be generated [another 'side view']."
    )
    parser.add_argument(
        "--onlyz",
        action='store_true',
        default=False,
        help="Only projection of the XY plane will be generated a 'top view']")

    parser.add_argument(
        "--saverotvol",
        action='store_true',
        default=False,
        help=
        "Will save the volume in each rotated position used to generate a projection."
    )
    parser.add_argument(
        "--shrink",
        type=int,
        default=0,
        help=
        "Integer value to shrink the models by before generating projections.")

    parser.add_argument(
        "--tag",
        type=str,
        default='',
        help=
        "When supplying --angles, tag the output projection with a string provided through --tag"
    )
    parser.add_argument(
        "--transformsfile",
        type=str,
        default='',
        help=
        "A text files containing lines with one triplet of az,alt,phi values each, representing the transforms to use to project a single volume supplied."
    )

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higher number means higher level of verboseness."
    )

    (options, args) = parser.parse_args()

    options = checkinput(options)

    if options.transformsfile:
        n = EMUtil.get_image_count(options.input)
        if n > 1:
            print(
                "ERROR: You cannot supply --transformsfile for particle stacks; it only works for individual volumes."
            )
            sys.exit()
    '''#
	#Check for sanity of some supplied parameters
	'''

  #
    if options.onlyz:
        if options.onlyx or options.onlyy:
            print(
                "ERROR: You can only supply one of --onlyx, --onlyy or --onlyz at a time."
            )
            sys.exit()

    if options.onlyx:
        if options.onlyy or options.onlyz:
            print(
                "ERROR: You can only supply one of --onlyx, --onlyy or --onlyz at a time."
            )
            sys.exit()

    if options.onlyy:
        if options.onlyx or options.onlyz:
            print(
                "ERROR: You can only supply one of --onlyx, --onlyy or --onlyz at a time."
            )
            sys.exit()

    if options.onlyz and options.onlyx:
        print("ERROR: Cannot supply --onlyz and --onlyx at the same time")
        sys.exit()
    if options.onlyz and options.onlyy:
        print("ERROR: Cannot supply --onlyz and --onlyy at the same time")
        sys.exit()
    if options.onlyy and options.onlyx:
        print("ERROR: Cannot supply --onlyy and --onlyx at the same time")
        sys.exit()
    '''#
	#Generate projection transforms
	'''

  #
    projectiondirections = []

    if options.onlyz:
        pz = Transform({'type': 'eman', 'az': 0, 'alt': 0, 'phi': 0})
        projectiondirections = {'pz': pz}
    elif options.onlyx:
        px = Transform({'type': 'eman', 'az': 90, 'alt': -90, 'phi': 0})
        projectiondirections = {'px': px}
    elif options.onlyy:
        py = Transform({'type': 'eman', 'az': 0, 'alt': 90, 'phi': 0})

        projectiondirections = {'py': py}
    else:
        pz = Transform({'type': 'eman', 'az': 0, 'alt': 0, 'phi': 0})
        px = Transform({'type': 'eman', 'az': 90, 'alt': -90, 'phi': 0})
        py = Transform({'type': 'eman', 'az': 0, 'alt': 90, 'phi': 0})
        projectiondirections = {'pz': pz, 'px': px, 'py': py}
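    #'pz' projects along z (the XY-plane 'top view'); 'px' and 'py' correspond to the YZ and XZ 'side views' described in the --onlyx and --onlyy help strings above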

    if options.transformsfile:
        f = open(options.transformsfile, 'r')
        lines = f.readlines()
        f.close()

        np = len(lines)
        k = 0
        for line in lines:
            line = line.replace(',', ' ')
            line = line.replace('\n', '')
            line = line.replace('\t', ' ')
            line = line.split()
            t = Transform({
                'type': 'eman',
                'az': float(line[0]),
                'alt': float(line[1]),
                'phi': float(line[2])
            })
            tag = 'p' + str(k).zfill(len(str(np)))
            projectiondirections.update({tag: t})
            k += 1

    logger = E2init(sys.argv, options.ppid)

    if options.mask:
        options.mask = parsemodopt(options.mask)

    if options.lowpass:
        options.lowpass = parsemodopt(options.lowpass)

    if options.normproc:
        options.normproc = parsemodopt(options.normproc)
    '''#
	#Make a directory where to store the results
	'''

  #
    from EMAN2_utils import makepath
    options = makepath(options, 'orthoprjs')
    '''#
	#Read input
	'''

  #
    print("options.input", options.input)
    models = options.input.split(',')

    rootpath = os.getcwd()
    path = rootpath + '/' + options.path

    for model in models:
        n = EMUtil.get_image_count(model)

        newpath = path
        if len(models) > 1:
            newpath = path + '/' + model.split('.hdf')[0]
            os.system('mkdir ' + newpath)

        kstack = 0
        for i in range(n):
            subpath = newpath
            submodelname = subpath + '/' + model.split('.')[0] + '_prjs.hdf'
            if n > 1:
                if not options.onlyx and not options.onlyy and not options.onlyz:
                    subpath = newpath + '/ptcl' + str(i).zfill(len(str(n)))
                    os.system('mkdir ' + subpath)
                    submodelname = subpath + '/' + model.split(
                        '.')[0] + '_ptcl' + str(i).zfill(len(
                            str(n))) + '_prjs.hdf'
                else:
                    if options.onlyx:
                        submodelname = subpath + '/' + model.split(
                            '.hdf')[0] + '_Xprjs.hdf'
                    if options.onlyy:
                        submodelname = subpath + '/' + model.split(
                            '.hdf')[0] + '_Yprjs.hdf'
                    if options.onlyz:
                        submodelname = subpath + '/' + model.split(
                            '.hdf')[0] + '_Zprjs.hdf'

            submodel = EMData(model, i)
            if options.angles:
                angles = options.angles
                angles = angles.replace(',', ' ')
                angles = angles.split()
                t = Transform({
                    'type': 'eman',
                    'az': float(angles[0]),
                    'alt': float(angles[1]),
                    'phi': float(angles[2])
                })

                submodel.transform(t)

            apix = submodel['apix_x']
            '''
			Pre-process/enhance subvolume if specified
			'''
            # Make the mask first, use it to normalize (optionally), then apply it
            mask = EMData(submodel["nx"], submodel["ny"], submodel["nz"])
            mask.to_one()

            if options.mask:
                #print "This is the mask I will apply: mask.process_inplace(%s,%s)" %(options.mask[0],options.mask[1])
                mask.process_inplace(options.mask[0], options.mask[1])

            # normalize
            if options.normproc:
                if options.normproc[0] == "normalize.mask":
                    options.normproc[1]["mask"] = mask

                submodel.process_inplace(options.normproc[0],
                                         options.normproc[1])
            '''
			#Mask after normalizing with the mask you just made, which is just a box full of 1s if no mask is specified
			'''
            submodel.mult(mask)
            '''
			#If normalizing, it's best to do mask-normalize-mask
			'''
            if options.normproc:
                #if options["normproc"][0]=="normalize.mask":
                #	options["normproc"][1]["mask"]=mask

                submodel.process_inplace(options.normproc[0],
                                         options.normproc[1])

                submodel.mult(mask)

            if options.lowpass:
                submodel.process_inplace(options.lowpass[0],
                                         options.lowpass[1])

            if options.shrink:
                submodel.process_inplace('math.meanshrink',
                                         {'n': options.shrink})

            kindividual = 0
            for d in projectiondirections:
                print("\nThis is the projection direction", d)
                print("And this the corresponding transform",
                      projectiondirections[d])
                print("\n")
                prj = submodel.project("standard", projectiondirections[d])
                prj.set_attr('xform.projection', projectiondirections[d])
                prj['apix_x'] = apix
                prj['apix_y'] = apix

                tag = ''

                if options.angles:
                    if options.tag:
                        tag = options.tag

                if options.onlyx or options.onlyy or options.onlyz:
                    if options.onlyx:
                        tag = 'onlyx'
                    elif options.onlyy:
                        tag = 'onlyy'
                    elif options.onlyz:
                        tag = 'onlyz'

                    k = kstack

                else:
                    k = kindividual

                if tag:
                    prj.write_image(
                        submodelname.replace('.hdf', '_' + tag + '.hdf'), k)
                else:
                    prj.write_image(submodelname, k)

                #print "Options.saverotvol is", options.saverotvol
                if options.saverotvol:
                    submodel_rot = submodel.copy()
                    submodel_rot.transform(projectiondirections[d])

                    volname = submodelname.replace('_prjs.', '_vol' + d + '.')
                    #print "I will save the rotated volume to this file", volname
                    submodel_rot.write_image(volname, 0)

                kindividual += 1
                kstack += 1

    return ()
Example #7
0
def main():

	usage = """e2orthoproject.py <input_file1> <input_file2> ... <input_fileN> <options> . 
			The options should be supplied in "--option=value" format (or --options=value:parameter1=value:parameter2=value... etc, 
			replacing "option" for a valid option name, and "value", "parameter" for a acceptable entries for that option. 
			This program extracts slices from a volume or multiple volumes in a stack. By default, the program will extract three orthogonal slices 
			(one from each direction X, Y and Z) that go through the middle of the volume(s). Other options also provide all the slices along any of the 3 cartesian axes. 
			"""
			
	parser = EMArgumentParser(usage=usage,version=EMANVERSION)	
	
	parser.add_argument("--allx",action='store_true',default=False,help="Get ALL the slices in a volume along the x axis.")
	parser.add_argument("--ally",action='store_true',default=False,help="Get ALL the slices in a volume along the y axis.")
	parser.add_argument("--allz",action='store_true',default=False,help="Get ALL the slices in a volume along the z axis.")
	
	parser.add_argument("--input", type=str, default='', help="""Default=None. This is redundant with supplying input files directly. The file name containing a volume or stack of volumes from which you want to generate slices. You can supply more than one file either by providing an .hdf stack of volumes, or by providing multiple files separated by commas.""")
	
	parser.add_argument("--lowpass",type=str, default='', help="Default=None. A lowpass filtering processor (as in e2proc3d.py) to be applied to each volume prior to alignment. Not applied to aligned particles before averaging.")

	parser.add_argument("--mask",type=str,default='',help="Default=None. Mask processor applied to particles before alignment." )
	
	parser.add_argument("--normproc",type=str,default='',help="""Default=None (not used). Normalization processor applied to particles before computing slices.""")
	parser.add_argument("--nslices", type=int, default=None,help="""default=None (not used). Number of slices to average average around the central sections (not compatible with --allx, --ally, or --allz)""")

	parser.add_argument("--onlymidx",action='store_true',default=False,help="Only extract the middle slice of the volume parallel to the YZ plane.")
	parser.add_argument("--onlymidy",action='store_true',default=False,help="Only extract the middle slice of the volume parallel to the XZ plane.")
	parser.add_argument("--onlymidz",action='store_true',default=False,help="Only extract the middle slice of the volume parallel to the XY plane.")
	
	parser.add_argument("--path",type=str,default=None,help="""Directory to store results in. The default is a numbered series of directories containing the prefix 'slices'; for example, slices_02 will be the directory by default if 'slices_01' already exists.""")
	parser.add_argument("--ppid", type=int, help="Set the PID of the parent process, used for cross platform PPID",default=-1)

	parser.add_argument("--singlestack",action='store_true',default=False,help="""This option will save slices from all particles into a single .hdf stack file if --onlymidz or --onlymidy or --onlymidx are provided, instead of one slice file per volume.""")
	parser.add_argument("--shrink",type=int,default=0,help="""Integer factor to use for shrinking volumes prior to extracting slices.""")
	
	parser.add_argument("--orthogonaloff",action='store_true',default=False,help="""By default, the program will extract three orthogonal slices through the middle of the input volume(s). If this parameter is specified, it will not, and only the other options that are supplied will be valid.""")
	
	#parser.add_argument("--saverotvol",action='store_true',default=False,help="Will save the volume in each rotated position used to generate a projection.")

	parser.add_argument("--verbose", "-v", dest="verbose", action="store", metavar="n",type=int, default=0, help="verbose level [0-9], higher number means higher level of verboseness.")

	#parser.add_argument("--transformsfile",type=str,help="A text files containing lines with one triplet of az,alt,phi values each, representing the transforms to use to project a single volume supplied. ", default='')
	#parser.add_argument("--angles",type=str,help="A single comma or space separated triplet of az,alt,phi values representing the particle rotation to apply before projecting it.", default='')
	#parser.add_argument("--tag",type=str,help="When supplying --angles, tag the output projection with a string provided through --tag", default='')


	(options, args) = parser.parse_args()	
	
	inputs = options.input.split(',')

	if not options.input:
		inputs = []
		for f in args:
			if '.hdf' in f[-4:] or '.HDF' in f[-4:] or '.mrc' in f[-4:] or '.MRC' in f[-4:] or '.mrcs' in f[-5:]:
				inputs.append(f) 
		#try:
		#	print 'argv[1] is', argv[1]
		#	testimg = EMData(argv[1],0,True)
		#	options.input = argv[1]
		#except:		
		#	#print 'argv[1] is', argv[1]
		#	print """\nERROR: Supply volume(s) through --input or as the first argument after the program name. For example, program.py volume.hdf --parameter1=value1 --parameter2=value2... etc."""
		#	sys.exit()
	
	print("\ninputs are {}".format(inputs))

	logger = E2init(sys.argv, options.ppid)

	if options.lowpass: 
		options.lowpass=parsemodopt(options.lowpass)

	if options.mask: 
		options.mask=parsemodopt(options.mask)
			
	if options.normproc: 
		options.normproc=parsemodopt(options.normproc)

	
	'''#
	#Check for sanity of some supplied parameters
	'''#
	if options.onlymidz:
		if options.onlymidx or options.onlymidy:
			print("ERROR: You can only supply one of --onlymidx, --onlymidy or --onlymidz at a time.")
			sys.exit()
	
	if options.onlymidx:
		if options.onlymidy or options.onlymidz:
			print("ERROR: You can only supply one of --onlymidx, --onlymidy or --onlymidz at a time.")
			sys.exit()
			
	if options.onlymidy:
		if options.onlymidx or options.onlymidz:
			print("ERROR: You can only supply one of --onlymidx, --onlymidy or --onlymidz at a time.")
			sys.exit()


	'''#
	#Make a directory where to store the results
	'''#
	from EMAN2_utils import makepath
	options = makepath(options,'sptslices')
	
	
	'''#
	#Generate orthogonal slice regions
	'''#
	for f in inputs:

		try:
			hdf = EMData( f, 0, True )
		except:
			print("ERROR: invalid image file {}".format(f))
			sys.exit(1)

		n = EMUtil.get_image_count( f )
		
		for i in range(n):
			print("\nprocessing particle {}".format(i))
			a = EMData(f,i)
			
			ap = a.copy()
			if options.shrink:
				nyquist = ap['apix_x'] * 2.0
				shnyquist = nyquist * options.shrink
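				#lowpass to the Nyquist frequency of the shrunken data before Fourier resampling, so frequencies beyond the new Nyquist do not alias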
				ap.process_inplace('filter.lowpass.tanh',{'cutoff_freq':old_div(1.0,shnyquist)})
				ap.process_inplace('math.fft.resample',{'n':options.shrink})
				
			nx=ap['nx']
			ny=ap['ny']
			nz=ap['nz']
			
			ptcltag = ''
			slicestag = ''
			
			if n > 1:
				slicestag = '_SLICESmid'
				
				if options.allz or options.ally or options.allx or not options.singlestack:
					ptcltag = '_ptcl' + str(i).zfill( len( str(n) ))
			
			rmid = None
						
			if options.onlymidz or options.onlymidy or options.onlymidx:
				
				if options.onlymidz:	
					rmid = Region(0, 0, old_div(nz,2), nx, ny, 1)
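					#with --nslices, grab a window of that many z slices centered on nz/2 instead of the single central slice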
					if options.nslices:
						rmid = Region(0, 0, old_div(nz,2)-int(ceil(options.nslices/2.0)), nx, ny, options.nslices)
						
					print("The region for the orthogonal z slice is", rmid)
					
					slicestag += 'z'
					if n < 2:
						slicestag = '_SLICEmidz'			
										
				elif options.onlymidx:
					rmid = Region(old_div(nx,2), 0, 0, 1, ny, nz)
					if options.nslices:
						rmid = Region(old_div(nx,2)-int(ceil(options.nslices/2.0)), 0, 0, options.nslices, ny, nz)
						
					print("The region for the orthogonal x slice is", rmid)

					#slicemidx=a.get_clip(rmidx)
					#slicemidx.write_image(options.path + '/' + options.input.replace('.',ptcltag+'_SLICEmidx.'),0)
					
					slicestag += 'x'
					if n < 2:
						slicestag = '_SLICEmidx'
					
					#Tx = Transform({'type':'eman','az':0,'alt':-90,'phi':0})
					#ap.transform( Tx )
		
				elif options.onlymidy:
					rmid = Region(0, old_div(ny,2), 0, nx, 1, nz)
					if options.nslices:
						rmid = Region(0, old_div(ny,2)-int(ceil(options.nslices/2.0)), 0, nx, options.nslices, nz)
					print("The region for the orthogonal y slice is", rmid)
		
					#slicemidy=a.get_clip(rmidy)
					#slicemidy.write_image(options.path + '/' + options.input.replace('.',ptcltag+'_SLICEmidy.'),0)
					
					slicestag += 'y'
					if n < 2:
						slicestag = '_SLICEmidy'
					
					#Ty = Transform({'type':'eman','az':0,'alt':-90,'phi':-90})
					#ap.transform( Ty )
								
				
				slicemid = ap.get_clip( rmid )
				slicemid.set_size( nx, ny, 1)
				slicemid.write_image(options.path + '/' + os.path.basename( f ).replace('.',ptcltag + slicestag + '.'),i)
			
			elif not options.onlymidz and not options.onlymidy and not options.onlymidx:
				app = a.copy()
				if options.shrink:
					nyquist = app['apix_x'] * 2.0
					shnyquist = nyquist * options.shrink
					app.process_inplace('filter.lowpass.tanh',{'cutoff_freq':old_div(1.0,shnyquist)})
					app.process_inplace('math.meanshrink',{'n':options.shrink})
				
				#regions={}
				if not options.orthogonaloff:
					print("Generating orthogonal slices")
					rmidz = Region(0, 0, old_div(nz,2), nx, ny, 1)
					rmidx = Region(old_div(nx,2), 0, 0, 1, ny, nz)
					rmidy = Region(0, old_div(ny,2), 0, nx, 1, nz)
					if options.nslices:
						rmidz = Region(0, 0, old_div(nz,2)-int(ceil(options.nslices/2.0)), nx, ny, options.nslices)
						rmidx = Region(old_div(nx,2)-int(ceil(options.nslices/2.0)), 0, 0, options.nslices, ny, nz)
						rmidy = Region(0, old_div(ny,2)-int(ceil(options.nslices/2.0)), 0, nx, options.nslices, nz)

					#tz = Transform({'type':'eman','az':0,'alt':0,'phi':0})

					regions={0:rmidz,1:rmidx,2:rmidy}
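					#key 0 -> central XY slice (z fixed), 1 -> central YZ slice (x fixed), 2 -> central XZ slice (y fixed); the loop below sets the 2D output size accordingly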
					#k=0
					for kk in regions:
						z=1
						if kk == 0:
							x=nx
							y=ny
					
						elif kk == 1:
							x=nz
							y=ny
						
						elif kk == 2:
							x=nx
							y=nz
					
						#if options.threed2threed or options.threed2twod:
						#d = EMData()
						#d.read_image(options.input, 0, False, regions[tag])
					
						print("I have extracted this orthogonal region", regions[kk])
						slice = app.get_clip(regions[kk])
						slice.set_size(x,y,1)
						print("ptcl tag is", ptcltag)
						print("slice is", slice , type(slice))
						
						outname = options.path + '/' + os.path.basename( f ).replace('.',ptcltag+'_SLICESortho.')
						print("outname is", outname)
						if '.mrc' in outname:
							outname=outname.replace('.mrc','.hdf')
							print("new outname is", outname)
						slice.write_image( outname,kk)
						
						
						print("The mean and index are", slice['mean'],kk)
						#k+=1
					
				if options.allz:
					print("Generating all z slices")
					#Tz = Transform({'type':'eman','az':0,'alt':0,'phi':0})
					
					outname = options.path + '/' + os.path.basename( f ).replace('.',ptcltag+'_SLICESz.')
					os.system('e2proc2d.py ' + f + ' ' + outname + ' --threed2twod')
			
				if options.allx:
					print("Generating all x slices")
					Tx = Transform({'type':'eman','az':0,'alt':-90,'phi':0})
					volx = app.copy()
					volx.transform(Tx)
					rotvolxname = options.path + '/' + os.path.basename( f ).replace('.', ptcltag+'rotx.')
					volx.write_image(rotvolxname,0)
				
					outname = options.path + '/' + os.path.basename( f ).replace('.',ptcltag+'_SLICESx.')
				
					os.system('e2proc2d.py ' + rotvolxname + ' ' + outname + ' --threed2twod')
			
				if options.ally:	
					print("Generating all y slices")
					Ty = Transform({'type':'eman','az':0,'alt':-90,'phi':-90})
					voly = app.copy()
					voly.transform(Ty)
					rotvolyname = options.path + '/' + os.path.basename( f ).replace('.', ptcltag+'roty.')
					voly.write_image(rotvolyname,0)
				
					outname = options.path + '/' + os.path.basename( f ).replace('.',ptcltag+'_SLICESy.')
				
					os.system('e2proc2d.py ' + rotvolyname + ' ' + outname + ' --threed2twod')
	
	E2end(logger)
	
	return()	
Example #8
0
def main():
    progname = os.path.basename(sys.argv[0])
    usage = """prog <output> [options]
	Program to build an initial subtomogram average by averaging pairs from the largest subset
	in --input that is a power of 2. For example, if you supply an input stack with 100 subtomograms,
	this program will build an initial reference using 64, since 64 is the largest power of 2 contained in 100.
	In the first iteration, particle 1 will be averaged with 2, 3 with 4, 5 with 6... etc.
	32 new averages (each an average of 2 subtomograms) will be used for the second iteration.
	Again, 1 will be averaged with 2, 3 with 4, etc... yielding 16 new averages.
	The algorithm continues until the entire subset (64) has been merged into 1 average.
	
	This program imports 'preprocfunc' from e2spt_preproc.py and 'alignment' from e2spt_classaverage.py
	
	--mask=mask.sharp:outer_radius=<safe radius>
	--preprocess=filter.lowpass.gauss:cutoff_freq=<1/resolution in A>
	"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_header(name="sptbtheader",
                      help="""Options below this label are specific to 
		sptbinarytree""",
                      title="### sptbinarytree options ###",
                      row=6,
                      col=0,
                      rowspan=1,
                      colspan=3,
                      mode="align")

    parser.add_header(
        name="caheader",
        help="""Options below this label are specific to sptclassaverage""",
        title="### sptclassaverage options ###",
        row=3,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--path",
        type=str,
        default='spt_bt',
        help=
        """Default=spt. Directory to store results in. The default is a numbered series of directories containing the prefix 'spt'; for example, spt_02 will be the directory by default if 'spt_01' already exists."""
    )

    parser.add_argument(
        "--input",
        type=str,
        default='',
        help=
        """Default=None. The name of the input volume stack. MUST be HDF since volume stack support is required.""",
        guitype='filebox',
        browser='EMSubTomosTable(withmodal=True,multiselect=False)',
        row=0,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--npeakstorefine",
        type=int,
        help=
        """Default=1. The number of best coarse alignments to refine in search of the best final alignment. Default=1.""",
        default=4,
        guitype='intbox',
        row=9,
        col=0,
        rowspan=1,
        colspan=1,
        nosharedb=True,
        mode='alignment,breaksym[1]')

    parser.add_argument(
        "--parallel",
        default="thread:1",
        help=
        """default=thread:1. Parallelism. See http://blake.bcm.edu/emanwiki/EMAN2/Parallel""",
        guitype='strbox',
        row=19,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--ppid",
        type=int,
        help=
        """Default=-1. Set the PID of the parent process, used for cross platform PPID""",
        default=-1)

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        """Default=0. Verbose level [0-9], higner number means higher level of verboseness; 10-11 will trigger many messages that might make little sense since this level of verboseness corresponds to 'debugging mode'"""
    )

    #parser.add_argument("--resume",type=str,default='',help="""(Not working currently). tomo_fxorms.json file that contains alignment information for the particles in the set. If the information is incomplete (i.e., there are less elements in the file than particles in the stack), on the first iteration the program will complete the file by working ONLY on particle indexes that are missing. For subsequent iterations, all the particles will be used.""")

    parser.add_argument(
        "--plots",
        action='store_true',
        default=False,
        help=
        """Default=False. Turn this option on to generatea plot of the ccc scores during each iteration. Running on a cluster or via ssh remotely might not support plotting."""
    )

    parser.add_argument(
        "--subset",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Refine only this substet of particles from the stack provided through --input"""
    )

    parser.add_argument(
        "--preavgproc1",
        type=str,
        default='',
        help=
        """Default=None. A processor (see 'e2help.py processors -v 10' at the command line) to be applied to the raw particle after alignment but before averaging (for example, a threshold to exclude extreme values, or a highphass filter if you have phaseplate data.)"""
    )

    parser.add_argument(
        "--preavgproc2",
        type=str,
        default='',
        help=
        """Default=None. A processor (see 'e2help.py processors -v 10' at the command line) to be applied to the raw particle after alignment but before averaging (for example, a threshold to exclude extreme values, or a highphass filter if you have phaseplate data.)"""
    )

    parser.add_argument(
        "--weighbytiltaxis",
        type=str,
        default='',
        help=
        """Default=None. A,B, where A is an integer number and B a decimal. A represents the location of the tilt axis in the tomogram in pixels (eg.g, for a 4096x4096xZ tomogram, this value should be 2048), and B is the weight of the particles furthest from the tomogram. For example, --weighbytiltaxis=2048,0.5 means that praticles at the tilt axis (with an x coordinate of 2048) will have a weight of 1.0 during averaging, while the distance in the x coordinates of particles not-on the tilt axis will be used to weigh their contribution to the average, with particles at the edge(0+radius or 4096-radius) weighing 0.5, as specified by the value provided for B."""
    )
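    # Minimal sketch (illustrative only, never called) of the --weighbytiltaxis
    # weighting described above, assuming a linear falloff from 1.0 at the tilt
    # axis to the supplied edge weight B at the tomogram edge; the exact falloff
    # used internally is not shown here.
    def _tiltaxis_weight_sketch(x, tiltaxis_x, edge_weight):
        d = abs(x - tiltaxis_x) / float(tiltaxis_x)  # 0 at the axis, 1 at the edge
        return 1.0 - d * (1.0 - edge_weight)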

    parser.add_argument(
        "--weighbyscore",
        action='store_true',
        default=False,
        help=
        """Default=False. This option will weigh the contribution of each subtomogram to the average by score/bestscore."""
    )

    #parser.add_argument("--align",type=str,default="rotate_translate_3d:search=8:delta=12:dphi=12",help="""This is the aligner used to align particles to the previous class average. Default is rotate_translate_3d:search=8:delta=12:dphi=12, specify 'None' (with capital N) to disable.""", returnNone=True,guitype='comboparambox', choicelist='re_filter_list(dump_aligners_list(),\'3d\')', row=12, col=0, rowspan=1, colspan=3, nosharedb=True, mode="alignment,breaksym['rotate_symmetry_3d']")
    parser.add_argument(
        "--align",
        type=str,
        default="rotate_translate_3d_tree",
        help=
        """Default is rotate_translate_3d_tree. See e2help.py aligners to see the list of parameters the aligner takes (for example, if there's symmetry, supply --align rotate_translate_3d_tree:sym=icos). This is the aligner used to align particles to the previous class average. Specify 'None' (with capital N) to disable."""
    )

    parser.add_argument(
        "--aligncmp",
        type=str,
        default="ccc.tomo.thresh",
        help=
        """Default=ccc.tomo.thresh. The comparator used for the --align aligner. Do not specify unless you need to use anotherspecific aligner.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_cmps_list(),\'tomo\')',
        row=13,
        col=0,
        rowspan=1,
        colspan=3,
        mode="alignment,breaksym")

    #parser.add_argument("--output", type=str, default='avg.hdf', help="""Default=avg.hdf. The name of the output class-average stack. MUST be HDF since volume stack support is required.""", guitype='strbox', row=2, col=0, rowspan=1, colspan=3, mode='alignment,breaksym')

    #parser.add_argument("--classmx", type=str, default='', help="""Default=None. The name of the classification matrix specifying how particles in 'input' should be grouped. If omitted, all particles will be averaged.""")

    #parser.add_argument("--ref", type=str, default='', help="""Default=None. Reference image(s). Used as an initial alignment reference and for final orientation adjustment if present. This is typically the projections that were used for classification.""", guitype='filebox', browser='EMBrowserWidget(withmodal=True,multiselect=True)', filecheck=False, row=1, col=0, rowspan=1, colspan=3, mode='alignment')

    #parser.add_argument("--refpreprocess",action="store_true",default=False,help="""Default=False. This will preprocess the reference identically to the particles. It is off by default, but it is internally turned on when no reference is supplied.""")

    #parser.add_argument("--resultmx",type=str,default=None,help="""Default=Npone. Specify an output image to store the result matrix. This is in the same format as the classification matrix. http://blake.bcm.edu/emanwiki/EMAN2/ClassmxFiles""")

    #parser.add_argument("--refinemultireftag", type=str, default='', help="""Default=''. DO NOT USE THIS PARAMETER. It is passed on from e2spt_refinemulti.py if needed.""")
    '''
	ADVANCED parameters
	
	'''
    parser.add_argument(
        "--averager",
        type=str,
        default="mean.tomo",
        help=
        """Default=mean.tomo. The type of averager used to produce the class average. Default=mean.tomo."""
    )
    '''
	PRE-FFT processing parameters
	'''

    #parser.add_argument("--nopreprocprefft",action="store_true",default=False,help="""Turns off all preprocessing that happens only once before alignment (--normproc, --mask, --maskfile, --clip, --threshold; i.e., all preprocessing excepting filters --highpass, --lowpass, --preprocess, and --shrink.""")

    parser.add_argument(
        "--shrink",
        type=int,
        default=1,
        help=
        """Default=1 (no shrinking). Optionally shrink the input volumes by an integer amount for coarse alignment.""",
        guitype='shrinkbox',
        row=5,
        col=1,
        rowspan=1,
        colspan=1,
        mode='alignment,breaksym')

    parser.add_argument(
        "--shrinkfine",
        type=int,
        default=1,
        help=
        """Default=1 (no shrinking). Optionally shrink the input volumes by an integer amount for refine alignment.""",
        guitype='intbox',
        row=5,
        col=2,
        rowspan=1,
        colspan=1,
        mode='alignment')

    parser.add_argument(
        "--threshold",
        type=str,
        default='',
        help=
        """Default=None. A threshold applied to the subvolumes after normalization. For example, --threshold=threshold.belowtozero:minval=0 makes all negative pixels equal 0, so that they do not contribute to the correlation score.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'filter\')',
        row=10,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--mask",
        type=str,
        default='',
        help=
        """Default=None. Masking processor applied to particles before alignment. IF using --clip, make sure to express outer mask radii as negative pixels from the edge.""",
        returnNone=True,
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'mask\')',
        row=11,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--maskfile",
        type=str,
        default='',
        help=
        """Default=None. Mask file (3D IMAGE) applied to particles before alignment. Must be in HDF format. Default is None."""
    )

    parser.add_argument(
        "--normproc",
        type=str,
        default='',
        help=
        """Default=None (see 'e2help.py processors -v 10' at the command line). Normalization processor applied to particles before alignment. If normalize.mask is used, results of the mask option will be passed in automatically. If you want to turn this option off specify \'None\'"""
    )

    parser.add_argument(
        "--clip",
        type=int,
        default=0,
        help=
        """Default=0 (which means it's not used). Boxsize to clip particles as part of preprocessing to speed up alignment. For example, the boxsize of the particles might be 100 pixels, but the particles are only 50 pixels in diameter. Aliasing effects are not always as deleterious for all specimens, and sometimes 2x padding isn't necessary; still, there are some benefits from 'oversampling' the data during averaging; so you might still want an average of size 2x, but perhaps particles in a box of 1.5x are sufficiently good for alignment. In this case, you would supply --clip=75"""
    )
    '''
	POST-FFT filtering parameters
	'''
    parser.add_argument(
        "--preprocess",
        type=str,
        default='',
        help=
        """Any processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to COARSE alignment. Not applied to aligned particles before averaging.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'filter\')',
        row=10,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--preprocessfine",
        type=str,
        default='',
        help=
        """Any processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to FINE alignment. Not applied to aligned particles before averaging."""
    )

    parser.add_argument(
        "--lowpass",
        type=str,
        default='',
        help=
        """Default=None. A lowpass filtering processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to COARSE alignment. Not applied to aligned particles before averaging.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'filter\')',
        row=17,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--lowpassfine",
        type=str,
        default='',
        help=
        """Default=None. A lowpass filtering processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to FINE alignment. Not applied to aligned particles before averaging."""
    )

    parser.add_argument(
        "--highpass",
        type=str,
        default='',
        help=
        """Default=None. A highpass filtering processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to COARSE alignment. Not applied to aligned particles before averaging.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'filter\')',
        row=18,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--highpassfine",
        type=str,
        default='',
        help=
        """Default=None. A highpass filtering processor (see 'e2help.py processors -v 10' at the command line) to be applied to each volume prior to FINE alignment. Not applied to aligned particles before averaging."""
    )

    parser.add_argument(
        "--matchimgs",
        action='store_true',
        default=False,
        help=
        """Default=False. Applies filter.matchto to one image so that it matches the other's spectral profile during preprocessing for pair-wise alignment purposes."""
    )

    parser.add_argument(
        "--filterbyfsc",
        action='store_true',
        default=False,
        help=
        """Default=False. If on, this parameter will use dynamic FSC filtering. --lowpass will be used to build initial references if no --ref supplied, then, the FSC between the even and odd initial references will be used to filter the data during preprocessing. If --ref is supplied, --lowpass will be used during the first iteration to align the particles against the reference. Thereafter, the FSC between the most current particle average and the original reference (--ref) will be used in the next iteration."""
    )
    '''
	OTHER ADVANCED parameters
	'''
    parser.add_argument(
        "--radius",
        type=float,
        default=0,
        help=
        """Default=0 (which means it's not used by default). Hydrodynamic radius of the particle in Angstroms. This will be used to automatically calculate the angular steps to use in search of the best alignment. Make sure the apix is correct on the particles' headers, sine the radius will be converted from Angstroms to pixels. Then, the fine angular step is equal to 360/(2*pi*radius), and the coarse angular step 4 times that."""
    )
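    # Minimal sketch (illustrative only, never called) of the angular steps
    # described in the --radius help string above: the radius is converted from
    # Angstroms to pixels using the apix of the data, the fine angular step is
    # 360/(2*pi*radius_in_pixels), and the coarse step is 4 times the fine step.
    def _angular_steps_sketch(radius_angstroms, apix):
        radius_pixels = radius_angstroms / apix
        fine_step = 360.0 / (2.0 * 3.141592653589793 * radius_pixels)
        return fine_step, 4.0 * fine_step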

    parser.add_argument(
        "--precision",
        type=float,
        default=1.0,
        help=
        """Default=1.0. Precision in pixels to use when figuring out alignment parameters automatically using --radius. Precision would be the number of pixels that the the edge of the specimen is moved (rotationally) during the finest sampling, --falign. If precision is 1, then the precision of alignment will be that of the sampling (apix of your images) times the --shrinkfine factor specified."""
    )

    parser.add_argument(
        "--search",
        type=int,
        default=8,
        help=
        """"Default=8. During COARSE alignment translational search in X, Y and Z, in pixels. Default=8. This WILL overwrite any search: provided through --align, EXCEPT if you provide --search=8, which is the default. In general, just avoid providing search twice (through here and through the aligner, --align). If you do, just be careful to make them consistent to minimize misinterpretation and error."""
    )

    parser.add_argument(
        "--searchfine",
        type=int,
        default=2,
        help=
        """"Default=2. During FINE alignment translational search in X, Y and Z, in pixels. Default=2. This WILL overwrite any search: provided through --falign, EXCEPT if you provide --searchfine=2, which is the default. In general, just avoid providing search twice (through here and through the fine aligner --falign). If you do, just be careful to make them consistent to minimize misinterpretation and error."""
    )

    #parser.add_argument("--donotaverage",action="store_true", help="""If e2spt_refinemulti.py is calling e2spt_classaverage.py, the latter need not average any particles, but rather only yield the alignment results.""", default=False)

    parser.add_argument(
        "--iterstop",
        type=int,
        default=0,
        help=
        """Default=0. (Not used). The program is called to convergence by default (all particles merge into one final average). To stop at an intermediate iteration, provide this parameter. For example, --iterstop=1, will only allow the algorithm to complete 1 iteration; --iterstop=2 will allow it to go through 2, etc."""
    )

    parser.add_argument(
        "--savesteps",
        action="store_true",
        default=False,
        help=
        """Default=False. If set, will save the average after each iteration to class_#.hdf. Each class in a separate file. Appends to existing files.""",
        guitype='boolbox',
        row=4,
        col=0,
        rowspan=1,
        colspan=1,
        mode='alignment,breaksym')

    parser.add_argument(
        "--saveali",
        action="store_true",
        default=False,
        help=
        """Default=False. If set, will save the aligned particle volumes in class_ptcl.hdf. Overwrites existing file.""",
        guitype='boolbox',
        row=4,
        col=1,
        rowspan=1,
        colspan=1,
        mode='alignment,breaksym')

    parser.add_argument(
        "--saveallalign",
        action="store_true",
        default=False,
        help=
        """Default=False. If set, will save the alignment parameters after each iteration""",
        guitype='boolbox',
        row=4,
        col=2,
        rowspan=1,
        colspan=1,
        mode='alignment,breaksym')

    parser.add_argument(
        "--sym",
        dest="sym",
        default='',
        help=
        """Default=None (equivalent to c1). Symmetry to impose -choices are: c<n>, d<n>, h<n>, tet, oct, icos""",
        guitype='symbox',
        row=9,
        col=1,
        rowspan=1,
        colspan=2,
        mode='alignment,breaksym')

    parser.add_argument(
        "--postprocess",
        type=str,
        default='',
        help=
        """A processor to be applied to the FINAL volume after averaging the raw volumes in their FINAL orientations, after all iterations are done.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_processors_list(),\'filter\')',
        row=16,
        col=0,
        rowspan=1,
        colspan=3,
        mode='alignment,breaksym')

    parser.add_argument(
        "--procfinelikecoarse",
        action='store_true',
        default=False,
        help=
        """If you supply this parameters, particles for fine alignment will be preprocessed identically to particles for coarse alignment by default. If you supply this, but want specific parameters for preprocessing particles for also supply: fine alignment, nd supply fine alignment parameters, such as --lowpassfine, --highpassfine, etc; to preprocess the particles for FINE alignment differently than for COARSE alignment."""
    )

    parser.add_argument(
        "--falign",
        type=str,
        default=None,
        help=
        """Default=None. This is the second stage aligner used to fine-tune the first alignment.""",
        returnNone=True,
        guitype='comboparambox',
        choicelist='re_filter_list(dump_aligners_list(),\'refine.*3d\')',
        row=14,
        col=0,
        rowspan=1,
        colspan=3,
        nosharedb=True,
        mode='alignment,breaksym[None]')

    parser.add_argument(
        "--faligncmp",
        type=str,
        default="ccc.tomo.thresh",
        help=
        """Default=ccc.tomo.thresh. The comparator used by the second stage aligner.""",
        guitype='comboparambox',
        choicelist='re_filter_list(dump_cmps_list(),\'tomo\')',
        row=15,
        col=0,
        rowspan=1,
        colspan=3,
        mode="alignment,breaksym")

    #parser.add_argument("--nopreprocprefft",action="store_true",default=False,help="""Turns off all preprocessing that happens only once before alignment (--normproc, --mask, --maskfile, --clip, --threshold; i.e., all preprocessing excepting filters --highpass, --lowpass, --preprocess, and --shrink.""")

    #parser.add_argument("--keep",type=float,default=1.0,help="""Default=1.0 (all particles kept). The fraction of particles to keep in each class.""", guitype='floatbox', row=6, col=0, rowspan=1, colspan=1, mode='alignment,breaksym')

    #parser.add_argument("--keepsig", action="store_true", default=False,help="""Default=False. Causes the keep argument to be interpreted in standard deviations.""", guitype='boolbox', row=6, col=1, rowspan=1, colspan=1, mode='alignment,breaksym')

    #parser.add_argument("--inixforms",type=str,default="",help="""Default=None. .json file containing a dict of transforms to apply to 'pre-align' the particles.""", guitype='dirbox', dirbasename='spt_|sptsym_', row=7, col=0,rowspan=1, colspan=2, nosharedb=True, mode='breaksym')

    parser.add_argument(
        "--breaksym",
        action="store_true",
        default=False,
        help=
        """Default=False. Break symmetry. Do not apply symmetrization after averaging, even if searching the asymmetric unit provided through --sym only for alignment. Default=False""",
        guitype='boolbox',
        row=7,
        col=2,
        rowspan=1,
        colspan=1,
        nosharedb=True,
        mode=',breaksym[True]')

    #parser.add_argument("--groups",type=int,default=0,help="""Default=0 (not used; data not split). This parameter will split the data into a user defined number of groups. For purposes of gold-standard FSC computation later, select --group=2.""")

    parser.add_argument(
        "--randomizewedge",
        action="store_true",
        default=False,
        help=
        """Default=False. This parameter is EXPERIMENTAL. It randomizes the position of the particles BEFORE alignment, to minimize missing wedge bias and artifacts during symmetric alignment where only a fraction of space is scanned"""
    )

    #parser.add_argument("--savepreproc",action="store_true",  default=False,help="""Default=False. Will save stacks of preprocessed particles (one for coarse alignment and one for fine alignment if preprocessing options are different).""")

    parser.add_argument(
        "--autocenter",
        type=str,
        default='',
        help=
        """Default=None. Autocenters each averaged pair during initial average generation with --btref and --hacref. Will also autocenter the average of all particles after each iteration of iterative refinement. Options are --autocenter=xform.centerofmass (self descriptive), or --autocenter=xform.centeracf, which applies auto-convolution on the average."""
    )
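    # Illustrative center-of-mass recentering, roughly what xform.centerofmass does
    # (assumption: a NumPy sketch, not EMAN2's implementation; xform.centeracf would
    # instead center on the peak of the autocorrelation):
    #   import numpy as np
    #   def recenter(vol):
    #       grids = np.indices(vol.shape)
    #       com = [(g * vol).sum() / vol.sum() for g in grids]
    #       shift = [int(round(d / 2.0 - c)) for d, c in zip(vol.shape, com)]
    #       return np.roll(vol, shift, axis=(0, 1, 2))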

    parser.add_argument(
        "--autocentermask",
        type=str,
        default='',
        help=
        """Default=None. Masking processor to apply before autocentering. See 'e2help.py processors -v 10' at the command line."""
    )

    parser.add_argument(
        "--autocenterpreprocess",
        action='store_true',
        default=False,
        help=
        """Default=False. This will apply a highpass filter at a frequency of half the box size times the apix, shrink by 2, and apply a low pass filter at half nyquist frequency to any computed average for autocentering purposes if --autocenter is provided. Default=False."""
    )

    parser.add_argument(
        "--tweak",
        action='store_true',
        default=False,
        help=
        """WARNING: BUGGY. This will perform a final alignment with no downsampling [without using --shrink or --shrinkfine] if --shrinkfine > 1."""
    )
    '''
	BT SPECIFIC PARAMETERS
	'''

    parser.add_argument("--nseedlimit",
                        type=int,
                        default=0,
                        help="""Maximum number of particles
		to use. For example, if you supply a stack with 150 subtomograms, the program will
		automatically select 128 as the limit to use because it's the largest power of 2 that is
		smaller than 150. But if you provide, say --nseedlimit=100, then the number of particles
		used will be 64, because it's the largest power of 2 that is still smaller than 100."""
                        )

    (options, args) = parser.parse_args()
    (optionsUnparsed, args) = parser.parse_args()

    options.nopreprocprefft = False

    if options.shrink < options.shrinkfine:
        options.shrink = options.shrinkfine
        print(
            "\n(e2spt_binarytree)(main) it makes no sense for shrinkfine to be larger than shrink; therefore, shrink will be made to match shrinkfine"
        )

    options = checkinput(options)

    from e2spt_classaverage import checksaneimagesize
    checksaneimagesize(options, options.input)
    '''
	Make the directory where the results will be stored
	'''
    #from e2spt_classaverage import sptmakepath
    #options = sptmakepath(options,'spt_bt')
    from EMAN2_utils import makepath
    options = makepath(options, 'spt_bt')
    optionsUnparsed.path = options.path

    rootpath = os.getcwd()
    if rootpath not in options.path:
        options.path = rootpath + '/' + options.path

    if not options.input:
        parser.print_help()
        exit(0)
    elif options.subset:
        subsetStack = options.path + '/subset' + str(options.subset).zfill(
            len(str(options.subset))) + '.hdf'
        print("\nSubset to be written to", subsetStack)

        subsetcmd = 'e2proc3d.py ' + options.input + ' ' + subsetStack + ' --first=0 --last=' + str(
            options.subset - 1)
        print("Subset cmd is", subsetcmd)

        p = subprocess.Popen(subsetcmd,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        text = p.communicate()
        p.stdout.close()

        options.input = subsetStack

    from e2spt_classaverage import sptParseAligner
    options = sptParseAligner(options)
    '''
	If --radius of the particle is provided, we calculate the optimal alignment steps for 
	coarse and fine alignment rounds using --shrink and --shrinkfine options and apix info
	'''

    if options.radius:
        from e2spt_classaverage import calcAliStep
        options = calcAliStep(options)
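    # Worked example of the rule stated in the --radius help (illustrative only; the
    # actual numbers come from calcAliStep): with --radius=150 Angstroms and an apix
    # of 1.5 A/pix, the radius is 100 pixels, so the fine angular step is
    # 360/(2*pi*100) ~= 0.57 degrees and the coarse step ~= 4 * 0.57 = 2.3 degrees.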
    '''
	Parse parameters such that "None" or "none" are adequately interpreted to turn off an option
	'''
    options = sptOptionsParser(options)
    print("\nAFTER PARSING options type={}".format(type(options)))
    print("\nAFTER PARSING optionsUnparsed type={}".format(
        type(optionsUnparsed)))

    writeParameters(options, 'e2spt_binarytree.py', 'spt_bt')

    hdr = EMData(options.input, 0, True)
    nx = hdr["nx"]
    ny = hdr["ny"]
    nz = hdr["nz"]
    if nx != ny or ny != nz:
        print("ERROR, input volumes are not cubes")
        sys.exit(1)

    logger = E2init(sys.argv, options.ppid)
    '''
	Initialize parallelism if being used; it will be turned on automatically by detectThreads unless --parallel=None
	'''
    options = detectThreads(options)
    if options.parallel:
        print("\n\n(e2spt_classaverage.py) INITIALIZING PARALLELISM!")
        print("\n\n")

        from EMAN2PAR import EMTaskCustomer
        etc = EMTaskCustomer(options.parallel)

        pclist = [options.input]

        etc.precache(pclist)

    else:
        etc = ''

    options.raw = options.input
    """
	if 'tree' in options.align:
		options.falign = None
		options.mask = None
		options.lowpass = None
		options.highpass = None
		options.normproc = None
		options.lowpassfine = None
		options.highpassfine = None
		options.preprocess = None
		options.preprocessfine = None

	else:
		from e2spt_classaverage import cmdpreproc
		cmdpreproc( options.input, options, False )
	"""

    nptcl = EMUtil.get_image_count(options.input)
    if nptcl < 2:
        print("ERROR: at least 2 particles required in input stack")
        sys.exit(1)

    ptclnums = list(range(nptcl))
    nptclForRef = len(ptclnums)

    nseed = 2**int(
        floor(log(len(ptclnums), 2))
    )  # we stick with powers of 2 for this to make the tree easier to collapse

    if options.nseedlimit:
        nseed = 2**int(floor(log(options.nseedlimit, 2)))
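    # e.g., 150 particles -> nseed = 2**7 = 128; with --nseedlimit=100 -> nseed = 2**6 = 64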

    #binaryTreeRef(options,nptclForRef,nseed,-1,etc)

    binaryTreeRef(options, optionsUnparsed, nptclForRef, nseed, etc)

    print("Will end logger")
    E2end(logger)

    print("logger ended")
    sys.stdout.flush()

    return
Example #9
def main():

	usage = """Program to generate a cylindrical mask. It can also create a cylindrical shell if
			you specify the --height_inner and --radius_inner parameters, in addition to the
			required outer --height and --radius.
			"""
			
	parser = EMArgumentParser(usage=usage,version=EMANVERSION)	
	
	parser.add_argument("--axes",type=str,default='z',help="""Axes along which the mask will be oriented. Default=z. You can supply more than one, separated with commas. For example: --axes=x,y,z.""")

	parser.add_argument("--boxsize", type=int, default=0,help="""Size of the boxsize where the cylindrical mask will live.""")
			
	parser.add_argument("--height", type=int, default=0,help="""Height of the cylindrical mask.""")

	parser.add_argument("--heightinner", type=int, default=0,help="""Height for the inner boundary if creating a cylindrical shell mask.""")

	parser.add_argument("--path",type=str,default=None,help="""Directory to store results in. The default is a numbered series of directories containing the prefix 'cylmask'; for example, cylmask_02 will be the directory by default if 'cylmask_01' already exists.""")

	parser.add_argument("--ppid", type=int, help="""Set the PID of the parent process, used for cross platform PPID""",default=-1)

	parser.add_argument("--radius",type=int,default=0,help="""Radius of the cylindrical mask. Default=boxsize/2.""")
	
	parser.add_argument("--radiusinner",type=int,default=0,help="""Radius for the inner boundary if creating a cylindrical shell mask. Default=boxsize/2.""")

	parser.add_argument("--rotation",type=str,default='',help="""Three comma separated Euler angles az,alt,phi, to rotate the masks by before writing them out.""")
	
	parser.add_argument("--rotavg",action='store_true',default=False,help="""This will compute the rotational average of the mask(s) in addition to writing the cylindrical mask itself out.""")
	
	parser.add_argument("--translation",type=str,default='',help="""Three comma separated coordinates x,y,z, to translate the masks by before writing them out.""")
	
	parser.add_argument("--verbose", "-v", help="""verbose level [0-9], higner number means higher level of verboseness. Default=0.""",dest="verbose", action="store", metavar="n",type=int, default=0)

	(options, args) = parser.parse_args()	
	
	
	if not options.boxsize or options.boxsize < 5:
		print("You must provide --boxsize > 4")
		sys.exit(1)
	
	if options.heightinner and not options.radiusinner:
		print("If specifying --heightinner, you must also specify --radiusinner.")
		sys.exit(1)	
	if options.radiusinner and not options.heightinner:
		print("If specifying --radiusinner, you must also specify --heightinner.")
		sys.exit(1)	
	
	from EMAN2_utils import makepath
	options = makepath( options, 'cylmask')
	
	logger = E2init(sys.argv, options.ppid)
	
	axes = options.axes.split(',')
	
	print("\n(e2cylinder)(main) after splitting, axes=", axes)
	
	#axisdict ={}
	#for axis in axes:
	#	axisdict.update( { 'z } )
	ts = {}
	
	mask = cylinder(options)
	
	rt=Transform()
	if options.rotation or options.translation:
		az=alt=phi=xc=yc=zc=0
		if options.rotation:
			angles=options.rotation.split(',')
			az=float(angles[0])
			alt=float(angles[1])
			phi=float(angles[2])
		if options.translation:
			trans=options.translation.split(',')
			xc=float(trans[0])
			yc=float(trans[1])
			zc=float(trans[2])
		rt=Transform({'type':'eman','az':az,'alt':alt,'phi':phi,'tx':xc,'ty':yc,'tz':zc})
		mask.transform( rt )
		
	
	for axis in axes:
		print("axis is", axis)
		if 'z' in axis or 'Z' in axis:
			tz = Transform({'type':'eman','az':0,'alt':0,'phi':0})
			print("added z transform")
			ts.update({'z':tz})
		if 'x' in axis or 'X' in axis:
			
			tx = Transform({'type':'eman','az':0,'alt':90,'phi':90})
			ts.update({'x':tx})
			print("added x transform")
		
		if 'y' in axis or 'Y' in axis:
			ty = Transform({'type':'eman','az':0,'alt':90,'phi':0})
			ts.update({'y':ty})
			print("added y transform")
	
	masknamebase ='cylmask.hdf'

	if options.path:
		masknamebase = options.path + '/cylmask.hdf'
	
	for a in ts:
		maskt = mask.copy()
		tag = 'R'+str( options.radius ).zfill( len( str( options.radius))) + 'H'+str( options.height ).zfill( len( str( options.radius)))
		if options.radiusinner:
			tag+='RI'+str( options.radiusinner ).zfill( len( str( options.radius))) 
		if options.heightinner:
			tag+='HI'+str( options.heightinner ).zfill( len( str( options.radius)))
		
		if a == 'z':
			maskz=mask.copy()
			maskname=masknamebase.replace('.hdf','_Z_') + tag + '.hdf'
			maskz.transform( ts[a] )
			maskz.write_image( maskname, 0 )
			
			if options.rotavg:
				rotavgname = maskname.replace('.','_ROTAVG.')
				maskzrotavg = maskz.rotavg_i()
				maskzrotavg.write_image( rotavgname , 0 )
				
		if a == 'x':
			maskx=mask.copy()
			maskx.transform( ts[a] )
			maskname=masknamebase.replace('.hdf','_X_') + tag + '.hdf'
			maskx.write_image( maskname, 0 )
			
			if options.rotavg:
				rotavgname = maskname.replace('.','_ROTAVG.')
				maskxrotavg = maskx.rotavg_i()
				maskxrotavg.write_image( rotavgname , 0 )
		
		if a == 'y':
			masky=mask.copy()
			masky.transform( ts[a] )
			maskname=masknamebase.replace('.hdf','_Y_') + tag + '.hdf'	
			masky.write_image( maskname, 0 )

			if options.rotavg:
				rotavgname = maskname.replace('.','_ROTAVG.')
				maskyrotavg = masky.rotavg_i()
		
				maskyrotavg.write_image( rotavgname , 0 )
	
	E2end(logger)
	return
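# A minimal NumPy sketch of the kind of cylindrical shell mask this example builds
# (illustrative only; the real mask comes from the cylinder() helper and EMAN2
# processors, and the function below is a hypothetical stand-in):
import numpy as np

def cylinder_shell_mask(boxsize, radius, height, radiusinner=0, heightinner=0):
    """Binary cylindrical (shell) mask oriented along z, centered in the box."""
    c = (boxsize - 1) / 2.0
    z, y, x = np.indices((boxsize, boxsize, boxsize)) - c
    r = np.sqrt(x * x + y * y)
    mask = (r <= radius) & (np.abs(z) <= height / 2.0)
    if radiusinner and heightinner:
        # carve out the inner cylinder to leave a shell
        mask &= ~((r <= radiusinner) & (np.abs(z) <= heightinner / 2.0))
    return mask.astype(np.float32)

# e.g., a 64-pixel box, outer radius 20 and height 40, hollowed by an inner
# cylinder of radius 10 and height 30
m = cylinder_shell_mask(64, 20, 40, 10, 30)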
Example #10
def main():
    #import pylab
    #import matplotlib.mlab as mlab
    import matplotlib.pyplot as plt

    progname = os.path.basename(sys.argv[0])
    usage = """Produces mean intensity histograms of stack of sub-volumes"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)
    parser.add_argument(
        "--bins",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Number of bins for histogram. If not provided, the optimal bin number will be automatically calculated based on bin-width, computed using Scott's normal reference rule, width = (3.5*std)/cuberoot(n), where 'std' is the standard deviation of the mean intensity distribution of population and n is the number of mean intensity values considered (this is affected by --removesigma). Then, bins will be nbins = (max(intensities) - min(intensities)) / width."""
    )
    parser.add_argument(
        "--clip",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Boxsize to clip particles to before computing mean and standard deviation values for each image. (This can act as a mask, as you'd want to clip the boxes to a smaller size than their current, original size, excluding neighboring particles and background pixels/voxels)."""
    )
    parser.add_argument(
        "--highpass",
        type=str,
        default='',
        help=
        """Default=None. A highpass filtering processor to be applied before computing mean and standard deviation values for each image. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."""
    )
    parser.add_argument(
        "--input",
        type=str,
        default='',
        help=
        """Default=None. Comma-separated stacks of images whose mean intensity distribution you want to plot."""
    )
    parser.add_argument(
        "--lowpass",
        type=str,
        default='',
        help=
        """Default=None. A lowpass filtering processor to be applied before computing mean and standard deviation values for each image. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."""
    )
    parser.add_argument(
        "--mask",
        type=str,
        default="mask.sharp:outer_radius=-2",
        help=
        "Default=mask.sharp:outer_radius=-2. Mask processor applied to the particles before alignment. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."
    )
    parser.add_argument(
        "--maskfile",
        type=str,
        default='',
        help=
        """Default=None. An image file containing an additional mask to apply besides --mask."""
    )
    parser.add_argument(
        "--normalizeplot",
        action="store_true",
        default=False,
        help=
        """Default=False. This will normalize the intensity values of the distribution to be between 0 and 1"""
    )
    parser.add_argument(
        "--normproc",
        type=str,
        default="normalize.edgemean",
        help=
        """Default=normalize.edgemean. Normalization processor applied to particles before computing mean and standard deviation values for each iamge. If normalize.mask is used, --mask will be passed in automatically. If you want to turn normalization off specify \'None\'. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."""
    )
    parser.add_argument(
        "--path",
        type=str,
        default='',
        help=
        "Directory to store results in. The default is a numbered series of directories containing the prefix 'sptsim'; for example, sptsim_02 will be the directory by default if 'sptsim_01' already exists."
    )
    parser.add_argument(
        "--ppid",
        type=int,
        help=
        "Default=1. Set the PID of the parent process, used for cross platform PPID",
        default=-1)
    parser.add_argument(
        "--preprocess",
        type=str,
        default='',
        help=
        """Any processor to be applied to each image before computing mean and standard deviation values. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."""
    )
    parser.add_argument(
        "--removesigma",
        type=int,
        default=0,
        help=
        """Default=0. Provide a value for the number of standard deviations away from the mean to consider values to exclude. For example, if --removesigma=3, values further than 3 standard deviations away from the mean will be excluded."""
    )
    parser.add_argument(
        "--savepreprocessed",
        action="store_true",
        default=False,
        help=
        """Default=False. If provided, this option will save the image stacks in --input after all preprocessing options (lowpass, highpass, preprocess, masking, etc.) have been applied."""
    )
    parser.add_argument(
        "--shrink",
        type=int,
        default=1,
        help=
        "Default=1 (no shrinking). Optionally shrink the input volumes by an integer amount n > 1."
    )
    parser.add_argument(
        "--subset",
        type=int,
        default=0,
        help=
        """Default=0 (not used). N > 2 number of particles to from each stack provided through --input to consider."""
    )
    parser.add_argument(
        "--threshold",
        type=str,
        default='',
        help=
        """A thresholding processor to be applied before computing mean and standard deviation values for each image. (See 'e2help.py processors' at the command line for a list of processors that can be applied through e2proc3d.py)."""
    )
    parser.add_argument(
        "--verbose",
        "-v",
        type=int,
        default=0,
        help=
        "Default 0. Verbose level [0-9], higher number means higher level of verboseness",
        dest="verbose",
        action="store",
        metavar="n")

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)
    '''
	if options.mask: 
		options.mask=parsemodopt(options.mask)
	
	if options.preprocess: 
		options.preprocess=parsemodopt(options.preprocess)
		
	if options.lowpass: 
		options.lowpass=parsemodopt(options.lowpass)
	
	if options.highpass: 
		options.highpass=parsemodopt(options.highpass)
	
	if options.threshold: 
		options.threshold=parsemodopt(options.threshold)
		
	if options.normproc: 
		options.normproc=parsemodopt(options.normproc)
	'''
    print("\n options should NOT be parsed, e.g., options.mask={}".format(
        options.mask))
    from EMAN2_utils import sptOptionsParser
    options = sptOptionsParser(options, 'e2spt_meanintensityplot')

    print("\n options should be parsed, e.g., options.mask={}".format(
        options.mask))

    datafiles = options.input.split(',')

    from EMAN2_utils import makepath
    options = makepath(options, 'meanintensityplot')

    intensitiesSeveral = []
    intenfullSeveral = []
    iminsSeveral = []
    imaxsSeveral = []
    istdsSeveral = []

    means = []
    stds = []

    from EMAN2_utils import writeParameters
    cmdwp = writeParameters(options, 'e2spt_meanintensityplot.py',
                            'sptmeanintensity')

    for datafile in datafiles:
        print("\ndatafile={}".format(datafile))
        n = EMUtil.get_image_count(datafile)

        if options.subset:
            if options.subset < 3:
                print("ERROR:Subset must be > 2.")
                sys.exit(1)

            n = options.subset

        if n < 3:
            print(
                "ERROR: All stacks must have at least 3 particles in them. This one doesn't:",
                datafile)
            sys.exit(1)

    for datafile in datafiles:
        ret = calcintensities(options, datafile)

        intensitiesSingle = ret[0]
        intenfull = ret[1]
        imins = ret[2]
        imaxs = ret[3]
        istds = ret[4]

        intensitiesSeveral.append([datafile, list(intensitiesSingle)])

        intenfullSeveral.append([datafile, list(intenfull)])
        iminsSeveral.append([datafile, list(imins)])
        imaxsSeveral.append([datafile, list(imaxs)])
        istdsSeveral.append([datafile, list(istds)])

        intensitiesSingleNorm = intensitiesSingle

        if options.normalizeplot:
            intensitiesSingleNorm = normintensities(intensitiesSingle, 0, 0)

        #print "\]n\\n\nIntensities before plotting are", intensitiesSingleNorm
        #print "\n\n\n\n\n"

        ret = plotintensities(intensitiesSingleNorm, options, datafile)
        mean = ret[0]
        std = ret[1]
        means.append(mean)
        stds.append(std)

        print("\nfor datafile", datafile)
        print("intenfull is", intenfull)
        ret = plotintensities(intenfull, options, datafile, 'intenfull')
        ret = plotintensities(imins, options, datafile, 'mins')
        ret = plotintensities(imaxs, options, datafile, 'maxs')
        ret = plotintensities(istds, options, datafile, 'stds')

    #print "\nIntensities several len is", len( intensitiesSeveral )
    if len(intensitiesSeveral) > 1:

        datafile1 = intensitiesSeveral[0][0]
        datafile2 = intensitiesSeveral[1][0]

        intensities1 = intensitiesSeveral[0][1]
        intensities2 = intensitiesSeveral[1][1]
        n1 = len(intensities1)
        n2 = len(intensities2)

        zscore = old_div((means[0] - means[1]),
                         np.sqrt(
                             old_div((stds[0] * stds[0]), n1) +
                             old_div((stds[1] * stds[1]), n2)))

        g = open(options.path + '/MIboth_INFO.txt', 'w')
        zscoreline = 'zscore=' + str(
            zscore) + ' for ' + datafile1 + ' vs ' + datafile2 + ' \n'
        lines = [zscoreline]
        g.writelines(lines)
        g.close()

        print("\nzzzzzzz\n%s" % (zscoreline))

        absmax = absmin = 0
        if options.normalizeplot:

            minses = []
            maxes = []
            for intenS in intensitiesSeveral:
                minS = float(min(intenS[1]))
                maxS = float(max(intenS[1]))

                minses.append(minS)
                maxes.append(maxS)

            absmin = min(minses)
            absmax = max(maxes) - absmin

        for intensities in intensitiesSeveral:
            print("\nType and len of intensities is", type(intensities[1]),
                  len(intensities[1]))

            intensitiesNorm = intensities[1]
            if options.normalizeplot:
                print("Normalizeplot on")
                intensitiesNorm = normintensities(intensities[1], absmin,
                                                  absmax)

            plotintensities(intensitiesNorm, options, datafile, 'no')

        plt.savefig(options.path + '/MIbothPlot.png')
        plt.clf()

    E2end(logger)
    return
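# A minimal sketch of the two statistics this example relies on: the bin count from
# Scott's normal reference rule (see the --bins help) and the z-score printed when
# two stacks are compared (illustrative NumPy only; the function names are hypothetical):
import numpy as np

def scott_bins(values):
    """Bin count with width = 3.5*std / n**(1/3), nbins = (max - min) / width."""
    values = np.asarray(values, dtype=float)
    width = 3.5 * values.std() / (len(values) ** (1.0 / 3.0))
    return max(1, int(round((values.max() - values.min()) / width)))

def zscore(m1, s1, n1, m2, s2, n2):
    """Separation between the mean intensities of two stacks."""
    return (m1 - m2) / np.sqrt(s1 * s1 / n1 + s2 * s2 / n2)

# e.g., bins for 200 simulated mean intensities, and a z-score between two stacks
vals = np.random.normal(1.0, 0.2, 200)
print(scott_bins(vals), zscore(0.9, 0.2, 200, 1.0, 0.25, 180))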
Example #11
def main():

    progname = os.path.basename(sys.argv[0])
    usage = """Plots the variation of correlation of a volume with itself as it is rotated in azimuth or altitude"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument("--path",
                        type=str,
                        default='',
                        help="""Directory to store results in. 
		The default is a numbered series of directories containing the prefix 'rotplot'; 
		for example, rotplot_02 will be the directory by default if 'rotplot_01' already exists."""
                        )

    parser.add_argument(
        "--vols1",
        type=str,
        help=
        "Comma-separated filenames of the .hdf volumes whose self-rotational correlation plot you want to compute.",
        default=None)
    parser.add_argument(
        "--vols2",
        type=str,
        help=
        "Comma-separated filenames of the .hdf volumes whose rotational correlation plot you want to compute against the volumes provided through --vols1.",
        default=None)

    parser.add_argument(
        "--output",
        type=str,
        help=
        "Name for the .txt file with the results and the corresponding .png plot"
    )
    parser.add_argument("--sym",
                        type=str,
                        help="Symmetry to apply after all preprocessing.",
                        default='c1')

    parser.add_argument(
        "--mask",
        type=str,
        help=
        "Mask processor applied to particles before alignment. Default is mask.sharp:outer_radius=-2",
        default="mask.sharp:outer_radius=-2")
    parser.add_argument(
        "--preprocess",
        type=str,
        help=
        "Any processor (as in e2proc3d.py) to be applied to each volume prior to alignment. Not applied to aligned particles before averaging.",
        default=None)

    parser.add_argument(
        "--lowpass",
        type=str,
        help=
        "A lowpass filtering processor (as in e2proc3d.py) to be applied to each volume prior to alignment. Not applied to aligned particles before averaging.",
        default=None)
    parser.add_argument(
        "--highpass",
        type=str,
        help=
        "A highpass filtering processor (as in e2proc3d.py) to be applied to each volume prior to alignment. Not applied to aligned particles before averaging.",
        default=None)

    parser.add_argument("--normproc",
                        type=str,
                        help="""Normalization processor applied to 
		particles before alignment. Default is 'normalize.edgemean'. 
		If normalize.mask is used, results of the mask option will be passed in automatically. 
		If you want to turn this option off specify \'None\'""",
                        default='normalize.edgemean')

    parser.add_argument(
        "--shrink",
        type=int,
        default=1,
        help=
        "Optionally shrink the input volumes by an integer amount for coarse alignment."
    )
    parser.add_argument(
        "--shrinkefine",
        type=int,
        default=1,
        help=
        "Optionally shrink the input volumes by an integer amount for refine alignment."
    )

    parser.add_argument("--daz",
                        type=int,
                        default=3,
                        help="Step size to vary azimuth.")
    parser.add_argument(
        "--icosvertices",
        action="store_true",
        help="Will produce an azimutal plot at each vertex of an icosahedron.",
        default=False)
    parser.add_argument("--dalt",
                        type=int,
                        default=181,
                        help="Step size to vary altitude.")
    parser.add_argument(
        "--alti",
        type=int,
        default=0,
        help=
        """Initial position to check in altitude. For example, for a D symmetric chaperonin, 
															if you want to check alt=0 ONLY, provide --alti=0 and --dalt=181 as options.
															if you want to check alt=180 ONLY, provide --alti=180, --dalt=1 or greater.
															if you want to check BOTH alt=0 and alt=180 in the same plot, provide --alti=0, --dalt=180"""
    )

    parser.add_argument(
        "--parallel",
        help="Parallelism. See http://blake.bcm.edu/emanwiki/EMAN2/Parallel",
        default="thread:1")

    parser.add_argument(
        "--ppid",
        type=int,
        help="Set the PID of the parent process, used for cross platform PPID",
        default=-1)
    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness."
    )
    #parser.add_argument("--plotonly",type=str, help="Provide .txt files for a given alt, with 2 columns (az ccc) with the values to plot. For example, --plotonly=alt000.txt,alt180.txt", default=None)
    parser.add_argument(
        "--normalizeplot",
        action="store_true",
        help=
        "Make maximum correlation value on plot equal to 1 and scale all other values accordingly.",
        default=False)
    parser.add_argument("--plot2d",
                        action="store_true",
                        help="Produces 2D plot if both az and alt are varied.",
                        default=False)
    parser.add_argument("--only2dplot",
                        action="store_true",
                        help="Skips all plots, except 2dplot.",
                        default=False)
    parser.add_argument(
        "--savetxt",
        action="store_true",
        help="Will save the values for each plot into .txt files.",
        default=False)

    parser.add_argument(
        "--plotonly",
        type=str,
        help=
        """If you already have the correlation variation with azimuth (for a particular altitude) in a text file in rows of 'az,ccc', 
												provide the txt file(s) separated by commas --plotonly=file1.txt,file2.txt,file3.txt etc...""",
        default=None)

    parser.add_argument("--singleplot",
                        action="store_true",
                        help="""Plot all alts, 
		or each vertex of --icosvertices is on, in a single .png file.""",
                        default=False)

    parser.add_argument("--offset",
                        type=float,
                        default=0.0,
                        help="""Default=0. Rotation in azimuth
		to apply to one of the models before computing the entire rotational correlation plot."""
                        )

    parser.add_argument(
        "--ownvalues",
        action='store_true',
        default=False,
        help=
        """Default=False. If provided and --normalizeplot is also provided, this parameter will cause all curves to go from 0 to 1 when ploted on the same plot by specifying --singleplot. Otherwise, the maximum value will drawn from the highest value amongst all the curves and the minimum value from the lowest value amongst all the curves being plotted simultaneously, preserving the relative intensity between different curves, but still effectively making the range 0 to 1."""
    )

    (options, args) = parser.parse_args()

    #print "Loaded mask is", options.mask
    '''
	Parse options
	'''
    if options.mask:
        if 'None' in options.mask:
            options.mask = 'None'
        if options.mask != 'None' and options.mask != 'none':
            print("\noptions.mask before parsing is", options.mask)
            options.mask = parsemodopt(options.mask)
        else:
            options.mask = None
        print("\nmask after parsing is", options.mask)

    if options.preprocess:
        if options.preprocess != 'None' and options.preprocess != 'none':
            options.preprocess = parsemodopt(options.preprocess)
        else:
            options.preprocess = None
        print("\nPreprocessor is", options.preprocess)

    if options.lowpass:
        if options.lowpass != 'None' and options.lowpass != 'none':
            options.lowpass = parsemodopt(options.lowpass)
        else:
            options.lowpass = None
        print("\nlowpass is", options.lowpass)

    if options.highpass:
        if options.highpass != 'None' and options.highpass != 'none':
            options.highpass = parsemodopt(options.highpass)
        else:
            options.highpass = None
        print("\nHighpass is", options.highpass)

    if options.normproc:
        if options.normproc != 'None' and options.normproc != 'none':
            options.normproc = parsemodopt(options.normproc)
        else:
            options.normproc = None
        print("\nNormproc is", options.normproc)

    #print "plotonly is", options.plotonly

    if options.only2dplot:
        options.plot2d = True

    if options.icosvertices and options.vols2:
        print(
            "ERROR: You can only use --icosvertices for volumes in --vols1. You must NOT supply --vols2."
        )
        sys.exit()

    #print "args are", args
    logger = E2init(sys.argv, options.ppid)

    from EMAN2_utils import makepath
    options = makepath(options, 'rotplot')

    if not options.plotonly:
        vols1 = []
        if options.vols1:
            vols1 = options.vols1.split(',')

        vols = vols1

        vols2 = []
        if options.vols2:
            vols2 = options.vols2.split(',')
            vols = vols1 + vols2

            for v in vols:
                if '.hdf' not in v and '.mrc' not in v:
                    print(
                        "ERROR: The input volumes must all be either .hdf, .mrc or .rec (which is also just a .mrc file)."
                    )
                    sys.exit()

                rotcccplot(v, v, options)

            for v1 in vols1:
                for v2 in vols2:
                    rotcccplot(v1, v2, options)

        else:
            for v in vols:
                if '.hdf' not in v and '.mrc' not in v:
                    print(
                        "ERROR: The input volumes must all be either .hdf, .mrc or .rec (which is also just a .mrc file)."
                    )
                    sys.exit()

                rotcccplot(v, v, options)
    else:

        files = options.plotonly.split(',')
        print("Will plot these files", files)

        values = {}

        #absMIN=1000000
        #absMAX=-1
        absMIN = 0.0
        absMAX = 0.0
        ownvalues = False
        try:
            ownvalues = options.ownvalues
        except:
            pass

        k = 0
        for F in files:
            print("Working with this file now", F)
            azs = []
            valuesforthisfile = []

            f = open(F, 'r')
            lines = f.readlines()
            f.close()

            for line in lines:
                print("Line is\n", line)
                az = line.split()[0]
                azs.append(int(az))
                value = line.split()[-1].replace('\n', '')
                valuesforthisfile.append(float(value))
                print("Thus az, value are", az, value)

            #title=F.replace('.txt','')

            if not ownvalues:

                minv = float(min(valuesforthisfile))
                if float(minv) < float(absMIN):
                    absMIN = float(minv)

                maxv = float(max(valuesforthisfile))
                if float(maxv) > float(absMAX):
                    absMAX = float(maxv)
                print("Min and max to send are", absMIN, absMAX)

            values.update({k: valuesforthisfile})
            k += 1

        title = 'plot'
        if options.output:
            title = options.output.replace('.txt', '')

        plotter(options, azs, values, title, None, k, absMIN, absMAX)
        print("I have returned from the plotter")

        if options.singleplot:
            print("And single plot is on")

            plotname = 'plot'
            if options.output:
                plotname = options.output.replace('.txt', '')

            if not options.only2dplot:
                print("While only 2D plot is off")
                #pylab.savefig(plotname)
                #pylab.title(title)
                pylab.ylabel('Correlation')
                pylab.xlabel('Azimuth')
                pylab.savefig(options.path + '/' + plotname)
                #clf()

            if not options.icosvertices and options.plot2d:
                print("I will call 2dplot")
                twoD_plot(plotname, values, options)
        if not options.singleplot:
            clf()

    E2end(logger)

    return ()
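# A minimal sketch of a self-rotational correlation curve like the one this example
# plots, rotating only in azimuth (illustrative; it uses scipy.ndimage rather than
# EMAN2's aligners and comparators, and the function name is hypothetical):
import numpy as np
from scipy.ndimage import rotate

def self_rotation_curve(vol, daz=3):
    """Normalized correlation of a volume with itself rotated about z in daz steps."""
    ref = (vol - vol.mean()) / vol.std()
    curve = []
    for az in range(0, 360, daz):
        rot = rotate(vol, az, axes=(1, 2), reshape=False)  # in-plane rotation about z
        rot = (rot - rot.mean()) / rot.std()
        curve.append((az, float((ref * rot).mean())))
    return curve

# e.g., a noisy test volume; the curve should peak at az=0 and at symmetry-related angles
curve = self_rotation_curve(np.random.rand(32, 32, 32), daz=15)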
Example #12
def main():

    usage = """e2spt_transformplot.py file <options>.
	'file' can be a .json file or a stack of 3D subvolumes. In the latter case, the transformation (angles and translations) will be obtained from the header parameter 'xform.align3d'.
	Supplying the file directly is redundant with supplying the parameter --input.
	"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--input",
        type=str,
        default='',
        help=
        """A .json file with subtomogram alignment information, or a .hdf stack of aligned particles with correct alignment transformations in their header parameter 'xform.align3d'."""
    )
    parser.add_argument(
        "--inversetoo",
        action="store_true",
        default=False,
        help="""Also plots the angles for the inverse of a transform.""")

    parser.add_argument(
        "--path",
        type=str,
        default='spttransformplot',
        help=
        """Directory to store results in. The default is a numbered series of directories containing the prefix 'spttransformplot'; for example, spttransformplot_02 will be the directory by default if 'spttransformplot_01' already exists."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        default=-1,
        help="Set the PID of the parent process, used for cross platform PPID")

    parser.add_argument(
        "--subset",
        type=int,
        default=0,
        help=
        """Default=0 (not used). Plot only this substet of transforms from the hdf stack or json file provided."""
    )

    parser.add_argument(
        "--verbose",
        "-v",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness",
        dest="verbose",
        action="store",
        metavar="n")

    (options, args) = parser.parse_args()

    if not options.input:
        options.input = sys.argv[1]

    logger = E2init(sys.argv, options.ppid)

    orientations = {}

    if '.json' in options.input:
        jsonfile = options.input
        jsonfileopen = js_open_dict(jsonfile)
        n = len(jsonfileopen)
        originaln = n
        print("\nthe number of transformations to plot is %d" % (n))
        if options.subset:
            n = options.subset
            print("\nplotting only a subset, n=%d" % (n))

        for j in range(n):
            xformslabel = 'subtomo_' + str(j).zfill(len(str(originaln)))
            t = jsonfileopen[xformslabel][0]
            print("\nread transform from .json file", t)
            orientations.update({j: t})
            #jsA.setval( xformslabel, [ t , score ] )
        jsonfileopen.close()

    elif '.hdf' in options.input:
        n = EMUtil.get_image_count(options.input)
        originaln = n
        print("\nthe number of transformations to plot is %d" % (n))
        if options.subset:
            n = options.subset
            print("\nplotting only a subset, n=%d" % (n))

        for j in range(n):
            t = EMData(options.input, j, True)['xform.align3d']
            orientations.update({j: t})

    azs = []
    alts = []
    phis = []
    xs = []
    ys = []
    zs = []

    azsi = []
    altsi = []
    phisi = []
    xsi = []
    ysi = []
    zsi = []

    if len(orientations) > 2:

        from EMAN2_utils import makepath
        options = makepath(options, 'spttransformplot')

        for i in orientations:
            t = orientations[i]
            print(
                "\n t to get rotations and translations from transform number %d is"
                % (i))
            print(t)

            rots = t.get_rotation()

            az = rots['az']
            azs.append(az)

            alt = rots['alt']
            alts.append(alt)

            phi = rots['phi']
            phis.append(phi)

            trans = t.get_trans()

            x = trans[0]
            xs.append(x)

            y = trans[1]
            ys.append(y)

            z = trans[2]
            zs.append(z)

            if options.inversetoo:
                ti = t.inverse()
                print(
                    "\n t inverse to get rotations and translations from transform number %d is"
                    % (i))
                print(ti)

                rotsi = ti.get_rotation()

                azi = rotsi['az']
                azsi.append(azi)

                alti = rotsi['alt']
                altsi.append(alti)

                phii = rotsi['phi']
                phisi.append(phii)

                transi = ti.get_trans()

                xi = transi[0]
                xsi.append(xi)

                yi = transi[1]
                ysi.append(yi)

                zi = transi[2]
                zsi.append(zi)

        textwriter(options, azs, 'az')
        plotvals(options, azs, 'az')

        textwriter(options, alts, 'alt')
        plotvals(options, alts, 'alt')

        textwriter(options, phis, 'phi')
        plotvals(options, phis, 'phi')

        textwriter(options, xs, 'x')
        plotvals(options, xs, 'x', 1.0)

        textwriter(options, ys, 'y')
        plotvals(options, ys, 'y', 1.0)

        textwriter(options, zs, 'z')
        plotvals(options, zs, 'z', 1.0)

        if options.inversetoo:
            textwriter(options, azsi, 'az_inverse')
            plotvals(options, azsi, 'az_inverse')

            textwriter(options, altsi, 'alt_inverse')
            plotvals(options, altsi, 'alt_inverse')

            textwriter(options, phisi, 'phi_inverse')
            plotvals(options, phisi, 'phi_inverse')

            textwriter(options, xsi, 'x_inverse')
            plotvals(options, xsi, 'x_inverse', 1.0)

            textwriter(options, ysi, 'y_inverse')
            plotvals(options, ysi, 'y_inverse', 1.0)

            textwriter(options, zsi, 'z_inverse')
            plotvals(options, zsi, 'z_inverse', 1.0)

    else:
        print(
            "\nthere's fewer than 2 transforms. no point in plotting a single (or null) value."
        )
        sys.exit(1)

    E2end(logger)

    return
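# A minimal sketch of what --inversetoo plots: the inverse of a rigid-body alignment
# transform x' = R @ x + t is (R.T, -R.T @ t). In the example above this is done by
# EMAN2's Transform.inverse(); the NumPy version below is only illustrative:
import numpy as np

def invert_rigid(R, t):
    """Return the rotation and translation of the inverse transform."""
    R_inv = R.T
    t_inv = -R.T @ t
    return R_inv, t_inv

# e.g., a 90-degree rotation about z plus a small shift
R = np.array([[0.0, -1.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 1.0]])
t = np.array([5.0, 0.0, -2.0])
print(invert_rigid(R, t))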
Example #13
def main():

    usage = """e2spt_tiltstacker.py <options> . 
	The options should be supplied in "--option=value" format, 
	replacing "option" for a valid option name, and "value" for an acceptable value for that option. 
	
	This program operates in 3 different modes:
	1) It can STACK individual .dm3, .tiff or .hdf images into an .mrc (or .st) stack,
	by supplying a common string to all the images to stack via --stem2stack.
	It must be run in a directory containing the numbered images only.
	It also generates a .rawtlt file with tilt angle values if --lowesttilt, --highesttilt (or --tiltrange) and --tiltstep are provided.
	
	2) It can UNSTACK a tilt series into individual files (either all the images, or selected
	images, controlled through the --exclude or --include parameters).
	
	3) It can RESTACK a tilt series; that is, put together a new tilt series that excludes/includes
	specific images
	"""

    parser = EMArgumentParser(usage=usage, version=EMANVERSION)

    parser.add_argument(
        "--anglesindxinfilename",
        type=int,
        default=None,
        help=
        """Default=None. The filename of the images will be split at any occurence of the following delimiters: '_', '-', '+', '[' , ']' , ',' , ' ' (the two last ones are a comma and a blank space). Provide the index (position) of the angle in the split filename. For example, if the filename of an image is "my_specimen-oct-10-2015_-50_deg-from_k2 camera.mrc", it will be split into ['my','specimen','oct','10','2015','','50','deg','from','k2','camera','mrc']. The angle '-50', is at position 6 (starting from 0). Therefore, you would provide --anglesindxinfilename=6, assuming all images to be stacked/processed are similarly named. No worries about the minus sign disappearing. The program will look at whether there's a minus sign immediately preceeding the position where the angle info is."""
    )
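    # Rough illustration of the splitting described above (assumption: re.split with
    # the listed delimiters approximates, but is not, the program's own parsing):
    #   import re
    #   re.split(r"[-_+\[\], ]", "my_specimen-oct-10-2015_-50_deg-from_k2 camera.mrc")[6]
    #   -> '50'   (the empty token at index 5, produced by "_-", flags the minus sign)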

    parser.add_argument(
        "--apix",
        type=float,
        default=0.0,
        help="""True apix of images to be written on final stack.""")

    parser.add_argument(
        "--bidirectional",
        action='store_true',
        default=False,
        help=
        """This will assume the first image is at 0 degrees and will stack images from --lowerend through 0, and then will stack the rest from 0+tiltstep throgh --upperend. If --negativetiltseries is supplied, images will be stacked from --upperend through 0, then from 0-tiltstep through --lowerend."""
    )

    parser.add_argument(
        "--clip",
        type=str,
        default='',
        help=
        """Resize the 2-D images in the tilt series. If one number is provided, then x and y dimensions will be made the same. To specify both dimensions, supply two numbers, --clip=x,y. Clipping will be about the center of the image."""
    )

    parser.add_argument(
        "--exclude",
        type=str,
        default='',
        help=
        """Comma separated list of numbers corresponding to images to exclude. --unstack or --restack must be supplied. You can also exclude by ranges. For example: Recall that the FIRST image INDEX is 0. --exclude=1,5-7,10,12,15-19 will exclude images 1,5,6,7,10,12,15,16,17,18,19"""
    )
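    # Illustrative expansion of the range syntax above (assumption: this mirrors, but
    # is not, the program's own parsing):
    #   def expand(spec):
    #       out = set()
    #       for tok in spec.split(','):
    #           if '-' in tok:
    #               lo, hi = tok.split('-')
    #               out.update(range(int(lo), int(hi) + 1))
    #           elif tok:
    #               out.add(int(tok))
    #       return sorted(out)
    #   expand("1,5-7,10,12,15-19") -> [1, 5, 6, 7, 10, 12, 15, 16, 17, 18, 19]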

    parser.add_argument(
        "--highesttilt",
        type=float,
        default=0.0,
        help=
        """Highest tilt angle. If not supplied, it will be assumed to be 1* --tiltrange."""
    )

    parser.add_argument(
        "--include",
        type=str,
        default='',
        help=
        """Comma separated list of numbers corresponding to images to include (all others will be excluded). --unstack or --restack must be supplied. Recall that the FIRST image INDEX is 0. --include=1,5-7,10,12,15-19 will include images 1,5,6,7,10,12,15,16,17,18,19"""
    )
    parser.add_argument("--invert",
                        action="store_true",
                        default=False,
                        help=""""This will multiply the pixel values by -1.""")

    parser.add_argument(
        "--lowesttilt",
        type=float,
        default=0.0,
        help=
        """Lowest tilt angle. If not supplied, it will be assumed to be -1* --tiltrange."""
    )

    parser.add_argument(
        "--mirroraxis",
        type=str,
        default='',
        help=
        """Options are x or y, and the mirrored copy of the 2-D images will be generated before being put into the tilt series."""
    )

    parser.add_argument(
        "--negativetiltseries",
        action='store_true',
        default=False,
        help=
        """This indicates that the tilt series goes from -tiltrange to +tiltrange, or 0 to -tiltrange, then +tiltstep to +tiltrange if --bidirectional is specified."""
    )
    parser.add_argument(
        "--normalizeimod",
        action='store_true',
        default=False,
        help=
        """Default=False. This will apply 'newstack -float 2' to the input stack. Requires IMOD. Does not apply to --unstack or --restack."""
    )

    parser.add_argument(
        "--outmode",
        type=str,
        default="float",
        help=
        """All EMAN2 programs write images with 4-byte floating point values when possible by default. This allows specifying an alternate format when supported: float, int8, int16, int32, uint8, uint16, uint32. Values are rescaled to fill MIN-MAX range."""
    )

    parser.add_argument(
        "--path",
        type=str,
        default='sptstacker',
        help=
        """Directory to store results in. The default is a numbered series of directories containing the prefix 'sptstacker'; for example, sptstacker_02 will be the directory by default if 'sptstacker_01' already exists."""
    )
    parser.add_argument(
        "--ppid",
        type=int,
        help="Set the PID of the parent process, used for cross platform PPID",
        default=-1)

    parser.add_argument(
        "--restack",
        type=str,
        default='',
        help=
        """.hdf, or 3D .st, .mrc, .ali, or .mrcs stack file to restack. This option can be used with --include or --exclude to unstack only specific images. Recall that the FIRST image INDEX is 0 (but unstacked image will be numbered from 1). --exclude=1,5-7,10,12,15-19 will exclude images 1,5,6,7,10,12,15,16,17,18,19"""
        "")

    parser.add_argument(
        "--shrink",
        type=float,
        default=0.0,
        help=
        """Default=0.0 (not used). Shrinking factor to do Fourier cropping of the images in a titlseries; can be a fractional number (for example, 1.5)"""
    )
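    # Illustrative Fourier cropping by a fractional shrink factor (assumption: a 2-D
    # NumPy analogue of the idea, not the program's actual implementation):
    #   import numpy as np
    #   def fourier_crop(img, shrink):
    #       ny, nx = img.shape
    #       newy, newx = int(ny / shrink), int(nx / shrink)
    #       F = np.fft.fftshift(np.fft.fft2(img))
    #       cy, cx = ny // 2, nx // 2
    #       crop = F[cy - newy // 2:cy + newy // 2, cx - newx // 2:cx + newx // 2]
    #       return np.fft.ifft2(np.fft.ifftshift(crop)).real * (newy * newx) / (ny * nx)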
    parser.add_argument(
        "--stackregardless",
        action="store_true",
        default=False,
        help=
        """"Stack images found with the common string provided through --stem2stack, even if the number of images does not match the predicted number of tilt angles."""
    )
    parser.add_argument(
        "--stem2stack",
        type=str,
        default='',
        help=
        """String common to all the files to put into an .st stack, which is in .MRC format; for example, --stem2stack=.hdf will process all .hdf files in the current directory. If not specified, all valid EM imagefiles in the current directory will be put into an .st stack."""
    )

    parser.add_argument(
        "--tltfile",
        type=str,
        default='',
        help=
        """".tlt file IF unstacking an aligned tilt series with --unstack=<stackfile> or restacking a tiltseries with --restack=<stackfile>"""
    )
    parser.add_argument(
        "--tiltrange",
        type=float,
        default=0.0,
        help=
        """If provided, this will make --lowesttilt=-1*tiltrange and --highesttilt=tiltrange. If the range is asymmetric, supply --lowesttilt and --highesttilt directly."""
    )
    parser.add_argument(
        "--tiltstep",
        type=float,
        default=0.0,
        help="""Step between tilts. Required if using --stem2stack.""")

    parser.add_argument(
        "--unstack",
        type=str,
        default='',
        help=
        """.hdf, or 3D .st, .mrc, .ali, or .mrcs stack file to unstack. This option can be used with --include or --exclude to unstack only specific images. Recall that the FIRST image INDEX is 0 (but unstacked image will be numbered from 1). --exclude=1,5-7,10,12,15-19 will exclude images 1,5,6,7,10,12,15,16,17,18,19"""
        "")

    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness."
    )

    (options, args) = parser.parse_args()

    logger = E2init(sys.argv, options.ppid)

    print("--negativetiltseries", options.negativetiltseries)

    if options.exclude and options.include:
        print(
            "\nERROR: Supplied either exclude or include. Cannot supply both at the same time."
        )
        sys.exit()

    print("\nLogging")

    from EMAN2_utils import makepath
    options = makepath(options, 'sptstacker')

    options.path = os.getcwd() + '/' + options.path

    tiltstoexclude = options.exclude.split(',')

    if options.stem2stack:
        if not options.anglesindxinfilename and not options.tltfile:
            if not options.tiltstep:
                print(
                    "ERROR: --tiltstep required when using --stem2stack, unless --anglesindxinfilename is provided"
                )
                sys.exit()

    if options.lowesttilt == 0.0 and options.tiltrange:
        options.lowesttilt = -1 * round(float(options.tiltrange), 2)

    if options.highesttilt == 0.0 and options.tiltrange:
        options.highesttilt = round(float(options.tiltrange), 2)

    if options.unstack:
        if options.tltfile:
            unstacker(options)
        else:
            print("ERROR: --tltfile required when using --unstack")
            sys.exit()

    elif options.restack:
        if options.tltfile:
            restacker(options)
            angles = getangles(
                options, 0, True
            )  #Second parameter enforces to keep the 'raw order' of the input file. Otherwise, this function returns angles from -tiltrange to +tiltrange if --negativetiltseries is supplied; from +tiltrange to -tiltrange otherwise

            #finalangles = list(angles)
            #anglestoexclude = []

            print("\n\nthere are these many angles", len(angles))
            #if tiltstoexclude:
            #	for tilt in tiltstoexclude:
            #		anglestoexclude.append( angles[ int(tilt) ] )
            #		finalangles.remove( angles[ int(tilt) ] )
            #	#for ax in anglestoexclude:
            #	#	finalangles.remove( ax )
            #
            #	print "\n\nthere are these many angles to exclude",len(anglestoexclude)
            #	print "\nexcluded angles",anglestoexclude
            #
            #	#finalangles = list( set(angles) - set(anglestoexclude) )

            #print "\nthere are these many final angles",len(finalangles)
            #print "\nfinal angles are", finalangles

            writetlt(angles, options, True)
        else:
            print("ERROR: --tltfile required when using --restack")
            sys.exit()

    else:
        kk = 0
        intilts = findtiltimgfiles(options)

        print("\nWill organize tilt imgs found")
        intiltsdict = organizetilts(
            options, intilts
        )  #Get a dictionary in the form { indexintiltseries:[ tiltfile, tiltangle, damageRank ]},
        print(
            "\nDone organizing tilt imgs"
        )  #where damageRank tells you the order in which the images were acquired,
        #regardless of whether the tilt series goes from -tiltrange to +tiltrange,
        #or from 0 to -tiltrange then +tiltstep to +tiltrange, or the opposite of these
        outstackhdf = options.path + '/stack.hdf'

        minindx = min(intiltsdict)
        print("minindx is", minindx)
        print("getting size from any first image, intiltsdict[ minindx ][0]",
              intiltsdict[minindx][0])

        hdr = EMData(intiltsdict[minindx][0], 0, True)
        nx = hdr['nx']
        ny = hdr['ny']
        print(nx, ny)

        print("\nOutstack is", outstackhdf)

        #orderedindexes = []
        #for index in intiltsdict:
        #	orderedindexes.append( index )

        #orderedindexes.sort()

        for index in intiltsdict:

            if str(index) not in tiltstoexclude:
                intiltimgfile = intiltsdict[index][0]

                if options.verbose > 9:
                    print(
                        "\nat index {} we have image {}, collected in turn {}".
                        format(index, intiltsdict[index][0],
                               intiltsdict[index][-1]))
                intiltimg = EMData(intiltimgfile, 0)

                tiltangle = intiltsdict[index][1]
                intiltimg['spt_tiltangle'] = tiltangle

                damageRank = intiltsdict[index][2]
                intiltimg['damageRank'] = damageRank

                if options.invert:
                    intiltimg.mult(-1)
                intiltimg.write_image(outstackhdf, -1)
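                # each kept tilt image is appended to the output .hdf stack (index -1 appends),
                # with its tilt angle and acquisition order ('damageRank') stored in the header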
            #print "\nWrote image index", index

        tmp = options.path + '/tmp.hdf'

        if options.clip:
            clip = options.clip.split(',')

            shiftx = 0
            shifty = 0
            if len(clip) == 1:
                clipx = clipy = clip[0]

            if len(clip) == 2:
                clipx = clip[0]
                clipy = clip[1]

            if len(clip) == 4:
                clipx = clip[0]
                clipy = clip[1]
                shiftx = clip[2]
                shifty = clip[3]

            cmdClip = 'e2proc2d.py ' + outstackhdf + ' ' + tmp + ' --clip=' + clipx + ',' + clipy

            if shiftx:
                xcenter = int(round(old_div(nx, 2.0) + float(shiftx)))
                cmdClip += ',' + str(xcenter)
            if shifty:
                ycenter = int(round(old_div(ny, 2.0) + float(shifty)))
                cmdClip += ',' + str(ycenter)
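            # e2proc2d.py's --clip is of the form x,y[,xc,yc]; the optional center
            # (nx/2 + shiftx, ny/2 + shifty) recenters the clipped region away from the
            # image center when shifts were supplied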

            runcmd(options, cmdClip)

            #cmdClip += ' && rm ' + outstackhdf + ' && mv ' + tmp + ' ' + outstackhdf
            os.remove(outstackhdf)
            os.rename(tmp, outstackhdf)

            #print "\n(e2spt_tiltstacker.py)(main) cmdClip is", cmdClip
            #p = subprocess.Popen( cmdClip , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            #text = p.communicate()
            #p.stdout.close()

            #if options.verbose > 9:
            #    print("\nFeedback from cmdClip:")
            #    print(text)  # 'text' came from the commented-out subprocess call above and is no longer defined; runcmd handles execution now

        if options.shrink and options.shrink > 1.0:

            cmdBin = 'e2proc2d.py ' + outstackhdf + ' ' + tmp + ' --process=math.fft.resample:n=' + str(
                options.shrink)
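            # math.fft.resample:n=<shrink> bins the stack by Fourier cropping, which
            # avoids the aliasing that plain real-space decimation can introduce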

            #cmdBin = 'newstack ' + outstackhdf + ' ' + tmp + ' -ftreduce ' +  str(options.shrink) + ' -antialias 5'
            print("\n(e2spt_tiltstacker.py)(main) cmdBin is", cmdBin)

            runcmd(options, cmdBin)
            #+ ' && rm ' + outstackhdf + ' && mv ' + tmp + ' ' + outstackhdf

            os.remove(outstackhdf)
            print("\nremoved {}".format(outstackhdf))
            os.rename(tmp, outstackhdf)
            print("\nrenamed {} to {}".format(tmp, outstackhdf))

        #p = subprocess.Popen( cmdBin , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        #text = p.communicate()
        #p.stdout.close()

        print("\nreading outstackhdf hdr, for file {}".format(outstackhdf))
        outtilthdr = EMData(outstackhdf, 0, True)
        currentapix = outtilthdr['apix_x']
        if float(options.apix) and float(options.apix) != float(currentapix):
            if options.shrink:
                options.apix *= options.shrink
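                # Fourier resampling by options.shrink enlarges the pixel size by the same
                # factor, so the apix written to the header is scaled accordingly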

            print("\nFixing apix")
            cmdapix = 'e2procheader.py --input=' + outstackhdf + ' --stem=apix --valtype=float --stemval=' + str(
                options.apix)

            print("\n(e2spt_tiltstacker.py)(main) cmdapix is", cmdapix)

            runcmd(options, cmdapix)

            #p = subprocess.Popen( cmdapix , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            #text = p.communicate()
            #p.stdout.close()

            #if options.verbose > 9:
            #    print("\nFeedback from cmdapix:")
            #    print(text)  # 'text' is no longer defined; see the commented-out subprocess call above

        outstackst = outstackhdf.replace('.hdf', '.st')
        stcmd = 'e2proc2d.py ' + outstackhdf + ' ' + outstackst + ' --twod2threed'
        if options.outmode != 'float':
            stcmd += ' --outmode=' + options.outmode + ' --fixintscaling=sane'
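        # --twod2threed stacks the 2-D tilt images into a single 3-D volume (an IMOD-style
        # .st tilt series); --fixintscaling=sane is meant to rescale densities to a sensible
        # range when writing an integer --outmode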

        #if options.apix:
        #	stcmd += ' --apix=' + str(options.apix)
        #stcmd += ' && e2procheader.py --input=' + outstackst + ' --stem=apix --valtype=float --stemval=' + str( options.apix ) + ' --output=' + outstackst.replace('.st','.mrc') + " && mv " +  outstackst.replace('.st','.mrc') + ' ' + outstackst

        print("\n(e2spt_tiltstacker.py)(main) stcmd is", stcmd)

        #p = subprocess.Popen( stcmd , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        #text = p.communicate()
        #p.stdout.close()

        runcmd(options, stcmd)

        #cmdClean = ' && rm ' + options.path + '/*~* ' + outstackhdf

        #print "\n(e2spt_tiltstacker.py)(main) cmdClean is", cmdClean
        #p = subprocess.Popen( cmdClean , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        #text = p.communicate()
        #p.stdout.close()

        os.remove(outstackhdf)

        if options.normalizeimod:
            try:
                cmd = 'newstack ' + outstackst + ' ' + outstackst + ' --float 2'
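                # newstack's float mode 2 (FloatDensities) rescales all sections to a common
                # mean and standard deviation; this step requires IMOD to be on the PATH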
                print("normalizeimod cmd is", cmd)
                runcmd(options, cmd)

            #p = subprocess.Popen( cmd , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            #text = p.communicate()
            #p.wait()
            except:
                print(
                    "\nERROR: --normalizeimod skipped. Doesn't seem like IMOD is installed on this machine"
                )

        #if options.verbose > 9:
        #    print("\nFeedback from stcmd:")
        #    print(text)  # 'text' is no longer defined; see the commented-out subprocess call above

        if options.mirroraxis:
            print("\nMirroring across axis", options.mirroraxis)
            mirrorlabel = options.mirroraxis.upper()
            outstackstmirror = outstackst.replace(
                '.st', '_mirror' + mirrorlabel + '.st')

            cmdMirror = 'e2proc2d.py ' + outstackst + ' ' + outstackstmirror + ' --process=xform.mirror:axis=' + options.mirroraxis

            if options.outmode != 'float':
                cmdMirror += ' --outmode=' + options.outmode + ' --fixintscaling=sane'

            print("options.apix is", options.apix)
            if options.apix:
                cmdMirror += ' --apix=' + str(options.apix)
                cmdMirror += ' && e2fixheaderparam.py --input=' + outstackstmirror + ' --stem=apix --valtype=float --stemval=' + str(
                    options.apix) + ' --output=' + outstackstmirror.replace(
                        '.st', '.mrc') + " && mv " + outstackstmirror.replace(
                            '.st', '.mrc') + ' ' + outstackstmirror

                print("added fixheaderparam to cmdMirror!")

            print("cmdMirror is", cmdMirror)
            runcmd(options, cmdMirror)

            #p = subprocess.Popen( cmdMirror , shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            #text = p.communicate()

            #if options.verbose > 9:
            #    print("\nFeedback from cmdMirror:")
            #    print(text)  # 'text' and 'p' belong to the commented-out subprocess call above and are no longer defined
            #    p.stdout.close()

        findir = os.listdir(options.path)
        for f in findir:
            if '~' in f:
                print("\nfile to remove", f)
                print("in path", options.path + '/' + f)
                os.remove(options.path + '/' + f)

    E2end(logger)
    return
Example #14
0
def main():

    usage = " "
    parser = EMArgumentParser(usage=usage, version=EMANVERSION)
    parser.add_argument("--path", type=str, help="path", default=None)
    parser.add_argument("--ref", type=str, help="ref", default=None)
    parser.add_argument("--sym", type=str, help="symmetry", default="c1")
    parser.add_argument("--batchsize", type=int, help="batch size", default=12)
    parser.add_argument("--niter", type=int, help="iterations", default=50)
    parser.add_argument("--learnrate",
                        type=float,
                        help="learnrate",
                        default=.1)
    parser.add_argument(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type=int,
        default=0,
        help=
        "verbose level [0-9], higner number means higher level of verboseness")

    (options, args) = parser.parse_args()
    logid = E2init(sys.argv)

    from EMAN2_utils import makepath
    options = makepath(options, 'sptsgd')

    #if options.path==None:
    #	for i in range(100):
    #		pname="sptsgd_{:02d}".format(i)
    #		if not os.path.isdir(pname):
    #			os.mkdir(pname)
    #			options.path=pname
    #			break
    #	else:
    #		print("something is wrong...")
    #		exit()

    path = options.path
    print("Writing in {}..".format(path))
    fname = args[0]
    num = EMUtil.get_image_count(fname)
    batchsize = options.batchsize

    if not options.ref:

        tt = parsesym("c1")
        xfs = tt.gen_orientations("rand", {"n": batchsize})
        idx = np.arange(num)
        np.random.shuffle(idx)
        avgr = Averagers.get("mean.tomo")
        for i in range(batchsize):
            p = EMData(fname, idx[i])
            p.transform(xfs[i])
            avgr.add_image(p)
        ref = avgr.finish()
        ref.process_inplace('filter.lowpass.gauss', {"cutoff_freq": .01})
        ref.process_inplace('filter.lowpass.randomphase', {"cutoff_freq": .01})
        ref.process_inplace("xform.applysym", {"sym": options.sym})
        ref.write_image(os.path.join(path, "ref.hdf"))
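        # the initial reference is the average of one random batch of particles in random
        # orientations, heavily lowpassed and phase-randomized, so the starting model
        # carries essentially no structural bias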
    else:
        ref = EMData(options.ref)

    learnrate = options.learnrate
    lrmult = .98
    tmpout = os.path.join(path, "tmpout.hdf")
    try:
        os.remove(tmpout)
    except:
        pass
    ref.write_image(tmpout, -1)
    print("iteration, learning rate, mean gradient")
    for it in range(options.niter):
        idx = np.arange(num)
        np.random.shuffle(idx)
        nbatch = num // batchsize
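        # integer number of full batches per epoch; any leftover particles
        # (num % batchsize) are skipped this iteration, and the index list is
        # reshuffled at the start of the next one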
        cc = []
        for ib in range(nbatch):
            jsd = Queue.Queue(0)
            thrds = [
                threading.Thread(target=alifn,
                                 args=(jsd, fname, i, ref, options))
                for i in idx[ib * batchsize:(ib + 1) * batchsize]
            ]
            for t in thrds:
                t.start()
            angs = {}
            while threading.active_count() > 1:
                time.sleep(1)
                while not jsd.empty():
                    fsp, n, d = jsd.get()
                    angs[(fsp, n)] = d
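            # each worker thread (alifn) presumably aligns one particle to the current
            # reference and puts (filename, index, alignment-parameter dict) on the queue;
            # this loop drains the queue until only the main thread remains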
            avgr = Averagers.get("mean.tomo")
            #print angs
            for ks in angs.keys():
                d = angs[ks]
                p = EMData(ks[0], ks[1])
                p.transform(d["xform.align3d"])
                avgr.add_image(p)
            avg = avgr.finish()
            avg.process_inplace("xform.applysym", {"sym": options.sym})
            dmap = avg - ref
            ddm = dmap * dmap
            #print "{:d}\t{:.3f}\t{:.3f}".format(it, ddm["mean_nonzero"], np.mean(scr))
            cc.append(ddm["mean_nonzero"])
            ref = ref + learnrate * dmap
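            # gradient-style update: since dmap = avg - ref, this is equivalent to
            # ref = (1 - learnrate)*ref + learnrate*avg, i.e. the reference moves a
            # fraction 'learnrate' of the way toward the current batch average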
        ref.process_inplace("xform.centerofmass")
        ref.write_image(tmpout, -1)

        #ref.write_image(tmpout,-1)

        print("\t{:d}, {:.3f}, {:.5f}".format(it, learnrate, np.mean(cc)))
        learnrate *= lrmult

    ref.write_image(os.path.join(path, "output.hdf"))
    print("Done")
    E2end(logid)