Example No. 1
    def filterMap(self):

        if self.resolution != 0.0:
            #fourier transform the full map
            self.filteredMap = FDRutil.lowPassFilter(
                np.fft.rfftn(self.fullMap), self.frequencyMap,
                self.apix / float(self.resolution), self.fullMap.shape)
            self.filteredMap[self.filteredMap < 0.0] = 0.0

        else:
            self.filteredMap = np.zeros((10, 10))
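
A minimal, self-contained sketch of the low-pass filtering step used above (FDRutil.lowPassFilter with the target frequency apix/resolution). The helper name is illustrative and a hard cutoff is used for brevity; the real FDRutil routine may apply a smoother falloff.

import numpy as np

def lowpass_sketch(volume, apix, resolution):
    # radial frequency grid matching the layout of np.fft.rfftn(volume)
    freqs = [np.fft.fftfreq(n) for n in volume.shape[:-1]]
    freqs.append(np.fft.rfftfreq(volume.shape[-1]))
    grids = np.meshgrid(*freqs, indexing='ij', sparse=True)
    radius = np.sqrt(sum(g ** 2 for g in grids))
    # keep only frequencies below apix/resolution (absolute frequency units)
    keep = radius <= apix / float(resolution)
    return np.fft.irfftn(np.fft.rfftn(volume) * keep, s=volume.shape)
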
Example No. 2
    def resolution(self, embeddingData, image1, image2, apix):

        np.random.seed(3)

        self.apix = float(apix)

        #****************************
        #**** do two embeddings *****
        #****************************
        if embeddingData is not None:

            #split the localizations randomly in 2 half sets
            numLocalizations = embeddingData.shape[0]
            self.dimension = embeddingData.shape[1]
            sizeHalfSet = int(numLocalizations / 2)
            permutedSequence = np.random.permutation(
                np.arange(numLocalizations))
            self.embeddingsHalf1 = embeddingData[
                permutedSequence[0:sizeHalfSet], :]
            self.embeddingsHalf2 = embeddingData[
                permutedSequence[sizeHalfSet:], :]
            self.embedding = embeddingData
            self.make_half_maps()

        #****************************
        #***** use two images *******
        #****************************
        elif image1 is not None:

            self.halfMap1 = image1
            self.halfMap2 = image2

        self.hannWindow = FDRutil.makeHannWindow(self.halfMap1)
        maskData = self.hannWindow

        self.fullMap = self.halfMap1 + self.halfMap2
        self.frequencyMap = FSCutil.calculate_frequency_map(self.halfMap1)

        tmpResVec, FSC, _, _, qVals_FDR, resolution_FDR, _ = FSCutil.FSC(
            self.halfMap1, self.halfMap2, maskData, self.apix, 0.143, 1, False,
            True, None, True)

        self.resolution = resolution_FDR
        self.FSCdata = FSC
        #self.calcTTest();
        self.qVals = qVals_FDR
        self.resVec = tmpResVec
        self.filterMap()
        self.writeFSC()
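
Example No. 2 soft-masks the half maps with FDRutil.makeHannWindow before computing the FSC. One plausible, self-contained construction of such a window is sketched here (separable Hann profiles per axis); the actual helper may use a radial profile instead, so this is only an illustration.

import numpy as np

def hann_window_sketch(shape):
    # separable Hann window: one 1-D np.hanning profile per axis, broadcast together
    window = np.ones(shape)
    for axis, n in enumerate(shape):
        profile_shape = [1] * len(shape)
        profile_shape[axis] = n
        window = window * np.hanning(n).reshape(profile_shape)
    return window
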
Example No. 3
def localFiltration(map, locResMap, apix, localVariance, windowSize, boxCoord,
                    ECDF):

    #**************************************************
    #**** function to perform a local filtration ******
    #****** according to the local resolution *********
    #**************************************************

    #some initialization
    mapSize = map.shape

    mean = np.zeros(mapSize)
    var = np.zeros(mapSize)
    ECDFmap = np.ones(mapSize)
    filteredMapData = np.zeros(mapSize)

    #transform to numpy array
    locResMapData = np.copy(locResMap)

    locResMapData[locResMapData == 0.0] = 100.0
    locResMapData[locResMapData >= 100.0] = 100.0

    #transform to absolute frequency units (see http://sparx-em.org/sparxwiki/absolute_frequency_units)
    with np.errstate(all='ignore'):
        locResMapData = np.divide(apix, locResMapData)

    #round to 3 decimals
    locResMapData = np.around(locResMapData, 3)

    #set resolution search range, 3 decimals exact
    locResArray = np.arange(0, 0.5, 0.001)

    #set maximum resolution, important as ResMap is masking
    limRes = np.min(locResMapData)
    counter = 0
    numRes = len(locResArray)

    try:
        import pyfftw

        #do FFT of the respective map
        fftObject = pyfftw.builders.rfftn(map)
        mapFFT = fftObject()
    except:
        mapFFT = np.fft.rfftn(map)
    #get frequency map
    frequencyMap = FDRutil.calculate_frequency_map(map)

    # Initial call to print 0% progress
    #printProgressBar(counter, numRes, prefix = 'Progress:', suffix = 'Complete', bar_length = 50)
    print("Start local filtering. This might take a few minutes ...")

    counterRes = 0
    for tmpRes in locResArray:
        counterRes = counterRes + 1
        progress = counterRes / float(numRes)
        if counterRes % (int(numRes / 20.0)) == 0:
            output = "%.1f" % (progress * 100) + "% finished ..."
            print(output)

        #get indices of voxels with the current resolution
        indices = np.where(np.abs(locResMapData - tmpRes) < 0.0000001)

        if (indices[0].size == 0):
            #this resolution is obviously not in the map, so skip
            counter = counter + 1
            continue
        elif math.fabs(tmpRes - limRes) < 0.0000001:

            #do local filtration
            tmpFilteredMapData = FDRutil.lowPassFilter(mapFFT, frequencyMap,
                                                       tmpRes, map.shape)

            #do normalization
            #tmpFilteredMapData = (tmpFilteredMapData - np.mean(tmpFilteredMapData))/np.sqrt(np.var(tmpFilteredMapData));

            #set the filtered voxels
            filteredMapData[indices] = tmpFilteredMapData[indices]
        else:

            #do local filtration
            tmpFilteredMapData = FDRutil.lowPassFilter(mapFFT, frequencyMap,
                                                       tmpRes, map.shape)

            #do normalization
            #tmpFilteredMapData = (tmpFilteredMapData - np.mean(tmpFilteredMapData))/np.sqrt(np.var(tmpFilteredMapData));

            #set the filtered voxels
            filteredMapData[indices] = tmpFilteredMapData[indices]

            if localVariance == True:
                #estimate and set noise statistic

                if ECDF == 1:
                    #if the ECDF shall be used, use it for the p-values
                    tmpECDF, sampleSort = FDRutil.estimateECDFFromMap(
                        tmpFilteredMapData, windowSize, boxCoord)
                    vecECDF = np.interp(tmpFilteredMapData[indices],
                                        sampleSort,
                                        tmpECDF,
                                        left=0.0,
                                        right=1.0)
                    ECDFmap[indices] = vecECDF
                else:
                    ECDFmap = 0

                tmpMean, tmpVar, _ = FDRutil.estimateNoiseFromMap(
                    tmpFilteredMapData, windowSize, boxCoord)
                mean[indices] = tmpMean
                var[indices] = tmpVar

    print("Local filtering finished ...")

    return filteredMapData, mean, var, ECDFmap
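
To make the frequency bookkeeping concrete: with apix = 1.0 Angstrom, a voxel whose local resolution is 4.0 Angstrom is filtered at the absolute frequency 1.0 / 4.0 = 0.25 (Nyquist is 0.5). A toy sketch of the grouping step with made-up values, assuming a low-pass helper like the one sketched after Example No. 1:

import numpy as np

apix = 1.0                                       # assumed pixel size in Angstrom
locRes = np.array([[4.0, 4.0], [8.0, 100.0]])    # toy local-resolution values
targetFreq = np.around(apix / locRes, 3)         # absolute frequency per voxel
for freq in np.unique(targetFreq):
    indices = np.where(targetFreq == freq)       # voxels sharing this resolution
    # here the map would be low-pass filtered at 'freq' and copied at 'indices'
    print(freq, indices[0].size)
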
Example No. 4
def calculateConfidenceMap(em_map, apix, noiseBox, testProc, ecdf,
                           lowPassFilter_resolution, method, window_size,
                           locResMap, meanMap, varMap, fdr, modelMap, stepSize,
                           windowSizeLocScale, mpi):

    #*********************************************
    #******* this function calc. confMaps ********
    #*********************************************

    # get boxCoordinates
    if noiseBox is None:
        boxCoord = 0
    else:
        boxCoord = noiseBox

    # set test procedure
    if testProc is None:
        testProc = 'rightSided'

    # set ECDF
    if ecdf:
        ECDF = 1
    else:
        ECDF = 0

    sizeMap = em_map.shape

    if lowPassFilter_resolution is not None:
        frequencyMap = FDRutil.calculate_frequency_map(em_map)
        providedRes = apix / float(lowPassFilter_resolution)
        em_map = FDRutil.lowPassFilter(np.fft.rfftn(em_map), frequencyMap,
                                       providedRes, em_map.shape)

    # handle FDR correction procedure
    if method is None:
        # default is Benjamini-Yekutieli
        method = 'BY'

    if window_size is not None:
        wn = window_size
        wn = int(wn)
        if wn < 20:
            print(
                "Provided window size is quite small. Please think about potential inaccuracies of your noise estimates!"
            )
    else:
        wn = max(int(0.05 * sizeMap[0]), 10)

    if windowSizeLocScale is not None:
        wn_locscale = windowSizeLocScale
        if window_size is None:
            wn = int(wn_locscale)
    else:
        wn_locscale = None

    if stepSize is None:
        stepSize = 5

    # generate a circular Mask
    sphere_radius = (np.max(sizeMap) // 2)
    circularMaskData = mapUtil.makeCircularMask(np.copy(em_map), sphere_radius)

    # plot locations of noise estimation
    if modelMap is None:
        pp = mapUtil.makeDiagnosticPlot(em_map, wn, False, boxCoord)
        pp.savefig("diag_image.pdf")
        pp.close()
    else:
        pp = mapUtil.makeDiagnosticPlot(em_map, wn, True, boxCoord)
        pp.savefig("diag_image.pdf")
        pp.close()

    # estimate noise statistics
    if ((locResMap is None) & (modelMap is None)
        ):  # if no local resolution map is given, don't do any filtration

        FDRutil.checkNormality(em_map, wn, boxCoord)
        mean, var, _ = FDRutil.estimateNoiseFromMap(em_map, wn, boxCoord)

        if varMap is not None:
            var = varMap
        if meanMap is not None:
            mean = meanMap

        if np.isscalar(mean) and np.isscalar(var):
            output = "Estimated noise statistics: mean: " + repr(
                mean) + " and variance: " + repr(var)
        else:
            output = "Using user provided noise statistics"
        print(output)

        locFiltMap = None
        locScaleMap = None

    elif (locResMap is not None) & (
            modelMap is
            None):  # do localFiltration and estimate statistics from this map

        FDRutil.checkNormality(em_map, wn, boxCoord)
        em_map, mean, var, ECDF = mapUtil.localFiltration(
            em_map, locResMap, apix, True, wn, boxCoord, ECDF)
        #locFiltMap = FDRutil.studentizeMap(em_map, mean, var);
        locFiltMap = em_map
        locScaleMap = None
    else:
        em_map, mean, var, ECDF = locscaleUtil.launch_amplitude_scaling(
            em_map, modelMap, apix, stepSize, wn_locscale, wn, method,
            locResMap, boxCoord, mpi, ECDF)
        #locScaleMap = FDRutil.studentizeMap(em_map, mean, var);
        locScaleMap = em_map
        locFiltMap = None

    # calculate the qMap
    qMap = FDRutil.calcQMap(em_map, mean, var, ECDF, wn, boxCoord,
                            circularMaskData, method, testProc)

    # if explicit thresholding is requested, do so
    if (method == 'BY') | (method == 'BH'):
        error = "FDR"
    else:
        error = "FWER"

    if fdr is not None:

        # threshold the qMap
        binMap = FDRutil.binarizeMap(qMap, fdr)

        # apply the thresholded qMap to data
        maskedMap = np.multiply(binMap, em_map)
        minMapValue = np.min(maskedMap[np.nonzero(maskedMap)])

        maskedMap = np.multiply(maskedMap, circularMaskData)

        if (locResMap is None) & (
                modelMap is None
        ):  # if no local resolution map is given, report the corresponding threshold; not useful with local filtration
            output = "Calculated map threshold: %.3f" % minMapValue + " at a " + error + " of " + repr(
                fdr * 100) + " %."
            print(output)
    else:
        # threshold the qMap
        binMap1 = FDRutil.binarizeMap(qMap, 0.01)
        binMap001 = FDRutil.binarizeMap(qMap, 0.0001)

        if (locResMap is None) & (
                modelMap is None
        ):  # if no local resolution map is given, report the corresponding threshold; not useful with local filtration
            # apply the thresholded qMap to data
            maskedMap1 = np.multiply(binMap1, np.copy(em_map))
            maskedMap001 = np.multiply(binMap001, np.copy(em_map))
            minMapValue1 = np.min(maskedMap1[np.nonzero(maskedMap1)])
            minMapValue001 = np.min(maskedMap001[np.nonzero(maskedMap001)])
            output = "Calculated map threshold: %.3f" % minMapValue1 + " at a " + error + " of " + repr(
                1) + " %."
            print(output)
            output = "Calculated map threshold: %.3f" % minMapValue001 + " at a " + error + " of " + repr(
                0.01) + " %."
            print(output)
        """elif (locResMap is not None) & (modelMap is None):
			# apply the thresholded qMap to data
			maskedMap = np.multiply(binMap, np.copy(locFiltMap));
			minMapValue = np.min(maskedMap[np.nonzero(maskedMap)]);
			output = "Calculated map threshold: %.3f"  %minMapValue + " at a " + error + " of " + repr(fdr*100) + "%.";
			print(output);
		elif (locResMap is None) & (modelMap is not None):
			# apply the thresholded qMap to data
			maskedMap = np.multiply(binMap, np.copy(locScaleMap));
			minMapValue = np.min(maskedMap[np.nonzero(maskedMap)]);
			output = "Calculated map threshold: %.3f" %minMapValue + " at a " + error + " of " + repr(fdr*100) + "%.";
			print(output);
		"""
        binMap = None
        maskedMap = None

    # invert qMap for visualization tools
    confidenceMap = np.subtract(1.0, qMap)

    # apply lowpass-filtered mask to maps
    confidenceMap = np.multiply(confidenceMap, circularMaskData)

    return confidenceMap, locFiltMap, locScaleMap, binMap, maskedMap
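
The statistical core behind the confidence map (p-values from the estimated noise model, FDR adjustment, thresholding) can be sketched in a few self-contained lines. This is a simplification under a Gaussian noise assumption with a plain Benjamini-Hochberg step-up; FDRutil.calcQMap and binarizeMap are more general (ECDF option, windowed estimates, several correction methods), so the names and details below are illustrative only.

import numpy as np
from scipy.special import erfc

def confidence_sketch(volume, noise_mean, noise_var, fdr=0.01):
    # right-sided p-values under a Gaussian noise model
    z = (volume - noise_mean) / np.sqrt(noise_var)
    pVals = 0.5 * erfc(z / np.sqrt(2.0))
    # Benjamini-Hochberg step-up adjustment of the flattened p-values
    flat = pVals.ravel()
    order = np.argsort(flat)
    m = flat.size
    ranked = flat[order] * m / np.arange(1, m + 1)
    qVals = np.empty(m)
    qVals[order] = np.minimum.accumulate(ranked[::-1])[::-1]
    qVals = np.clip(qVals, 0.0, 1.0).reshape(pVals.shape)
    binMap = (qVals <= fdr).astype(np.float32)   # 1 where signal survives the FDR threshold
    return 1.0 - qVals, binMap                   # confidence map and binary mask

Called on pure noise, e.g. confidence_sketch(np.random.randn(32, 32, 32), 0.0, 1.0), almost no voxels survive the 1 % threshold, which is the expected behaviour.
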
Example No. 5
	def runFSC(self):


		#show message box before starting
		msg = QMessageBox();
		msg.setIcon(QMessageBox.Information);
		msg.setText("Start the job with OK!")
		msg.setInformativeText("GUI will be locked until the job is finished. See terminal printouts for progress ...");
		msg.setWindowTitle("Start job");
		msg.setStandardButtons( QMessageBox.Cancel| QMessageBox.Ok);
		result = msg.exec_();

		if result == QMessageBox.Cancel:
			return;


		start = time.time();

		print('***************************************************');
		print('******* Significance analysis of FSC curves *******');
		print('***************************************************');

		#read the half maps
		try:
			half_map1 = mrcfile.open(self.fileLine_halfMap1.text(), mode='r');
			half_map2 = mrcfile.open(self.fileLine_halfMap2.text(), mode='r');
		except:
			msg = QMessageBox();
			msg.setIcon(QMessageBox.Information);
			msg.setText("Cannot read file ...");
			msg.setWindowTitle("Error");
			msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel);
			retval = msg.exec_();
			return;

		halfMap1Data = np.copy(half_map1.data);
		halfMap2Data = np.copy(half_map2.data);
		sizeMap = halfMap1Data.shape;

		# set working directory and output filename
		path = self.fileLine_output.text();
		if path == '':
			path = os.path.dirname(self.fileLine_halfMap1.text());
		os.chdir(path);
		splitFilename = os.path.splitext(os.path.basename(self.fileLine_halfMap1.text()));
		outputFilename_PostProcessed =  splitFilename[0] + "_filtered.mrc";


		# make the mask
		maskData = FSCutil.makeCircularMask(halfMap1Data, (np.min(halfMap1Data.shape) / 2.0) - 4.0);  # circular mask
		maskBFactor = FSCutil.makeCircularMask(halfMap1Data, (
					np.min(halfMap1Data.shape) / 4.0) - 4.0);  # smaller circular mask for B-factor estimation


		#**************************************
		#********* get pixel size *************
		#**************************************
		apixMap = float(half_map1.voxel_size.x);

		try:
			apix = float(self.apix.text());
		except:
			apix = None;

		if apix is not None:
			print('Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'.format(apix, apixMap));
		else:
			print(
				'Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'.format(
					apixMap));
			apix = apixMap;

		#******************************************
		#*********** get num Asym Units ***********
		#******************************************

		try:
			numAsymUnits = int(self.numAsUnit.text());
		except:
			numAsymUnits = None;

		if numAsymUnits is not None:
			print('Using user provided number of asymmetric units, given as {:d}'.format(numAsymUnits));
		else:
			symmetry = self.symmetry.text();
			numAsymUnits = FSCutil.getNumAsymUnits(symmetry);
			print('Using provided ' + symmetry + ' symmetry. Number of asymmetric units: {:d}'.format(numAsymUnits));

		#**********************************************
		#*************** get resolutions **************
		#**********************************************

		#read the mask
		#mask = mrcfile.open('/Users/mbeckers/Documents/LabBook/2019/June2019/mapModel_FSC/betaGal_refinement/mask.mrc', mode='r');
		#maskData = mask.data;

		#run the FSC
		res, FSC, percentCutoffs, pValues, qValsFDR, resolution, _ = FSCutil.FSC(halfMap1Data, halfMap2Data,
																				 maskData, apix, 0.143,
																				 numAsymUnits, False, True, None,
																				 False);

		# write the FSC
		FSCutil.writeFSC(res, FSC, qValsFDR, pValues, resolution);

		processedMap = FDRutil.sharpenMap(0.5 * (halfMap1Data + halfMap2Data), 0, apix, resolution);

		# write the post-processed map
		postProcMRC = mrcfile.new(outputFilename_PostProcessed, overwrite=True);
		postProc = np.float32(processedMap);
		postProcMRC.set_data(postProc);
		postProcMRC.voxel_size = apix;
		postProcMRC.close();

		output = "Saved filtered map to: " + outputFilename_PostProcessed;
		print(output);

		end = time.time();
		totalRuntime = end - start;

		print("****** Summary ******");
		print("Runtime: %.2f" % totalRuntime);

		self.showMessageBox(resolution);
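
Example No. 5 builds its masks with FSCutil.makeCircularMask, using a radius of half (or a quarter) of the smallest map dimension minus 4 voxels. A self-contained sketch of a plain binary spherical mask follows; the real helper may soften the edge, so treat this as an approximation.

import numpy as np

def circular_mask_sketch(volumeShape, radius):
    # binary spherical mask centred in the volume
    grids = np.indices(volumeShape)
    centre = (np.array(volumeShape) - 1) / 2.0
    distance = np.sqrt(sum((g - c) ** 2 for g, c in zip(grids, centre)))
    return (distance <= radius).astype(np.float32)

# e.g. circular_mask_sketch((64, 64, 64), 64 / 2.0 - 4.0)
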
Example No. 6
def localResolutions(halfMap1, halfMap2, boxSize, stepSize, cutoff, apix,
                     numAsymUnits, mask, maskPermutation):

    # ********************************************
    # ****** calculate local resolutions by ******
    # ********** local FSC-thresholding **********
    # ********************************************

    print("Starting calculations of local resolutions ...")

    sizeMap = halfMap1.shape
    locRes = np.zeros((len(range(boxSize, boxSize + sizeMap[0], stepSize)),
                       len(range(boxSize, boxSize + sizeMap[1], stepSize)),
                       len(range(boxSize, boxSize + sizeMap[2], stepSize))))

    # pad the volumes
    paddedHalfMap1 = np.zeros(
        (sizeMap[0] + 2 * boxSize, sizeMap[1] + 2 * boxSize,
         sizeMap[2] + 2 * boxSize))
    paddedHalfMap2 = np.zeros(
        (sizeMap[0] + 2 * boxSize, sizeMap[1] + 2 * boxSize,
         sizeMap[2] + 2 * boxSize))
    paddedMask = np.zeros((sizeMap[0] + 2 * boxSize, sizeMap[1] + 2 * boxSize,
                           sizeMap[2] + 2 * boxSize))
    paddedMaskPermutation = np.zeros(
        (sizeMap[0] + 2 * boxSize, sizeMap[1] + 2 * boxSize,
         sizeMap[2] + 2 * boxSize))

    paddedHalfMap1[boxSize:boxSize + sizeMap[0], boxSize:boxSize + sizeMap[1],
                   boxSize:boxSize + sizeMap[2]] = halfMap1
    paddedHalfMap2[boxSize:boxSize + sizeMap[0], boxSize:boxSize + sizeMap[1],
                   boxSize:boxSize + sizeMap[2]] = halfMap2
    paddedMask[boxSize:boxSize + sizeMap[0], boxSize:boxSize + sizeMap[1],
               boxSize:boxSize + sizeMap[2]] = mask
    paddedMaskPermutation[boxSize:boxSize + sizeMap[0],
                          boxSize:boxSize + sizeMap[1],
                          boxSize:boxSize + sizeMap[2]] = maskPermutation

    halfBoxSize = int(boxSize / 2.0)

    # make Hann window
    hannWindow = FDRutil.makeHannWindow(np.zeros((boxSize, boxSize, boxSize)))

    numCalculations = len(range(
        boxSize, boxSize + sizeMap[0], stepSize)) * len(
            range(boxSize, boxSize + sizeMap[1], stepSize)) * len(
                range(boxSize, boxSize + sizeMap[2], stepSize))
    print("Total number of calculations: " + repr(numCalculations))

    # ****************************************************
    # ********* get initial permuted CorCoeffs ***********
    # ****************************************************

    print("Do initial permuations ...")
    for i in range(10):

        xInd = np.random.randint(boxSize, sizeMap[0] + boxSize)
        yInd = np.random.randint(boxSize, sizeMap[1] + boxSize)
        zInd = np.random.randint(boxSize, sizeMap[2] + boxSize)

        #xInd = np.random.randint(sizeMap[0]/2 - sizeMap[0]/8 + boxSize, sizeMap[0]/2 + sizeMap[0]/8 + boxSize);
        #yInd = np.random.randint(sizeMap[1]/2 - sizeMap[1]/8 + boxSize, sizeMap[1]/2 + sizeMap[1]/8 + boxSize);
        #zInd = np.random.randint(sizeMap[2]/2 - sizeMap[2]/8 + boxSize, sizeMap[2]/2 + sizeMap[2]/8 + boxSize);

        #generate new locations until one is found in the mask
        while ((paddedMaskPermutation[xInd, yInd, zInd] < 0.5)):

            xInd = np.random.randint(boxSize, sizeMap[0] + boxSize)
            yInd = np.random.randint(boxSize, sizeMap[1] + boxSize)
            zInd = np.random.randint(boxSize, sizeMap[2] + boxSize)

            #xInd = np.random.randint(sizeMap[0] / 2 - sizeMap[0] / 8 + boxSize,
            #						 sizeMap[0] / 2 + sizeMap[0] / 8 + boxSize);
            #yInd = np.random.randint(sizeMap[1] / 2 - sizeMap[1] / 8 + boxSize,
            #						 sizeMap[1] / 2 + sizeMap[1] / 8 + boxSize);
            #zInd = np.random.randint(sizeMap[2] / 2 - sizeMap[2] / 8 + boxSize,
            #						 sizeMap[2] / 2 + sizeMap[2] / 8 + boxSize);

        #get windowed parts
        windowHalfmap1 = paddedHalfMap1[xInd - halfBoxSize:xInd - halfBoxSize +
                                        boxSize, yInd - halfBoxSize:yInd -
                                        halfBoxSize + boxSize,
                                        zInd - halfBoxSize:zInd - halfBoxSize +
                                        boxSize]
        windowHalfmap2 = paddedHalfMap2[xInd - halfBoxSize:xInd - halfBoxSize +
                                        boxSize, yInd - halfBoxSize:yInd -
                                        halfBoxSize + boxSize,
                                        zInd - halfBoxSize:zInd - halfBoxSize +
                                        boxSize]

        # apply hann window
        windowHalfmap1 = windowHalfmap1 * hannWindow
        windowHalfmap2 = windowHalfmap2 * hannWindow

        res, _, _, _, _, _, tmpPermutedCorCoeffs = FSCutil.FSC(
            windowHalfmap1, windowHalfmap2, None, apix, cutoff, numAsymUnits,
            True, False, None, False)

        if i == 0:
            # initialize the array of correlation coefficients
            permutedCorCoeffs = tmpPermutedCorCoeffs
        else:
            # append the correlation coefficients
            for resInd in range(len(tmpPermutedCorCoeffs)):
                permutedCorCoeffs[resInd] = np.append(
                    permutedCorCoeffs[resInd], tmpPermutedCorCoeffs[resInd])

    # ****************************************************
    # ********* calculate the local resolutions **********
    # ****************************************************

    print("Do local FSC calculations ...")

    # generate partial function to loop over the whole map
    partialLoopOverMap = functools.partial(loopOverMap,
                                           paddedMask=paddedMask,
                                           paddedHalfMap1=paddedHalfMap1,
                                           paddedHalfMap2=paddedHalfMap2,
                                           boxSize=boxSize,
                                           sizeMap=sizeMap,
                                           stepSize=stepSize,
                                           halfBoxSize=halfBoxSize,
                                           hannWindow=hannWindow,
                                           apix=apix,
                                           cutoff=cutoff,
                                           numAsymUnits=numAsymUnits,
                                           permutedCorCoeffs=permutedCorCoeffs)

    #parallelized local resolutions
    numCores = min(multiprocessing.cpu_count(), 4)
    print(
        "Using {:d} cores. This might take a few minutes ...".format(numCores))
    iIterable = range(boxSize, boxSize + sizeMap[0], stepSize)

    #initialize parallel processes
    lenInt = int(math.ceil(len(iIterable) / float(numCores)))
    queue = multiprocessing.Queue()

    #start process for each core and run in parallel
    for i in range(numCores):

        #split the iterable
        startInd = (i * lenInt)
        endInd = (i + 1) * lenInt
        if i == (numCores - 1):
            seq = range(iIterable[startInd],
                        iIterable[len(iIterable) - 1] + stepSize, stepSize)
        else:
            seq = range(iIterable[startInd], iIterable[endInd], stepSize)

        #start the respective process
        proc = multiprocessing.Process(target=partialLoopOverMap,
                                       args=(
                                           seq,
                                           queue,
                                       ))
        proc.start()

    #addition of individual local resolution maps to produce the final one
    for i in range(numCores):
        locRes = locRes + queue.get()

    # *************************************
    # ********** do interpolation *********
    # *************************************

    #locRes[locRes==0] = 2.2;

    print("Interpolating local Resolutions ...")
    x = np.linspace(1, 10, locRes.shape[0])
    y = np.linspace(1, 10, locRes.shape[1])
    z = np.linspace(1, 10, locRes.shape[2])

    myInterpolatingFunction = RegularGridInterpolator((x, y, z),
                                                      locRes,
                                                      method='linear')

    xNew = np.linspace(1, 10, sizeMap[0])
    yNew = np.linspace(1, 10, sizeMap[1])
    zNew = np.linspace(1, 10, sizeMap[2])

    xInd, yInd, zInd = np.meshgrid(xNew,
                                   yNew,
                                   zNew,
                                   indexing='ij',
                                   sparse=True)

    localRes = myInterpolatingFunction((xInd, yInd, zInd))

    localRes[mask <= 0.01] = 0.0

    return localRes
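
The final step above upsamples the coarse grid of local resolutions back to the full map size with scipy's RegularGridInterpolator. The same call pattern on a toy grid, with made-up sizes, looks like this:

import numpy as np
from scipy.interpolate import RegularGridInterpolator

coarse = np.random.rand(4, 4, 4)                 # stands in for the coarse locRes grid
x = y = z = np.linspace(1, 10, 4)
interpolator = RegularGridInterpolator((x, y, z), coarse, method='linear')

xNew = yNew = zNew = np.linspace(1, 10, 64)      # assumed full map edge length of 64
xi, yi, zi = np.meshgrid(xNew, yNew, zNew, indexing='ij', sparse=True)
fine = interpolator((xi, yi, zi))                # shape (64, 64, 64)
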
Example No. 7
def main():
    start = time.time()

    # get command line input
    args = cmdl_parser.parse_args()

    # no amplitude scaling will be done
    print('************************************************')
    print('******* Significance analysis of EM-Maps *******')
    print('************************************************')

    # if varianceMap is given, use it
    if args.varianceMap is not None:
        varMap = mrcfile.open(args.varianceMap, mode='r')
        varMapData = np.copy(varMap.data)
    else:
        varMapData = None

    # if meanMap is given, use it
    if args.meanMap is not None:
        meanMap = mrcfile.open(args.meanMap, mode='r')
        meanMapData = np.copy(meanMap.data)
    else:
        meanMapData = None

    # load the maps
    if args.halfmap2 is not None:
        if args.em_map is None:
            print("One half map missing! Exit ...")
            sys.exit()
        else:
            # load the maps
            filename = args.em_map
            map1 = mrcfile.open(args.em_map, mode='r')
            apix = float(map1.voxel_size.x)
            halfMapData1 = np.copy(map1.data)
            sizeMap = halfMapData1.shape

            map2 = mrcfile.open(args.halfmap2, mode='r')
            halfMapData2 = np.copy(map2.data)

            print("Estimating local noise levels ...")
            varMapData = FDRutil.estimateNoiseFromHalfMaps(
                halfMapData1, halfMapData2, 20, 2)
            meanMapData = np.zeros(varMapData.shape)

            mapData = (halfMapData1 + halfMapData2) * 0.5
            halfMapData1 = 0
            halfMapData2 = 0

    else:
        # load single map
        filename = args.em_map
        map = mrcfile.open(filename, mode='r')
        apix = float(map.voxel_size.x)
        mapData = np.copy(map.data)

    if args.apix is not None:
        print(
            'Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'
            .format(args.apix, apix))
        apix = args.apix
    else:
        print(
            'Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'
            .format(apix))
        args.apix = apix

    # set output filename
    if args.outputFilename is not None:
        splitFilename = os.path.splitext(os.path.basename(args.outputFilename))
    else:
        splitFilename = os.path.splitext(os.path.basename(filename))

    # if local resolutions are given, use them
    if args.locResMap is not None:
        locResMap = mrcfile.open(args.locResMap, mode='r')
        locResMapData = np.copy(locResMap.data)
    else:
        locResMapData = None

    # get LocScale input
    if args.model_map is not None:
        modelMap = mrcfile.open(args.model_map, mode='r')
        modelMapData = np.copy(modelMap.data)
    else:
        modelMapData = None

    if args.stepSize is not None:
        stepSize = args.stepSize
    else:
        stepSize = None

    if args.window_size_locscale is not None:
        windowSizeLocScale = args.window_size_locscale
    else:
        windowSizeLocScale = None

    if args.mpi:
        mpi = True
    else:
        mpi = False

    if (args.stepSize is not None) & (args.window_size_locscale is not None):
        if args.stepSize > args.window_size_locscale:
            print(
                "Step Size cannot be bigger than the window_size. Job is killed ..."
            )
            return

    # run the actual analysis
    confidenceMap, locFiltMap, locScaleMap, mean, var = confidenceMapMain.calculateConfidenceMap(
        mapData, apix, args.noiseBox, args.testProc, args.ecdf,
        args.lowPassFilter, args.method, args.window_size, locResMapData,
        meanMapData, varMapData, args.fdr, modelMapData, stepSize,
        windowSizeLocScale, mpi)

    if locFiltMap is not None:
        locFiltMapMRC = mrcfile.new(splitFilename[0] + '_locFilt.mrc',
                                    overwrite=True)
        locFiltMap = np.float32(locFiltMap)
        locFiltMapMRC.set_data(locFiltMap)
        locFiltMapMRC.voxel_size = apix
        locFiltMapMRC.close()

    if locScaleMap is not None:
        locScaleMapMRC = mrcfile.new(splitFilename[0] + '_scaled.mrc',
                                     overwrite=True)
        locScaleMap = np.float32(locScaleMap)
        locScaleMapMRC.set_data(locScaleMap)
        locScaleMapMRC.voxel_size = apix
        locScaleMapMRC.close()
    """if (locScaleMap is not None) | (locFiltMap is not None):
		meanMapMRC = mrcfile.new(splitFilename[0] + '_mean.mrc', overwrite=True);
		mean = np.float32(mean);
		meanMapMRC.set_data(mean);
		meanMapMRC.voxel_size = apix;
		meanMapMRC.close();
		varMapMRC = mrcfile.new(splitFilename[0] + '_var.mrc', overwrite=True);
		var = np.float32(var);
		varMapMRC.set_data(var);
		varMapMRC.voxel_size = apix;
		varMapMRC.close();"""

    # write the confidence Maps
    confidenceMapMRC = mrcfile.new(splitFilename[0] + '_confidenceMap.mrc',
                                   overwrite=True)
    confidenceMap = np.float32(confidenceMap)
    confidenceMapMRC.set_data(confidenceMap)
    confidenceMapMRC.voxel_size = apix
    confidenceMapMRC.close()

    # write the confidence Maps
    confidenceMapMRC = mrcfile.new(splitFilename[0] +
                                   '_confidenceMap_-log10FDR.mrc',
                                   overwrite=True)
    confidenceMap = np.float32(1.0 - confidenceMap)
    confidenceMap[confidenceMap == 0] = 0.0000000001
    confidenceMapMRC.set_data(-np.log10(confidenceMap))
    confidenceMapMRC.voxel_size = apix
    confidenceMapMRC.close()

    end = time.time()
    totalRuntime = end - start

    FDRutil.printSummary(args, totalRuntime)
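
When two half maps are given, Example No. 7 derives local noise levels with FDRutil.estimateNoiseFromHalfMaps(halfMapData1, halfMapData2, 20, 2). A rough, self-contained sketch of that idea (local variance of the half-map difference) is shown below; the meaning of the real function's last two arguments is not visible here, so the window handling is an assumption.

import numpy as np
from scipy.ndimage import uniform_filter

def halfmap_noise_sketch(half1, half2, window=20):
    # the half-map difference, scaled to the noise level of the averaged map
    noise = 0.5 * (half1 - half2)
    # local variance via sliding-window mean of squares minus squared mean
    localMean = uniform_filter(noise, size=window)
    localSq = uniform_filter(noise * noise, size=window)
    return np.maximum(localSq - localMean ** 2, 0.0)
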
Example No. 8
	def runLocalFiltering(self):


		#show message box before starting
		msg = QMessageBox();
		msg.setIcon(QMessageBox.Information);
		msg.setText("Start the job with OK!")
		msg.setInformativeText("GUI will be locked until the job is finished. See terminal printouts for progress ...");
		msg.setWindowTitle("Start job");
		msg.setStandardButtons( QMessageBox.Cancel| QMessageBox.Ok);
		result = msg.exec_();

		if result == QMessageBox.Cancel:
			return;


		start = time.time();

		print('************************************************');
		print('**** Local resolution filtering of EM-Maps *****');
		print('************************************************');


		# read the maps
		try:
			em_map = mrcfile.open(self.fileLine.text(), mode='r');
			locResMap = mrcfile.open(self.fileLine_locResMap.text(), mode='r');
		except:
			msg = QMessageBox();
			msg.setIcon(QMessageBox.Information);
			msg.setText("Cannot read file ...");
			msg.setWindowTitle("Error");
			msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel);
			retval = msg.exec_();
			return;

		mapData = np.copy(em_map.data);
		locResMapData = np.copy(locResMap.data);

		# set working directory and output filename
		path = self.fileLine_output.text();
		if path == '':
			path = os.path.dirname(self.fileLine.text());
		os.chdir(path);
		splitFilename = os.path.splitext(os.path.basename(self.fileLine.text()));
		outputFilename_locallyFiltered = splitFilename[0] + "_locallyFiltered.mrc";


		#**************************************
		#********* get pixel size *************
		#**************************************
		apixMap = float(em_map.voxel_size.x);

		try:
			apix = float(self.apix.text());
		except:
			apix = None;

		if apix is not None:
			print('Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'.format(apix, apixMap));
		else:
			print(
				'Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'.format(
					apixMap));
			apix = apixMap;


		#**************************************
		#**** get noise estimation input ******
		#**************************************
		if self.localNormalization.isChecked():

			# ****************************************
			# ************ set the noiseBox **********
			# ****************************************
			try:
				boxCoord = [int(self.xCoord.text()), int(self.yCoord.text()), int(self.zCoord.text())];
			except:
				boxCoord = 0;

			# ******************************************
			# ************ set the windowSize **********
			# ******************************************
			try:
				windowSize = int(self.boxSize.text());
			except:
				print("Window size needs to be a positive integer ...");
				return;

			localVariance = True;
		else:
			localVariance = False;
			windowSize = None;
			boxCoord = None;



		#do the local filtering
		locFiltMap, meanMap, varMap, _ = mapUtil.localFiltration(mapData, locResMapData, apix, localVariance, windowSize,
															boxCoord, None);

		#if background normalization to be done, then do so
		if localVariance:
			locFiltMap = FDRutil.studentizeMap(locFiltMap, meanMap, varMap);


		# write the local resolution map
		localFiltMapMRC = mrcfile.new(outputFilename_locallyFiltered, overwrite=True);
		localFiltMap = np.float32(locFiltMap);
		localFiltMapMRC.set_data(localFiltMap);
		localFiltMapMRC.voxel_size = apix;
		localFiltMapMRC.close();

		end = time.time();
		totalRuntime = end - start;

		print("****** Summary ******");
		print("Runtime: %.2f" % totalRuntime);

		self.showMessageBox();
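
Example No. 8 optionally background-normalizes the filtered map with FDRutil.studentizeMap(locFiltMap, meanMap, varMap). Judging by the commented-out normalization lines elsewhere in this section, that amounts to subtracting the mean and dividing by the standard deviation; a one-function sketch (the epsilon guard is an addition, not part of the package):

import numpy as np

def studentize_sketch(volume, mean, var, eps=1e-12):
    # voxel-wise (x - mean) / sqrt(var); eps guards against zero variance
    return (volume - mean) / np.sqrt(var + eps)
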
Example No. 9
def FSC(halfMap1, halfMap2, maskData, apix, cutoff, numAsymUnits, localRes,
        verbose, permutedCorCoeffs, SMLM):

    #********************************************
    #***** function that calculates the FSC *****
    #********************************************

    if localRes:
        maskCoeff = 0.23
    elif SMLM:
        maskCoeff = 0.6
    else:
        maskCoeff = 0.7

    if maskData is not None:
        halfMap1 = halfMap1 * maskData
        halfMap2 = halfMap2 * maskData

    #calculate frequency map
    freqMap = calculate_frequency_map(halfMap1)
    freqMap = freqMap / float(apix)

    #do fourier transforms
    try:
        import pyfftw
        import multiprocessing

        fftObject_half1 = pyfftw.builders.rfftn(halfMap1)
        fftObject_half2 = pyfftw.builders.rfftn(halfMap2)
        fft_half1 = fftObject_half1(halfMap1)
        fft_half2 = fftObject_half2(halfMap2)
    except:
        fft_half1 = np.fft.rfftn(halfMap1)
        fft_half2 = np.fft.rfftn(halfMap2)

    sizeMap = halfMap1.shape

    res = np.fft.rfftfreq(sizeMap[0], 1.0)
    res = res / float(apix)
    numRes = res.shape[0]

    resSpacing = (res[1] - res[0]) / 2.0
    FSC = np.ones((res.shape[0]))
    pVals = np.zeros((res.shape[0]))
    percentCutoffs = np.zeros((res.shape[0], 4))
    threeSigma = np.zeros((res.shape[0]))
    threeSigmaCorr = np.zeros((res.shape[0]))
    tmpPermutedCorCoeffs = []

    numCalculations = res.shape[0]

    if verbose:
        print("Run permutation test of each resolution shell ...")

    for i in range(res.shape[0]):
        tmpRes = res[i]
        resShell_half1 = fft_half1[((tmpRes - resSpacing) < freqMap)
                                   & (freqMap < (tmpRes + resSpacing))]
        resShell_half2 = fft_half2[((tmpRes - resSpacing) < freqMap)
                                   & (freqMap < (tmpRes + resSpacing))]

        FSC[i] = correlationCoefficient(resShell_half1, resShell_half2)

        if (permutedCorCoeffs is not None):  #for local resolution estimation
            tmpCorCoeffs = permutedCorCoeffs[i]
            pVals[i] = (tmpCorCoeffs[tmpCorCoeffs > FSC[i]].shape[0]) / (float(
                tmpCorCoeffs.shape[0]))
            tmpPermutedCorCoeffs = None
        else:
            pVals[i], percentCutoffs[i, :], threeSigma[i], threeSigmaCorr[
                i], corCoeffs = permutationTest(resShell_half1, resShell_half2,
                                                numAsymUnits, maskCoeff)
            tmpPermutedCorCoeffs.append(corCoeffs)

        if verbose:
            #print output
            progress = i / float(numCalculations)
            if i % max(int(numCalculations / 20.0), 1) == 0:
                output = "%.1f" % (progress * 100) + "% finished ..."
                print(output)

    pVals[0] = 0.0

    #for the first two resolution shells, use a 0.75 FSC criterion for local resolutions, as the permutation test is not reliable for such small sample sizes
    if localRes or SMLM:
        if FSC[0] < 0.75:
            pVals[0] = 1.0
        else:
            pVals[0] = 0.0

        if FSC[1] < 0.75:
            pVals[1] = 1.0
        else:
            pVals[1] = 0.0

    # do FDR control of p-Values
    qVals_FDR = FDRutil.pAdjust(pVals, 'BY')

    tmpFSC = np.copy(FSC)
    tmpFSC[tmpFSC > cutoff] = 1.0
    tmpFSC[tmpFSC <= cutoff] = 0.0
    tmpFSC = 1.0 - tmpFSC
    tmpFSC[0] = 0.0
    tmpFSC[1] = 0.0

    try:
        resolution = np.min(np.argwhere(tmpFSC)) - 1

        if resolution < 0:
            resolution = 0.0
        else:
            if res[int(resolution)] == 0.0:
                resolution = 0.0
            else:
                tmpFreq = res[int(
                    resolution)]  #+ (res[resolution+1] - res[resolution])/2.0;
                resolution = float(1.0 / tmpFreq)
    except:
        resolution = 2.0 * apix

    threshQVals = np.copy(qVals_FDR)
    threshQVals[threshQVals <= 0.01] = 0.0  #signal
    threshQVals[threshQVals > 0.01] = 1.0  #no signal

    try:
        resolution_FDR = np.min(np.argwhere(threshQVals)) - 1

        if resolution_FDR < 0:
            resolution_FDR = 0.0
        else:
            if res[int(resolution_FDR)] == 0.0:
                resolution_FDR = 0.0
            else:
                tmpFreq = res[int(
                    resolution_FDR
                )]  #+ (res[resolution_FDR + 1] - res[resolution_FDR]) / 2.0;
                resolution_FDR = float(1.0 / tmpFreq)
    except:
        resolution_FDR = 2.0 * apix

    if verbose:
        print('Resolution at an unmasked ' + repr(cutoff) + ' FSC threshold: ' +
              repr(round(resolution, 2)))
        print('Resolution at 1 % FDR-FSC: ' + repr(round(resolution_FDR, 2)) +
              ' Angstrom')
        #print('Resolution at 0.01 % FDR: ' + repr(round(resolution_FDR01, 2)) + ' Angstrom');
        #print('Resolution at 1 % FWER: ' + repr(round(resolution_FWER, 2)) + ' Angstrom');

    return res, FSC, percentCutoffs, pVals, qVals_FDR, resolution_FDR, tmpPermutedCorCoeffs
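
Stripped of the permutation test, significance thresholds and FDR control, the FSC itself is a normalized cross-correlation per resolution shell. A self-contained sketch follows; the helper name is illustrative, and correlationCoefficient in the package presumably computes the per-shell value in the same way.

import numpy as np

def fsc_sketch(half1, half2, apix):
    # Fourier shell correlation between two half maps, one value per shell
    fft1, fft2 = np.fft.rfftn(half1), np.fft.rfftn(half2)
    freqs = [np.fft.fftfreq(n) for n in half1.shape[:-1]]
    freqs.append(np.fft.rfftfreq(half1.shape[-1]))
    grids = np.meshgrid(*freqs, indexing='ij', sparse=True)
    radius = np.sqrt(sum(g ** 2 for g in grids))
    shells = np.fft.rfftfreq(half1.shape[0], 1.0)
    halfWidth = (shells[1] - shells[0]) / 2.0
    fsc = np.ones(shells.shape)
    for i, shell in enumerate(shells):
        select = ((shell - halfWidth) < radius) & (radius < (shell + halfWidth))
        a, b = fft1[select], fft2[select]
        denominator = np.sqrt(np.sum(np.abs(a) ** 2) * np.sum(np.abs(b) ** 2))
        if denominator > 0:
            fsc[i] = np.real(np.sum(a * np.conj(b))) / denominator
    return shells / float(apix), fsc
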
Example No. 10
def localFiltration(map, locResMap, apix, localVariance, windowSize, boxCoord,
                    ECDF):

    #**************************************************
    #**** function to perform a local filtration ******
    #****** according to the local resolution *********
    #**************************************************

    #some initialization
    mapSize = map.shape
    numX = mapSize[0]
    numY = mapSize[1]
    numZ = mapSize[2]

    mean = np.zeros((numX, numY, numZ))
    var = np.zeros((numX, numY, numZ))
    ECDFmap = np.ones((numX, numY, numZ))
    filteredMapData = np.zeros((numX, numY, numZ))

    #transform to numpy array
    locResMapData = np.copy(locResMap)

    #set all resolutions lower than 2.1 to 2.1
    #locResMapData[locResMapData > 2.5] = 2.5;

    locResMapData[locResMapData == 0.0] = 100.0
    locResMapData[locResMapData >= 100.0] = 100.0

    #transform to absolute frequency units (see http://sparx-em.org/sparxwiki/absolute_frequency_units)
    locResMapData = np.divide(apix, locResMapData)

    #round to 3 decimals
    locResMapData = np.around(locResMapData, 3)

    #set resolution search range, 3 decimals exact
    locResArray = np.arange(0, 0.5, 0.001)

    #set maximum resolution, important as ResMap is masking
    limRes = np.min(locResMapData)
    counter = 0
    numRes = len(locResArray)

    #get initial noise statistics
    initMapData = np.copy(map)
    initMean, initVar, _ = FDRutil.estimateNoiseFromMap(
        initMapData, windowSize, boxCoord)
    noiseMapData = np.random.normal(initMean, math.sqrt(initVar),
                                    (100, 100, 100))

    #do FFT of the respective map
    mapFFT = np.fft.rfftn(map)

    #get frequency map
    frequencyMap = FDRutil.calculate_frequency_map(map)

    # Initial call to print 0% progress
    #printProgressBar(counter, numRes, prefix = 'Progress:', suffix = 'Complete', bar_length = 50)
    print("Start local filtering. This might take a few minutes ...")

    counterRes = 0
    for tmpRes in locResArray:
        counterRes = counterRes + 1
        progress = counterRes / float(numRes)
        if counterRes % (int(numRes / 20.0)) == 0:
            output = "%.1f" % (progress * 100) + "% finished ..."
            print(output)

        #get indices of voxels with the current resolution
        indices = np.where(locResMapData == tmpRes)

        if (indices[0].size == 0):
            #this resolution is obviously not in the map, so skip
            counter = counter + 1
            continue
        elif math.fabs(tmpRes - limRes) < 0.0000001:
            xInd, yInd, zInd = indices[0], indices[1], indices[2]

            #do local filtration
            tmpFilteredMapData = FDRutil.lowPassFilter(mapFFT, frequencyMap,
                                                       tmpRes, map.shape)

            #set the filtered voxels
            filteredMapData[xInd, yInd, zInd] = tmpFilteredMapData[xInd, yInd,
                                                                   zInd]

        else:
            xInd, yInd, zInd = indices[0], indices[1], indices[2]
            #do local filtration
            tmpFilteredMapData = FDRutil.lowPassFilter(mapFFT, frequencyMap,
                                                       tmpRes, map.shape)
            #set the filtered voxels
            filteredMapData[xInd, yInd, zInd] = tmpFilteredMapData[xInd, yInd,
                                                                   zInd]
            if localVariance == True:
                #estimate and set noise statistic

                if ECDF == 1:
                    #if the ECDF shall be used, use it for the p-values
                    tmpECDF, sampleSort = FDRutil.estimateECDFFromMap(
                        tmpFilteredMapData, windowSize, boxCoord)
                    vecECDF = np.interp(tmpFilteredMapData[xInd, yInd, zInd],
                                        sampleSort,
                                        tmpECDF,
                                        left=0.0,
                                        right=1.0)
                    ECDFmap[xInd, yInd, zInd] = vecECDF
                else:
                    ECDFmap = 0

                tmpMean, tmpVar, _ = FDRutil.estimateNoiseFromMap(
                    tmpFilteredMapData, windowSize, boxCoord)
                mean[xInd, yInd, zInd] = tmpMean
                var[xInd, yInd, zInd] = tmpVar

    print("Local filtering finished ...")

    return filteredMapData, mean, var, ECDFmap
Example No. 11
def main():
    start = time.time()

    print('***************************************************')
    print('******* Significance analysis of FSC curves *******')
    print('***************************************************')

    # get command line input
    args = cmdl_parser.parse_args()

    #read the half maps
    halfMap1 = mrcfile.open(args.halfmap1, mode='r')
    halfMap2 = mrcfile.open(args.halfmap2, mode='r')

    halfMap1Data = np.copy(halfMap1.data)
    halfMap2Data = np.copy(halfMap2.data)

    #get size of map
    sizeMap = halfMap2Data.shape

    #set pixel size
    apix = float(halfMap1.voxel_size.x)
    if args.apix is not None:
        print(
            'Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'
            .format(args.apix, apix))
        apix = args.apix
    else:
        print(
            'Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'
            .format(apix))
        args.apix = apix

    # set output filename
    splitFilename = os.path.splitext(os.path.basename(args.halfmap1))
    print(splitFilename[0])
    outputFilename_LocRes = splitFilename[0] + "_localResolutions.mrc"
    outputFilename_PostProcessed = "postProcessed.mrc"
    outputFilename_PostProcessed_half1 = "postProcessed_half1.mrc"
    outputFilename_PostProcessed_half2 = "postProcessed_half2.mrc"
    outputFilename_averagedHalfmaps = splitFilename[0] + "_avg.mrc"

    #handle window size for local FSC
    if args.window_size is not None:
        wn = args.window_size
        wn = int(wn)
    else:
        wn = 20
        #default is 20 pixels

    #handle step size for local FSC
    if args.stepSize is None:
        stepSize = float(sizeMap[0] * sizeMap[1] * sizeMap[2]) / 300000.0
        stepSize = max(int(math.ceil(stepSize**(1.0 / 3.0))), 1)
    else:
        stepSize = int(args.stepSize)

    if not args.localResolutions:
        if args.numAsymUnits is not None:

            numAsymUnits = args.numAsymUnits
            print(
                'Using user provided number of asymmetric units, given as {:d}'
                .format(numAsymUnits))
        else:
            if args.symmetry is not None:

                numAsymUnits = FSCutil.getNumAsymUnits(args.symmetry)
                print('Using provided ' + args.symmetry +
                      ' symmetry. Number of asymmetric units: {:d}'.format(
                          numAsymUnits))
            else:
                numAsymUnits = 1
                print('Using C1 symmetry. Number of asymmetric units: {:d}'.
                      format(numAsymUnits))
    else:
        #if local resolutions are calculated, no symmetry correction needed
        print(
            "Using a step size of {:d} voxel. If you prefer another one, please specify with -step."
            .format(stepSize))
        print(
            'Calculating local resolutions. No symmetry correction necessary.')
        numAsymUnits = 1.0

    #make the mask
    print("Using a circular mask ...")
    maskData = FSCutil.makeCircularMask(
        halfMap1Data, (np.min(halfMap1Data.shape) / 2.0) - 4.0)
    #circular mask
    maskBFactor = FSCutil.makeCircularMask(
        halfMap1Data, (np.min(halfMap1Data.shape) / 4.0) - 4.0)
    #smaller circular mask for B-factor estimation

    #*******************************************
    #********** no local Resolutions ***********
    #*******************************************

    if not args.localResolutions:
        res, FSC, percentCutoffs, pValues, qValsFDR, resolution, _ = FSCutil.FSC(
            halfMap1Data, halfMap2Data, maskData, apix, 0.143, numAsymUnits,
            False, True, None, False)
        # write the FSC
        FSCutil.writeFSC(res, FSC, qValsFDR, pValues, resolution)

        if resolution < 8.0:

            #estimate b-factor and sharpen the map
            bFactor = FSCutil.estimateBfactor(
                0.5 * (halfMap1Data + halfMap2Data), resolution, apix,
                maskBFactor)
            #bFactor_half1 = FSCutil.estimateBfactor(halfMap1Data, resolution, apix, maskBFactor);
            #bFactor_half2 = FSCutil.estimateBfactor(halfMap2Data, resolution, apix, maskBFactor);

            if args.bFactor is not None:
                bFactor = args.bFactor
                print(
                    'Using a user-specified B-factor of {:.2f} for map sharpening'
                    .format(-bFactor))
            else:
                print('Using a B-factor of {:.2f} for map sharpening.'.format(
                    -bFactor))

            processedMap = FDRutil.sharpenMap(
                0.5 * (halfMap1Data + halfMap2Data), -bFactor, apix,
                resolution)

            #processed_halfMap1 = FDRutil.sharpenMap(halfMap1Data, -bFactor_half1, apix, resolution);
            #processed_halfMap2 = FDRutil.sharpenMap(halfMap2Data, -bFactor_half2, apix, resolution);

            #write the post-processed maps
            postProcMRC = mrcfile.new(outputFilename_PostProcessed,
                                      overwrite=True)
            postProc = np.float32(processedMap)
            postProcMRC.set_data(postProc)
            postProcMRC.voxel_size = apix
            postProcMRC.close()
            """
			#write the post-processed halfmaps
			postProcMRC = mrcfile.new(outputFilename_PostProcessed_half1, overwrite=True);
			postProc= np.float32(processed_halfMap1);
			postProcMRC.set_data(postProc);
			postProcMRC.voxel_size = apix;
			postProcMRC.close();

			postProcMRC = mrcfile.new(outputFilename_PostProcessed_half2, overwrite=True);
			postProc= np.float32(processed_halfMap2);
			postProcMRC.set_data(postProc);
			postProcMRC.voxel_size = apix;
			postProcMRC.close();
			"""

            output = "Saved sharpened and filtered map to: " + outputFilename_PostProcessed
            print(output)

    #*******************************************
    #********* calc local Resolutions **********
    #*******************************************
    else:

        FSCcutoff = 0.5

        #set mask for locations of permutations
        if args.mask is not None:
            maskPermuation = mrcfile.open(args.mask, mode='r')
            maskPermutationData = np.copy(maskPermuation.data)
        else:
            maskPermutationData = maskData

        localResMap = localResolutions.localResolutions(
            halfMap1Data, halfMap2Data, wn, stepSize, FSCcutoff, apix,
            numAsymUnits, maskData, maskPermutationData)

        # set lowest resolution if wished
        if args.lowRes is not None:
            lowRes = args.lowRes
            localResMap[localResMap > lowRes] = lowRes

        #write the local resolution map
        localResMapMRC = mrcfile.new(outputFilename_LocRes, overwrite=True)
        localResMap = np.float32(localResMap)
        localResMapMRC.set_data(localResMap)
        localResMapMRC.voxel_size = apix
        localResMapMRC.close()

        output = "Saved local resolutions map to: " + outputFilename_LocRes
        print(output)

    end = time.time()
    totalRuntime = end - start

    print("****** Summary ******")
    print("Runtime: %.2f" % totalRuntime)
Example No. 12
def calculateConfidenceMap(em_map, apix, noiseBox, testProc, ecdf, lowPassFilter_resolution, method, window_size, locResMap,
						   meanMap, varMap, fdr, modelMap, stepSize, windowSizeLocScale, mpi):

	#*********************************************
	#******* this function calc. confMaps ********
	#*********************************************

	# get boxCoordinates
	if noiseBox is None:
		boxCoord = 0;
	else:
		boxCoord = noiseBox;

	# set test procedure
	if testProc is not None:
		testProc = testProc;
	else:
		testProc = 'rightSided';

	# set ECDF
	if ecdf:
		ECDF = 1;
	else:
		ECDF = 0;

	sizeMap = em_map.shape;

	if lowPassFilter_resolution is not None:
		frequencyMap = FDRutil.calculate_frequency_map(em_map);
		providedRes = apix/float(lowPassFilter_resolution);
		em_map = FDRutil.lowPassFilter(np.fft.rfftn(em_map), frequencyMap, providedRes, em_map.shape);

	# handle FDR correction procedure
	if method is not None:
		method = method;
	else:
		# default is Benjamini-Yekutieli
		method = 'BY';

	if window_size is not None:
		wn = window_size;
		wn = int(wn);
		if wn < 20:
			print("Provided window size is quite small. Please think about potential inaccuracies of your noise estimates!");
	else:
		wn = max(int(0.05 * sizeMap[0]), 10);

	if windowSizeLocScale is not None:
		wn_locscale = windowSizeLocScale;
		if window_size is None:
			wn = int(wn_locscale);
	else:
		wn_locscale = None;

	if stepSize is None:
		stepSize = 5;

	# generate a circular Mask
	sphere_radius = (np.max(sizeMap) // 2);
	circularMaskData = mapUtil.makeCircularMask(np.copy(em_map), sphere_radius);

	# plot locations of noise estimation
	if modelMap is None:
		pp = mapUtil.makeDiagnosticPlot(em_map, wn, False, boxCoord);
		pp.savefig("diag_image.pdf");
		pp.close();
	else:
		pp = mapUtil.makeDiagnosticPlot(em_map, wn, True, boxCoord);
		pp.savefig("diag_image.pdf");
		pp.close();


	# estimate noise statistics
	if ((locResMap is None) & (modelMap is None)):  # if no local resolution map is given, don't do any filtration

		FDRutil.checkNormality(em_map, wn, boxCoord);
		mean, var, _ = FDRutil.estimateNoiseFromMap(em_map, wn, boxCoord);

		if varMap is not None:
			var = varMap;
		if meanMap is not None:
			mean = meanMap;

		if np.isscalar(mean) and np.isscalar(var):
			output = "Estimated noise statistics: mean: " + repr(mean) + " and variance: " + repr(var);
		else:
			output = "Using user provided noise statistics";
		print(output);

		locFiltMap = None;
		locScaleMap = None;

	elif (locResMap is not None) & (modelMap is None):  # do localFiltration and estimate statistics from this map

		FDRutil.checkNormality(em_map, wn, boxCoord);
		em_map, mean, var, ECDF = mapUtil.localFiltration(em_map, locResMap, apix, True, wn, boxCoord, ECDF);
		#locFiltMap = FDRutil.studentizeMap(em_map, mean, var);
		locFiltMap = em_map;
		locScaleMap = None;
	else:
		em_map, mean, var, ECDF = locscaleUtil.launch_amplitude_scaling(em_map, modelMap, apix, stepSize, wn_locscale, wn, method, locResMap, boxCoord, mpi, ECDF );
		#locScaleMap = FDRutil.studentizeMap(em_map, mean, var);
		locScaleMap = em_map;
		locFiltMap = None;

	# calculate the qMap
	if method == 'BH':
		qMap = FDRutil.calcQMap(em_map, mean, var, ECDF, wn, boxCoord, circularMaskData, 'BH', testProc);
		error = 'FDR';
	elif method == 'Hochberg':
		qMap = FDRutil.calcQMap(em_map, mean, var, ECDF, wn, boxCoord, circularMaskData, 'Hochberg', testProc);
		error = 'FWER';
	elif method == 'Holm':
		qMap = FDRutil.calcQMap(em_map, mean, var, ECDF, wn, boxCoord, circularMaskData, 'Holm', testProc);
		error = 'FWER';
	else:
		qMap = FDRutil.calcQMap(em_map, mean, var, ECDF, wn, boxCoord, circularMaskData, 'BY', testProc);
		error = 'FDR';

	#if local processing wished, write that out
	if locFiltMap is not None:
		em_map = locFiltMap;
	if locScaleMap is not None:
		em_map = locScaleMap;

	if ((locResMap is None) & (modelMap is None)):
		# threshold the qMap
		binMap1 = FDRutil.binarizeMap(qMap, 0.01);
		binMap001 = FDRutil.binarizeMap(qMap, 0.0001);

		# apply the thresholded qMapFDR to data
		maskedMap1 = np.multiply(binMap1, np.copy(em_map));
		minMapValue = np.min(maskedMap1[np.nonzero(maskedMap1)]);
		output = "Calculated map threshold: %.3f" %minMapValue + " at a " + error + " of " + repr(1) + "%.";
		print(output);

		# apply the thresholded qMapFWER to data
		maskedMap001 = np.multiply(binMap001, np.copy(em_map));
		minMapValue = np.min(maskedMap001[np.nonzero(maskedMap001)]);
		output = "Calculated map threshold: %.3f" %minMapValue + " at a " + error + " of " + repr(0.01) + "%.";
		print(output);

	# invert qMap for visualization tools
	confidenceMap = np.subtract(1.0, qMap);

	# apply lowpass-filtered mask to maps
	confidenceMap = np.multiply(confidenceMap, circularMaskData);


	return confidenceMap, locFiltMap, locScaleMap, mean, var;
Example No. 13
def threeDimensionalFSC(halfMap1, halfMap2, maskData, apix, cutoff,
                        numAsymUnits, samplingAzimuth, samplingElevation,
                        coneOpening):

    #***********************************************
    #***** function that calculates the 3D FSC *****
    #***********************************************

    maskCoeff = 0.7

    if maskData is not None:
        halfMap1 = halfMap1 * maskData
        halfMap2 = halfMap2 * maskData

    sizeMap = halfMap1.shape

    # calc frequency for each voxel
    freqi = np.fft.fftfreq(sizeMap[0], 1.0)
    freqj = np.fft.fftfreq(sizeMap[1], 1.0)
    freqk = np.fft.fftfreq(sizeMap[2], 1.0)

    sizeFFT = np.array([freqi.size, freqj.size, freqk.size])
    FFT = np.zeros(sizeFFT)

    freqMapi = np.copy(FFT)
    for j in range(sizeFFT[1]):
        for k in range(sizeFFT[2]):
            freqMapi[:, j, k] = freqi * freqi

    freqMapj = np.copy(FFT)
    for i in range(sizeFFT[0]):
        for k in range(sizeFFT[2]):
            freqMapj[i, :, k] = freqj * freqj

    freqMapk = np.copy(FFT)
    for i in range(sizeFFT[0]):
        for j in range(sizeFFT[1]):
            freqMapk[i, j, :] = freqk * freqk

    freqMap = np.sqrt(freqMapi + freqMapj + freqMapk)
    freqMap = freqMap / float(apix)

    #get Cartesian coordinates for each voxel of the FFT (used to compute angles below)
    xInd, yInd, zInd = makeIndexVolumes(halfMap1)

    # do fourier transforms
    try:
        import pyfftw
        import multiprocessing

        fftObject_half1 = pyfftw.builders.fftn(halfMap1)
        fftObject_half2 = pyfftw.builders.fftn(halfMap2)
        fft_half1 = fftObject_half1(halfMap1)
        fft_half2 = fftObject_half2(halfMap2)
    except:
        fft_half1 = np.fft.fftn(halfMap1)
        fft_half2 = np.fft.fftn(halfMap2)

    res = np.fft.rfftfreq(sizeMap[0], 1.0)
    res = res / float(apix)
    numRes = res.shape[0]

    resSpacing = (res[1] - res[0]) / 2.0

    #initialize data
    FSC = np.ones((res.shape[0]))
    pVals = np.zeros((res.shape[0]))
    directionalResolutions = []
    permutedCorCoeffs = [[]]
    phiArray = []
    thetaArray = []
    angleSpacing = (coneOpening / 360.0) * 2 * np.pi
    #cone opening converted from degrees to radians
    directionalResolutionMap = np.zeros((samplingAzimuth, samplingElevation))

    phiAngles = np.linspace(-np.pi, np.pi, samplingAzimuth)
    thetaAngles = np.linspace(0, (np.pi / 2.0), samplingElevation)

    #phi is the azimuth (-pi, pi), theta the elevation (0, pi/2), sampled at samplingAzimuth x samplingElevation points
    for phiIndex in range(samplingAzimuth):

        phi = phiAngles[phiIndex]

        for thetaIndex in range(samplingElevation):

            theta = thetaAngles[thetaIndex]

            #get point with the specified angles and radius 1
            x = np.cos(theta) * np.cos(phi)
            y = np.cos(theta) * np.sin(phi)
            z = np.sin(theta)

            #get angle to (x,y,z) for all points in the map
            with np.errstate(divide='ignore', invalid='ignore'):
                angles = np.arccos(
                    np.divide(x * xInd + y * yInd + z * zInd,
                              (np.sqrt(xInd**2 + yInd**2 + zInd**2) *
                               np.sqrt(x**2 + y**2 + z**2))))
                angles[~np.isfinite(angles)] = 0

            for i in range(res.shape[0]):

                tmpRes = res[i]

                currentIndices = (((tmpRes - resSpacing) < freqMap) &
                                  (freqMap < (tmpRes + resSpacing)) &
                                  (np.absolute(angles) < angleSpacing))

                resShell_half1 = fft_half1[currentIndices]
                resShell_half2 = fft_half2[currentIndices]

                if (resShell_half1.size < 10) and (thetaIndex == 0) and (phiIndex == 0):
                    #too few samples in this shell, accept it

                    permutedCorCoeffs.append([])
                    pVals[i] = 0.0
                    FSC[i] = 1.0

                else:
                    FSC[i] = correlationCoefficient(resShell_half1,
                                                    resShell_half2)

                    if (thetaIndex == 0) and (phiIndex == 0):
                        pVals[i], _, _, _, tmpPermutedCorCoeffs = permutationTest(
                            resShell_half1, resShell_half2, numAsymUnits, maskCoeff)
                        permutedCorCoeffs.append(np.copy(tmpPermutedCorCoeffs))
                    else:
                        tmpCorCoeffs = permutedCorCoeffs[i]
                        if len(tmpCorCoeffs) == 0:  # no permuted coefficients stored for this shell
                            pVals[i] = 0.0
                        else:
                            pVals[i] = tmpCorCoeffs[tmpCorCoeffs > FSC[i]].shape[0] / float(tmpCorCoeffs.shape[0])

            # do FDR control of p-Values
            qVals_FDR = FDRutil.pAdjust(pVals, 'BY')

            tmpFSC = np.copy(FSC)
            tmpFSC[tmpFSC > cutoff] = 1.0
            tmpFSC[tmpFSC <= cutoff] = 0.0
            tmpFSC = 1.0 - tmpFSC
            tmpFSC[0] = 0.0
            tmpFSC[1] = 0.0

            #print(tmpFSC);

            try:
                resolution = np.min(np.argwhere(tmpFSC)) - 1

                if resolution < 0:
                    resolution = 0.0
                else:
                    if res[int(resolution)] == 0.0:
                        resolution = 0.0
                    else:
                        tmpFreq = res[int(resolution)]  # + (res[resolution+1] - res[resolution])/2.0;
                        resolution = float(1.0 / tmpFreq)
            except:
                resolution = 2.0 * apix

            threshQVals = np.copy(qVals_FDR)
            threshQVals[threshQVals <= 0.01] = 0.0  # signal
            threshQVals[threshQVals > 0.01] = 1.0  # no signal

            try:
                resolution_FDR = np.min(np.argwhere(threshQVals)) - 1

                if resolution_FDR < 0:
                    resolution_FDR = 0.0
                else:
                    if res[int(resolution_FDR)] == 0.0:
                        resolution_FDR = 0.0
                    else:
                        tmpFreq = res[int(resolution_FDR)]  # + (res[resolution_FDR + 1] - res[resolution_FDR]) / 2.0;
                        resolution_FDR = float(1.0 / tmpFreq)
            except:
                resolution_FDR = 2.0 * apix

            #append the resolutions
            directionalResolutionMap[phiIndex, thetaIndex] = resolution_FDR
            directionalResolutions.append(resolution_FDR)  # list.append keeps the value (np.append returned a discarded copy)
            phiArray.append(phi)
            thetaArray.append(theta)

        # print progress
        progress = (phiIndex + 1) / float(samplingAzimuth)
        if phiIndex % (int(math.ceil(samplingAzimuth / 20.0))) == 0:
            output = "%.1f" % (progress * 100) + "% finished ..."
            print(output)

    # *************************************
    # ********** do interpolation *********
    # *************************************
    print("Interpolating directional Resolutions ...")
    x = np.linspace(1, 10, samplingAzimuth)
    y = np.linspace(1, 10, samplingElevation)
    myInterpolatingFunction = RegularGridInterpolator((x, y),
                                                      directionalResolutionMap,
                                                      method='linear')
    xNew = np.linspace(1, 10, 15 * samplingAzimuth)
    yNew = np.linspace(1, 10, 15 * samplingElevation)
    xInd, yInd = np.meshgrid(xNew, yNew, indexing='ij', sparse=True)
    directionalResolutionMap = myInterpolatingFunction((xInd, yInd))

    dirResMap = makeDirResVolumes(halfMap1, directionalResolutionMap)

    return phiArray, thetaArray, directionalResolutions, directionalResolutionMap, dirResMap
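
A hedged usage sketch for threeDimensionalFSC: the file names, the mask helper and the parameter values below are illustrative assumptions only; 0.143 is the usual half-map FSC cutoff, and coneOpening is given in degrees as in the code above.

import numpy as np
import mrcfile

# hypothetical input files; FSCutil is assumed to be importable from this package
half1 = np.copy(mrcfile.open("half_map1.mrc", mode='r').data)
half2 = np.copy(mrcfile.open("half_map2.mrc", mode='r').data)
mask = FSCutil.makeCircularMask(half1, (np.min(half1.shape) / 2.0) - 4.0)

phiArray, thetaArray, dirRes, dirResMap2D, dirResVol = threeDimensionalFSC(
    half1, half2, mask, apix=1.0, cutoff=0.143, numAsymUnits=1,
    samplingAzimuth=20, samplingElevation=10, coneOpening=20.0)
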
Exemplo n.º 14
0
def calculate_scaled_map(emmap, modmap, mask, wn, wn_locscale, apix, locFilt,
                         locResMap, boxCoord, ecdfBool, stepSize):

    sizeMap = emmap.shape
    sharpened_map = np.zeros(sizeMap)
    sharpened_mean_vals = np.zeros(sizeMap)
    sharpened_var_vals = np.zeros(sizeMap)
    sharpened_ecdf_vals = np.zeros(sizeMap)
    central_pix = int(round(wn_locscale / 2.0))
    center = np.array([0.5 * sizeMap[0], 0.5 * sizeMap[1], 0.5 * sizeMap[2]])

    #get the background noise sample
    if boxCoord == 0:
        noiseMap = emmap[int(center[0] - 0.5 * wn):(int(center[0] - 0.5 * wn) + wn),
                         int(0.02 * wn + wn_locscale / 2.0):(int(0.02 * wn + wn_locscale / 2.0) + wn),
                         int(center[2] - 0.5 * wn):int((center[2] - 0.5 * wn) + wn)]
    else:
        noiseMap = emmap[int(boxCoord[0] - 0.5 * wn + wn_locscale / 2.0):(int(boxCoord[0] - 0.5 * wn + wn_locscale / 2.0) + wn),
                         int(boxCoord[1] - 0.5 * wn + wn_locscale / 2.0):(int(boxCoord[1] - 0.5 * wn + wn_locscale / 2.0) + wn),
                         int(boxCoord[2] - 0.5 * wn + wn_locscale / 2.0):int((boxCoord[2] - 0.5 * wn + wn_locscale / 2.0) + wn)]

    #prepare noise map for scaling
    frequencyMap_noise = FDRutil.calculate_frequency_map(noiseMap)
    noiseMapFFT = np.fft.rfftn(noiseMap, norm='ortho')
    noise_profile, frequencies_noise = compute_radial_profile(
        noiseMapFFT, frequencyMap_noise)

    #prepare windows of particle for scaling
    frequencyMap_mapWindow = FDRutil.calculate_frequency_map(
        np.zeros((wn_locscale, wn_locscale, wn_locscale)))

    numSteps = len(range(0, sizeMap[0] - int(wn_locscale), stepSize)) * len(
        range(0, sizeMap[1] - int(wn_locscale), stepSize)) * len(
            range(0, sizeMap[2] - int(wn_locscale), stepSize))
    print("Sart LocScale. This might take a minute ...")
    counterSteps = 0
    for k in range(0, sizeMap[0] - int(wn_locscale), stepSize):
        for j in range(0, sizeMap[1] - int(wn_locscale), stepSize):
            for i in range(0, sizeMap[2] - int(wn_locscale), stepSize):

                #print progress
                counterSteps = counterSteps + 1
                progress = counterSteps / float(numSteps)
                if counterSteps % max(int(numSteps / 20.0), 1) == 0:
                    output = "%.1f" % (progress * 100) + "% finished ..."
                    print(output)

                #crop windows
                emmap_wn = emmap[k:k + wn_locscale, j:j + wn_locscale,
                                 i:i + wn_locscale]
                modmap_wn = modmap[k:k + wn_locscale, j:j + wn_locscale,
                                   i:i + wn_locscale]

                #do sharpening of the sliding window
                emmap_wn_FFT = np.fft.rfftn(np.copy(emmap_wn), norm='ortho')
                modmap_wn_FFT = np.fft.rfftn(np.copy(modmap_wn), norm='ortho')
                em_profile, frequencies_map = compute_radial_profile(
                    emmap_wn_FFT, frequencyMap_mapWindow)
                mod_profile, _ = compute_radial_profile(
                    modmap_wn_FFT, frequencyMap_mapWindow)
                scale_factors = compute_scale_factors(em_profile, mod_profile)
                map_b_sharpened, map_b_sharpened_FFT = set_radial_profile(
                    emmap_wn_FFT, scale_factors, frequencies_map,
                    frequencyMap_mapWindow, emmap_wn.shape)

                #scale noise window with the interpolated scaling factors
                mapNoise_sharpened, mapNoise_sharpened_FFT = set_radial_profile(
                    np.copy(noiseMapFFT), scale_factors, frequencies_map,
                    frequencyMap_noise, noiseMap.shape)

                #local filtering routines
                if locFilt == True:
                    tmpRes = round(apix / locResMap[k, j, i], 3)

                    mapNoise_sharpened = FDRutil.lowPassFilter(
                        mapNoise_sharpened_FFT, frequencyMap_noise, tmpRes,
                        noiseMap.shape)
                    map_b_sharpened = FDRutil.lowPassFilter(
                        map_b_sharpened_FFT, frequencyMap_mapWindow, tmpRes,
                        emmap_wn.shape)

                    #calculate noise statistics
                    map_noise_sharpened_data = mapNoise_sharpened

                    if ecdfBool:
                        tmpECDF, sampleSort = FDRutil.estimateECDFFromMap(
                            map_noise_sharpened_data, -1, -1)
                        ecdf = np.interp(map_b_sharpened[central_pix,
                                                         central_pix,
                                                         central_pix],
                                         sampleSort,
                                         tmpECDF,
                                         left=0.0,
                                         right=1.0)
                    else:
                        ecdf = 0

                    mean = np.mean(map_noise_sharpened_data)
                    var = np.var(map_noise_sharpened_data)

                    if var < 0.5:
                        var = 0.5
                        mean = 0.0
                    if tmpRes == round(apix / 100.0, 3):
                        mean = 0.0
                        var = 0.0
                        ecdf = 0
                else:
                    #calculate noise statistics
                    map_noise_sharpened_data = np.copy(mapNoise_sharpened)

                    if ecdfBool:
                        tmpECDF, sampleSort = FDRutil.estimateECDFFromMap(
                            map_noise_sharpened_data, -1, -1)
                        ecdf = np.interp(map_b_sharpened,
                                         sampleSort,
                                         tmpECDF,
                                         left=0.0,
                                         right=1.0)
                    else:
                        ecdf = 0

                    mean = np.mean(map_noise_sharpened_data)
                    var = np.var(map_noise_sharpened_data)
                    if var < 0.5:
                        var = 0.5
                        mean = 0.0

                #put values back into the original maps
                halfStep = int((wn_locscale / 2.0) - (stepSize / 2.0))
                sharpened_map[k + halfStep:k + halfStep + stepSize,
                              j + halfStep:j + halfStep + stepSize,
                              i + halfStep:i + halfStep + stepSize] = np.copy(
                                  map_b_sharpened[halfStep:halfStep + stepSize,
                                                  halfStep:halfStep + stepSize,
                                                  halfStep:halfStep +
                                                  stepSize])
                sharpened_mean_vals[k + halfStep:k + halfStep + stepSize,
                                    j + halfStep:j + halfStep + stepSize, i +
                                    halfStep:i + halfStep + stepSize] = mean
                sharpened_var_vals[k + halfStep:k + halfStep + stepSize,
                                   j + halfStep:j + halfStep + stepSize,
                                   i + halfStep:i + halfStep + stepSize] = var
                if ecdfBool:
                    sharpened_ecdf_vals[k + halfStep:k + halfStep + stepSize,
                                        j + halfStep:j + halfStep + stepSize,
                                        i + halfStep:i + halfStep + stepSize] = (
                                            ecdf[halfStep:halfStep + stepSize,
                                                 halfStep:halfStep + stepSize,
                                                 halfStep:halfStep + stepSize])
                else:
                    sharpened_ecdf_vals[k + halfStep:k + halfStep + stepSize,
                                        j + halfStep:j + halfStep + stepSize,
                                        i + halfStep:i + halfStep + stepSize] = 0.0

    return sharpened_map, sharpened_mean_vals, sharpened_var_vals, sharpened_ecdf_vals
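
For orientation, a conceptual sketch of the per-shell amplitude scaling that the window loop above relies on: average the Fourier amplitudes in resolution shells for the experimental and the model window, then scale each shell of the experimental window onto the model profile. This is not the package's compute_radial_profile/compute_scale_factors code, only an illustration of the idea.

import numpy as np

def radial_profile_sketch(mapFFT, freqMap, numShells=20):
    # mean Fourier amplitude in equally spaced frequency shells
    edges = np.linspace(0.0, freqMap.max(), numShells + 1)
    profile = np.zeros(numShells)
    for s in range(numShells):
        shell = (freqMap >= edges[s]) & (freqMap < edges[s + 1])
        if np.any(shell):
            profile[s] = np.mean(np.abs(mapFFT[shell]))
    return profile, edges

def scale_factors_sketch(em_profile, mod_profile, eps=1e-12):
    # per-shell factors that bring the experimental amplitudes onto the model amplitudes
    return mod_profile / (em_profile + eps)
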
Exemplo n.º 15
0
	def runFSC(self):

		#show message box before starting
		msg = QMessageBox();
		msg.setIcon(QMessageBox.Information);
		msg.setText("Start the job with OK!")
		msg.setInformativeText("GUI will be locked until the job is finished. See terminal printouts for progress ...");
		msg.setWindowTitle("Start job");
		msg.setStandardButtons( QMessageBox.Cancel| QMessageBox.Ok);
		result = msg.exec_();

		if result == QMessageBox.Cancel:
			return;


		start = time.time();

		print('***************************************************');
		print('**********  Sharpening of cryo-EM maps  ***********');
		print('***************************************************');

		#read the half maps
		try:
			half_map1 = mrcfile.open(self.fileLine_halfMap1.text(), mode='r');
			half_map2 = mrcfile.open(self.fileLine_halfMap2.text(), mode='r');
		except:
			msg = QMessageBox();
			msg.setIcon(QMessageBox.Information);
			msg.setText("Cannot read file ...");
			msg.setWindowTitle("Error");
			msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel);
			retval = msg.exec_();
			return;

		halfMap1Data = np.copy(half_map1.data);
		halfMap2Data = np.copy(half_map2.data);
		sizeMap = halfMap1Data.shape;

		# set working directory and output filename
		path = self.fileLine_output.text();
		if path == '':
			path = os.path.dirname(self.fileLine_halfMap1.text());
		os.chdir(path);
		splitFilename = os.path.splitext(os.path.basename(self.fileLine_halfMap1.text()));
		outputFilename_PostProcessed =  "postProcessed.mrc";
		outputFilename_PostProcessed_half1 = "postProcessed_half1.mrc";
		outputFilename_PostProcessed_half2 = "postProcessed_half2.mrc";

		# make the mask
		maskData = FSCutil.makeCircularMask(halfMap1Data, (np.min(halfMap1Data.shape) / 2.0) - 4.0);  # circular mask
		maskBFactor = FSCutil.makeCircularMask(halfMap1Data, (
					np.min(halfMap1Data.shape) / 4.0) - 4.0);  # smaller circular mask for B-factor estimation


		#**************************************
		#********* get pixel size *************
		#**************************************
		apixMap = float(half_map1.voxel_size.x);

		try:
			apix = float(self.apix.text());
		except:
			apix = None;

		if apix is not None:
			print('Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'.format(apix, apixMap));
		else:
			print(
				'Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'.format(
					apixMap));
			apix = apixMap;

		#**********************************************
		#***************** get bfactor ****************
		#**********************************************
		try:
			bFactorInput = float(self.bFactor.text());
		except:
			bFactorInput = None;

		#**********************************************
		#*************** get resolution ***************
		#**********************************************
		try:
			resolution = float(self.resolution.text());
		except:
			msg = QMessageBox();
			msg.setIcon(QMessageBox.Information);
			msg.setText("No resolution specified ...");
			msg.setWindowTitle("Error");
			msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel);
			retval = msg.exec_();
			return;


		if (resolution > 8.0) and (bFactorInput is None):
			msg = QMessageBox();
			msg.setIcon(QMessageBox.Information);
			msg.setText("Automated B-factor estimation is unstable for low-resolution maps. Please specify a B-factor!");
			msg.setWindowTitle("Error");
			msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel);
			retval = msg.exec_();
			return;


		if bFactorInput is not None:
			bFactor = bFactorInput;
			bFactor_half1 = bFactorInput;
			bFactor_half2 = bFactorInput;
			print('Using a user-specified B-factor of {:.2f} for map sharpening'.format(-bFactor));
		else:
			# estimate b-factor and sharpen the map
			bFactor = FSCutil.estimateBfactor(0.5 * (halfMap1Data + halfMap2Data), resolution, apix, maskBFactor);
			print('Using a B-factor of {:.2f} for map sharpening.'.format(-bFactor));

			#bFactor_half1 = FSCutil.estimateBfactor(halfMap1Data, resolution, apix, maskBFactor);
			#bFactor_half2 = FSCutil.estimateBfactor(halfMap2Data, resolution, apix, maskBFactor);

			#print("B-factor of halfmap 1: {:.2f}".format(bFactor_half1));
			#print("B-factor of halfmap 2: {:.2f}".format(bFactor_half2));

		processedMap = FDRutil.sharpenMap(0.5 * (halfMap1Data + halfMap2Data), -bFactor, apix, resolution);
		#processed_halfMap1 = FDRutil.sharpenMap(halfMap1Data, -bFactor_half1, apix, resolution);
		#processed_halfMap2 = FDRutil.sharpenMap(halfMap2Data, -bFactor_half2, apix, resolution);

		# write the post-processed map
		postProcMRC = mrcfile.new(outputFilename_PostProcessed, overwrite=True);
		postProc = np.float32(processedMap);
		postProcMRC.set_data(postProc);
		postProcMRC.voxel_size = apix;
		postProcMRC.close();

		"""
		# write the post-processed halfmaps
		postProcMRC = mrcfile.new(outputFilename_PostProcessed_half1, overwrite=True);
		postProc = np.float32(processed_halfMap1);
		postProcMRC.set_data(postProc);
		postProcMRC.voxel_size = apix;
		postProcMRC.close();

		postProcMRC = mrcfile.new(outputFilename_PostProcessed_half2, overwrite=True);
		postProc = np.float32(processed_halfMap2);
		postProcMRC.set_data(postProc);
		postProcMRC.voxel_size = apix;
		postProcMRC.close();
		"""

		output = "Saved sharpened and filtered map to: " + outputFilename_PostProcessed;
		print(output);

		end = time.time();
		totalRuntime = end - start;

		print("****** Summary ******");
		print("Runtime: %.2f" % totalRuntime);

		self.showMessageBox(resolution, bFactor);
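
The sharpening step above delegates to FDRutil.sharpenMap(average map, -bFactor, apix, resolution). As a sketch of the usual convention such a routine follows (multiply the Fourier coefficients by exp(-B/4 * s^2) and low-pass filter at the FSC resolution), and not code taken from FDRutil:

import numpy as np

def sharpen_and_lowpass_sketch(volume, bFactor, apix, resolution):
    # negative bFactor boosts high frequencies (sharpening), positive dampens them
    volFFT = np.fft.rfftn(volume)
    freq = np.sqrt(np.add.outer(np.add.outer(
        np.fft.fftfreq(volume.shape[0], apix) ** 2,
        np.fft.fftfreq(volume.shape[1], apix) ** 2),
        np.fft.rfftfreq(volume.shape[2], apix) ** 2))
    volFFT = volFFT * np.exp(-(bFactor / 4.0) * freq ** 2)
    volFFT[freq > (1.0 / resolution)] = 0.0  # hard low-pass at the resolution cutoff
    return np.fft.irfftn(volFFT, s=volume.shape)
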
Exemplo n.º 16
0
def launch_amplitude_scaling(em_map, model_map, apix, stepSize, wn_locscale,
                             wn, method, locResMap, noiseBox, mpi, ecdf):

    startTime = time.time()
    emmap, modmap, mask, wn, wn_locscale, window_bleed_and_pad, method, locFilt, locResMap, boxCoord = prepare_mask_and_maps_for_scaling(
        em_map, model_map, apix, wn_locscale, wn, method, locResMap, noiseBox)
    meanNoise, varNoise, sample = FDRutil.estimateNoiseFromMap(
        emmap, wn, boxCoord)

    if not mpi:
        stepSize = int(stepSize)
        if stepSize == 1:
            LocScaleVol, meanVol, varVol, ecdfVol = run_window_function_including_scaling(
                emmap, modmap, mask, wn, wn_locscale, apix, locFilt, locResMap,
                boxCoord, ecdf)
        elif stepSize <= 0:
            print(
                "Invalid step size parameter. It has to be greater than 0! Quit program ..."
            )
            return
        else:
            LocScaleVol, meanVol, varVol, ecdfVol = calculate_scaled_map(
                emmap, modmap, mask, wn, wn_locscale, apix, locFilt, locResMap,
                boxCoord, ecdf, stepSize)

        print("Local amplitude scaling finished ...")

        LocScaleVol = mask * LocScaleVol

        if not ecdf:
            ecdfVol = 0
        else:
            ecdfVol = write_out_final_volume_window_back_if_required(
                wn_locscale, window_bleed_and_pad, ecdfVol)

        LocScaleVol = write_out_final_volume_window_back_if_required(
            wn_locscale, window_bleed_and_pad, LocScaleVol)
        meanVol = write_out_final_volume_window_back_if_required(
            wn_locscale, window_bleed_and_pad, meanVol)
        varVol = write_out_final_volume_window_back_if_required(
            wn_locscale, window_bleed_and_pad, varVol)

        return LocScaleVol, meanVol, varVol, ecdfVol

        #qVol = calcQMap(LocScaleVol, meanVol, varVol, ecdfVol, 0, 0, mask, method, testProc);
        #qVol = np.subtract(np.ones(qVol.shape), qVol);

        #write the volumes
        #LocScaleVol = write_out_final_volume_window_back_if_required(args, wn_locscale, window_bleed_and_pad, LocScaleVol, splitFilename[0] + '_scaled.mrc');
        #qVol = write_out_final_volume_window_back_if_required(args, wn_locscale, window_bleed_and_pad, qVol, splitFilename[0] + '_confidenceMap.mrc');

        #endTime = time.time()
        #runTime = endTime - startTime

    elif mpi:
        LocScaleVol, meanVol, varVol, ecdfVol, rank = run_window_function_including_scaling_mpi(
            emmap, modmap, mask, wn, wn_locscale, apix, locFilt, locResMap,
            boxCoord, ecdf)
        if rank == 0:
            print("Local amplitude scaling finished ...")

            if not ecdf:
                ecdfVol = 0
            else:
                ecdfVol = write_out_final_volume_window_back_if_required(
                    wn_locscale, window_bleed_and_pad, ecdfVol)

            LocScaleVol = write_out_final_volume_window_back_if_required(
                wn_locscale, window_bleed_and_pad, LocScaleVol)
            meanVol = write_out_final_volume_window_back_if_required(
                wn_locscale, window_bleed_and_pad, meanVol)
            varVol = write_out_final_volume_window_back_if_required(
                wn_locscale, window_bleed_and_pad, varVol)

            return LocScaleVol, meanVol, varVol, ecdfVol
Exemplo n.º 17
0
def main():

	start = time.time();

	#get command line input
	args = cmdl_parser.parse_args();

	#no amplitude scaling will be done
	print('************************************************');
	print('******* Significance analysis of EM-Maps *******');
	print('************************************************');

	#load the maps
	if args.halfmap2 is not None:
		if args.em_map is None:
			print("One half map missing! Exit ...")
			sys.exit();
		else:
			#load the maps
			filename = args.em_map;
			map1 = mrcfile.open(args.em_map, mode='r');
			apix = float(map1.voxel_size.x);
			halfMapData1 = np.copy(map1.data);
			
			map2 = mrcfile.open(args.halfmap2, mode='r');
			halfMapData2 = np.copy(map2.data);

			mapData = (halfMapData1 + halfMapData2)*0.5;
			halfMapData1 = 0;
			halfMapData2 = 0;
                                
	else:
		#load single map
		filename = args.em_map;
		map = mrcfile.open(filename, mode='r');
		apix = float(map.voxel_size.x);
		mapData = np.copy(map.data);
			
	if args.apix is not None:
		print('Pixel size set to {:.3f} Angstroem. (Pixel size encoded in map: {:.3f})'.format(args.apix, apix));
		apix = args.apix;
	else:
		print('Pixel size was read as {:.3f} Angstroem. If this is incorrect, please specify with -p pixelSize'.format(apix));
		args.apix = apix;
			
	#set output filename
	if args.outputFilename is not None:
		splitFilename = os.path.splitext(os.path.basename(args.outputFilename));
	else:
		splitFilename = os.path.splitext(os.path.basename(filename));

	#if varianceMap is given, use it
	if args.varianceMap is not None:
		varMap = mrcfile.open(args.varianceMap, mode='r');
		varMapData = np.copy(varMap.data);
	else:
		varMapData = None;
			
	#if meanMap is given, use it
	if args.meanMap is not None:
		meanMap = mrcfile.open(args.meanMap, mode='r');
		meanMapData = np.copy(meanMap.data);
	else:
		meanMapData = None;

	#if local resolutions are given, use them
	if args.locResMap is not None:
		locResMap = mrcfile.open(args.locResMap, mode='r');
		locResMapData = np.copy(locResMap.data);
	else:
		locResMapData = None;

	#get LocScale input
	if args.model_map is not None:
		modelMap = mrcfile.open(args.model_map, mode='r');
		modelMapData = np.copy(modelMap.data);
	else:
		modelMapData = None;
		
	if args.stepSize is not None:
		stepSize = args.stepSize;
	else:
		stepSize = None;
		
	if args.window_size_locscale is not None:
		windowSizeLocScale = args.window_size_locscale;
	else:
		windowSizeLocScale = None;

	if args.mpi:
		mpi = True;
	else:
		mpi = False;

	if (args.stepSize is not None) & (args.window_size_locscale is not None):
		if args.stepSize > args.window_size_locscale:
			print("Step Size cannot be bigger than the window_size. Job is killed ...")
			return;

	#run the actual analysis
	confidenceMap, locFiltMap, locScaleMap, binMap, maskedMap = confidenceMapMain.calculateConfidenceMap(mapData, apix, args.noiseBox, args.testProc, args.ecdf, args.lowPassFilter, args.method, args.window_size, locResMapData, meanMapData, varMapData, args.fdr, modelMapData, stepSize, windowSizeLocScale, mpi);
			
	if locFiltMap is not None:
		locFiltMapMRC = mrcfile.new(splitFilename[0] + '_locFilt.mrc', overwrite=True);
		locFiltMap = np.float32(locFiltMap);
		locFiltMapMRC.set_data(locFiltMap);
		locFiltMapMRC.voxel_size = apix;
		locFiltMapMRC.close();

	if binMap is not None:
		binMapMRC = mrcfile.new(splitFilename[0] + '_FDR' + str(args.fdr) + '_binMap.mrc', overwrite=True);
		binMap = np.float32(binMap);
		binMapMRC.set_data(binMap);
		binMapMRC.voxel_size = apix;
		binMapMRC.close();

	if maskedMap is not None:
		maskedMapMRC = mrcfile.new(splitFilename[0] + '_FDR'+ str(args.fdr) + '_maskedMap.mrc', overwrite=True);
		maskedMap = np.float32(maskedMap);
		maskedMapMRC.set_data(maskedMap);
		maskedMapMRC.voxel_size = apix;
		maskedMapMRC.close();
		
	if locScaleMap is not None:		
		locScaleMapMRC = mrcfile.new(splitFilename[0] + '_scaled.mrc', overwrite=True);
		locScaleMap = np.float32(locScaleMap);
		locScaleMapMRC.set_data(locScaleMap);
		locScaleMapMRC.voxel_size = apix;
		locScaleMapMRC.close();

	#write the confidence Maps
	confidenceMapMRC = mrcfile.new(splitFilename[0] + '_confidenceMap.mrc', overwrite=True);
	confidenceMap = np.float32(confidenceMap);
	confidenceMapMRC.set_data(confidenceMap);
	confidenceMapMRC.voxel_size = apix;
	confidenceMapMRC.close();

	end = time.time();
	totalRuntime = end -start;

	FDRutil.printSummary(args, totalRuntime);
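
For completeness, a minimal programmatic sketch that mirrors main() above. Only the argument order of calculateConfidenceMap is taken from the call in main(); the file name and the concrete values are assumptions (in practice they come from the cmdl_parser defaults).

import numpy as np
import mrcfile

emMap = mrcfile.open("map.mrc", mode='r')  # hypothetical input file
mapData = np.copy(emMap.data)
apix = float(emMap.voxel_size.x)

confidenceMap, locFiltMap, locScaleMap, binMap, maskedMap = confidenceMapMain.calculateConfidenceMap(
    mapData, apix,
    0,       # noiseBox: 0 -> use the default noise region
    None,    # testProc (assumed default)
    False,   # ecdf
    None,    # lowPassFilter
    'BY',    # method: falls back to Benjamini-Yekutieli, see the qMap branch above
    None,    # window_size
    None,    # locResMap
    None,    # meanMap
    None,    # varianceMap
    0.01,    # fdr
    None,    # modelMap
    None,    # stepSize
    None,    # windowSizeLocScale
    False)   # mpi

confidenceMapMRC = mrcfile.new("confidenceMap.mrc", overwrite=True)
confidenceMapMRC.set_data(np.float32(confidenceMap))
confidenceMapMRC.voxel_size = apix
confidenceMapMRC.close()
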