def writeGVE(spotsArray, fileroot, **kwargs):
    """
    write Fable gve file from Spots object

    fileroot is the root string used to write the gve and ini files

    Outputs:
    No return value, but writes the following files:

        <fileroot>.gve
        <fileroot>_grainSpotter.ini (points to --> <fileroot>_grainSpotter.log)

    Keyword arguments:
    Mainly for GrainSpotter .ini file, but some are needed for gve files

    keyword        default              definitions
    -----------------------------------------------------------------------------------------
    'sgNum':       <225>
    'phaseID':     <None>
    'cellString':  <F>
    'omeRange':    <-60, 60, 120, 240>  the oscillation range(s) **currently pulls from spots
    'deltaOme':    <0.25, 0.25>         the oscillation delta(s) **currently pulls from spots
    'minMeas':     <24>
    'minCompl':    <0.7>
    'minUniqn':    <0.5>
    'uncertainty': <[0.10, 0.25, .50]>  the min [tTh, eta, ome] uncertainties in degrees
    'eulStep':     <2>
    'nSigmas':     <2>
    'minFracG':    <0.90>
    'nTrials':     <100000>
    'positionfit': <True>

    Notes:
    *) The omeRange is currently pulled from the spotsArray input; the kwarg
       has no effect as of now.  Will change this to 'override' the spots
       info if the user, say, wants to pare down the range.

    *) There is no etaRange argument yet, but presumably GrainSpotter knows
       how to deal with this.  Pending feature...
    """
    # NOTE(review): this module defines writeGVE twice with identical
    # behavior; the later definition shadows this one at import time.

    # check on fileroot
    assert isinstance(fileroot, str)

    # keyword argument defaults
    # NOTE(review): the docstring advertises cellString default <F>, but the
    # code default below is 'P' -- confirm which is intended.
    phaseID     = None
    sgNum       = 225
    cellString  = 'P'
    omeRange    = num.r_[-60, 60]   # in DEGREES
    deltaOme    = 0.25              # in DEGREES
    minMeas     = 24
    minCompl    = 0.7
    minUniqn    = 0.5
    uncertainty = [0.10, 0.25, .50] # in DEGREES
    eulStep     = 2                 # in DEGREES
    nSigmas     = 2
    minFracG    = 0.90
    numTrials   = 100000
    positionFit = True

    # kwargs dispatch; unrecognized keywords raise
    # (indexing kwargs.keys() like a list is Python-2-only behavior)
    kwarglen = len(kwargs)
    if kwarglen > 0:
        argkeys = kwargs.keys()
        for i in range(kwarglen):
            if argkeys[i] == 'sgNum':
                sgNum = kwargs[argkeys[i]]
            elif argkeys[i] == 'phaseID':
                phaseID = kwargs[argkeys[i]]
            elif argkeys[i] == 'cellString':
                cellString = kwargs[argkeys[i]]
            elif argkeys[i] == 'omeRange':
                omeRange = kwargs[argkeys[i]]
            elif argkeys[i] == 'deltaOme':
                deltaOme = kwargs[argkeys[i]]
            elif argkeys[i] == 'minMeas':
                minMeas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minCompl':
                minCompl = kwargs[argkeys[i]]
            elif argkeys[i] == 'minUniqn':
                minUniqn = kwargs[argkeys[i]]
            elif argkeys[i] == 'uncertainty':
                uncertainty = kwargs[argkeys[i]]
            elif argkeys[i] == 'eulStep':
                eulStep = kwargs[argkeys[i]]
            elif argkeys[i] == 'nSigmas':
                nSigmas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minFracG':
                minFracG = kwargs[argkeys[i]]
            elif argkeys[i] == 'nTrials':
                numTrials = kwargs[argkeys[i]]
            elif argkeys[i] == 'positionfit':
                positionFit = kwargs[argkeys[i]]
            else:
                raise RuntimeError, "Unrecognized keyword argument '%s'" % (argkeys[i])

    # grab some detector geometry parameters for gve file header
    mmPerPixel = float(spotsArray.detectorGeom.pixelPitch) # ...these are still hard-coded to be square
    nrows_p = spotsArray.detectorGeom.nrows - 1
    ncols_p = spotsArray.detectorGeom.ncols - 1

    # beam-center pixel indices; y/z centers are flipped for the Fable frame
    row_p, col_p = spotsArray.detectorGeom.pixelIndicesOfCartesianCoords(
        spotsArray.detectorGeom.xc, spotsArray.detectorGeom.yc)
    yc_p = ncols_p - col_p
    zc_p = nrows_p - row_p

    wd_mu = spotsArray.detectorGeom.workDist * 1e3 # in microns (Soeren)

    # oscillation axis expressed via the Fable sample change-of-basis
    osc_axis = num.dot(fableSampCOB.T, Yl).flatten()

    # start grabbing stuff from planeData
    planeData = spotsArray.getPlaneData(phaseID=phaseID)
    cellp  = planeData.latVecOps['dparms']
    U0     = planeData.latVecOps['U0']   # NOTE(review): unused below
    wlen   = planeData.wavelength
    dsp    = planeData.getPlaneSpacings()
    fHKLs  = planeData.getSymHKLs()
    tThRng = planeData.getTThRanges()
    symTag = planeData.getLaueGroup()    # NOTE(review): unused below

    tThMin, tThMax = (r2d*tThRng.min(), r2d*tThRng.max()) # single range should be ok since entering hkls
    etaMin, etaMax = (0, 360)   # not sure when this will ever *NOT* be the case, so setting it

    omeMin = spotsArray.getOmegaMins()
    omeMax = spotsArray.getOmegaMaxs()

    # one 'omegarange' line per oscillation wedge; entries may be unit-aware
    # objects (have .getVal) or plain radians
    omeRangeString = ''
    for iOme in range(len(omeMin)):
        if hasattr(omeMin[iOme], 'getVal'):
            omeRangeString += 'omegarange %g %g\n' % (
                omeMin[iOme].getVal('degrees'), omeMax[iOme].getVal('degrees'))
        else:
            omeRangeString += 'omegarange %g %g\n' % (
                omeMin[iOme] * r2d, omeMax[iOme] * r2d)

    # convert angles
    # NOTE(review): this mutates planeData's dparms array in place
    cellp[3:] = r2d*cellp[3:]

    # make the theoretical hkls string
    gvecHKLString = ''
    for i in range(len(dsp)):
        for j in range(fHKLs[i].shape[1]):
            gvecHKLString += '%1.8f %d %d %d\n' % (
                1/dsp[i], fHKLs[i][0, j], fHKLs[i][1, j], fHKLs[i][2, j])

    # now for the measured data section
    # xr yr zr xc yc ds eta omega
    gvecString = ''
    spotsIter = spotsArray.getIterPhase(phaseID, returnBothCoordTypes=True)
    for iSpot, angCOM, xyoCOM in spotsIter:
        sR, sC, sOme     = xyoCOM # detector coords
        sTTh, sEta, sOme = angCOM # angular coords (radians)
        sDsp = wlen / 2. / num.sin(0.5*sTTh) # dspacing via Bragg's law

        # get raw y, z (Fable frame)
        # NOTE(review): yraw/zraw are computed but never used below
        yraw = ncols_p - sC
        zraw = nrows_p - sR

        # convert eta to fable frame
        rEta = mapAngle(90. - r2d*sEta, [0, 360], units='degrees')

        # make mesaured G vector components in fable frame
        mGvec = makeMeasuredScatteringVectors(sTTh, sEta, sOme, convention='fable', frame='sample')

        # full Gvec components in fable lab frame (for grainspotter position fit)
        gveXYZ = spotsArray.detectorGeom.angToXYO(sTTh, sEta, sOme, outputGve=True)

        # no 4*pi
        mGvec = mGvec / sDsp

        # make contribution
        # NOTE(review): the gveXYZ[k, :] slices are fed to '%1.8f'; this only
        # works while each slice holds exactly one element -- confirm the
        # shape returned by angToXYO for scalar inputs.
        gvecString += '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %d %1.8f %1.8f %1.8f\n' \
                      % (mGvec[0], mGvec[1], mGvec[2], \
                         sR, sC, \
                         1/sDsp, rEta, r2d*sOme, \
                         iSpot, \
                         gveXYZ[0, :], gveXYZ[1, :], gveXYZ[2, :])
        pass

    # write gve file for grainspotter
    fid = open(fileroot+'.gve', 'w')
    print >> fid, '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f ' % tuple(cellp) + \
          cellString + '\n' + \
          '# wavelength = %1.8f\n' % (wlen) + \
          '# wedge = 0.000000\n' + \
          '# axis = %d %d %d\n' % tuple(osc_axis) + \
          '# cell__a %1.4f\n' %(cellp[0]) + \
          '# cell__b %1.4f\n' %(cellp[1]) + \
          '# cell__c %1.4f\n' %(cellp[2]) + \
          '# cell_alpha %1.4f\n' %(cellp[3]) + \
          '# cell_beta %1.4f\n' %(cellp[4]) + \
          '# cell_gamma %1.4f\n' %(cellp[5]) + \
          '# cell_lattice_[P,A,B,C,I,F,R] %s\n' %(cellString) + \
          '# chi 0.0\n' + \
          '# distance %.4f\n' %(wd_mu) + \
          '# fit_tolerance 0.5\n' + \
          '# o11 1\n' + \
          '# o12 0\n' + \
          '# o21 0\n' + \
          '# o22 -1\n' + \
          '# omegasign %1.1f\n' %(num.sign(deltaOme)) + \
          '# t_x 0\n' + \
          '# t_y 0\n' + \
          '# t_z 0\n' + \
          '# tilt_x 0.000000\n' + \
          '# tilt_y 0.000000\n' + \
          '# tilt_z 0.000000\n' + \
          '# y_center %.6f\n' %(yc_p) + \
          '# y_size %.6f\n' %(mmPerPixel*1.e3) + \
          '# z_center %.6f\n' %(zc_p) + \
          '# z_size %.6f\n' %(mmPerPixel*1.e3) + \
          '# ds h k l\n' + \
          gvecHKLString + \
          '# xr yr zr xc yc ds eta omega\n' + \
          gvecString
    fid.close()

    ###############################################################
    # GrainSpotter ini parameters
    #
    # fileroot = tempfile.mktemp()
    if positionFit:
        positionString = 'positionfit'
    else:
        positionString = '!positionfit'

    if numTrials == 0:
        randomString = '!random\n'
    else:
        randomString = 'random %g\n' % (numTrials)

    fid = open(fileroot+'_grainSpotter.ini', 'w')
    # self.__tempFNameList.append(fileroot)
    print >> fid, \
          'spacegroup %d\n' % (sgNum) + \
          'tthrange %g %g\n' % (tThMin, tThMax) + \
          'etarange %g %g\n' % (etaMin, etaMax) + \
          'domega %g\n' % (deltaOme) + \
          omeRangeString + \
          'filespecs %s.gve %s_grainSpotter.log\n' % (fileroot, fileroot) + \
          'cuts %d %g %g\n' % (minMeas, minCompl, minUniqn) + \
          'eulerstep %g\n' % (eulStep)+ \
          'uncertainties %g %g %g\n' % (uncertainty[0], uncertainty[1], uncertainty[2]) + \
          'nsigmas %d\n' % (nSigmas) + \
          'minfracg %g\n' % (minFracG) + \
          randomString + \
          positionString + '\n'
    fid.close()
    return
def fiberSearch(spotsArray,
                hklList,
                iPhase=0,
                nsteps=120,
                minCompleteness=0.60,
                minPctClaimed=0.95,
                preserveClaims=False,
                friedelOnly=True,
                dspTol=None,
                etaTol=0.025,
                omeTol=0.025,
                etaTolF=0.00225,
                omeTolF=0.00875,
                nStdDev=2,
                quitAfter=None,
                doRefinement=True,
                debug=True,
                doMultiProc=True,
                nCPUs=None,
                outputGrainList=False
                ):
    """
    This indexer finds grains by performing 1-d searches along the fibers
    under the valid spots associated with each reflection order specified in
    hklList.  The set of spots used to generate the candidate orientations
    may be restricted to Friedel pairs only.

    hklList *must* have length > 0;
    each hkl entry in hklList *must* be a tuple, not a list

    the output is a concatenated list of orientation matrices
    ((n, 3, 3) numpy.ndarray).
    """
    # NOTE(review): this module defines fiberSearch twice with identical
    # behavior; the later definition shadows this one at import time.
    assert hasattr(hklList, '__len__'), "the HKL list must have length, and len(hklList) > 0."

    nHKLs = len(hklList)

    grainList = []
    nGrains = 0

    planeData = spotsArray.getPlaneData(iPhase)
    csym = planeData.getLaueGroup()
    bMat = planeData.latVecOps['B']
    if dspTol is None:
        dspTol = planeData.strainMag

    centroSymRefl = planeData.getCentroSymHKLs()

    # template candidate grain; shared with pool workers via the module-level
    # *_MP globals set below
    candidate = Grain(spotsArray, rMat=None, etaTol=etaTol, omeTol=omeTol)
    multiProcMode = xrdbase.haveMultiProc and doMultiProc

    # publish search state through module globals so that worker processes
    # (testThisQ) can see it
    # global foundFlagShared
    global multiProcMode_MP
    global spotsArray_MP
    global candidate_MP
    global dspTol_MP
    global minCompleteness_MP
    global doRefinement_MP
    global nStdDev_MP
    multiProcMode_MP = multiProcMode
    spotsArray_MP = spotsArray
    candidate_MP = candidate
    dspTol_MP = dspTol
    minCompleteness_MP = minCompleteness
    doRefinement_MP = doRefinement
    nStdDev_MP = nStdDev

    """ set up for shared memory multiprocessing """
    if multiProcMode:
        nCPUs = nCPUs or xrdbase.dfltNCPU
        spotsArray.multiprocMode = True
        pool = multiprocessing.Pool(nCPUs)

    """
    HKL ITERATOR
    """
    # quitAfter may be a dict ({'nHKLs': n} limits the HKL loop), an int
    # (stop after that many grains), or None
    if isinstance(quitAfter, dict):
        n_hkls_to_search = quitAfter['nHKLs']
    else:
        n_hkls_to_search = nHKLs

    if isinstance(quitAfter, int):
        quit_after_ngrains = quitAfter
    else:
        quit_after_ngrains = 0

    numTotal = len(spotsArray)
    pctClaimed = 0.
    time_to_quit = False
    tic = time.time()

    for iHKL in range(n_hkls_to_search):
        print "\n#####################\nProcessing hkl %d of %d\n" % (iHKL+1, nHKLs)
        thisHKLID = planeData.getHKLID(hklList[iHKL])
        thisRingSpots0  = spotsArray.getHKLSpots(thisHKLID)
        thisRingSpots0W = num.where(thisRingSpots0)[0]
        # NOTE(review): unary minus negates what appears to be a boolean
        # claim mask; modern numpy requires '~' for this -- confirm the
        # dtype returned by checkClaims.
        unclaimedOfThese = -spotsArray.checkClaims(indices=thisRingSpots0W)
        thisRingSpots = copy.deepcopy(thisRingSpots0)
        thisRingSpots[thisRingSpots0W] = unclaimedOfThese
        if friedelOnly:
            # first, find Friedel Pairs
            spotsArray.findFriedelPairsHKL(hklList[iHKL],
                                           etaTol=etaTolF,
                                           omeTol=omeTolF)
            spotsIteratorI = spotsArray.getIterHKL(
                hklList[iHKL], unclaimedOnly=True, friedelOnly=True)
            # make some stuff for counters
            maxSpots = 0.5*(sum(thisRingSpots) - sum(spotsArray.friedelPair[thisRingSpots] == -1))
        else:
            spotsIteratorI = spotsArray.getIterHKL(
                hklList[iHKL], unclaimedOnly=True, friedelOnly=False)
            maxSpots = sum(thisRingSpots)
        """
        SPOT ITERATOR
          - this is where we iterate over all 'valid' spots for the current
            HKL as subject to the conditions of claims and ID as a friedel
            pair (when requested)
        """
        for iRefl, stuff in enumerate(spotsIteratorI):
            # refresh the unclaimed mask -- claims may have changed since the
            # previous reflection
            unclaimedOfThese = -spotsArray.checkClaims(indices=thisRingSpots0W)
            thisRingSpots = copy.deepcopy(thisRingSpots0)
            thisRingSpots[thisRingSpots0W] = unclaimedOfThese
            if friedelOnly:
                # 'stuff' carries both members of the pair; average the two
                # measured scattering vectors
                iSpot, jSpot, angs_I, angs_J = stuff
                Gplus  = makeMeasuredScatteringVectors(*angs_I)
                Gminus = makeMeasuredScatteringVectors(*angs_J)
                Gvec = 0.5*(Gplus - Gminus)
                maxSpots = 0.5*(sum(thisRingSpots) - sum(spotsArray.friedelPair[thisRingSpots] == -1))
            else:
                iSpot, angs_I = stuff
                Gvec = makeMeasuredScatteringVectors(*angs_I)
                maxSpots = sum(thisRingSpots)
            print "\nProcessing reflection %d (spot %d), %d remain unclaimed\n" % (iRefl+1, iSpot, maxSpots)
            if multiProcMode and debugMultiproc > 1:
                marks = spotsArray._Spots__marks[:]
                print 'marks : '+str(marks)
            # make the fiber;
            qfib = discreteFiber(hklList[iHKL], Gvec,
                                 B=bMat,
                                 ndiv=nsteps,
                                 invert=False,
                                 csym=csym, ssym=None)[0]
            # if +/- hkl aren't in the symmetry group, need '-' fiber
            if not centroSymRefl[thisHKLID]:
                minusHKL = -num.r_[hklList[iHKL]]
                qfibM = discreteFiber(minusHKL, Gvec,
                                      B=bMat,
                                      ndiv=nsteps,
                                      invert=False,
                                      csym=csym, ssym=None)[0]
                qfib = num.hstack([qfib, qfibM])
                pass
            # cull out duplicate orientations
            qfib = mUtil.uniqueVectors(qfib, tol=1e-4)
            numTrials = qfib.shape[1]
            """
            THIS IS THE BIGGIE; THE LOOP OVER THE DISCRETE ORIENTATIONS IN
            THE CURRENT FIBER
            """
            if multiProcMode:
                foundFlagShared.value = False
                qfibList = map(num.array, qfib.T.tolist())
                #if debugMultiproc:
                #    print 'qfibList : '+str(qfibList)
                results = num.array(pool.map(testThisQ, qfibList, chunksize=1))
                trialGrains = results[num.where(num.array(results, dtype=bool))]
                # for trialGrain in trialGrains:
                #     trialGrain.restore(candidate)
            else:
                # serial path: stop at the first orientation yielding a grain
                trialGrains = []
                for iR in range(numTrials):
                    foundGrainData = testThisQ(qfib[:, iR])
                    if foundGrainData is not None:
                        trialGrains.append(foundGrainData)
                        break
            'end of if multiProcMode'

            if len(trialGrains) == 0:
                print "No grain found containing spot %d\n" % (iSpot)
                # import pdb;pdb.set_trace()
            else:
                asMaster = multiProcMode
                'sort based on completeness'
                trialGrainCompletenesses = [tgd['completeness'] for tgd in trialGrains]
                order = num.argsort(trialGrainCompletenesses)[-1::-1]
                for iTrialGrain in order:
                    foundGrainData = trialGrains[iTrialGrain]
                    foundGrain = Grain(spotsArray,
                                       grainData=foundGrainData,
                                       claimingSpots=False)
                    'check completeness before accepting, especially important for multiproc'
                    foundGrain.checkClaims() # updates completeness
                    if debugMultiproc:
                        print 'final completeness of candidate is %g' % (foundGrain.completeness)
                    if foundGrain.completeness >= minCompleteness:
                        conflicts = foundGrain.claimSpots(asMaster=asMaster)
                        numConfl = num.sum(conflicts)
                        if numConfl > 0:
                            # NOTE(review): the formatted message below is
                            # built and discarded -- a print was probably
                            # intended here.
                            'tried to claim %d spots that are already claimed' % (numConfl)
                        grainList.append(foundGrain)
                        nGrains += 1
            numUnClaimed = num.sum(-spotsArray.checkClaims())
            numClaimed = numTotal - numUnClaimed
            # NOTE(review): num.float is a deprecated alias for the builtin
            # float in modern numpy
            pctClaimed = num.float(numClaimed) / numTotal
            print "Found %d grains so far, %f%% claimed" % (nGrains,100*pctClaimed)

            time_to_quit = (pctClaimed > minPctClaimed) or\
                ((quit_after_ngrains > 0) and (nGrains >= quit_after_ngrains))
            if time_to_quit:
                break
        'end of iRefl loop'

        if time_to_quit:
            break
    'end of iHKL loop'

    # stack the rotation matrices of all accepted grains
    rMats = num.empty((len(grainList), 3, 3))
    for i in range(len(grainList)):
        rMats[i, :, :] = grainList[i].rMat

    if outputGrainList:
        retval = (rMats, grainList)
    else:
        retval = rMats

    if not preserveClaims:
        spotsArray.resetClaims()
    toc = time.time()
    print 'fiberSearch execution took %g seconds' % (toc-tic)

    # tear down the pool and clear the module-level state published above
    if multiProcMode:
        pool.close()
        spotsArray.multiprocMode = False
        foundFlagShared.value = False
    # global foundFlagShared
    # global multiProcMode_MP
    # global spotsArray_MP
    # global candidate_MP
    # global dspTol_MP
    # global minCompleteness_MP
    # global doRefinement_MP
    multiProcMode_MP = None
    spotsArray_MP = None
    candidate_MP = None
    dspTol_MP = None
    minCompleteness_MP = None
    doRefinement_MP = None

    return retval
def fiberSearch(spotsArray,
                hklList,
                iPhase=0,
                nsteps=120,
                minCompleteness=0.60,
                minPctClaimed=0.95,
                preserveClaims=False,
                friedelOnly=True,
                dspTol=None,
                etaTol=0.025,
                omeTol=0.025,
                etaTolF=0.00225,
                omeTolF=0.00875,
                nStdDev=2,
                quitAfter=None,
                doRefinement=True,
                debug=True,
                doMultiProc=True,
                nCPUs=None,
                outputGrainList=False):
    """
    This indexer finds grains by performing 1-d searches along the fibers
    under the valid spots associated with each reflection order specified in
    hklList.  The set of spots used to generate the candidate orientations
    may be restricted to Friedel pairs only.

    hklList *must* have length > 0;
    each hkl entry in hklList *must* be a tuple, not a list

    the output is a concatenated list of orientation matrices
    ((n, 3, 3) numpy.ndarray).
    """
    # NOTE(review): this is a duplicate of an identical fiberSearch defined
    # earlier in this module; this later definition is the one in effect.
    assert hasattr(
        hklList,
        '__len__'), "the HKL list must have length, and len(hklList) > 0."

    nHKLs = len(hklList)

    grainList = []
    nGrains = 0

    planeData = spotsArray.getPlaneData(iPhase)
    csym = planeData.getLaueGroup()
    bMat = planeData.latVecOps['B']
    if dspTol is None:
        dspTol = planeData.strainMag

    centroSymRefl = planeData.getCentroSymHKLs()

    # template candidate grain; shared with pool workers via the module-level
    # *_MP globals set below
    candidate = Grain(spotsArray, rMat=None, etaTol=etaTol, omeTol=omeTol)
    multiProcMode = xrdbase.haveMultiProc and doMultiProc

    # publish search state through module globals so that worker processes
    # (testThisQ) can see it
    # global foundFlagShared
    global multiProcMode_MP
    global spotsArray_MP
    global candidate_MP
    global dspTol_MP
    global minCompleteness_MP
    global doRefinement_MP
    global nStdDev_MP
    multiProcMode_MP = multiProcMode
    spotsArray_MP = spotsArray
    candidate_MP = candidate
    dspTol_MP = dspTol
    minCompleteness_MP = minCompleteness
    doRefinement_MP = doRefinement
    nStdDev_MP = nStdDev

    """ set up for shared memory multiprocessing """
    if multiProcMode:
        nCPUs = nCPUs or xrdbase.dfltNCPU
        spotsArray.multiprocMode = True
        pool = multiprocessing.Pool(nCPUs)

    """
    HKL ITERATOR
    """
    # quitAfter may be a dict ({'nHKLs': n} limits the HKL loop), an int
    # (stop after that many grains), or None
    if isinstance(quitAfter, dict):
        n_hkls_to_search = quitAfter['nHKLs']
    else:
        n_hkls_to_search = nHKLs

    if isinstance(quitAfter, int):
        quit_after_ngrains = quitAfter
    else:
        quit_after_ngrains = 0

    numTotal = len(spotsArray)
    pctClaimed = 0.
    time_to_quit = False
    tic = time.time()

    for iHKL in range(n_hkls_to_search):
        print "\n#####################\nProcessing hkl %d of %d\n" % (iHKL + 1, nHKLs)
        thisHKLID = planeData.getHKLID(hklList[iHKL])
        thisRingSpots0 = spotsArray.getHKLSpots(thisHKLID)
        thisRingSpots0W = num.where(thisRingSpots0)[0]
        # NOTE(review): unary minus negates what appears to be a boolean
        # claim mask; modern numpy requires '~' for this -- confirm the
        # dtype returned by checkClaims.
        unclaimedOfThese = -spotsArray.checkClaims(indices=thisRingSpots0W)
        thisRingSpots = copy.deepcopy(thisRingSpots0)
        thisRingSpots[thisRingSpots0W] = unclaimedOfThese
        if friedelOnly:
            # first, find Friedel Pairs
            spotsArray.findFriedelPairsHKL(hklList[iHKL],
                                           etaTol=etaTolF,
                                           omeTol=omeTolF)
            spotsIteratorI = spotsArray.getIterHKL(hklList[iHKL],
                                                   unclaimedOnly=True,
                                                   friedelOnly=True)
            # make some stuff for counters
            maxSpots = 0.5 * (sum(thisRingSpots) -
                              sum(spotsArray.friedelPair[thisRingSpots] == -1))
        else:
            spotsIteratorI = spotsArray.getIterHKL(hklList[iHKL],
                                                   unclaimedOnly=True,
                                                   friedelOnly=False)
            maxSpots = sum(thisRingSpots)
        """
        SPOT ITERATOR
          - this is where we iterate over all 'valid' spots for the current
            HKL as subject to the conditions of claims and ID as a friedel
            pair (when requested)
        """
        for iRefl, stuff in enumerate(spotsIteratorI):
            # refresh the unclaimed mask -- claims may have changed since the
            # previous reflection
            unclaimedOfThese = -spotsArray.checkClaims(indices=thisRingSpots0W)
            thisRingSpots = copy.deepcopy(thisRingSpots0)
            thisRingSpots[thisRingSpots0W] = unclaimedOfThese
            if friedelOnly:
                # 'stuff' carries both members of the pair; average the two
                # measured scattering vectors
                iSpot, jSpot, angs_I, angs_J = stuff
                Gplus = makeMeasuredScatteringVectors(*angs_I)
                Gminus = makeMeasuredScatteringVectors(*angs_J)
                Gvec = 0.5 * (Gplus - Gminus)
                maxSpots = 0.5 * (sum(thisRingSpots) - sum(
                    spotsArray.friedelPair[thisRingSpots] == -1))
            else:
                iSpot, angs_I = stuff
                Gvec = makeMeasuredScatteringVectors(*angs_I)
                maxSpots = sum(thisRingSpots)
            print "\nProcessing reflection %d (spot %d), %d remain unclaimed\n" % (
                iRefl + 1, iSpot, maxSpots)
            if multiProcMode and debugMultiproc > 1:
                marks = spotsArray._Spots__marks[:]
                print 'marks : ' + str(marks)
            # make the fiber;
            qfib = discreteFiber(hklList[iHKL], Gvec,
                                 B=bMat,
                                 ndiv=nsteps,
                                 invert=False,
                                 csym=csym, ssym=None)[0]
            # if +/- hkl aren't in the symmetry group, need '-' fiber
            if not centroSymRefl[thisHKLID]:
                minusHKL = -num.r_[hklList[iHKL]]
                qfibM = discreteFiber(minusHKL, Gvec,
                                      B=bMat,
                                      ndiv=nsteps,
                                      invert=False,
                                      csym=csym, ssym=None)[0]
                qfib = num.hstack([qfib, qfibM])
                pass
            # cull out duplicate orientations
            qfib = mUtil.uniqueVectors(qfib, tol=1e-4)
            numTrials = qfib.shape[1]
            """
            THIS IS THE BIGGIE; THE LOOP OVER THE DISCRETE ORIENTATIONS IN
            THE CURRENT FIBER
            """
            if multiProcMode:
                foundFlagShared.value = False
                qfibList = map(num.array, qfib.T.tolist())
                #if debugMultiproc:
                #    print 'qfibList : '+str(qfibList)
                results = num.array(pool.map(testThisQ, qfibList, chunksize=1))
                trialGrains = results[num.where(num.array(results, dtype=bool))]
                # for trialGrain in trialGrains:
                #     trialGrain.restore(candidate)
            else:
                # serial path: stop at the first orientation yielding a grain
                trialGrains = []
                for iR in range(numTrials):
                    foundGrainData = testThisQ(qfib[:, iR])
                    if foundGrainData is not None:
                        trialGrains.append(foundGrainData)
                        break
            'end of if multiProcMode'

            if len(trialGrains) == 0:
                print "No grain found containing spot %d\n" % (iSpot)
                # import pdb;pdb.set_trace()
            else:
                asMaster = multiProcMode
                'sort based on completeness'
                trialGrainCompletenesses = [
                    tgd['completeness'] for tgd in trialGrains
                ]
                order = num.argsort(trialGrainCompletenesses)[-1::-1]
                for iTrialGrain in order:
                    foundGrainData = trialGrains[iTrialGrain]
                    foundGrain = Grain(spotsArray,
                                       grainData=foundGrainData,
                                       claimingSpots=False)
                    'check completeness before accepting, especially important for multiproc'
                    foundGrain.checkClaims() # updates completeness
                    if debugMultiproc:
                        print 'final completeness of candidate is %g' % (
                            foundGrain.completeness)
                    if foundGrain.completeness >= minCompleteness:
                        conflicts = foundGrain.claimSpots(asMaster=asMaster)
                        numConfl = num.sum(conflicts)
                        if numConfl > 0:
                            # NOTE(review): the formatted message below is
                            # built and discarded -- a print was probably
                            # intended here.
                            'tried to claim %d spots that are already claimed' % (
                                numConfl)
                        grainList.append(foundGrain)
                        nGrains += 1
            numUnClaimed = num.sum(-spotsArray.checkClaims())
            numClaimed = numTotal - numUnClaimed
            # NOTE(review): num.float is a deprecated alias for the builtin
            # float in modern numpy
            pctClaimed = num.float(numClaimed) / numTotal
            print "Found %d grains so far, %f%% claimed" % (nGrains,
                                                            100 * pctClaimed)

            time_to_quit = (pctClaimed > minPctClaimed) or\
                ((quit_after_ngrains > 0) and (nGrains >= quit_after_ngrains))
            if time_to_quit:
                break
        'end of iRefl loop'

        if time_to_quit:
            break
    'end of iHKL loop'

    # stack the rotation matrices of all accepted grains
    rMats = num.empty((len(grainList), 3, 3))
    for i in range(len(grainList)):
        rMats[i, :, :] = grainList[i].rMat

    if outputGrainList:
        retval = (rMats, grainList)
    else:
        retval = rMats

    if not preserveClaims:
        spotsArray.resetClaims()
    toc = time.time()
    print 'fiberSearch execution took %g seconds' % (toc - tic)

    # tear down the pool and clear the module-level state published above
    if multiProcMode:
        pool.close()
        spotsArray.multiprocMode = False
        foundFlagShared.value = False
    # global foundFlagShared
    # global multiProcMode_MP
    # global spotsArray_MP
    # global candidate_MP
    # global dspTol_MP
    # global minCompleteness_MP
    # global doRefinement_MP
    multiProcMode_MP = None
    spotsArray_MP = None
    candidate_MP = None
    dspTol_MP = None
    minCompleteness_MP = None
    doRefinement_MP = None

    return retval
def writeGVE(spotsArray, fileroot, **kwargs):
    """
    write Fable gve file from Spots object

    fileroot is the root string used to write the gve and ini files

    Outputs:
    No return value, but writes the following files:

        <fileroot>.gve
        <fileroot>_grainSpotter.ini (points to --> <fileroot>_grainSpotter.log)

    Keyword arguments:
    Mainly for GrainSpotter .ini file, but some are needed for gve files

    keyword        default              definitions
    -----------------------------------------------------------------------------------------
    'sgNum':       <225>
    'phaseID':     <None>
    'cellString':  <F>
    'omeRange':    <-60, 60, 120, 240>  the oscillation range(s) **currently pulls from spots
    'deltaOme':    <0.25, 0.25>         the oscillation delta(s) **currently pulls from spots
    'minMeas':     <24>
    'minCompl':    <0.7>
    'minUniqn':    <0.5>
    'uncertainty': <[0.10, 0.25, .50]>  the min [tTh, eta, ome] uncertainties in degrees
    'eulStep':     <2>
    'nSigmas':     <2>
    'minFracG':    <0.90>
    'nTrials':     <100000>
    'positionfit': <True>

    Notes:
    *) The omeRange is currently pulled from the spotsArray input; the kwarg
       has no effect as of now.  Will change this to 'override' the spots
       info if the user, say, wants to pare down the range.

    *) There is no etaRange argument yet, but presumably GrainSpotter knows
       how to deal with this.  Pending feature...
    """
    # NOTE(review): this is a duplicate of an identical writeGVE defined
    # earlier in this module; this later definition is the one in effect.

    # check on fileroot
    assert isinstance(fileroot, str)

    # keyword argument defaults
    # NOTE(review): the docstring advertises cellString default <F>, but the
    # code default below is 'P' -- confirm which is intended.
    phaseID     = None
    sgNum       = 225
    cellString  = 'P'
    omeRange    = num.r_[-60, 60]   # in DEGREES
    deltaOme    = 0.25              # in DEGREES
    minMeas     = 24
    minCompl    = 0.7
    minUniqn    = 0.5
    uncertainty = [0.10, 0.25, .50] # in DEGREES
    eulStep     = 2                 # in DEGREES
    nSigmas     = 2
    minFracG    = 0.90
    numTrials   = 100000
    positionFit = True

    # kwargs dispatch; unrecognized keywords raise
    # (indexing kwargs.keys() like a list is Python-2-only behavior)
    kwarglen = len(kwargs)
    if kwarglen > 0:
        argkeys = kwargs.keys()
        for i in range(kwarglen):
            if argkeys[i] == 'sgNum':
                sgNum = kwargs[argkeys[i]]
            elif argkeys[i] == 'phaseID':
                phaseID = kwargs[argkeys[i]]
            elif argkeys[i] == 'cellString':
                cellString = kwargs[argkeys[i]]
            elif argkeys[i] == 'omeRange':
                omeRange = kwargs[argkeys[i]]
            elif argkeys[i] == 'deltaOme':
                deltaOme = kwargs[argkeys[i]]
            elif argkeys[i] == 'minMeas':
                minMeas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minCompl':
                minCompl = kwargs[argkeys[i]]
            elif argkeys[i] == 'minUniqn':
                minUniqn = kwargs[argkeys[i]]
            elif argkeys[i] == 'uncertainty':
                uncertainty = kwargs[argkeys[i]]
            elif argkeys[i] == 'eulStep':
                eulStep = kwargs[argkeys[i]]
            elif argkeys[i] == 'nSigmas':
                nSigmas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minFracG':
                minFracG = kwargs[argkeys[i]]
            elif argkeys[i] == 'nTrials':
                numTrials = kwargs[argkeys[i]]
            elif argkeys[i] == 'positionfit':
                positionFit = kwargs[argkeys[i]]
            else:
                raise RuntimeError, "Unrecognized keyword argument '%s'" % (
                    argkeys[i])

    # grab some detector geometry parameters for gve file header
    mmPerPixel = float(spotsArray.detectorGeom.pixelPitch
                       ) # ...these are still hard-coded to be square
    nrows_p = spotsArray.detectorGeom.nrows - 1
    ncols_p = spotsArray.detectorGeom.ncols - 1

    # beam-center pixel indices; y/z centers are flipped for the Fable frame
    row_p, col_p = spotsArray.detectorGeom.pixelIndicesOfCartesianCoords(
        spotsArray.detectorGeom.xc, spotsArray.detectorGeom.yc)
    yc_p = ncols_p - col_p
    zc_p = nrows_p - row_p

    wd_mu = spotsArray.detectorGeom.workDist * 1e3 # in microns (Soeren)

    # oscillation axis expressed via the Fable sample change-of-basis
    osc_axis = num.dot(fableSampCOB.T, Yl).flatten()

    # start grabbing stuff from planeData
    planeData = spotsArray.getPlaneData(phaseID=phaseID)
    cellp  = planeData.latVecOps['dparms']
    U0     = planeData.latVecOps['U0']   # NOTE(review): unused below
    wlen   = planeData.wavelength
    dsp    = planeData.getPlaneSpacings()
    fHKLs  = planeData.getSymHKLs()
    tThRng = planeData.getTThRanges()
    symTag = planeData.getLaueGroup()    # NOTE(review): unused below

    tThMin, tThMax = (r2d * tThRng.min(), r2d * tThRng.max()
                      ) # single range should be ok since entering hkls
    etaMin, etaMax = (
        0, 360
    ) # not sure when this will ever *NOT* be the case, so setting it

    omeMin = spotsArray.getOmegaMins()
    omeMax = spotsArray.getOmegaMaxs()

    # one 'omegarange' line per oscillation wedge; entries may be unit-aware
    # objects (have .getVal) or plain radians
    omeRangeString = ''
    for iOme in range(len(omeMin)):
        if hasattr(omeMin[iOme], 'getVal'):
            omeRangeString += 'omegarange %g %g\n' % (
                omeMin[iOme].getVal('degrees'), omeMax[iOme].getVal('degrees'))
        else:
            omeRangeString += 'omegarange %g %g\n' % (omeMin[iOme] * r2d,
                                                      omeMax[iOme] * r2d)

    # convert angles
    # NOTE(review): this mutates planeData's dparms array in place
    cellp[3:] = r2d * cellp[3:]

    # make the theoretical hkls string
    gvecHKLString = ''
    for i in range(len(dsp)):
        for j in range(fHKLs[i].shape[1]):
            gvecHKLString += '%1.8f %d %d %d\n' % (
                1 / dsp[i], fHKLs[i][0, j], fHKLs[i][1, j], fHKLs[i][2, j])

    # now for the measured data section
    # xr yr zr xc yc ds eta omega
    gvecString = ''
    spotsIter = spotsArray.getIterPhase(phaseID, returnBothCoordTypes=True)
    for iSpot, angCOM, xyoCOM in spotsIter:
        sR, sC, sOme     = xyoCOM # detector coords
        sTTh, sEta, sOme = angCOM # angular coords (radians)
        sDsp = wlen / 2. / num.sin(0.5 * sTTh) # dspacing via Bragg's law

        # get raw y, z (Fable frame)
        # NOTE(review): yraw/zraw are computed but never used below
        yraw = ncols_p - sC
        zraw = nrows_p - sR

        # convert eta to fable frame
        rEta = mapAngle(90. - r2d * sEta, [0, 360], units='degrees')

        # make mesaured G vector components in fable frame
        mGvec = makeMeasuredScatteringVectors(sTTh, sEta, sOme,
                                              convention='fable',
                                              frame='sample')

        # full Gvec components in fable lab frame (for grainspotter position fit)
        gveXYZ = spotsArray.detectorGeom.angToXYO(sTTh, sEta, sOme,
                                                  outputGve=True)

        # no 4*pi
        mGvec = mGvec / sDsp

        # make contribution
        # NOTE(review): the gveXYZ[k, :] slices are fed to '%1.8f'; this only
        # works while each slice holds exactly one element -- confirm the
        # shape returned by angToXYO for scalar inputs.
        gvecString += '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %d %1.8f %1.8f %1.8f\n' \
                      % (mGvec[0], mGvec[1], mGvec[2], \
                         sR, sC, \
                         1/sDsp, rEta, r2d*sOme, \
                         iSpot, \
                         gveXYZ[0, :], gveXYZ[1, :], gveXYZ[2, :])
        pass

    # write gve file for grainspotter
    fid = open(fileroot + '.gve', 'w')
    print >> fid, '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f ' % tuple(cellp) + \
          cellString + '\n' + \
          '# wavelength = %1.8f\n' % (wlen) + \
          '# wedge = 0.000000\n' + \
          '# axis = %d %d %d\n' % tuple(osc_axis) + \
          '# cell__a %1.4f\n' %(cellp[0]) + \
          '# cell__b %1.4f\n' %(cellp[1]) + \
          '# cell__c %1.4f\n' %(cellp[2]) + \
          '# cell_alpha %1.4f\n' %(cellp[3]) + \
          '# cell_beta %1.4f\n' %(cellp[4]) + \
          '# cell_gamma %1.4f\n' %(cellp[5]) + \
          '# cell_lattice_[P,A,B,C,I,F,R] %s\n' %(cellString) + \
          '# chi 0.0\n' + \
          '# distance %.4f\n' %(wd_mu) + \
          '# fit_tolerance 0.5\n' + \
          '# o11 1\n' + \
          '# o12 0\n' + \
          '# o21 0\n' + \
          '# o22 -1\n' + \
          '# omegasign %1.1f\n' %(num.sign(deltaOme)) + \
          '# t_x 0\n' + \
          '# t_y 0\n' + \
          '# t_z 0\n' + \
          '# tilt_x 0.000000\n' + \
          '# tilt_y 0.000000\n' + \
          '# tilt_z 0.000000\n' + \
          '# y_center %.6f\n' %(yc_p) + \
          '# y_size %.6f\n' %(mmPerPixel*1.e3) + \
          '# z_center %.6f\n' %(zc_p) + \
          '# z_size %.6f\n' %(mmPerPixel*1.e3) + \
          '# ds h k l\n' + \
          gvecHKLString + \
          '# xr yr zr xc yc ds eta omega\n' + \
          gvecString
    fid.close()

    ###############################################################
    # GrainSpotter ini parameters
    #
    # fileroot = tempfile.mktemp()
    if positionFit:
        positionString = 'positionfit'
    else:
        positionString = '!positionfit'

    if numTrials == 0:
        randomString = '!random\n'
    else:
        randomString = 'random %g\n' % (numTrials)

    fid = open(fileroot + '_grainSpotter.ini', 'w')
    # self.__tempFNameList.append(fileroot)
    print >> fid, \
          'spacegroup %d\n' % (sgNum) + \
          'tthrange %g %g\n' % (tThMin, tThMax) + \
          'etarange %g %g\n' % (etaMin, etaMax) + \
          'domega %g\n' % (deltaOme) + \
          omeRangeString + \
          'filespecs %s.gve %s_grainSpotter.log\n' % (fileroot, fileroot) + \
          'cuts %d %g %g\n' % (minMeas, minCompl, minUniqn) + \
          'eulerstep %g\n' % (eulStep)+ \
          'uncertainties %g %g %g\n' % (uncertainty[0], uncertainty[1], uncertainty[2]) + \
          'nsigmas %d\n' % (nSigmas) + \
          'minfracg %g\n' % (minFracG) + \
          randomString + \
          positionString + '\n'
    fid.close()
    return
def __call__(self, spotsArray, **kwargs):
    """
    Run the external GrainSpotter indexer on the data in spotsArray and
    return the recovered orientation matrices (via convertUToRotMat).

    A word on spacegroup numbers: it appears that grainspotter is using the
    'VolA' tag for calls to SgInfo
    """
    location = self.__class__.__name__

    tic = time.time()

    # keyword argument defaults
    phaseID     = None
    gVecFName   = 'tmpGve'
    sgNum       = 225
    cellString  = 'F'
    omeRange    = num.r_[-60, 60]   # in DEGREES
    deltaOme    = 0.25              # in DEGREES
    minMeas     = 24
    minCompl    = 0.7
    minUniqn    = 0.5
    uncertainty = [0.10, 0.25, .50] # in DEGREES
    eulStep     = 2                 # in DEGREES
    nSigmas     = 2
    minFracG    = 0.90
    numTrials   = 100000
    positionFit = False

    # kwargs dispatch; unrecognized keywords raise
    # (indexing kwargs.keys() like a list is Python-2-only behavior)
    kwarglen = len(kwargs)
    if kwarglen > 0:
        argkeys = kwargs.keys()
        for i in range(kwarglen):
            if argkeys[i] == 'sgNum':
                sgNum = kwargs[argkeys[i]]
            elif argkeys[i] == 'phaseID':
                phaseID = kwargs[argkeys[i]]
            elif argkeys[i] == 'gVecFName':
                gVecFName = kwargs[argkeys[i]]
            elif argkeys[i] == 'cellString':
                cellString = kwargs[argkeys[i]]
            elif argkeys[i] == 'omeRange':
                omeRange = kwargs[argkeys[i]]
            elif argkeys[i] == 'deltaOme':
                deltaOme = kwargs[argkeys[i]]
            elif argkeys[i] == 'minMeas':
                minMeas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minCompl':
                minCompl = kwargs[argkeys[i]]
            elif argkeys[i] == 'minUniqn':
                minUniqn = kwargs[argkeys[i]]
            elif argkeys[i] == 'uncertainty':
                uncertainty = kwargs[argkeys[i]]
            elif argkeys[i] == 'eulStep':
                eulStep = kwargs[argkeys[i]]
            elif argkeys[i] == 'nSigmas':
                nSigmas = kwargs[argkeys[i]]
            elif argkeys[i] == 'minFracG':
                minFracG = kwargs[argkeys[i]]
            elif argkeys[i] == 'nTrials':
                numTrials = kwargs[argkeys[i]]
            elif argkeys[i] == 'positionfit':
                positionFit = kwargs[argkeys[i]]
            else:
                raise RuntimeError, "Unrecognized keyword argument '%s'" % (argkeys[i])

    # cleanup stuff from any previous run
    self.cleanup()

    planeData = spotsArray.getPlaneData(phaseID=phaseID)
    cellp  = planeData.latVecOps['dparms']
    U0     = planeData.latVecOps['U0']
    wlen   = planeData.wavelength
    dsp    = planeData.getPlaneSpacings()
    fHKLs  = planeData.getSymHKLs()
    tThRng = planeData.getTThRanges()
    symTag = planeData.getLaueGroup()

    tThMin, tThMax = (r2d*tThRng.min(), r2d*tThRng.max()) # single range should be ok since entering hkls
    etaMin, etaMax = (0, 360)   # not sure when this will ever *NOT* be the case, so setting it

    omeMin = spotsArray.getOmegaMins()
    omeMax = spotsArray.getOmegaMaxs()

    # one 'omegarange' line per oscillation wedge; values converted from
    # radians here (no unit-aware branch, unlike writeGVE above)
    omeRangeString = ''
    for iOme in range(len(omeMin)):
        omeRangeString += 'omegarange %g %g\n' % (omeMin[iOme] * r2d,
                                                  omeMax[iOme] * r2d)

    # convert angles
    # NOTE(review): this mutates planeData's dparms array in place
    cellp[3:] = r2d*cellp[3:]

    # make the theoretical hkls string
    gvecHKLString = ''
    for i in range(len(dsp)):
        for j in range(fHKLs[i].shape[1]):
            gvecHKLString += '%1.8f %d %d %d\n' % (
                1/dsp[i], fHKLs[i][0, j], fHKLs[i][1, j], fHKLs[i][2, j])

    # now for the measured data section
    # xr yr zr xc yc ds eta omega
    gvecString = ''
    ii = 0  # NOTE(review): this counter is advanced but never read
    spotsIter = spotsArray.getIterPhase(phaseID, returnBothCoordTypes=True)
    for iSpot, angCOM, xyoCOM in spotsIter:
        sX, sY, sOme     = xyoCOM # detector coords
        sTTh, sEta, sOme = angCOM # angular coords (radians)
        sDsp = wlen / 2. / num.sin(0.5*sTTh) # dspacing via Bragg's law

        # convert eta to risoe frame
        rEta = mapAngle(90. - r2d*sEta, [0, 360], units='degrees')

        # make mesaured G vector components in risoe frame
        mGvec = makeMeasuredScatteringVectors(sTTh, sEta, sOme, convention='risoe', frame='sample')
        # mQvec = makeMeasuredScatteringVectors(sTTh, sEta, sOme, convention='llnl', frame='lab')
        gveXYZ = spotsArray.detectorGeom.angToXYO(sTTh, sEta, sOme, outputGve=True)

        mGvec = mGvec / sDsp
        # mQvec = 4*num.pi*num.sin(0.5*sTTh)*mQvec/wlen

        # make contribution
        # NOTE(review): the gveXYZ[k, :] slices are fed to '%1.8f'; this only
        # works while each slice holds exactly one element
        gvecString += '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %1.8f %d %1.8f %1.8f %1.8f\n' \
                      % (mGvec[0], mGvec[1], mGvec[2], \
                         sX, sY, \
                         1/sDsp, rEta, r2d*sOme, \
                         iSpot, \
                         gveXYZ[0, :], gveXYZ[1, :], gveXYZ[2, :])

        # advance counter
        ii += 1

    # write gve file for grainspotter
    f = open(gVecFName+'.gve', 'w')
    print >> f, '%1.8f %1.8f %1.8f %1.8f %1.8f %1.8f ' % tuple(cellp) + \
          cellString + '\n' + \
          '# wavelength = %1.8f\n' % (wlen) + \
          '# wedge = 0.000000\n# ds h k l\n' + \
          gvecHKLString + \
          '# xr yr zr xc yc ds eta omega\n' + \
          gvecString
    f.close()

    ###############################################################
    # GrainSpotter ini parameters
    #
    # tempFNameIn = tempfile.mktemp()
    if positionFit:
        positionString = 'positionfit'
    else:
        positionString = '!positionfit'

    if numTrials == 0:
        randomString = '!random\n'
    else:
        randomString = 'random %g\n' % (numTrials)

    tempFNameIn = 'tmpIni'
    f = open(tempFNameIn, 'w')
    # self.__tempFNameList.append(tempFNameIn)
    print >> f, \
        'spacegroup %d\n' % (sgNum) + \
        'tthrange %g %g\n' % (tThMin, tThMax) + \
        'etarange %g %g\n' % (etaMin, etaMax) + \
        'domega %g\n' % (deltaOme) + \
        omeRangeString + \
        'filespecs %s.gve %s.log\n' % (gVecFName, tempFNameIn) + \
        'cuts %d %g %g\n' % (minMeas, minCompl, minUniqn) + \
        'eulerstep %g\n' % (eulStep)+ \
        'uncertainties %g %g %g\n' % (uncertainty[0], uncertainty[1], uncertainty[2]) + \
        'nsigmas %d\n' % (nSigmas) + \
        'minfracg %g\n' % (minFracG) + \
        randomString + \
        positionString + '\n'
    f.close()

    toc = time.time()
    print 'in %s, setup took %g' % (location, toc-tic)
    tic = time.time()

    # run the external GrainSpotter executable on the ini file just written
    # tempFNameStdout = tempfile.mktemp()
    # self.__tempFNameList.append(tempFNameStdout)
    tempFNameStdout = 'tmp.out'  # NOTE(review): assigned but unused below
    # grainSpotterCmd = '%s %s > %s' % (self.__execName, tempFNameIn, tempFNameStdout)
    grainSpotterCmd = '%s %s' % (self.__execName, tempFNameIn)
    os.system(grainSpotterCmd)
    toc = time.time()
    print 'in %s, execution took %g' % (location, toc-tic)
    tic = time.time()

    # add output files to cleanup list
    # self.__tempFNameList += glob.glob(tempFNameIn+'.*')

    # collect data from gff file
    gffFile = tempFNameIn+'.gff'
    gffData = num.loadtxt(gffFile)
    if gffData.ndim == 1:
        # single grain: promote to a 2-d row so the column slicing below works
        gffData = gffData.reshape(1, len(gffData))
    # 9 columns starting at index 6 are taken as the (flattened) U matrix
    gffData_U = gffData[:,6:6+9]

    # process for output
    retval = convertUToRotMat(gffData_U, U0, symTag=symTag)

    toc = time.time()
    print 'in %s, post-processing took %g' % (location, toc-tic)
    tic = time.time()

    return retval