def plotMultiWavelength(data, showErr=True):
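    """Plot the sin^2(psi) data of every reflection (hkl key) in *data*.

    showErr is passed through to plotSin2Psi.
    """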
    hklNames = bf.getKeyList(data)
    for hkl in hklNames:
        plotSin2Psi(data[hkl], showErr)
def multiWavelengthAnalysis(data, maxPsi=None):
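    """Run sin2PsiAnalysis for every peak ('pv<i>_*' entries) in *data*.

    Expects 'phi', 'psi', 'tth', 'a0Val' plus the per-peak keys
    'pv<i>_center', 'pv<i>_center_err', 'pv<i>_sigma', 'pv<i>_depth',
    'pv<i>_dspac', 'pv<i>_h', 'pv<i>_k', 'pv<i>_l', 'pv<i>_s1' and
    'pv<i>_hs2'. maxPsi is forwarded to sin2PsiAnalysis. Returns the
    collected per-peak results and the plot data keyed by hkl.
    """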
    keyList = bf.getKeyList(data)
    peakCount = int(
        bf.replace(keyList[-1].split('_')[0],
                   'pv')) + 1  # note: may need adaptation in future versions
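    # e.g. a last key of 'pv2_center' yields peak index 2 and thus peakCount = 3
    # (assumes the keys are ordered so that the last one belongs to the highest peak)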
    phiVals = data['phi']
    psiVals = data['psi']
    tauMean = bf.zeros(peakCount)
    hklList = bf.zeros(peakCount, dtypeVal=int)  # np.int was removed in NumPy 1.24
    s1Dec = bf.zeros(peakCount)
    hs2Dec = bf.zeros(peakCount)
    dStarVals = bf.zeros(peakCount)
    dStarErrVals = bf.zeros(peakCount)
    stresses = np.zeros((peakCount, 6))  # s11-33 s22-33 s13 s23 s12 s33
    accuracy = np.zeros((peakCount, 6))
    integralWidth = np.zeros(
        (peakCount, 6))  # phi0 phi90 phi180 phi270 phi45 phi225
    plotData = dict()
    for p in range(peakCount):  # perform this for all peaks
        ibVals = data['pv' + str(p) + '_sigma']
        centerVals = data['pv' + str(p) + '_center']
        centerErrVals = data['pv' + str(p) + '_center_err']
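        # convert the fitted center energies E +/- dE to lattice spacings and
        # take half of the resulting spread as the d-spacing error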
        dMinVals = conv.energies2latticeDists(centerVals - centerErrVals,
                                              data['tth'])
        dMaxVals = conv.energies2latticeDists(centerVals + centerErrVals,
                                              data['tth'])
        dErrVals = np.abs(dMaxVals - dMinVals) / 2
        tauVals = data['pv' + str(p) + '_depth']
        dVals = data['pv' + str(p) + '_dspac'] / 10  # in nm
        #hklVals = data['pv' + str(p) + '_hklList']  # first version with adaptions
        #s1Vals = data['pv' + str(p) + '_s1List']  # first version with adaptions
        #hs2Vals = data['pv' + str(p) + '_s2List']  # first version with adaptions
        #hklList[p] = hklVals[0]
        hVals = data['pv' + str(p) + '_h']  # second version
        kVals = data['pv' + str(p) + '_k']  # second version
        lVals = data['pv' + str(p) + '_l']  # second version
        s1Vals = data['pv' + str(p) + '_s1']  # second version
        #hs2Vals = data['pv' + str(p) + '_s2']  # second version
        hs2Vals = data['pv' + str(p) + '_hs2']  # third version
        hklList[p] = conv.mergeHkl(hVals[0], kVals[0], lVals[0])
        s1Dec[p] = s1Vals[0]
        #hs2Dec[p] = hs2Vals[0] * 0.5  # only needed for the first and second data versions
        hs2Dec[p] = hs2Vals[0]
        curData = {
            'dVals': dVals,
            'dErr': dErrVals,
            'tauVals': tauVals,
            'phiVals': phiVals,
            'psiVals': psiVals,
            'hklVal': hklList[p],
            's1Val': s1Dec[p],
            'hs2Val': hs2Dec[p],
            'ibVals': ibVals
        }
        bf.extendDictionary(curData, data, ('a0Val', ))
        # perform sin2psi analysis for current peak data
        curResData, curPlotData = sin2PsiAnalysis(curData, maxPsi)
        # remember results
        tauMean[p] = curResData['tauMean']
        dStarVals[p] = curResData['dStar100']
        dStarErrVals[p] = curResData['dStar100Err']
        stresses[p] = curResData['stresses']
        accuracy[p] = curResData['accuracy']
        curMeanIB = curResData['meanIB']
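        # meanIB may hold fewer than six entries, so only the leading columns are filled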
        integralWidth[p, 0:len(curMeanIB)] = curMeanIB
        plotData[str(hklList[p])] = curPlotData
    # collect the per-peak results: hklList, s1Dec, hs2Dec, tauMean, dStar100,
    # dStar100Err, stresses, accuracy, integralWidth
    resData = {
        'hklList': hklList,
        's1Dec': s1Dec,
        'hs2Dec': hs2Dec,
        'tauMean': tauMean,
        'dStar100': dStarVals,
        'dStar100Err': dStarErrVals,
        'stresses': stresses,
        'accuracy': accuracy,
        'integralWidth': integralWidth
    }
    return resData, plotData
def multiUniversalPlotAnalysis(data,
                               maxPsi=None,
                               minDistPsiStar=0.15,
                               minValPsiNormal=0.08,
                               minValPsiShear=0.8):
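    """Run universalPlotAnalysis for every peak ('pv<i>_*' entries) in *data*.

    maxPsi, minDistPsiStar, minValPsiNormal and minValPsiShear are forwarded
    to universalPlotAnalysis. Returns two dictionaries: the depth-sorted
    universal plot results (tauVals, stresses, accuracy, hklVals, psiVals,
    validCount) and the per-peak s33 results (tauMean, dStar100, dStar100Err,
    s33, dev_s33, hklList).
    """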
    keyList = bf.getKeyList(data)
    peakCount = int(
        bf.replace(keyList[-1].split('_')[0],
                   'pv')) + 1  # note: may need adaptation in future versions
    tthVal = data['tth']
    phiVals = data['phi']
    psiVals = data['psi']
    psiUni = np.unique(psiVals)
    psiUni = psiUni[psiUni != 0]  # drop the zero tilt
    psiSign = np.sign(psiUni[-1])  # sign of the last (largest) psi value
    psiUni = psiUni[np.sign(psiUni) ==
                    psiSign]  # keep only psi values with that sign
    sinpsi2Uni = bc.sind(psiUni)**2  # sin^2(psi)
    sin2psiUni = bc.sind(np.abs(2 * psiUni))  # |sin(2 psi)|
    tauRes = np.zeros((peakCount, len(psiUni)))
    hklRes = np.zeros((peakCount, len(psiUni)))
    psiRes = np.zeros((peakCount, len(psiUni)))
    stresses = np.zeros((peakCount, len(psiUni), 4))
    errVals = np.zeros((peakCount, len(psiUni), 4))
    tauS33 = np.zeros(peakCount)
    aStarVals = np.zeros(peakCount)
    aStarErrVals = np.zeros(peakCount)
    s33 = np.zeros(peakCount)
    dev_s33 = np.zeros(peakCount)
    hklList = np.zeros(peakCount)
    phi4 = len(np.unique(phiVals)) == 4  # True if data were measured at four phi rotations
    validCounter = 0
    for p in range(peakCount):  # for all peaks create one plot
        centerVals = data['pv' + str(p) + '_center']
        centerErrVals = data['pv' + str(p) + '_center_err']
        dMinVals = conv.energies2latticeDists(centerVals - centerErrVals,
                                              tthVal)
        dMaxVals = conv.energies2latticeDists(centerVals + centerErrVals,
                                              tthVal)
        dErrVals = np.abs(dMaxVals - dMinVals) / 2
        tauVals = data['pv' + str(p) + '_depth']
        dVals = data['pv' + str(p) + '_dspac'] / 10  # in nm
        #hklVals = data['pv' + str(p) + '_hklList']  # first version with adaptions
        #s1Vals = data['pv' + str(p) + '_s1List']  # first version with adaptions
        #hs2Vals = data['pv' + str(p) + '_s2List']  # first version with adaptions
        #hklVal = hklVals[0]
        hVals = data['pv' + str(p) + '_h']  # second version
        kVals = data['pv' + str(p) + '_k']  # second version
        lVals = data['pv' + str(p) + '_l']  # second version
        s1Vals = data['pv' + str(p) + '_s1']  # second version
        #hs2Vals = data['pv' + str(p) + '_s2']  # second version
        hs2Vals = data['pv' + str(p) + '_hs2']  # third version
        hklVal = conv.mergeHkl(hVals[0], kVals[0], lVals[0])
        hklList[p] = hklVal
        hklRes[p] = hklVal * np.ones(len(psiUni))
        psiRes[p] = psiUni
        s1Val = s1Vals[0]
        #hs2Val = hs2Vals[0] * 0.5  # only needed for the first and second data versions
        hs2Val = hs2Vals[0]
        curData = {
            'tauVals': tauVals,
            'dVals': dVals,
            'dErrVals': dErrVals,
            'psiVals': psiVals,
            'phiVals': phiVals,
            'psiUni': psiUni,
            'sin2psiUni': sin2psiUni,
            'sinpsi2Uni': sinpsi2Uni,
            'phi4': phi4,
            'hklVal': hklVal,
            's1Val': s1Val,
            'hs2Val': hs2Val
        }
        bf.extendDictionary(curData, data, ('a0Val', ))
        # perform universal plot analysis for current peak data
        curResData = universalPlotAnalysis(curData, maxPsi, minDistPsiStar,
                                           minValPsiNormal, minValPsiShear)
        # remember results
        tauRes[p] = curResData['tauRes']
        stresses[p] = curResData['stresses']
        errVals[p] = curResData['errVals']
        aStarVals[p] = curResData['dStar100']
        aStarErrVals[p] = curResData['dStar100Err']
        tauS33[p] = curResData['tauS33']
        s33[p] = curResData['s33']
        dev_s33[p] = curResData['dev_s33']
        validCounter += curResData['validCounter']
    # flatten the per-peak result arrays
    tauRes = np.reshape(tauRes, -1)
    hklRes = np.reshape(hklRes, -1)
    psiRes = np.reshape(psiRes, -1)
    stresses = np.reshape(stresses, (-1, bf.size(stresses, 2)))
    errVals = np.reshape(errVals, (-1, bf.size(errVals, 2)))
    # remove entries with tau = 0
    validMask = tauRes > 0
    hklRes = hklRes[validMask]
    psiRes = psiRes[validMask]
    stresses = stresses[validMask]
    errVals = errVals[validMask]
    tauRes = tauRes[validMask]
    # sort data by increasing information depth
    depthOrder = np.argsort(tauRes)
    hklRes = hklRes[depthOrder]
    psiRes = psiRes[depthOrder]
    stresses = stresses[depthOrder]
    errVals = errVals[depthOrder]
    tauRes = tauRes[depthOrder]
    resData = {
        'tauVals': tauRes,
        'stresses': stresses,
        'accuracy': errVals,
        'hklVals': hklRes,
        'psiVals': psiRes,
        'validCount': validCounter
    }
    resDataS33 = {
        'tauMean': tauS33,
        'dStar100': aStarVals,
        'dStar100Err': aStarErrVals,
        's33': s33,
        'dev_s33': dev_s33,
        'hklList': hklList
    }
    return resData, resDataS33
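# Usage sketch (assumption, not part of the original workflow): the universal
# plot analysis takes the same input dictionary as multiWavelengthAnalysis, e.g.
#   resDataUvp, resDataS33 = multiUniversalPlotAnalysis(inputData, maxPsi)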
plotResUvp = True
showErr = True
# showErr = False
checkUvpVals = True
fileNames = fg.requestFiles((("Data files", "*.dat"), ),
                            "Select P61A data file", "on")
# import all data
allData = dict()
allMetaInfo = dict()
for fileName in fileNames:
    data, metaInfo = fs.loadFileP61A2(fileName)
    allData[fileName] = data
    allMetaInfo[fileName] = metaInfo
# combine all data for one analysis
combinedData = allData[fileNames[0]]
keyList = bf.getKeyList(combinedData)
for fileName in fileNames[1:]:
    for key in keyList:
        combinedData[key] = np.concatenate(
            (combinedData[key], allData[fileName][key]))
# prepare data for multi wavelength and universal plot analysis
a0Val = 0.289  # lattice parameter a0 in nm
tthVal = 7  # diffraction angle 2theta in degrees
# tthVal = 15
inputData = bf.combineDictionaries({
    'a0Val': a0Val,
    'tth': tthVal
}, combinedData)
# perform multi wavelength analysis
maxPsi = 45  # maximum psi tilt angle in degrees
resDataMwl, plotDataMwl = dc.multiWavelengthAnalysis(inputData, maxPsi)
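# A typical follow-up (sketch, not from the original script): plot the
# sin^2(psi) data of every analysed reflection, assuming plotMultiWavelength
# is available from the same dc module:
#   dc.plotMultiWavelength(plotDataMwl, showErr)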