Example #1
def specialSplit():
    """ ./job-panda.py [-q] -ssplit
    External pulser runs have no data cleaning cut.
    Has a memory leak (can't close both TFiles, damn you, ROOT); submit each run as a batch job.
    """
    cal = dsi.CalInfo()
    # runList = cal.GetSpecialRuns("extPulser")
    # runList = cal.GetSpecialRuns("longCal",5)
    runList = cal.GetSpecialRuns("forcedAcq", 8)
    for run in runList:

        inPath = "%s/waves/waveSkimDS%d_run%d.root" % (dsi.specialDir,
                                                       dsi.GetDSNum(run), run)
        outPath = "%s/split/splitSkimDS%d_run%d.root" % (
            dsi.specialDir, dsi.GetDSNum(run), run)

        # remove any stale split files left over from a previous pass
        outFiles = glob.glob("%s/split/splitSkimDS%d_run%d*.root" %
                             (dsi.specialDir, dsi.GetDSNum(run), run))
        for filename in outFiles:
            try:
                os.remove(filename)
            except OSError:
                pass

        if useJobQueue:
            sh("""./job-panda.py -splitf %s %s""" % (inPath, outPath))
        else:
            sh("""%s './job-panda.py -splitf %s %s'""" %
               (jobStr, inPath, outPath))
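
Note: these job-panda.py snippets call a module-level sh() helper and use the useJobQueue / jobStr globals, none of which appear in the excerpts. A minimal sketch of what such a helper could look like, assuming it simply echoes and runs a shell command (the real function in job-panda.py may differ):

import subprocess as sp

def sh(cmd):
    """Hypothetical stand-in for job-panda.py's sh(): print a shell command, then run it."""
    print(cmd)
    # shell=True so the '>&' log redirections used in the calls above are handled by the shell
    sp.call(cmd, shell=True)
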
Example #2
def specialWrite():
    """ ./job-panda.py -swrite
    Write TCuts from waveSkim files into splitSkim files.
    """
    from ROOT import TFile, TNamed, TObject
    cal = dsi.CalInfo()
    runList = cal.GetSpecialRuns("longCal", 5)

    for run in runList:
        dsNum = dsi.GetDSNum(run)
        wavePath = "%s/waves/waveSkimDS%d_run%d.root" % (dsi.specialDir, dsNum,
                                                         run)
        waveFile = TFile(wavePath)
        theCut = waveFile.Get("theCut").GetTitle()
        print(wavePath)

        splitFiles = glob.glob("%s/split/splitSkimDS%d_run%d*.root" %
                               (dsi.specialDir, dsNum, run))
        for idx in range(len(splitFiles)):
            if idx == 0:
                splitPath = "%s/split/splitSkimDS%d_run%d.root" % (
                    dsi.specialDir, dsNum, run)
            else:
                splitPath = "%s/split/splitSkimDS%d_run%d_%d.root" % (
                    dsi.specialDir, dsNum, run, idx)
            if not os.path.isfile(splitPath):
                print("File doesn't exist:", splitPath)
                return

            splitFile = TFile(splitPath, "UPDATE")
            thisCut = TNamed("theCut", theCut)
            thisCut.Write("", TObject.kOverwrite)
            splitFile.Close()  # close each split file so the new cut is flushed to disk

        waveFile.Close()
        print(splitFiles[-1])
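
To spot-check that the cut really landed in a split file, it can be read back the same way specialWrite reads it from the waveSkim file. A quick verification sketch (the filename is illustrative):

from ROOT import TFile

f = TFile("splitSkimDS5_run23456.root")  # substitute an actual split file
print(f.Get("theCut").GetTitle())        # should match the waveSkim TCut title
f.Close()
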
Example #3
def scanLAT2(dsIn=None, subIn=None, modIn=None):
    """ ./job-panda.py [-q] -lat2 """

    skipDS6Cal = True
    cal = dsi.CalInfo()

    # loop over datasets, skipping DS6 cal runs till they're processed
    for ds in [0, 1, 2, 3, 4, 5, 6]:
        if skipDS6Cal and ds == 6:
            continue

        if dsIn is not None and ds != dsIn:
            continue

        # loop over keys in this DS
        for key in cal.GetKeys(ds):

            mod = -1
            if "m1" in key: mod = 1
            if "m2" in key: mod = 2

            # loop over cIdx's for this key
            for cIdx in range(cal.GetIdxs(key)):
                if subIn is not None and cIdx != subIn:
                    continue

                if modIn is not None and mod != modIn:
                    continue

                job = "./lat2.py -scan %d %s %d %d" % (ds, key, mod, cIdx)
                if useJobQueue:
                    sh("%s >& ./logs/lat2-%s-%d.txt" % (job, key, cIdx))
                else:
                    sh("%s '%s'" % (jobStr, job))
Example #4
def checkDS5CWaveSkim():
    from ROOT import TFile, TTree

    runList = []
    calInfo = dsi.CalInfo()
    calKeys = calInfo.GetKeys()
    for key in calKeys:
        print("key:",key)
        if key!="ds5c": continue
        for idx in range(calInfo.GetIdxs(key)):
            lst = calInfo.GetCalList(key,idx)
            print(lst)
            runList += lst

    for run in runList:

        # check waveSkim files
        # inPath = "%s/waveSkimDS%d_run%d.root" % (dsi.calWaveDir,5,run)
        # tf = TFile(inPath)
        # tt = tf.Get("skimTree")
        # chType = ""
        # for br in tt.GetListOfBranches():
        #     if br.GetName()!="channel": continue
        #     chType = br.GetClassName()
        # print(inPath.split("/")[-1], tt.GetEntries(), chType)
        # return

        # delete splitSkim files
        # (NOTE: the deletion itself isn't implemented yet; this just collects the
        #  matching waveSkim / splitSkim paths, and fList is currently unused)
        print(run)
        fWave = glob.glob("%s/waveSkimDS%d_run%d.root" % (dsi.calWaveDir, 5, run))
        fList = glob.glob("%s/splitSkimDS%d_run%d_*.root" % (dsi.calSplitDir, 5, run))
        print(fWave)
Example #5
def specialSkim():
    """ ./job-panda.py [-q (use job queue)] -sskim """
    cal = dsi.CalInfo()
    # runList = cal.GetSpecialRuns("extPulser")
    # runList = cal.GetSpecialRuns("delayedTrigger")
    # runList = cal.GetSpecialRuns("longCal",5)
    runList = cal.GetSpecialRuns("forcedAcq", 8)
    for run in runList:
        if useJobQueue:
            # sh("""./skim_mjd_data -f %d -l -t 0.7 %s/skim >& ./logs/specialSkim-DS%d-%d.txt""" % (run,dsi.specialDir,dsi.GetDSNum(run),run))
            sh("""./skim_mjd_data -f %d -x -l %s/skim >& ./logs/specialSkim-DS%d-%d.txt"""
               % (run, dsi.specialDir, dsi.GetDSNum(run), run))
        else:
            sh("""%s './skim_mjd_data -f %d -x -l %s/skim'""" %
               (jobStr, run, dsi.specialDir))
Example #6
def specialWave():
    """ ./job-panda.py [-q (use queue)] -swave """
    cal = dsi.CalInfo()
    # runList = cal.GetSpecialRuns("extPulser")
    # runList = cal.GetSpecialRuns("longCal",5)
    runList = cal.GetSpecialRuns("forcedAcq", 8)
    for run in runList:
        if useJobQueue:
            # sh("""./wave-skim -l -n -f %d %d -p %s/skim %s/waves >& ./logs/wave-ds%d-%d.txt""" % (dsi.GetDSNum(run),run,dsi.specialDir,dsi.specialDir,dsi.GetDSNum(run),run))
            sh("""./wave-skim -x -n -f %d %d -p %s/skim %s/waves >& ./logs/wave-ds%d-%d.txt"""
               % (dsi.GetDSNum(run), run, dsi.specialDir, dsi.specialDir,
                  dsi.GetDSNum(run), run))
        else:
            sh("""%s './wave-skim -x -n -f %d %d -p %s/skim %s/waves'""" %
               (jobStr, dsi.GetDSNum(run), run, dsi.specialDir,
                dsi.specialDir))
Example #7
def getCalRunList(dsNum=None, subNum=None, runNum=None):
    """ ./job-panda.py -cal (-ds [dsNum] -sub [dsNum] [calIdx] -run [runNum])
        Create a calibration run list, using the CalInfo object in DataSetInfo.py.
        Note that the -sub option is re-defined here to mean a calibration range idx.
        Note that running with -cal alone will create a list for all datasets (mega mode).
    """
    runLimit = None  # need all the stats we can get ...
    calList = []
    calInfo = dsi.CalInfo()
    calKeys = calInfo.GetKeys(dsNum)

    # single-run mode
    if runNum is not None:
        calList.append(runNum)
        print(calList)
        return calList

    # multi-run mode:
    for key in calKeys:
        print("key:", key)
        # if key=="ds6": continue # comment this out and do -cal -ds 6 to get the ds6 list

        # -cal (mega mode)
        if dsNum is None:
            for idx in range(calInfo.GetIdxs(key)):
                lst = calInfo.GetCalList(key, idx, runLimit)
                print(lst)
                calList += lst
        # -ds
        elif subNum is None:
            for idx in range(calInfo.GetIdxs(key)):
                lst = calInfo.GetCalList(key, idx, runLimit)
                print(lst)
                calList += lst
        # -sub
        else:
            lst = calInfo.GetCalList(key, subNum, runLimit)
            if lst is None: continue
            print(lst)
            calList += lst

    # remove any duplicates, but there probably aren't any
    calList = sorted(list(set(calList)))

    print("Total Runs:", len(calList))

    return calList
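
The three command-line modes in the docstring map onto the function arguments roughly like this (run/idx values are illustrative):

calList = getCalRunList()                    # -cal: all datasets ("mega mode")
calList = getCalRunList(dsNum=1)             # -cal -ds 1: every calIdx in DS1
calList = getCalRunList(dsNum=1, subNum=4)   # -cal -sub 1 4: a single calibration range idx
calList = getCalRunList(runNum=13071)        # -cal -run 13071: just one run
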
Example #8
def specialCheck():
    """./job-panda.py -scheck
    A next step could be to 'hadd' split files back together, but we'll wait for now.
    """
    from ROOT import TFile, TTree
    cal = dsi.CalInfo()
    runList = cal.GetSpecialRuns("extPulser")
    for run in runList:
        fileList = glob.glob("%s/lat/latSkimDS%d_run%d_*.root" %
                             (dsi.specialDir, dsi.GetDSNum(run), run))
        for f in fileList:
            tf = TFile(f)
            tr = tf.Get("skimTree")
            print(f)
            print(tr.GetEntries())
            tr.GetEntry(0)
            tf.Close()
Example #9
def specialLAT():
    """ ./job-panda.py [-q (use job queue)] -slat"""
    cal = dsi.CalInfo()
    # runList = cal.GetSpecialRuns("extPulser")
    # runList = cal.GetSpecialRuns("longCal",5)
    runList = cal.GetSpecialRuns("forcedAcq", 8)

    # deal with unsplit files
    # run = runList[0]
    # dsNum = dsi.GetDSNum(run)
    # inFile = "%s/waves/waveSkimDS%d_run%d.root" % (dsi.specialDir,dsNum,run)
    # outFile = "%s/lat/latSkimDS%d_run%d.root" % (dsi.specialDir,dsNum,run)
    # sh("""./lat.py -x -b -f %d %d -p %s %s""" % (dsNum,run,inFile,outFile))

    # deal with split files
    for run in runList:

        dsNum = dsi.GetDSNum(run)
        inFiles = glob.glob("%s/split/splitSkimDS%d_run%d*.root" %
                            (dsi.specialDir, dsNum, run))
        for idx in range(len(inFiles)):
            if idx == 0:
                inFile = "%s/split/splitSkimDS%d_run%d.root" % (dsi.specialDir,
                                                                dsNum, run)
            else:
                inFile = "%s/split/splitSkimDS%d_run%d_%d.root" % (
                    dsi.specialDir, dsNum, run, idx)
            if not os.path.isfile(inFile):
                print("File doesn't exist:", inFile)
                return
            outFile = "%s/lat/latSkimDS%d_run%d_%d.root" % (dsi.specialDir,
                                                            dsNum, run, idx)

            if useJobQueue:
                # this is what you would want for a normal cron queue
                # sh("""./lat.py -x -b -f %d %d -p %s %s""" % (dsNum, run, inFile, outFile))

                # this is what i need for a 1-node job pump
                # sh("""./lat.py -x -b -f %d %d -p %s %s >& ./logs/extPulser-%d-%d.txt""" % (dsNum, run, inFile, outFile, run, idx))
                # sh("""./lat.py -b -f %d %d -p %s %s >& ./logs/longCal-%d-%d.txt""" % (dsNum, run, inFile, outFile, run, idx))
                sh("""./lat.py -x -b -f %d %d -p %s %s >& ./logs/forceAcq-%d-%d.txt"""
                   % (dsNum, run, inFile, outFile, run, idx))
            else:
                sh("""%s './lat.py -x -b -f %d %d -p %s %s' """ %
                   (jobStr, dsNum, run, inFile, outFile))
Example #10
def specialBuild():
    """ ./job-panda.py -sbuild
    Set 1 of the external pulser runs was built with --donotbuild.
    Let's manually build them.

    Example:
    - Make sure MkCookie has been run recently
    majorcaroot --nomultisampling --setspecialchanmap /global/homes/m/mjd/production/P3JDYspecchanmap.txt --donotbuild 2015-9-3-P3JDY_Run5942
    process_mjd_cal OR_*.root
    pulsertag 5942
    auto-thresh 5942
    process_mjd_gat OR_*.root
    """
    cal = dsi.CalInfo()
    rawDir = "/global/project/projectdirs/majorana/data/mjd/surfmjd/data/raw/P3JDY/Data"
    buildDir = dsi.dataDir + "/mjddatadir"
    os.chdir(buildDir)

    runList = []
    for pIdx in [7, 8, 9, 10, 11, 12]:
        runList.extend(cal.GetSpecialRuns("extPulser", pIdx))

    for run in runList:

        os.chdir(buildDir + "/raw")
        rawFile = glob.glob("%s/*%d*" % (rawDir, run))[0]
        rawName = rawFile.rsplit('/', 1)[1]
        rawName = rawName.rsplit('.')[0]
        # sp.call("""zcat %s > %s""" % (rawFile, rawName), shell=True)

        os.chdir(buildDir + "/built")
        sp.call(
            """majorcaroot --nomultisampling --setspecialchanmap /global/homes/m/mjdproduction/P3JDYspecchanmap.txt ../raw/%s"""
            % (rawName),
            shell=True)

        # ... I kinda lost steam here. Set 1 runs are not worth this much hassle.

        return
Example #11
def tuneCuts(argString, dsNum=None):
    """ ./job-panda.py -tuneCuts '[argString]' -- run over all ds's
        ./job-panda.py -ds [dsNum] -tuneCuts '[argString]' -- just one DS

    Submit a bunch of lat3.py jobs to the queues.
    NOTE:
        1) If processing individual dataset, the -ds option MUST come before -tuneCuts.
        2) Make sure to put argString in quotes.
        3) argString may be multiple options separated by spaces

    Options for argString:
        -all, -bcMax, -noiseWeight, -bcTime, -tailSlope, -fitSlo, -riseNoise
    """
    calInfo = dsi.CalInfo()
    if dsNum is None:
        for i in dsi.dsMap.keys():
            if i == 6: continue
            for mod in [1, 2]:
                try:
                    for j in range(calInfo.GetIdxs("ds%d_m%d" % (i, mod))):
                        print("%s './lat3.py -db -tune %s -s %d %d %d %s" %
                              (jobStr, dsi.calLatDir, i, j, mod, argString))
                        sh("""%s './lat3.py -db -tune %s -s %d %d %d %s '""" %
                           (jobStr, dsi.calLatDir, i, j, mod, argString))
                except:
                    continue
    # -ds
    else:
        for mod in [1, 2]:
            try:
                for j in range(calInfo.GetIdxs("ds%d_m%d" % (dsNum, mod))):
                    print("%s './lat3.py -db -tune %s -s %d %d %d %s" %
                          (jobStr, dsi.calLatDir, dsNum, j, mod, argString))
                    sh("""%s './lat3.py -db -tune %s -s %d %d %d %s '""" %
                       (jobStr, dsi.calLatDir, dsNum, j, mod, argString))
            except:
                continue
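
For example, to tune only the fitSlo and riseNoise cuts on DS1 (note that argString must be passed as a single quoted string):

# command line:
#   ./job-panda.py -ds 1 -tuneCuts '-fitSlo -riseNoise'
# equivalent direct call:
tuneCuts("-fitSlo -riseNoise", dsNum=1)
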
Example #12
File: check-files.py    Project: gothman5/LAT
#!/usr/bin/env python
import sys, os
sys.argv.append("-b")
import tinydb as db
import numpy as np

import waveLibs as wl
import dsi
bkg = dsi.BkgInfo()
cal = dsi.CalInfo()
det = dsi.DetInfo()

from ROOT import TFile, TTree, MGTWaveform

# switches
fLimit = None  # set to None to run over everything
skipDS6Cal = True
verbose = True
testMode = False


def main(argv):
    """ NOTE: don't use globs when getting files.
    Manually make sure everything is here.
    Can submit these commands as separate batch jobs:
        ./check-files.py -all
        ./check-files.py -c -all
    """
    global checkCal
    checkCal = False
    if checkCal: print("Skip DS6 cal?", skipDS6Cal)
Example #13
File: chan-sel.py    Project: gothman5/LAT
def fillDetInfo():
    """ ./chan-sel.py -fill
    Summarize the results of getSettings in LAT/data/runSettings-v2.npz.
    Create a file accessible by the DetInfo object in dsi.py.
    It contains dictionaries of TRAP thresholds, HV settings,
    detector/channel mapping, and pulser monitor lists
    that span an entire dataset (not broken down into separate calIdx's).
    # FORMAT: {ds : {'det' : [(run1,val1),(run2,val2)...]} }
    """
    # 1. maps of analysis channel to cpd, and pulser monitor channels
    detCH, pMons = {}, {}
    for ds in [0, 1, 2, 3, 4, 5, 6]:
        f = np.load("%s/data/ds%d_detChans.npz" % (os.environ['LATDIR'], ds))
        detCH[ds] = f['arr_0'].item()
        pMons[ds] = f['arr_1'].item()

    # 2. maps of HV and TRAP threshold settings are stored in the DB.
    # make them global, and move them to the runSettings file.
    # FORMAT: {ds : {'det' : [(run1,val1),(run2,val2)...]} }
    detHV, detTH = {}, {}

    # load all possible values, as in settingsMgr
    detDB = db.TinyDB("%s/calDB-v2.json" % dsi.latSWDir)
    detPars = db.Query()
    cal = dsi.CalInfo()
    for ds in [0, 1, 2, 3, 4, 5, 6]:
        # for ds in [3]:
        print("scanning ds", ds)
        detTH[ds] = {}
        detHV[ds] = {}
        for key in cal.GetKeys(ds):
            mod = -1
            if "m1" in key: mod = 1
            if "m2" in key: mod = 2
            for cIdx in range(cal.GetIdxs(key)):

                # load the DB records
                dbKeyTH = "trapThr_%s_c%d" % (key, cIdx)
                dbValTH = dsi.getDBRecord(dbKeyTH, calDB=detDB, pars=detPars)

                dbKeyHV = "hvBias_%s_c%d" % (key, cIdx)
                dbValHV = dsi.getDBRecord(dbKeyHV, calDB=detDB, pars=detPars)

                # debug: print the record
                # for val in sorted(dbValTH):
                # if len(dbValTH[val])>0:
                # print(val, dbValTH[val])
                # return

                # fill the first value
                if len(detTH[ds]) == 0:
                    detTH[ds] = dbValTH
                    detHV[ds] = dbValHV
                    continue

                # check for new threshold values.
                for cpd in detTH[ds]:
                    nOld, nNew = len(detTH[ds][cpd]), len(dbValTH[cpd])

                    # detector just came online
                    if nOld == 0 and nNew > 0:
                        detTH[ds][cpd] = dbValTH[cpd]
                        continue
                    # detector still offline
                    if nOld == 0 and nNew == 0:
                        continue
                    # detector just went offline
                    if nOld > 0 and nNew == 0:
                        continue

                    # check last run/trap pair against each new one
                    prevRun, prevTH = detTH[ds][cpd][-1][0], detTH[ds][cpd][-1][1]
                    for val in dbValTH[cpd]:
                        thisRun, thisTH = val[0], val[1]
                        if thisTH != prevTH:
                            detTH[ds][cpd].append([thisRun, thisTH])
                        prevTH = thisTH

                # check for new HV values.
                for cpd in detHV[ds]:

                    nOld, nNew = len(detHV[ds][cpd]), len(dbValHV[cpd])

                    # detector just came online
                    if nOld == 0 and nNew > 0:
                        detHV[ds][cpd] = dbValHV[cpd]
                        continue
                    # detector still offline
                    if nOld == 0 and nNew == 0:
                        continue
                    # detector just went offline
                    if nOld > 0 and nNew == 0:
                        continue

                    # check last run/trap pair against each new one
                    prevRun, prevHV = detHV[ds][cpd][-1][0], detHV[ds][cpd][-1][1]
                    for val in dbValHV[cpd]:
                        thisRun, thisHV = val[0], val[1]
                        if thisHV != prevHV:
                            print(
                                "found HV diff.  cpd %d  prev %dV (run %d)  new %dV (run %d)"
                                % (cpd, prevHV, prevRun, thisHV, thisRun))
                            detHV[ds][cpd].append([thisRun, thisHV])
                        prevHV = thisHV

                # return

    # # load the old file and compare
    # # GOAL: improve on this file.
    # # f = np.load("%s/data/runSettings.npz" % dsi.latSWDir)
    # # detHVOld = f['arr_0'].item()
    # # detTHOld = f['arr_1'].item()
    # # detCHOld = f['arr_2'].item()
    # # pMonsOld = f['arr_3'].item()
    #
    # ds = 3
    # print("old results, ds",ds)
    # for cpd in sorted(detTHOld[ds]):
    #     if cpd!="122":continue
    #     if len(detTHOld[ds][cpd]) > 0:
    #         print(cpd, detTHOld[ds][cpd])
    #
    # # for ds in [0,1,2,3,4,5,6]:
    # print("thresh results, ds:",ds)
    # for cpd in sorted(detTH[ds]):
    #     # if cpd!=122:continue
    #     if len(detTH[ds][cpd]) > 0:
    #         print(cpd, detTH[ds][cpd])

    np.savez("%s/data/runSettings-v2.npz" % dsi.latSWDir, detHV, detTH, detCH,
             pMons)
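
Because the dictionaries are passed to np.savez positionally, they come back out as arr_0 ... arr_3, mirroring how the ds%d_detChans.npz files are loaded at the top of fillDetInfo. A sketch of reading the settings back (assuming the same argument order as the savez call above):

import numpy as np

f = np.load("%s/data/runSettings-v2.npz" % dsi.latSWDir)
detHV = f['arr_0'].item()   # {ds: {cpd: [(run, hvVal), ...]}}
detTH = f['arr_1'].item()   # {ds: {cpd: [(run, trapThr), ...]}}
detCH = f['arr_2'].item()   # analysis channel -> cpd maps
pMons = f['arr_3'].item()   # pulser monitor channel lists
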