Example #1
def filter_2015_pipi():
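    # Selections to apply per pi0 category: output-name suffix -> selection.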
    sels = {
        'Resolved': {
            '': selection_R,
            '_LowMass': selection_R_low,
            '_HighMass': selection_R_high
        },
        'Merged': {}
    }
    for finalstate in ('pipi',):  # , 'Kpi'
        # Merged doesn't work currently because the BDT expects lab6 & lab7
        # to be the photons.
        for pi0 in ('Resolved',):  # , 'Merged'
            for mag in 'Up', 'Down':
                dataset = 'Data_2015_{finalstate}pi0_{pi0}_Mag{mag}_full'.format(
                    **locals())
                print dataset
                info = datalib.get_data_info(dataset)
                tree = datalib.get_data(dataset)
                for suff, sel in sels[pi0].items():
                    outputname = dataset.replace('_full', suff)
                    print outputname
                    outputdir = os.path.join(filtereddatadir, outputname)
                    if not os.path.exists(outputdir):
                        os.makedirs(outputdir)
                    outputfile = os.path.join(outputdir, outputname + '.root')
                    fout = ROOT.TFile.Open(outputfile, 'recreate')
                    treeout = copy_tree(tree, sel, write=True)
                    fout.Close()
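# copy_tree isn't defined in this snippet; a minimal sketch, assuming it wraps
# TTree.CopyTree with the given selection and optionally writes the copy to the
# currently open output file (hypothetical helper, not the package's own code):
def copy_tree(tree, selection='', write=False):
    copied = tree.CopyTree(selection)
    if write:
        copied.Write()
    return copied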
def filter_2016_tuples(overwrite=False):
    weightsfile = os.path.expandvars(
        '$AGAMMAD0TOHHPI0ROOT/tmva/20180702-Lewis/TMVAClassification_BDT_Kpipi0.weights.xml'
    )
    for mag in 'Up', 'Down':
        print 'Filter Mag' + mag, 'files'
        datainfo = datalib.get_data_info('Data_2016_Kpipi0_Mag' + mag +
                                         '_full')
        outputdir = os.path.join(datadir, 'data', '2016', 'mag' + mag.lower())
        if not os.path.exists(outputdir):
            os.makedirs(outputdir)
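        # Import the module listing the tuple URLs (keyed by LFN) for this polarity.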
        mod2016 = __import__(
            'AGammaD0Tohhpi0.Reco16_Charm_Mag{0}_TupleURLs'.format(mag),
            fromlist=['urls'])
        nok = 0
        for lfn, urls in mod2016.urls.items():
            print 'Process LFN', lfn
            outputfile = os.path.join(
                outputdir,
                lfn[1:].replace('/', '_').replace('.root', '_Kpipi0.root'))
            if not overwrite and os.path.exists(outputfile) and is_tfile_ok(
                    outputfile):
                print 'Output already exists, skipping'
                nok += 1
                continue
            if not urls:
                continue
            ok = False
            # Find a URL that works.
            for url in urls:
                if is_tfile_ok(url):
                    ok = True
                    break
            if not ok:
                print 'No working URL'
                continue
            inputfile = ROOT.TFile.Open(url)
            tree = inputfile.Get(datainfo['tree'])
            if not tree:
                print 'No tree named {0!r} in file {1}'.format(
                    datainfo['tree'], url)
                continue
            nok += 1
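            # Presumably applies the BDT in weightsfile to the tree and writes
            # the entries passing bdtcut to outputfile.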
            filter_tuple_mva(tree, weightsfile, 'BDT', outputfile, bdtcut)
        print 'Successfully filtered', str(nok) + '/' + str(len(
            mod2016.urls)), 'files'
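# is_tfile_ok isn't shown here either; a plausible sketch, assuming it only
# checks that the file (or URL) opens cleanly and wasn't recovered or left as
# a zombie (hypothetical helper):
def is_tfile_ok(path):
    tfile = ROOT.TFile.Open(path)
    ok = (bool(tfile) and not tfile.IsZombie()
          and not tfile.TestBit(ROOT.TFile.kRecovered))
    if tfile:
        tfile.Close()
    return ok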
Example #3
def test():
    '''Test blinding on some MINT data.'''
    timebins41 = [round(0.1*i, 10) for i in xrange(42)]
    binningname41 = 'TimeBins41'
    lifetime = 0.4101
    data3pi = 'MINT_data_worldAv_1M_noExpEff_cp'

    datainfo = datalib.get_data_info(data3pi)
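    # Use only the first input file for this test.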
    datainfo = dict(datainfo, files = datainfo['files'][:1])
    dataname = data3pi + '_' + os.path.split(datainfo['files'][0])[1]
    datalib.make_getters({dataname : datainfo})

    fpars3pi = os.path.expandvars('$AGAMMAD0TOHHPI0WORKINGDIR/hadronicParameters/pipipi0-8bins-10M/hadronicParameters.txt')

    fname = os.path.join(datalib.dataset_dir(dataname), dataname + '_' + binningname41 + '.txt')

    fitter = BinFlipFitter(datalib, dataname, 
                           #timebins41, 
                           fname,
                           lifetime = lifetime, hadronicparsfile = fpars3pi,
                           binningname = binningname41)
    chi2unblind, miniunblind = fitter.do_fit('unblind')
    chi2blind, miniblind = fitter.do_fit('blind', blindingseed = 1)
    return locals()
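# Hypothetical usage: test() returns locals(), so the fit results can be
# inspected from the returned dict, e.g.
#   results = test()
#   print results['chi2blind'], results['chi2unblind']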
Example #4
# These don't change with the data file, so just calculate them once here.
X, F, Fbar, r = computeIntegrals(nbinsPhase, diffcalc, True)
zcp, deltaz = getZvals(x, y, qoverp, phi)
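# zcp and deltaz are presumably the CP-averaged and CP-violating combinations
# of the mixing parameters (x, y) with |q/p| and phi used by the bin-flip fit.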
for _name in 'X', 'F', 'Fbar', 'r':
    print _name
    pprint.pprint(locals()[_name])

success = 0
lim = 100
failed = []
drawRatioPlots = False
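# Bookkeeping for the loop over input files below: count successes, record
# failures, and process at most `lim` files; ratio plots are off by default.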

if not name.startswith('MINT_'):
    name = 'MINT_' + name
datainfo = datalib.get_data_info(name)

for fileNo in range(1, lim + 1):

    #Setting up variables and reading in events

    #Retrieve the dataset as a DalitzEventList and nTuple
    print "Processing file number {}... \n".format(fileNo)
    fname = datainfo['files'][fileNo - 1]
    print fname
    fdata = TFile.Open(fname)
    evtlist = DalitzEventList(fdata.Get('DalitzEventList'))
    evtData = fdata.Get('DalitzEventList')

    upperHists = []
    lowerHists = []
Example #5
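# The definition of x is cut off above this snippet; presumably it is read
# from the same config as the other parameters:
x = config.float('x')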
y = config.float('y')
qoverp = config.float('qoverp')
phi = config.float('phi')
lifetime = config.float('lifetime')
width = 1. / lifetime
deltam = x * width
deltagamma = y * 2 * width

rndm = ROOT.TRandom3(0)

generator = TimeDependentGenerator(pattern_D0Topipipi0, width, deltam,
                                   deltagamma, qoverp, phi, rndm)
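# Generator of time-dependent decays for the D0 -> pi pi pi0 pattern,
# configured with the width, mixing and CP-violation parameters derived above.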

tree = datalib.get_data('MINT_' + name)
evtlist = DiskResidentEventList(
    datalib.get_data_info('MINT_' + name)['files'][0], 'OPEN')

diffcalc = PhaseDifferenceCalc(pattern_D0Topipipi0, config.fnames[0])

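# Pick the first entry with tag == +1 (evt) and the first with tag == -1
# (cpevt) from the event list.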
for i in xrange(tree.GetEntries()):
    tree.GetEntry(i)
    if tree.tag == 1:
        evt = evtlist.getEvent(i)
        break
for i in xrange(tree.GetEntries()):
    tree.GetEntry(i)
    if tree.tag == -1:
        cpevt = evtlist.getEvent(i)
        break
print 'pattern size', evt.eventPattern().size()
s13 = evt.s(1, 2)