Example #1
0
def test_ROOT():
    """Round-trip GRLs through ROOT files, at top level and in a subdirectory."""
    try:
        import ROOT
    except ImportError:
        raise SkipTest

    filename = os.path.join(DIRNAME, 'test.root')

    # round-trip through the top-level directory of a ROOT file
    rfile = ROOT.TFile.Open(filename, 'recreate')
    rfile.Close()
    original = GRL(GRLA)
    original.save(filename + ':/lumi')
    assert_equal(original, GRL(filename + ':/lumi'))

    # round-trip through a subdirectory
    rfile = ROOT.TFile.Open(filename, 'recreate')
    rfile.mkdir('dir')
    rfile.Close()
    original = GRL(GRLA)
    original.save(filename + ':/dir/lumi')
    assert_equal(original, GRL(filename + ':/dir/lumi'))

    # the object stored in the file must be a TObjString
    rfile = ROOT.TFile.Open(filename)
    stored = rfile.Get('dir/lumi')
    assert_true(isinstance(stored, ROOT.TObjString))
    rfile.Close()
    os.unlink(filename)
Example #2
0
def grl_logic_test():
    """Exercise the set-algebra operators of GRL."""
    left = GRL(GRLA)
    right = GRL(GRLB)

    # each binary operator must at least run without raising
    left & right
    left ^ right
    left | right
    left + right
    left - right

    # identity, set-algebra identities, and commutativity
    assert_equal(left, left)
    assert_equal(right, right)
    assert_true(left != right)
    assert_equal((left ^ right), ((left | right) - (left & right)))
    assert_equal((left - right), ((left + right) - right))
    assert_true(not (left - left))
    assert_true(not (left ^ left))
    assert_equal(left & right, right & left)
    assert_equal(left | right, right | left)
    assert_equal(left ^ right, right ^ left)

    # in-place variants must also run without raising
    left &= right
    left ^= right
    left |= right
    left += right
    left -= right
Example #3
0
    def work(self):
        """Merge the GRL XML strings stored in every input file into grl.xml."""
        merged = GRL()
        for path in self.files:
            with root_open(path) as rfile:
                for key in rfile.Lumi.keys():
                    xml = str(key.ReadObj().GetString())
                    merged |= GRL(xml, from_string=True)
        merged.save('grl.xml')
Example #4
0
    def work(self):
        # Combine the good-runs lists found under Lumi/ in each input file
        # into a single GRL and write it out as XML.
        combined = GRL()
        for filename in self.files:
            with root_open(filename) as infile:
                for entry in infile.Lumi.keys():
                    grl_xml = str(entry.ReadObj().GetString())
                    combined |= GRL(grl_xml, from_string=True)
        combined.save('grl.xml')
Example #5
0
def test_read_yaml():
    """Round-trip a GRL through YAML serialization.

    Skips when PyYAML is not installed.
    """
    try:
        import yaml
    except ImportError:
        raise SkipTest

    grl = GRL(GRLA)
    grl.save('test.yml')
    try:
        # reading the file back must reproduce the original GRL
        grl2 = GRL('test.yml')
        assert_equal(grl, grl2)
    finally:
        # remove the temp file even when the assertion fails,
        # so a failing run does not leave test.yml behind
        os.unlink('test.yml')
Example #6
0
    def merge(inputs, output, metadata):
        """Merge ROOT output trees (and, for data, their GRLs) into one file.

        Parameters:
            inputs -- list of input ROOT file paths
            output -- output file path without the .root extension
            metadata -- dataset metadata; GRLs are merged only for DATA
        """
        # merge output trees
        root_output = output + '.root'
        # NOTE(review): the return code of hadd is not checked here, so a
        # failed merge would go unnoticed
        subprocess.call(['hadd', root_output] + inputs)

        if metadata.datatype == datasets.DATA:
            # merge the GRLs stored at /lumi in each input file
            grl = GRL()
            for input_file in inputs:  # renamed: 'input' shadowed the builtin
                grl |= GRL('%s:/lumi' % input_file)
            grl.save('%s:/lumi' % root_output)
Example #7
0
def test_read_yaml():
    """Check that a GRL saved as YAML can be read back unchanged."""
    try:
        import yaml
    except ImportError:
        # PyYAML is optional; skip when unavailable
        raise SkipTest

    original = GRL(GRLA)
    original.save('test.yml')
    reloaded = GRL('test.yml')
    assert_equal(original, reloaded)
    os.unlink('test.yml')
Example #8
0
def from_string_test():
    """Construct GRLs from raw XML text via every supported entry point."""
    # read the reference GRL file into a single string
    with open(GRLA) as xml_file:
        grl_string = ''.join(xml_file.readlines())

    # constructor path
    grl = GRL(grl_string, from_string=True)

    # from_string() on a fresh instance
    grl = GRL()
    grl.from_string(grl_string)

    # serializing and re-parsing must give back an equal GRL
    grlb = GRL(grl.str(), from_string=True)

    assert_equal(grlb, grl)
Example #9
0
def from_string_test():
    # Load the reference XML as one text blob.
    with open(GRLA) as handle:
        content = ''.join(handle.readlines())

    # Path 1: parse the string directly in the constructor.
    parsed = GRL(content, from_string=True)

    # Path 2: parse into an already-constructed (empty) GRL.
    parsed = GRL()
    parsed.from_string(content)

    # str() output must round-trip to an equal GRL.
    roundtrip = GRL(parsed.str(), from_string=True)

    assert_equal(roundtrip, parsed)
Example #10
0
def test_ROOT():
    """Store and retrieve GRLs inside ROOT files."""
    try:
        import ROOT
    except ImportError:
        # PyROOT is optional; skip when unavailable
        raise SkipTest

    filename = os.path.join(DIRNAME, 'test.root')

    # create an empty ROOT file
    f = ROOT.TFile.Open(filename, 'recreate')
    f.Close()

    # save/load at the file's top level
    saved = GRL(GRLA)
    saved.save(filename + ':/lumi')
    loaded = GRL(filename + ':/lumi')
    assert_equal(saved, loaded)

    # recreate the file, this time with a subdirectory
    f = ROOT.TFile.Open(filename, 'recreate')
    f.mkdir('dir')
    f.Close()

    # save/load inside the subdirectory
    saved = GRL(GRLA)
    saved.save(filename + ':/dir/lumi')
    loaded = GRL(filename + ':/dir/lumi')
    assert_equal(saved, loaded)

    # the GRL must be stored as a TObjString
    f = ROOT.TFile.Open(filename)
    obj = f.Get('dir/lumi')
    assert_true(isinstance(obj, ROOT.TObjString))
    f.Close()
    os.unlink(filename)
Example #11
0
import ROOT
ROOT.gSystem.Load('GoodRunsLists/StandAlone/libGoodRunsLists.so')
from ROOT import Root

# Intersect two GRLs with the ATLAS GoodRunsLists package:
# read both XML files, merge with AND, and write the result.
xml_reader = Root.TGoodRunsListReader()
xml_reader.AddXMLFile('grlA.xml')
xml_reader.AddXMLFile('grlB.xml')
xml_reader.Interpret()
overlap = xml_reader.GetMergedGoodRunsList(Root.AND)
xml_writer = Root.TGoodRunsListWriter()
xml_writer.SetGoodRunsList(overlap)
xml_writer.SetFilename('grl_overlap_goodrunslists.xml')
xml_writer.WriteXMLFile()

# The same operation with goodruns: a single '&'.
from goodruns import GRL

grl_a = GRL('grlA.xml')
grl_b = GRL('grlB.xml')
grl_overlap = grl_a & grl_b
grl_overlap.save('grl_overlap_goodruns.xml')
Example #12
0
def save_test():
    """save() writes XML files and rejects unknown extensions."""
    grl = GRL(GRLA)
    grl.save('testA.xml')
    os.unlink('testA.xml')
    # an unrecognized extension must raise ValueError
    assert_raises(ValueError, grl.save, 'testB.badext')
Example #13
0
def file_init_test():
    """A GRL can be constructed directly from an open file object."""
    with open(GRLA) as grl_file:
        grl = GRL(grl_file)
        # (run, lumiblock) membership check on the parsed content
        assert_true((180225, 87) in grl)
Example #14
0
def dict_init_test():
    """A GRL can be constructed from a {run: [(lb_first, lb_last), ...]} dict."""
    mapping = {1234: [(1, 2), (4, 5)]}
    grl = GRL(mapping)
    assert_true((1234, 1) in grl)
Example #15
0
def str_init_test():
    """A GRL built from a filename contains exactly the listed lumiblocks."""
    grl = GRL(GRLA)
    # (run, lumiblock) membership: present vs. absent
    assert_true((180225, 87) in grl)
    assert_true((180225, 1) not in grl)
Example #16
0
def iter_test():
    """Iterating a GRL yields run numbers that index back into it."""
    grl = GRL(GRLA)
    for run_number in grl:
        # subscripting by each yielded run must succeed
        blocks = grl[run_number]
Example #17
0
def save_test():
    # Saving to an .xml path succeeds; remove the artifact afterwards.
    grl = GRL(GRLA)
    grl.save('testA.xml')
    os.unlink('testA.xml')
    # Saving with an unsupported extension raises ValueError.
    assert_raises(ValueError, grl.save, 'testB.badext')
Example #18
0
    def work(self):
        """
        This is the one function that all "ATLASStudent"s must implement.

        Reads the input files as a TreeChain, applies the event filters,
        and fills the output 'higgstautauhh' ntuple event by event. For
        data, the merged GRL is also written to the output file.
        """
        datatype = self.metadata.datatype
        year = self.metadata.year
        verbose = self.args.verbose

        OutputModel = C3POEvent

        if datatype == datasets.MC:
            # only create truth branches for MC
            OutputModel += (
                    FourVectModel.prefix('resonance_') +
                    TrueTau.prefix('truetau1_') +
                    TrueTau.prefix('truetau2_'))

        onfilechange = []
        count_funcs = {}

        if datatype in (datasets.MC, datasets.EMBED):

            # weight cutflow counts by the per-event MC weight
            def mc_weight_count(event):
                return event.mc_event_weight

            count_funcs = {
                'mc_weight': mc_weight_count,
            }

        trigger_config = None

        if datatype != datasets.EMBED:
            # trigger config tool to read trigger info in the ntuples
            trigger_config = get_trigger_config()

            # update the trigger config maps on every file change
            onfilechange.append((update_trigger_config, (trigger_config,)))

        if datatype == datasets.DATA:
            merged_grl = GRL()

            # accumulate each file's GRL XML string into merged_grl on
            # every file change (argument list dictated by the hook)
            def update_grl(student, grl, name, file, tree):

                grl |= str(file.Get('Lumi/%s' % student.metadata.treename).GetString())

            onfilechange.append((update_grl, (self, merged_grl,)))

        if datatype == datasets.DATA:
            merged_cutflow = Hist(1, 0, 1, name='cutflow', type='D')
        else:
            # second bin holds the MC-weighted event count
            merged_cutflow = Hist(2, 0, 2, name='cutflow', type='D')

        # sum the per-file cutflow counters into merged_cutflow
        def update_cutflow(student, cutflow, name, file, tree):

            year = student.metadata.year
            datatype = student.metadata.datatype
            if datatype == datasets.MC:
                cutflow[0] += file.cutflow_event[0]
                cutflow[1] += file.cutflow_event_mc_weight[0]
            else:
                cutflow[0] += file.cutflow_event[0]

        onfilechange.append((update_cutflow, (self, merged_cutflow,)))

        # initialize the TreeChain of all input files
        # (each containing one tree named self.metadata.treename)
        chain = TreeChain(
                self.metadata.treename,
                files=self.files,
                events=self.events,
                read_branches_on_demand=True,
                cache=True,
                onfilechange=onfilechange)

        # create output tree
        self.output.cd()
        tree = Tree(name='higgstautauhh', model=OutputModel)

        # branches copied verbatim from the input chain into the output
        copied_variables = [
                'actualIntPerXing',
                'averageIntPerXing',
                'RunNumber',
                'EventNumber',
                'lbn']

        tree.set_buffer(
                chain._buffer,
                branches=copied_variables,
                create_branches=True,
                visible=False)

        chain.always_read(copied_variables)

        # set the event filters
        event_filters = EventFilterList([
            CoreFlags(
                count_funcs=count_funcs),
            TauSelected(2,
                count_funcs=count_funcs),
            TruthMatching(
                passthrough=datatype != datasets.MC,
                count_funcs=count_funcs),
            MCWeight(
                datatype=datatype,
                tree=tree,
                passthrough=datatype != datasets.MC,
                count_funcs=count_funcs)
        ])

        self.filters['event'] = event_filters

        chain._filters += event_filters

        define_objects(chain, year, skim=False)

        # define tree objects
        taus = [
            tree.define_object(name='tau1', prefix='tau1_'),
            tree.define_object(name='tau2', prefix='tau2_')]

        if datatype == datasets.MC:
            truetaus = [
                tree.define_object(name='truetau1', prefix='truetau1_'),
                tree.define_object(name='truetau2', prefix='truetau2_')]

            tree.define_object(name='resonance', prefix='resonance_')

        # entering the main event loop...
        for event in chain:

            # sort taus and jets in decreasing order by pT
            event.taus.sort(key=lambda tau: tau.pt, reverse=True)

            # TauSelected(2, ...) above presumably guarantees exactly two
            # taus here — TODO confirm; unpacking raises otherwise
            tau1, tau2 = event.taus

            # MET
            METx = event.MET.etx
            METy = event.MET.ety
            MET_vect = Vector2(METx, METy)
            MET = event.MET.et
            MET_phi = event.MET.phi

            tree.MET = MET
            tree.MET_x = METx
            tree.MET_y = METy
            tree.MET_phi = MET_phi

            # MET significance; -1 flags the undefined case sumET == 0
            sumET = event.MET.sumet
            tree.sumET = sumET
            if sumET != 0:
                tree.MET_sig = ((2. * MET / GeV) /
                        (utils.sign(sumET) * sqrt(abs(sumET / GeV))))
            else:
                tree.MET_sig = -1.

            # use MMC values from skim
            mmc_mass = event.tau_MMC_mass
            mmc_resonance = event.tau_MMC_resonance
            mmc_met = Vector2(event.tau_MMC_MET_x, event.tau_MMC_MET_y)

            tree.mass_mmc_tau1_tau2 = mmc_mass
            tree.mmc_resonance.copy_from(mmc_resonance)
            if mmc_mass > 0:
                tree.mmc_resonance_pt = mmc_resonance.Pt()
            tree.MET_mmc = mmc_met.Mod()
            tree.MET_mmc_x = mmc_met.X()
            tree.MET_mmc_y = mmc_met.Y()
            # NOTE(review): phi is stored as pi - Phi(); confirm this
            # convention is intended
            tree.MET_mmc_phi = math.pi - mmc_met.Phi()

            # truth matching
            if datatype == datasets.MC:

                resonance, tau_decays = get_taus(event)

                if resonance is not None:

                    FourVectModel.set(tree.resonance, resonance)

                    # greedily match each reco tau to the first remaining
                    # truth decay whose visible four-vector it matches
                    matched_taus = []
                    decays = tau_decays[:]
                    for itau, tau in enumerate(event.taus):
                        for idecay, tau_decay in enumerate(decays):
                            if tau.matches_vect(tau_decay.fourvect_visible):
                                tau_decay.matched = True
                                tau_decay.matched_object = tau
                                tau.matched = True
                                tau.matched_object = tau_decay
                                TrueTau.set(truetaus[itau], tau_decay,
                                        verbose=verbose)
                                decays.pop(idecay)
                                matched_taus.append(itau)
                                break

                    # assign any unmatched decays to the remaining reco slots
                    if len(decays) > 0:
                        for idecay, decay in enumerate(decays):
                            reco_idx = -1
                            # NOTE(review): range(2).remove(...) only works
                            # on Python 2 (where range returns a list);
                            # Python 3 range objects have no remove()
                            remaining_idx = range(2)
                            for imatched in remaining_idx:
                                if imatched not in matched_taus:
                                    reco_idx = imatched
                                    remaining_idx.remove(imatched)
                                    break
                            # NOTE(review): 'tau_decay' is the stale loop
                            # variable left over from the matching loop
                            # above; 'decay' was probably intended here
                            TrueTau.set(truetaus[reco_idx], tau_decay,
                                    verbose=verbose)

                    if len(tau_decays) == 2:
                        # write truth met
                        fourvect_missing = (tau_decays[0].fourvect_missing +
                                            tau_decays[1].fourvect_missing)

                        tree.MET_true = fourvect_missing.Pt()
                        tree.MET_phi_true = fourvect_missing.Phi()
                        tree.MET_x_true = tree.MET_true * math.cos(tree.MET_phi_true)
                        tree.MET_y_true = tree.MET_true * math.sin(tree.MET_phi_true)
                        tree.MET_phi_diff = Vector2.Phi_mpi_pi(tree.MET_phi_true - MET_phi)

            # tau - vertex association
            tree.tau_same_vertex = (
                    tau1.privtx_x == tau2.privtx_x and
                    tau1.privtx_y == tau2.privtx_y and
                    tau1.privtx_z == tau2.privtx_z)

            # fill tau block
            for outtau, intau in zip(taus, event.taus):
                RecoTau.set(outtau, intau, verbose=verbose)

            # fill output ntuple
            tree.Fill(reset=True)

        self.output.cd()
        tree.FlushBaskets()
        tree.Write()

        # for data, store the merged GRL alongside the output tree
        if datatype == datasets.DATA:
            xml_string = ROOT.TObjString(merged_grl.str())
            xml_string.Write('lumi')
        merged_cutflow.Write()