import uproot
from Tools.helpers import get_samples
from Tools.config_helpers import redirector_ucsd, redirector_fnal
from Tools.nano_mapping import make_fileset, nano_mapping


# Load the QCD sample definitions and build the corresponding fileset.
samples = get_samples(f_in='samples_QCD.yaml')

fileset = make_fileset(['QCD'], samples, redirector=redirector_ucsd, small=False)
print(fileset)

# NOTE(review): resolved an unresolved git merge conflict here.  HEAD scanned
# the QCD fileset with an index-based loop; the incoming branch
# (6987d93c6148...) rebuilt the fileset for TTW and iterated by sample name.
# Resolution keeps the QCD fileset built above together with the incoming
# branch's cleaner name-based loop; restore the TTW make_fileset call if that
# branch's behavior was the intended one.
good = []  # files that open cleanly and contain an "Events" tree
bad = []   # files that fail to open over XRootD

for sample in list(fileset.keys()):
    for f_in in fileset[sample]:
        print (f_in)
        try:
            tree = uproot.open(f_in)["Events"]
            good.append(f_in)
        except OSError:
            # Completed from the identical check later in this file — the
            # original snippet was truncated inside the try statement.
            print("XRootD Error")
            bad.append(f_in)
Example #2
0
import uproot
from Tools.helpers import get_samples
from Tools.config_helpers import redirector_ucsd, redirector_fnal
from Tools.nano_mapping import make_fileset, nano_mapping

# Sanity-check that every QCD NanoAOD file in the UL2018 fileset can be
# opened over XRootD and contains an "Events" tree.
samples = get_samples()

fileset = make_fileset(['QCD'],
                       samples,
                       redirector=redirector_ucsd,
                       small=False,
                       year='UL2018')

good = []  # files that opened and exposed the Events tree
bad = []   # files that raised an XRootD (OSError) failure

for sample, files in fileset.items():
    for f_in in files:
        print(f_in)
        try:
            tree = uproot.open(f_in)["Events"]
        except OSError:
            print("XRootD Error")
            bad.append(f_in)
        else:
            good.append(f_in)
Example #3
0
    isData = True if DASname.count('Run20') else False
    isFastSim = False if not DASname.count('Fast') else True
    era = DASname[DASname.find("Run") + len('Run2000'):DASname.find("Run") +
                  len('Run2000A')]
    if DASname.count('Autumn18') or DASname.count('Run2018'):
        return 2018, era, isData, isFastSim
    elif DASname.count('Fall17') or DASname.count('Run2017'):
        return 2017, era, isData, isFastSim
    elif DASname.count('Summer16') or DASname.count('Run2016'):
        return 2016, era, isData, isFastSim
    else:
        ### our private samples right now are all Autumn18 but have no identifier.
        return 2018, 'X', False, False


# Load the master nanoAOD sample dictionary.
samples = get_samples()  # loads the nanoAOD samples

# load config
cfg = loadConfig()

print("Loaded version %s from config." % cfg['meta']['version'])

# NOTE(review): mid-file import; conventionally this belongs at the top of
# the module with the other imports.
import argparse

# Command-line options for baby (ntuple) production.
argParser = argparse.ArgumentParser(description="Argument parser")
argParser.add_argument('--tag',
                       action='store',
                       default=None,
                       help="Tag on github for baby production")
argParser.add_argument('--user', action='store', help="Your github user name")
argParser.add_argument('--skim',
Example #4
0
    from Tools.nano_mapping import make_fileset, nano_mapping

    from processor.meta_processor import get_sample_meta
    overwrite = True
    local = True

    # load the config and the cache
    cfg = loadConfig()
    
    cacheName = 'charge_flip_check'
    cache = dir_archive(os.path.join(os.path.expandvars(cfg['caches']['base']), cacheName), serialized=True)
    histograms = sorted(list(desired_output.keys()))
    
    year = 2018
    
    samples = get_samples(2018)

    #fileset = make_fileset(['TTW', 'TTZ'], samples, redirector=redirector_ucsd, small=True, n_max=5)  # small, max 5 files per sample
    #fileset = make_fileset(['DY'], samples, redirector=redirector_ucsd, small=True, n_max=10)
    # NOTE(review): unlike the make_fileset version defined later in this file
    # (datasets, samples, ...), this call omits the `samples` argument —
    # confirm the signature of the make_fileset actually imported here.
    fileset = make_fileset(['top', 'DY',], redirector=redirector_ucsd, small=False)
   
    # Make sure every dataset in the fileset has an entry in the output dict.
    add_processes_to_output(fileset, desired_output)

    #meta = get_sample_meta(fileset, samples)
   
    if local:

        exe_args = {
            'workers': 16,
            'function_args': {'flatten': False},
            'schema': NanoAODSchema,
Example #5
0
nano_mapping = load_yaml(data_path + 'nano_mapping.yaml')


def make_fileset(datasets,
                 samples,
                 redirector=redirector_ucsd,
                 small=False,
                 n_max=1,
                 year=2018):
    """Build a {nano_sample: [file paths]} mapping for the requested datasets.

    Each dataset group is resolved through nano_mapping[year] to its NanoAOD
    samples; DBS is queried for the files of every sample and each file path
    is prefixed with the given XRootD redirector.  With small=True only the
    first n_max files per sample are kept.  (`samples` is accepted for
    interface compatibility but is not read here.)
    """
    fileset = {}
    for dataset in datasets:
        for nano_sample in nano_mapping[year][dataset]:
            remote_files = DBSSample(dataset=nano_sample).get_files()
            paths = [redirector + f.name for f in remote_files]
            fileset[nano_sample] = paths if not small else paths[:n_max]

    return fileset


if __name__ == '__main__':

    # Load the default sample list and extend it with the QCD samples.
    samples = get_samples()
    samples.update(get_samples('samples_QCD.yaml'))

    # Build the 2018 fileset for the TTW, TTZ and QCD dataset groups.
    fileset = make_fileset(['TTW', 'TTZ', 'QCD'], samples, year=2018)