示例#1
0
def build_config(nickname):
  """Return the jet-identification and jet-kinematics settings for *nickname*."""
  config = jsonTools.JsonDict()

  # merge settings from included sub-configs (none for this module)
  includes = []
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # explicit jet selection: loose 2015 jet ID, pt > 20 GeV, |eta| < 4.7,
  # and a dR(jet, lepton) > 0.5 cleaning cut
  jet_settings = {
    "JetID": "loose",
    "JetIDVersion": "2015",
    "PuJetIDs": [],
    "PuJetIDFullDiscrName": "pileupJetIdfullDiscriminant",
    "JetTaggerLowerCuts": [],
    "JetTaggerUpperCuts": [],
    "JetLowerPtCuts": ["20.0"],
    "JetUpperAbsEtaCuts": ["4.7"],
    "JetLeptonLowerDeltaRCut": 0.5,
  }
  for key, value in jet_settings.items():
    config[key] = value

  return config
def build_config(nickname):
    """Return the tau energy-scale correction settings for *nickname*."""
    config = jsonTools.JsonDict()

    # merge settings from included sub-configs (none for this module)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # the dedicated MSSM-HTT-2016 correction applies only to Summer16 MC
    # and 2016 embedded samples
    is_corrected_era = re.search("Summer16|Embedding2016", nickname)
    config["TauEnergyCorrection"] = "mssmhtt2016" if is_corrected_era else "none"

    if re.search("Run201", nickname):
        # data: all per-decay-mode scale factors stay at unity
        scale_factors = {
            "TauEnergyCorrectionOneProng": 1.0,
            "TauEnergyCorrectionOneProngPiZeros": 1.0,
            "TauEnergyCorrectionThreeProng": 1.0,
            "TauElectronFakeEnergyCorrectionOneProng": 1.0,
            "TauElectronFakeEnergyCorrectionOneProngPiZeros": 1.0,
        }
    else:
        # simulation: per-decay-mode energy-scale shifts
        scale_factors = {
            "TauEnergyCorrectionOneProng": 0.995,
            "TauEnergyCorrectionOneProngPiZeros": 1.011,
            "TauEnergyCorrectionThreeProng": 1.006,
            "TauElectronFakeEnergyCorrectionOneProng": 1.024,
            "TauElectronFakeEnergyCorrectionOneProngPiZeros": 1.076,
        }
    for key, value in scale_factors.items():
        config[key] = value

    return config
def build_config(nickname):
  """Return the loose-muon selection settings (2017 setup) for *nickname*."""
  config = jsonTools.JsonDict()

  # merge settings from included sub-configs
  includes = [
    "HiggsAnalysis.KITHiggsToTauTau.data.ArtusConfigs.Run2MSSM2017.Includes.settingsMuonID"
  ]
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # explicit loose-muon identification, isolation and kinematic cuts
  loose_muon_settings = {
    "LooseMuonID": "medium",
    "LooseMuonIsoType": "user",
    "LooseMuonIso": "none",
    "LooseMuonIsoPtSumOverPtUpperThresholdEB": 0.3,
    "LooseMuonIsoPtSumOverPtUpperThresholdEE": 0.3,
    "LooseMuonLowerPtCuts": ["10.0"],
    "LooseMuonUpperAbsEtaCuts": ["2.4"],
    "LooseMuonTrackDxyCut": 0.045,
    "LooseMuonTrackDzCut": 0.2,
    "DirectIso": True,
  }
  for key, value in loose_muon_settings.items():
    config[key] = value

  ## further settings taken into account by ValidLooseMuonsProducer:
  # - Year (should be 2017), written into the 'base' config

  return config
def build_config(nickname):
  """Return the veto-muon (di-muon veto) selection settings for *nickname*."""
  config = jsonTools.JsonDict()

  # merge settings from included sub-configs
  includes = [
    "HiggsAnalysis.KITHiggsToTauTau.data.ArtusConfigs.Run2MSSM.Includes.settingsMuonID"
  ]
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # explicit veto-muon identification, isolation and kinematic cuts
  veto_muon_settings = {
    "VetoMuonID": "veto",
    "VetoMuonIsoType": "user",
    "VetoMuonIso": "none",
    "VetoMuonIsoPtSumOverPtUpperThresholdEB": 0.3,
    "VetoMuonIsoPtSumOverPtUpperThresholdEE": 0.3,
    "VetoMuonLowerPtCuts": ["15.0"],
    "VetoMuonUpperAbsEtaCuts": ["2.4"],
    "DiVetoMuonMinDeltaRCut": 0.15,
    "DiVetoMuonVetoMode": "veto_os_keep_ss",
    "DirectIso": True,
  }
  for key, value in veto_muon_settings.items():
    config[key] = value

  return config
def build_config(nickname):
  """Return jet-energy-correction shift pipelines (up/down) for MC samples."""
  config = jsonTools.JsonDict()

  # merge settings from included sub-configs (none for this module)
  includes = []
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # JEC shift pipelines only exist for simulation, not for data or embedding
  if not re.search("Run201|Embedding", nickname):
    for pipeline_name, shift in (("jecUncUp", 1.0), ("jecUncDown", -1.0)):
      config[pipeline_name] = {
        "JetEnergyCorrectionUncertaintyShift" : shift,
        "SvfitCacheFileFolder" : "nominal"
      }

  return config
示例#6
0
def build_config(nickname):
    """Return the loose-electron selection settings for *nickname*."""
    config = jsonTools.JsonDict()

    # merge settings from included sub-configs
    includes = [
        "HiggsAnalysis.KITHiggsToTauTau.data.ArtusConfigs.Run2MSSM.Includes.settingsElectronID"
    ]
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # explicit loose-electron identification, isolation and kinematic cuts
    loose_electron_settings = {
        "LooseElectronReco": "mvanontrig",
        "LooseElectronID": "user",
        "LooseElectronIDType": "mvabased2015andlater",
        "LooseElectronIDName": "electronMVAValueMapProducer:ElectronMVAEstimatorRun2Spring16GeneralPurposeV1Values",
        # per-region working points of the Spring16 MVA discriminator
        "LooseElectronMvaIDCutEB1": 0.836695742607,
        "LooseElectronMvaIDCutEB2": 0.715337944031,
        "LooseElectronMvaIDCutEE": 0.356799721718,
        "LooseElectronIsoType": "user",
        "LooseElectronIso": "none",
        "LooseElectronIsoPtSumOverPtUpperThresholdEB": 0.3,
        "LooseElectronIsoPtSumOverPtUpperThresholdEE": 0.3,
        "LooseElectronLowerPtCuts": ["10.0"],
        "LooseElectronUpperAbsEtaCuts": ["2.5"],
        "LooseElectronTrackDxyCut": 0.045,
        "LooseElectronTrackDzCut": 0.2,
        "DirectIso": True,
    }
    for key, value in loose_electron_settings.items():
        config[key] = value

    return config
示例#7
0
	def get_yield_unc_rel(histogram_path, root_file, root_object_paths):
		"""Return the 'yield_unc_rel' value stored in the histogram's metadata, or None.

		The metadata is expected as a TObjString at '<histogram_path>_metadata'
		inside *root_file*; *root_object_paths* lists the available object paths.
		"""
		metadata_path = histogram_path + "_metadata"
		if metadata_path not in root_object_paths:
			return None
		metadata = jsonTools.JsonDict(root_file.Get(metadata_path).GetString().Data())
		return metadata.get("yield_unc_rel", None)
示例#8
0
def main():
    """Search Kappa skim files for events matching run/lumi/event white-/blacklists.

    The selection is read from a JSON config with optional keys RunWhitelist,
    LumiWhitelist, EventWhitelist, RunBlacklist, LumiBlacklist, EventBlacklist.
    For each input file, logs the number of tree entries passing the selection.
    """
    ROOT.gROOT.SetBatch(True)

    parser = argparse.ArgumentParser(
        # fixed typo in user-facing help text: "Searcg" -> "Search"
        description="Search for Kappa files containing certain events.",
        parents=[logger.loggingParser])

    parser.add_argument("files",
                        nargs="+",
                        help="Kappa skim output files to check")
    parser.add_argument(
        "-c",
        "--config",
        help="JSON config file containing the event selection.")

    args = parser.parse_args()
    logger.initLogger(args)

    config = jsonTools.JsonDict(args.config)

    selections = []

    # whitelists: any listed run/lumi/event may match (OR of equalities);
    # the three near-identical builders are folded into one loop
    for list_key, quantity in (("RunWhitelist", "nRun"),
                               ("LumiWhitelist", "nLumi"),
                               ("EventWhitelist", "nEvent")):
        values = config.get(list_key, [])
        if len(values) > 0:
            selections.append("||".join([
                "(KEventMetadata.%s==%d)" % (quantity, value)
                for value in values
            ]))

    # blacklists: every listed run/lumi/event must differ (AND of inequalities)
    for list_key, quantity in (("RunBlacklist", "nRun"),
                               ("LumiBlacklist", "nLumi"),
                               ("EventBlacklist", "nEvent")):
        values = config.get(list_key, [])
        if len(values) > 0:
            selections.append("&&".join([
                "(KEventMetadata.%s!=%d)" % (quantity, value)
                for value in values
            ]))

    # all individual selections must hold simultaneously
    selection = "&&".join(["(%s)" % cut for cut in selections])

    for input_file in args.files:
        tree = ROOT.TChain("Events")
        tree.AddFile(input_file)
        # "goff" disables graphics; Draw returns the number of selected entries
        entries = tree.Draw("1", selection, "goff")
        log.info("%4d entries: %s" % (entries, input_file))
示例#9
0
def main():
    """Compare the repository versions recorded in two Artus configs.

    Prints the corresponding ``git diff`` command (and executes it when DEBUG
    logging is enabled) plus a GitHub-style compare link for the two versions.
    """
    parser = argparse.ArgumentParser(
        description=
        "Compare different repository versions configured in Artus configs. The script has to be executed in the directory of the repository.",
        parents=[logger.loggingParser])
    parser.add_argument(
        "files",
        help=
        "Two configurations. The configs should be ordered by the expected repository version. Can be either Artus output root files or JSON text files",
        nargs=2)

    args = parser.parse_args()
    logger.initLogger(args)

    config1 = jsonTools.JsonDict(args.files[0])
    config2 = jsonTools.JsonDict(args.files[1])

    # repository versions are stored under keys that are absolute paths ending
    # in this repo's directory name; the longest such key is the most specific
    dirname = os.path.basename(os.getcwd())
    repo_key1 = sorted([
        key for key in config1.keys()
        if key.startswith("/") and key.endswith(dirname)
    ],
                       key=lambda item: len(item))[-1]
    repo_key2 = sorted([
        key for key in config2.keys()
        if key.startswith("/") and key.endswith(dirname)
    ],
                       key=lambda item: len(item))[-1]

    repo_version1 = config1[repo_key1]
    repo_version2 = config2[repo_key2]
    # three-dot form for the GitHub compare URL; two-dot form for local git diff
    diff_string = "%s...%s" % (repo_version1, repo_version2)

    command = "git diff %s..%s" % (repo_version1, repo_version2)
    if log.isEnabledFor(logging.DEBUG):
        log.info("")
        logger.subprocessCall(shlex.split(command))
    log.info("\n" + command)

    # NOTE(review): assumes Popen output is str (Python 2 semantics); on
    # Python 3 the bytes would need decoding before .replace — confirm
    popen_cout, popen_cerr = subprocess.Popen(
        "git config remote.origin.url".split(),
        stdout=subprocess.PIPE).communicate()
    remote_url = popen_cout.replace("\n", "")
    github_link = os.path.join(remote_url, "compare", diff_string)
    log.info(github_link)
    def __init__(self):
        """Initialise common sample state (config, expressions, cuts, period)."""
        super(SamplesBase, self).__init__()

        # accumulated plot/analysis configuration for this sample
        self.config = jsonTools.JsonDict({})
        # optional postfit scale factors; None until explicitly provided
        self.postfit_scales = None
        self.expressions = expressions.ExpressionsDict()

        # names of cuts to be excluded from the selection
        self.exclude_cuts = []
        # presumably a data-taking period tag overridden by subclasses — confirm
        self.period = "run"
示例#11
0
def build_config(nickname):
    """Return b-tagged-jet settings (CSVv2, 94X recommendation) for *nickname*."""
    config = jsonTools.JsonDict()
    datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(
        os.path.expandvars(
            "$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

    # define frequently used conditions
    isEmbedded = datasetsHelper.isEmbedded(nickname)

    # merge settings from included sub-configs (none for this module)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # explicit configuration: CSVv2 algorithm, 94X recommendation
    btag_settings = {
        "BTaggedJetID_documentation": "https://twiki.cern.ch/twiki/bin/viewauth/CMS/HiggsToTauTauWorking2016#b_tagging",
        "BTagScaleFactorFile": "$CMSSW_BASE/src/Artus/KappaAnalysis/data/CSVv2_94XSF_V1_B_F.csv",
        "BTagEfficiencyFile": "$CMSSW_BASE/src/Artus/KappaAnalysis/data/tagging_efficiencies_moriond2017.root",
        "BTaggedJetCombinedSecondaryVertexName": "pfCombinedInclusiveSecondaryVertexV2BJetTags",
        "BTaggerWorkingPoints": [
            "tight:0.9693", "medium:0.8838", "loose:0.5803"
        ],
        "BTaggedJetAbsEtaCut": 2.5,  # 2017 value
        "ApplyBTagSF": True,
        "JetTaggerUpperCuts": [],
        "BTagSFMethod": "PromotionDemotion",
        "BTagShift": 0,
        "BMistagShift": 0,
    }
    for key, value in btag_settings.items():
        config[key] = value

    # Alternative DeepCSV setup (stated to perform better than CSVv2), kept
    # for reference:
    #   BTagScaleFactorFile = $CMSSW_BASE/src/Artus/KappaAnalysis/data/DeepCSV_94XSF_V1_B_F.csv
    #   BTagEfficiencyFile = $CMSSW_BASE/src/Artus/KappaAnalysis/data/tagging_efficiencies_moriond2017.root
    #   BTaggedJetCombinedSecondaryVertexName = pfDeepCSVJetTagsprobbb+pfDeepCSVJetTagsprobb
    #   BTaggerWorkingPoints = ["tight:0.8001", "medium:0.4941", "loose:0.1522"]

    ## further settings taken into account by ValidBTaggedJetsProducer:
    # - Year (should be 2017), written into the 'base' config
    ## further hard-coded settings in the ValidBTaggedJetsProducer:
    # - lower pt_cut for the Jet: 20 GeV -> valid for 2016 & 2017
    # - upper pt_cut for the Jet: 1000 GeV -> valid for 2016 & 2017
    # - parton flavour definition: hadron-based

    return config
示例#12
0
def main():
	"""Resolve includes/comments in the given JSON configs and log the result."""
	parser = argparse.ArgumentParser(description="Tools for JSON files.", parents=[logger.loggingParser])
	parser.add_argument("json", nargs="+", help="JSON configs.")

	args = parser.parse_args()
	logger.initLogger(args)

	# merge the configs, expand include directives, strip comments, then print
	resolved_config = jsonTools.JsonDict(args.json).doIncludes().doComments()
	log.info(resolved_config)
	def get_yield_unc_rel(histogram_path, root_file, root_object_paths):
		"""
		Extracts the data from background estimation methods stored in the metadata TObjString.

		Returns the 'yield_unc_rel' entry of the metadata stored next to the
		histogram, or None when no metadata object exists for this path.
		"""
		metadata_path = histogram_path + "_metadata"
		if metadata_path not in root_object_paths:
			return None
		metadata = jsonTools.JsonDict(root_file.Get(metadata_path).GetString().Data())
		return metadata.get("yield_unc_rel", None)
def build_config(nickname):
    """Return plot-level filter settings vetoing extra electrons and muons."""
    config = jsonTools.JsonDict()

    # quantities needed to evaluate the filter expression below
    config["PlotlevelFilterExpressionQuantities"] = [
        "extraelec_veto",
        "extramuon_veto",
    ]
    config["PlotlevelFilterExpression"] = (
        "(extraelec_veto < 0.5)*(extramuon_veto < 0.5)")

    return config
示例#15
0
def build_config(nickname):
    """Return the Kappa input-collection names for the given sample nickname.

    Gen-level collections are enabled only for simulation; whether a sample is
    MC is inferred from the nickname (data contains "Run201", with a negative
    look-behind so PF-embedded samples still count as MC).
    """
    config = jsonTools.JsonDict()

    # define frequently used conditions
    isMC = not re.search("(?<!PFembedded).Run201", nickname)

    ## fill config:
    # includes
    includes = []
    for include_file in includes:
        analysis_config_module = importlib.import_module(include_file)
        config += analysis_config_module.build_config(nickname)

    # explicit configuration: gen-level collections exist only in MC
    config["GenParticles"] = "genParticles" if isMC else ""
    config["GenTaus"] = "genTaus" if isMC else ""
    config["GenTauJets"] = "tauGenJets" if isMC else ""
    # fixed: removed a stray "" literal that was implicitly concatenated
    # ('"" "genmetTrue"') — the value is unchanged, the code is now explicit
    config["GenMet"] = "genmetTrue" if isMC else ""
    config["GenJets"] = ""
    config["Electrons"] = "electrons"
    config["ElectronMetadata"] = "electronMetadata"
    config["Muons"] = "muons"
    config["Taus"] = "taus"
    config["TauMetadata"] = "taus"

    if re.search("MINIAOD|USER", nickname): config["TaggedJets"] = "ak4PF"

    if re.search("13TeV", nickname): config["PileupDensity"] = "pileupDensity"

    config["Met"] = "met"
    # Puppi MET only exists in the listed 2015/early-2016 productions
    config["PuppiMet"] = "metPuppi" if re.search(
        "(16Dec2015v1|Fall15|Spring16|Run2015)", nickname) else ""
    #config["MvaMets"] = "MVAMET"
    #config["PFChargedHadronsPileUp"] = "pfPileUpChargedHadrons"
    #config["PFChargedHadronsNoPileUp"] = "pfNoPileUpChargedHadrons"
    #config["PFChargedHadronsNoPileUp"] = "pfAllChargedParticles"
    #config["PFNeutralHadronsNoPileUp"] = "pfNoPileUpNeutralHadrons"
    #config["PFPhotonsNoPileUp"] = "pfNoPileUpPhotons"
    #config["PackedPFCandidates"] = "packedPFCandidates"
    # fixed: "BeamSpot" was assigned twice with the same value; deduplicated
    config["BeamSpot"] = "offlineBeamSpot"
    config["VertexSummary"] = "goodOfflinePrimaryVerticesSummary"
    config["EventMetadata"] = "eventInfo"
    config["LumiMetadata"] = "lumiInfo"
    config["GenEventInfoMetadata"] = "genEventInfoMetadata"
    config["FilterMetadata"] = ""
    config["FilterSummary"] = ""
    config["JetMetadata"] = "jetMetadata"
    config["TriggerInfos"] = "triggerObjectMetadata"
    config["TriggerObjects"] = "triggerObjects"

    return config
示例#16
0
def build_config(nickname):
    """Assemble the ordered list of global processors/filters for *nickname*.

    The order of entries in config["Processors"] is significant: filters and
    producers run in the sequence given here.
    """
    config = jsonTools.JsonDict()
    datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(
        os.path.expandvars(
            "$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

    # define frequently used conditions
    isEmbedded = datasetsHelper.isEmbedded(nickname)
    isData = datasetsHelper.isData(nickname) and (not isEmbedded)
    #isTTbar = re.search("TT(To|_|Jets)", nickname)
    #isDY = re.search("DY.?JetsToLL", nickname)
    #isWjets = re.search("W.?JetsToLNu", nickname)
    # samples containing a Z/Higgs boson whose di-lepton decay is reconstructed
    hasBoson = re.search(
        "DY.?JetsToLLM(10to50|50|150)|EWKZ2Jets|^(GluGlu|GluGluTo|VBF|Wminus|Wplus|Z)(HToTauTau|H2JetsToTauTau)|SUSY(BB|GluGlu|GluGluTo)(BB)?HToTauTau",
        nickname)

    ## fill config:
    # includes
    includes = []
    for include_file in includes:
        analysis_config_module = importlib.import_module(include_file)
        config += analysis_config_module.build_config(nickname)

    # explicit configuration
    #config["Processors"] = ["#producer:PrintGenParticleDecayTreeProducer",
    #				"#filter:RunLumiEventFilter"]
    # data and embedded samples start with the JsonFilter (presumably the
    # certified-lumi JSON selection — confirm); MC starts with an empty list
    config["Processors"] = ["filter:JsonFilter"] if (isData or re.search(
        "Embedding201", nickname)) else []  #["filter:RunLumiEventFilter"]
    config["Processors"].append("producer:NicknameProducer")
    if not isData:
        if hasBoson:
            # gen-boson reconstruction only makes sense for signal-like samples
            config["Processors"].extend(
                ("producer:GenBosonFromGenParticlesProducer",
                 "producer:GenBosonDiLeptonDecayModeProducer",
                 "producer:ValidGenTausProducer",
                 "producer:GenDiLeptonDecayModeProducer"))
        # gen-matching and event-weight producers for all simulated samples
        config["Processors"].extend(
            ("producer:GenParticleProducer",
             "producer:RecoElectronGenParticleMatchingProducer",
             "producer:RecoElectronGenTauMatchingProducer",
             "producer:RecoMuonGenParticleMatchingProducer",
             "producer:RecoMuonGenTauMatchingProducer",
             "producer:RecoTauGenParticleMatchingProducer",
             "producer:RecoTauGenTauMatchingProducer",
             "producer:MatchedLeptonsProducer",
             "producer:CrossSectionWeightProducer",
             "producer:GeneratorWeightProducer",
             "producer:NumberGeneratedEventsWeightProducer"))
        if not isEmbedded:
            # pileup reweighting applies to plain MC only, not embedded events
            config["Processors"].append("producer:PUWeightProducer")
    # MET filters run last for all sample types
    config["Processors"].extend(
        ("filter:MetFilter", "producer:MetFilterFlagProducer"))
    return config
示例#17
0
def build_config(nickname):
    """Return plot-level filter settings (lepton vetoes plus tau-ID cuts)."""
    config = jsonTools.JsonDict()

    # quantities needed to evaluate the filter expression below
    config["PlotlevelFilterExpressionQuantities"] = [
        "againstElectronVLooseMVA6_2",
        "extraelec_veto",
        "againstMuonLoose3_2",
        "extramuon_veto",
        "byVLooseIsolationMVArun2v1DBoldDMwLT_1",
        "byVLooseIsolationMVArun2v1DBoldDMwLT_2",
    ]
    config["PlotlevelFilterExpression"] = (
        "(extraelec_veto < 0.5)*(extramuon_veto < 0.5)"
        "*(againstMuonLoose3_2 > 0.5)*(againstElectronVLooseMVA6_2 > 0.5)"
        "*(byVLooseIsolationMVArun2v1DBoldDMwLT_1 > 0.5)"
        "*(byVLooseIsolationMVArun2v1DBoldDMwLT_2 > 0.5)")

    return config
示例#18
0
def build_config(nickname):
    """Return jet-energy-correction (JEC) uncertainty settings for *nickname*.

    Data (Run2015) and MC reference different Fall15 uncertainty files; the
    split-uncertainty sources come from the Summer16 tables.
    """
    config = jsonTools.JsonDict()

    ## fill config:
    # includes
    includes = []
    for include_file in includes:
        analysis_config_module = importlib.import_module(include_file)
        config += analysis_config_module.build_config(nickname)

    # explicit configuration
    # fixed: the reference JEC file lists below were kept as bare triple-quoted
    # strings (no-op expression statements); converted to real comments
    if re.search("Run2015", nickname):
        #config["JetEnergyCorrectionParameters"] = [
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_DATA_L1FastJet_AK4PFchs.txt",
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_DATA_L2Relative_AK4PFchs.txt",
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_DATA_L3Absolute_AK4PFchs.txt",
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_DATA_L2L3Residual_AK4PFchs.txt"
        #]
        config[
            "JetEnergyCorrectionUncertaintyParameters"] = "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_DATA_Uncertainty_AK4PFchs.txt"
    else:
        #config["JetEnergyCorrectionParameters"] = [
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_MC_L1FastJet_AK4PFchs.txt",
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_MC_L2Relative_AK4PFchs.txt",
        #  "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_MC_L3Absolute_AK4PFchs.txt"
        #]
        config[
            "JetEnergyCorrectionUncertaintyParameters"] = "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Fall15/Fall15_25nsV2_MC_Uncertainty_AK4PFchs.txt"

    #config["JetEnergyCorrectionUncertaintySource"] = ""
    #config["JetEnergyCorrectionUncertaintyShift"] = 0.0
    config["JetEnergyCorrectionSplitUncertainty"] = True
    config[
        "JetEnergyCorrectionSplitUncertaintyParameters"] = "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/data/jec/Summer16/Summer16_23Sep2016V4_DATA_UncertaintySources_AK4PFchs.txt"
    config["UseJECShiftsForBJets"] = True
    # individual uncertainty sources propagated as separate shifts
    config["JetEnergyCorrectionSplitUncertaintyParameterNames"] = [
        "AbsoluteFlavMap", "AbsoluteMPFBias", "AbsoluteScale", "AbsoluteStat",
        "FlavorQCD", "Fragmentation", "PileUpDataMC", "PileUpPtBB",
        "PileUpPtEC1", "PileUpPtEC2", "PileUpPtHF", "PileUpPtRef",
        "RelativeBal", "RelativeFSR", "RelativeJEREC1", "RelativeJEREC2",
        "RelativeJERHF", "RelativePtBB", "RelativePtEC1", "RelativePtEC2",
        "RelativePtHF", "RelativeStatEC", "RelativeStatFSR", "RelativeStatHF",
        "SinglePionECAL", "SinglePionHCAL", "TimePtEta", "Total", "Closure"
    ]

    return config
示例#19
0
    def __init__(self, executable=None, userArgParsers=None):
        """Parse command-line arguments and assemble the base job configuration.

        executable: name of the executable to run (stored as-is, may be None).
        userArgParsers: additional argparse parents merged into the default parser.
        """
        self._config = jsonTools.JsonDict()
        self._executable = executable

        self._parser = None
        # Load default argument parser
        self._initArgumentParser(userArgParsers)
        # Parse command line arguments and return dict
        self._args = self._parser.parse_args()
        logger.initLogger(self._args)

        # expand the environment variables only at the batch node
        if self._args.batch:
            self._args.envvar_expansion = False

        date_now = datetime.now().strftime("%Y-%m-%d_%H-%M")

        # write repository revisions to the config
        if not self._args.disable_repo_versions:
            self.setRepositoryRevisions()
            self._config["Date"] = date_now

        # write username to the config; fall back to a random tag when $USER
        # is not set (e.g. on some batch nodes)
        try:
            self._config["User"] = os.environ["USER"]
        # fixed: bare 'except:' narrowed — only a missing env var is expected,
        # a bare except would also swallow KeyboardInterrupt/SystemExit
        except KeyError:
            import random
            import string
            self._config["User"] = ''.join(
                random.choice(string.ascii_uppercase + string.digits)
                for _ in range(10))

        # Expand Config
        self.expandConfig()
        self.projectPath = None
        self.localProjectPath = None
        self.remote_se = False

        if self._args.batch:
            # project directory name is timestamped to stay unique per run
            self.projectPath = os.path.join(
                os.path.expandvars(self._args.work),
                date_now + "_" + self._args.project_name)
            self.localProjectPath = self.projectPath
            if self.projectPath.startswith("srm://"):
                # output goes to a remote storage element; keep a local mirror
                # path based on the parser's default work directory
                self.remote_se = True
                self.localProjectPath = os.path.join(
                    os.path.expandvars(self._parser.get_default("work")),
                    date_now + "_" + self._args.project_name)
示例#20
0
	def read_input_json_dicts(self, plotData):
		"""If Artus config dict is present in root file -> append to plotdict

		For each file group in plotData.plotdict["files"] only the first ROOT
		file is inspected; the parsed config (or {} when none is embedded) is
		appended to plotData.input_json_dicts in the same order.
		"""
		for root_files in plotData.plotdict["files"]:
			# TODO: make TChain instead of using only first file?
			# collect all object paths available in the file
			with TFileContextManager(root_files[0], "READ") as tfile:
				keys, names = zip(*roottools.RootTools.walk_root_directory(tfile))
			# only parse the config when the file actually embeds one
			if jsonTools.JsonDict.PATH_TO_ROOT_CONFIG in names:
				input_json_dict = jsonTools.JsonDict(root_files)
			else:
				input_json_dict = {}
			plotData.input_json_dicts.append(input_json_dict)

		# Raise warning if config dict could be read out for some, but not for all files
		if ({} in plotData.input_json_dicts and not all([i == {} for i in plotData.input_json_dicts])):
			log.warning("'config' dict could not be read for all input files! (ignore this warning if you're not using Artus output files)")
示例#21
0
def build_config(nickname):
    """Return the signal-electron identification settings for *nickname*."""
    config = jsonTools.JsonDict()

    # merge settings from included sub-configs (none for this module)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # explicit configuration
    config[
        "ElectronID_documentation"] = "https://twiki.cern.ch/twiki/bin/viewauth/CMS/HiggsToTauTauWorking2015#Electrons"
    config["ElectronReco"] = "mvanontrig"
    config["ElectronID"] = "user"
    config["ElectronIDType"] = "mvabased2015andlater"

    # 2015 / early-2016 samples use the Spring15 MVA training with its working
    # points; later samples use the Spring16 general-purpose training
    # (single re.search instead of four identical ones; same keys, same order)
    if re.search("(Run2015|Fall15MiniAODv2|Spring16)", nickname):
        config["ElectronIDName"] = "electronMVAValueMapProducer:ElectronMVAEstimatorRun2Spring15NonTrig25nsV1Values"
        config["ElectronMvaIDCutEB1"] = 0.967083
        config["ElectronMvaIDCutEB2"] = 0.929117
        config["ElectronMvaIDCutEE"] = 0.726311
    else:
        config["ElectronIDName"] = "electronMVAValueMapProducer:ElectronMVAEstimatorRun2Spring16GeneralPurposeV1Values"
        config["ElectronMvaIDCutEB1"] = 0.941
        config["ElectronMvaIDCutEB2"] = 0.899
        config["ElectronMvaIDCutEE"] = 0.758

    config["ElectronIDList"] = [
        "electronMVAValueMapProducer:ElectronMVAEstimatorRun2Spring16GeneralPurposeV1Values",
        "egmGsfElectronIDs:cutBasedElectronID-Summer16-80X-V1-veto",
        "egmGsfElectronIDs:cutBasedElectronID-Summer16-80X-V1-loose",
        "egmGsfElectronIDs:cutBasedElectronID-Summer16-80X-V1-medium",
        "egmGsfElectronIDs:cutBasedElectronID-Summer16-80X-V1-tight"
    ]

    # isolation and impact-parameter cuts
    config["ElectronIsoType"] = "user"
    config["ElectronIso"] = "none"
    config["ElectronIsoSignalConeSize"] = 0.3
    config["ElectronDeltaBetaCorrectionFactor"] = 0.5
    config["ElectronTrackDxyCut"] = 0.045
    config["ElectronTrackDzCut"] = 0.2

    return config
    def __init__(self, executable=None, userArgParsers=None):
        """Set up the HiggsToTauTau analysis wrapper.

        executable: accepted for interface compatibility but overridden below —
        the executable is fixed to "HiggsToTauTauAnalysis".
        userArgParsers: additional argparse parents for _initArgumentParser.
        """

        self._config = jsonTools.JsonDict()
        # NOTE(review): the 'executable' argument is ignored here — confirm intended
        self._executable = "HiggsToTauTauAnalysis"

        self._parser = None
        #Load default argument parser
        self._initArgumentParser(userArgParsers)
        #Parse command line arguments and return dict
        self._args = self._parser.parse_args()
        logger.initLogger(self._args)

        # timestamp used to build unique project directories
        self._date_now = datetime.now().strftime("%Y-%m-%d_%H-%M")

        # scratch directory for files fetched from remote storage; lazily set
        self.tmp_directory_remote_files = None

        # mapping of grid-control input files; filled later
        self._gridControlInputFiles = {}
示例#23
0
def main():
    """Print the sorted pipeline names found in the first given Artus config."""
    parser = argparse.ArgumentParser(
        description="Print names of all pipelines from Artus JSON config",
        parents=[logger.loggingParser])
    parser.add_argument(
        "configs",
        nargs="+",
        help=
        "Artus JSON configs. Can be either Artus output root files or JSON text files. Only first is considered."
    )

    args = parser.parse_args()
    logger.initLogger(args)

    # only the first config is inspected; missing "Pipelines" yields no output
    pipelines = jsonTools.JsonDict(args.configs[0]).get("Pipelines", {})
    log.info("\n".join(sorted(pipelines.keys())))
示例#24
0
def build_config(nickname):
    """Return the pileup-uncertainty pipeline config (quantities + systematic shifts)."""
    config = jsonTools.JsonDict()
    datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(
        os.path.expandvars(
            "$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

    # quantities written out for the pileup-reweighting studies
    config["Quantities"] = [
        "npu",
        "numberGeneratedEventsWeight",
        "crossSectionPerEventWeight",
        "generatorWeight",
        "npartons",
        "genbosonmass",
    ]
    config["Consumers"] = ["KappaLambdaNtupleConsumer"]

    # pipelines - systematic shifts
    shifts_module = importlib.import_module(
        "HiggsAnalysis.KITHiggsToTauTau.data.ArtusConfigs.Run2MSSM2017.syst_shifts_nom"
    )
    return ACU.apply_uncertainty_shift_configs(
        'pu', config, shifts_module.build_config(nickname))
def build_config(nickname):
    """Return MET-uncertainty shift pipelines for Summer16 samples."""
    config = jsonTools.JsonDict()

    # merge settings from included sub-configs (none for this module)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # MET uncertainty pipelines only exist for Summer16 simulation; pipeline
    # names are "met" + shift type, which also names the Svfit cache folder
    if re.search("Summer16", nickname):
        for shift_type in ("JetEnUp", "JetEnDown",
                           "UnclusteredEnUp", "UnclusteredEnDown"):
            pipeline_name = "met" + shift_type
            config[pipeline_name] = {
                "MetUncertaintyShift": True,
                "MetUncertaintyType": shift_type,
                "SvfitCacheFileFolder": pipeline_name,
            }

    return config
示例#26
0
def build_config(nickname):
  """Return e->tau fake energy-scale shift pipelines for Z->ll samples."""
  config = jsonTools.JsonDict()

  # merge settings from included sub-configs (none for this module)
  includes = []
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # the e->tau fake energy-scale shifts only apply to Drell-Yan / EWK Z samples;
  # one +-3% shift pipeline per tau decay mode and direction
  if re.search("DY.?JetsToLL|EWKZ2Jets", nickname):
    for decay_mode in ("OneProng", "OneProngPiZeros"):
      for direction, shift in (("Up", 1.03), ("Down", 0.97)):
        pipeline_name = "tauEleFakeEs" + decay_mode + direction
        config[pipeline_name] = {
          "TauElectronFakeEnergyCorrection" + decay_mode + "Shift" : shift,
          "SvfitCacheFileFolder" : pipeline_name
        }

  return config
示例#27
0
def build_config(nickname):
    """Return the electron reconstruction/ID/isolation settings.

    Uses the Fall17 MVA (iso, wp80) working point as the signal electron ID
    and stores the full list of Fall17 ID decisions for later use.
    """
    config = jsonTools.JsonDict()
    #datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(os.path.expandvars("$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

    ## fill config:
    # includes (currently empty)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # explicit configuration, collected in one literal and copied key by key
    electron_settings = {
        "ElectronID_documentation":
            "https://twiki.cern.ch/twiki/bin/viewauth/CMS/HiggsToTauTauWorking2015#Electrons",
        "ElectronReco": "mvanontrig",
        "ElectronID": "user",
        # still MVA, using boolean functionality of IsCutBased()
        "ElectronIDType": "cutbased2015andlater",
        # signal electron ID
        # ("egmGsfElectronIDs:mvaEleID-Fall17-noIso-V1-wp80" gives worse fake
        # rejection than the 'iso' version)
        "ElectronIDName": "egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wp80",
        "ElectronIDList": [
            "egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wp80",
            "egmGsfElectronIDs:mvaEleID-Fall17-iso-V1-wp90",
            "egmGsfElectronIDs:mvaEleID-Fall17-noIso-V1-wp80",
            "egmGsfElectronIDs:mvaEleID-Fall17-noIso-V1-wp90",
            "egmGsfElectronIDs:cutBasedElectronID-Fall17-94X-V1-veto",
            "egmGsfElectronIDs:cutBasedElectronID-Fall17-94X-V1-loose",
            "egmGsfElectronIDs:cutBasedElectronID-Fall17-94X-V1-medium",
            "egmGsfElectronIDs:cutBasedElectronID-Fall17-94X-V1-tight",
        ],
        "ElectronIsoType": "user",
        "ElectronIso": "none",
        "ElectronIsoSignalConeSize": 0.3,
        "ElectronDeltaBetaCorrectionFactor": 0.5,
        "ElectronTrackDxyCut": 0.045,
        "ElectronTrackDzCut": 0.2,
    }
    for key, value in electron_settings.items():
        config[key] = value

    return config
Example #28
0
def build_config(nickname):
    """Return the 'nominal' (unshifted) systematic-variation settings.

    All energy-correction shifts are set to their neutral values
    (multiplicative shifts to 1.0, additive shifts to 0.0).
    """
    config = jsonTools.JsonDict()
    #datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(os.path.expandvars("$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

    # define frequently used conditions
    #isEmbedded = datasetsHelper.isEmbedded(nickname)
    #isData = datasetsHelper.isData(nickname) and (not isEmbedded)
    #isTTbar = re.search("TT(To|_|Jets)", nickname)
    #isDY = re.search("DY.?JetsToLL", nickname)
    #isWjets = re.search("W.?JetsToLNu", nickname)

    ## fill config:
    # includes (currently empty)
    includes = []
    for module_name in includes:
        config += importlib.import_module(module_name).build_config(nickname)

    # explicit configuration: neutral values for every supported shift
    config["nominal"] = dict(
        ElectronEnergyCorrectionShiftEB=1.0,
        ElectronEnergyCorrectionShiftEE=1.0,
        JetEnergyCorrectionUncertaintyShift=0.0,
        MetUncertaintyShift=False,
        MetUncertaintyType="",
        SvfitCacheFileFolder="nominal",
        TauElectronFakeEnergyCorrection=1.0,
        TauElectronFakeEnergyCorrectionOneProngPiZerosShift=1.0,
        TauElectronFakeEnergyCorrectionOneProngShift=1.0,
        TauEnergyCorrectionOneProngPiZerosShift=1.0,
        TauEnergyCorrectionOneProngShift=1.0,
        TauEnergyCorrectionShift=1.0,
        TauEnergyCorrectionThreeProngShift=1.0,
        TauJetFakeEnergyCorrection=0.0,
        TauMuonFakeEnergyCorrection=1.0,
        TauMuonFakeEnergyCorrectionOneProngPiZerosShift=1.0,
        TauMuonFakeEnergyCorrectionOneProngShift=1.0,
        BTagShift=0.0,
        BMistagShift=0.0,
    )

    return config
def build_config(nickname):
  """Return the tau isolation-discriminator and lepton-overlap settings.

  Negative delta-R cuts disable the overlap vetoes against electrons
  and muons.
  """
  config = jsonTools.JsonDict()
  #datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(os.path.expandvars("$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

  ## fill config:
  # includes (none at the moment)
  includes = []
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # explicit configuration
  #config["TauID_documentation"] = []
  tau_settings = {
    "TauDiscriminatorIsolationName": "byIsolationMVArun2v1DBoldDMwLTraw",
    "TauElectronLowerDeltaRCut": -1.0,
    "TauMuonLowerDeltaRCut": -1.0,
  }
  for key, value in tau_settings.items():
    config[key] = value

  return config
def build_config(nickname):
  """Return empty placeholders for the MVA test-method settings.

  All three MVA lists start out empty; downstream configs may append to
  them.
  """
  config = jsonTools.JsonDict()
  #datasetsHelper = datasetsHelperTwopz.datasetsHelperTwopz(os.path.expandvars("$CMSSW_BASE/src/Kappa/Skimming/data/datasets.json"))

  # define frequently used conditions
  # isMC = not re.search("(?<!PFembedded).Run201", nickname)

  ## fill config:
  # includes (none at the moment)
  includes = []
  for module_name in includes:
    config += importlib.import_module(module_name).build_config(nickname)

  # explicit configuration: a fresh empty list per MVA setting
  for key in ("MVATestMethodsInputQuantities",
              "MVATestMethodsMethods",
              "MVATestMethodsWeights"):
    config[key] = []

  return config