#!/usr/bin/env python import os from ROOT import gROOT, TFile, TH1F, TGraphAsymmErrors, gDirectory, TH1, kFALSE, kRed, kAzure, kOrange import __main__ if gROOT.LoadMacro("rootlogon.C") != 0: from ROOT import rootlogon rootlogon() from PlotUtils import PlotObject from PyGenericUtils import GetRootObj, GetFile, CleanNameForMacro import math from array import array import time from MinloTheory import envelope, eigenvectorQuadrature, rebinme, ConvertNeventsToDSigma, NameConvert from MinloTheory import QuadratureUpDown, AsymmErrsToPlusMinusSigma, GetVariationName from MinloTheory import FlatErrorToAsymmErrs from Variables import GetSampleName, GetMinitreeFileName from Variables import SetNjetsBins, VarProps, BRCorrection from Variables import CrossSections as CrossSections_ShapeNorm from Variables import CrossSections_Shape as CrossSections_Shape from Variables import CrossSections_Errors gROOT.SetBatch(True) #TH1.AddDirectory(kFALSE) ## ## var: variable name as in tree ## histbins: e.g. (100,0,200) ## quantity options: diff, fid, eff, gen
def __init__(self, frac=0.95):
    """Remember the quantile fraction and make the C++ helper available.

    Parameters
    ----------
    frac : float, optional
        Quantile fraction to compute later (default 0.95); stored as a
        float in a private attribute.
    """
    self.__frac = float(frac)
    # Build and load the helper macro via ACLiC ('+' triggers compilation);
    # imported lazily so ROOT is only required when an instance is created.
    from ROOT import gROOT
    gROOT.LoadMacro('Quantile.h+')
import ROOT from ROOT import gROOT, gDirectory, TArrayI, TFile, TTree, TObject, std, AddressOf, addressof, MakeNullPointer, TObjArray, TNamed legacy_pyroot = os.environ.get('LEGACY_PYROOT') == 'True' from common import * __all__ = [ 'TFileGetNonTObject', 'TTree1ReadWriteSimpleObjectsTestCase', 'TTree2BranchCreation' ] if not os.path.exists('TTreeTypes.C'): os.chdir(os.path.dirname(__file__)) gROOT.LoadMacro("TTreeTypes.C+") SomeDataObject = ROOT.SomeDataObject SomeDataStruct = ROOT.SomeDataStruct CreateArrayTree = ROOT.CreateArrayTree ### Write/Read an std::vector to/from file =================================== class TTree1ReadWriteSimpleObjectsTestCase(MyTestCase): N, M = 5, 10 fname, tname, ttitle = 'test.root', 'test', 'test tree' testnames = ['aap', 'noot', 'mies', 'zus', 'jet'] def test01WriteStdVector(self): """Test writing of a single branched TTree with an std::vector<double>""" f = TFile(self.fname, 'RECREATE')
#!/usr/bin/python import os, sys, math, datetime from ROOT import gROOT, gStyle, TFile, TTree, TH1F, TH1D, TCanvas, TPad, TMath, TF1, TLegend, gPad, gDirectory from collections import OrderedDict import numpy sys.path.append(os.path.abspath(os.path.curdir)) from Plotter import parseLYAnaInputArgs from Plotter.CommonTools import DrawHistSimple, DrawDvsTHist options = parseLYAnaInputArgs() gROOT.Reset() gROOT.LoadMacro("Plotter/UMDStyle.C") from ROOT import SetUMDStyle SetUMDStyle() gROOT.SetBatch() #################################################################################################### #################################################################################################### debug = False if __name__ == '__main__': print options if debug: print "Style = ", gStyle.GetName() myfile = {} ## binbase, xmin, xmax : nbins = binbase*(xmax-xmin) hxrng = {} hxrng["Ref"] = [32, -1.0, 8.0] hxrng["EJ200"] = [32, -1.0, 8.0]
# --- imports -----------------------------------------------------------------
import hyp_analysis_utils as au
import argparse
import math
import os
import random
from array import array
import uproot
import numpy as np
import yaml
from scipy import stats
from ROOT import (TF1, TH1D, TH2D, TAxis, TCanvas, TColor, TFile, TFrame,
                  TIter, TKey, TPaveText, gDirectory, gROOT, gStyle, gPad,
                  AliPWGFunc, kBlack, kBlue, kRed)
from statsmodels.robust.scale import huber

# Compile/load the yield-extraction macro, then import the C++ function
# it defines into Python.
gROOT.LoadMacro("../Utils/YieldMean.C")
from ROOT import yieldmean

# Fixed seed so any randomized steps downstream are reproducible.
random.seed(1989)

# Single positional argument: path to the analysis YAML configuration file.
parser = argparse.ArgumentParser()
parser.add_argument("config", help="Path to the YAML configuration file")
args = parser.parse_args()

# Batch mode: no graphics windows are opened.
gROOT.SetBatch()

# Blast-wave spectrum fit used as input downstream.
# NOTE(review): assumes the HYPERML_UTILS environment variable is set and
# points at a directory containing BlastWaveFits.root — verify in the caller.
bw_file = TFile(os.environ['HYPERML_UTILS'] + '/BlastWaveFits.root')
bw = bw_file.Get('BlastWave/BlastWave0')
params = bw.GetParameters()
def __init__(self, options):
    """Build and configure a TMVA classification factory from CLI options.

    options must provide:
      * lVars  -- list of branch names used as training variables
      * new    -- if True, re-skim sig.root/bkg.root from options.infile
      * infile -- basename (without '.root') of the combined input file

    Side effects: writes TMVA.root, (optionally) sig.root/bkg.root,
    model.h5 and h5_files/tf_model.pb into the working directory.
    """
    TMVA.Tools.Instance()
    TMVA.PyMethodBase.PyInitialize()
    gROOT.LoadMacro("./TMVAGui.C")

    self._lOutput = TFile.Open('TMVA.root', 'RECREATE')
    self._lFactory = TMVA.Factory(
        'TMVAClassification', self._lOutput,
        '!V:!Silent:Color:DrawProgressBar:AnalysisType=Classification')
    self._lDataLoader = TMVA.DataLoader("dataset")
    for i0 in options.lVars:
        self._lDataLoader.AddVariable(i0, 'F')
    self._lDataLoader.Print("all")

    # define signal and background tree based on LV flag: when --new is
    # given, skim the combined input tree into separate signal/background
    # files using the truth-match flag and kinematic cuts.
    if options.new:
        self._lInputFile = TFile.Open(options.infile + '.root')
        self._lInputTree = self._lInputFile.Get("PFPart")
        self._lSigFile = TFile.Open("sig.root", "RECREATE")
        self._lBkgFile = TFile.Open("bkg.root", "RECREATE")
        self._lSigFile.cd()
        self._lSigTree = self._lInputTree.CopyTree(
            "match==1 && std::abs(eta) >= 1.7 && std::abs(eta) <= 3.0 && pt > 5")
        self._lSigTree.Write()
        self._lSigFile.Close()
        self._lBkgFile.cd()
        self._lBkgTree = self._lInputTree.CopyTree(
            "match==0 && std::abs(eta) >= 1.7 && std::abs(eta) <= 3.0 && pt > 5")
        self._lBkgTree.Write()
        self._lBkgFile.Close()

    # Re-open the skimmed files read-only for training (done unconditionally
    # so a previous skim can be reused when --new is not given).
    self._lSigFile = TFile.Open("sig.root", "READ")
    self._lBkgFile = TFile.Open("bkg.root", "READ")
    self._lSigTree = self._lSigFile.Get("PFPart")
    self._lBkgTree = self._lBkgFile.Get("PFPart")
    self._lDataLoader.AddSignalTree(self._lSigTree, 1.0)
    self._lDataLoader.AddBackgroundTree(self._lBkgTree, 1.0)

    nSig = self._lSigTree.GetEntries()
    nBkg = self._lBkgTree.GetEntries()
    nSigTrain = nSig * 0.8
    # NOTE(review): the background train/test counts are derived from nSig,
    # not nBkg — possibly deliberate class balancing, but confirm; nBkg is
    # otherwise unused.
    nBkgTrain = nSig * 0.8
    self._lDataLoader.PrepareTrainingAndTestTree(
        TCut(""), TCut(""),
        "nTrain_Signal=%i:nTrain_Background=%i:nTest_Signal=%i:nTest_Background=%i:SplitMode=Random:NormMode=NumEvents:!V"
        % (nSigTrain, nBkgTrain, nSigTrain, nBkgTrain))

    # Methods booked generically: name -> TMVA method type.
    Methods = {
        #'Variable': TMVA.Types.kVariable,
        #'Cuts': TMVA.Types.kCuts,
        'Likelihood': TMVA.Types.kLikelihood,
        #'BDT': TMVA.Types.kBDT
        #'PyRandomForest': TMVA.Types.kPyRandomForest,
        #'MaxMethod': TMVA.Types.kMaxMethod
    }
    # Other available method types kept from the original for reference:
    # kPDERS, kHMatrix, kFisher, kKNN, kCFMlpANN, kTMlpANN, kBDT, kDT,
    # kRuleFit, kSVM, kMLP, kBayesClassifier, kFDA, kBoost, kPDEFoam, kLD,
    # kPlugins, kCategory, kDNN, kPyRandomForest, kPyAdaBoost, kPyGTB,
    # kPyKeras, kC50, kRSNNS, kRSVM, kRXGB, kMaxMethod
    #
    # BUGFIX: the original used dict.iteritems(), which does not exist on
    # Python 3; dict.items() behaves the same here on both Python 2 and 3.
    for m, t in Methods.items():
        self._lFactory.BookMethod(self._lDataLoader, t, m, "")

    self._lFactory.BookMethod(
        self._lDataLoader, TMVA.Types.kBDT, 'BDT',
        '!H:!V:NTrees=300:MinNodeSize=2.5%:MaxDepth=3:BoostType=Grad:SeparationType=GiniIndex:nCuts=100:UseBaggedBoost=True:PruneMethod=NoPruning')

    # Small Keras feed-forward network, saved to disk and booked via PyKeras.
    model = Sequential()
    model.add(
        Dense(len(options.lVars), input_dim=len(options.lVars), activation='tanh'))
    model.add(Dense(30, activation='tanh'))
    model.add(Dense(20, activation='relu'))
    model.add(Dense(10, activation='tanh'))
    model.add(Dense(5, activation='relu'))
    model.add(Dense(2, activation='sigmoid'))
    model.compile(loss='categorical_crossentropy', optimizer='adam',
                  metrics=['accuracy', ])
    model.save('model.h5')
    model.summary()

    self._lFactory.BookMethod(
        self._lDataLoader, TMVA.Types.kPyKeras, 'PyKeras',
        'H:!V:FilenameModel=model.h5:NumEpochs=5:BatchSize=100')

    # Export the (still untrained) graph for later TensorFlow inference.
    frozen_graph = freeze_session(
        K.get_session(), output_names=[out.op.name for out in model.outputs])
    tf.train.write_graph(frozen_graph, "h5_files", "tf_model.pb", as_text=False)

    self._lFactory.Print("v")
donotdelete = [] infile = "histos_tbb_add.27.root" infile140 = "histos_tbb_140_add.27.root" infile250 = "histos_tbb_250_add.27.root" infile300 = "histos_tbb_300_add.27.root" from perf_rates import make_ptcut, make_rate # ______________________________________________________________________________ if __name__ == '__main__': from ROOT import gROOT, gPad, gStyle, TFile, TCanvas, TH1F, TH2F, TPolyLine, TLatex, TColor, TEfficiency, TLine, TLatex, TGraph, TGraphAsymmErrors # ____________________________________________________________________________ # Setup basic drawer gROOT.LoadMacro("tdrstyle.C") gROOT.ProcessLine("setTDRStyle();") #gROOT.SetBatch(True) gStyle.SetPadGridX(True) gStyle.SetPadGridY(True) gStyle.SetMarkerStyle(1) gStyle.SetEndErrorSize(0) gROOT.ForceStyle() tline = TLine() tline.SetLineColor(920 + 2) # kGray+2 tline.SetLineStyle(2) tlatexCMS1 = TLatex() tlatexCMS1.SetNDC() tlatexCMS1.SetTextFont(61)
print where+'?' print 'First argument analysis:' print ' SingleS' print ' then a list of inputs - read the code' print 'e.g. ./runReader.py SingleS PhaseI_C0_NoPU_pMSSM1' print 'e.g. ./runReader.py SingleS8TeV 8TeV_NoPU_TTbar8TeV' sys.exit(0) from operator import mul def scale(fac,list): return map(mul,len(list)*[fac],list) # choose the analysis and a sample if len(sys.argv)>1: if sys.argv[1]=='SingleS': # single lepton stop - CMS gROOT.LoadMacro('readerSingleS.C+') from ROOT import readerSingleS as reader elif sys.argv[1]=='SingleSHT': # single lepton testing version gROOT.LoadMacro('readerSingleSHT.C+') from ROOT import readerSingleSHT as reader elif sys.argv[1]=='SingleS8TeV': # single lepton testing version gROOT.LoadMacro('readerSingleS8TeV.C+') from ROOT import readerSingleS8TeV as reader elif sys.argv[1]=='SingleSdev': # single lepton testing version gROOT.LoadMacro('readerSingleSdev.C+') from ROOT import readerSingleSdev as reader elif sys.argv[1]=='EWKino': # EWKino gROOT.LoadMacro('readerEWKino.C+') # used to be EWKinoNew from ROOT import readerEWKino as reader elif sys.argv[1]=='AtlasH': # Atlas hadronic gROOT.LoadMacro('readerAtlasH.C+') # used to be ?????
import subprocess from setTDRStyle import setTDRStyle oldargv = sys.argv[:] sys.argv = [ '-b-' ] #~ import ROOT from ROOT import TF1, TCanvas, gROOT, gStyle, TFile, TH2F, TH1, kFALSE, TH1D, gSystem, RooWorkspace, RooRealVar, RooCmdArg, RooDataHist, RooArgList, TPad, TLegend, TLatex, TGraphAsymmErrors, TGraph, TGraphErrors, kBlack, kRed, kBlue, TLine from ROOT import RooDataHist, RooBreitWigner, RooFFTConvPdf, RooFit import ROOT # ~ from ROOT import * gROOT.SetBatch(True) sys.argv = oldargv ptbins = [52, 72, 100, 152, 200, 300, 452, 800] gROOT.LoadMacro("cruijff.C+") gROOT.LoadMacro("doubleCB.C+") rebinFactor = 1 xLow = 75 xHigh = 105 sampleLists = { "2017Inclusive":["ana_datamc_DYInclusive2017.root"], "2017MassBinned":["dileptonAna_resolution_dy50to120_UL2017.root","dileptonAna_resolution_dy120to200_UL2017.root","dileptonAna_resolution_dy200to400_UL2017.root","dileptonAna_resolution_dy400to800_UL2017.root","dileptonAna_resolution_dy800to1400_UL2017.root","dileptonAna_resolution_dy1400to2300_UL2017.root","dileptonAna_resolution_dy2300to3500_UL2017.root","dileptonAna_resolution_dy3500to4500_UL2017.root","dileptonAna_resolution_dy4500to6000_UL2017.root","dileptonAna_resolution_dy6000toInf_UL2017.root"], "2017PtBinned":["dileptonAna_resolution_dyInclusive_UL2017.root","dileptonAna_resolution_dyPt50To100_UL2017.root","dileptonAna_resolution_dyPt100To250_UL2017.root","dileptonAna_resolution_dyPt250To400_UL2017.root","dileptonAna_resolution_dyPt400To650_UL2017.root","dileptonAna_resolution_dyPt650ToInf_UL2017.root"], "2018PtBinned":["dileptonAna_resolution_2018_dyInclusive_UL2018.root","dileptonAna_resolution_2018_dyPt50To100_UL2018.root","dileptonAna_resolution_2018_dyPt100To250_UL2018.root","dileptonAna_resolution_2018_dyPt250To400_UL2018.root","dileptonAna_resolution_2018_dyPt400To650_UL2018.root","dileptonAna_resolution_2018_dyPt650ToInf_UL2018.root"] }
def main():
    """Run a TMVA classification: parse CLI options, book the requested MVA
    methods, train/test/evaluate them, and write all output to a ROOT file.

    Input files, tree names, cuts and the method list come from the command
    line, falling back to the module-level DEFAULT_* constants.
    """
    try:
        # retrieve command line options.
        # NOTE(review): 'g' appears twice in shortopts ('g:' for
        # --friendinputfilebkg and bare 'g' for --gui). "-g" is consumed by
        # the friend-file branch below, so the GUI can only be enabled with
        # the long option "--gui". Left unchanged to preserve the CLI.
        shortopts = "m:i:j:f:g:t:o:a:vgh?"
        longopts = ["methods=", "inputfilesig=", "inputfilebkg=",
                    "friendinputfilesig=", "friendinputfilebkg=",
                    "inputtrees=", "outputfile=", "verbose", "gui",
                    "help", "usage"]
        opts, args = getopt.getopt(sys.argv[1:], shortopts, longopts)

    except getopt.GetoptError:
        # print help information and exit:
        print("ERROR: unknown options in argument %s" % sys.argv[1:])
        usage()
        sys.exit(1)

    # Defaults from module-level constants; overridden by options below.
    infnameSig = DEFAULT_INFNAMESIG
    infnameBkg = DEFAULT_INFNAMEBKG
    friendfnameSig = DEFAULT_FRIENDNAMESIG
    friendfnameBkg = DEFAULT_FRIENDNAMEBKG
    treeNameSig = DEFAULT_TREESIG
    treeNameBkg = DEFAULT_TREEBKG
    outfname = DEFAULT_OUTFNAME
    methods = DEFAULT_METHODS
    verbose = False
    gui = False
    addedcuts = ""

    for o, a in opts:
        if o in ("-?", "-h", "--help", "--usage"):
            usage()
            sys.exit(0)
        elif o in ("-m", "--methods"):
            methods = a
        elif o in ("-i", "--inputfilesig"):
            infnameSig = a
        elif o in ("-j", "--inputfilebkg"):
            infnameBkg = a
        elif o in ("-f", "--friendinputfilesig"):
            friendfnameSig = a
        elif o in ("-g", "--friendinputfilebkg"):
            friendfnameBkg = a
        elif o in ("-o", "--outputfile"):
            outfname = a
        elif o in ("-a", "--addedcuts"):
            addedcuts = a
        elif o in ("-t", "--inputtrees"):
            # BUGFIX: str.strip() returns a new string; the original wrote
            # "a.strip()" and discarded the result, so surrounding
            # whitespace was never actually removed.
            a = a.strip()
            trees = a.rsplit(' ')
            trees.sort()
            trees.reverse()
            if len(trees) - trees.count('') != 2:
                print("ERROR: need to give two trees (each one for signal and background)")
                print(trees)
                sys.exit(1)
            treeNameSig = trees[0]
            treeNameBkg = trees[1]
        elif o in ("-v", "--verbose"):
            verbose = True
        elif o in ("-g", "--gui"):
            gui = True

    # Print methods
    mlist = methods.replace(' ', ',').split(',')
    print("=== TMVAClassification: use method(s)...")
    for m in mlist:
        if m.strip() != '':
            print("=== - <%s>" % m.strip())

    # Print the files
    print("Using file " + infnameSig + " for signal...")
    print("Using file " + infnameBkg + " for background...")

    # Import ROOT classes
    from ROOT import gSystem, gROOT, gApplication, TFile, TTree, TCut

    # check ROOT version, give alarm if 5.18 (PyROOT enum-argument bug)
    print("ROOT version is " + str(gROOT.GetVersionCode()))
    if gROOT.GetVersionCode() >= 332288 and gROOT.GetVersionCode() < 332544:
        print("*** You are running ROOT version 5.18, which has problems in PyROOT such that TMVA")
        print("*** does not run properly (function calls with enums in the argument are ignored).")
        print("*** Solution: either use CINT or a C++ compiled version (see TMVA/macros or TMVA/examples),")
        print("*** or use another ROOT version (e.g., ROOT 5.19).")
        sys.exit(1)

    # Logon not automatically loaded through PyROOT (logon loads the TMVA
    # library); load also the GUI macro.
    gROOT.SetMacroPath("./")
    gROOT.Macro("./TMVAlogon.C")
    gROOT.LoadMacro("./TMVAGui.C")

    # Import TMVA classes from ROOT
    from ROOT import TMVA

    # Output file
    outputFile = TFile(outfname, 'RECREATE')

    # Create instance of TMVA factory (see TMVA/macros/TMVAClassification.C
    # for more factory options). All TMVA output can be suppressed by
    # removing the "!" (not) in front of the "Silent" argument.
    factory = TMVA.Factory(
        "TMVAClassification", outputFile,
        "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification")

    # Set verbosity
    factory.SetVerbose(verbose)

    # Define the input variables used for the classifier training; variable
    # expressions parseable by TTree::Draw are also allowed.
    factory.AddVariable("dR_l1l2", "dR_l1l2", "", 'F')
    factory.AddVariable("dR_b1b2", "dR_b1b2", "", 'F')
    factory.AddVariable("dR_bl", "dR_bl", "", 'F')
    factory.AddVariable("mass_l1l2", "mass_l1l2", "", 'F')
    factory.AddVariable("mass_b1b2", "mass_b1b2", "", 'F')
    factory.AddVariable("MT2", "MT2", "", 'F')
    #factory.AddVariable( "MMC_h2massweight1_prob", "MMC_h2massweight1_prob", "", 'F' )

    # Read input data; fetch any missing file from the ROOT web server.
    # (TSystem::AccessPathName returns non-zero when the path does NOT exist.)
    # BUGFIX: the original called wget with the undefined name `infname`,
    # raising a NameError whenever an input file was missing; fetch each
    # missing file by its real name instead.
    if gSystem.AccessPathName(infnameSig) != 0:
        gSystem.Exec("wget http://root.cern.ch/files/" + infnameSig)
    if gSystem.AccessPathName(infnameBkg) != 0:
        gSystem.Exec("wget http://root.cern.ch/files/" + infnameBkg)

    inputSig = TFile.Open(infnameSig)
    inputBkg = TFile.Open(infnameBkg)

    # Get the signal and background trees for training
    signal = inputSig.Get(treeNameSig)
    background = inputBkg.Get(treeNameBkg)
    ##signal.AddFriend( "eleIDdir/isoT1 = eleIDdir/T1", friendfnameSig )
    ##background.AddFriend( "eleIDdir/isoT1 = eleIDdir/T1", friendfnameBkg )

    # Global event weights (see below for setting event-wise weights).
    # Equal weights let the training see comparable amounts of signal and
    # background; scaling to luminosity-expected yields would starve the
    # classifier of signal events.
    signalWeight = 1.      # 0.085082
    backgroundWeight = 1.  # 3.230581

    # ====== register trees ====================================================
    factory.AddSignalTree(signal, signalWeight)
    factory.AddBackgroundTree(background, backgroundWeight)
    # To give different trees for training and testing, use the "Training" /
    # "Test" variants of AddSignalTree / AddBackgroundTree.
    # Event-wise weights (variables must exist in the original TTree):
    #    factory.SetSignalWeightExpression    ("weight1*weight2")
    #    factory.SetBackgroundWeightExpression("weight1*weight2")

    # Apply additional cuts on the signal and background samples, e.g.
    # mycut = TCut( "abs(var1)<0.5 && abs(var2-0.5)<1" )
    mycutSig = TCut(addedcuts)
    mycutBkg = TCut(addedcuts)
    print(mycutSig)

    # The relevant variables are copied into new, slim trees used for TMVA
    # training and testing; "SplitMode=Random" shuffles the input events
    # before splitting them into training and test samples.
    factory.PrepareTrainingAndTestTree(
        mycutSig, mycutBkg,
        "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V")

    # --------------------------------------------------------------------------
    # ---- Book MVA methods (see http://tmva.sourceforge.net/optionRef.html)

    # Cut optimisation
    if "Cuts" in mlist:
        factory.BookMethod(TMVA.Types.kCuts, "Cuts",
                           "!H:!V:FitMethod=MC:EffSel:VarProp[0]=FMax:VarProp[1]=FMin")
    if "CutsD" in mlist:
        factory.BookMethod(TMVA.Types.kCuts, "CutsD",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate")
    if "CutsPCA" in mlist:
        factory.BookMethod(TMVA.Types.kCuts, "CutsPCA",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA")
    if "CutsGA" in mlist:
        factory.BookMethod(TMVA.Types.kCuts, "CutsGA",
                           "H:!V:FitMethod=GA:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95:VarProp[0]=FMin:VarProp[1]=FMax")
    if "CutsSA" in mlist:
        factory.BookMethod(TMVA.Types.kCuts, "CutsSA",
                           "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale")

    # Likelihood ("naive Bayes estimator") and variants
    if "Likelihood" in mlist:
        factory.BookMethod(TMVA.Types.kLikelihood, "Likelihood",
                           "H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50")
    if "LikelihoodD" in mlist:
        factory.BookMethod(TMVA.Types.kLikelihood, "LikelihoodD",
                           "!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate")
    if "LikelihoodPCA" in mlist:
        factory.BookMethod(TMVA.Types.kLikelihood, "LikelihoodPCA",
                           "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA")
    if "LikelihoodKDE" in mlist:
        factory.BookMethod(TMVA.Types.kLikelihood, "LikelihoodKDE",
                           "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Adaptive:KDEFineFactor=0.3:KDEborder=None:NAvEvtPerBin=50")
    if "LikelihoodMIX" in mlist:
        factory.BookMethod(TMVA.Types.kLikelihood, "LikelihoodMIX",
                           "!H:!V:!TransformOutput:PDFInterpolSig[0]=KDE:PDFInterpolBkg[0]=KDE:PDFInterpolSig[1]=KDE:PDFInterpolBkg[1]=KDE:PDFInterpolSig[2]=Spline2:PDFInterpolBkg[2]=Spline2:PDFInterpolSig[3]=Spline2:PDFInterpolBkg[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50")

    # Multi-dimensional probability density estimators
    if "PDERS" in mlist:
        factory.BookMethod(TMVA.Types.kPDERS, "PDERS",
                           "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600")
    if "PDERSD" in mlist:
        factory.BookMethod(TMVA.Types.kPDERS, "PDERSD",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=Decorrelate")
    if "PDERSPCA" in mlist:
        factory.BookMethod(TMVA.Types.kPDERS, "PDERSPCA",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=PCA")

    # Multi-dimensional likelihood estimator with self-adapting binning
    if "PDEFoam" in mlist:
        factory.BookMethod(TMVA.Types.kPDEFoam, "PDEFoam",
                           "!H:!V:SigBgSeparate=F:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T")
    if "PDEFoamBoost" in mlist:
        factory.BookMethod(TMVA.Types.kPDEFoam, "PDEFoamBoost",
                           "!H:!V:Boost_Num=30:Boost_Transform=linear:SigBgSeparate=F:MaxDepth=4:UseYesNoCell=T:DTLogic=MisClassificationError:FillFoamWithOrigWeights=F:TailCut=0:nActiveCells=500:nBin=20:Nmin=400:Kernel=None:Compress=T")

    # K-Nearest Neighbour classifier (KNN)
    if "KNN" in mlist:
        factory.BookMethod(TMVA.Types.kKNN, "KNN",
                           "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim")

    # H-Matrix (chi2-squared) method
    if "HMatrix" in mlist:
        factory.BookMethod(TMVA.Types.kHMatrix, "HMatrix", "!H:!V")

    # Linear / Fisher discriminants
    if "LD" in mlist:
        factory.BookMethod(TMVA.Types.kLD, "LD",
                           "H:!V:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10")
    if "Fisher" in mlist:
        factory.BookMethod(TMVA.Types.kFisher, "Fisher",
                           "H:!V:Fisher:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10")
    if "FisherG" in mlist:
        factory.BookMethod(TMVA.Types.kFisher, "FisherG", "H:!V:VarTransform=Gauss")
    if "BoostedFisher" in mlist:
        factory.BookMethod(TMVA.Types.kFisher, "BoostedFisher",
                           "H:!V:Boost_Num=20:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.2")

    # Function discrimination analysis (FDA) with various fitters
    if "FDA_MC" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_MC",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1")
    if "FDA_GA" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_GA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=300:Cycles=3:Steps=20:Trim=True:SaveBestGen=1")
    if "FDA_SA" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_SA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale")
    if "FDA_MT" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_MT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch")
    if "FDA_GAMT" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_GAMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim")
    if "FDA_MCMT" in mlist:
        factory.BookMethod(TMVA.Types.kFDA, "FDA_MCMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1)(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20")

    # TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are MLPs
    if "MLP" in mlist:
        factory.BookMethod(TMVA.Types.kMLP, "MLP",
                           "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator")
    if "MLPBFGS" in mlist:
        factory.BookMethod(TMVA.Types.kMLP, "MLPBFGS",
                           "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator")
    if "MLPBNN" in mlist:
        # BFGS training with bayesian regulators
        factory.BookMethod(TMVA.Types.kMLP, "MLPBNN",
                           "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator")

    # CF(Clermont-Ferrand)ANN -- option: n_cycles:#nodes:#nodes:...
    if "CFMlpANN" in mlist:
        factory.BookMethod(TMVA.Types.kCFMlpANN, "CFMlpANN",
                           "!H:!V:NCycles=2000:HiddenLayers=N+1,N")

    # Tmlp(Root)ANN -- option: n_cycles:#nodes:#nodes:...
    if "TMlpANN" in mlist:
        factory.BookMethod(TMVA.Types.kTMlpANN, "TMlpANN",
                           "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3")

    # Support Vector Machine
    if "SVM" in mlist:
        factory.BookMethod(TMVA.Types.kSVM, "SVM",
                           "Gamma=0.25:Tol=0.001:VarTransform=Norm")

    # Boosted Decision Trees
    if "BDTG" in mlist:
        factory.BookMethod(TMVA.Types.kBDT, "BDTG",
                           "!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.30:UseBaggedGrad:GradBaggingFraction=0.6:SeparationType=GiniIndex:nCuts=20:NNodesMax=5")
    if "BDT" in mlist:
        factory.BookMethod(TMVA.Types.kBDT, "BDT",
                           "!H:!V:NTrees=850:nEventsMin=150:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning")
    if "BDTB" in mlist:
        factory.BookMethod(TMVA.Types.kBDT, "BDTB",
                           "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning")
    if "BDTD" in mlist:
        factory.BookMethod(TMVA.Types.kBDT, "BDTD",
                           "!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate")

    # RuleFit -- TMVA implementation of Friedman's method
    if "RuleFit" in mlist:
        factory.BookMethod(TMVA.Types.kRuleFit, "RuleFit",
                           "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02")

    # --------------------------------------------------------------------------
    # ---- Train, test, and evaluate all booked MVAs
    factory.TrainAllMethods()
    factory.TestAllMethods()
    factory.EvaluateAllMethods()

    # Save the output.
    outputFile.Close()

    print("=== wrote root file %s\n" % outfname)
    print("=== TMVAClassification is done!\n")

    # open the GUI for the result macros
    if (gui):
        gROOT.ProcessLine("TMVAGui(\"%s\")" % outfname)
        # keep the ROOT thread running
        gApplication.Run()
"""Pickle writing unit tests for PyROOT package.""" import os, sys, unittest try: import pickle, cPickle except ImportError: import pickle as cPickle # p3 import ROOT from ROOT import gROOT, TBufferFile, TH1F, TBuffer, std from common import * __all__ = [ 'PickleReadingSimpleObjectsTestCase', 'PickleReadingComplicationsTestCase' ] gROOT.LoadMacro("PickleTypes.C+") SomeCountedClass = ROOT.SomeCountedClass SomeDataObject = ROOT.SomeDataObject ### Read various objects with the two pickle modules ========================= class PickleReadingSimpleObjectsTestCase(MyTestCase): in1 = open(pclfn, 'rb') # names from common.py in2 = open(cpclfn, 'rb') # note that the order of these tests have to match the writing order (for # simple indexing, shelve should have been used instead); this also means # that if reading of one test fails, everything downstream fails as well def test1ReadTObjectDerived(self): """Test reading of a histogram from a pickle file""" def __doh1test(self, h1):
import os
import ROOT
from ROOT import gROOT, gSystem

# Number of data points in the threshold scan for the count / weight methods.
n_thres_count = 10
n_thres_weight = 10

# Directories holding the template and the test inputs.
tmplt_dir = "/home/wxie/AI/Spike/July_2019/Spike_July_2019/Analysis/Macros/template"
test_dir = "/home/wxie/AI/Spike/July_2019/Spike_July_2019/Analysis/Macros/test"


def _full_paths(directory):
    """Return '<directory>/<entry>' for every entry in *directory*."""
    return [directory + "/" + entry for entry in os.listdir(directory)]


def _to_string_vector(paths):
    """Copy a Python list of path strings into a ROOT std::vector<string>."""
    vec = ROOT.std.vector('string')()
    for path in paths:
        vec.push_back(path)
    return vec


tmplt_list = _full_paths(tmplt_dir)
test_list = _full_paths(test_dir)

# Interpret/load the comparison macro, then hand both file lists to it.
gROOT.LoadMacro("compare_pg.C")
v_tmplt_list = _to_string_vector(tmplt_list)
v_test_list = _to_string_vector(test_list)
ROOT.compare_pg(v_tmplt_list, v_test_list, n_thres_count, n_thres_weight)
import os, sys, math, datetime
from ROOT import gROOT, gStyle, TFile, TTree, TH1F, TH1D, TCanvas, TPad, TMath, TF1, TLegend, gPad, gDirectory
from ROOT import kRed, kBlue, kGreen, kWhite
from collections import OrderedDict
import numpy

# Make the script's own directory importable so the local Plotter package
# resolves regardless of the caller's PYTHONPATH.
sys.path.append(os.path.abspath(os.path.curdir))
from Plotter import parseLYAnaInputArgs
# Parse the command-line options for this light-yield analysis run.
options = parseLYAnaInputArgs()

from Plotter.CommonTools import CalcD, AlphaSourceFitter

# Run headless and apply the ATLAS plot style before any canvas is made.
gROOT.SetBatch()
gROOT.LoadMacro("Plotter/AtlasStyle.C")
from ROOT import SetAtlasStyle
SetAtlasStyle()

####################################################################################################
####################################################################################################
if __name__ == '__main__':

    #gROOT.SetStyle("Plain")
    #gStyle.SetOptFit()
    #gStyle.SetOptStat(0)

    # Echo the parsed options (Python 2 print statement).
    print options

    # Per-sample containers, keyed later in the (unseen) remainder of main.
    myfile = {}
    mytree = {}
    myhist = {}
    myfit = {}
    valphys = {}
# Created: 11/25/13 # Last: 11/26/13 """C++11 language interface unit tests for PyROOT package.""" import sys, os, unittest sys.path.append(os.path.dirname(os.path.dirname(__file__))) import ROOT from ROOT import TGraphErrors, gROOT from common import * __all__ = [ 'Cpp1Cpp11StandardClassesTestCase', 'Cpp2Cpp11LanguageConstructsTestCase' ] gROOT.LoadMacro("Cpp11Features.C+") MyCounterClass = ROOT.MyCounterClass CreateMyCounterClass = ROOT.CreateMyCounterClass ### C++11 standard library classes =========================================== class Cpp1Cpp11StandardClassesTestCase(MyTestCase): def test01SharedPtr(self): """Test usage and access of std::shared_ptr<>""" # proper memory accounting self.assertEqual(MyCounterClass.counter, 0) ptr1 = CreateMyCounterClass() self.assert_(not not ptr1)
################################################################ ## In principle all you have to setup is defined in this file ## ################################################################ from configManager import configMgr from ROOT import kBlack, kWhite, kGray, kRed, kPink, kMagenta, kViolet, kBlue, kAzure, kCyan, kTeal, kGreen, kSpring, kYellow, kOrange from configWriter import fitConfig, Measurement, Channel, Sample from systematic import Systematic from math import sqrt import os # Setup for ATLAS plotting from ROOT import gROOT gROOT.LoadMacro("./macros/AtlasStyle.C") import ROOT ROOT.SetAtlasStyle() ########################## # Set observed and expected number of events in counting experiment ndata = 124. # Number of events observed in data nbkg = 104. # Number of predicted bkg events nsig = 1. # Number of predicted signal events nbkgErr = 12. # (Absolute) Statistical error on bkg estimate nsigErr = 1. # (Absolute) Statistical error on signal estimate lumiError = 0.039 # Relative luminosity uncertainty # Set uncorrelated systematics for bkg and signal (1 +- relative uncertainties) ucb = Systematic("ucb", configMgr.weights, 1.115, 0.885, "user", "userOverallSys") ucs = Systematic("ucs", configMgr.weights, 1.1, 0.9, "user", "userOverallSys")
# --------------------- # python GHphase3.py # python GHphase3.py [yaml-file] # --------------------- # import # --------------------- import sys sys.argv.append('-b') # batch mode import os import ROOT import yaml from ROOT import gROOT gROOT.LoadMacro("PionID.cxx") def runPionID(): print("...................................") LocalPath = "" YamlFile = "" # Check if a yaml config file has been passed in args: print("sys.argv = %d" % len(sys.argv)) if (len(sys.argv) > 2): YamlFile = sys.argv[1] else: usage = "Usage: %s [yaml config]" % sys.argv[0]