def __init__( self, fit_data ):
    MapStrRootPtr = stl.map( stl.string, "TH1*" )
    StrHist = stl.pair( stl.string, "TH1*" )

    self.fit_data = fit_data
    self.models = {}
    self.sample = RooCategory( 'sample', 'sample' )
    self.roofit_variables = []

    input_hists = MapStrRootPtr()

    # first create observables
    # Since we are looking for normalisation in equivalent regions
    # the number of events in each sample has to be identical.
    # Hence, pick one fit_data to create the set of observables
    fit_data_1 = fit_data.itervalues().next()
    samples = fit_data_1.samples
    self.observables = {}
    N_min = 0
    N_max = fit_data_1.n_data() * 2
    for sample in samples:
        self.observables[sample] = Observable( 'n_' + sample,
                                               'number of ' + sample + " events",
                                               fit_data_1.normalisation[sample],
                                               N_min, N_max, "events" )

    # next create the models
    for variable, fit_input in fit_data.iteritems():
        self.models[variable] = fit_input.get_roofit_model( variable, self.observables )
        self.sample.defineType( variable )
        self.sample.setLabel( variable )
        data = deepcopy( fit_input.real_data_histogram() )
        input_hists.insert( StrHist( variable, data ) )
        self.roofit_variables.append( fit_input.fit_variable )

    self.comb_data = RooDataHist( "combData",
                                  "combined data",
                                  RooArgList( self.roofit_variables[0] ),
                                  self.sample,
                                  input_hists,
                                  )
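For context, here is a minimal sketch of how the pieces built in the constructor above would typically be combined into a simultaneous fit. The method name fit and the use of RooFit.Extended() are assumptions for illustration (only self.models, self.sample and self.comb_data come from the code above, and RooSimultaneous/RooFit are assumed to be imported from ROOT):

def fit( self ):
    # one index category state and one pdf per fit variable, sharing the
    # same normalisation observables across regions
    sim_pdf = RooSimultaneous( 'simPdf', 'simultaneous pdf', self.sample )
    for variable, model in self.models.iteritems():
        sim_pdf.addPdf( model, variable )
    # fit all categories at once against the combined RooDataHist
    sim_pdf.fitTo( self.comb_data, RooFit.Extended() )
    return sim_pdf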
# module-level imports assumed by this test (generate and CPPType are
# provided by rootpy.stl)
import ROOT
import rootpy.stl as stl
from rootpy.stl import CPPType, generate


def test_stl():
    generate('map<int,vector<float> >', '<vector>;<map>')
    generate('map<int,vector<int> >', '<vector>;<map>')
    generate('vector<TLorentzVector>', '<vector>;TLorentzVector.h')

    ROOT.std.map('int,vector<float>')
    ROOT.std.map('int,vector<int>')
    ROOT.std.vector('TLorentzVector')

    temp = CPPType.from_string('vector<vector<vector<int> > >')
    temp.ensure_built()

    stl.vector('vector<map<int, string> >')

    stl.vector(stl.string)()
    stl.vector('string')()
    stl.vector(int)

    stl.map("string", "string")
    stl.map(stl.string, stl.string)
    stl.map(int, stl.string)
    stl.map(stl.string, int)
    stl.map("string", ROOT.TLorentzVector)

    histmap = stl.map("string", ROOT.TH1D)()
    a = ROOT.TH1D("a", "a", 10, -1, 1)
    histmap["a"] = a

    StrHist = stl.pair(stl.string, "TH1*")

    generate('pair<map<string,TH1*>::iterator,bool>', '<map>;<TH1.h>')

    histptrmap = stl.map(stl.string, "TH1*")()
    histptrmap.insert(StrHist("test", a))

    assert histptrmap["test"] is a
import rootpy.stl as stl
import ROOT

# Create a vector type
StrVector = stl.vector(stl.string)
# Instantiate
strvector = StrVector()
strvector.push_back("Hello")
# etc.

MapStrRoot = stl.map(stl.string, ROOT.TH1D)
MapStrRootPtr = stl.map(stl.string, "TH1D*")
StrHist = stl.pair(stl.string, "TH1*")

m = MapStrRootPtr()
a = ROOT.TH1D('t1', 't1', 10, 0, 1)
m.insert(StrHist("test", a))

print m
# include "RooChebychev.h" # include "RooAddPdf.h" # include "RooSimultaneous.h" # include "RooCategory.h" # include "TCanvas.h" # include "TAxis.h" # include "RooPlot.h" from ROOT import RooFit, RooRealVar, RooGaussian, RooChebychev, RooAddPdf, \ RooArgList, RooArgSet, RooDataSet, RooCategory, RooPlot, TCanvas, gPad, \ RooSimultaneous, kDashed, RooDataHist import numpy as np from rootpy.plotting import Hist import rootpy.stl as stl MapStrRootPtr = stl.map(stl.string, "TH1*") StrHist = stl.pair(stl.string, "TH1*") def get_data(): N_bkg1_ctl = 10000 N_signal_ctl = 2000 N_bkg1_obs = 1000 N_signal_obs = 200 mu1, mu2, sigma1, sigma2 = 100, 140, 15, 5 x1_ctl = mu1 + sigma1 * np.random.randn(N_bkg1_ctl) x2_ctl = mu2 + sigma2 * np.random.randn(N_signal_ctl) x1_obs = mu1 + sigma1 * np.random.randn(N_bkg1_obs) x2_obs = mu2 + sigma2 * np.random.randn(N_signal_obs) h1 = Hist(100, 40, 200, title='data')
# include "RooSimultaneous.h" # include "RooCategory.h" # include "TCanvas.h" # include "TAxis.h" # include "RooPlot.h" from ROOT import RooFit, RooRealVar, RooGaussian, RooChebychev, RooAddPdf, \ RooArgList, RooArgSet, RooDataSet, RooCategory, RooPlot, TCanvas, gPad, \ RooSimultaneous, kDashed, RooDataHist, RooHistPdf, kRed import numpy as np from rootpy.plotting import Hist import rootpy.stl as stl n_bins = 100 min_x = 0 max_x = 200 MapStrRootPtr = stl.map( stl.string, "TH1*" ) StrHist = stl.pair( stl.string, "TH1*" ) N_bkg1_ctl = 30000 N_signal_ctl = 2000 N_bkg1_obs = 30000 N_signal_obs = 2000 N_data = N_bkg1_obs + N_signal_obs mu1, mu2, sigma1, sigma2 = 100, 140, 15, 5 mu3, mu4, sigma3, sigma4 = 80, 170, 14, 10 def get_data(): # start with data that has the same statistics # randomise it x1_ctl = mu1 + sigma1 * np.random.randn( N_bkg1_ctl ) x2_ctl = mu2 + sigma2 * np.random.randn( N_signal_ctl )