def _h2toh1(self, h):
    import array

    if not isinstance(h, ROOT.TH2):
        return h

    sentry = TH1AddDirSentry()

    # H1class = getattr(ROOT,h.__class__.__name__.replace('2','1'))

    nx = h.GetNbinsX()
    ny = h.GetNbinsY()

    h_flat = ROOT.TH1D(h.GetName(), h.GetTitle(), nx * ny, 0, nx * ny)

    sumw2 = h.GetSumw2()
    sumw2_flat = h_flat.GetSumw2()

    for i in xrange(1, nx + 1):
        for j in xrange(1, ny + 1):
            # i,j must be mapped in
            b2d = h.GetBin(i, j)
            # b2d = h.GetBin( j,i )
            # b1d = ((i-1)+(j-1)*nx)+1
            b1d = ((j - 1) + (i - 1) * ny) + 1

            h_flat.SetAt(h.At(b2d), b1d)
            sumw2_flat.SetAt(sumw2.At(b2d), b1d)

    h_flat.SetEntries(h.GetEntries())

    stats2d = array.array('d', [0] * 7)
    h.GetStats(stats2d)

    stats1d = array.array('d', [0] * 4)
    stats1d[0] = stats2d[0]
    stats1d[1] = stats2d[1]
    stats1d[2] = stats2d[2] + stats2d[4]
    stats1d[3] = stats2d[3] + stats2d[5]

    h_flat.PutStats(stats1d)

    xtitle = h.GetXaxis().GetTitle()
    v1, v2 = xtitle.split(':')  # we know it's a 2d filled by an expr like y:x
    xtitle = '%s #times %s bin' % (v1, v2)
    h_flat.GetXaxis().SetTitle(xtitle)

    return h_flat
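# A standalone sketch of the flattening convention used by _h2toh1 above (no ROOT needed):
# 2-D bin (i, j), with i in [1, nx] and j in [1, ny], maps to 1-D bin ((j-1) + (i-1)*ny) + 1,
# i.e. x-major ordering with the y index varying fastest.
nx, ny = 3, 2
flat = [((j - 1) + (i - 1) * ny) + 1
        for i in range(1, nx + 1)
        for j in range(1, ny + 1)]
assert flat == [1, 2, 3, 4, 5, 6]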
def read_datum(self, i, j, k, use_memmap=True):
    '''Reads the band `k` value for pixel at row `i` and column `j`.

    Arguments:

        `i`, `j`, `k` (integer):

            Row, column and band index, respectively.

        `use_memmap` (bool, default True):

            Specifies whether the file's memmap interface should be used
            to read the data. Setting this arg to True only has an effect
            if a memmap is being used (i.e., if `img.using_memmap` is True).

    Using this function is not an efficient way to iterate over bands or
    pixels. For such cases, use readBands or readPixel instead.
    '''
    import array

    if self._memmap is not None and use_memmap is True:
        datum = self._memmap[i, k, j]
        if self.scale_factor != 1:
            datum /= float(self.scale_factor)
        return datum

    d_col = self.sample_size
    d_band = d_col * self.ncols
    d_row = d_band * self.nbands
    self.fid.seek(self.offset + i * d_row + j * d_col + k * d_band, 0)
    vals = array.array('b')
    vals.fromfile(self.fid, self.sample_size)
    arr = np.fromstring(vals.tostring(), dtype=self.dtype)
    return arr.tolist()[0] / float(self.scale_factor)
def read_datum(self, i, j, k):
    '''Reads the band `k` value for pixel at row `i` and column `j`.

    Arguments:

        `i`, `j`, `k` (integer):

            Row, column and band index, respectively.

    Using this function is not an efficient way to iterate over bands or
    pixels. For such cases, use readBands or readPixel instead.
    '''
    import array

    if self.memmap is not None:
        datum = self.memmap[i, k, j]
        if self.scale_factor != 1:
            datum /= float(self.scale_factor)
        return datum

    d_col = self.sample_size
    d_band = d_col * self.ncols
    d_row = d_band * self.nbands
    self.fid.seek(self.offset + i * d_row + j * d_col + k * d_band, 0)
    vals = array.array('b')
    vals.fromfile(self.fid, self.sample_size)
    arr = np.fromstring(vals.tostring(), dtype=self.dtype)
    return arr.tolist()[0] / float(self.scale_factor)
def FindPrime(N):
    from math import log
    import array

    # Upper bound on the sieve size needed to contain the first N primes.
    MAXNUM = int((log(N, 10) * 2.5 + 0.5) * N)
    i = 2
    a = array.array('i')
    p = array.array('i')
    for x in range(MAXNUM):
        a.append(1)
    while (i < len(a) - 1 and len(p) < N):
        if a[i] == 1:
            p.append(i)
        for j in range(len(p)):
            if i * p[j] >= MAXNUM:
                break
            a[i * p[j]] = 0
            if i % p[j] == 0:
                break
        i += 1
    return (p)
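# A minimal usage sketch for FindPrime: with N = 20 the sieve bound computed inside the
# function is large enough to hold the first 20 primes, so the call below should print
# [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71].
primes = FindPrime(20)
print(list(primes))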
def SendLightsCK(self, r, g, b):
    # struct.pack(fmt, magic, ver, type, seq, port, flags, timerVal V, uni, 0, 0, 0, 0, data)
    levels = [r, g, b] * 10
    arr = array.array('B', levels)
    out = struct.pack("LHHLBxHLB255s",
                      0x4adc0104, 0x0001, 0x0101, 0, 0, 0, -1, 0,
                      arr.tostring())
    socket(AF_INET, SOCK_DGRAM).sendto(out, (IP_ADDRESS, port))
def read_datum(self, i, j, k, use_memmap=True):
    '''Reads the band `k` value for pixel at row `i` and column `j`.

    Arguments:

        `i`, `j`, `k` (integer):

            Row, column and band index, respectively.

        `use_memmap` (bool, default True):

            Specifies whether the file's memmap interface should be used
            to read the data. Setting this arg to True only has an effect
            if a memmap is being used (i.e., if `img.using_memmap` is True).

    Using this function is not an efficient way to iterate over bands or
    pixels. For such cases, use readBands or readPixel instead.
    '''
    import array

    if self._memmap is not None and use_memmap is True:
        datum = self._memmap[i, k, j]
        if self.scale_factor != 1:
            datum /= float(self.scale_factor)
        return datum

    d_col = self.sample_size
    d_band = d_col * self.ncols
    d_row = d_band * self.nbands
    self.fid.seek(self.offset + i * d_row + j * d_col + k * d_band, 0)
    vals = array.array(byte_typecode)
    vals.fromfile(self.fid, self.sample_size)
    arr = np.fromstring(tobytes(vals), dtype=self.dtype)
    return arr.tolist()[0] / float(self.scale_factor)
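# Hypothetical usage sketch (the reader object and its construction are not shown above;
# the name `img` is illustrative only): read band 3 of the pixel at row 10, column 20,
# once via the memmap path and once forcing the raw-file path.
#
#   v_mm  = img.read_datum(10, 20, 3)                    # uses img._memmap when available
#   v_raw = img.read_datum(10, 20, 3, use_memmap=False)  # seeks and reads from img.fid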
def windows(data, length, fragment, window_size, gap_size, l0):
    keys = sorted(data.keys(), key=lambda (c): chrsort(c))
    tbl = []
    eligible = 0
    gaps = [0, 0, 0, 0]  # gaps - histogram
    for c in keys:
        # wlist = array.array('l', [])
        wlist = []
        last_init = -max_window
        chr_windows = 0
        for read in data[c]:
            strand, init = read % 1000, read / 1000
            gp = (init - last_init) / window_size
            if gp <= 3:
                gaps[gp] += 1
            w_init = init / window_size * window_size
            w_last_init = last_init / window_size * window_size
            if w_last_init == w_init:
                wlist[-1] += 1
                chr_windows += 1
            else:
                wlist.append(w_init * max_window + 1)
                chr_windows += 1
            last_init = init
        # <- for read in data[c]

        wlist_good = []  # array.array('l', [])
        for i in wlist:
            if i % max_window >= l0:
                wlist_good.append(i)

        last_init = -max_window
        wlist = array.array('l', [0])
        for i in wlist_good:
            reads, init = i % max_window, i / max_window
            if last_init + window_size + gap_size > init:
                gg = (init - last_init - window_size) / window_size
                for k in range(gg):
                    wlist.append((last_init + (k + 1) * window_size) * max_window)
            wlist.append(i)
            last_init = init
        # <- FOR

        tbl.append([c, chr_windows])
        eligible += chr_windows
        data[c] = wlist

    msg = "Total eligible windows of {}bp with allowed gap size {}bp: {}"
    logging.info(msg.format(window_size, gap_size, chr_windows))
    msg = "Chromosome name, Eligible windows:\n{}"
    logging.info(msg.format(beautiful_table(tbl)))
    msg = "Gap size count:"
    for i in range(4):
        msg += "\n {:>3} - {:>3}bp: {}".format(i * window_size + 1,
                                               (i + 1) * window_size, gaps[i])
    logging.info(msg)
    return data
def restore(self, filename):
    "restore the cpu state from a file"
    import array
    a = array.array("L")
    f = open(filename, "rb")
    try:
        a.fromfile(f, _py430.ARY_SIZE)
    finally:
        f.close()
    self._ary[:] = a
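# A hedged companion sketch (not part of the original class): the mirror-image `save`
# method implied by `restore` above. It reuses `self._ary` and `_py430.ARY_SIZE` from
# that snippet; everything else here is an assumption.
def save(self, filename):
    "save the cpu state to a file"
    import array
    a = array.array("L", self._ary)
    assert len(a) == _py430.ARY_SIZE  # same fixed size that restore() expects to read back
    f = open(filename, "wb")
    try:
        a.tofile(f)
    finally:
        f.close()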
def frq_db(corp: Corpus, attrname: str, nums: str = 'frq', id_range: int = 0) -> array:
    import array
    filename = (subcorp_base_file(corp, attrname) + '.' + nums)
    if not id_range:
        id_range = corp.get_attr(attrname).id_range()
    if nums == 'arf':
        frq = array.array('f')
        try:
            frq.fromfile(open(filename, 'rb'), id_range)  # type: ignore
        except IOError as ex:
            raise MissingSubCorpFreqFile(corp, ex)
        except EOFError as ex:
            os.remove(filename.rsplit('.', 1)[0] + '.docf')
            raise MissingSubCorpFreqFile(corp, ex)
    else:
        try:
            if corp.get_conf('VIRTUAL') and not hasattr(corp, 'spath') and nums == 'frq':
                raise IOError
            frq = array.array('i')
            frq.fromfile(open(filename, 'rb'), id_range)  # type: ignore
        except EOFError as ex:
            os.remove(filename.rsplit('.', 1)[0] + '.docf')
            os.remove(filename.rsplit('.', 1)[0] + '.arf')
            os.remove(filename.rsplit('.', 1)[0] + '.frq')
            raise MissingSubCorpFreqFile(corp, ex)
        except IOError:
            try:
                frq = array.array('l')
                frq.fromfile(open(filename + '64', 'rb'), id_range)  # type: ignore
            except IOError as ex:
                if not hasattr(corp, 'spath') and nums == 'frq':
                    a = corp.get_attr(attrname)
                    frq.fromlist([a.freq(i) for i in range(a.id_range())])
                else:
                    raise MissingSubCorpFreqFile(corp, ex)
    return frq
def test07_mean_overloads(self):
    """Adapted test for array overloading"""

    import cppyy, array
    cmean = cppyy.gbl.calc_mean

    numbers = [8, 2, 4, 2, 4, 2, 4, 4, 1, 5, 6, 3, 7]
    mean, median = 4.0, 4.0

    for l in ['f', 'd', 'i', 'h', 'l']:
        a = array.array(l, numbers)
        assert round(cmean(len(a), a) - mean, 8) == 0
def test08_templated_mean_overloads(self):
    """Adapted test for array overloading with templates"""

    import cppyy, array
    cmean = cppyy.gbl.calc_mean_templ

    numbers = [8, 2, 4, 2, 4, 2, 4, 4, 1, 5, 6, 3, 7]
    mean, median = 4.0, 4.0

    for l in ['f', 'd', 'i', 'h', 'l']:
        a = array.array(l, numbers)
        assert round(cmean(len(a), a) - mean, 8) == 0
def DivideTGraph(num, den):
    Ns_den = den.GetN()
    Xs_den = den.GetX()
    Ys_den = den.GetY()
    EXLs_den = den.GetEXlow()
    EXHs_den = den.GetEXhigh()
    EYLs_den = den.GetEYlow()
    EYHs_den = den.GetEYhigh()
    print "den.GetN()", den.GetN()
    print "num.GetN()", num.GetN()
    Ys_num = num.GetY()
    EYLs_num = num.GetEYlow()
    EYHs_num = num.GetEYhigh()
    print "DivideTGraph: new"
    bins = [i for i in range(Ns_den) if Ys_den[i] > 0]
    print "Xs_den", Xs_den
    Xs_new = [Xs_den[i] for i in bins]
    print "Xs_new", Xs_new
    Ys_new = [Ys_num[i] / (Ys_den[i]) for i in bins]
    EXLs_new = [EXLs_den[i] for i in bins]
    EXHs_new = [EXHs_den[i] for i in bins]
    [EYLs_num[i] for i in bins]
    [((EYLs_num[i] / (Ys_num[i] + 1E-3))**2) for i in bins]
    [Ys_new[i] * sqrt((EYLs_num[i] / (Ys_num[i] + 1E-3))**2) for i in bins]
    EYLs_new = [
        Ys_new[i] * sqrt((EYLs_num[i] / (Ys_num[i] + 1E-3))**2 +
                         (EYHs_den[i] / (Ys_den[i] + 1E-3))**2)
        for i in bins
    ]
    EYHs_new = [
        Ys_new[i] * sqrt((EYHs_num[i] / (Ys_num[i] + 1E-3))**2 +
                         (EYLs_den[i] / (Ys_den[i] + 1E-3))**2)
        for i in bins
    ]
    print "DivideTGraph: len"
    n = len(Xs_new)
    print "DivideTGraph: array"
    Xs_new = array.array('f', Xs_new)
    Ys_new = array.array('f', Ys_new)
    EXLs_new = array.array('f', EXLs_new)
    EXHs_new = array.array('f', EXHs_new)
    EYLs_new = array.array('f', EYLs_new)
    EYHs_new = array.array('f', EYHs_new)
    print "DivideTGraph: ratio"
    ratio = ROOT.TGraphAsymmErrors(n, Xs_new, Ys_new,
                                   EXLs_new, EXHs_new, EYLs_new, EYHs_new)
    print "DivideTGraph: done"
    return ratio
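# A minimal usage sketch for DivideTGraph (assumes ROOT, array and sqrt are available at
# module level, as the function above already expects): build two small TGraphAsymmErrors
# from float arrays and take their ratio. The numbers are illustrative only.
x = array.array('f', [1.0, 2.0, 3.0])
y1 = array.array('f', [10.0, 20.0, 30.0])
y2 = array.array('f', [2.0, 4.0, 5.0])
ex = array.array('f', [0.5, 0.5, 0.5])
ey1 = array.array('f', [1.0, 1.5, 2.0])
ey2 = array.array('f', [0.3, 0.4, 0.5])
num = ROOT.TGraphAsymmErrors(3, x, y1, ex, ex, ey1, ey1)
den = ROOT.TGraphAsymmErrors(3, x, y2, ex, ex, ey2, ey2)
ratio = DivideTGraph(num, den)  # TGraphAsymmErrors with y = y1/y2 in each filled bin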
def decodeSteim1(temp, numSamples, littleEndian, bias):
    # Int32Array
    if (len(temp) % 64) != 0:
        raise Exception(
            "encoded data length is not multiple of 64 bytes == {}".format(len(temp)))

    # assumed intent: pre-allocate one 32-bit slot per output sample
    samples = array.array("l", [0] * numSamples)
    tempSamples = []
    numFrames = len(temp) // 64
    current = 0
    start = 0
    firstData = 0
    lastValue = 0

    for i in range(numFrames):
        # returns only differences except for frame 0
        tempSamples = extractSteim1Samples(temp, i * 64, littleEndian)
        firstData = 0                       # d(0) is byte 0 by default
        if i == 0:                          # special case for first frame
            lastValue = bias                # assign our X(-1)
            start = tempSamples[1]          # X(0) is byte 1 for frame 0
            firstData = 3                   # d(0) is byte 3 for frame 0
            if (bias == 0):
                lastValue = start - tempSamples[3]  # X(-1) = X(0) - d(0)
        for j in range(firstData, len(tempSamples)):
            if current >= numSamples:
                break
            samples[current] = lastValue + tempSamples[j]  # X(n) = X(n-1) + d(n)
            lastValue = samples[current]
            current = current + 1

    if current != numSamples:
        raise Exception(
            "Number of samples decompressed doesn't match number in header: "
            "{} != {}".format(current, numSamples))
    return samples
def decode_call_to_self(d, all_instr_list):
    ## verify some bytes first
    import array
    sd = array.array('B', d)
    szd = None

    # look for mov and call to self after a min number of instructions
    if len(all_instr_list) < 10:
        return None

    fFoundMov = False
    fFoundCounter = False
    fFoundCallToSelf = False
    iLen = 0
    iCallOffset = 0
    szMsg = 'No decoder found'

    for i in range(0, 2):
        instr_lst = all_instr_list[i]
        szInsBytes = instr_lst[1]
        szIns = instr_lst[2]
        offset = instr_lst[3]

        # e8ffffffff   call 0x00000009
        if szInsBytes == "e8ffffffff":
            fFoundCallToSelf = True
            iCallOffset = offset + 5

        # mov ecx,1735
        if szIns.startswith('mov ') and szIns.find('ecx,') > 0:
            fFoundCounter = True
            iLen = int(szIns.split(',')[1])

    if (fFoundCallToSelf and fFoundCounter and iLen > 0):
        szMsg = "Found call_to_self shellcode len = %d, decode offset= %d" % (
            iLen, iCallOffset)
        szd = []
        for i in range(0, iCallOffset):
            szd.append(chr(sd[i]))
        szd.append(chr(sd[iCallOffset - 1]))
        for i in range(iCallOffset, len(sd) - iCallOffset):
            szd.append(chr(sd[i]))
        return [''.join(szd), iLen, 0, iCallOffset, szMsg]

    return [None, 0, 0, 0, szMsg]
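# Hypothetical usage sketch (Python 3 byte handling assumed). The instruction-list layout
# is inferred from the indexing above: item[1] = hex-encoded instruction bytes,
# item[2] = disassembly text, item[3] = offset. The shellcode bytes and the 1735-byte
# counter are illustrative values only.
shellcode = b'\xe8\xff\xff\xff\xff' + b'\x90' * 64
instrs = ([(0, 'e8ffffffff', 'call 0x00000009', 0),
           (1, 'b9c7060000', 'mov ecx,1735', 5)] +
          [(2 + i, '90', 'nop', 10 + i) for i in range(10)])
decoded, length, _, call_off, msg = decode_call_to_self(shellcode, instrs)
# length == 1735, call_off == 5, msg describes the detected call-to-self decoder stub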
def dataprocess(data):
    """Process the data read from the txt file."""
    import array

    # Array of integers.
    send_data = array.array("i")

    # check the number of lines
    if len(data) > 2:
        print("overflow de lineas")

    # read the first line of the txt file
    if len(data[0]) > 18:
        for x in range(16):
            send_data.append(ord(data[0][x]) + 256)
        print("overflow line0")
    else:
        for x in range(len(data[0]) - 2):
            send_data.append(ord(data[0][x]) + 256)
        for f in range(18 - len(data[0])):
            send_data.append(288)
    send_data.append(192)

    # read the second line of the txt file
    if len(data[1]) > 18:
        for y in range(16):
            send_data.append(ord(data[1][y]) + 256)
        print("overflow line1")
    else:
        for y in range(len(data[1]) - 2):
            send_data.append(ord(data[1][y]) + 256)
        for g in range(18 - len(data[1])):
            send_data.append(288)

    print(send_data)
    amt = len(send_data)
    sent = 0
    recibe_data = array.array('I', [0] * amt)

    fd = riffa.fpga_open(0)
    sent = riffa.fpga_send(fd, 0, send_data, amt, 0, True, 0)
    if (sent != 0):
        riffa.fpga_recv(fd, 0, recibe_data, 0)
    riffa.fpga_close(fd)

    print("Data recibida:")
    print(recibe_data)
def combineEffHistos(folder, lHistos, trigLevel):
    totalLumi = 0
    for histo in lHistos:
        totalLumi += histo[1]
    print "Total lumi is ", totalLumi, " /pb"

    #xbin = [5,10,15,17,18,19,20,25,30,35,40,45,50,55,60,120,200]
    if (trigLevel == "Jet"):
        xbin = [5,10,15,17,18,19,20,25,30,35,40,45,50,55,60,70,80,90,100,120,140,160,200]
        etaBin = "CentralForward"
    else:
        xbin = [5,10,15,17,18,19,20,22,24,26,28,30,32,34,36,38,40,42.5,45,47.5,50,52.5,55,60,70,80,100,120,200]
        etaBin = "Total"
    xbins = array.array('d', xbin)

    denominator = ROOT.TH1F("denominator", "", len(xbin) - 1, xbins)
    numerator = ROOT.TH1F("numerator", "", len(xbin) - 1, xbins)

    for histo in lHistos:
        file = ROOT.TFile(folder + histo[0], "READ")
        if (trigLevel == "Jet"):
            denomName = "denominatorJet_CentralForward"
        else:
            denomName = "denominator_Total"
        denom = ROOT.TH1F(file.Get(denomName))
        #denom.Sumw2()
        scale = (histo[1] / totalLumi) / denom.Integral()
        denom.Scale(scale)
        denominator.Add(denom)

        num = ROOT.TH1F(file.Get("numerator" + trigLevel + "_" + etaBin))
        #num.Sumw2()
        num.Scale(scale)
        numerator.Add(num)

    eff = ROOT.TGraphAsymmErrors(numerator, denominator, "cl=0.683 b(1,1) mode")
    return eff  #, numerator, denominator
def decodeSteim2(temp, numSamples, swapBytes, bias):
    if (len(temp) % 64) != 0:
        raise Exception(
            "encoded data length is not multiple of 64 bytes ({})".format(len(temp)))

    # assumed intent: pre-allocate one slot per output sample
    samples = [0] * numSamples
    #tempSamples = []
    swapBytes = "?"
    numFrames = int(len(temp) / 64)
    current = 0
    start = 0
    firstData = 0
    lastValue = 0

    for i in range(numFrames):
        ## returns only differences except for frame 0
        tempSamples = extractSteim2Samples('?', temp, i * 64)
        firstData = 0                       ## d(0) is byte 0 by default
        if i == 0:                          ## special case for first frame
            lastValue = bias                ## assign our X(-1)
            print(tempSamples[0:4])
            start = tempSamples[0]          ## X(0) is byte 1 for frame 0
            firstData = 3                   ## d(0) is byte 3 for frame 0
            if bias == 0:
                lastValue = start - tempSamples[3]  ## X(-1) = X(0) - d(0)
        # assumed intent: walk the remaining differences of this frame
        for j in range(firstData, len(tempSamples)):
            if current < numSamples:
                samples[current] = lastValue + tempSamples[j]  ## X(n) = X(n-1) + d(n)
                lastValue = samples[current]
                current = current + 1
            else:
                break

    samples = array.array('l', samples)  # 'l' typecode is an assumption
    if current != numSamples:
        raise Exception(
            "Number of samples decompressed doesn't match number in header: "
            "{} != {}".format(current, numSamples))
    return samples
def read_subimage(self, rows, cols, bands=None):
    '''
    Reads arbitrary rows, columns, and bands from the image.

    Arguments:

        `rows` (list of ints):

            Indices of rows to read.

        `cols` (list of ints):

            Indices of columns to read.

        `bands` (list of ints):

            Optional list of bands to read. If not specified, all bands
            are read.

    Returns:

        :class:`numpy.ndarray`

            An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`),
            and `L` = len(bands) (or # of image bands if `bands` == None).
    '''
    import array

    if self.memmap is not None:
        if bands is None:
            data = np.array(self.memmap.take(rows, 0).take(cols, 1))
        else:
            data = np.array(
                self.memmap.take(rows, 0).take(cols, 1).take(bands, 2))
        if self.scale_factor != 1:
            data = data / float(self.scale_factor)
        return data

    offset = self.offset
    nbands = self.nbands
    nSubRows = len(rows)                    # Rows in sub-image
    nSubCols = len(cols)                    # Cols in sub-image
    d_band = self.sample_size
    d_col = d_band * self.nbands
    d_row = d_col * self.ncols
    vals = array.array('b')
    nVals = self.nrows * self.ncols
    sample_size = self.sample_size

    # Increments between bands
    if bands is not None:
        allBands = 0
        nSubBands = len(bands)
    else:
        allBands = 1
        bands = range(self.nbands)
        nSubBands = self.nbands

    f = self.fid

    # Pixel format is BIP
    for i in rows:
        for j in cols:
            if allBands:
                f.seek(offset + i * d_row + j * d_col, 0)
                vals.fromfile(f, nSubBands * sample_size)
            else:
                for k in bands:
                    f.seek(offset + i * d_row + j * d_col + k * d_band, 0)
                    vals.fromfile(f, sample_size)

    arr = np.fromstring(vals.tostring(), dtype=self.dtype)
    arr = arr.reshape(nSubRows, nSubCols, nSubBands)

    if self.scale_factor != 1:
        return arr / float(self.scale_factor)
    return arr
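# Hypothetical usage sketch for read_subimage (the reader object `img` is illustrative):
#
#   sub = img.read_subimage(rows=[0, 2, 4], cols=list(range(10)), bands=[0, 3, 7])
#   sub.shape == (3, 10, 3)   # (len(rows), len(cols), len(bands))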
def Plotter(etamin, etamax, dxy1, dxy2): c1 = TCanvas("a","b",1000,700) c1.SetGridx() c1.SetGridy() c1.SetTickx() c1.SetTicky() #gROOT.SetBatch(1) # In red related to pT # In blue related to pt position # Denominator SimTrack pt 0< dxy < 5 SimTrack_pt_denominator_0dxy5 = TH1F("SimTrack_pt_denominator_0dxy5","SimTrack_pt_denominator_0dxy5", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc >> SimTrack_pt_denominator_0dxy5",denominator(0,5,etamin,etamax, varsh)) print "%f < eta < %f"%(etamin, etamax) print denominator(0,5,etamin,etamax, varsh) # Denominator SimTrack pt 10< dxy < 30 SimTrack_pt_denominator_10dxy30 = TH1F("SimTrack_pt_denominator_10dxy30","SimTrack_pt_denominator_10dxy30", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc >> SimTrack_pt_denominator_10dxy30",denominator(10,30,etamin,etamax,varsh)) # Denominator SimTrack pt 50< dxy < 500 SimTrack_pt_denominator_50dxy500 = TH1F("SimTrack_pt_denominator_50dxy500","SimTrack_pt_denominator_50dxy500", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc >> SimTrack_pt_denominator_50dxy500",denominator(50,500,etamin,etamax,varsh)) SimTrack_pt_denominator_0dxy5.SetLineColor(kRed) SimTrack_pt_denominator_0dxy5.SetMarkerStyle(24) SimTrack_pt_denominator_0dxy5.SetMarkerColor(kRed) # Define pt_position as SimTrack total momentum over ( Global Position eta() measured at station ME11) # Denominator for 0< dx < 5 on pt pos pt_position_denominator_0dxy5 = TH1F("pt_position_denominator_0dxy5","pt_position_denominator_0dxy5", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta >> pt_position_denominator_0dxy5",denominator(0,5,etamin,etamax,varsh)) pt_position_denominator_10dxy30 = TH1F("pt_position_denominator_10dxy30","pt_position_denominator_10dxy30", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta >> pt_position_denominator_10dxy30",denominator(10,30,etamin,etamax,varsh)) pt_position_denominator_50dxy500 = TH1F("pt_position_denominator_50dxy500","pt_position_denominator_50dxy500", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta >> pt_position_denominator_50dxy500",denominator(50,500,etamin,etamax,varsh)) pt_position_denominator_0dxy5.SetLineColor(kBlue) pt_position_denominator_0dxy5.SetLineWidth(2) pt_position_denominator_0dxy5.SetMarkerStyle(20) pt_position_denominator_0dxy5.SetMarkerColor(kBlue) pt_position_denominator_0dxy5.SetMarkerSize(1) # Numerator SimTrack pt SimTrack_pt_numerator_0dxy5 = TH1F("SimTrack_pt_numerator_0dxy5","SimTrack_pt_numerator_0dxy5", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc>> SimTrack_pt_numerator_0dxy5",numerator_SimTrack_pt(0, 5, etamin, etamax, mincut, varsh)) print numerator_SimTrack_pt(0, 5, etamin, etamax, mincut, varsh) # Numerator SimTrack pt # 10 dx 30 SimTrack_pt_numerator_10dxy30 = TH1F("SimTrack_pt_numerator_10dxy30","SimTrack_pt_numerator_10dxy30", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc>> SimTrack_pt_numerator_10dxy30",numerator_SimTrack_pt(10, 30, etamin, etamax, mincut, varsh)) # Numerator SimTrack pt # 10 dx 30 SimTrack_pt_numerator_50dxy500 = TH1F("SimTrack_pt_numerator_50dxy500","SimTrack_pt_numerator_50dxy500", BINM,array.array('d', binLow)) t1.Draw("pt_SimTrack_csc>> SimTrack_pt_numerator_50dxy500",numerator_SimTrack_pt(50, 500, etamin, etamax, mincut, varsh)) SimTrack_pt_numerator_0dxy5.SetLineColor(kRed) SimTrack_pt_numerator_0dxy5.SetMarkerStyle(24) SimTrack_pt_numerator_0dxy5.SetMarkerColor(kRed) # Numerator for dx < 5 on pt pos pt_position_numerator_0dxy5 = 
TH1F("pt_position_numerator_0dxy5","pt_position_numerator_0dxy5", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta>> pt_position_numerator_0dxy5",numerator_pt_pos(0,5,etamin,etamax,mincut, varsh)) pt_position_numerator_10dxy30 = TH1F("pt_position_numerator_10dxy30","pt_position_numerator_10dxy30", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta>> pt_position_numerator_10dxy30",numerator_pt_pos(10,30,etamin,etamax,mincut, varsh)) pt_position_numerator_50dxy500 = TH1F("pt_position_numerator_50dxy500","pt_position_numerator_50dxy500", BINM,array.array('d', binLow)) t1.Draw("csc_p_over_cosh_eta>> pt_position_numerator_50dxy500",numerator_pt_pos(50,500,etamin,etamax,mincut, varsh)) pt_position_numerator_0dxy5.SetLineColor(kBlue) pt_position_numerator_0dxy5.SetLineWidth(2) pt_position_numerator_0dxy5.SetMarkerStyle(20) pt_position_numerator_0dxy5.SetMarkerColor(kBlue) pt_position_numerator_0dxy5.SetMarkerSize(1) # Define the efficiency objets eff_pt_position = TEfficiency(pt_position_numerator_0dxy5, pt_position_denominator_0dxy5) eff_pt_position.SetLineColor(kBlue+2) eff_pt_position.SetLineWidth(2) eff_pt_position.SetMarkerStyle(20) eff_pt_position.SetMarkerColor(kBlue+2) eff_pt_position.SetMarkerSize(1) # Black eff_pt_position_10dxy30 = TEfficiency(pt_position_numerator_10dxy30, pt_position_denominator_10dxy30) eff_pt_position_10dxy30.SetLineColor(kBlack) eff_pt_position_10dxy30.SetMarkerStyle(20) eff_pt_position_10dxy30.SetMarkerColor(kBlack) eff_pt_position_10dxy30.SetLineWidth(2) # Brown eff_pt_position_50dxy500 = TEfficiency(pt_position_numerator_10dxy30, pt_position_denominator_10dxy30) eff_pt_position_50dxy500.SetLineColor(kOrange+3) eff_pt_position_50dxy500.SetMarkerStyle(20) eff_pt_position_50dxy500.SetMarkerColor(kOrange+3) eff_pt_position_50dxy500.SetLineWidth(2) # Red eff_SimTrack_pt = TEfficiency(SimTrack_pt_numerator_0dxy5, SimTrack_pt_denominator_0dxy5) eff_SimTrack_pt.SetLineColor(kRed) eff_SimTrack_pt.SetMarkerStyle(22) eff_SimTrack_pt.SetMarkerColor(kRed) eff_SimTrack_pt.SetLineWidth(2) # Magenta eff_SimTrack_pt_10dxy30 = TEfficiency(SimTrack_pt_numerator_10dxy30, SimTrack_pt_denominator_10dxy30) eff_SimTrack_pt_10dxy30.SetLineColor(kPink+2) eff_SimTrack_pt_10dxy30.SetMarkerStyle(21) eff_SimTrack_pt_10dxy30.SetMarkerColor(kPink+2) eff_SimTrack_pt_10dxy30.SetLineWidth(2) # Green eff_SimTrack_pt_50dxy500 = TEfficiency(SimTrack_pt_numerator_50dxy500, SimTrack_pt_denominator_50dxy500) eff_SimTrack_pt_50dxy500.SetLineColor(kGreen+2) eff_SimTrack_pt_50dxy500.SetMarkerStyle(23) eff_SimTrack_pt_50dxy500.SetMarkerColor(kGreen+2) eff_SimTrack_pt_50dxy500.SetLineWidth(2) # Initial background b1 = TH1F("b1","b1",35,0,60) b1.GetYaxis().SetRangeUser(0.0,1.06) b1.GetYaxis().SetTitleOffset(1.2) b1.GetYaxis().SetNdivisions(520) b1.GetYaxis().SetTitle("Efficiency") b1.GetXaxis().SetTitle(" p_{T} and p_{T}^{Pos} respectively [GeV]") b1.SetTitle(" p_{T} and p_{T}^{Pos} Reco. 
Efficiency, csc_%d, ct %d mm"%(varsh,ctau)+", %d < | d_{xy} | < %d"%(dxy1, dxy2)) b1.SetStats(0) b1.Draw() if (dxy1 == 0 and dxy2 == 5): eff_pt_position.Draw("same P") eff_SimTrack_pt.Draw("same P") if (dxy1 == 10 and dxy2 == 30): eff_SimTrack_pt_10dxy30.Draw("same P") eff_pt_position_10dxy30.Draw("same P") if (dxy1 == 50 and dxy2 == 500): eff_SimTrack_pt_50dxy500.Draw("same P") eff_pt_position_50dxy500.Draw("same P") #text1 = TLatex(28,.308,"ME11 - ME2 in %s < |\eta | < %s"%(etamin, etamax)) #text1.Draw("same") legend = TLegend(0.5,0.141,0.865,0.35) legend.SetFillColor(ROOT.kWhite) legend.SetMargin(0.15) #legend.SetBorderSize(0) #legend.SetFillStyle(0) legend.SetHeader(" %s < |\eta^{GP} at ME11 | < %s"%(etamin, etamax)) if (dxy1 == 0 and dxy2 == 5): legend.AddEntry(eff_pt_position,"Reco p_{T}^{Pos} > %d GeV"%mincut, "p") legend.AddEntry(eff_SimTrack_pt,"Reco p_{T} > %d GeV"%mincut,"p") if (dxy1 == 10 and dxy2 == 30): legend.AddEntry(eff_pt_position_10dxy30,"Reco p_{T}^{Pos} > %d GeV"%mincut, "p") legend.AddEntry(eff_SimTrack_pt_10dxy30,"Reco p_{T} > %d GeV"%mincut,"p") if (dxy1 == 50 and dxy2 == 500): legend.AddEntry(eff_pt_position_50dxy500,"Reco p_{T}^{Pos} > %d GeV"%mincut, "p") legend.AddEntry(eff_SimTrack_pt_50dxy500,"Reco p_{T} > %d GeV"%mincut,"p") legend.Draw("same") kk = k*10 c1.SaveAs("Efficiency_pt_ptposition_csc_%d"%varsh+"_etamin_%.2s_"%kk+"%dsdxy%d_ct%d_recopT%d.pdf"%(dxy1,dxy2,ctau, mincut)) c1.SaveAs("Efficiency_pt_ptposition_csc_%d"%varsh+"_etamin_%.2s_"%kk+"%dsdxy%d_ct%d_recopT%d.png"%(dxy1,dxy2,ctau, mincut)) # Numerator and Denominator for debug only ####################### Numerator ################################# b1.GetYaxis().SetTitle("Numerator") b1.GetXaxis().SetTitle("p_{T} and p_{T}^{Pos} respectively [GeV] ") b1.SetTitle("p_{T} and p_{T}^{Pos} Numerator, ME11- ME2, ct %d mm"%ctau) b1.SetStats(0) b1.GetYaxis().SetRangeUser(0.0,1000.06) b1.Draw() pt_position_numerator_0dxy5.Draw("same P") SimTrack_pt_numerator_0dxy5.Draw("same P") #text1 = TLatex(28,.418,"ME11 - ME2 in %s < |\eta | < %s"%(etamin, etamax)) #text1.Draw("same") legend = TLegend(0.58,.650,0.86,0.88) legend.SetFillColor(ROOT.kWhite) legend.SetMargin(0.15) #legend.SetBorderSize(0) #legend.SetFillStyle(0) legend.SetHeader(" %s < |\eta^{GP} at ME11 | < %s"%(etamin, etamax)) legend.AddEntry(pt_position_numerator_0dxy5,"p_{T}^{Pos}", "p") legend.AddEntry(SimTrack_pt_numerator_0dxy5,"p_{T}","p") legend.Draw("same") c1.SaveAs("Numerator_pt_ptposition_etamin_%.2s.pdf"%kk) c1.SaveAs("Numerator_pt_ptposition_etamin_%.2s.png"%kk) ########################### Denominator ########################### b1.GetYaxis().SetTitle("Denominator") b1.GetXaxis().SetTitle("p_{T} and p_{T}^{Pos} respectively [GeV] ") b1.SetTitle("p_{T} and p_{T}^{Pos} Denominator, ME11- ME2, ct %d mm"%ctau) b1.SetStats(0) b1.GetYaxis().SetRangeUser(0.0,1000.06) b1.Draw() pt_position_denominator_0dxy5.Draw("same P") SimTrack_pt_denominator_0dxy5.Draw("same P") #text1 = TLatex(0.28,0.818,"%s < |\eta | < %s"%(etamin, etamax)) #text1.Draw("same") legend = TLegend(0.58,.650,0.86,0.88) legend.SetFillColor(ROOT.kWhite) legend.SetMargin(0.15) #legend.SetBorderSize(0) #legend.SetFillStyle(0) legend.SetHeader(" %s < |\eta^{GP} at ME11 | < %s"%(etamin, etamax)) legend.AddEntry(pt_position_denominator_0dxy5,"p_{T}^{Pos}", "p") legend.AddEntry(SimTrack_pt_denominator_0dxy5,"p_{T}","p") legend.Draw("same") c1.SaveAs("Denominator_pt_ptposition_etamin_%.2s.pdf"%kk) c1.SaveAs("Denominator_pt_ptposition_etamin_%.2s.png"%kk)
def invertedPurities(datasets): normData, normEWK, normFactorisedData, normFactorisedEWK = normalisation() norm_inc, normEWK_inc = normalisationInclusive() noDphi = [] noDphiEWK = [] Dphi160 = [] DphiEWK160 = [] DphiAll = [] DphiEWKAll = [] DphiAllremovett = [] DphiEWKAllremovett = [] DphiJet1 = [] DphiEWKJet1 = [] DphiJet2 = [] DphiEWKJet2 = [] hmt = [] hmtb = [] hmtv = [] hmtPhiv = [] hmet = [] hmetQCD = [] hmetEWK = [] hjetmet = [] hjetmetphi = [] hMHTJet1phi = [] hmtph = [] hmtphj1 = [] hmtphj2 = [] hmtremovett = [] DphiEWKAllbveto = [] DphiAllbveto = [] purityMet = [] purityErrMet = [] purityMtRemovett = [] purityErrMtRemovett = [] purityMtFirstDeltaPhiCut = [] purityErrMtFirstDeltaPhiCut = [] purityMtThirdDeltaPhiCut = [] purityErrMtThirdDeltaPhiCut = [] purityMtSecondDeltaPhiCut = [] purityErrMtSecondDeltaPhiCut = [] purityMtDeltaPhiCut = [] purityErrMtDeltaPhiCut = [] purityMtAfterBtagging = [] purityErrMtAfterBtagging = [] purityMTInvertedTauIdBvetoDphi = [] purityErrMTInvertedTauIdBvetoDphi = [] ## histograms in bins, normalisation and substraction of EWK contribution ## mt with 2dim deltaPhi cut for ptbin in ptbins: mt_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedSecondDeltaPhiCut" + ptbin) ]) mt_tmp._setLegendStyles() mt_tmp._setLegendLabels() mt_tmp.histoMgr.setHistoDrawStyleAll("P") mt_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mt = mt_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mt.Scale(normData[ptbin]) DphiJet2.append(mt) mtEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedSecondDeltaPhiCut" + ptbin) ]) mtEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtEWK_tmp._setLegendStyles() mtEWK_tmp._setLegendLabels() mtEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtEWK = mtEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() # mtEWK.Scale(normEWK[ptbin]) # mt.Add(mtEWK, -1) # hmt.append(mt) DphiEWKJet2.append(mtEWK) purity = -999 error = -999 if mt.Integral() > 0: purity = (mt.Integral() - mtEWK.Integral()) / mt.Integral() error = sqrt(purity * (1 - purity) / mt.Integral()) purityMtSecondDeltaPhiCut.append(purity) purityErrMtSecondDeltaPhiCut.append(error) # print " pt bin ", ptbin, " purity Mt Second Delta Phi Cut = ",purity, " error ",error ############################################ # mt after b tagging mtb_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtag" + ptbin) ]) mtb_tmp._setLegendStyles() mtb_tmp._setLegendLabels() mtb_tmp.histoMgr.setHistoDrawStyleAll("P") mtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtb = mtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtb.Scale(normData[ptbin]) hmt.append(mtb) noDphi.append(mtb) mtbEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtag" + ptbin) ]) mtbEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtbEWK_tmp._setLegendStyles() mtbEWK_tmp._setLegendLabels() mtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtbEWK = mtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() # mtbEWK.Scale(normEWK[ptbin]) mtb.Add(mtbEWK, -1) hmtb.append(mtb) noDphiEWK.append(mtbEWK) purity = -999 error = -999 if mtb.Integral() > 0: purity = (mtb.Integral() - mtbEWK.Integral()) / 
mtb.Integral() error = sqrt(purity * (1 - purity) / mtb.Integral()) purityMtAfterBtagging.append(purity) purityErrMtAfterBtagging.append(error) # print " pt bin ", ptbin, " purity Mt After Btagging = ",purity, " error ",error ############################################ # mt after deltaPhi cut mtph_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdJetDphi" + ptbin) ]) mtph_tmp._setLegendStyles() mtph_tmp._setLegendLabels() mtph_tmp.histoMgr.setHistoDrawStyleAll("P") mtph_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtph = mtph_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtph.Scale(normData[ptbin]) # hmt.append(mt) Dphi160.append(mtph) mtphEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdJetDphi" + ptbin) ]) mtphEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtphEWK_tmp._setLegendStyles() mtphEWK_tmp._setLegendLabels() mtphEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtphEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphEWK = mtphEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() # mtphEWK.Scale(normEWK[ptbin]) # mtph.Add(mtphEWK, -1) # hmtph.append(mtph) DphiEWK160.append(mtphEWK) purity = -999 error = -999 if mtph.Integral() > 0: purity = (mtph.Integral() - mtphEWK.Integral()) / mtph.Integral() error = sqrt(purity * (1 - purity) / mtph.Integral()) purityMtDeltaPhiCut.append(purity) purityErrMtDeltaPhiCut.append(error) # print " pt bin ", ptbin, " purity Mt DeltaPhi Cut = ",purity, " error ",error ############################################ # mt after deltaphi vs MHTjet1 cut mtphj1_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedFirstDeltaPhiCut" + ptbin) ]) #mtphj1_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("MTInvertedTauIdMet"+ptbin)]) mtphj1_tmp._setLegendStyles() mtphj1_tmp._setLegendLabels() mtphj1_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj1_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphj1 = mtphj1_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtphj1.Scale(normData[ptbin]) DphiJet1.append(mtphj1) mtphj1EWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedFirstDeltaPhiCut" + ptbin) ]) #mtphj1EWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("MTInvertedTauIdMet"+ptbin)]) mtphj1EWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtphj1EWK_tmp._setLegendStyles() mtphj1EWK_tmp._setLegendLabels() mtphj1EWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj1EWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mtphj1EWK = mtphj1EWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() # mtphj1EWK.Scale(normEWK[ptbin]) # mtphj1.Add(mtphj1EWK, -1) # hmtphj1.append(mtphj1) DphiEWKJet1.append(mtphj1EWK) purity = -999 error = -999 if mtphj1.Integral() > 0: purity = (mtphj1.Integral() - mtphj1EWK.Integral()) / mtphj1.Integral() error = sqrt(purity * (1 - purity) / mtphj1.Integral()) purityMtFirstDeltaPhiCut.append(purity) purityErrMtFirstDeltaPhiCut.append(error) # print " pt bin ", ptbin, " purity Mt First Delta Phi Cut = ",purity, " error ",error ############################################ # mt after all cuts mtphj2_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedThirdDeltaPhiCut" + ptbin) ]) mtphj2_tmp._setLegendStyles() mtphj2_tmp._setLegendLabels() 
mtphj2_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphj2 = mtphj2_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtphj2.Scale(normData[ptbin]) # hmt.append(mt) DphiAll.append(mtphj2) mtphj2EWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedThirdDeltaPhiCut" + ptbin) ]) mtphj2EWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtphj2EWK_tmp._setLegendStyles() mtphj2EWK_tmp._setLegendLabels() mtphj2EWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2EWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mtphj2EWK = mtphj2EWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() # mtphj2EWK.Scale(normEWK[ptbin]) # mtphj2.Add(mtphj2EWK, -1) # hmtphj2.append(mtphj2) DphiEWKAll.append(mtphj2EWK) purity = -999 error = -999 if mtphj2.Integral() > 0: purity = (mtphj2.Integral() - mtphj2EWK.Integral()) / mtphj2.Integral() error = sqrt(purity * (1 - purity) / mtphj2.Integral()) purityMtThirdDeltaPhiCut.append(purity) purityErrMtThirdDeltaPhiCut.append(error) # print " pt bin ", ptbin, " purity Mt Third Delta Phi Cut = ",purity, " error ",error ####################### # mt with cut against tt mtremovett_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedAgainstTTCut" + ptbin) ]) mtremovett_tmp._setLegendStyles() mtremovett_tmp._setLegendLabels() mtremovett_tmp.histoMgr.setHistoDrawStyleAll("P") mtremovett_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mtremovett = mtremovett_tmp.histoMgr.getHisto( "Data").getRootHisto().Clone() # mtremovett.Scale(normData[ptbin]) DphiAllremovett.append(mtremovett) mtremovettEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedAgainstTTCut" + ptbin) ]) mtremovettEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtremovettEWK_tmp._setLegendStyles() mtremovettEWK_tmp._setLegendLabels() mtremovettEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtremovettEWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mtremovettEWK = mtremovettEWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() # mtremovettEWK.Scale(normEWK[ptbin]) # mtremovett.Add(mtremovettEWK, -1) # hmtremovett.append(mtremovett) DphiEWKAllremovett.append(mtremovettEWK) purity = -999 error = -999 if mtremovett.Integral() > 0: purity = (mtremovett.Integral() - mtremovettEWK.Integral()) / mtremovett.Integral() error = sqrt(purity * (1 - purity) / mtremovett.Integral()) purityMtRemovett.append(purity) purityErrMtRemovett.append(error) print "mtremovett.Integral() ", mtremovett.Integral( ), " mmtEWK.Integral() ", mtremovettEWK.Integral() ####################### ### MET mmt_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MET_InvertedTauIdJets" + ptbin) ]) mmt_tmp._setLegendStyles() mmt_tmp._setLegendLabels() mmt_tmp.histoMgr.setHistoDrawStyleAll("P") mmt_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmt = mmt_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() ## mmt.Scale(normData[ptbin]) hmet.append(mmt) mmtEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MET_InvertedTauIdJets" + ptbin) ]) mmtEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtEWK_tmp._setLegendStyles() mmtEWK_tmp._setLegendLabels() mmtEWK_tmp.histoMgr.setHistoDrawStyleAll("P") 
mmtEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtEWK = mmtEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() ## mmtEWK.Scale(normEWK[ptbin]) mmt.Add(mmtEWK, -1) hmetQCD.append(mmt) hmetEWK.append(mmtEWK) purity = -999 error = -999 if mmt.Integral() > 0: purity = (mmt.Integral() - mmtEWK.Integral()) / mmt.Integral() error = sqrt(purity * (1 - purity) / mmt.Integral()) purityMet.append(purity) purityErrMet.append(error) ## print "mmt.Integral() ",mmt.Integral(), " mmtEWK.Integral() ", mmtEWK.Integral() ############################################ # mt after all cuts mtphj2_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBvetoDphi" + ptbin) ]) mtphj2_tmp._setLegendStyles() mtphj2_tmp._setLegendLabels() mtphj2_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphj2 = mtphj2_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtphj2.Scale(normData[ptbin]) # hmt.append(mt) DphiAllbveto.append(mtphj2) mtphj2EWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBvetoDphi" + ptbin) ]) mtphj2EWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mtphj2EWK_tmp._setLegendStyles() mtphj2EWK_tmp._setLegendLabels() mtphj2EWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2EWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mtphj2EWK = mtphj2EWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() # mtphj2EWK.Scale(normEWK[ptbin]) # mtphj2.Add(mtphj2EWK, -1) # hmtphj2.append(mtphj2) DphiEWKAllbveto.append(mtphj2EWK) purity = -999 error = -999 if mtphj2.Integral() > 0: purity = (mtphj2.Integral() - mtphj2EWK.Integral()) / mtphj2.Integral() error = sqrt(purity * (1 - purity) / mtphj2.Integral()) purityMTInvertedTauIdBvetoDphi.append(purity) purityErrMTInvertedTauIdBvetoDphi.append(error) # print " pt bin ", ptbin, " purity Mt Third Delta Phi Cut = ",purity, " error ",error print " " print " purity met = ", purityMet, " error ", purityErrMet print " purity Mt no DeltaPhi Cuts = ", purityMtAfterBtagging, " error ", purityErrMtAfterBtagging print " purity Mt DeltaPhi160 Cut = ", purityMtDeltaPhiCut, " error ", purityErrMtDeltaPhiCut print " purity Mt First DeltaPhi Cut = ", purityMtFirstDeltaPhiCut, " error ", purityErrMtFirstDeltaPhiCut print " purity Mt Second DeltaPhi Cut = ", purityMtSecondDeltaPhiCut, " error ", purityErrMtSecondDeltaPhiCut print " purity Mt Third Delta Phi Cut = ", purityMtThirdDeltaPhiCut, " error ", purityErrMtThirdDeltaPhiCut print " purity Mt b veto deltaPhi Cuts = ", purityMTInvertedTauIdBvetoDphi, " error ", purityErrMTInvertedTauIdBvetoDphi print " purity Mt Remove tt = ", purityMtRemovett, " error ", purityErrMtRemovett invertedQCD = InvertedTauID() invertedQCD.setLumi(datasets.getDataset("Data").getLuminosity()) ### Met met = hmet[0].Clone("met") met.SetName("MET") met.SetTitle("Inverted tau Met") met.Reset() for histo in hmet: met.Add(histo) metQCD = hmetQCD[0].Clone("met") metQCD.SetName("MET") metQCD.SetTitle("Inverted tau Met") metQCD.Reset() for histo in hmetQCD: metQCD.Add(histo) metEWK = hmetEWK[0].Clone("metewk") metEWK.SetName("METewk") metEWK.SetTitle("Inverted tau Met") metEWK.Reset() for histo in hmetEWK: metEWK.Add(histo) ### Mt no DeltaPhi Cuts mtNoDphi = noDphi[0].Clone("mt") mtNoDphi.SetName("mt") mtNoDphi.SetTitle("Inverted tau Mt") mtNoDphi.Reset() for histo in noDphi: mtNoDphi.Add(histo) mtNoDphiEWK = noDphiEWK[0].Clone("mtewk") 
mtNoDphiEWK.SetName("MTewk") mtNoDphiEWK.SetTitle("Inverted tau Met") mtNoDphiEWK.Reset() for histo in noDphiEWK: mtNoDphiEWK.Add(histo) ### Mt DeltaPhi < 160 Cut mtDphi160 = Dphi160[0].Clone("mt") mtDphi160.SetName("mt") mtDphi160.SetTitle("Inverted tau Mt") mtDphi160.Reset() for histo in Dphi160: mtDphi160.Add(histo) mtDphi160EWK = DphiEWK160[0].Clone("mtewk") mtDphi160EWK.SetName("MTewk") mtDphi160EWK.SetTitle("Inverted tau Met") mtDphi160EWK.Reset() for histo in DphiEWK160: mtDphi160EWK.Add(histo) ### Mt Mt all dphi cuts mtDphiAll = DphiAll[0].Clone("mt") mtDphiAll.SetName("mt") mtDphiAll.SetTitle("Inverted tau Mt") mtDphiAll.Reset() for histo in DphiAll: mtDphiAll.Add(histo) mtDphiAllEWK = DphiEWKAll[0].Clone("mtewk") mtDphiAllEWK.SetName("MTewk") mtDphiAllEWK.SetTitle("Inverted tau Met") mtDphiAllEWK.Reset() for histo in DphiEWKAll: mtDphiAllEWK.Add(histo) ### Mt bveto all dphi cuts mtDphiAllbveto = DphiAllbveto[0].Clone("mt") mtDphiAllbveto.SetName("mt") mtDphiAllbveto.SetTitle("Inverted tau Mt") mtDphiAllbveto.Reset() for histo in DphiAllbveto: mtDphiAllbveto.Add(histo) mtDphiAllEWKbveto = DphiEWKAllbveto[0].Clone("mtewk") mtDphiAllEWKbveto.SetName("MTewk") mtDphiAllEWKbveto.SetTitle("Inverted tau Met") mtDphiAllEWKbveto.Reset() for histo in DphiEWKAllbveto: mtDphiAllEWKbveto.Add(histo) ### Mt Mt dphi jet1 mtDphiJet1 = DphiJet1[0].Clone("mt") mtDphiJet1.SetName("mt") mtDphiJet1.SetTitle("Inverted tau Mt") mtDphiJet1.Reset() for histo in DphiJet1: mtDphiJet1.Add(histo) mtDphiEWKJet1 = DphiEWKJet1[0].Clone("mtewk") mtDphiEWKJet1.SetName("MTewk") mtDphiEWKJet1.SetTitle("Inverted tau Met") mtDphiEWKJet1.Reset() for histo in DphiEWKJet1: mtDphiEWKJet1.Add(histo) ### Mt Mt dphi jet2 mtDphiJet2 = DphiJet2[0].Clone("mt") mtDphiJet2.SetName("mt") mtDphiJet2.SetTitle("Inverted tau Mt") mtDphiJet2.Reset() for histo in DphiJet2: mtDphiJet2.Add(histo) mtDphiEWKJet2 = DphiEWKJet2[0].Clone("mtewk") mtDphiEWKJet2.SetName("MTewk") mtDphiEWKJet2.SetTitle("Inverted tau Met") mtDphiEWKJet2.Reset() for histo in DphiEWKJet2: mtDphiEWKJet2.Add(histo) ### Mt all dphi + tt cuts mtDphiAllremovett = DphiAllremovett[0].Clone("mt") mtDphiAllremovett.SetName("mt") mtDphiAllremovett.SetTitle("Inverted tau Mt") mtDphiAllremovett.Reset() for histo in DphiAllremovett: mtDphiAllremovett.Add(histo) mtDphiAllremovettEWK = DphiEWKAllremovett[0].Clone("mtewk") mtDphiAllremovettEWK.SetName("MTewk") mtDphiAllremovettEWK.SetTitle("Inverted tau Met") mtDphiAllremovettEWK.Reset() for histo in DphiEWKAllremovett: mtDphiAllremovettEWK.Add(histo) ########################################## # met purity metqcd = metQCD.Clone("metqcd") metinv = met.Clone("met") invertedQCD.setLabel("MetPurity") # invertedQCD.mtComparison(metqcd, metqcd,"MetPurity") ########################################## # mt purity no deltaPhi mtQCD = mtNoDphi.Clone("QCD") mtQCD.Add(mtNoDphiEWK, -1) mtQCD.Divide(mtNoDphi) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtNoDeltaPhiCuts") invertedQCD.mtComparison(mtQCD, mtQCD, "MtNoDeltaPhiCuts") ########################################## # mt purity deltaPhi 160 mtQCD = mtDphi160.Clone("QCD") mtQCD.Add(mtDphi160EWK, -1) mtQCD.Divide(mtDphi160) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhi160") invertedQCD.mtComparison(mtQCD, mtQCD, "MtDeltaPhi160") ########################################## # mt purity all deltaPhi cuts mtQCD = mtDphiAll.Clone("QCD") mtQCD.Add(mtDphiAllEWK, -1) mtQCD.Divide(mtDphiAll) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtAllDeltaPhiCuts") 
invertedQCD.mtComparison(mtQCD, mtQCD, "MtAllDeltaPhiCuts") ########################################## # mt bveto purity all deltaPhi cuts ## test invertedQCD.setLabel("testMtbveto") # invertedQCD.mtComparison(mtDphiAllbveto, mtDphiAllbveto,"testMtbveto") invertedQCD.setLabel("testEWKMtbveto") # invertedQCD.mtComparison(mtDphiAllEWKbveto, mtDphiAllEWKbveto,"testEWKMtbveto") mtQCD = mtDphiAllbveto.Clone("QCD") mtQCD.Add(mtDphiAllEWKbveto, -1) mtQCD.Divide(mtDphiAllbveto) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtbvetoAllDeltaPhiCuts") invertedQCD.mtComparison(mtQCD, mtQCD, "MtbvetoAllDeltaPhiCuts") ########################################## # mt purity jet1 deltaPhi cuts mtQCD = mtDphiJet1.Clone("QCD") mtQCD.Add(mtDphiEWKJet1, -1) mtQCD.Divide(mtDphiJet1) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiJet1Cuts") invertedQCD.mtComparison(mtQCD, mtQCD, "MtDeltaPhiJet1Cuts") ########################################## # mt purity jet2 deltaPhi cuts mtQCD = mtDphiJet2.Clone("QCD") mtQCD.Add(mtDphiEWKJet2, -1) mtQCD.Divide(mtDphiJet2) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiJet2Cuts") invertedQCD.mtComparison(mtQCD, mtQCD, "MtDeltaPhiJet2Cuts") ########################################## # mt purity all deltaPhi cuts and against tt cut mtQCD = mtDphiAllremovett.Clone("QCD") mtQCD.Add(mtDphiAllremovettEWK, -1) mtQCD.Divide(mtDphiAllremovett) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiAndAgainsttt") invertedQCD.mtComparison(mtQCD, mtQCD, "MtDeltaPhiAndAgainsttt") ################################################# ## purities as a function of pt tau jet ### Create and customise TGraph cEff = TCanvas("MetPurity", "MetPurity", 1) cEff.cd() ptbin_error = array.array("d", [5, 5, 5, 5, 10, 10, 30]) ptbin = array.array("d", [45, 55, 65, 75, 90, 110, 150]) graph = TGraphErrors(7, ptbin, array.array("d", purityMTInvertedTauIdBvetoDphi), ptbin_error, array.array("d", purityErrMTInvertedTauIdBvetoDphi)) graph.SetMaximum(1.0) graph.SetMinimum(0.6) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV/c]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.35, 0.35, "B-tagging factorisation") tex1.SetNDC() tex1.SetTextSize(25) tex1.Draw() tex2 = ROOT.TLatex(0.35, 0.27, "#Delta#phi cuts") tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMTInvertedTauIdBvetoDphiBins.png") graph = TGraphErrors(7, ptbin, array.array("d", purityMet), ptbin_error, array.array("d", purityErrMet)) graph.SetMaximum(1.1) graph.SetMinimum(0.8) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV/c]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.35, 0.35, "Inverted #tau jet isolation") tex1.SetNDC() tex1.SetTextSize(20) tex1.Draw() tex2 = ROOT.TLatex(0.35, 0.3, "at least 3 jets") tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMetPtBins.png") ## Mt without deltaPhi cuts cEff = TCanvas("MtNoDeltaPhiCutsPurity", "MtNoDeltaPhiCutsPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, 
array.array("d", purityMtAfterBtagging), ptbin_error, array.array("d", purityErrMtAfterBtagging)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.5, 0.38, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.5, 0.3, "no #Delta#phi cuts") tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtNoDeltaPhiCutsBins.png") ## Mt without deltaPhi cuts cEff = TCanvas("MtDeltaPhi160Purity", "MtDeltaPhi160Purity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", purityMtDeltaPhiCut), ptbin_error, array.array("d", purityErrMtDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.5, 0.35, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(24) tex1.Draw() tex2 = ROOT.TLatex(0.5, 0.25, "#Delta#phi(#tau jet,MET) < 160^{o}") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtDeltaPhi160Bins.png") ## Mt 1st deltaPhi cut cEff = TCanvas("MtFirstDeltaCutPurity", "MtFirstDeltaPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", purityMtFirstDeltaPhiCut), ptbin_error, array.array("d", purityErrMtFirstDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.4, 0.4, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(24) tex1.Draw() tex2 = ROOT.TLatex(0.3, 0.3, "#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1,MET) cut") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtFirstDeltaPhiCutBins.png") ## Mt 2nd deltaPhi cut cEff = TCanvas("MtSecondDeltaCutPurity", "MtFirstSecondPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", purityMtSecondDeltaPhiCut), ptbin_error, array.array("d", purityErrMtSecondDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex( 0.2, 0.8, "#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2,MET) cuts") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtSecondDeltaPhiCutBins.png") ## Mt 2nd deltaPhi cut cEff = 
TCanvas("MtThirdDeltaCutPurity", "MtThirdDeltaPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", purityMtThirdDeltaPhiCut), ptbin_error, array.array("d", purityErrMtThirdDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex( 0.2, 0.8, "#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2/3,MET) cuts") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtThirdDeltaPhiCutBins.png") ## Mt deltaPhi cuts and against tt cEff = TCanvas("MtMtRemovettPurity", "MtMtRemovettPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", purityMtRemovett), ptbin_error, array.array("d", purityErrMtRemovett)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts ") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex( 0.2, 0.8, "#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2/3,MET) cuts") tex2.SetNDC() tex2.SetTextSize(22) tex2.Draw() tex3 = ROOT.TLatex(0.2, 0.72, "#Delta#phi cut against tt+jets") tex3.SetNDC() tex3.SetTextSize(22) tex3.Draw() cEff.Update() cEff.SaveAs("purityMtAgainstttCutBins.png")
import array

# Setup TMVA
TMVA.Tools.Instance()
TMVA.PyMethodBase.PyInitialize()
reader = TMVA.Reader("Color:!Silent")

data = ROOT.TChain("dat", "")
data.Add("./data.root/dat")

os = ROOT.TFile("output_selection.root", "RECREATE")
sel = TTree('Selection', 'Selection outputs')

branches = {}
for branch in data.GetListOfBranches():
    branchName = branch.GetName()
    if (branchName != 'a' and branchName != 'b'):
        branches[branchName] = array.array('f', [0])
        reader.AddVariable(branchName, branches[branchName])
        sel.Branch(branchName, branches[branchName], branchName + "/F")

# Book methods
reader.BookMVA(
    'PyKeras',
    TString('dataset/weights/TMVAClassification_PyKeras.weights.xml'))

# Print some example classifications
print reader.EvaluateMVA('PyKeras')
print('Some signal example classifications:')
a = data.GetEntries()
for i in range(20):
    data.GetEntry(i)
    ev = reader.EvaluateMVA('PyKeras')
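# A hedged continuation sketch (not part of the script above): to actually fill the
# 'Selection' tree, the chain's branches have to be bound to the same float buffers the
# reader evaluates from; the 0.5 working point below is purely illustrative.
for branchName in branches:
    data.SetBranchAddress(branchName, branches[branchName])
for i in range(data.GetEntries()):
    data.GetEntry(i)
    if reader.EvaluateMVA('PyKeras') > 0.5:
        sel.Fill()
os.Write()
os.Close()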
mmtEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() ## mmtEWK.Scale(normEWK[ptbin]) mmt.Add(mmtEWK, -1) hmetQCD.append(mmt) hmetEWK.append(mmtEWK) purity = -999 error = -999 if mmt.Integral() > 0: purity = (mmt.Integral() - mmtEWK.Integral())/ mmt.Integral() error = sqrt(purity*(1-purity)/mmt.Integral()) purityMet.append(purity) purityErrMet.append(error) ## print "mmt.Integral() ",mmt.Integral(), " mmtEWK.Integral() ", mmtEWK.Integral() ############################################ # mt after all cuts mtphj2_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MTInvertedTauIdBvetoDphi"+ptbin)]) mtphj2_tmp._setLegendStyles() mtphj2_tmp._setLegendLabels() mtphj2_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphj2 = mtphj2_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() # mtphj2.Scale(normData[ptbin]) # hmt.append(mt) DphiAllbveto.append(mtphj2) mtphj2EWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MTInvertedTauIdBvetoDphi"+ptbin)]) mtphj2EWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mtphj2EWK_tmp._setLegendStyles() mtphj2EWK_tmp._setLegendLabels() mtphj2EWK_tmp.histoMgr.setHistoDrawStyleAll("P") mtphj2EWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mtphj2EWK = mtphj2EWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() # mtphj2EWK.Scale(normEWK[ptbin]) # mtphj2.Add(mtphj2EWK, -1) # hmtphj2.append(mtphj2) DphiEWKAllbveto.append(mtphj2EWK) purity = -999 error = -999 if mtphj2.Integral() > 0: purity = (mtphj2.Integral() - mtphj2EWK.Integral())/ mtphj2.Integral() error = sqrt(purity*(1-purity)/mtphj2.Integral()) purityMTInvertedTauIdBvetoDphi.append(purity) purityErrMTInvertedTauIdBvetoDphi.append(error) # print " pt bin ", ptbin, " purity Mt Third Delta Phi Cut = ",purity, " error ",error print " " print " purity met = ",purityMet, " error ",purityErrMet print " purity Mt no DeltaPhi Cuts = ",purityMtAfterBtagging, " error ",purityErrMtAfterBtagging print " purity Mt DeltaPhi160 Cut = ",purityMtDeltaPhiCut, " error ",purityErrMtDeltaPhiCut print " purity Mt First DeltaPhi Cut = ",purityMtFirstDeltaPhiCut, " error ",purityErrMtFirstDeltaPhiCut print " purity Mt Second DeltaPhi Cut = ",purityMtSecondDeltaPhiCut, " error ",purityErrMtSecondDeltaPhiCut print " purity Mt Third Delta Phi Cut = ",purityMtThirdDeltaPhiCut, " error ",purityErrMtThirdDeltaPhiCut print " purity Mt b veto deltaPhi Cuts = ",purityMTInvertedTauIdBvetoDphi, " error ",purityErrMTInvertedTauIdBvetoDphi print " purity Mt Remove tt = ",purityMtRemovett, " error ",purityErrMtRemovett invertedQCD = InvertedTauID() invertedQCD.setLumi(datasets.getDataset("Data").getLuminosity()) ### Met met = hmet[0].Clone("met") met.SetName("MET") met.SetTitle("Inverted tau Met") met.Reset() for histo in hmet: met.Add(histo) metQCD = hmetQCD[0].Clone("met") metQCD.SetName("MET") metQCD.SetTitle("Inverted tau Met") metQCD.Reset() for histo in hmetQCD: metQCD.Add(histo) metEWK = hmetEWK[0].Clone("metewk") metEWK.SetName("METewk") metEWK.SetTitle("Inverted tau Met") metEWK.Reset() for histo in hmetEWK: metEWK.Add(histo) ### Mt no DeltaPhi Cuts mtNoDphi = noDphi[0].Clone("mt") mtNoDphi.SetName("mt") mtNoDphi.SetTitle("Inverted tau Mt") mtNoDphi.Reset() for histo in noDphi: mtNoDphi.Add(histo) mtNoDphiEWK = noDphiEWK[0].Clone("mtewk") mtNoDphiEWK.SetName("MTewk") mtNoDphiEWK.SetTitle("Inverted tau Met") mtNoDphiEWK.Reset() for histo in noDphiEWK: 
mtNoDphiEWK.Add(histo) ### Mt DeltaPhi < 160 Cut mtDphi160 = Dphi160[0].Clone("mt") mtDphi160.SetName("mt") mtDphi160.SetTitle("Inverted tau Mt") mtDphi160.Reset() for histo in Dphi160: mtDphi160.Add(histo) mtDphi160EWK = DphiEWK160[0].Clone("mtewk") mtDphi160EWK.SetName("MTewk") mtDphi160EWK.SetTitle("Inverted tau Met") mtDphi160EWK.Reset() for histo in DphiEWK160: mtDphi160EWK.Add(histo) ### Mt Mt all dphi cuts mtDphiAll = DphiAll[0].Clone("mt") mtDphiAll.SetName("mt") mtDphiAll.SetTitle("Inverted tau Mt") mtDphiAll.Reset() for histo in DphiAll: mtDphiAll.Add(histo) mtDphiAllEWK = DphiEWKAll[0].Clone("mtewk") mtDphiAllEWK.SetName("MTewk") mtDphiAllEWK.SetTitle("Inverted tau Met") mtDphiAllEWK.Reset() for histo in DphiEWKAll: mtDphiAllEWK.Add(histo) ### Mt bveto all dphi cuts mtDphiAllbveto = DphiAllbveto[0].Clone("mt") mtDphiAllbveto.SetName("mt") mtDphiAllbveto.SetTitle("Inverted tau Mt") mtDphiAllbveto.Reset() for histo in DphiAllbveto: mtDphiAllbveto.Add(histo) mtDphiAllEWKbveto = DphiEWKAllbveto[0].Clone("mtewk") mtDphiAllEWKbveto.SetName("MTewk") mtDphiAllEWKbveto.SetTitle("Inverted tau Met") mtDphiAllEWKbveto.Reset() for histo in DphiEWKAllbveto: mtDphiAllEWKbveto.Add(histo) ### Mt Mt dphi jet1 mtDphiJet1 = DphiJet1[0].Clone("mt") mtDphiJet1.SetName("mt") mtDphiJet1.SetTitle("Inverted tau Mt") mtDphiJet1.Reset() for histo in DphiJet1: mtDphiJet1.Add(histo) mtDphiEWKJet1 = DphiEWKJet1[0].Clone("mtewk") mtDphiEWKJet1.SetName("MTewk") mtDphiEWKJet1.SetTitle("Inverted tau Met") mtDphiEWKJet1.Reset() for histo in DphiEWKJet1: mtDphiEWKJet1.Add(histo) ### Mt Mt dphi jet2 mtDphiJet2 = DphiJet2[0].Clone("mt") mtDphiJet2.SetName("mt") mtDphiJet2.SetTitle("Inverted tau Mt") mtDphiJet2.Reset() for histo in DphiJet2: mtDphiJet2.Add(histo) mtDphiEWKJet2 = DphiEWKJet2[0].Clone("mtewk") mtDphiEWKJet2.SetName("MTewk") mtDphiEWKJet2.SetTitle("Inverted tau Met") mtDphiEWKJet2.Reset() for histo in DphiEWKJet2: mtDphiEWKJet2.Add(histo) ### Mt all dphi + tt cuts mtDphiAllremovett = DphiAllremovett[0].Clone("mt") mtDphiAllremovett.SetName("mt") mtDphiAllremovett.SetTitle("Inverted tau Mt") mtDphiAllremovett.Reset() for histo in DphiAllremovett: mtDphiAllremovett.Add(histo) mtDphiAllremovettEWK = DphiEWKAllremovett[0].Clone("mtewk") mtDphiAllremovettEWK.SetName("MTewk") mtDphiAllremovettEWK.SetTitle("Inverted tau Met") mtDphiAllremovettEWK.Reset() for histo in DphiEWKAllremovett: mtDphiAllremovettEWK.Add(histo) ########################################## # met purity metqcd = metQCD.Clone("metqcd") metinv = met.Clone("met") invertedQCD.setLabel("MetPurity") # invertedQCD.mtComparison(metqcd, metqcd,"MetPurity") ########################################## # mt purity no deltaPhi mtQCD = mtNoDphi.Clone("QCD") mtQCD.Add(mtNoDphiEWK,-1) mtQCD.Divide(mtNoDphi) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtNoDeltaPhiCuts") invertedQCD.mtComparison(mtQCD, mtQCD,"MtNoDeltaPhiCuts") ########################################## # mt purity deltaPhi 160 mtQCD = mtDphi160.Clone("QCD") mtQCD.Add(mtDphi160EWK,-1) mtQCD.Divide(mtDphi160) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhi160") invertedQCD.mtComparison(mtQCD, mtQCD,"MtDeltaPhi160") ########################################## # mt purity all deltaPhi cuts mtQCD = mtDphiAll.Clone("QCD") mtQCD.Add(mtDphiAllEWK,-1) mtQCD.Divide(mtDphiAll) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtAllDeltaPhiCuts") invertedQCD.mtComparison(mtQCD, mtQCD,"MtAllDeltaPhiCuts") ########################################## # mt bveto purity all deltaPhi cuts 
## test invertedQCD.setLabel("testMtbveto") # invertedQCD.mtComparison(mtDphiAllbveto, mtDphiAllbveto,"testMtbveto") invertedQCD.setLabel("testEWKMtbveto") # invertedQCD.mtComparison(mtDphiAllEWKbveto, mtDphiAllEWKbveto,"testEWKMtbveto") mtQCD = mtDphiAllbveto.Clone("QCD") mtQCD.Add(mtDphiAllEWKbveto,-1) mtQCD.Divide(mtDphiAllbveto) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtbvetoAllDeltaPhiCuts") invertedQCD.mtComparison(mtQCD, mtQCD,"MtbvetoAllDeltaPhiCuts") ########################################## # mt purity jet1 deltaPhi cuts mtQCD = mtDphiJet1.Clone("QCD") mtQCD.Add(mtDphiEWKJet1,-1) mtQCD.Divide(mtDphiJet1) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiJet1Cuts") invertedQCD.mtComparison(mtQCD, mtQCD,"MtDeltaPhiJet1Cuts") ########################################## # mt purity jet2 deltaPhi cuts mtQCD = mtDphiJet2.Clone("QCD") mtQCD.Add(mtDphiEWKJet2,-1) mtQCD.Divide(mtDphiJet2) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiJet2Cuts") invertedQCD.mtComparison(mtQCD, mtQCD,"MtDeltaPhiJet2Cuts") ########################################## # mt purity all deltaPhi cuts and against tt cut mtQCD = mtDphiAllremovett.Clone("QCD") mtQCD.Add(mtDphiAllremovettEWK,-1) mtQCD.Divide(mtDphiAllremovett) mtqcd = mtQCD.Clone("mtqcd") invertedQCD.setLabel("MtDeltaPhiAndAgainsttt") invertedQCD.mtComparison(mtQCD, mtQCD,"MtDeltaPhiAndAgainsttt") ################################################# ## purities as a function of pt tau jet ### Create and customise TGraph cEff = TCanvas ("MetPurity", "MetPurity", 1) cEff.cd() ptbin_error = array.array("d",[5, 5, 5, 5, 10, 10 ,30]) ptbin = array.array("d",[45, 55, 65, 75, 90, 110 ,150]) graph = TGraphErrors(7, ptbin, array.array("d",purityMTInvertedTauIdBvetoDphi),ptbin_error,array.array("d",purityErrMTInvertedTauIdBvetoDphi)) graph.SetMaximum(1.0) graph.SetMinimum(0.6) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV/c]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.35,0.35,"B-tagging factorisation") tex1.SetNDC() tex1.SetTextSize(25) tex1.Draw() tex2 = ROOT.TLatex(0.35,0.27,"#Delta#phi cuts" ) tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMTInvertedTauIdBvetoDphiBins.png") graph = TGraphErrors(7, ptbin, array.array("d",purityMet),ptbin_error,array.array("d",purityErrMet)) graph.SetMaximum(1.1) graph.SetMinimum(0.8) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV/c]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.35,0.35,"Inverted #tau jet isolation") tex1.SetNDC() tex1.SetTextSize(20) tex1.Draw() tex2 = ROOT.TLatex(0.35,0.3,"at least 3 jets" ) tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMetPtBins.png") ## Mt without deltaPhi cuts cEff = TCanvas ("MtNoDeltaPhiCutsPurity", "MtNoDeltaPhiCutsPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",purityMtAfterBtagging),ptbin_error,array.array("d",purityErrMtAfterBtagging)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) 
graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.5,0.38,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.5,0.3,"no #Delta#phi cuts" ) tex2.SetNDC() tex2.SetTextSize(25) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtNoDeltaPhiCutsBins.png") ## Mt without deltaPhi cuts cEff = TCanvas ("MtDeltaPhi160Purity", "MtDeltaPhi160Purity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",purityMtDeltaPhiCut),ptbin_error,array.array("d",purityErrMtDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.5,0.35,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(24) tex1.Draw() tex2 = ROOT.TLatex(0.5,0.25,"#Delta#phi(#tau jet,MET) < 160^{o}" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtDeltaPhi160Bins.png") ## Mt 1st deltaPhi cut cEff = TCanvas ("MtFirstDeltaCutPurity", "MtFirstDeltaPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",purityMtFirstDeltaPhiCut),ptbin_error,array.array("d",purityErrMtFirstDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.4,0.4,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(24) tex1.Draw() tex2 = ROOT.TLatex(0.3,0.3,"#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1,MET) cut" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtFirstDeltaPhiCutBins.png") ## Mt 2nd deltaPhi cut cEff = TCanvas ("MtSecondDeltaCutPurity", "MtFirstSecondPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",purityMtSecondDeltaPhiCut),ptbin_error,array.array("d",purityErrMtSecondDeltaPhiCut)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.2,0.8,"#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2,MET) cuts" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtSecondDeltaPhiCutBins.png") ## Mt 2nd deltaPhi cut cEff = TCanvas ("MtThirdDeltaCutPurity", "MtThirdDeltaPhiCutPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, 
array.array("d",purityMtThirdDeltaPhiCut),ptbin_error,array.array("d",purityErrMtThirdDeltaPhiCut )) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.2,0.8,"#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2/3,MET) cuts" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("purityMtThirdDeltaPhiCutBins.png") ## Mt deltaPhi cuts and against tt cEff = TCanvas ("MtMtRemovettPurity", "MtMtRemovettPurity", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",purityMtRemovett),ptbin_error,array.array("d",purityErrMtRemovett)) graph.SetMaximum(1.0) graph.SetMinimum(0.4) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("QCD purity") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 12.2 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88, "All selection cuts ") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.2,0.8,"#Delta#phi(#tau jet,MET) vs #Delta#phi(jet1/2/3,MET) cuts" ) tex2.SetNDC() tex2.SetTextSize(22) tex2.Draw() tex3 = ROOT.TLatex(0.2,0.72,"#Delta#phi cut against tt+jets" ) tex3.SetNDC() tex3.SetTextSize(22) tex3.Draw() cEff.Update() cEff.SaveAs("purityMtAgainstttCutBins.png")
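# Every selection stage above repeats the same purity computation:
# purity = (N_data - N_EWK) / N_data with binomial error
# sqrt(purity * (1 - purity) / N_data). A small helper like the sketch below
# (hypothetical, not part of the original module) factors that out and keeps
# the (-999, -999) sentinel convention used in invertedPurities().
from math import sqrt

def purityAndError(hData, hEWK):
    nData = hData.Integral()
    if nData <= 0:
        return -999, -999
    purity = (nData - hEWK.Integral()) / nData
    error = sqrt(purity * (1 - purity) / nData)
    return purity, error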
def biasScan(): biasScanResultsDir = os.path.join(biasScanDir, args.outDirName) combineoutputPathDir = os.path.join(combineoutputDir, args.outDirName) workspacesDir = os.path.join(datacardsworkspacesDir, args.workspacesDirName) mkdir(biasScanResultsDir) hMeans = {} hMedians = {} for category in categoriesToUse: if names2RepsToUse[category] in args.categoriesToSkip: continue # extract the multi pdf workspaceFileName = "workspace__{category}__{signalModel}.root".format( category=names2RepsToUse[category], signalModel=args.signalModel) refWorkspaceFile = R.TFile(os.path.join(workspacesDir, workspaceFileName)) higgsWorkspace = refWorkspaceFile.Get("higgs") multipdf = higgsWorkspace.pdf("multipdf_{category}".format( category=names2RepsToUse[category])) for massPoint in args.massPoints: hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)] = R.TH2D("Means_{category}_{mass}".format(category=category, mass=massPoint), "Means", multipdf.getNumPdfs()+1, 0, multipdf.getNumPdfs()+1, multipdf.getNumPdfs()+1, 0, multipdf.getNumPdfs()+1) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)] = R.TH2D("Madians_{category}_{mass}".format(category=category, mass=massPoint), "Medians", multipdf.getNumPdfs()+1, 0, multipdf.getNumPdfs()+1, multipdf.getNumPdfs()+1, 0, multipdf.getNumPdfs()+1) for iref in range(multipdf.getNumPdfs()): refPdfName = multipdf.getPdf(iref).GetName() q = False for xxx in args.modelsToSkip: if xxx==refPdfName.split("_")[0]: q = True if q==True: continue for icurrent in range(multipdf.getNumPdfs()): fitPdfName = multipdf.getPdf(icurrent).GetName() q = False for xxx in args.modelsToSkip: if xxx==fitPdfName.split("_")[0]: q = True if q==True: continue canvas = R.TCanvas("c1", "c1", 1000, 600) try: fileName = "mlfit{category}__{mass}__{iref}__{icurrent}__{signalModel}.root".format(category=names2RepsToUse[category], mass=massPoint, iref=iref, icurrent=icurrent, signalModel=args.signalModel) f = R.TFile(os.path.join(combineoutputPathDir, fileName)) tree = f.Get("tree_fit_sb") tree.Draw("(mu-1)/muErr>>h(500, -5,5)") # get the histogram and perform some manipulations hist = R.gFile.Get("h") import array probs = array.array("d", [0.5]) quantiles = array.array("d", [0]) hist.GetQuantiles(1, quantiles, probs) latex = R.TLatex() latex.SetNDC() latex.SetTextSize(0.02) latex.SetTextAlign(13) # align at top latex.SetTextSize(0.03) latex.DrawLatex(0.2, 0.8, "Median = " + str(quantiles[0])) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].Fill(iref, icurrent, hist.GetMean()*100) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetBinLabel(iref+1, refPdfName) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetBinLabel(icurrent+1, fitPdfName) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].Fill(iref, icurrent, quantiles[0]*100) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetBinLabel(iref+1, refPdfName) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetBinLabel(icurrent+1, fitPdfName) cfileName = "pull__{category}__{mass}__{iref}__{icurrent}__{signalModel}.png".format(category=names2RepsToUse[category], mass=massPoint, iref=iref, icurrent=icurrent, signalModel=args.signalModel) canvas.SaveAs(os.path.join(biasScanResultsDir, cfileName)) except Exception as exc: print "There was a problem with file: {file}\n".format(file=fileName) print 
type(exc) print exc.args print exc finally: f.Close() # plot the 2D hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].SetTitle("Mean (#mu_{fit} - #mu_{0})/#sigma #mu_{fit} (%%), %s, %d GeV" % (category, massPoint)) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].SetStats(0) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetTitle("Fit Model") hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetTitle("Reference Model") # hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetRangeUser(0,5) # hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetRangeUser(0,5) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].GetZaxis().SetRangeUser(-100,+100) hMeans["Mean_{category}_{mass}".format(category=category, mass=massPoint)].Draw("COLZTEXT") canvas.SaveAs(os.path.join(biasScanResultsDir, "pullMeans2D__{category}__{mass}__{signalModel}.png".format( category=names2RepsToUse[category], mass=massPoint, signalModel=args.signalModel))) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].SetTitle("Median (#mu_{fit} - #mu_{0})/#sigma #mu_{fit} (%%), %s, %d GeV" % (category, massPoint)) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].SetStats(0) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetTitle("Fit Model") hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetTitle("Reference Model") # hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetXaxis().SetRangeUser(0,5) # hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetYaxis().SetRangeUser(0,5) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].GetZaxis().SetRangeUser(-100,+100) hMedians["Median_{category}_{mass}".format(category=category, mass=massPoint)].Draw("COLZTEXT") canvas.SaveAs(os.path.join(biasScanResultsDir, "pullMedians2D__{category}__{mass}__{signalModel}.png".format( category=names2RepsToUse[category], mass=massPoint, signalModel=args.signalModel)))
import array

import wx


def EmptyBitmap(width, height, value=255):
    """Return an empty wx.Bitmap of a specific width and height.

    b = EmptyBitmap(width, height, value=255)

    `value` is the grey level (0-255) used for every channel of every pixel.
    """
    data = array.array('B', [value] * 3 * width * height)
    return wx.BitmapFromBuffer(width, height, data)
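# A minimal usage sketch, assuming classic wxPython (where wx.BitmapFromBuffer
# exists) and that a wx.App has been created before any bitmap:
app = wx.App(False)
white = EmptyBitmap(200, 100)            # all-white 200x100 bitmap
grey = EmptyBitmap(200, 100, value=128)  # uniform mid-grey bitmap
print("%d x %d" % (white.GetWidth(), white.GetHeight()))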
outputfile = ROOT.TFile(outfilename, "RECREATE")

path = "/nfs/dust/cms/user/kschweig/JetRegression/trees0908/BDTTraining/ttHbb/*_1_*nominal*.root"
inputtree = ROOT.TChain("MVATree")
# for f in glob(path):
for f in sys.argv[2:]:
    inputtree.Add(f)

Regvars_input = {}
for variable in inputvariables:
    Regvars_input.update({variable: array.array("f", 20 * [0])})
    inputtree.SetBranchAddress(variable, Regvars_input[variable])

isHiggsJet = array.array("f", 20 * [0])
isWJet = array.array("f", 20 * [0])
ishadTopJet = array.array("f", 20 * [0])
inputtree.SetBranchAddress("RegJet_isHiggsJet", isHiggsJet)
inputtree.SetBranchAddress("RegJet_isWJet", isWJet)
inputtree.SetBranchAddress("RegJet_ishadTopJet", ishadTopJet)

outputfile.cd()
OutputTree = ROOT.TTree("MVATree", "MVATree")
def decode_shikata_ga_nai(d, all_instr_list): ## verify some bytes first import array sd = array.array('B', d) szd = None #look for floating point instr, fnstenv, and mov in first few instr if len(all_instr_list) < 10: return None fFoundFnstenv = False fFoundFloatingPtInstr = False fFoundMov = False fFoundCounter = False fFoundXor = False iLen = 0 key = 0 szMsg = 'No decoder found' iXorOffset = 0 iXorAdjust = 0 iFPOpOffset = 0 for i in range(0, 10): instr_lst = all_instr_list[i] szIns = instr_lst[2] offset = instr_lst[3] # fnstenv [esp - 12] if szIns.startswith('fnstenv'): fFoundFnstenv = True #fxch st0,st6 if not fFoundFloatingPtInstr and not szIns.startswith( 'fnstenv') and szIns.startswith('f'): fFoundFloatingPtInstr = True iFPOpOffset = offset #xor dword [edx + 24],eax if szIns.startswith('sub ') and szIns.endswith('0xfffffffc'): iXorAdjust = -4 if szIns.startswith('xor dword ['): fFoundXor = True iXorOffset = int( (szIns.split('+')[1]).split(']')[0]) ##+ iXorAdjust #find key operation. e.g. add esi,dword [eax + 14] for j in range(1, 3): keyop_instr_lst = all_instr_list[i + j] szKeyOpIns = keyop_instr_lst[2] if szKeyOpIns.startswith('add e'): szKeyOp = szKeyOpIns.split(' ')[0] istart = keyop_instr_lst[3] break # mov eax,0x4193fabc if szIns.startswith('mov ') and szIns.find('0x') > 0 and not fFoundMov: fFoundMov = True k1 = sd[offset + 0x1] k2 = sd[offset + 0x2] k3 = sd[offset + 0x3] k4 = sd[offset + 0x4] key = k1 | (k2 << 8) | (k3 << 16) | (k4 << 24) # mov cl,110 if szIns.startswith('mov ') and szIns.find('cl,') > 0: fFoundCounter = True iLen = int(szIns.split(',')[1]) if (fFoundMov and fFoundFloatingPtInstr and fFoundFnstenv and fFoundCounter and iLen > 0): next_key_operation = d[istart:istart + 3] szd = [] for i in range(0, iXorOffset + iFPOpOffset): szd.append(chr(sd[i])) for i in range(iXorOffset + iFPOpOffset, len(sd) - (iXorOffset + iFPOpOffset), 4): szd.append(chr(k1 ^ sd[i])) szd.append(chr(k2 ^ sd[i + 1])) szd.append(chr(k3 ^ sd[i + 2])) szd.append(chr(k4 ^ sd[i + 3])) data = k1 ^ sd[i] | ((k2 ^ sd[i + 1]) << 8) | ( (k3 ^ sd[i + 2]) << 16) | ((k4 ^ sd[i + 3]) << 24) #update the key based on the shikata rules if szKeyOp == "add": key = (key + data) & 0x00000000FFFFFFFF else: key = (key + data) & 0x00000000FFFFFFFF pass # error case k1 = 0x000000FF & key k2 = (0x0000FF00 & key) >> 8 k3 = (0x00FF0000 & key) >> 16 k4 = (0xFF000000 & key) >> 24 szd = ''.join(szd) op = dis.disasm(szd, istart, istart) szIns = repr(op).lower() szKeyOp = szIns.split(' ')[0] # szOffsetDirection = szIns.split(' ')[3] # cOffset = int((szIns.split(' ')[4]).split(']')[0]) szMsg = "Found shikata_ga_nai shellcode len = %d, key = 0x%x, decode offset= %d, fpop offset = %d, keyop= %s, istart=0x%x, '%s'" % ( iLen, key, iXorOffset, iFPOpOffset, szKeyOp, istart, szIns) else: pass return [szd, iLen, key, iXorOffset, szMsg]
def read_subimage(self, rows, cols, bands=None, use_memmap=False): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). ''' import array if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap.take(rows, 0).take(cols, 1)) else: data = np.array( self._memmap.take(rows, 0).take(cols, 1).take(bands, 2)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = len(rows) # Rows in sub-image nSubCols = len(cols) # Cols in sub-image d_band = self.sample_size d_col = d_band * self.nbands d_row = d_col * self.ncols vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) else: allBands = 1 bands = list(range(self.nbands)) nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in rows: for j in cols: if allBands: f.seek(offset + i * d_row + j * d_col, 0) vals.fromfile(f, nSubBands * sample_size) else: for k in bands: f.seek(offset + i * d_row + j * d_col + k * d_band, 0) vals.fromfile(f, sample_size) arr = np.fromstring(vals.tostring(), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr
def read_subregion(self, row_bounds, col_bounds, bands=None, use_memmap=True): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array. ''' import array if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], :]) else: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], bands]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image d_row = self.sample_size * self.ncols * self.nbands colStartPos = col_bounds[0] * self.sample_size * self.nbands vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) delta_b = bands[:] for i in range(len(delta_b)): delta_b[i] *= self.sample_size else: allBands = 1 nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in range(row_bounds[0], row_bounds[1]): f.seek(offset + i * d_row + colStartPos, 0) rowPos = f.tell() if allBands: # This is the simple one vals.fromfile(f, nSubCols * nbands * sample_size) else: # Need to pull out specific bands for each column. for j in range(nSubCols): f.seek(rowPos + j * self.sample_size * self.nbands, 0) pixelPos = f.tell() for k in range(len(bands)): f.seek(pixelPos + delta_b[k], 0) # Next band vals.fromfile(f, sample_size) arr = np.fromstring(vals.tostring(), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr
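# The readers above all address a BIP (band-interleaved-by-pixel) layout: the
# bands of one pixel are contiguous, pixels run along a row, and rows are
# stacked. A small sketch of the offset arithmetic behind their seek() calls
# (bip_offset is an illustrative name, not part of the original class):
def bip_offset(offset, sample_size, nbands, ncols, i, j, k):
    d_band = sample_size        # step to the next band of the same pixel
    d_col = d_band * nbands     # step to the next pixel in the row
    d_row = d_col * ncols       # step to the next row
    return offset + i * d_row + j * d_col + k * d_band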
from array import array
from random import random

# Write 10**5 random doubles to a binary file and read them back.
floats = array('d', (random() for i in range(10**5)))
floats[-1]
fp = open('floats.bin', 'wb')
floats.tofile(fp)
fp.close()

floats2 = array('d')
fp = open('floats.bin', 'rb')
floats2.fromfile(fp, 10**5)
fp.close()
floats2[-1]

# "In-place" sort: array has no sort() method, so rebuild it from sorted values.
floats = array(floats.typecode, sorted(floats))

# Change one byte through a memoryview to update an element of the array.
number = array('h', [-2, -1, 0, 1, 2])
memv = memoryview(number)
len(memv)
memv[0]
memv_oct = memv.cast('B')
memv_oct.tolist()
memv_oct[5] = 4
number

import numpy
a = numpy.arange(12)
a
type(a)
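# Why the cast above behaves the way it does: each signed 16-bit value ('h') is
# stored as two bytes, little-endian on most machines, so -2 is the byte pair
# 0xFE 0xFF and setting the high byte of the third element to 4 yields 1024.
# A quick check with the struct module (a standalone sketch, little-endian
# byte order assumed to match the machine running the cast):
import struct

print(struct.pack('<h', -2))             # b'\xfe\xff' -> bytes 254, 255
print(struct.unpack('<h', b'\x00\x04'))  # (1024,)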
import array

from ROOT import TMVA, TFile, TString, TTree, TLorentzVector
from ROOT import std
from subprocess import call
from os.path import isfile

# Setup TMVA
TMVA.Tools.Instance()
TMVA.PyMethodBase.PyInitialize()
reader = TMVA.Reader("Color:!Silent")

varList = ["mu_likep", "mu_likem", "dist", "DeltPhi", "minv"]

dist = array.array('f', [0])
DeltPhi = array.array('f', [0])
mu_likep = array.array('f', [0])
mu_likem = array.array('f', [0])
minv = array.array('f', [0])
reader.AddVariable("dist", dist)
reader.AddVariable("DeltPhi", DeltPhi)
reader.AddVariable("mu_likep", mu_likep)
reader.AddVariable("mu_likem", mu_likem)
reader.AddSpectator("minv", minv)

fout = TFile("TMVAappKeras.root", "RECREATE")
t = TTree('MVout', 'MVA outputs')
methodList = {'BDT', 'PyKeras'}
"nEventsMin=nTest", "MaxDepth=3", "BoostType=AdaBoost", "AdaBoostBeta=0.5", "SeparationType=GiniIndex", "nCuts=10", "PruneMethod=NoPruning", ])) factory.TrainAllMethods() factory.TestAllMethods() factory.EvaluateAllMethods() reader = ROOT.TMVA.Reader() import array varx1 = array.array('f', [0]) reader.AddVariable("itr", varx1) varx2 = array.array('f', [0]) reader.AddVariable("beta14", varx2) varx3 = array.array('f', [0]) reader.AddVariable("thetaij", varx3) #varx4 = array.array('f',[0]) ; reader.AddVariable("nhits",varx4) varx4 = array.array('f', [0]) reader.AddVariable("udotR", varx4) #varx6 = array.array('f',[0]) ; reader.AddVariable("scaleLogL",varx6) reader.BookMVA("BDT", "weights/TMVAClassification_BDT.weights.xml") # create a new 2D histogram with fine binning histo2 = ROOT.TH2F("histo2", "", 200, -5, 5, 200, -5, 5) # loop over the bins of a 2D histogram
def doFit( self,graph,config): nPars1 = 6 nPars2 = 5 nCommonPars = 3 #(Sigma, sigma_{1}/sigma_{2}, Frac) nPars = nPars1 + nPars2 - nCommonPars #initial values on parameters ExpSigma = [] ExpPeak = [] StartSigma = [] StartRatio = [] StartFrac = [] StartPeak = [] StartConst = [] #placeholder #limits on parameters LimitSigma_lower = [] LimitSigma_upper = [] LimitRatio_lower = [] LimitRatio_upper = [] LimitFrac_lower = [] LimitFrac_upper = [] LimitPeak_lower = [] LimitPeak_upper = [] LimitConst_lower = [] #placeholder LimitConst_upper = [] #placeholder for i in range(0,2): ExpSigma.append( graph[i].GetRMS()*0.5 ) print i, graph[i].GetHistogram().GetRMS()*0.5 ExpPeak.append( graph[i].GetHistogram().GetMaximum() ) #Currenly the rest of parameters initial values and ranges are treated the same StartSigma.append( ExpSigma[i] * config['StartSigma'] ) LimitSigma_lower.append( config['LimitsSigma'][0] ) LimitSigma_upper.append( config['LimitsSigma'][1] ) StartRatio.append( config['StartRatio'] ) LimitRatio_lower.append( config['LimitsRatio'][0] ) LimitRatio_upper.append( config['LimitsRatio'][1] ) StartFrac.append( config['StartFrac'] ) LimitFrac_lower.append( config['LimitsFrac'][0] ) LimitFrac_upper.append( config['LimitsFrac'][1] ) StartPeak.append( ExpPeak[i]*config['StartPeak'] ) LimitPeak_lower.append( ExpPeak[i]*config['LimitsPeak'][0] ) LimitPeak_upper.append( ExpPeak[i]*config['LimitsPeak'][1] ) StartConst.append( config['StartConst'] )#placeholder LimitConst_lower.append( config['LimitsConst'][0] )#placeholder LimitConst_upper.append( config['LimitsConst'][1] )#placeholder fittedFunctions = [] ff = r.TF1("ff","[5] + [2]*([3]*exp(-(x-[4])**2/(2*([0]*[1]/([3]*[1]+1-[3]))**2)) + (1-[3])*exp(-(x-[4])**2/(2*([0]/([3]*[1]+1-[3]))**2)) )") ff.SetParNames("#Sigma","#sigma_{1}/#sigma_{2}","Amp","Frac","Mean", "Const") fittedFunctions.append(ff) ff1 = r.TF1("ff1","[2]*([3]*exp(-(x-[4])**2/(2*([0]*[1]/([3]*[1]+1-[3]))**2)) + (1-[3])*exp(-(x-[4])**2/(2*([0]/([3]*[1]+1-[3]))**2)) )") ff1.SetParNames("#Sigma","#sigma_{1}/#sigma_{2}","Amp","Frac","Mean") fittedFunctions.append(ff1) # Some black ROOT magic to get Minuit output into a log file # see http://root.cern.ch/phpBB3/viewtopic.php?f=14&t=14473, http://root.cern.ch/phpBB3/viewtopic.php?f=13&t=16844, https://agenda.infn.it/getFile.py/access?resId=1&materialId=slides&confId=4933 slide 23 r.gROOT.ProcessLine("gSystem->RedirectOutput(\"./minuitlogtmp/Minuit.log\", \"a\");") r.gROOT.ProcessLine("gSystem->Info(0,\"Next BCID\");") fitter = r.Fit.Fitter() opt = r.Fit.DataOptions() wff = r.Math.WrappedMultiTF1(fittedFunctions[0],1) wff1 = r.Math.WrappedMultiTF1(fittedFunctions[1],1) rangeff = r.Fit.DataRange() rangeff.SetRange(graph[0].GetXaxis().GetXmin(), graph[0].GetXaxis().GetXmax()) dataff = r.Fit.BinData(opt,rangeff) r.Fit.FillData(dataff, graph[0]) rangeff1 = r.Fit.DataRange() rangeff1.SetRange(graph[1].GetXaxis().GetXmin(), graph[1].GetXaxis().GetXmax()) dataff1 = r.Fit.BinData(opt,rangeff1) r.Fit.FillData(dataff1, graph[1]) chi2ff = r.Fit.Chi2Function(dataff, wff) chi2ff1 = r.Fit.Chi2Function(dataff1, wff1) # Size of parffAndff1 array should equal to nPars, with elements the staring value of parameters and in this case as # [0]: #StartSigma[0], [1]: #sigma_{1}/#sigma_{2} =1, 2:StartPeak[0], 3: StartFrac[0], 4: Mean, 5: StartConst[0], 6: StartPeak[1], 7: Mean parffAndff1 = array.array('d', [StartSigma[0],1.,StartPeak[0],StartFrac[0],0., StartConst[0], StartPeak[1], 0.]) fitter.Config().SetParamsSettings(nPars,parffAndff1) if LimitSigma_upper[0] > 
LimitSigma_lower[0]: fitter.Config().ParSettings(0).SetLimits(LimitSigma_lower[0],LimitSigma_upper[0] ) if LimitRatio_upper[0] > LimitRatio_lower[0]: fitter.Config().ParSettings(1).SetLimits(LimitRatio_lower[0],LimitRatio_upper[0] ) if LimitPeak_upper[0] > LimitPeak_lower[0]: fitter.Config().ParSettings(2).SetLimits(LimitPeak_lower[0],LimitPeak_upper[0] ) if LimitFrac_upper[0] > LimitFrac_lower[0]: fitter.Config().ParSettings(3).SetLimits(LimitFrac_lower[0],LimitFrac_upper[0] ) if LimitConst_upper[0] > LimitConst_lower[0]: #placeholder fitter.Config().ParSettings(5).SetLimits(LimitConst_lower[0],LimitConst_upper[0]) #placeholder if LimitPeak_upper[1] > LimitPeak_lower[1]: fitter.Config().ParSettings(6).SetLimits(LimitPeak_lower[1],LimitPeak_upper[1] ) fitter.Config().MinimizerOptions().SetPrintLevel(2) fitter.Config().SetMinimizer("Minuit2","Migrad") myfun = r.GlobalChi2(nPars,dataff.Size()+dataff1.Size(),chi2ff, chi2ff1) fitter.FitFCN(myfun,parffAndff1,dataff.Size()+dataff1.Size(), True) result = fitter.Result() fitStatus = -999 # becomes dummy variabe in the Sim Fit # 0: #Sigma, 1: #sigma_{1}/#sigma_{2}, 2: Amp, 3: Frac, 4: Mean, 5: Const parff_draw = array.array('i', [0,1,2,3,4,5]) fittedFunctions[0].SetFitResult( result, parff_draw); # 0: #Sigma (common parameter), 1: #sigma_{1}/#sigma_{2} (common parameter), 6: Amp, 3: Frac (common parameter), 7: Mean parff1_draw = array.array('i', [0,1,6,3,7]) fittedFunctions[1].SetFitResult( result, parff1_draw); r.gROOT.ProcessLine("gSystem->RedirectOutput(0);") fComponents = [] for i in range(0,2): sigma = fittedFunctions[i].GetParameter("#Sigma") m = fittedFunctions[i].GetParNumber("#Sigma") sigmaErr = fittedFunctions[i].GetParError(m) sigRatio = fittedFunctions[i].GetParameter("#sigma_{1}/#sigma_{2}") m = fittedFunctions[i].GetParNumber("#sigma_{1}/#sigma_{2}") sigRatioErr = fittedFunctions[i].GetParError(m) amp = fittedFunctions[i].GetParameter("Amp") m = fittedFunctions[i].GetParNumber("Amp") ampErr = fittedFunctions[i].GetParError(m) frac = fittedFunctions[i].GetParameter("Frac") m = fittedFunctions[i].GetParNumber("Frac") fracErr = fittedFunctions[i].GetParError(m) mean = fittedFunctions[i].GetParameter("Mean") m = fittedFunctions[i].GetParNumber("Mean") meanErr = fittedFunctions[i].GetParError(m) if (i==0): const = fittedFunctions[i].GetParameter("Const") #placeholder m = fittedFunctions[i].GetParNumber("Const") #placeholder constErr = fittedFunctions[i].GetParError(m) #placeholder else: const = -999. constErr = -999. 
title = graph[i].GetTitle() title_comps = title.split('_') scan = title_comps[0] type = title_comps[1] bcid = title_comps[2] chi2 = fittedFunctions[i].GetChisquare() ndof = fittedFunctions[i].GetNDF() sqrttwopi = math.sqrt(2*math.pi) CapSigma = sigma CapSigmaErr = sigmaErr peak = amp peakErr = ampErr area = sqrttwopi*peak*CapSigma areaErr = (sqrttwopi*CapSigma*peakErr)*(sqrttwopi*CapSigma*peakErr) + (sqrttwopi*peak*CapSigmaErr)*(sqrttwopi*peak*CapSigmaErr) areaErr = math.sqrt(areaErr) if i==0: self.table_Luminometer1.append([scan, type, bcid, sigma, sigmaErr, sigRatio, sigRatioErr, amp, ampErr, frac, fracErr, mean, meanErr, CapSigma, CapSigmaErr, peak, peakErr, area, areaErr, fitStatus, chi2, ndof]) else: self.table_Luminometer2.append([scan, type, bcid, sigma, sigmaErr, sigRatio, sigRatioErr, amp, ampErr, frac, fracErr, mean, meanErr, CapSigma, CapSigmaErr, peak, peakErr, area, areaErr, fitStatus, chi2, ndof]) # Define signal and background pieces of full function separately, for plotting h = frac s2 = CapSigma/(h*sigRatio+1-h) a1 = amp*h a2 = amp*(1-h) s1 = CapSigma*sigRatio/(h*sigRatio+1-h) fSignal1 = r.TF1("fSignal1","[2]*exp(-(x-[1])**2/(2*[0]**2))") fSignal1.SetParNames("#Sigma","Mean","Amp") fSignal1.SetParameters(s1, mean, a1) fSignal2 = r.TF1("fSignal2","[2]*exp(-(x-[1])**2/(2*[0]**2))") fSignal2.SetParNames("#Sigma","Mean","Amp") fSignal2.SetParameters(s2, mean, a2) # Set background to zero for plotting fBckgrd =r.TF1("fBckgrd","[0]") fBckgrd.SetParNames("Const") fBckgrd.SetParameter(0, const) fComponents.append(fSignal1) fComponents.append(fSignal2) fComponents.append(fBckgrd) functions = [fittedFunctions[0], fComponents[0], fComponents[1], fComponents[2], fittedFunctions[1], fComponents[3], fComponents[4], fComponents[5]] return [functions, result]
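# The TF1 strings in doFit() encode a double Gaussian with a common mean plus a
# constant. A plain-Python transcription (a sketch with the same parameter
# meanings, not used by the fit itself) makes the width convention explicit:
import math

def doubleGauss(x, capSigma, ratio, amp, frac, mean, const=0.0):
    # capSigma is the effective width #Sigma, ratio = sigma_1/sigma_2 and frac
    # the fraction of the first component, so that
    #   sigma_1 = capSigma*ratio/(frac*ratio + 1 - frac)
    #   sigma_2 = capSigma/(frac*ratio + 1 - frac)
    denom = frac * ratio + 1.0 - frac
    s1 = capSigma * ratio / denom
    s2 = capSigma / denom
    g1 = frac * math.exp(-(x - mean) ** 2 / (2.0 * s1 ** 2))
    g2 = (1.0 - frac) * math.exp(-(x - mean) ** 2 / (2.0 * s2 ** 2))
    return const + amp * (g1 + g2)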
A memoryview is essentially a generalized, de-mathematized NumPy array. It lets you share memory between
data structures without copying the contents; those data structures can be almost anything, e.g. PIL images,
SQLite databases, NumPy arrays and so on. This matters a great deal when working with large data sets.
memoryview.cast is similar in spirit to the array module: it lets you read and write the same block of memory
in different ways, without any bytes being moved around. That is much like a type cast in C. memoryview.cast
packs the same memory into a brand-new memoryview object and hands it back to you.
"""
print("------------------------->>>")
# Update an element of the array by changing one of its bytes
import array

numbers = array.array("h", [-2, -1, 0, 1, 2])
memv = memoryview(numbers)
print(len(memv))          # 5
print(memv[0])            # -2
memv_oct = memv.cast("B")
print(memv_oct.tolist())  # [254, 255, 255, 255, 0, 0, 1, 0, 2, 0]
print(memv_oct[5])        # 0
# Setting the high-order byte of the two-byte signed integer to 4 changes its value to 1024
memv_oct[5] = 4
print(numbers)            # array('h', [-2, -1, 1024, 1, 2])
"""
Also, if advanced numeric processing of arrays is part of your daily work, then NumPy and SciPy should be
your weapons of choice.
"""
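# A small additional sketch of the zero-copy behaviour described above: slicing
# a memoryview shares the underlying buffer instead of copying it, so writing
# through the slice is visible in the original array.
import array

buf = array.array('B', range(6))
view = memoryview(buf)
chunk = view[2:5]     # no bytes are copied here
chunk[0] = 99         # writes through to `buf`
print(buf)            # array('B', [0, 1, 99, 3, 4, 5])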
def read_subimage(self, rows, cols, bands=None, use_memmap=False): ''' Reads arbitrary rows, columns, and bands from the image. Arguments: `rows` (list of ints): Indices of rows to read. `cols` (list of ints): Indices of columns to read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default False): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array, where `M` = len(`rows`), `N` = len(`cols`), and `L` = len(bands) (or # of image bands if `bands` == None). ''' import array if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap.take(rows, 0).take(cols, 1)) else: data = np.array( self._memmap.take(rows, 0).take(cols, 1).take(bands, 2)) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = len(rows) # Rows in sub-image nSubCols = len(cols) # Cols in sub-image d_band = self.sample_size d_col = d_band * self.nbands d_row = d_col * self.ncols vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) else: allBands = 1 bands = list(range(self.nbands)) nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in rows: for j in cols: if allBands: f.seek(offset + i * d_row + j * d_col, 0) vals.fromfile(f, nSubBands * sample_size) else: for k in bands: f.seek(offset + i * d_row + j * d_col + k * d_band, 0) vals.fromfile(f, sample_size) arr = np.fromstring(tobytes(vals), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr
systTree.setWeightName("lepSF", 1.)
systTree.setWeightName("lepUp", 1.)
systTree.setWeightName("lepDown", 1.)
systTree.setWeightName("PFSF", 1.)
systTree.setWeightName("PFUp", 1.)
systTree.setWeightName("PFDown", 1.)
#++++++++++++++++++++++++++++++++++
#++ variables to branch          ++
#++++++++++++++++++++++++++++++++++
#++++++++++++++++++++++++++++++++++
#++ All category                 ++
#++++++++++++++++++++++++++++++++++
isdileptonic = array.array('i', [0])
muon_pt = array.array('f', [0.])
muon_eta = array.array('f', [0.])
muon_phi = array.array('f', [0.])
muon_m = array.array('f', [0.])
muon_SF = array.array('f', [0.])
electron_pt = array.array('f', [0.])
electron_eta = array.array('f', [0.])
electron_phi = array.array('f', [0.])
electron_m = array.array('f', [0.])
electron_SF = array.array('f', [0.])
nJet_lowpt_all = array.array('i', [0])
nfatJet_all = array.array('i', [0])
nJet_pt100_all = array.array('i', [0])
nbJet_lowpt_all = array.array('i', [0])
print(floats[-1])

# file mode write and binary
fp = open('floats.bin', 'wb')
floats.tofile(fp)
fp.close()

fp = open('floats.bin', 'rb')
floats2 = array('d')
floats2.fromfile(fp, 10**7)
fp.close()
print(floats2[-1])
floats2 == floats

import array
numbers = array.array('h', [-2, -1, 0, 1, 2])  # h: signed short
memv = memoryview(numbers)
print(len(memv))
print(memv[0])
memv_oct = memv.cast('B')
print(memv_oct.tolist())
memv_oct[5] = 4
print(numbers)

import numpy as np
a = np.arange(12)
print(a)
print(type(a))
print(a.shape)
def read_subregion(self, row_bounds, col_bounds, bands=None, use_memmap=True): ''' Reads a contiguous rectangular sub-region from the image. Arguments: `row_bounds` (2-tuple of ints): (a, b) -> Rows a through b-1 will be read. `col_bounds` (2-tuple of ints): (a, b) -> Columnss a through b-1 will be read. `bands` (list of ints): Optional list of bands to read. If not specified, all bands are read. `use_memmap` (bool, default True): Specifies whether the file's memmap interface should be used to read the data. Setting this arg to True only has an effect if a memmap is being used (i.e., if `img.using_memmap` is True). Returns: :class:`numpy.ndarray` An `MxNxL` array. ''' import array if self._memmap is not None and use_memmap is True: if bands is None: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], :]) else: data = np.array(self._memmap[row_bounds[0]: row_bounds[1], col_bounds[0]: col_bounds[1], bands]) if self.scale_factor != 1: data = data / float(self.scale_factor) return data offset = self.offset nbands = self.nbands nSubRows = row_bounds[1] - row_bounds[0] # Rows in sub-image nSubCols = col_bounds[1] - col_bounds[0] # Cols in sub-image d_row = self.sample_size * self.ncols * self.nbands colStartPos = col_bounds[0] * self.sample_size * self.nbands vals = array.array(byte_typecode) nVals = self.nrows * self.ncols sample_size = self.sample_size # Increments between bands if bands is not None: allBands = 0 nSubBands = len(bands) delta_b = bands[:] for i in range(len(delta_b)): delta_b[i] *= self.sample_size else: allBands = 1 nSubBands = self.nbands f = self.fid # Pixel format is BIP for i in range(row_bounds[0], row_bounds[1]): f.seek(offset + i * d_row + colStartPos, 0) rowPos = f.tell() if allBands: # This is the simple one vals.fromfile(f, nSubCols * nbands * sample_size) else: # Need to pull out specific bands for each column. for j in range(nSubCols): f.seek(rowPos + j * self.sample_size * self.nbands, 0) pixelPos = f.tell() for k in range(len(bands)): f.seek(pixelPos + delta_b[k], 0) # Next band vals.fromfile(f, sample_size) arr = np.fromstring(tobytes(vals), dtype=self.dtype) arr = arr.reshape(nSubRows, nSubCols, nSubBands) if self.scale_factor != 1: return arr / float(self.scale_factor) return arr
def preparation(track, gms) : length_hist = {} distance = { 'sum' : 0, 'count' : 0 } reads = { 0 : 0, 16 : 0 } info = {} data = {} for read in parser(track) : pos, chr, strand_key, l_seq, qname = read if chr < 0 or chr > len(track.chromosome_names) : continue c = track.chromosome_names[chr] if l_seq not in length_hist : length_hist[l_seq] = 0 length_hist[l_seq] += 1 # pos += l_seq/2 # New chromosome name: if c not in info : firsts = 0 data[c] = array.array('l', []) info[c] = { 'Length' : track.chromosome_lengths[chr], 'Unique reads' : 0, 'Total reads' : 0, 'Names' : {} } firsts += 1 if firsts < 300 : if strand_key in [99,147,163,83] : # check paired reads if qname not in info[c]['Names'] : info[c]['Names'][qname] = pos else : dist = abs(info[c]['Names'][qname] - pos) if dist > 0 : distance['sum'] += dist distance['count'] += 1 info[c]['Total reads'] += 1 if strand_key not in reads : reads[strand_key] = 0 reads[strand_key] += 1 prev_key = 'Previous ' + str(strand_key) ignore = False if prev_key in info[c] : if info[c][prev_key] == pos : ignore = True if ignore == False : info[c][prev_key] = pos data[c].append(1000 * pos + strand_key) info[c]['Unique reads'] += 1 # <- if # <- for read in parser(track) total_reads = 0; unique_reads = 0 keys = info.keys() keys = sorted(keys, key = lambda (c): chrsort(c)) tbl = [] for c in keys : tbl.append([c, info[c]['Unique reads'], info[c]['Total reads']]) total_reads += info[c]['Total reads'] unique_reads += info[c]['Unique reads'] logging.info("Chromosome name, Unique reads, Total reads:") logging.info(beautiful_table(tbl)) mean_length = 0 for l in length_hist : mean_length += l * length_hist[l] mean_length = mean_length/total_reads msg = "Average read length: {}" logging.info(msg.format(mean_length)) drate = round((1 - float(unique_reads)/float(total_reads)) * 100, 1) msg = "\nLibrary depth\n Duplication rate: {}%\n Total reads: {}\n Unique reads: {}" logging.info(msg.format(drate, total_reads, unique_reads)) notes = [ [99 , " - Reads mapped in proper pair. Mate reverse strand, first in pair: {}"], # + [147, " - Reads mapped in proper pair. Read reverse strand, second in pair: {}"], # + [83 , " - Reads mapped in proper pair. Read reverse strand, first in pair: {}"], # - [163, " - Reads mapped in proper pair. 
Mate reverse strand, second in pair: {}"], # - [97 , " - Mate reverse strand, first in pair: {}"], # [161, " - Mate reverse strand, second in pair: {}"], # - [81 , " - Read reverse strand, first in pair: {}"], # + [145, " - Read reverse strand, second in pair: {}"], # [113, " - Mate reverse strand, Read reverse strand, first in pair: {}"], [177, " - Mate reverse strand, Read reverse strand, second in pair: {}"], [65 , " - First in pair: {}"], [129, " - Second in pair: {}"], [73 , " - Mate unmapped, first in pair: {}"], [137, " - Mate unmapped, second in pair: {}"], [89 , " - Mate unmapped, read reverse strand, first in pair: {}"], [153, " - Mate unmapped, read reverse strand, second in pair: {}"] ] def paired() : for x in notes : if x[0] in reads : return True return False logging.info("\nStrand symmetry") fragment_size = fragmentsize if paired() : if fragment_size == 0 : fragment_size = distance['sum']/distance['count'] + mean_length for key, msg in notes : logging.info(msg.format(reads[key] if key in reads else 0)) else : if fragment_size == 0 : fragment_size = 250 logging.info(" [+] " + str(reads[0])) logging.info(" [-] " + str(reads[16])) logging.info("\nFragment size: " + str(fragment_size)) logging.info("") effective_len = get_gms(info, mean_length, gms) * sum(track.chromosome_lengths) plambda = count_lambda(unique_reads, args.window, effective_len) logging.info("") logging.info("Genome Length: {}".format(sum(track.chromosome_lengths))) logging.info("Effective genome Length: {}".format(effective_len)) return [data, mean_length, fragment_size, plambda, total_reads, effective_len]
def getTaskSrcTable(taskID):
    # NOTE: array.array() expects a one-character typecode string (e.g. 'i', 'd') as
    # its first argument, so this call raises TypeError unless ManageTable happens to
    # be such a typecode; taskID is also unused here.
    return array.array(ManageTable)
def controlPlots(datasets): normData, normEWK, normFactorisedData, normFactorisedEWK = normalisation() norm_inc, normEWK_inc = normalisationInclusive() hmet = [] hmetb = [] effArray = [] effErrArray = [] hmetbveto = [] hmtBtag = [] hmtBveto = [] hmtNoMetBtag = [] hmtNoMetBveto = [] effBvetoArray = [] effErrBvetoArray = [] effArrayMt = [] effErrArrayMt = [] effArrayMtNoMet = [] effErrArrayMtNoMet = [] ## histograms in bins, normalisation and substraction of EWK contribution ## mt with 2dim deltaPhi cut for ptbin in ptbins: ### MET mmt_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MET_InvertedTauIdJets" + ptbin) ]) mmt_tmp._setLegendStyles() mmt_tmp._setLegendLabels() mmt_tmp.histoMgr.setHistoDrawStyleAll("P") mmt_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmt = mmt_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmt.Scale(normData[ptbin]) # hmt.append(mt) mmtEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MET_InvertedTauIdJets" + ptbin) ]) mmtEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtEWK_tmp._setLegendStyles() mmtEWK_tmp._setLegendLabels() mmtEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtEWK = mmtEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtEWK.Scale(normEWK[ptbin]) mmt.Add(mmtEWK, -1) hmet.append(mmt) ### MET with btagging mmtb_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MET_InvertedTauIdBtag" + ptbin) ]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MET_InvertedTauIdBtag" + ptbin) ]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = mmtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmetb.append(mmtb) eff = mmtb.Integral() / mmt.Integral() ereff = sqrt(eff * (1 - eff) / mmt.Integral()) print " pt bin ", ptbin, " btag efficiency from MET = ", eff, " error ", ereff effArray.append(eff) effErrArray.append(ereff) ### MET with bveto mmtbveto_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MET_InvertedTauIdBveto" + ptbin) ]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto( "Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MET_InvertedTauIdBveto" + ptbin) ]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto( 
"EWK").getRootHisto().Clone() mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmetbveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral() / mmtbveto.Integral() ereff = sqrt(eff * (1 - eff) / mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from MET = ", eff, " error ", ereff effBvetoArray.append(eff) effErrBvetoArray.append(ereff) ## with MT distribution if False: ### no MET cut mmtb_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtagNoMetCut" + ptbin) ]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtagNoMetCut" + ptbin) ]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbEWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = mmtbEWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmtNoMetBtag.append(mmtb) ### MET with bvet mmtbveto_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBvetoNoMetCut" + ptbin) ]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto( "Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBvetoNoMetCut" + ptbin) ]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmtNoMetBveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral() / mmtbveto.Integral() ereff = sqrt(eff * (1 - eff) / mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from mt, no met cut = ", eff, " error ", ereff effArrayMtNoMet.append(eff) effErrArrayMtNoMet.append(ereff) ############################################# ### with MET cut mmtb_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtag" + ptbin) ]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBtag" + ptbin) ]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") 
mmtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = mmtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmtBtag.append(mmtb) mmtbveto_tmp = plots.PlotBase([ datasets.getDataset("Data").getDatasetRootHisto( "Inverted/MTInvertedTauIdBveto" + ptbin) ]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto( "Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([ datasets.getDataset("EWK").getDatasetRootHisto( "Inverted/MTInvertedTauIdBveto" + ptbin) ]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity( datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto( lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto( "EWK").getRootHisto().Clone() mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmtBveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral() / mmtbveto.Integral() ereff = sqrt(eff * (1 - eff) / mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from mt = ", eff, " error ", ereff effArrayMt.append(eff) effErrArrayMt.append(ereff) ## sum histo bins met = hmet[0].Clone("met") met.SetName("MET") met.SetTitle("Inverted tau Met") met.Reset() print "check met", met.GetEntries() for histo in hmet: met.Add(histo) metb = hmetb[0].Clone("met") metb.SetName("MET") metb.SetTitle("Inverted tau Met") metb.Reset() print "check met btagging", metb.GetEntries() for histo in hmetb: metb.Add(histo) metbveto = hmetbveto[0].Clone("met") metbveto.SetName("METbveto") metbveto.SetTitle("Inverted tau Met") metbveto.Reset() print "check met bveto", metbveto.GetEntries() for histo in hmetbveto: metbveto.Add(histo) ## with MT if False: mtNoMetBtag = hmtNoMetBtag[0].Clone("mt") mtNoMetBtag.SetName("MET") mtNoMetBtag.SetTitle("Inverted tau Met") mtNoMetBtag.Reset() print "check MT btagging", mtNoMetBtag.GetEntries() for histo in hmtNoMetBtag: mtNoMetBtag.Add(histo) mtNoMetBveto = hmtNoMetBveto[0].Clone("mt") mtNoMetBveto.SetName("MET") mtNoMetBveto.SetTitle("Inverted tau Met") mtNoMetBveto.Reset() print "check MT bveto", mtNoMetBveto.GetEntries() for histo in hmtNoMetBveto: mtNoMetBveto.Add(histo) mtBtag = hmtBtag[0].Clone("mt") mtBtag.SetName("MET") mtBtag.SetTitle("Inverted tau Met") mtBtag.Reset() print "check MT btagging", mtBtag.GetEntries() for histo in hmtBtag: mtBtag.Add(histo) mtBveto = hmtBveto[0].Clone("mt") mtBveto.SetName("MET") mtBveto.SetTitle("Inverted tau Met") mtBveto.Reset() print "check MT bveto", mtBveto.GetEntries() for histo in hmtBveto: mtBveto.Add(histo) ########################################## ## plotting invertedQCD = InvertedTauID() invertedQCD.setLumi(datasets.getDataset("Data").getLuminosity()) ### effisiency as a function of MET metWithBtagging = metb.Clone("MET") metWithBtagging.Divide(metbveto) BtaggingEffVsMet = metWithBtagging.Clone("Eff") invertedQCD.setLabel("BtagToBvetoEffVsMet") invertedQCD.mtComparison(BtaggingEffVsMet, BtaggingEffVsMet, "BtagToBvetoEffVsMet") if False: ### effisiency as a function of MT mtWithBtagging = mtNoMetBtag.Clone("MT") mtWithBtagging.Divide(mtNoMetBveto) BtaggingEffNoMetVsMt = mtWithBtagging.Clone("Eff") 
invertedQCD.setLabel("BtagToBvetoEffNoMetVsMt") invertedQCD.mtComparison(BtaggingEffNoMetVsMt, BtaggingEffNoMetVsMt, "BtagToBvetoEffNoMetVsMt") ### effisiency as a function of MT mtWithBtagging = mtBtag.Clone("MT") mtWithBtagging.Divide(mtBveto) BtaggingEffVsMt = mtWithBtagging.Clone("Eff") invertedQCD.setLabel("BtagToBvetoEffVsMt") invertedQCD.mtComparison(BtaggingEffVsMt, BtaggingEffVsMt, "BtagToBvetoEffVsMt") # efficiency metb/met metbtag = metb.Clone("metb") metnobtag = met.Clone("met") metbtag.Divide(metnobtag) invertedQCD.setLabel("BtagEffVsMet") invertedQCD.mtComparison(metbtag, metbtag, "BtagEffVsMet") # efficiency metb/metbveto metbtag = metb.Clone("metb") metbjetveto = metbveto.Clone("met") invertedQCD.setLabel("BtagToBvetoEfficiency") invertedQCD.mtComparison(metbtag, metbjetveto, "BtagToBvetoEfficiency") ### Create and customise TGraph cEff = TCanvas("Efficiency", "Efficiency", 1) cEff.cd() ptbin_error = array.array("d", [5, 5, 5, 5, 10, 10, 30]) ptbin = array.array("d", [45, 55, 65, 75, 90, 110, 150]) if False: ## no MET cut cEff = TCanvas("btaggingEffNoMet", "btaggingEffNoMet", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", effArrayMtNoMet), ptbin_error, array.array("d", effErrArrayMtNoMet)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5, 0.8, "No MET cut") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btagToBvetoEffNoMetVsPtTau_mt.png") ## with MET cut cEff = TCanvas("btaggingEff", "btaggingEff", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", effArrayMt), ptbin_error, array.array("d", effErrArrayMt)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5, 0.8, "After MET cut") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btagToBvetoEffVsPtTau_mt.png") ## no MET cut cEff = TCanvas("btaggingEffNoMet", "btaggingEffNoMet", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", effBvetoArray), ptbin_error, array.array("d", effErrBvetoArray)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2, 0.88, "All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5, 0.8, "No MET cut") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() 
cEff.Update() cEff.SaveAs("btagToBvetoEffNoMetVsPtTau.png") ## with MET cut cEff = TCanvas("btaggingEfficiency", "btaggingEfficiency", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d", effArray), ptbin_error, array.array("d", effErrArray)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("b-tagging efficiency") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex( 0.2, 0.955, "8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.4, 0.85, "Inverted #tau identification") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.4, 0.78, "At least 3 jets") tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btaggingEffVsPtTau.png") ##################################3 fOUT = open("btaggingFactors", "w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n" % os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingFactors = {\n") i = 0 while i < len(effArray): line = " \"" + ptbins[i] + "\": " + str(effArray[i]) if i < len(effArray) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "B-tagging efficiensies written in file", "btaggingFactors" fOUT = open("btaggingToBvetoFactors.py", "w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n" % os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingToBvetoFactors = {\n") i = 0 while i < len(effBvetoArray): line = " \"" + ptbins[i] + "\": " + str(effBvetoArray[i]) if i < len(effBvetoArray) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "BtaggingToBveto efficiensies written in file", "btaggingToBvetoFactors" fOUT = open("btaggingToBvetoAfterMetFactors.py", "w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n" % os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingToBvetoAfterMetFactors = {\n") i = 0 while i < len(effArrayMt): line = " \"" + ptbins[i] + "\": " + str(effArrayMt[i]) if i < len(effArrayMt) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "BtaggingToBvetoAfterMet efficiensies written in file", "btaggingToBvetoFactors"
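# A minimal sketch (toy numbers, assuming PyROOT is available) of the pattern used
# repeatedly in controlPlots() above: per-pT-bin efficiencies collected in Python
# lists are converted to array.array("d") buffers so they can be handed to the
# TGraphErrors constructor and drawn.
import array
import ROOT

pt_centres = array.array("d", [45, 55, 65, 75, 90, 110, 150])
pt_errors  = array.array("d", [5, 5, 5, 5, 10, 10, 30])
eff        = array.array("d", [0.10, 0.11, 0.12, 0.12, 0.13, 0.14, 0.15])  # toy values
eff_err    = array.array("d", [0.01] * 7)                                  # toy values

graph = ROOT.TGraphErrors(len(pt_centres), pt_centres, eff, pt_errors, eff_err)
graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]")
graph.GetYaxis().SetTitle("b-tagging efficiency")
canvas = ROOT.TCanvas("eff_demo", "eff_demo", 1)
graph.Draw("AP")
canvas.SaveAs("toy_efficiency.png")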
oC = r.TCanvas()
dummyHist = r.TH1D("dummy", ";c#tau [m]; 95% CL UL on BR(H #rightarrow ss)", 100, 0.01, 1000)
dummyHist.SetMaximum(100)
dummyHist.SetMinimum(1E-4)
oC.SetLogy()
oC.SetLogx()
dummyHist.Draw()
graphs = []
leg = r.TLegend(0.6, 0.7, 0.89, 0.89)
leg.SetBorderSize(0)
for mass in [15, 40, 55]:
    inputArrays[mass] = sorted(inputArrays[mass])
    inputArrays[mass].append(
        [inputArrays[mass][-1][0] * 10, inputArrays[mass][-1][1] * 10])
    xVals = array.array('d', [x[0] / 1000. for x in inputArrays[mass]])
    yVals = array.array('d', [x[1] / 100. for x in inputArrays[mass]])
    inputArraysUncUp[mass] = sorted(inputArraysUncUp[mass])
    inputArraysUncUp[mass].append(
        [inputArraysUncUp[mass][-1][0] * 10, inputArraysUncUp[mass][-1][1] * 10])
    xValsUp = array.array('d', [x[0] / 1000. for x in inputArraysUncUp[mass]])
    yValsUp = array.array('d', [x[1] / 100. for x in inputArraysUncUp[mass]])
    inputArraysUncDown[mass] = sorted(inputArraysUncDown[mass])
    inputArraysUncDown[mass].append(
        [inputArraysUncDown[mass][-1][0] * 10, inputArraysUncDown[mass][-1][1] * 10])
    xValsDown = array.array('d', [x[0] / 1000. for x in inputArraysUncDown[mass]])
    yValsDown = array.array('d', [x[1] / 100. for x in inputArraysUncDown[mass]])
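# The snippet above ends before the limit curves are actually built and drawn.
# A minimal sketch (assumed, not from the original source; values are toys) of how
# x/y buffers like xVals/yVals are typically turned into a TGraph on a log-log canvas:
import array
import ROOT as r

xVals_demo = array.array('d', [0.01, 0.1, 1.0, 10.0])      # toy c#tau values [m]
yVals_demo = array.array('d', [0.05, 0.002, 0.004, 0.08])  # toy BR upper limits

c_demo = r.TCanvas()
c_demo.SetLogx()
c_demo.SetLogy()
gr_demo = r.TGraph(len(xVals_demo), xVals_demo, yVals_demo)
gr_demo.SetLineWidth(2)
gr_demo.SetTitle(";c#tau [m];95% CL UL on BR(H #rightarrow ss)")
gr_demo.Draw("AL")                      # draw the curve with its own axes
leg_demo = r.TLegend(0.6, 0.7, 0.89, 0.89)
leg_demo.AddEntry(gr_demo, "m_{s} = 15 GeV", "l")
leg_demo.Draw()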
import array

array1: object = array.array('i', [0, 1])
array2 = array.array('i', [2, 3, 4])
array1.extend(array2)
print(array1)  # array('i', [0, 1, 2, 3, 4])
print(array2)  # array('i', [2, 3, 4])

array2.extend([1, 2])
print(array2)  # array('i', [2, 3, 4, 1, 2])

array1 = array.array('i', [1])
array1.extend(set([0, 0, 0, 2]))
print(array1)  # array('i', [1, 0, 2])
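# A short follow-up sketch: extend() accepts any iterable of suitable values, but when
# the argument is another array its typecode must match exactly, otherwise a TypeError
# is raised.
import array

ints = array.array('i', [1, 2])
ints.extend(range(3, 5))          # any iterable of ints is fine
print(ints)                       # array('i', [1, 2, 3, 4])

doubles = array.array('d', [1.5])
try:
    ints.extend(doubles)          # mismatched typecodes: 'i' vs 'd'
except TypeError as exc:
    print(exc)                    # e.g. "can only extend with array of same kind"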
def plot(var, cut, region, plotter_config, **kw): global legend, watermarks, canv, shared # # Un pack som plotter config # foldersTag = plotter_config["foldersTag"] files = plotter_config["files"] samples = plotter_config["samples"] weights = plotter_config["weights"] is_data = plotter_config["is_data"] stack = plotter_config["stack"] colors = plotter_config["colors"] rColors = plotter_config["rColors"] overlay = plotter_config["overlay"] labels = plotter_config["labels"] output = plotter_config["output"] testSampleName = samples[0]["name"] testDirName = foldersTag[testSampleName] if var.find("*") != -1: return listVars(var.replace("*", ""), cut + testDirName + region, plotter_config) xtitle = None ytitle = None name = None x_min = None x_max = None if not plotter_config["useTree"]: if cut + foldersTag[testSampleName] + region != "": histPath = cut + foldersTag[testSampleName] + region + "/" + var else: histPath = var testHist = files[samples[0]["name"]].Get(histPath) xtitle = testHist.GetXaxis().GetTitle() ytitle = testHist.GetYaxis().GetTitle() name = testHist.GetName() x_min = testHist.GetXaxis().GetXmin() x_max = testHist.GetXaxis().GetXmax() bins = kw.get('bins', None) xtitle = kw.get('xtitle', xtitle) ytitle = kw.get('ytitle', ytitle) selection = kw.get('selection', None) name = kw.get('name', name) n_bins = kw.get('n_bins', None) x_min = kw.get('x_min', x_min) x_max = kw.get('x_max', x_max) rebin = kw.get('rebin', None) logY = kw.get('logY', None) options = kw.get('options', '') canvSize = kw.get('canvSize', [700, 700]) rMargin = kw.get('rMargin', 0) hists = {} draw = {} if bins: draw["bins"] = array.array("d", [float(x) for x in bins]) draw["title"] = ";%s;%s" % (xtitle, ytitle) draw["variable"] = var if selection: draw["selection"] = " && ".join(selection) canv = ROOT.TCanvas(name, name, canvSize[0], canvSize[1]) canv.Draw() canv.Update() canv.SetLogy(logY) stacks = ROOT.THStack(name + "stacks", draw["title"]) overlays = ROOT.THStack(name + "overlays", draw["title"]) do_stack = False do_overlay = False for sample in reversed(sorted(samples)): sampleName = sample["name"] draw["name"] = name + "__" + sampleName draw["weight"] = weights[sampleName] if plotter_config["useTree"]: for option in ["weight"]: draw[option] = "(%s)" % (draw[option]) if plotter_config["useTree"]: if bins: hists[sampleName] = ROOT.TH1F(draw["name"], draw["title"], len(draw["bins"]) - 1, draw["bins"]) else: hists[sampleName] = ROOT.TH1F(draw["name"], draw["title"], n_bins, x_min, x_max) hists[sampleName].Sumw2() else: if cut + foldersTag[sampleName] + region != "": histPath = cut + foldersTag[sampleName] + region + "/" + draw[ "variable"] else: histPath = draw["variable"] hists[sampleName] = files[sampleName].Get(histPath) hists[sampleName].SetName(draw["name"]) hists[sampleName].SetTitle(draw["title"]) if "TH2" in str(hists[sampleName]): hists[sampleName].RebinX(rebin) hists[sampleName].RebinY(rebin) hists[sampleName].GetYaxis().SetTitleOffset(1.5) else: if isinstance(rebin, list): hists[sampleName] = helpers.do_variable_rebinning( hists[sampleName], rebin) elif rebin: hists[sampleName].Rebin(rebin) if x_max: x_min = float(x_min) x_max = float(x_max) hists[sampleName].GetXaxis().SetRangeUser(x_min, x_max) if is_data[sampleName]: hists[sampleName].SetMarkerStyle(20) hists[sampleName].SetMarkerSize(1) if plotter_config["useTree"]: trees[sampleName].Draw("%(variable)s >> %(name)s" % draw, "(%(selection)s) * %(weight)s" % draw, "goff") else: hists[sampleName].Sumw2() hists[sampleName].Scale(float(draw["weight"])) 
hists[sampleName].Draw(options) if rMargin: ROOT.gPad.SetRightMargin(rMargin) # hists[sampleName].Scale(1/hists[sampleName].Integral(0, hists[sampleName].GetNbinsX())) if stack[sampleName]: do_stack = True hists[sampleName].SetFillColor(colors[sampleName]) hists[sampleName].SetLineColor(ROOT.kBlack) hists[sampleName].SetLineWidth(2) stacks.Add(copy.copy(hists[sampleName]), ("ep" if is_data[sampleName] else "hist")) if not plotter_config[ "useTree"] and x_max: #have to set stack xaxis range for zooming because ROOT SUCKS stacks.Draw() stacks.GetXaxis().SetRangeUser(x_min, x_max) if overlay[sampleName]: do_overlay = True hists[sampleName].SetFillColor(0) hists[sampleName].SetLineColor(colors[sampleName]) hists[sampleName].SetLineWidth(3) overlays.Add(copy.copy(hists[sampleName]), ("ep" if is_data[sampleName] else "hist")) print sampleName #print "Integral:",hists[sample].Integral(0, hists[sample].GetNbinsX()+1) print "Integral:", hists[sampleName].Integral() print " Entries:", hists[sampleName].GetEntries() # draw maximum = max([stacks.GetMaximum(), overlays.GetMaximum("nostack")]) maximum = maximum * (20.0 if logY else 1.3) maximum = maximum * (1.2 if plotter_config['ratio'] else 1.0) minimum = max([stacks.GetMinimum(), overlays.GetMinimum("nostack")]) minimum = (minimum / 2 if logY else 0) if do_stack: stacks.SetMaximum(maximum) stacks.SetMinimum(minimum) stacks.Draw() h1stackerror = copy.copy(stacks.GetStack().Last()) h1stackerror.SetName("stat. error") h1stackerror.SetFillColor(ROOT.kGray + 3) h1stackerror.SetFillStyle(3005) h1stackerror.SetMarkerStyle(0) h1stackerror.Draw("SAME,E2") if do_overlay and do_stack: overlays.SetMaximum(maximum) overlays.SetMinimum(minimum) overlays.Draw("nostack,same") elif do_overlay: overlays.SetMaximum(maximum) overlays.SetMinimum(minimum) overlays.Draw("nostack") if plotter_config["data"]: pass if plotter_config["ratio"] and stacks.GetStack(): # numerator definition is a placeholder. # only works if overlay[0]=data. if plotter_config["autoRatio"]: top = overlays.GetHists()[0].Clone("hnew") bottom = stacks.GetStack().Last().Clone("hnew") top.Divide(bottom) ratioBins = [] for bin in range(top.GetSize()): if top.GetBinContent(bin) != 0.0 and top.GetBinContent( bin) * top.GetBinError(bin) < 5.0: ratioBins.append(top.GetBinContent(bin)) if not ratioBins: ratioBins = [0] ratioMin = float(int(min(ratioBins) * 100)) / 100 ratioMax = float(int(max(ratioBins) * 100)) / 100 if ratioMax - ratioMin < 0.1: ratioMin = ratioMin - .05 ratioMax = ratioMax + 0.5 else: ratioMin = 0 ratioMax = 2 if sampleName not in rColors.keys(): rColors[sampleName] = ROOT.kRed ratio = helpers.ratio( name=canv.GetName() + "_ratio", numer=overlays.GetHists().Last(), # AHH KILL ME denom=stacks.GetStack().Last(), min=ratioMin, max=ratioMax, ytitle="Data / pred.", color=rColors[sampleName]) canv.SetFillColorAlpha(1, 0.0) share, top_pad, bottom_pad = helpers.same_xaxis( name=canv.GetName() + "_share", top_canvas=canv, bottom_canvas=ratio, ) canv.SetName(canv.GetName() + "_noratio") share.SetName(share.GetName().replace("_share", "")) canv = share elif plotter_config["ratio"] and not stacks.GetStack(): warn("Want to make ratio plot but dont have stack. 
Skipping ratio.") # stack legend if do_stack or do_overlay: xleg, yleg = 0.79, 0.79 legend = ROOT.TLegend(xleg, yleg, xleg + 0.15, (yleg + 0.125)) if do_overlay: for hist in reversed(overlays.GetHists()): legend.AddEntry(hist, labels[hist.GetName().split("__")[1]], "l") if do_stack: for hist in reversed(stacks.GetHists()): legend.AddEntry(hist, labels[hist.GetName().split("__")[1]], "f") legend.SetBorderSize(0) legend.SetFillColor(0) legend.SetMargin(0.3) legend.SetTextSize(0.03) legend.Draw() legend.Draw() # watermarks xatlas, yatlas = 0.5, 0.90 atlas = ROOT.TLatex(xatlas + 0.02, yatlas, "ATLAS Internal") hh4b = ROOT.TLatex(xatlas + 0.015, yatlas - 0.042, "X #rightarrow HH #rightarrow 4b") lumi = ROOT.TLatex( xatlas, yatlas - 0.10, "#sqrt{s} = 13 TeV, #int L dt = " + plotter_config["lumi"]) watermarks = [atlas, hh4b, lumi] # KS, chi2 if stacks.GetStack(): if plotter_config.get("ks"): print "Getting KS from:", overlays.GetHists().Last( ), stacks.GetStack().Last() kolg, chi2, ndf = helpers.compare( overlays.GetHists().Last(), stacks.GetStack().Last(), ) # AH KILL ME yks = 0.975 ychi2 = 0.975 xks = 0.27 xchi2 = 0.55 ks = ROOT.TLatex(xks, yks, "KS = %5.3f" % (kolg)) if ndf: ch = ROOT.TLatex( xchi2, ychi2, "#chi^{2} / ndf = %.1f / %i = %.3f" % (chi2, ndf, chi2 / ndf)) else: ch = ROOT.TLatex(xchi2, ychi2, "#chi^{2} / ndf = %.1f / %i" % (chi2, ndf)) watermarks += [ks, ch] wmNum = 0 # draw watermarks for wm in watermarks: wm.SetTextAlign(22) if wmNum == 0: wm.SetTextSize(0.04) wm.SetTextFont(72) #elif wmNum == 1: # wm.SetTextSize(0.04) # wm.SetTextFont(62) else: wm.SetTextSize(0.03) wm.SetTextFont(42) wmNum += 1 wm.SetNDC() wm.Draw() canv.SaveAs( os.path.join(plotter_config["directory"], canv.GetName() + ".pdf")) output.cd() canv.Write() canv.Update()
def plot(var, cut, region, plotter_config, **kw): global legend, watermarks, canv, shared # # Un pack som plotter config # foldersTag = plotter_config["foldersTag"] files = plotter_config["files"] samples = plotter_config["samples"] weights = plotter_config["weights"] is_data = plotter_config["is_data"] stack = plotter_config["stack"] colors = plotter_config["colors"] rColors = plotter_config["rColors"] overlay = plotter_config["overlay"] labels = plotter_config["labels"] output = plotter_config["output"] testSampleName = samples[0]["name"] testDirName = foldersTag[testSampleName] if var.find("*") != -1: return listVars(var.replace("*",""),cut+testDirName+region, plotter_config) xtitle = None ytitle = None name = None x_min = None x_max = None if not plotter_config["useTree"]: if cut+foldersTag[testSampleName]+region != "": histPath = cut+foldersTag[testSampleName]+region+"/"+var else: histPath = var testHist = files[samples[0]["name"]].Get(histPath) xtitle = testHist.GetXaxis().GetTitle() ytitle = testHist.GetYaxis().GetTitle() name = testHist.GetName() x_min = testHist.GetXaxis().GetXmin() x_max = testHist.GetXaxis().GetXmax() bins = kw.get('bins' , None) xtitle = kw.get('xtitle' , xtitle) ytitle = kw.get('ytitle' , ytitle) selection = kw.get('selection', None) name = kw.get('name' , name) n_bins = kw.get('n_bins' , None) x_min = kw.get('x_min' , x_min) x_max = kw.get('x_max' , x_max) rebin = kw.get('rebin' , None) logY = kw.get('logY' , None) options = kw.get('options' , '') canvSize = kw.get('canvSize' , [700,700]) rMargin = kw.get('rMargin' , 0) hists = {} draw = {} if bins: draw["bins"] = array.array("d", [float(x) for x in bins]) draw["title"] = ";%s;%s" % (xtitle, ytitle) draw["variable"] = var if selection: draw["selection"] = " && ".join(selection) canv = ROOT.TCanvas(name, name, canvSize[0], canvSize[1]) canv.Draw() canv.Update() canv.SetLogy(logY) stacks = ROOT.THStack(name+"stacks", draw["title"]) overlays = ROOT.THStack(name+"overlays", draw["title"]) do_stack = False do_overlay = False for sample in reversed(sorted(samples)): sampleName = sample["name"] draw["name"] = name+"__"+sampleName draw["weight"] = weights[sampleName] if plotter_config["useTree"]: for option in ["weight"]: draw[option] = "(%s)" % (draw[option]) if plotter_config["useTree"]: if bins: hists[sampleName] = ROOT.TH1F(draw["name"], draw["title"], len(draw["bins"])-1, draw["bins"]) else: hists[sampleName] = ROOT.TH1F(draw["name"], draw["title"], n_bins, x_min, x_max) hists[sampleName].Sumw2() else: if cut+foldersTag[sampleName]+region != "": histPath = cut+foldersTag[sampleName]+region+"/"+draw["variable"] else: histPath = draw["variable"] hists[sampleName] = files[sampleName].Get(histPath) hists[sampleName].SetName(draw["name"]) hists[sampleName].SetTitle(draw["title"]) if "TH2" in str(hists[sampleName]): hists[sampleName].RebinX(rebin) hists[sampleName].RebinY(rebin) hists[sampleName].GetYaxis().SetTitleOffset(1.5) else: if isinstance(rebin,list): hists[sampleName]=helpers.do_variable_rebinning(hists[sampleName], rebin) elif rebin: hists[sampleName].Rebin(rebin) if x_max: x_min = float(x_min) x_max = float(x_max) hists[sampleName].GetXaxis().SetRangeUser(x_min,x_max) if is_data[sampleName]: hists[sampleName].SetMarkerStyle(20) hists[sampleName].SetMarkerSize(1) if plotter_config["useTree"]: trees[sampleName].Draw("%(variable)s >> %(name)s" % draw, "(%(selection)s) * %(weight)s" % draw, "goff") else : hists[sampleName].Sumw2() hists[sampleName].Scale(float(draw["weight"])) hists[sampleName].Draw(options) if 
rMargin: ROOT.gPad.SetRightMargin(rMargin) # hists[sampleName].Scale(1/hists[sampleName].Integral(0, hists[sampleName].GetNbinsX())) if stack[sampleName]: do_stack = True hists[sampleName].SetFillColor(colors[sampleName]) hists[sampleName].SetLineColor(ROOT.kBlack) hists[sampleName].SetLineWidth(2) stacks.Add(copy.copy(hists[sampleName]), ("ep" if is_data[sampleName] else "hist")) if not plotter_config["useTree"] and x_max:#have to set stack xaxis range for zooming because ROOT SUCKS stacks.Draw() stacks.GetXaxis().SetRangeUser(x_min,x_max) if overlay[sampleName]: do_overlay = True hists[sampleName].SetFillColor(0) hists[sampleName].SetLineColor(colors[sampleName]) hists[sampleName].SetLineWidth(3) overlays.Add(copy.copy(hists[sampleName]), ("ep" if is_data[sampleName] else "hist")) print sampleName #print "Integral:",hists[sample].Integral(0, hists[sample].GetNbinsX()+1) print "Integral:",hists[sampleName].Integral() print " Entries:",hists[sampleName].GetEntries() # draw maximum = max([stacks.GetMaximum(), overlays.GetMaximum("nostack")]) maximum = maximum*(20.0 if logY else 1.3) maximum = maximum*(1.2 if plotter_config['ratio'] else 1.0) minimum = max([stacks.GetMinimum(), overlays.GetMinimum("nostack")]) minimum = (minimum/2 if logY else 0) if do_stack: stacks.SetMaximum(maximum) stacks.SetMinimum(minimum) stacks.Draw() h1stackerror = copy.copy(stacks.GetStack().Last()) h1stackerror.SetName("stat. error") h1stackerror.SetFillColor(ROOT.kGray+3) h1stackerror.SetFillStyle(3005) h1stackerror.SetMarkerStyle(0) h1stackerror.Draw("SAME,E2") if do_overlay and do_stack: overlays.SetMaximum(maximum) overlays.SetMinimum(minimum) overlays.Draw("nostack,same") elif do_overlay: overlays.SetMaximum(maximum) overlays.SetMinimum(minimum) overlays.Draw("nostack") if plotter_config["data"]: pass if plotter_config["ratio"] and stacks.GetStack(): # numerator definition is a placeholder. # only works if overlay[0]=data. if plotter_config["autoRatio"]: top = overlays.GetHists()[0].Clone("hnew") bottom = stacks.GetStack().Last().Clone("hnew") top.Divide(bottom) ratioBins = [] for bin in range(top.GetSize()): if top.GetBinContent(bin) != 0.0 and top.GetBinContent(bin)*top.GetBinError(bin) < 5.0: ratioBins.append(top.GetBinContent(bin)) if not ratioBins: ratioBins = [0] ratioMin = float(int(min(ratioBins)*100))/100 ratioMax = float(int(max(ratioBins)*100))/100 if ratioMax-ratioMin < 0.1: ratioMin = ratioMin-.05 ratioMax = ratioMax+0.5 else: ratioMin = 0 ratioMax = 2 if sampleName not in rColors.keys(): rColors[sampleName] = ROOT.kRed ratio = helpers.ratio(name = canv.GetName()+"_ratio", numer = overlays.GetHists().Last(), # AHH KILL ME denom = stacks.GetStack().Last(), min = ratioMin, max = ratioMax, ytitle = "Data / pred.", color = rColors[sampleName] ) canv. SetFillColorAlpha(1, 0.0); share,top_pad, bottom_pad = helpers.same_xaxis(name = canv.GetName()+"_share", top_canvas = canv, bottom_canvas = ratio, ) canv .SetName(canv.GetName()+"_noratio") share.SetName(share.GetName().replace("_share", "")) canv = share elif plotter_config["ratio"] and not stacks.GetStack(): warn("Want to make ratio plot but dont have stack. 
Skipping ratio.") # stack legend if do_stack or do_overlay: xleg, yleg = 0.79, 0.79 legend = ROOT.TLegend(xleg, yleg, xleg+0.15, (yleg+0.125)) if do_overlay: for hist in reversed(overlays.GetHists()): legend.AddEntry(hist, labels[hist.GetName().split("__")[1]], "l") if do_stack: for hist in reversed(stacks.GetHists()): legend.AddEntry(hist, labels[hist.GetName().split("__")[1]], "f") legend.SetBorderSize(0) legend.SetFillColor(0) legend.SetMargin(0.3) legend.SetTextSize(0.03) legend.Draw() legend.Draw() # watermarks xatlas, yatlas = 0.5, 0.90 atlas = ROOT.TLatex(xatlas+0.02, yatlas, "ATLAS Internal") hh4b = ROOT.TLatex(xatlas+0.015, yatlas-0.042, "X #rightarrow HH #rightarrow 4b") lumi = ROOT.TLatex(xatlas, yatlas-0.10, "#sqrt{s} = 13 TeV, #int L dt = "+plotter_config["lumi"]) watermarks = [atlas, hh4b, lumi] # KS, chi2 if stacks.GetStack(): if plotter_config.get("ks"): print "Getting KS from:",overlays.GetHists().Last(),stacks.GetStack().Last() kolg, chi2, ndf = helpers.compare(overlays.GetHists().Last(), stacks.GetStack().Last(), ) # AH KILL ME yks = 0.975 ychi2 = 0.975 xks = 0.27 xchi2 = 0.55 ks = ROOT.TLatex(xks, yks, "KS = %5.3f" % (kolg)) if ndf: ch = ROOT.TLatex(xchi2, ychi2, "#chi^{2} / ndf = %.1f / %i = %.3f" % (chi2, ndf, chi2/ndf)) else: ch = ROOT.TLatex(xchi2, ychi2, "#chi^{2} / ndf = %.1f / %i" % (chi2, ndf)) watermarks += [ks, ch] wmNum = 0 # draw watermarks for wm in watermarks: wm.SetTextAlign(22) if wmNum == 0: wm.SetTextSize(0.04) wm.SetTextFont(72) #elif wmNum == 1: # wm.SetTextSize(0.04) # wm.SetTextFont(62) else: wm.SetTextSize(0.03) wm.SetTextFont(42) wmNum+=1 wm.SetNDC() wm.Draw() canv.SaveAs(os.path.join(plotter_config["directory"], canv.GetName()+".pdf")) output.cd() canv.Write() canv.Update()
# question 41
from array import array
a = array("I", (12, 25))
print("Array buffer start address in memory and number of elements.")
print(a.buffer_info())

# In[29]:

# problem 42
import array
import binascii
a = array.array('i', [1, 2, 3, 4, 5, 6])
print("Original array:")
print('A1:', a)
bytes_array = a.tobytes()
print('Array of bytes:', binascii.hexlify(bytes_array))

# In[30]:

# problem 43
import array
import binascii
a = array.array('i', [1, 2, 3, 4, 5, 6])
print("Original array:")
print('A1:', a)
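# Problem 43 above is cut off; a minimal round-trip sketch in the same spirit
# (assumed, not the original exercise text): serialise an array with tobytes() and
# rebuild an equal array with frombytes().
import array
import binascii

a = array.array('i', [1, 2, 3, 4, 5, 6])
raw = a.tobytes()
print('Bytes:', binascii.hexlify(raw))

b = array.array('i')
b.frombytes(raw)
print('Rebuilt:', b)
print('Equal:', a == b)   # True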
legend.cd()
ar_l = dy_l / dx_l
# gap_ = 0.09/ar_l
gap_ = 1. / (n_ + 1)
bwx_ = 0.12
bwy_ = gap_ / 1.5
x_l = [1.2 * bwx_]
# y_l = [1-(1-0.10)/ar_l]
y_l = [1 - gap_]
ex_l = [0]
ey_l = [0.04 / ar_l]
# the lists must be converted to array.array buffers for the TGraphErrors constructor
x_l = array.array("f", x_l)
ex_l = array.array("f", ex_l)
y_l = array.array("f", y_l)
ey_l = array.array("f", ey_l)
gr_l = rt.TGraphErrors(1, x_l, y_l, ex_l, ey_l)
rt.gStyle.SetEndErrorSize(0)
gr_l.SetMarkerSize(0.9)
gr_l.Draw("0P")
latex.SetTextFont(42)
latex.SetTextAngle(0)
latex.SetTextColor(rt.kBlack)
latex.SetTextSize(0.25)
latex.SetTextAlign(12)
floats.tofile(fp)  # write the binary data to the file
fp.close()
floats2 = array('d')  # initialise an empty array of type 'd'
fp = open('floats.bin', 'rb')
floats2.fromfile(fp, 10**7)  # read from the file to fill the array
fp.close()
floats2[-1]
# ===================================================================================
# bytes
# Two built-in types for binary sequences: the immutable bytes type and the mutable bytearray.
# Both support every str method except those that do formatting and a few that depend
# on Unicode data; e.g. endswith(), replace(), strip(), translate(), upper() all work.
# Regular expressions work on binary sequences as well.
b = bytes(array.array('h', (x for x in range(5))))  # build a bytes object from an iterable of machine values
bytes.fromhex('31 4B CE')  # build by parsing pairs of hex digits
cafe = bytes('cafe', encoding='utf_8')  # encoding is only needed for a str argument
cafe[0]   # 99
cafe[:1]  # b'c'
cafe_arr = bytearray(cafe)
cafe_arr[:-1]  # a slice of a bytearray is still a bytearray
# ===================================================================================
# memoryview
# Like a pointer: shares the underlying memory without copying the object.
numbers = array.array('h', [-2, -1, 0, 1, 2])
memv = memoryview(numbers)
len(memv)
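# A short runnable sketch of the memoryview point above: the view shares the array's
# buffer, so writing through a cast view changes the original array (values are toys;
# the 1024 result assumes a little-endian machine).
import array

numbers = array.array('h', [-2, -1, 0, 1, 2])
memv = memoryview(numbers)
print(len(memv))          # 5
print(memv[0])            # -2

memv_oct = memv.cast('B')   # reinterpret the same buffer as unsigned bytes
memv_oct[5] = 4             # set the high byte of the third 16-bit element
print(numbers)              # array('h', [-2, -1, 1024, 1, 2])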
import array

# variable-width bin edges: 0.2-wide bins up to 4.0, 0.4-wide bins up to 8.0,
# then progressively coarser bins out to 100
rebins = [round(x * 0.1, 1) for x in range(0, 40, 2)]
for x in range(40, 81, 4):
    rebins.append(round(x * 0.1, 1))
rebins.append(9.)
rebins.append(10.)
rebins.append(12.)
rebins.append(15.)
rebins.append(20.)
rebins.append(30.)
rebins.append(40.)
rebins.append(50.)
rebins.append(70.)
rebins.append(100.)
bins_array = array.array('d', rebins)


class MassFit:
    def __init__(self, year):
        self.lumi = 3.2 if year == 2015 else 33.1
        # label
        self.beam_condition = '#sqrt{s} = 13 TeV, L = ' + str(self.lumi) + ' fb^{-1}'
        # legend
        self.x_min = 0.5
        self.x_max = 0.85
        self.y_min = 0.60
        self.y_max = 0.80
        # normalisation region mDV > 6 GeV
        self.m_cut = 10
        # 2nd control region
        self.m_cut2 = 8
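# A minimal sketch (assuming PyROOT; histogram name, fill values, and the short edge
# list are made up) of how an edge buffer like bins_array above is typically used:
# ROOT's variable-bin TH1D constructor takes the number of bins and the array of edges.
import array
import ROOT

edges = array.array('d', [0.0, 2.0, 4.0, 8.0, 15.0, 50.0, 100.0])  # toy edges standing in for bins_array
h_mass = ROOT.TH1D("h_mDV_demo", ";m_{DV} [GeV];Vertices", len(edges) - 1, edges)
for value in (0.3, 1.7, 4.2, 11.0, 35.0):   # toy fill values
    h_mass.Fill(value)
print(h_mass.GetNbinsX())   # 6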
def controlPlots(datasets): normData,normEWK,normFactorisedData,normFactorisedEWK=normalisation() norm_inc,normEWK_inc = normalisationInclusive() hmet = [] hmetb = [] effArray = [] effErrArray = [] hmetbveto = [] hmtBtag = [] hmtBveto = [] hmtNoMetBtag = [] hmtNoMetBveto = [] effBvetoArray = [] effErrBvetoArray = [] effArrayMt= [] effErrArrayMt= [] effArrayMtNoMet= [] effErrArrayMtNoMet= [] ## histograms in bins, normalisation and substraction of EWK contribution ## mt with 2dim deltaPhi cut for ptbin in ptbins: ### MET mmt_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MET_InvertedTauIdJets"+ptbin)]) mmt_tmp._setLegendStyles() mmt_tmp._setLegendLabels() mmt_tmp.histoMgr.setHistoDrawStyleAll("P") mmt_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmt = mmt_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmt.Scale(normData[ptbin]) # hmt.append(mt) mmtEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MET_InvertedTauIdJets"+ptbin)]) mmtEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtEWK_tmp._setLegendStyles() mmtEWK_tmp._setLegendLabels() mmtEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtEWK = mmtEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtEWK.Scale(normEWK[ptbin]) mmt.Add(mmtEWK, -1) hmet.append(mmt) ### MET with btagging mmtb_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MET_InvertedTauIdBtag"+ptbin)]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MET_InvertedTauIdBtag"+ptbin)]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = mmtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmetb.append(mmtb) eff = mmtb.Integral()/mmt.Integral() ereff = sqrt(eff*(1-eff)/mmt.Integral()) print " pt bin ", ptbin, " btag efficiency from MET = ",eff, " error ",ereff effArray.append(eff) effErrArray.append(ereff) ### MET with bveto mmtbveto_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MET_InvertedTauIdBveto"+ptbin)]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MET_InvertedTauIdBveto"+ptbin)]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() 
mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmetbveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral()/mmtbveto.Integral() ereff = sqrt(eff*(1-eff)/mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from MET = ",eff, " error ",ereff effBvetoArray.append(eff) effErrBvetoArray.append(ereff) ## with MT distribution if False: ### no MET cut mmtb_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MTInvertedTauIdBtagNoMetCut"+ptbin)]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MTInvertedTauIdBtagNoMetCut"+ptbin)]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = mmtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmtNoMetBtag.append(mmtb) ### MET with bvet mmtbveto_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MTInvertedTauIdBvetoNoMetCut"+ptbin)]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MTInvertedTauIdBvetoNoMetCut"+ptbin)]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmtNoMetBveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral()/mmtbveto.Integral() ereff = sqrt(eff*(1-eff)/mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from mt, no met cut = ",eff, " error ",ereff effArrayMtNoMet.append(eff) effErrArrayMtNoMet.append(ereff) ############################################# ### with MET cut mmtb_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MTInvertedTauIdBtag"+ptbin)]) mmtb_tmp._setLegendStyles() mmtb_tmp._setLegendLabels() mmtb_tmp.histoMgr.setHistoDrawStyleAll("P") mmtb_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtb = mmtb_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtb.Scale(normData[ptbin]) # hmt.append(mt) mmtbEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MTInvertedTauIdBtag"+ptbin)]) mmtbEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbEWK_tmp._setLegendStyles() mmtbEWK_tmp._setLegendLabels() mmtbEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbEWK = 
mmtbEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbEWK.Scale(normEWK[ptbin]) mmtb.Add(mmtbEWK, -1) hmtBtag.append(mmtb) mmtbveto_tmp = plots.PlotBase([datasets.getDataset("Data").getDatasetRootHisto("Inverted/MTInvertedTauIdBveto"+ptbin)]) mmtbveto_tmp._setLegendStyles() mmtbveto_tmp._setLegendLabels() mmtbveto_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbveto_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbveto = mmtbveto_tmp.histoMgr.getHisto("Data").getRootHisto().Clone() mmtbveto.Scale(normData[ptbin]) # hmt.append(mt) mmtbvetoEWK_tmp = plots.PlotBase([datasets.getDataset("EWK").getDatasetRootHisto("Inverted/MTInvertedTauIdBveto"+ptbin)]) mmtbvetoEWK_tmp.histoMgr.normalizeMCToLuminosity(datasets.getDataset("Data").getLuminosity()) mmtbvetoEWK_tmp._setLegendStyles() mmtbvetoEWK_tmp._setLegendLabels() mmtbvetoEWK_tmp.histoMgr.setHistoDrawStyleAll("P") mmtbvetoEWK_tmp.histoMgr.forEachHisto(lambda h: h.getRootHisto().Rebin(20)) mmtbvetoEWK = mmtbvetoEWK_tmp.histoMgr.getHisto("EWK").getRootHisto().Clone() mmtbvetoEWK.Scale(normEWK[ptbin]) mmtbveto.Add(mmtbvetoEWK, -1) hmtBveto.append(mmtbveto) ## normalization mT(btag/bveto) eff = mmtb.Integral()/mmtbveto.Integral() ereff = sqrt(eff*(1-eff)/mmtbveto.Integral()) print " pt bin ", ptbin, " btag/bveto efficiency from mt = ",eff, " error ",ereff effArrayMt.append(eff) effErrArrayMt.append(ereff) ## sum histo bins met = hmet[0].Clone("met") met.SetName("MET") met.SetTitle("Inverted tau Met") met.Reset() print "check met",met.GetEntries() for histo in hmet: met.Add(histo) metb = hmetb[0].Clone("met") metb.SetName("MET") metb.SetTitle("Inverted tau Met") metb.Reset() print "check met btagging",metb.GetEntries() for histo in hmetb: metb.Add(histo) metbveto = hmetbveto[0].Clone("met") metbveto.SetName("METbveto") metbveto.SetTitle("Inverted tau Met") metbveto.Reset() print "check met bveto",metbveto.GetEntries() for histo in hmetbveto: metbveto.Add(histo) ## with MT if False: mtNoMetBtag = hmtNoMetBtag[0].Clone("mt") mtNoMetBtag.SetName("MET") mtNoMetBtag.SetTitle("Inverted tau Met") mtNoMetBtag.Reset() print "check MT btagging",mtNoMetBtag.GetEntries() for histo in hmtNoMetBtag: mtNoMetBtag.Add(histo) mtNoMetBveto = hmtNoMetBveto[0].Clone("mt") mtNoMetBveto.SetName("MET") mtNoMetBveto.SetTitle("Inverted tau Met") mtNoMetBveto.Reset() print "check MT bveto",mtNoMetBveto.GetEntries() for histo in hmtNoMetBveto: mtNoMetBveto.Add(histo) mtBtag = hmtBtag[0].Clone("mt") mtBtag.SetName("MET") mtBtag.SetTitle("Inverted tau Met") mtBtag.Reset() print "check MT btagging",mtBtag.GetEntries() for histo in hmtBtag: mtBtag.Add(histo) mtBveto = hmtBveto[0].Clone("mt") mtBveto.SetName("MET") mtBveto.SetTitle("Inverted tau Met") mtBveto.Reset() print "check MT bveto",mtBveto.GetEntries() for histo in hmtBveto: mtBveto.Add(histo) ########################################## ## plotting invertedQCD = InvertedTauID() invertedQCD.setLumi(datasets.getDataset("Data").getLuminosity()) ### effisiency as a function of MET metWithBtagging = metb.Clone("MET") metWithBtagging.Divide(metbveto) BtaggingEffVsMet = metWithBtagging.Clone("Eff") invertedQCD.setLabel("BtagToBvetoEffVsMet") invertedQCD.mtComparison(BtaggingEffVsMet, BtaggingEffVsMet,"BtagToBvetoEffVsMet") if False: ### effisiency as a function of MT mtWithBtagging = mtNoMetBtag.Clone("MT") mtWithBtagging.Divide(mtNoMetBveto) BtaggingEffNoMetVsMt = mtWithBtagging.Clone("Eff") invertedQCD.setLabel("BtagToBvetoEffNoMetVsMt") invertedQCD.mtComparison(BtaggingEffNoMetVsMt, 
BtaggingEffNoMetVsMt,"BtagToBvetoEffNoMetVsMt") ### effisiency as a function of MT mtWithBtagging = mtBtag.Clone("MT") mtWithBtagging.Divide(mtBveto) BtaggingEffVsMt = mtWithBtagging.Clone("Eff") invertedQCD.setLabel("BtagToBvetoEffVsMt") invertedQCD.mtComparison(BtaggingEffVsMt, BtaggingEffVsMt,"BtagToBvetoEffVsMt") # efficiency metb/met metbtag = metb.Clone("metb") metnobtag = met.Clone("met") metbtag.Divide(metnobtag) invertedQCD.setLabel("BtagEffVsMet") invertedQCD.mtComparison(metbtag, metbtag,"BtagEffVsMet") # efficiency metb/metbveto metbtag = metb.Clone("metb") metbjetveto = metbveto.Clone("met") invertedQCD.setLabel("BtagToBvetoEfficiency") invertedQCD.mtComparison(metbtag, metbjetveto,"BtagToBvetoEfficiency") ### Create and customise TGraph cEff = TCanvas ("Efficiency", "Efficiency", 1) cEff.cd() ptbin_error = array.array("d",[5, 5, 5, 5, 10, 10 ,30]) ptbin = array.array("d",[45, 55, 65, 75, 90, 110 ,150]) if False: ## no MET cut cEff = TCanvas ("btaggingEffNoMet", "btaggingEffNoMet", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",effArrayMtNoMet),ptbin_error,array.array("d",effErrArrayMtNoMet)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5,0.8,"No MET cut" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btagToBvetoEffNoMetVsPtTau_mt.png") ## with MET cut cEff = TCanvas ("btaggingEff", "btaggingEff", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",effArrayMt),ptbin_error,array.array("d",effErrArrayMt)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5,0.8,"After MET cut" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btagToBvetoEffVsPtTau_mt.png") ## no MET cut cEff = TCanvas ("btaggingEffNoMet", "btaggingEffNoMet", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",effBvetoArray),ptbin_error,array.array("d",effErrBvetoArray)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("N_{b tagged}/N_{b veto}") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.2,0.88,"All selection cuts") tex1.SetNDC() tex1.SetTextSize(22) # tex1.Draw() tex2 = ROOT.TLatex(0.5,0.8,"No MET cut" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btagToBvetoEffNoMetVsPtTau.png") ## with MET cut cEff = TCanvas ("btaggingEfficiency", 
"btaggingEfficiency", 1) cEff.cd() graph = TGraphErrors(7, ptbin, array.array("d",effArray),ptbin_error,array.array("d",effErrArray)) graph.SetMaximum(0.25) graph.SetMinimum(0.0) graph.SetMarkerStyle(kFullCircle) graph.SetMarkerColor(kBlue) graph.SetMarkerSize(1) graph.GetYaxis().SetTitle("b-tagging efficiency") graph.GetXaxis().SetTitle("p_{T}^{#tau jet} [GeV]") ### Re-draw graph and update canvas and gPad graph.Draw("AP") tex4 = ROOT.TLatex(0.2,0.955,"8 TeV 19.6 fb^{-1} CMS preliminary") tex4.SetNDC() tex4.SetTextSize(20) tex4.Draw() tex1 = ROOT.TLatex(0.4,0.85,"Inverted #tau identification") tex1.SetNDC() tex1.SetTextSize(22) tex1.Draw() tex2 = ROOT.TLatex(0.4,0.78,"At least 3 jets" ) tex2.SetNDC() tex2.SetTextSize(24) tex2.Draw() cEff.Update() cEff.SaveAs("btaggingEffVsPtTau.png") ##################################3 fOUT = open("btaggingFactors","w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n"%os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingFactors = {\n") i = 0 while i < len(effArray): line = " \"" + ptbins[i] + "\": " + str(effArray[i]) if i < len(effArray) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "B-tagging efficiensies written in file","btaggingFactors" fOUT = open("btaggingToBvetoFactors.py","w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n"%os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingToBvetoFactors = {\n") i = 0 while i < len(effBvetoArray): line = " \"" + ptbins[i] + "\": " + str(effBvetoArray[i]) if i < len(effBvetoArray) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "BtaggingToBveto efficiensies written in file","btaggingToBvetoFactors" fOUT = open("btaggingToBvetoAfterMetFactors.py","w") # now = datetime.datetime.now() # fOUT.write("# Generated on %s\n"%now.ctime()) fOUT.write("# by %s\n"%os.path.basename(sys.argv[0])) fOUT.write("\n") fOUT.write("btaggingToBvetoAfterMetFactors = {\n") i = 0 while i < len(effArrayMt): line = " \"" + ptbins[i] + "\": " + str(effArrayMt[i]) if i < len(effArrayMt) - 1: line += "," line += "\n" fOUT.write(line) i = i + 1 fOUT.write("}\n") fOUT.close() print "BtaggingToBvetoAfterMet efficiensies written in file","btaggingToBvetoFactors"
path = "/nfs/dust/cms/user/kschweig/JetRegression/trees0908/BDTTraining/ttHbb/*_1_*nominal*.root"

inputtree = ROOT.TChain("MVATree")
for f in sys.argv[2:]:
    inputtree.Add(f)

# bind each input branch to a one-element array.array('f') buffer
BDTvars_input = {}
initialized = []
for variable in evt_vars_noreg + evt_vars_reg + common5_input + reg_common5_input:
    if variable not in initialized:
        # print variable
        BDTvars_input.update({variable: array.array('f', [0])})
        inputtree.SetBranchAddress(variable, BDTvars_input[variable])
        initialized.append(variable)

outputfile.cd()
OutputTree = ROOT.TTree("MVATree", "MVATree")

E_Odd = array.array('f', [0])
E_Weight = array.array('f', [0])
P_Weight = array.array('f', [0])
P69_Weight = array.array('f', [0])
C_Weight = array.array('f', [0])
LSF = array.array('f', [0])
eSFGFS = array.array('f', [0])
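# The snippet above stops before the output branches are actually created; a minimal
# sketch (assumed, not from the original script; names are toys) of the usual pattern:
# bind an array.array('f', [0]) buffer to the output tree with Branch(), update the
# buffer in the event loop, and call Fill().
import array
import ROOT

outfile = ROOT.TFile("toy_output.root", "RECREATE")
out_tree = ROOT.TTree("MVATree", "MVATree")

weight = array.array('f', [0.])
out_tree.Branch("Weight", weight, "Weight/F")   # "/F" declares a 32-bit float leaf

for i in range(10):            # toy event loop
    weight[0] = 0.1 * i        # write into the shared buffer
    out_tree.Fill()

out_tree.Write()
outfile.Close()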