def createPlots(plotopts, rootopts):
    from ROOT import TH1D, TProfile, TH2D, TProfile2D
    plots = {}
    # creating histos
    for plotopt in plotopts:
        plots[plotopt] = {}
        for rootopt in rootopts:
            if plotopt.profile:
                if plotopt.i2d:
                    plot = TProfile2D(
                        str(hash(plotopt)) + str(hash(rootopt)),
                        plotopt.display_name,
                        plotopt.nbins[0], 0, 0,
                        plotopt.nbins[1], 0, 0)
                else:
                    plot = TProfile(
                        str(hash(plotopt)) + str(hash(rootopt)),
                        plotopt.display_name,
                        plotopt.nbins, 0, 0)
            else:
                if plotopt.i2d:
                    plot = TH2D(
                        str(hash(plotopt)) + str(hash(rootopt)),
                        plotopt.display_name,
                        plotopt.nbins[0], 0, 0,
                        plotopt.nbins[1], 0, 0)
                else:
                    plot = TH1D(
                        str(hash(plotopt)) + str(hash(rootopt)),
                        plotopt.display_name,
                        plotopt.nbins, 0, 0)
            plot.SetBuffer(1000000)
            plots[plotopt][rootopt] = plot
    return plots
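# Hypothetical usage sketch for createPlots (not part of the original source):
# the plotopt/rootopt option objects and their attributes (profile, i2d, nbins,
# display_name) are assumed here purely to illustrate the expected interface.
from collections import namedtuple

PlotOpt = namedtuple('PlotOpt', ['profile', 'i2d', 'nbins', 'display_name'])
RootOpt = namedtuple('RootOpt', ['treename'])

example_plotopts = [
    PlotOpt(profile=True, i2d=False, nbins=100, display_name='mean value vs x'),     # -> TProfile
    PlotOpt(profile=False, i2d=True, nbins=(50, 50), display_name='occupancy map'),  # -> TH2D
]
example_rootopts = [RootOpt(treename='events')]

# plots is a dict of dicts keyed by (plotopt, rootopt); both must be hashable
example_plots = createPlots(example_plotopts, example_rootopts)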
def __init__(self, run=22011, sourceDir='./', outputDir=''):
    print 'Creating AnalyseTransparentArea instance for run:', run
    self.run = run
    self.sourceDir = sourceDir
    self.outputDir = outputDir if outputDir != '' else '{s}/{r}/transparentAnalysis/'.format(
        r=self.run, s=self.sourceDir)
    self.rootFile1 = TFile(
        sourceDir + '/{r}/transparentAnalysis/root/hLandau1HighestHitProfile_1OutOf10.{r}.root'.format(r=self.run))
    self.rootFile2 = TFile(
        sourceDir + '/{r}/transparentAnalysis/root/hLandau2HighestHitProfile_2OutOf10.{r}.root'.format(r=self.run))
    self.histo2D_1 = TProfile2D(
        self.rootFile1.Get('cRoot_hLandau1HighestHitProfile_1OutOf10').GetPrimitive(
            'hLandau1HighestHitProfile_1OutOf10'))
    self.histo2D_2 = TProfile2D(
        self.rootFile2.Get('cRoot_hLandau2HighestHitProfile_2OutOf10').GetPrimitive(
            'hLandau2HighestHitProfile_2OutOf10'))
    self.histo2D_1_fid = 0
    self.histo2D_2_fid = 0
    self.histo2D_1.GetXaxis().SetTitle('X/\mu m')
    self.histo2D_1.GetYaxis().SetTitle('Y/\mu m')
    self.histo2D_2.GetXaxis().SetTitle('X/\mu m')
    self.histo2D_2.GetYaxis().SetTitle('Y/\mu m')
    self.histo1D_1 = 0
    self.histo1D_2 = 0
    self.sel_old = {
        'x_low': self.histo2D_1.GetXaxis().GetXmin(),
        'x_high': self.histo2D_1.GetXaxis().GetXmax(),
        'y_low': self.histo2D_1.GetYaxis().GetXmin(),
        'y_high': self.histo2D_1.GetYaxis().GetXmax()
    }
    self.fidcut_1 = 0
    self.fidcut_2 = 0
    self.fidpoints = []
    self.nameFid = ''
    if not os.path.isdir('{dir}/Plots'.format(dir=self.outputDir)):
        os.makedirs('{dir}/Plots'.format(dir=self.outputDir))
    if not os.path.isdir('{dir}/root'.format(dir=self.outputDir)):
        os.makedirs('{dir}/root'.format(dir=self.outputDir))
    gStyle.SetPalette(55)
    gStyle.SetNumberContours(999)
    self.bla = []
def draw_signal_map(self):
    h = TProfile2D('psm', 'Signal Map', *self.Bins)
    for x, y, v in zip(self.Data['clusters']['column'],
                       self.Data['clusters']['row'],
                       self.Data['clusters']['vcal']):
        h.Fill(x, y, v)
    format_histo(h, x_tit='column', y_tit='row', y_off=1.2, z_tit='VCAL', z_off=1.6, stats=0)
    self.Plotter.draw_histo(h, lm=.13, rm=.18, draw_opt='colz', x=1.17)
def create2Dmap(varname, params, title):
    # use the slices to build a list of bin edges
    ptbins = [item[0] for item in params["ptSlices"]]
    etabins = [item[0] for item in params["etaSlices"]]
    ptbins.append(params["ptSlices"][-1][1])
    etabins.append(params["etaSlices"][-1][1])

    ptbinsext = []
    for iedge in range(0, len(ptbins) - 1):
        binwidth = ptbins[iedge + 1] - ptbins[iedge]
        if ptbins[iedge + 1] >= 9e4:
            ptbinsext.append(ptbins[iedge])
            continue  # don't subdivide the overflow bin
        nsplits = params["sliceSplit"]
        if ptbins[iedge + 1] >= 150 or ptbins[iedge] == 100:
            nsplits = 2
        for j in range(0, nsplits):
            # 0, 1, 2 if sliceSplit = 3
            # low, low+0*width/3, low+width/3, low+2*width/3
            ptbinsext.append(ptbins[iedge] + int(j * binwidth / nsplits))
    ptbinsext.append(ptbins[-1])

    etabinsext = []
    for iedge in range(0, len(etabins) - 1):
        binwidth = etabins[iedge + 1] - etabins[iedge]
        if etabins[iedge + 1] >= 9e4:
            etabinsext.append(etabins[iedge])
            continue  # don't subdivide the overflow bin
        nsplits = params["sliceSplit"]
        if 'electron' in varname and etabins[iedge] == 1.5:
            nsplits = 7
        for j in range(0, nsplits):
            # 0, 1, 2 if sliceSplit = 3
            # low, low+0*width/3, low+width/3, low+2*width/3
            etabinsext.append(etabins[iedge] + j * binwidth / nsplits)
    etabinsext.append(etabins[-1])

    # arrays for ROOT
    xbins = array('d', ptbinsext)
    ybins = array('d', etabinsext)
    if "efficiency" in varname:
        h = TProfile2D(varname, title, len(xbins) - 1, xbins, len(ybins) - 1, ybins)
        h.GetXaxis().SetTitle("tau p_{T} [GeV]")
        h.GetYaxis().SetTitle("tau #eta")
        h.Sumw2()
        return h
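# Hypothetical usage sketch for the tau-efficiency create2Dmap above (not part
# of the original source): the layout of the params dict ("ptSlices" and
# "etaSlices" as lists of (low, high) edges plus "sliceSplit") is inferred from
# how the function reads it; the numbers are placeholders.
example_params = {
    "ptSlices": [(20., 40.), (40., 60.), (60., 100.), (100., 1e5)],  # (low, high) in GeV, last slice open-ended
    "etaSlices": [(0.0, 0.8), (0.8, 1.5), (1.5, 2.5)],
    "sliceSplit": 3,  # sub-bins per slice
}
# only varnames containing "efficiency" return a TProfile2D in this version
h_eff = create2Dmap("tau_efficiency_pt_eta", example_params, "tau ID efficiency")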
def test_fill_profile():
    np.random.seed(0)
    w1D = np.empty(1E6)
    w1D.fill(2.)
    data1D = np.random.randn(1E6, 2)
    data2D = np.random.randn(1E6, 3)
    data3D = np.random.randn(1E4, 4)

    a = TProfile('th1d', 'test', 1000, -5, 5)
    rnp.fill_profile(a, data1D)
    assert_true(a.Integral() != 0)

    a_w = TProfile('th1dw', 'test', 1000, -5, 5)
    rnp.fill_profile(a_w, data1D, w1D)
    assert_true(a_w.Integral() != 0)
    assert_equal(a_w.Integral(), a.Integral())

    b = TProfile2D('th2d', 'test', 100, -5, 5, 100, -5, 5)
    rnp.fill_profile(b, data2D)
    assert_true(b.Integral() != 0)

    c = TProfile3D('th3d', 'test', 10, -5, 5, 10, -5, 5, 10, -5, 5)
    rnp.fill_profile(c, data3D)
    assert_true(c.Integral() != 0)

    # array and weights lengths do not match
    assert_raises(ValueError, rnp.fill_profile, c, data3D, np.ones(10))

    # weights is not 1D
    assert_raises(ValueError, rnp.fill_profile, c, data3D,
                  np.ones((data3D.shape[0], 1)))

    # array is not 2D
    assert_raises(ValueError, rnp.fill_profile, c, np.ones(10))

    # length of second axis is not one more than dimensionality of the profile
    for h in (a, b, c):
        assert_raises(ValueError, rnp.fill_profile, h, np.random.randn(1E4, 5))

    # wrong type
    assert_raises(TypeError, rnp.fill_profile,
                  TH1D("test", "test", 1, 0, 1), data1D)
def initialize(self):
    basepath = self.getProperty("Basepath")
    doJpsiee = self.getProperty("DoJpisee")
    sg = self.getStoreGateSvc()
    #et_bins = zee_etbins
    eta_bins = default_etabins
    nvtx_bins.extend(high_nvtx_bins)
    #eta_bins = [0,0.6,0.8,1.15,1.37,1.52,1.81,2.01,2.37,2.47]
    et_bins = jpsiee_etbins if doJpsiee else [4., 7., 10., 15., 20., 25., 30., 35., 40., 45., 50., 60., 80., 150.]

    for group in self.__groups:
        # Get the chain object
        chain = group.chain()
        for dirname in (self.__triggerLevels
                        if (type(chain) is Chain or type(chain) is TDT)
                        else ['Selector']):
            sg.mkdir(basepath + '/' + chain.name() + '/Efficiency/' + dirname)
            sg.addHistogram(TH1F('et', 'E_{T} distribution;E_{T};Count', len(et_bins) - 1, np.array(et_bins)))
            sg.addHistogram(TH1F('eta', '#eta distribution;#eta;Count', len(eta_bins) - 1, np.array(eta_bins)))
            sg.addHistogram(TH1F("phi", "#phi distribution; #phi ; Count", 20, -3.2, 3.2))
            sg.addHistogram(TH1F('mu', '<#mu> distribution;<#mu>;Count', 20, 0, 100))
            sg.addHistogram(TH1F('nvtx', 'N_{vtx} distribution;N_{vtx};Count', len(nvtx_bins) - 1, np.array(nvtx_bins)))
            sg.addHistogram(TH1F('match_et', 'E_{T} matched distribution;E_{T};Count', len(et_bins) - 1, np.array(et_bins)))
            sg.addHistogram(TH1F('match_eta', '#eta matched distribution;#eta;Count', len(eta_bins) - 1, np.array(eta_bins)))
            sg.addHistogram(TH1F("match_phi", "#phi matched distribution; #phi ; Count", 20, -3.2, 3.2))
            sg.addHistogram(TH1F('match_mu', '<#mu> matched distribution;<#mu>;Count', 20, 0, 100))
            sg.addHistogram(TH1F('match_nvtx', 'N_{vtx} matched distribution;N_{vtx};Count', len(nvtx_bins) - 1, np.array(nvtx_bins)))
            sg.addHistogram(TProfile("eff_et", "#epsilon(E_{T}); E_{T} ; Efficiency", len(et_bins) - 1, np.array(et_bins)))
            sg.addHistogram(TProfile("eff_eta", "#epsilon(#eta); #eta ; Efficiency", len(eta_bins) - 1, np.array(eta_bins)))
            sg.addHistogram(TProfile("eff_phi", "#epsilon(#phi); #phi ; Efficiency", 20, -3.2, 3.2))
            sg.addHistogram(TProfile("eff_mu", "#epsilon(<#mu>); <#mu> ; Efficiency", 20, 0, 100))
            sg.addHistogram(TProfile("eff_nvtx", "#epsilon(N_{vtx}); N_{vtx} ; Efficiency", len(nvtx_bins) - 1, np.array(nvtx_bins)))
            sg.addHistogram(TH2F('match_etVsEta', "Passed;E_{T};#eta;Count",
                                 len(et_bins) - 1, np.array(et_bins),
                                 len(eta_bins) - 1, np.array(eta_bins)))
            sg.addHistogram(TH2F('etVsEta', "Total;E_{T};#eta;Count",
                                 len(et_bins) - 1, np.array(et_bins),
                                 len(eta_bins) - 1, np.array(eta_bins)))
            sg.addHistogram(TProfile2D('eff_etVsEta', "Total;E_{T};#eta;Count",
                                       len(et_bins) - 1, np.array(et_bins),
                                       len(eta_bins) - 1, np.array(eta_bins)))

    self.init_lock()
    return StatusCode.SUCCESS
def dqm_getSingleHist_json(server, run, dataset, hist, rootContent=False):
    postfix = "?rootcontent=1" if rootContent else ""
    datareq = urllib2.Request(('%s/jsonfairy/archive/%s/%s/%s%s') %
                              (server, run, dataset, hist, postfix))
    datareq.add_header('User-agent', ident)
    # Get data
    data = eval(re.sub(r"\bnan\b", "0",
                       urllib2.build_opener(X509CertOpen()).open(datareq).read()),
                {"__builtins__": None}, {})
    histo = data['hist']

    # Now convert into real ROOT histogram object
    if 'TH1' in histo['type']:
        # The following assumes a TH1F object
        contents = histo['bins']['content']
        nbins = len(contents)
        xmin = histo['xaxis']['first']['value']
        xmax = histo['xaxis']['last']['value']
        roothist = TH1F(histo['stats']['name'], histo['title'], nbins, xmin, xmax)
        for xx in range(1, nbins + 1):
            roothist.SetBinContent(xx, contents[xx - 1])
            roothist.SetBinError(xx, histo['bins']['error'][xx - 1])
        roothist.SetEntries(histo['stats']['entries'])
        stats = array('d')
        stats.append(histo['stats']['entries'])
        stats.append(histo['stats']['entries'])
        stats.append(histo['stats']['entries'] * histo['stats']['mean']['X']['value'])
        stats.append((histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] +
                      histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) *
                     histo['stats']['entries'])
        roothist.PutStats(stats)
    elif histo['type'] == 'TProfile':
        contents = histo['bins']['content']
        nbins = len(contents)
        xmin = histo['xaxis']['first']['value']
        xmax = histo['xaxis']['last']['value']
        roothist = TProfile(histo['stats']['name'], histo['title'], nbins, xmin, xmax)
        roothist.SetErrorOption("g")
        for xx in range(0, nbins):
            if histo['bins']['error'][xx] != 0:
                ww = 1. / (histo['bins']['error'][xx] * histo['bins']['error'][xx])
            else:
                ww = 0.
            roothist.Fill(xmin + (2 * xx + 1) * ((xmax - xmin) / (nbins * 2.0)), contents[xx], ww)
            # roothist.SetBinContent(xx, contents[xx-1])
            # roothist.SetBinError(xx, histo['bins']['error'][xx-1])
        roothist.SetEntries(histo['stats']['entries'])
        stats = array('d')
        for i in range(0, 6):
            stats.append(i)
        roothist.GetStats(stats)
        stats[0] = histo['stats']['entries']
        stats[1] = histo['stats']['entries']
        stats[2] = histo['stats']['entries'] * histo['stats']['mean']['X']['value']
        stats[3] = ((histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] +
                     histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) *
                    histo['stats']['entries'])
        roothist.PutStats(stats)
    elif 'TH2' in histo['type']:
        contents = histo['bins']['content']
        nbinsx = histo['xaxis']['last']['id']
        xmin = histo['xaxis']['first']['value']
        xmax = histo['xaxis']['last']['value']
        nbinsy = histo['yaxis']['last']['id']
        ymin = histo['yaxis']['first']['value']
        ymax = histo['yaxis']['last']['value']
        roothist = TH2F(histo['stats']['name'], histo['title'],
                        nbinsx, xmin, xmax, nbinsy, ymin, ymax)
        for xx in range(1, nbinsx + 1):
            for yy in range(1, nbinsy + 1):
                roothist.SetBinContent(xx, yy, contents[yy - 1][xx - 1])
        roothist.SetEntries(histo['stats']['entries'])
        stats = array('d')
        stats.append(histo['stats']['entries'])
        stats.append(histo['stats']['entries'])
        stats.append(histo['stats']['entries'] * histo['stats']['mean']['X']['value'])
        stats.append((histo['stats']['rms']['X']['value'] * histo['stats']['rms']['X']['value'] +
                      histo['stats']['mean']['X']['value'] * histo['stats']['mean']['X']['value']) *
                     histo['stats']['entries'])
        stats.append(histo['stats']['entries'] * histo['stats']['mean']['Y']['value'])
        stats.append((histo['stats']['rms']['Y']['value'] * histo['stats']['rms']['Y']['value'] +
                      histo['stats']['mean']['Y']['value'] * histo['stats']['mean']['Y']['value']) *
                     histo['stats']['entries'])
        roothist.PutStats(stats)
    elif histo['type'] == 'TProfile2D':
        contents = histo['bins']['content']
        nbinsx = histo['xaxis']['last']['id']
        xmin = histo['xaxis']['first']['value']
        xmax = histo['xaxis']['last']['value']
        nbinsy = histo['yaxis']['last']['id']
        ymin = histo['yaxis']['first']['value']
        ymax = histo['yaxis']['last']['value']
        roothist = TProfile2D(histo['stats']['name'], histo['title'],
                              nbinsx, xmin, xmax, nbinsy, ymin, ymax)
        for xx in range(0, nbinsx):
            for yy in range(0, nbinsy):
                roothist.Fill(xmin + (2 * xx + 1) * ((xmax - xmin) / (nbinsx * 2.0)),
                              ymin + (2 * yy + 1) * ((ymax - ymin) / (nbinsy * 2.0)), 0, 1)
        for xx in range(1, nbinsx + 1):
            for yy in range(1, nbinsy + 1):
                roothist.SetBinContent(xx, yy, contents[yy - 1][xx - 1])
        roothist.SetEntries(histo['stats']['entries'])
    return roothist
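# Hypothetical usage sketch for dqm_getSingleHist_json (not part of the original
# source): the server URL, run number, dataset and histogram path below are
# placeholders chosen only to show the expected argument shapes.
example_server = 'https://cmsweb.cern.ch/dqm/offline'        # assumed DQM GUI endpoint
example_run = 123456                                          # placeholder run number
example_dataset = '/SomePrimaryDataset/SomeEra/DQMIO'         # placeholder dataset path
example_hist = 'Tracking/TrackParameters/SomePlot'            # placeholder histogram path
h = dqm_getSingleHist_json(example_server, example_run, example_dataset,
                           example_hist, rootContent=True)
print h.GetName(), h.GetEntries()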
def create2DPlots(detector, plot, plotnum, plotmat, dosingledetector=True):
    """Produce the requested plot for the specified detector.

    Function that will plot the requested 2D-@plot for the specified
    @detector. The specified detector could either be a real detector or a
    compound one. The list of available plots are the keys of plots
    dictionary imported from plot_utils.
    """
    #gStyle.Reset()
    #Better to use an underscore.
    plotmat = plotmat.replace(" ", "_")

    if plotmat != "":
        theDirname = ('Images/%s' % plotmat).replace(" ", "")
    else:
        theDirname = 'Images'

    if not checkFile_(theDirname):
        os.mkdir(theDirname)
    if not os.path.isdir(('Images/%s/ZPlusZoom' % plotmat).replace(" ", "")):
        os.mkdir(('Images/%s/ZPlusZoom' % plotmat).replace(" ", ""))
    if not os.path.isdir(('Images/%s/ZMinusZoom' % plotmat).replace(" ", "")):
        os.mkdir(('Images/%s/ZMinusZoom' % plotmat).replace(" ", ""))

    goodToGo, theDetectorFilename = paramsGood_(detector, plot)
    if not goodToGo:
        return

    theDetectorFile = TFile(theDetectorFilename)

    prof2d_X0_det_total = TProfile2D()
    prof2d_X0_det_total.Reset()

    # get TProfiles
    #prof2d_X0_det_total = theDetectorFile.Get('%s' % plots[plot].plotNumber)
    prof2d_X0_det_total = theDetectorFile.Get('%s' % plotnum)
    print "=================================================================="
    print plotnum

    # histos
    prof2d_X0_det_total.__class__ = TProfile2D
    hist_X0_total = prof2d_X0_det_total.ProjectionXY()

    # keep files live forever
    files = []
    if detector in COMPOUNDS.keys() and not dosingledetector:
        #When the loop was:
        #for subDetector in COMPOUNDS[detector][1:]:
        #and the detector was single it never went in the loop and read the single file
        #from above. I alter this to COMPOUNDS[detector] to do the multi material budget plot.
        #This won't affect the single detector due to the alteration in the if above.
        for subDetector in COMPOUNDS[detector]:
            # filenames of single components
            subDetectorFilename = "matbdg_%s.root" % subDetector

            # open file
            if not checkFile_(subDetectorFilename):
                print("Error, missing file %s" % subDetectorFilename)
                continue

            subDetectorFile = TFile(subDetectorFilename)
            files.append(subDetectorFile)
            print("*** Open file... %s" % subDetectorFilename)

            # subdetector profiles
            prof2d_X0_det_total = subDetectorFile.Get('%s' % plots[plot].plotNumber)
            prof2d_X0_det_total.__class__ = TProfile2D

            # add to summary histogram
            hist_X0_total.Add(
                prof2d_X0_det_total.ProjectionXY("B_%s" % prof2d_X0_det_total.GetName()),
                +1.000)

    # properties
    #gStyle.SetPalette(1)
    gStyle.SetStripDecimals(False)

    # Create "null" histo
    minX = 1.03 * prof2d_X0_det_total.GetXaxis().GetXmin()
    maxX = 1.03 * prof2d_X0_det_total.GetXaxis().GetXmax()
    minY = 1.03 * prof2d_X0_det_total.GetYaxis().GetXmin()
    maxY = 1.03 * prof2d_X0_det_total.GetYaxis().GetXmax()

    frame = TH2F("frame", "", 10, minX, maxX, 10, minY, maxY)
    frame.SetMinimum(0.1)
    frame.SetMaximum(10.)
    frame.GetXaxis().SetTickLength(frame.GetXaxis().GetTickLength() * 0.50)
    frame.GetYaxis().SetTickLength(frame.GetXaxis().GetTickLength() / 4.)

    # Ratio
    if plots[plot].iRebin:
        prof2d_X0_det_total.Rebin2D()

    # stack
    hist2dTitle = ('%s %s;%s;%s;%s' % (plots[plot].quotaName, detector,
                                       plots[plot].abscissa, plots[plot].ordinate,
                                       plots[plot].quotaName))

    if dosingledetector:
        hist2d_X0_total = prof2d_X0_det_total
    else:
        hist2d_X0_total = hist_X0_total
    hist2d_X0_total.SetTitle(hist2dTitle)
    frame.SetTitle(hist2dTitle)
    frame.SetTitleOffset(0.5, "Y")

    #If here you put different histomin, histomax in plot_utils you won't see anything
    #for the material plots.
    if plots[plot].histoMin != -1.:
        hist2d_X0_total.SetMinimum(plots[plot].histoMin)
    if plots[plot].histoMax != -1.:
        hist2d_X0_total.SetMaximum(plots[plot].histoMax)

    #
    can2name = "MBCan_2D_%s_%s_%s" % (detector, plot, plotmat)
    can2 = TCanvas(can2name, can2name, 2480 + 248, 580 + 58 + 58)
    can2.SetTopMargin(0.1)
    can2.SetBottomMargin(0.1)
    can2.SetLeftMargin(0.04)
    can2.SetRightMargin(0.06)
    can2.SetFillColor(kWhite)
    gStyle.SetOptStat(0)
    gStyle.SetTitleFillColor(0)
    gStyle.SetTitleBorderSize(0)
    #hist2d_X0_total.SetMaximum(hist2d_X0_total.GetMaximum())

    # Color palette
    # gStyle.SetPalette()#1
    acustompalette()
    ex1 = TExec("ex1", "acustompalette();")
    ex1.Draw()
    #for i in range(100): MyPaletteArray.append(i+1)
    #gStyle.SetPalette(first_color_number);

    # Log?
    can2.SetLogz(plots[plot].zLog)

    # Draw in colors
    #frame.Draw()
    #hist2d_X0_total.Draw("COLZsame") #Dummy draw to create the palette object
    hist2d_X0_total.Draw("COLZ")  #Dummy draw to create the palette object

    # Store
    can2.Update()

    #Aesthetic
    palette = hist2d_X0_total.GetListOfFunctions().FindObject("palette")
    if palette:
        palette.__class__ = TPaletteAxis
        palette.SetX1NDC(0.945)
        palette.SetX2NDC(0.96)
        palette.SetY1NDC(0.1)
        palette.SetY2NDC(0.9)
        palette.GetAxis().SetTickSize(.01)
        palette.GetAxis().SetTitle("")
        if plots[plot].zLog:
            palette.GetAxis().SetLabelOffset(-0.01)

    paletteTitle = TLatex(1.12 * maxX, maxY, plots[plot].quotaName)
    paletteTitle.SetTextAngle(90.)
    paletteTitle.SetTextSize(0.05)
    paletteTitle.SetTextAlign(31)
    paletteTitle.Draw()

    hist2d_X0_total.GetYaxis().SetTickLength(hist2d_X0_total.GetXaxis().GetTickLength() / 4.)
    hist2d_X0_total.GetYaxis().SetTickLength(hist2d_X0_total.GetXaxis().GetTickLength() / 4.)
    hist2d_X0_total.SetTitleOffset(0.5, "Y")
    hist2d_X0_total.GetYaxis().SetTitleOffset(0.45)
    #hist2d_X0_total.GetXaxis().SetTitleOffset(1.15);
    #hist2d_X0_total.GetXaxis().SetNoExponent(True)
    #hist2d_X0_total.GetYaxis().SetNoExponent(True)

    #Add eta labels
    keep_alive = []
    if plots[plot].iDrawEta:
        keep_alive.extend(drawEtaValues())

    can2.Modified()
    hist2d_X0_total.SetContour(255)

    # Store
    can2.Update()
    can2.Modified()

    can2.SaveAs("%s/%s_%s%s.pdf" % (theDirname, detector, plot, plotmat))
    can2.SaveAs("%s/%s_%s%s.png" % (theDirname, detector, plot, plotmat))
    #can2.SaveAs( "%s/%s_%s%s.root" % (theDirname, detector, plot, plotmat))

    #Zoom in a little bit
    if plot == "x_vs_z_vs_Rsum" or plot == "l_vs_z_vs_Rsum" \
       or plot == "x_vs_z_vs_Rsumcos" or plot == "l_vs_z_vs_Rsumcos" \
       or plot == "x_vs_z_vs_Rloc" or plot == "l_vs_z_vs_Rloc" \
       or plot == "x_vs_z_vs_Rloccos" or plot == "l_vs_z_vs_Rloccos":

        #Z+
        #hist2d_X0_total.GetXaxis().SetLimits( 3100., 5200.)
        if dosingledetector:
            hist2d_X0_total.GetXaxis().SetRangeUser(3100., 5400.)
        else:
            hist2d_X0_total.GetXaxis().SetRangeUser(0., 7000.)
        #Do not draw eta values in the zoom case
        keep_alive = []
        #hist2d_X0_total.Draw("COLZ")
        can2.Update()
        can2.Modified()
        can2.SaveAs("%s/%s/%s_%s%s_ZplusZoom.pdf" % (theDirname, "ZPlusZoom", detector, plot, plotmat))
        can2.SaveAs("%s/%s/%s_%s%s_ZplusZoom.png" % (theDirname, "ZPlusZoom", detector, plot, plotmat))

        #Z-
        #hist2d_X0_total.GetXaxis().SetLimits( 3100., 5200.)
        if dosingledetector:
            hist2d_X0_total.GetXaxis().SetRangeUser(-5400., -3100.)
        else:
            hist2d_X0_total.GetXaxis().SetRangeUser(0., -7000.)
        #Do not draw eta values in the zoom case
        keep_alive = []
        #hist2d_X0_total.Draw("COLZ")
        can2.Update()
        can2.Modified()
        can2.SaveAs("%s/%s/%s_%s%s_ZminusZoom.pdf" % (theDirname, "ZMinusZoom", detector, plot, plotmat))
        can2.SaveAs("%s/%s/%s_%s%s_ZminusZoom.png" % (theDirname, "ZMinusZoom", detector, plot, plotmat))

    gStyle.SetStripDecimals(True)
x = 2  # X position
y = 1  # Y position
inx1 = x * mSize + y  # index of hxw matrix element

# pT of the leading jet
x = 1  # X position
y = 1  # Y position
inx2 = x * mSize + y  # index of hxw matrix element

# exclusion matrix
excluded = (inx1, inx2)
print "Excluded cells=", excluded

hhD = TProfile2D("profile", "profile", mSize, 0, mSize, mSize, 0, mSize, 0, 1000)

names = ["MET", "j", "#mu", "e", "#gamma"]
Names1 = []
Names1.append(names[0])
for h in range(1, maxTypes + 1, 1):
    for i in range(1, maxNumber + 1):
        Names1.append(names[h] + "_{" + str(i) + "}")

Names2 = []
for i in range(len(Names1)):
    Names2.append(Names1[i])
Names1 = Names1[::-1]
print Names1

for h in range(mSize):
    for w in range(mSize):
        i1 = h
        i2 = w
tree = TTree("Pk1D", "SDSS 1D Power spectrum Ly-a") zqso,mean_z,mean_reso,mean_SNR,lambda_min,lambda_max,plate,mjd,fiber,\ nb_mask_pix,nb_r,k_r,Pk_r,Pk_raw_r,Pk_noise_r,cor_reso_r,Pk_diff_r = make_tree(tree,nb_bin_max) # control histograms if (args.forest_type == 'Lya'): forest_inf = 1040. forest_sup = 1200. elif (args.forest_type == 'SiIV'): forest_inf = 1270. forest_sup = 1380. elif (args.forest_type == 'CIV'): forest_inf = 1410. forest_sup = 1520. hdelta = TProfile2D('hdelta', 'delta mean as a function of lambda-lambdaRF', 36, 3600., 7200., 16, forest_inf, forest_sup, -5.0, 5.0) hdelta_RF = TProfile('hdelta_RF', 'delta mean as a function of lambdaRF', 320, forest_inf, forest_sup, -5.0, 5.0) hdelta_OBS = TProfile('hdelta_OBS', 'delta mean as a function of lambdaOBS', 1800, 3600., 7200., -5.0, 5.0) hdelta_RF_we = TProfile( 'hdelta_RF_we', 'delta mean weighted as a function of lambdaRF', 320, forest_inf, forest_sup, -5.0, 5.0) hdelta_OBS_we = TProfile( 'hdelta_OBS_we', 'delta mean weighted as a function of lambdaOBS', 1800, 3600., 7200., -5.0, 5.0) hivar = TH1D('hivar', ' ivar ', 10000, 0.0, 10000.) hsnr = TH1D('hsnr', ' snr per pixel ', 100, 0.0, 100.)
def create2Dmap(varname, params, title, dumptcl):
    # use the slices to build a list of bin edges
    ptbins = [item[0] for item in params["ptSlices"]]
    etabins = [item[0] for item in params["etaSlices2D"]]
    ptbins.append(params["ptSlices"][-1][1])
    etabins.append(params["etaSlices2D"][-1][1])

    # set more realistic caps
    if not dumptcl:
        if ptbins[-1] > 5e4:
            ptbins[-1] = ptbins[-2] * 2.  # probably somewhere in 200 -- 4000?
        if etabins[-1] > 5e4:
            etabins[-1] = 5.

    ptbinsext = []
    for iedge in range(0, len(ptbins) - 1):
        # print "ptbins"+str(ptbins)
        binwidth = ptbins[iedge + 1] - ptbins[iedge]
        if ptbins[iedge + 1] >= 9e4:
            ptbinsext.append(ptbins[iedge])
            continue  # don't subdivide the overflow bin
        nsplits = params["sliceSplit"]
        if ptbins[iedge + 1] >= 150 or ptbins[iedge] == 100:
            nsplits = 2
        for j in range(0, nsplits):
            # 0, 1, 2 if sliceSplit = 3
            # low, low+0*width/3, low+width/3, low+2*width/3
            ptbinsext.append(ptbins[iedge] + int(j * binwidth / nsplits))
    ptbinsext.append(ptbins[-1])
    # print ptbinsext

    etabinsext = []
    for iedge in range(0, len(etabins) - 1):
        # print "etabins"+str(etabins)
        binwidth = etabins[iedge + 1] - etabins[iedge]
        if etabins[iedge + 1] >= 9e4:
            etabinsext.append(etabins[iedge])
            continue  # don't subdivide the overflow bin
        nsplits = params["sliceSplit"]
        if 'electron' in varname and etabins[iedge] == 1.5:
            nsplits = 7
        for j in range(0, nsplits):
            # 0, 1, 2 if sliceSplit = 3
            # low, low+0*width/3, low+width/3, low+2*width/3
            etabinsext.append(etabins[iedge] + j * binwidth / nsplits)
    etabinsext.append(etabins[-1])
    # print etabinsext

    # arrays for ROOT
    xbins = array('d', ptbinsext)
    ybins = array('d', etabinsext)
    if "efficiency" in varname:
        h = TProfile2D(varname, title, len(xbins) - 1, xbins, len(ybins) - 1, ybins)
        h.GetXaxis().SetTitle("jet p_{T} [GeV]")
        h.GetYaxis().SetTitle("jet #eta")
        h.Sumw2()
        return h
equality.SetLineColor(kOrange + 10)
equality.DrawLine(h1_p.GetXaxis().GetXmin(), 1, h1_p.GetXaxis().GetXmax(), 1)
gStyle.SetOptStat(0)

files = ['trackTupla.root', 'trackTuplaNewMaterial.root']
destinations = [
    '/afs/cern.ch/work/r/rovere/public/temporary/materialEffects/CurrentGeometry',
    '/afs/cern.ch/work/r/rovere/public/temporary/materialEffects/NewMaterialGeometry'
]

delta_p_xy = []
delta_p_xy.append(
    TProfile2D('Delta_p_Old', 'Delta_p', 240, -120, 120, 240, -120, 120))
delta_p_xy.append(
    TProfile2D('Delta_p_New', 'Delta_p', 240, -120, 120, 240, -120, 120))

outermost_z = []
outermost_z.append(TH1F("outermost_z_Old", "outermost_z", 560, -280, 280))
outermost_z.append(TH1F("outermost_z_New", "outermost_z", 560, -280, 280))

delta_p_outermost_z = []
delta_p_outermost_z.append(
    TProfile("Delta_p_outermost_z_Old", "Delta_p_outermost_z", 560, -280, 280))
delta_p_outermost_z.append(
    TProfile("Delta_p_outermost_z_New", "Delta_p_outermost_z", 560, -280, 280))

delta_p_rz = []
delta_p_rz.append(
    TProfile2D("Delta_p_rz_Old", "Delta_p_rz", 600, -300, 300, 120, 0, 120))
delta_p_rz.append(
    TProfile2D("Delta_p_rz_New", "Delta_p_rz", 600, -300, 300, 120, 0, 120))
def main():
    # pylint: disable-msg=too-many-locals,too-many-branches,too-many-statements
    """Compute the 1D power spectrum"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Compute the 1D power spectrum')

    parser.add_argument('--out-dir',
                        type=str,
                        default=None,
                        required=True,
                        help='Output directory')
    parser.add_argument(
        '--out-format',
        type=str,
        default='fits',
        required=False,
        help='Output format: root or fits (if root call PyRoot)')
    parser.add_argument('--in-dir',
                        type=str,
                        default=None,
                        required=True,
                        help='Directory to delta files')
    parser.add_argument(
        '--in-format',
        type=str,
        default='fits',
        required=False,
        help=' Input format used for input files: ascii or fits')
    parser.add_argument('--SNR-min',
                        type=float,
                        default=2.,
                        required=False,
                        help='Minimal mean SNR per pixel ')
    parser.add_argument('--reso-max',
                        type=float,
                        default=85.,
                        required=False,
                        help='Maximal resolution in km/s ')
    parser.add_argument('--lambda-obs-min',
                        type=float,
                        default=3600.,
                        required=False,
                        help='Lower limit on observed wavelength [Angstrom]')
    parser.add_argument('--nb-part',
                        type=int,
                        default=3,
                        required=False,
                        help='Number of parts in forest')
    parser.add_argument('--nb-pixel-min',
                        type=int,
                        default=75,
                        required=False,
                        help='Minimal number of pixels in a part of forest')
    parser.add_argument(
        '--nb-pixel-masked-max',
        type=int,
        default=40,
        required=False,
        help='Maximal number of masked pixels in a part of forest')
    parser.add_argument('--no-apply-filling',
                        action='store_true',
                        default=False,
                        required=False,
                        help='Dont fill masked pixels')
    parser.add_argument(
        '--noise-estimate',
        type=str,
        default='mean_diff',
        required=False,
        help=('Estimate of Pk_noise '
              'pipeline/diff/mean_diff/rebin_diff/mean_rebin_diff'))
    parser.add_argument('--forest-type',
                        type=str,
                        default='Lya',
                        required=False,
                        help='Forest used: Lya, SiIV, CIV')
    parser.add_argument('--debug',
                        action='store_true',
                        default=False,
                        required=False,
                        help='Fill root histograms for debugging')
    parser.add_argument(
        '--abs-igm',
        type=str,
        default='LYA',
        required=False,
        help=('Name of the absorption line in picca.constants defining the '
              'redshift of the forest pixels'))

    args = parser.parse_args()

    # Create root file
    if args.out_format == 'root':
        # pylint: disable-msg=import-error,import-outside-toplevel
        # import is done here as ROOT is not a required package for the code
        # to run, except if args.out_format is set to 'root'
        from ROOT import TH1D, TFile, TTree, TProfile2D, TProfile

        store_file = TFile(args.out_dir + "/Testpicca.root", "RECREATE",
                           "PK 1D studies studies")
        max_num_bins = 700
        tree = TTree("Pk1D", "SDSS 1D Power spectrum Ly-a")
        (z_qso, mean_z, mean_reso, mean_snr, lambda_min_tree, lambda_max_tree,
         plate, mjd, fiber, num_masked_pixels_tree, num_bins_tree, k_tree,
         pk_tree, pk_raw_tree, pk_noise_tree, correction_reso_tree,
         pk_diff_tree) = make_tree(tree, max_num_bins)

        # control histograms
        if args.forest_type == 'Lya':
            lambda_min = 1040.
            lambda_max = 1200.
        elif args.forest_type == 'SiIV':
            lambda_min = 1270.
            lambda_max = 1380.
        elif args.forest_type == 'CIV':
            lambda_min = 1410.
            lambda_max = 1520.
        hist_delta = TProfile2D('hdelta',
                                'delta mean as a function of lambda-lambdaRF',
                                36, 3600., 7200., 16, lambda_min, lambda_max,
                                -5.0, 5.0)
        hist_delta_rest_frame = TProfile(
            'hdelta_RF', 'delta mean as a function of lambdaRF', 320,
            lambda_min, lambda_max, -5.0, 5.0)
        hist_delta_obs_frame = TProfile(
            'hdelta_OBS', 'delta mean as a function of lambdaOBS', 1800,
            3600., 7200., -5.0, 5.0)
        hist_weighted_delta_rest_frame = TProfile(
            'hdelta_RF_we', 'delta mean weighted as a function of lambdaRF',
            320, lambda_min, lambda_max, -5.0, 5.0)
        hist_weighted_delta_obs_frame = TProfile(
            'hdelta_OBS_we', 'delta mean weighted as a function of lambdaOBS',
            1800, 3600., 7200., -5.0, 5.0)
        hist_ivar = TH1D('hivar', ' ivar ', 10000, 0.0, 10000.)
        hist_snr = TH1D('hsnr', ' snr per pixel ', 100, 0.0, 100.)
        hist_weighted_delta_rest_frame.Sumw2()
        hist_weighted_delta_obs_frame.Sumw2()

    # Read deltas
    if args.in_format == 'fits':
        files = glob.glob(args.in_dir + "/*.fits.gz")
    elif args.in_format == 'ascii':
        files = glob.glob(args.in_dir + "/*.txt")

    num_data = 0

    # initialize randoms
    np.random.seed(4)

    # loop over input files
    for index, file in enumerate(files):
        if index % 1 == 0:
            userprint("\rread {} of {} {}".format(index, len(files), num_data),
                      end="")

        # read fits or ascii file
        if args.in_format == 'fits':
            hdul = fitsio.FITS(file)
            deltas = [
                Delta.from_fitsio(hdu, pk1d_type=True) for hdu in hdul[1:]
            ]
        elif args.in_format == 'ascii':
            ascii_file = open(file, 'r')
            deltas = [Delta.from_ascii(line) for line in ascii_file]

        num_data += len(deltas)
        userprint("\n ndata = ", num_data)
        results = None

        # loop over deltas
        for delta in deltas:
            # Selection over the SNR and the resolution
            if (delta.mean_snr <= args.SNR_min
                    or delta.mean_reso >= args.reso_max):
                continue

            # first pixel in forest
            selected_pixels = 10**delta.log_lambda > args.lambda_obs_min
            first_pixel_index = (np.argmax(selected_pixels)
                                 if np.any(selected_pixels)
                                 else len(selected_pixels))

            # minimum number of pixel in forest
            min_num_pixels = args.nb_pixel_min
            if (len(delta.log_lambda) - first_pixel_index) < min_num_pixels:
                continue

            # Split in n parts the forest
            max_num_parts = (len(delta.log_lambda) -
                             first_pixel_index) // min_num_pixels
            num_parts = min(args.nb_part, max_num_parts)
            (mean_z_array, log_lambda_array, delta_array,
             exposures_diff_array, ivar_array) = split_forest(
                 num_parts, delta.delta_log_lambda, delta.log_lambda,
                 delta.delta, delta.exposures_diff, delta.ivar,
                 first_pixel_index)
            for index2 in range(num_parts):
                # rebin exposures_diff spectrum
                if (args.noise_estimate == 'rebin_diff'
                        or args.noise_estimate == 'mean_rebin_diff'):
                    exposures_diff_array[index2] = rebin_diff_noise(
                        delta.delta_log_lambda, log_lambda_array[index2],
                        exposures_diff_array[index2])

                # Fill masked pixels with 0.
                (log_lambda_new, delta_new, exposures_diff_new, ivar_new,
                 num_masked_pixels) = fill_masked_pixels(
                     delta.delta_log_lambda, log_lambda_array[index2],
                     delta_array[index2], exposures_diff_array[index2],
                     ivar_array[index2], args.no_apply_filling)
                if num_masked_pixels > args.nb_pixel_masked_max:
                    continue
                if args.out_format == 'root' and args.debug:
                    compute_mean_delta(log_lambda_new, delta_new, ivar_new,
                                       delta.z_qso, hist_delta,
                                       hist_delta_rest_frame,
                                       hist_delta_obs_frame, hist_ivar,
                                       hist_snr,
                                       hist_weighted_delta_rest_frame,
                                       hist_weighted_delta_obs_frame)

                # Compute pk_raw
                k, pk_raw = compute_pk_raw(delta.delta_log_lambda, delta_new)

                # Compute pk_noise
                run_noise = False
                if args.noise_estimate == 'pipeline':
                    run_noise = True
                pk_noise, pk_diff = compute_pk_noise(delta.delta_log_lambda,
                                                     ivar_new,
                                                     exposures_diff_new,
                                                     run_noise)

                # Compute resolution correction
                delta_pixel = (delta.delta_log_lambda * np.log(10.) *
                               constants.speed_light / 1000.)
                correction_reso = compute_correction_reso(
                    delta_pixel, delta.mean_reso, k)

                # Compute 1D Pk
                if args.noise_estimate == 'pipeline':
                    pk = (pk_raw - pk_noise) / correction_reso
                elif (args.noise_estimate == 'diff'
                      or args.noise_estimate == 'rebin_diff'):
                    pk = (pk_raw - pk_diff) / correction_reso
                elif (args.noise_estimate == 'mean_diff'
                      or args.noise_estimate == 'mean_rebin_diff'):
                    selection = (k > 0) & (k < 0.02)
                    if args.noise_estimate == 'mean_rebin_diff':
                        selection = (k > 0.003) & (k < 0.02)
                    mean_pk_diff = (sum(pk_diff[selection]) /
                                    float(len(pk_diff[selection])))
                    pk = (pk_raw - mean_pk_diff) / correction_reso

                # save in root format
                if args.out_format == 'root':
                    z_qso[0] = delta.z_qso
                    mean_z[0] = mean_z_array[index2]
                    mean_reso[0] = delta.mean_reso
                    mean_snr[0] = delta.mean_snr
                    lambda_min_tree[0] = np.power(10., log_lambda_new[0])
                    lambda_max_tree[0] = np.power(10., log_lambda_new[-1])
                    num_masked_pixels_tree[0] = num_masked_pixels
                    plate[0] = delta.plate
                    mjd[0] = delta.mjd
                    fiber[0] = delta.fiberid
                    num_bins_tree[0] = min(len(k), max_num_bins)

                    for index3 in range(num_bins_tree[0]):
                        k_tree[index3] = k[index3]
                        pk_raw_tree[index3] = pk_raw[index3]
                        pk_noise_tree[index3] = pk_noise[index3]
                        pk_diff_tree[index3] = pk_diff[index3]
                        pk_tree[index3] = pk[index3]
                        correction_reso_tree[index3] = correction_reso[index3]

                    tree.Fill()

                # save in fits format
                if args.out_format == 'fits':
                    header = [{
                        'name': 'RA',
                        'value': delta.ra,
                        'comment': "QSO's Right Ascension [degrees]"
                    }, {
                        'name': 'DEC',
                        'value': delta.dec,
                        'comment': "QSO's Declination [degrees]"
                    }, {
                        'name': 'Z',
                        'value': delta.z_qso,
                        'comment': "QSO's redshift"
                    }, {
                        'name': 'MEANZ',
                        'value': mean_z_array[index2],
                        'comment': "Absorbers mean redshift"
                    }, {
                        'name': 'MEANRESO',
                        'value': delta.mean_reso,
                        'comment': 'Mean resolution [km/s]'
                    }, {
                        'name': 'MEANSNR',
                        'value': delta.mean_snr,
                        'comment': 'Mean signal to noise ratio'
                    }, {
                        'name': 'NBMASKPIX',
                        'value': num_masked_pixels,
                        'comment': 'Number of masked pixels in the section'
                    }, {
                        'name': 'PLATE',
                        'value': delta.plate,
                        'comment': "Spectrum's plate id"
                    }, {
                        'name': 'MJD',
                        'value': delta.mjd,
                        'comment': ('Modified Julian Date,date the spectrum '
                                    'was taken')
                    }, {
                        'name': 'FIBER',
                        'value': delta.fiberid,
                        'comment': "Spectrum's fiber number"
                    }]

                    cols = [k, pk_raw, pk_noise, pk_diff, correction_reso, pk]
                    names = [
                        'k', 'Pk_raw', 'Pk_noise', 'Pk_diff', 'cor_reso', 'Pk'
                    ]
                    comments = [
                        'Wavenumber', 'Raw power spectrum',
                        "Noise's power spectrum",
                        'Noise coadd difference power spectrum',
                        'Correction resolution function',
                        'Corrected power spectrum (resolution and noise)'
                    ]
                    units = [
                        '(km/s)^-1', 'km/s', 'km/s', 'km/s', 'km/s', 'km/s'
                    ]

                    try:
                        results.write(cols,
                                      names=names,
                                      header=header,
                                      comments=comments,
                                      units=units)
                    except AttributeError:
                        results = fitsio.FITS((args.out_dir + '/Pk1D-' +
                                               str(index) + '.fits.gz'),
                                              'rw',
                                              clobber=True)
                        results.write(cols,
                                      names=names,
                                      header=header,
                                      comment=comments,
                                      units=units)

        if (args.out_format == 'fits' and results is not None):
            results.close()

    # Store root file results
    if args.out_format == 'root':
        store_file.Write()

    userprint("all done ")
ll_eff_histos = []
for l in range(0, 4):
    ll_eff_histos.append([])
    for h in l_eff_histos:
        ll_eff_histos[l].append(h.Clone())

# try to say something about whether granddaughters from the antiS lose tracking
# efficiency because they fall outside of the tracker acceptance.
tprof_etaOfGrandMotherAntiS_numberOfTrackerLayers = TProfile(
    "tprof_etaOfGrandMotherAntiS_numberOfTrackerLayers",
    "; Simulated grandmother #bar{S} #eta; Simulated track mean number of tracker layers hit",
    100, -5, 5, 0, 20)
tprof_etaOfGrandMotherAntiS_eff = TProfile(
    "tprof_etaOfGrandMotherAntiS_eff",
    "; Simulated grandmother #bar{S} #eta; Efficiency",
    100, -5, 5, 0, 1)
tprof2_etaOfGrandMotherAntiS_lxyz_numberOfTrackerLayers = TProfile2D(
    "tprof2_etaOfGrandMotherAntiS_lxyz_numberOfTrackerLayers",
    "; Simulated grandmother #bar{S} #eta; Simulated track l_{xyz}(beamspot) (cm);Simulated track mean number of tracker layers hit",
    20, -5, 5, 26, 0, 120, 0, 20)
tprof2_etaOfGrandMotherAntiS_lxyz_eff = TProfile2D(
    "tprof2_etaOfGrandMotherAntiS_lxyz_eff",
    "; Simulated grandmother #bar{S} #eta; Simulated track l_{xyz}(beamspot) (cm); Efficiency",
    20, -5, 5, 26, 0, 130, 0, 1)

prof2_vz_lxy_creation_vertex_daughters_numberOfTrackerLayers_lowPt_lowPz = TProfile2D(
    "prof2_lxy_vx_creation_vertex_daughters_numberOfTrackerLayers_lowPt_lowPz",
    ";v_{z} decay vertex K_{S}^{0} (cm); l_{0} decay vertex K_{S}^{0} (cm);mean #tracker layers hit by the track from K_{S}^{0} daughter",
    60, -300, 300, 120, 0, 120)
prof2_vz_lxy_creation_vertex_daughters_numberOfTrackerLayers_lowPt_middlePz = TProfile2D(
    "prof2_lxy_vx_creation_vertex_daughters_numberOfTrackerLayers_lowPt_middlePz",
    ";v_{z} decay vertex K_{S}^{0} (cm); l_{0} decay vertex K_{S}^{0} (cm);mean #tracker layers hit by the track from K_{S}^{0} daughter",
    60, -300, 300, 120, 0, 120)
prof2_vz_lxy_creation_vertex_daughters_numberOfTrackerLayers_lowPt_highPz = TProfile2D(