# Build the dataset for the fit, report timing, run the 2D fit, and draw the
# stacked plots for the two fit variables.
#
# NOTE(review): reconstructed from a single collapsed line -- the original
# indentation (e.g. whether generate() sits inside a toy-study branch that
# the 'else' below belongs to) could not be recovered; verify against the
# full script.

# Generate an (extended: Poisson-fluctuated yield) toy dataset over the
# observables in 'obsSet'.
data = totalPdf.generate(fitter.ws.set('obsSet'), RooFit.Name('data_obs'),
                         RooFit.Extended())
if fitter.pars.binData:
    # Bin the generated dataset and import the binned version into the
    # RooWorkspace.  'import' is a Python keyword, hence the getattr() call.
    data = RooDataHist('data_obs', 'data_obs', fitter.ws.set('obsSet'), data)
    data.Print('v')
    getattr(fitter.ws, 'import')(data)
else:
    # Otherwise load data through the fitter and update the multijet yield.
    # NOTE(review): this discards the dataset generated above -- presumably
    # the generate() call is conditional in the original file; confirm.
    data = fitter.loadData()
    fitter.setMultijetYield()
    data.Print()
# Free the snapshot of starting parameters (ROOT-style explicit destruction).
startpars.IsA().Destructor(startpars)
# Report time spent so far, then keep the stopwatch running through the fit.
print 'Time elapsed: %.1f sec' % timer.RealTime()
print 'CPU time used: %.1f sec' % timer.CpuTime()
print 'starting fitting routine'
timer.Continue()
#fitter.ws.var('top_nrm').setConstant()
fr = None
fr = fitter.fit()  # fit result object returned by the fitter
# Stacked plots (data + component PDFs) for each of the two fit variables,
# each with its own legend.
plot1 = fitter.stackedPlot(pars.var[0])
leg1 = RooWjj2DFitter.Wjj2DFitter.legend4Plot(plot1)
plot2 = fitter.stackedPlot(pars.var[1])
leg2 = RooWjj2DFitter.Wjj2DFitter.legend4Plot(plot2)
c1 = TCanvas('c1', fitter.ws.var(pars.var[0]).GetTitle() + ' plot')
plot1.addObject(leg1)
plot1.Draw()
#leg1.Draw('same')
for x in range(minnumberofsteps, maxnumberofsteps + 1): timer_1.Start() Integ_Midpoint.SetBinContent(x, MidPointIntegral( a, b, x, func)) # Set the value of the integral as a function of the steps x y = old_integration_value - 1.0 * MidPointIntegral( a, b, x, func) # Calculate Delta I y = 1.0 * abs(y) / MidPointIntegral(a, b, x, func) # Calculate Delta I/I Integ_Err_Midpoint.SetBinContent( x, y) # Set the Error histogram equal to Delta I / I old_integration_value = MidPointIntegral( a, b, x, func) # Store the "old" integration value timer_1.Stop() timer_Cpu_Midpoint = timer_Cpu_Midpoint + timer_1.CpuTime() #timer_Real_Midpoint = timer_Real_Midpoint + timer_1.RealTime() timer_Midpoint.SetBinContent(x, timer_Cpu_Midpoint) if y < trsh and first_time_flag == 0: first_time_flag = 1 Midpoint_stops_at_n = x print "Midpoint Done at: ", x print " Integral value MidPoint: ", MidPointIntegral(a, b, x, func) print " Error value MidPoint: ", y timer_2 = TStopwatch() timer_Cpu_Trapezoid = 0.0 Trapezoid_stops_at_n = 0.0 old_integration_value = 0.0 first_time_flag = 0
# Dump the muon collections for the current event, then (after the event
# loop) stop the timer, print the timing summary, and snapshot the user's
# process memory usage to memory.txt.
#
# NOTE(review): reconstructed from a collapsed line; the per-event part at
# the top presumably sits inside the event loop in the original file.
event.getByLabel(muon_L, muon_H)
muons = muon_H.product()
print('muons size = ', muPt.size(), ', good muon size = ', muId.size(),
      'good muon collection size = ', len(muons))
for imu in range(0, muPt.size()):
    print('muon pt in b2g muon collection =', muPt.at(imu))
for imu in muons:
    print('muon pt', imu.getP4().Pt())
    #print ('muon charge', imu.getCharge())
#Lets just get the good muons:
#goodMuIso

# Done processing the events!
# Stop our timer
timer.Stop()

# Print out our timing information
rtime = timer.RealTime()  # Real time (or "wall time")
ctime = timer.CpuTime()   # CPU time
print("Analyzed events: {0:6d}".format(nEventsAnalyzed))
print("RealTime={0:6.2f} seconds, CpuTime={1:6.2f} seconds".format(
    rtime, ctime))
# FIX(review): guard against zero elapsed time (e.g. zero events analyzed)
# so the rate prints cannot raise ZeroDivisionError.
if rtime > 0:
    print("{0:4.2f} events / RealTime second .".format(nEventsAnalyzed / rtime))
if ctime > 0:
    print("{0:4.2f} events / CpuTime second .".format(nEventsAnalyzed / ctime))
# FIX(review): with shell=True the command must be a single string.  The
# original list form ["cmd", ""] only worked by accident (the extra ""
# becomes a positional parameter of the spawned shell).
subprocess.call("ps aux | grep skhalil | cat > memory.txt", shell=True)
# Merge each composite dataset in its own worker process while holding an
# exclusive advisory lock on /tmp/mergeOutput.lock, then stop the stopwatch
# and break the total CPU time into days/hours/minutes for the report.
#
# NOTE(review): reconstructed from a collapsed line; `f` (the open lock
# file), `sw` (stopwatch), `composite_datasets`, and `sema` are defined
# earlier in the script.

stat = os.stat("/tmp/mergeOutput.lock")
# Only the owner can chmod: if we own the lock file, open its permissions so
# other users' merge jobs can share it.  NOTE(review): 0666 makes the file
# world-writable -- confirm this is intended for the shared lock.
if stat.st_uid == os.getuid() and stat.st_gid == os.getgid():
    os.chmod("/tmp/mergeOutput.lock", 0666)
# Block until we hold the exclusive lock.
fcntl.lockf(f, fcntl.LOCK_EX)
# One worker process per composite dataset; `sema` presumably bounds how
# many run concurrently -- verify inside mergeCompositeDataset.
processes = []
for composite_dataset in composite_datasets:
    p = Process(target=mergeCompositeDataset, args=(composite_dataset, sema))
    p.start()
    processes.append(p)
# Wait for every worker to finish, then release the lock.
for p in processes:
    p.join()
fcntl.lockf(f, fcntl.LOCK_UN)
f.close()
sw.Stop()
# Decompose total CPU seconds into days / hours / minutes / seconds.
cpu = sw.CpuTime()
real = sw.RealTime()
days = int(cpu / (60.0 * 60.0 * 24.0))
cpu -= days * (60.0 * 60.0 * 24.0)
hours = int(cpu / (60.0 * 60.0))
cpu -= hours * (60.0 * 60.0)
minutes = int(cpu / 60.0)
cpu -= minutes * 60.0
# Assemble the human-readable time report (continues past this chunk).
timeInfo = "\n\n\n=============================================\n\n"
timeInfo += "CPU Time: "
if days > 0:
    timeInfo += str(days) + " days, "
if days > 0 or hours > 0:
    timeInfo += str(hours) + " hours, "