def MDtest_plot_rhohist(varname='rho',Ndesbins=[2,3],lmin=3,lmax=80,getrhopred=True,firstNreal=-1,rhofiletag='',nvss=True,plottag=''):
    """Plot a histogram of per-realization statistics for the MD-test reconstructions.

    varname    - 'rho' or 's'; selects which statistic files are read and
                 which plotting helper (au.plot_rhohist / au.plot_shist) is used.
    Ndesbins   - list of DES-like bin counts; one reconstruction per entry.
    lmin, lmax - multipole range encoded in the data file names.
    getrhopred - if True, overlay theoretical expectation values.
    firstNreal - if >0 and smaller than the number of stored realizations,
                 only the first firstNreal realizations are histogrammed.
    rhofiletag - optional tag in the statistic file names.
    nvss       - include the NVSS-like survey reconstruction.
    plottag    - optional suffix for the output plot file name.
    """
    plotdir='output/MDchecks/plots/'
    rhogrid=MDtest_read_rho_wfiles(varname,Ndesbins,lmin,lmax,rhofiletag,nvss=nvss)
    #output order are nvss, then the des-like surveys in given Ndesbins order
    Nreal=rhogrid.shape[1]
    # Optionally restrict to the first firstNreal realizations.
    if firstNreal>0 and firstNreal<Nreal:
        Nreal=firstNreal
        rhogrid=rhogrid[:,:Nreal]
    testname='MDchecks'
    if getrhopred:
        rhopred=MDtest_get_expected_rho(varname,Ndesbins,lmin,lmax,nvss=nvss)
    else:
        rhopred=[]
    if plottag:
        tag='_'+plottag
    else:
        tag=''
    plotname ='MDtest_{1:s}hist_r{0:05d}{2:s}'.format(Nreal,varname,tag)
    # One label per reconstruction, in the same order as rhogrid rows.
    reclabels=[m.tag for m in MDtest_get_maptypelist(Ndesbins=Ndesbins,nvss=nvss)]
    # if nvss:
    #     reclabels.append('NVSS')
    # for n in Ndesbins:
    #     reclabels.append('DES {0:d} bins'.format(n))
    if varname=='rho':
        au.plot_rhohist(rhogrid,reclabels,testname,plotdir,plotname,rhopred)
    elif varname=='s':
        au.plot_shist(rhogrid,reclabels,testname,plotdir,plotname,rhopred)
def analyze_stage4():
    ''' resolve indirect calls '''
    global isKernel
    if not isKernel:
        # NOTE(review): the collapsed source shows two consecutive identical
        # calls; read here as an extra analysis pass for non-kernel binaries,
        # with one unconditional pass after it — confirm intended nesting.
        AnalysisUtils.forward_analysis_intra_defined_funcs()
    AnalysisUtils.forward_analysis_intra_defined_funcs()
    keepAllConsistency()
def test_userentries(outputFilePath=None):
    """Locate user-reachable entry points and report them.

    Prints each entry point and, when outputFilePath is given, appends the
    same report to that file. Returns early (no output) when the binary has
    no user client.
    """
    if not containsUserClient():
        return
    rebuildAllInternalDataWOParseModInitFunc(True)
    AnalysisUtils.forward_analysis_intra_defined_funcs()
    binPath = getOriginalBinaryPath()
    outputFile = None
    if not None is outputFilePath:
        # Append so repeated runs accumulate into one report file.
        outputFile = open(outputFilePath, "a")
    foundEntryPoints = findEntryPoints()
    if len(foundEntryPoints) == 0:
        if not None is outputFile:
            outputFile.write("\n\n************\nNo Entry Points in {}:\n".format(binPath))
    else:
        if not None is outputFile:
            outputFile.write("\n\n============\nEntry Points in {}:\n".format(binPath))
        # foundEntryPoints maps an entry-type label to a list of addresses.
        for entryType in foundEntryPoints:
            entries = foundEntryPoints[entryType]
            for entry in entries:
                outputStr = "[{}] 0x{:016X}: {}".format(entryType, entry, getName(entry))
                print outputStr
                if not None is outputFile:
                    outputFile.write(outputStr + "\n")
    if not None is outputFile:
        outputFile.close()
def test_indirectcalls(outputFilePath=None, listAll=False):
    """Resolve indirect calls in the binary and report the resolution rate.

    Optionally dumps the solved/unsolved/all call sets as JSON to
    outputFilePath and, with listAll, prints every unsolved call address.
    Returns (solvedIndirectCalls, unsolvedIndirectCalls).
    """
    #importDepsTils()
    modInitFuncs = getAllModInitFuncs()
    if len(modInitFuncs) == 0:
        return
    parseGOTNames()
    #if not haveSymbol():
    #    return
    # For globals named "<Class>::gMetaClass" in __common, set their type to
    # the corresponding "<Class>::MetaClass".
    commonSeg = getSegByName("__common")
    if not None is commonSeg:
        for ea in Heads(commonSeg.start_ea, commonSeg.end_ea):
            deName = getDeNameAtEA(ea)
            if None is deName:
                continue
            if deName.endswith("::gMetaClass"):
                SetType(ea, deName[:-len("::gMetaClass")] + "::MetaClass")
    parseKernelHeadersAndSetType()
    rebuildAllInternalDataWOParseModInitFunc(True)
    AnalysisUtils.forward_analysis_intra_defined_funcs()
    solvedIndirectCalls, unsolvedIndirectCalls, allIndirectCalls, solveRate = AnalysisUtils.checkResolutionRate()
    # JSON-friendly copies (sets/dict-of-sets become lists).
    result = {"solvedIndirectCalls": {x: list(solvedIndirectCalls[x]) for x in solvedIndirectCalls},
              "unsolvedIndirectCalls": list(unsolvedIndirectCalls),
              "allIndirectCalls": list(allIndirectCalls)}
    if not None is outputFilePath:
        with open(outputFilePath, "w") as f:
            json.dump(result, f)
    print "solved {}, unsolved {}, rate {}".format(len(solvedIndirectCalls), len(unsolvedIndirectCalls), solveRate)
    if listAll:
        print ", ".join(["0x{:X}".format(x) for x in unsolvedIndirectCalls])
    return solvedIndirectCalls, unsolvedIndirectCalls
def main(args):
    """Compare kallisto and sailfish quantifications against simulated truth.

    Reads six simulation replicates for args.species, joins truth/kallisto/
    sailfish TPM tables, prints per-replicate Spearman/Pearson correlations,
    then summarizes signed and absolute relative differences.
    """
    sims = os.path.sep.join([pathname, '..', 'sims', 'doi_10.1101_025387', args.species])
    rdict = {}
    corrs = {}
    for i in xrange(1, 7):
        path = os.path.sep.join([sims, str(i)])
        print("reading results from {}".format(path))
        tdf = ParsingUtils.readThreeColumnTruth(os.path.sep.join([path, "truth.tsv"]), "_true")
        kdf = ParsingUtils.readKallisto(os.path.sep.join([path, "abundance.tsv"]), "_kallisto")
        sdf = ParsingUtils.readSailfish(os.path.sep.join([path, "quant.sf"]), "_sailfish")
        df = tdf.join(kdf, rsuffix="_K").join(sdf, rsuffix="_S")
        # Correlate each method's TPM with truth, both rank and linear.
        for method in ["sailfish", "kallisto"]:
            for ct in ["spearman", "pearson"]:
                corrKey = "{}_{}".format(method, ct)
                corr = df["TPM_true"].corr(df["TPM_{}".format(method)], method=ct)
                if corrKey in corrs:
                    corrs[corrKey].append(corr)
                else:
                    corrs[corrKey] = [corr]
        rdict[i] = df
    corrStrs = []
    for k, v in corrs.iteritems():
        corrStrs.append("{}: {}".format(k, ','.join(["{:0.2}".format(c) for c in v])))
    corrStrs = sorted(corrStrs)
    print('\n'.join(corrStrs))
    # Accumulate per-replicate summary stats of relative differences.
    relDiffs = {}
    for k, v in rdict.iteritems():
        rds = AnalysisUtils.relDiff("TPM_true", "TPM_sailfish", v, verbose=False)
        rdk = AnalysisUtils.relDiff("TPM_true", "TPM_kallisto", v, verbose=False)
        for method, rd in {"sailfish": rds, "kallisto": rdk}.iteritems():
            for summaryName, summaryFunc in {"median": AnalysisUtils.getMedian, "mean": AnalysisUtils.getMean}.iteritems():
                signedKey = "{}_{}_{}".format(method, summaryName, "signed")
                absKey = "{}_{}_{}".format(method, summaryName, "abs")
                if signedKey in relDiffs:
                    relDiffs[signedKey].append(summaryFunc(rd[0]))
                else:
                    relDiffs[signedKey] = [summaryFunc(rd[0])]
                if absKey in relDiffs:
                    relDiffs[absKey].append(summaryFunc(rd[0].abs()))
                else:
                    relDiffs[absKey] = [summaryFunc(rd[0].abs())]
    # Report mean-of-medians and median-of-means across replicates.
    for signedness in ["signed", "abs"]:
        for stat in ["median", "mean"]:
            if stat == "median":
                print("mean of medians of {} relative differences is :\n kallisto: {:0.2f}\n sailfish: {:0.2f}\n".format(
                    signedness,
                    np.mean(relDiffs["kallisto_{}_{}".format(stat, signedness)]),
                    np.mean(relDiffs["sailfish_{}_{}".format(stat, signedness)])))
            elif stat == "mean":
                print("median of means of {} relative differences is :\n kallisto: {:0.2f}\n sailfish: {:0.2f}\n".format(
                    signedness,
                    np.median(relDiffs["kallisto_{}_{}".format(stat, signedness)]),
                    np.median(relDiffs["sailfish_{}_{}".format(stat, signedness)])))
def MDtest_get_glm(Nreal=1,minreal=0,Ndesbins=[2,3],nvss=True): cldat=MDtest_get_Cl(justread=True,Ndesbins=Ndesbins,nvss=nvss) print 'cldat.bintaglist:',cldat.bintaglist #cldat seems to be associated with correct crosspairs # also, manipulating cl data has expected effects on rho hist rlzns=np.arange(minreal,minreal+Nreal) #leaving reclist empty means just gen gal and isw maps au.getmaps_fromCl(cldat,rlzns=rlzns)
def MDtest_get_glm_and_rec(Nreal=1,minreal=0,justgetrho=0,dorho=1,Ndesbins=[2,3],lmin=3,lmax=80,rhofiletag='',nvss=True):
    """Generate map realizations and run ISW reconstructions for the MD test.

    Builds the reconstruction list for the requested surveys and delegates
    map generation plus reconstruction to au.getmaps_fromCl.
    """
    #adding this block of text fixed bug when passing two sets of analysis. why?
    maptypes=MDtest_get_maptypelist(Ndesbins=Ndesbins,nvss=nvss)
    mapsfor=[mt.tag for mt in maptypes] #tags for maps we want to make
    mapsfor.append('isw')
    allcldat=MDtest_get_Cl(justread=True,Ndesbins=Ndesbins,nvss=nvss)
    #cldat=get_reduced_cldata(allcldat,dothesemaps=mapsfor)#basically just reorders
    cldat=allcldat
    # NOTE(review): makeplots is computed but never used in this body.
    makeplots = Nreal==1
    rlzns=np.arange(minreal,minreal+Nreal)
    reclist=MDtest_get_reclist(Ndesbins=Ndesbins,lmin=lmin,lmax=lmax,nvss=nvss)
    au.getmaps_fromCl(cldat,rlzns=rlzns,reclist=reclist,justgetrho=justgetrho,dorho=dorho,dos=False,dochisq=False,rhofiletag=rhofiletag)
def MDtest_get_expected_rho(varname='rho',Ndesbins=[2,3],lmin=3,lmax=80,nvss=True):
    """Return theoretical expectation values of the chosen statistic.

    One value per reconstruction: the DES-like surveys in Ndesbins order,
    plus NVSS when nvss is True (bool contributes 0/1 to the count).
    """
    n_rec = len(Ndesbins) + nvss
    cldat = MDtest_boostNVSSnoise(MDtest_get_Cl(Ndesbins=Ndesbins, nvss=nvss))
    reclist = MDtest_get_reclist(Ndesbins, lmin, lmax, nvss=nvss)
    predictions = np.zeros(n_rec)
    for idx, rec in enumerate(reclist[:n_rec]):
        predictions[idx] = au.compute_rho_fromcl(cldat, rec, varname=varname)
    return predictions
def launchCheckerRacedemo(checkerResultFileName, kext):
    """Configure and launch the race-demo checker against *kext*.

    Results are written via checkerRacedemoResultHandler to the file named
    by checkerResultFileName; analysis stays within the KEXT and is limited
    to user clients.
    """
    demo_args = AnalysisUtils.CheckerArgs(
        checkerName="checker_racedemo",
        checkerResultFileName=checkerResultFileName,
        preCheckers=[checker_racedemo_pre],
        resultHandler=checkerRacedemoResultHandler,
        crossKEXT=False,
        onlyUserClients=True)
    launchChecker(demo_args, kext, q_ea_and_lockretain=[])
def launchCheckerAtIndex(checkerIdx, kext=None):
    """Launch the registered checker at Checkers[checkerIdx] on *kext*.

    Skips launch (with a message) when the index is out of range or the
    checker requires a user client that the kext does not expose.
    """
    if checkerIdx >= len(Checkers):
        print "checkerIdx {} out of {}".format(checkerIdx, len(Checkers))
        return
    checker = Checkers[checkerIdx]
    if not None is checker:
        checker_name = checker["name"]
        checker_launcher = checker["launcher"]
        allUCInfos = getAllUCInfos()
        if checker["requireUC"] and len(allUCInfos) == 0:
            print "[!] {} requires accessible userclient, but kext does not have".format(
                checker_name)
            return
        AnalysisUtils.forward_analysis_intra_defined_funcs(kext)
        print "[+] Launching checker {}".format(checker_name)
        # NOTE(review): "abc" looks like a placeholder result-file argument —
        # confirm against the launcher signature.
        checker_launcher("abc", kext)
def main():
    """Summarize kallisto vs sailfish accuracy on 20 RSEM simulations.

    Joins truth/kallisto/sailfish TPM tables per replicate, then prints
    mean-of-medians and median-of-means of signed and absolute relative
    differences for each method.
    """
    sims = os.path.sep.join([pathname, '..', 'sims', 'rsem'])
    rdict = {}
    for i in xrange(1, 21):
        path = os.path.sep.join([sims, str(i)])
        print("reading results from {}".format(path))
        tdf = ParsingUtils.readRSEMTruth(os.path.sep.join([path, "truth.tsv"]), "_true")
        kdf = ParsingUtils.readKallisto(os.path.sep.join([path, "abundance.tsv"]), "_kallisto")
        sdf = ParsingUtils.readSailfish(os.path.sep.join([path, "quant.sf"]), "_sailfish")
        df = tdf.join(kdf, rsuffix="_K").join(sdf, rsuffix="_S")
        rdict[i] = df
    # Accumulate per-replicate summary stats of relative differences.
    relDiffs = {}
    for k, v in rdict.iteritems():
        rds = AnalysisUtils.relDiff("TPM_true", "TPM_sailfish", v, verbose=False)
        rdk = AnalysisUtils.relDiff("TPM_true", "TPM_kallisto", v, verbose=False)
        for method, rd in {"sailfish": rds, "kallisto": rdk}.iteritems():
            for summaryName, summaryFunc in {"median": AnalysisUtils.getMedian, "mean": AnalysisUtils.getMean}.iteritems():
                signedKey = "{}_{}_{}".format(method, summaryName, "signed")
                absKey = "{}_{}_{}".format(method, summaryName, "abs")
                if signedKey in relDiffs:
                    relDiffs[signedKey].append(summaryFunc(rd[0]))
                else:
                    relDiffs[signedKey] = [summaryFunc(rd[0])]
                if absKey in relDiffs:
                    relDiffs[absKey].append(summaryFunc(rd[0].abs()))
                else:
                    relDiffs[absKey] = [summaryFunc(rd[0].abs())]
    for signedness in ["signed", "abs"]:
        for stat in ["median", "mean"]:
            if stat == "median":
                print("mean of medians of {} relative differences is :\n kallisto: {:0.2f}\n sailfish: {:0.2f}\n".format(
                    signedness,
                    np.mean(relDiffs["kallisto_{}_{}".format(stat, signedness)]),
                    np.mean(relDiffs["sailfish_{}_{}".format(stat, signedness)])))
            elif stat == "mean":
                print("median of means of {} relative differences is :\n kallisto: {:0.2f}\n sailfish: {:0.2f}\n".format(
                    signedness,
                    np.median(relDiffs["kallisto_{}_{}".format(stat, signedness)]),
                    np.median(relDiffs["sailfish_{}_{}".format(stat, signedness)])))
def launchCheckerInFunc(funcEA, className, resultFile, checkerArgs, **kwargs):
    """Run the configured checker over a single function.

    funcEA      - address of the function to analyze.
    className   - class context recorded in the CheckerResults.
    resultFile  - destination stream; defaults to stdout when None.
    checkerArgs - CheckerArgs carrying checker configuration and handlers.
    kwargs      - forwarded verbatim to forward_analysis_in_func.
    """
    if None is resultFile:
        resultFile = sys.stdout
    print "\n[+] Launch {} in func {} at {:016X}".format(
        checkerArgs.checkerName, getName(funcEA), funcEA)
    checkerResults = CheckerResults(checkerArgs.checkerName, className, funcEA, resultFile)
    AnalysisUtils.forward_analysis_in_func(funcEA,
                                           isPureChecker=True,
                                           isInterProc=checkerArgs.isInterProc,
                                           crossKEXT=checkerArgs.crossKEXT,
                                           preCheckers=checkerArgs.preCheckers,
                                           postCheckers=checkerArgs.postCheckers,
                                           checkerResults=checkerResults,
                                           checkerArgs=checkerArgs,
                                           **kwargs)
    # Post-process collected results if the checker supplied a handler.
    if not None is checkerArgs.resultHandler:
        checkerArgs.resultHandler(checkerResults, resultFile)
def MDtest_read_rho_wfiles(varname='rho',Ndesbins=[2,3],lmin=3,lmax=80,rhofiletag='',nvss=True):
    """Read per-realization rho/s data files for every MD-test survey.

    Returns a 2D array with one row per survey (order given by
    MDtest_get_maptypelist) and one column per realization. A non-positive
    lmax selects the file-name variant without the lmax field.
    """
    maptypes = MDtest_get_maptypelist(Ndesbins=Ndesbins, nvss=nvss)  #list of LSS survey types
    mapdir = 'output/MDchecks/map_output/'
    tagstr = '_' + rhofiletag if rhofiletag else ''
    if lmax > 0:
        fnames = ['iswREC.{0:s}.fid.fullsky-lmin{2:02d}-lmax{3:02d}.MDtest{4:s}.{1:s}.dat'.format(mt.tag, varname, lmin, lmax, tagstr)
                  for mt in maptypes]
    else:
        fnames = ['iswREC.{0:s}.fid.fullsky-lmin{2:02d}.MDtest{3:s}.{1:s}.dat'.format(mt.tag, varname, lmin, tagstr)
                  for mt in maptypes]
    return np.array([au.read_rhodat_wfile(mapdir + fname) for fname in fnames])
def test_indirectcalls(kextPrefix=None, outputFilePath=None, listAll=False):
    """Resolve indirect calls for one KEXT (or all) and report the rate.

    NOTE(review): shares its name with another test_indirectcalls variant in
    this source; if both live in one module the later definition wins —
    confirm they come from separate files.
    Returns (solvedIndirectCalls, unsolvedIndirectCalls).
    """
    loadNecessaryDataFromPersistNode()
    AnalysisUtils.forward_analysis_intra_defined_funcs(kextPrefix)
    solvedIndirectCalls, unsolvedIndirectCalls, allIndirectCalls, solveRate = AnalysisUtils.checkResolutionRate(
        kextPrefix)
    # JSON-friendly copies; allIndirectCalls is passed through as-is here
    # (unlike the other variant) — presumably already a list.
    result = {
        "solvedIndirectCalls": {x: list(solvedIndirectCalls[x]) for x in solvedIndirectCalls},
        "unsolvedIndirectCalls": list(unsolvedIndirectCalls),
        "allIndirectCalls": allIndirectCalls
    }
    if not None is outputFilePath:
        with open(outputFilePath, "w") as f:
            json.dump(result, f)
    print "solved {}, unsolved {}, rate {}".format(len(solvedIndirectCalls), len(unsolvedIndirectCalls), solveRate)
    if listAll:
        print ", ".join(
            ["0x{:X}".format(x) for x in sorted(unsolvedIndirectCalls)])
    return solvedIndirectCalls, unsolvedIndirectCalls
def preprocess(self, dst_file):
    """Flatten HPS DST events into an ntuple of cluster/track variables.

    Reads HPS_Event entries from dst_file, selects events with a good
    cluster pair matched to two tracks, and writes per-event kinematics
    (including derived theta angles and the track-pair momentum sum) to
    self.output_file_name via a FlatTupleMaker.
    """
    chain = r.TChain("HPS_Event")
    chain.Add(dst_file)
    tree = chain.GetTree()
    from ROOT import HpsEvent
    hps_event = HpsEvent()
    branch = chain.SetBranchAddress("Event", r.AddressOf(hps_event))
    # Add variables to the ntuple
    ft_maker = ft.FlatTupleMaker(self.output_file_name)
    ft_maker.add_variable("cluster_0_energy")
    ft_maker.add_variable("cluster_1_energy")
    ft_maker.add_variable("cluster_0_x")
    ft_maker.add_variable("cluster_1_x")
    ft_maker.add_variable("cluster_0_y")
    ft_maker.add_variable("cluster_1_y")
    ft_maker.add_variable("track_0_p")
    ft_maker.add_variable("track_0_px")
    ft_maker.add_variable("track_0_py")
    ft_maker.add_variable("track_0_pz")
    ft_maker.add_variable("track_0_theta")
    ft_maker.add_variable("track_0_phi0")
    ft_maker.add_variable("track_0_omega")
    ft_maker.add_variable("track_0_d0")
    ft_maker.add_variable("track_0_z0")
    ft_maker.add_variable("track_1_p")
    ft_maker.add_variable("track_1_px")
    ft_maker.add_variable("track_1_py")
    ft_maker.add_variable("track_1_pz")
    ft_maker.add_variable("track_1_theta")
    ft_maker.add_variable("track_1_phi0")
    ft_maker.add_variable("track_1_omega")
    ft_maker.add_variable("track_1_d0")
    ft_maker.add_variable("track_1_z0")
    ft_maker.add_variable("track_pair_p_sum")
    matcher = tcm.TrackClusterMatcher()
    for entry in xrange(0, chain.GetEntries()):
        if (entry + 1) % 1000 == 0:
            print "Event " + str(entry + 1)
        chain.GetEntry(entry)
        # Loop through all clusters in the event and find a 'good' pair.
        # For now, a 'good' pair is defined as two clusters whose cluster
        # time difference is less than 1.6 ns and greater than -1.6 ns
        cluster_pair = au.get_good_cluster_pair(hps_event)
        if len(cluster_pair) != 2:
            continue
        matcher.find_all_matches(hps_event)
        tracks = [
            matcher.get_track(cluster_pair[0]),
            matcher.get_track(cluster_pair[1])
        ]
        # Require both clusters to have a matched track.
        if (tracks[0] is None) or (tracks[1] is None):
            continue
        ft_maker.set_variable_value("cluster_0_energy", cluster_pair[0].getEnergy())
        ft_maker.set_variable_value("cluster_1_energy", cluster_pair[1].getEnergy())
        ft_maker.set_variable_value("cluster_0_x", cluster_pair[0].getPosition()[0])
        ft_maker.set_variable_value("cluster_1_x", cluster_pair[1].getPosition()[0])
        ft_maker.set_variable_value("cluster_0_y", cluster_pair[0].getPosition()[1])
        ft_maker.set_variable_value("cluster_1_y", cluster_pair[1].getPosition()[1])
        ft_maker.set_variable_value(
            "track_0_p", np.linalg.norm(np.asarray(tracks[0].getMomentum())))
        ft_maker.set_variable_value("track_0_px", tracks[0].getMomentum()[0])
        ft_maker.set_variable_value("track_0_py", tracks[0].getMomentum()[1])
        ft_maker.set_variable_value("track_0_pz", tracks[0].getMomentum()[2])
        # Polar angle measured from the beam plane (pi/2 minus acos).
        track_0_theta = math.fabs(math.pi / 2 - math.acos(te.get_cos_theta(tracks[0])))
        ft_maker.set_variable_value("track_0_theta", track_0_theta)
        ft_maker.set_variable_value("track_0_phi0", tracks[0].getPhi0())
        ft_maker.set_variable_value("track_0_omega", tracks[0].getOmega())
        ft_maker.set_variable_value("track_0_d0", tracks[0].getD0())
        ft_maker.set_variable_value("track_0_z0", tracks[0].getZ0())
        ft_maker.set_variable_value(
            "track_1_p", np.linalg.norm(np.asarray(tracks[1].getMomentum())))
        ft_maker.set_variable_value("track_1_px", tracks[1].getMomentum()[0])
        ft_maker.set_variable_value("track_1_py", tracks[1].getMomentum()[1])
        ft_maker.set_variable_value("track_1_pz", tracks[1].getMomentum()[2])
        track_1_theta = math.fabs(math.pi / 2 - math.acos(te.get_cos_theta(tracks[1])))
        ft_maker.set_variable_value("track_1_theta", track_1_theta)
        ft_maker.set_variable_value("track_1_phi0", tracks[1].getPhi0())
        ft_maker.set_variable_value("track_1_omega", tracks[1].getOmega())
        ft_maker.set_variable_value("track_1_d0", tracks[1].getD0())
        ft_maker.set_variable_value("track_1_z0", tracks[1].getZ0())
        p_sum = np.linalg.norm(np.asarray(
            tracks[0].getMomentum())) + np.linalg.norm(
                np.asarray(tracks[1].getMomentum()))
        ft_maker.set_variable_value("track_pair_p_sum", p_sum)
        ft_maker.fill()
    ft_maker.close()
def perform_checker_on_kext(kextname, checker_id):
    """Run the checker with index *checker_id* against the named KEXT.

    Does nothing when kextname is not a known KEXT prefix; otherwise runs
    the intra-procedural forward analysis first, then launches the checker.
    """
    known_prefixes = getAllKEXTPrefixes()
    if kextname not in known_prefixes:
        return
    AnalysisUtils.forward_analysis_intra_defined_funcs(kextname)
    PolicyChecker.launchCheckerAtIndex(checker_id, kextname)
def preprocess(self, dst_file):
    """Flatten HPS DST events into a tri-candidate ntuple.

    Reads HPS_Event entries from dst_file, selects events where
    self.is_good_cluster_pair accepts the cluster pair and both clusters
    match a track, and writes cluster/track kinematics (tan-lambda and
    charge included) to "beam_tri_preprocessed_signal.root".
    """
    chain = r.TChain("HPS_Event")
    chain.Add(dst_file)
    tree = chain.GetTree()
    from ROOT import HpsEvent
    hps_event = HpsEvent()
    branch = chain.SetBranchAddress("Event", r.AddressOf(hps_event))
    # Create the file name for the preprocessed ROOT file
    #output_file_name = root_file.GetName()[str(root_file.GetName()).rindex("/") + 1:-5]
    output_file_name = "beam_tri_preprocessed_signal.root"
    # Add variables to the ntuple
    ft_maker = ft.FlatTupleMaker(output_file_name)
    ft_maker.add_variable("cluster_0_energy")
    ft_maker.add_variable("cluster_1_energy")
    ft_maker.add_variable("cluster_0_x")
    ft_maker.add_variable("cluster_1_x")
    ft_maker.add_variable("cluster_0_y")
    ft_maker.add_variable("cluster_1_y")
    ft_maker.add_variable("track_0_p")
    ft_maker.add_variable("track_0_px")
    ft_maker.add_variable("track_0_py")
    ft_maker.add_variable("track_0_pz")
    ft_maker.add_variable("track_0_tanlambda")
    ft_maker.add_variable("track_0_phi0")
    ft_maker.add_variable("track_0_omega")
    ft_maker.add_variable("track_0_d0")
    ft_maker.add_variable("track_0_z0")
    ft_maker.add_variable("track_0_charge")
    ft_maker.add_variable("track_1_p")
    ft_maker.add_variable("track_1_px")
    ft_maker.add_variable("track_1_py")
    ft_maker.add_variable("track_1_pz")
    ft_maker.add_variable("track_1_tanlambda")
    ft_maker.add_variable("track_1_phi0")
    ft_maker.add_variable("track_1_omega")
    ft_maker.add_variable("track_1_d0")
    ft_maker.add_variable("track_1_z0")
    ft_maker.add_variable("track_1_charge")
    matcher = tcm.TrackClusterMatcher()
    for entry in xrange(0, chain.GetEntries()):
        if (entry + 1) % 1000 == 0:
            print "Event " + str(entry + 1)
        chain.GetEntry(entry)
        #if not au.is_good_data_event(hps_event) : continue
        # Loop through all clusters in the event and find a 'good' pair.
        # For now, a 'good' pair is defined as two clusters whose cluster
        # time difference is less than 1.7 ns and greater than -1.6 ns
        cluster_pair = au.get_good_cluster_pair(hps_event)
        if not self.is_good_cluster_pair(cluster_pair):
            continue
        matcher.find_all_matches(hps_event)
        tracks = [
            matcher.get_track(cluster_pair[0]),
            matcher.get_track(cluster_pair[1])
        ]
        # Require both clusters to have a matched track.
        if (tracks[0] is None) or (tracks[1] is None):
            continue
        ft_maker.set_variable_value("cluster_0_energy", cluster_pair[0].getEnergy())
        ft_maker.set_variable_value("cluster_1_energy", cluster_pair[1].getEnergy())
        ft_maker.set_variable_value("cluster_0_x", cluster_pair[0].getPosition()[0])
        ft_maker.set_variable_value("cluster_1_x", cluster_pair[1].getPosition()[0])
        ft_maker.set_variable_value("cluster_0_y", cluster_pair[0].getPosition()[1])
        ft_maker.set_variable_value("cluster_1_y", cluster_pair[1].getPosition()[1])
        ft_maker.set_variable_value(
            "track_0_p", np.linalg.norm(np.asarray(tracks[0].getMomentum())))
        ft_maker.set_variable_value("track_0_px", tracks[0].getMomentum()[0])
        ft_maker.set_variable_value("track_0_py", tracks[0].getMomentum()[1])
        ft_maker.set_variable_value("track_0_pz", tracks[0].getMomentum()[2])
        ft_maker.set_variable_value("track_0_tanlambda", tracks[0].getTanLambda())
        ft_maker.set_variable_value("track_0_phi0", tracks[0].getPhi0())
        ft_maker.set_variable_value("track_0_omega", tracks[0].getOmega())
        ft_maker.set_variable_value("track_0_d0", tracks[0].getD0())
        ft_maker.set_variable_value("track_0_z0", tracks[0].getZ0())
        ft_maker.set_variable_value("track_0_charge", tracks[0].getCharge())
        ft_maker.set_variable_value(
            "track_1_p", np.linalg.norm(np.asarray(tracks[1].getMomentum())))
        ft_maker.set_variable_value("track_1_px", tracks[1].getMomentum()[0])
        ft_maker.set_variable_value("track_1_py", tracks[1].getMomentum()[1])
        ft_maker.set_variable_value("track_1_pz", tracks[1].getMomentum()[2])
        ft_maker.set_variable_value("track_1_tanlambda", tracks[1].getTanLambda())
        ft_maker.set_variable_value("track_1_phi0", tracks[1].getPhi0())
        ft_maker.set_variable_value("track_1_omega", tracks[1].getOmega())
        ft_maker.set_variable_value("track_1_d0", tracks[1].getD0())
        ft_maker.set_variable_value("track_1_z0", tracks[1].getZ0())
        ft_maker.set_variable_value("track_1_charge", tracks[1].getCharge())
        ft_maker.fill()
    ft_maker.close()
def preprocess(self, dst_file):
    """Flatten HPS DST events into a tri-candidate ntuple.

    NOTE(review): essentially identical to the other tanlambda/charge
    preprocess variant in this source (only formatting differs) —
    presumably from a sibling module; confirm before deduplicating.
    Writes accepted events to "beam_tri_preprocessed_signal.root".
    """
    chain = r.TChain("HPS_Event")
    chain.Add(dst_file)
    tree = chain.GetTree()
    from ROOT import HpsEvent
    hps_event = HpsEvent()
    branch = chain.SetBranchAddress("Event", r.AddressOf(hps_event))
    # Create the file name for the preprocessed ROOT file
    #output_file_name = root_file.GetName()[str(root_file.GetName()).rindex("/") + 1:-5]
    output_file_name = "beam_tri_preprocessed_signal.root"
    # Add variables to the ntuple
    ft_maker = ft.FlatTupleMaker(output_file_name)
    ft_maker.add_variable("cluster_0_energy")
    ft_maker.add_variable("cluster_1_energy")
    ft_maker.add_variable("cluster_0_x")
    ft_maker.add_variable("cluster_1_x")
    ft_maker.add_variable("cluster_0_y")
    ft_maker.add_variable("cluster_1_y")
    ft_maker.add_variable("track_0_p")
    ft_maker.add_variable("track_0_px")
    ft_maker.add_variable("track_0_py")
    ft_maker.add_variable("track_0_pz")
    ft_maker.add_variable("track_0_tanlambda")
    ft_maker.add_variable("track_0_phi0")
    ft_maker.add_variable("track_0_omega")
    ft_maker.add_variable("track_0_d0")
    ft_maker.add_variable("track_0_z0")
    ft_maker.add_variable("track_0_charge")
    ft_maker.add_variable("track_1_p")
    ft_maker.add_variable("track_1_px")
    ft_maker.add_variable("track_1_py")
    ft_maker.add_variable("track_1_pz")
    ft_maker.add_variable("track_1_tanlambda")
    ft_maker.add_variable("track_1_phi0")
    ft_maker.add_variable("track_1_omega")
    ft_maker.add_variable("track_1_d0")
    ft_maker.add_variable("track_1_z0")
    ft_maker.add_variable("track_1_charge")
    matcher = tcm.TrackClusterMatcher()
    for entry in xrange(0, chain.GetEntries()):
        if (entry+1)%1000 == 0 : print "Event " + str(entry+1)
        chain.GetEntry(entry)
        #if not au.is_good_data_event(hps_event) : continue
        # Loop through all clusters in the event and find a 'good' pair.
        # For now, a 'good' pair is defined as two clusters whose cluster
        # time difference is less than 1.7 ns and greater than -1.6 ns
        cluster_pair = au.get_good_cluster_pair(hps_event)
        if not self.is_good_cluster_pair(cluster_pair) : continue
        matcher.find_all_matches(hps_event)
        tracks = [matcher.get_track(cluster_pair[0]), matcher.get_track(cluster_pair[1])]
        # Require both clusters to have a matched track.
        if (tracks[0] is None) or (tracks[1] is None) : continue
        ft_maker.set_variable_value("cluster_0_energy", cluster_pair[0].getEnergy())
        ft_maker.set_variable_value("cluster_1_energy", cluster_pair[1].getEnergy())
        ft_maker.set_variable_value("cluster_0_x", cluster_pair[0].getPosition()[0])
        ft_maker.set_variable_value("cluster_1_x", cluster_pair[1].getPosition()[0])
        ft_maker.set_variable_value("cluster_0_y", cluster_pair[0].getPosition()[1])
        ft_maker.set_variable_value("cluster_1_y", cluster_pair[1].getPosition()[1])
        ft_maker.set_variable_value("track_0_p", np.linalg.norm(np.asarray(tracks[0].getMomentum())))
        ft_maker.set_variable_value("track_0_px", tracks[0].getMomentum()[0])
        ft_maker.set_variable_value("track_0_py", tracks[0].getMomentum()[1])
        ft_maker.set_variable_value("track_0_pz", tracks[0].getMomentum()[2])
        ft_maker.set_variable_value("track_0_tanlambda", tracks[0].getTanLambda())
        ft_maker.set_variable_value("track_0_phi0", tracks[0].getPhi0())
        ft_maker.set_variable_value("track_0_omega", tracks[0].getOmega())
        ft_maker.set_variable_value("track_0_d0", tracks[0].getD0())
        ft_maker.set_variable_value("track_0_z0", tracks[0].getZ0())
        ft_maker.set_variable_value("track_0_charge", tracks[0].getCharge())
        ft_maker.set_variable_value("track_1_p", np.linalg.norm(np.asarray(tracks[1].getMomentum())))
        ft_maker.set_variable_value("track_1_px", tracks[1].getMomentum()[0])
        ft_maker.set_variable_value("track_1_py", tracks[1].getMomentum()[1])
        ft_maker.set_variable_value("track_1_pz", tracks[1].getMomentum()[2])
        ft_maker.set_variable_value("track_1_tanlambda", tracks[1].getTanLambda())
        ft_maker.set_variable_value("track_1_phi0", tracks[1].getPhi0())
        ft_maker.set_variable_value("track_1_omega", tracks[1].getOmega())
        ft_maker.set_variable_value("track_1_d0", tracks[1].getD0())
        ft_maker.set_variable_value("track_1_z0", tracks[1].getZ0())
        ft_maker.set_variable_value("track_1_charge", tracks[1].getCharge())
        ft_maker.fill()
    ft_maker.close()
def preprocess(self, dst_file):
    """Flatten HPS DST events into an ntuple of cluster/track variables.

    NOTE(review): essentially identical to the other theta/p_sum preprocess
    variant in this source (only formatting differs) — presumably from a
    sibling module; confirm before deduplicating.
    Writes accepted events to self.output_file_name.
    """
    chain = r.TChain("HPS_Event")
    chain.Add(dst_file)
    tree = chain.GetTree()
    from ROOT import HpsEvent
    hps_event = HpsEvent()
    branch = chain.SetBranchAddress("Event", r.AddressOf(hps_event))
    # Add variables to the ntuple
    ft_maker = ft.FlatTupleMaker(self.output_file_name)
    ft_maker.add_variable("cluster_0_energy")
    ft_maker.add_variable("cluster_1_energy")
    ft_maker.add_variable("cluster_0_x")
    ft_maker.add_variable("cluster_1_x")
    ft_maker.add_variable("cluster_0_y")
    ft_maker.add_variable("cluster_1_y")
    ft_maker.add_variable("track_0_p")
    ft_maker.add_variable("track_0_px")
    ft_maker.add_variable("track_0_py")
    ft_maker.add_variable("track_0_pz")
    ft_maker.add_variable("track_0_theta")
    ft_maker.add_variable("track_0_phi0")
    ft_maker.add_variable("track_0_omega")
    ft_maker.add_variable("track_0_d0")
    ft_maker.add_variable("track_0_z0")
    ft_maker.add_variable("track_1_p")
    ft_maker.add_variable("track_1_px")
    ft_maker.add_variable("track_1_py")
    ft_maker.add_variable("track_1_pz")
    ft_maker.add_variable("track_1_theta")
    ft_maker.add_variable("track_1_phi0")
    ft_maker.add_variable("track_1_omega")
    ft_maker.add_variable("track_1_d0")
    ft_maker.add_variable("track_1_z0")
    ft_maker.add_variable("track_pair_p_sum")
    matcher = tcm.TrackClusterMatcher()
    for entry in xrange(0, chain.GetEntries()):
        if (entry+1)%1000 == 0 : print "Event " + str(entry+1)
        chain.GetEntry(entry)
        # Loop through all clusters in the event and find a 'good' pair.
        # For now, a 'good' pair is defined as two clusters whose cluster
        # time difference is less than 1.6 ns and greater than -1.6 ns
        cluster_pair = au.get_good_cluster_pair(hps_event)
        if len(cluster_pair) != 2 : continue
        matcher.find_all_matches(hps_event)
        tracks = [matcher.get_track(cluster_pair[0]), matcher.get_track(cluster_pair[1])]
        # Require both clusters to have a matched track.
        if (tracks[0] is None) or (tracks[1] is None) : continue
        ft_maker.set_variable_value("cluster_0_energy", cluster_pair[0].getEnergy())
        ft_maker.set_variable_value("cluster_1_energy", cluster_pair[1].getEnergy())
        ft_maker.set_variable_value("cluster_0_x", cluster_pair[0].getPosition()[0])
        ft_maker.set_variable_value("cluster_1_x", cluster_pair[1].getPosition()[0])
        ft_maker.set_variable_value("cluster_0_y", cluster_pair[0].getPosition()[1])
        ft_maker.set_variable_value("cluster_1_y", cluster_pair[1].getPosition()[1])
        ft_maker.set_variable_value("track_0_p", np.linalg.norm(np.asarray(tracks[0].getMomentum())))
        ft_maker.set_variable_value("track_0_px", tracks[0].getMomentum()[0])
        ft_maker.set_variable_value("track_0_py", tracks[0].getMomentum()[1])
        ft_maker.set_variable_value("track_0_pz", tracks[0].getMomentum()[2])
        # Polar angle measured from the beam plane (pi/2 minus acos).
        track_0_theta = math.fabs(math.pi/2 - math.acos(te.get_cos_theta(tracks[0])))
        ft_maker.set_variable_value("track_0_theta", track_0_theta)
        ft_maker.set_variable_value("track_0_phi0", tracks[0].getPhi0())
        ft_maker.set_variable_value("track_0_omega", tracks[0].getOmega())
        ft_maker.set_variable_value("track_0_d0", tracks[0].getD0())
        ft_maker.set_variable_value("track_0_z0", tracks[0].getZ0())
        ft_maker.set_variable_value("track_1_p", np.linalg.norm(np.asarray(tracks[1].getMomentum())))
        ft_maker.set_variable_value("track_1_px", tracks[1].getMomentum()[0])
        ft_maker.set_variable_value("track_1_py", tracks[1].getMomentum()[1])
        ft_maker.set_variable_value("track_1_pz", tracks[1].getMomentum()[2])
        track_1_theta = math.fabs(math.pi/2 - math.acos(te.get_cos_theta(tracks[1])))
        ft_maker.set_variable_value("track_1_theta", track_1_theta)
        ft_maker.set_variable_value("track_1_phi0", tracks[1].getPhi0())
        ft_maker.set_variable_value("track_1_omega", tracks[1].getOmega())
        ft_maker.set_variable_value("track_1_d0", tracks[1].getD0())
        ft_maker.set_variable_value("track_1_z0", tracks[1].getZ0())
        p_sum = np.linalg.norm(np.asarray(tracks[0].getMomentum())) + np.linalg.norm(np.asarray(tracks[1].getMomentum()))
        ft_maker.set_variable_value("track_pair_p_sum", p_sum)
        ft_maker.fill()
    ft_maker.close()
def MDtest_iswrec(Nreal,minreal=0,justgetrho=0,dorho=1,Ndesbins=[2,3],lmin=3,lmax=80,rhofiletag='',nvss=True,fitbias=True):
    """Run ISW reconstructions on previously generated MD-test maps.

    Builds a zero-realization "dummy" glm object (for bookkeeping) and hands
    the realization range plus reconstruction list to au.doiswrec_formaps.
    Note: justgetrho and dorho are accepted for signature compatibility but
    are not used in this body.
    """
    realization_ids = np.arange(minreal, minreal + Nreal)
    cldat = MDtest_get_Cl(justread=False, Ndesbins=Ndesbins, nvss=nvss)
    reclist = MDtest_get_reclist(Ndesbins=Ndesbins, lmin=lmin, lmax=lmax, nvss=nvss)
    dummyglm = gmc.get_glm(cldat, Nreal=0, runtag=cldat.rundat.tag)
    au.doiswrec_formaps(dummyglm, cldat,
                        rlzns=realization_ids,
                        reclist=reclist,
                        rhofiletag=rhofiletag,
                        dos=False,
                        fitbias=fitbias)