def main(): utils.load_packages() parser = optparse.OptionParser(usage=usage) parser.add_option('-v', '--verbose', action='store_true', default=False) (opts, args) = parser.parse_args() if not args_are_valid(args) : parser.error('invalid arguments') verbose = opts.verbose input_files = [i for ia in args for i in parse_input(ia)] allOpts = [x.dest for x in parser._get_all_options()[1:]] if opts.verbose : print '\nUsing the following opts:\n' print '\n'.join("%s : %s"%(o, str(getattr(opts, o))) for o in allOpts) file_counters = {'processed':0, 'flagged':0} event_counters = {'processed':0, 'flagged':0} for input_file in input_files: input_counts, filtered_counts = initial_counts_differ_from_susyprop_counts(input_file, verbose=opts.verbose) flagged = input_counts!=filtered_counts if flagged: file_counters['flagged'] += 1 print "{0}!={1}, {2}".format(input_counts, filtered_counts, input_file) file_counters['processed'] += 1 event_counters['processed'] += input_counts event_counters['flagged'] += (input_counts-filtered_counts) print "Processed {0} inputs, {1} have events flagged with {2}".format(file_counters['processed'], file_counters['flagged'], susyprop_label) print "Fraction of events flagged: {0:.2%}".format(event_counters['flagged'] / event_counters['processed'])
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() parser = optparse.OptionParser(usage=usage) parser.add_option('-f', '--fill', action='store_true', default=False, help='force filling histos') parser.add_option('-l', '--labels', help='labels for plots (comma separated)') parser.add_option('-n', '--normalize', action='store_true', help='normalize histograms to unity') parser.add_option('-N', '--num-entries', help='loop only over N entries') parser.add_option('-s', '--sample', help='sample name') parser.add_option('-v', '--verbose', action='store_true', default=False) (opts, args) = parser.parse_args() print 'labels ',opts.labels if not args_are_valid(args) : parser.error('invalid arguments') verbose = opts.verbose input1 = args[0] input2 = args[1] out_dir = args[2] histo_cache_filename = os.path.join(out_dir, 'plot_comparison.root') fill_histos = opts.fill or not os.path.exists(histo_cache_filename) allOpts = [x.dest for x in parser._get_all_options()[1:]] if opts.verbose : print '\nUsing the following opts:\n' print '\n'.join("%s : %s"%(o, str(getattr(opts, o))) for o in allOpts) print '\n'.join(["%s : %s"%(o, str(eval(o))) for o in ['histo_cache_filename', 'fill_histos']]) if fill_histos : run_fill(input1, input2, histo_cache_filename, opts) run_plot(histo_cache_filename, out_dir, opts)
def main(): utils.load_packages() parser = optparse.OptionParser(usage=usage) parser.add_option('-v', '--verbose', action='store_true', default=False) (opts, args) = parser.parse_args() if not args_are_valid(args): parser.error('invalid arguments') verbose = opts.verbose input_files = [i for ia in args for i in parse_input(ia)] allOpts = [x.dest for x in parser._get_all_options()[1:]] if opts.verbose: print '\nUsing the following opts:\n' print '\n'.join("%s : %s" % (o, str(getattr(opts, o))) for o in allOpts) file_counters = {'processed': 0, 'flagged': 0} event_counters = {'processed': 0, 'flagged': 0} for input_file in input_files: input_counts, filtered_counts = initial_counts_differ_from_susyprop_counts( input_file, verbose=opts.verbose) flagged = input_counts != filtered_counts if flagged: file_counters['flagged'] += 1 print "{0}!={1}, {2}".format(input_counts, filtered_counts, input_file) file_counters['processed'] += 1 event_counters['processed'] += input_counts event_counters['flagged'] += (input_counts - filtered_counts) print "Processed {0} inputs, {1} have events flagged with {2}".format( file_counters['processed'], file_counters['flagged'], susyprop_label) print "Fraction of events flagged: {0:.2%}".format( event_counters['flagged'] / event_counters['processed'])
def main():
    """Print trigger info for a single susyNt file, or for every file in a directory."""
    utils.load_packages()
    if len(sys.argv) < 2:
        print("Usage:\n\tinput file: {} susyNt.root\n\tinput dir: {} /<path-to-susyNt-samples>/".format(sys.argv[0], sys.argv[0]))
        return
    # renamed from 'input': avoid shadowing the builtin
    input_path = sys.argv[1]
    if is_file(input_path):
        trig_info_from_file(input_path, False)
    elif is_dir(input_path):
        trig_info_from_dir(input_path)
    # NOTE(review): silently does nothing when the argument is neither a
    # file nor a directory — presumably intentional, confirm with callers
def main():
    """Print trigger info for a single susyNt file, or for every file in a directory."""
    utils.load_packages()
    if len(sys.argv) < 2:
        print(
            "Usage:\n\tinput file: {} susyNt.root\n\tinput dir: {} /<path-to-susyNt-samples>/"
            .format(sys.argv[0], sys.argv[0]))
        return
    # renamed from 'input': avoid shadowing the builtin
    input_path = sys.argv[1]
    if is_file(input_path):
        trig_info_from_file(input_path, False)
    elif is_dir(input_path):
        trig_info_from_dir(input_path)
    # NOTE(review): silently does nothing when the argument is neither a
    # file nor a directory — presumably intentional, confirm with callers
def main():
    """Open the susyNt tree of the given file in ROOT's tree viewer.

    Blocks until the user answers 'y' to the quit prompt, keeping the
    viewer window alive.
    """
    # bring in the compiled packages and dictionaries before touching the tree
    utils.load_packages()
    utils.generate_dicts()
    utils.import_SUSYDefs_enums()
    root_file = r.TFile.Open(sys.argv[1])
    susy_tree = root_file.Get('susyNt')
    susy_tree.StartViewer()
    # keep the process (and the viewer) alive until 'y' is entered
    while raw_input('Quit?') != 'y':
        pass
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() sample_name = 'Sherpa_CT10_lllnu_WZ' input_dir = '/var/tmp/susynt_dev/data/ntup_susy/' #input_dir = '/var/tmp/susynt_dev/data/ntup_common/' input_files = glob.glob(os.path.join(input_dir, '*.root*')) chain = r.TChain('susyNt') for f in input_files : chain.Add(f) num_entries = chain.GetEntries() num_entries_to_process = num_entries if num_entries<1e4 else int(1e4) print "About to loop on %d entries"%num_entries_to_process run_with_chain(chain, num_entries_to_process)
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() sample_name = 'mc14_13TeV.110401.PowhegPythia_P2012_ttbar_nonallhad' # input_dir = '/var/tmp/susynt_dev/data/ntup_susy/' # #input_dir = '/var/tmp/susynt_dev/data/ntup_common/' # input_files = glob.glob(os.path.join(input_dir, '*.root*')) input_files = ['./susyNt.root'] chain = r.TChain('susyNt') for f in input_files : chain.Add(f) num_entries = chain.GetEntries() num_entries_to_process = num_entries if num_entries<1e4 else int(1e4) print "About to loop on %d entries"%num_entries_to_process run_with_chain(chain, num_entries_to_process)
def main():
    """Report per-branch on-disk size (kb/event) for each susyNt file on argv.

    For every input file, prints a table with one row per branch (sorted by
    compressed size per event) followed by a total row.
    """
    utils.load_packages()
    if len(sys.argv) < 2:
        print("Usage:\n{} susyNt1.root [susyNt2.root ...]".format(sys.argv[0]))
        return
    input_file_names = sys.argv[1:]
    tree_name = 'susyNt'
    col_width = 20
    lstr_col = ('{:<'+str(col_width)+'s}')
    rstr_col = ('{:>'+str(col_width)+'s}')
    num_col = ('{:>'+str(col_width)+'.2f}')
    fields = ['branch_name', 'kb/event']
    # probably we do not need the in-memory footprint; uncomment if you do
    # fields = ['branch_name', 'kb/event', 'memory kb/event']
    header_template = ' '.join([lstr_col]+[rstr_col for f in fields[1:]])
    line_template = ' '.join([lstr_col]+[num_col for f in fields[1:]])
    header = header_template.format(*fields)
    line_break = '-'*(col_width*len(fields)+1*(len(fields)-1))
    for input_file_name in input_file_names:
        input_file = r.TFile.Open(input_file_name)
        tree = input_file.Get(tree_name)
        num_entries = tree.GetEntries()
        if not num_entries:
            # bug fix: avoid ZeroDivisionError on an empty tree
            print("{} has no entries, skipping".format(input_file_name))
            input_file.Close()
            continue
        per_event = 1.0 / (Units.kb * num_entries)
        branches = tree.GetListOfBranches()
        print ("{} size on disk {:.2f} kb ({} entries)".format(
            input_file_name, os.path.getsize(input_file_name)/Units.kb, num_entries))
        print (line_break)
        print (header)
        print (line_break)
        # one attribute dict per branch; sizes normalized to kb per event
        branch_attributes = [{'branch_name': b.GetName(),
                              'kb/event': b.GetZipBytes()*per_event,
                              'memory kb/event': b.GetTotalSize()*per_event}
                             for b in branches]
        branch_attributes = sorted(branch_attributes, key=lambda a: a['kb/event'])
        lines = [line_template.format(*(a[k] for k in fields))
                 for a in branch_attributes]
        print ('\n'.join(lines))
        branch_attributes.append({'branch_name': 'total',
                                  'kb/event': sum(a['kb/event'] for a in branch_attributes),
                                  'memory kb/event': sum(a['memory kb/event'] for a in branch_attributes)})
        print(line_break)
        print (line_template.format(*[branch_attributes[-1][k] for k in fields]))
        input_file.Close()
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() sample_name = 'mc14_13TeV.110401.PowhegPythia_P2012_ttbar_nonallhad' # input_dir = '/var/tmp/susynt_dev/data/ntup_susy/' # #input_dir = '/var/tmp/susynt_dev/data/ntup_common/' # input_files = glob.glob(os.path.join(input_dir, '*.root*')) input_files = ['./susyNt.root'] chain = r.TChain('susyNt') for f in input_files: chain.Add(f) num_entries = chain.GetEntries() num_entries_to_process = num_entries if num_entries < 1e4 else int(1e4) print "About to loop on %d entries" % num_entries_to_process run_with_chain(chain, num_entries_to_process)
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() sample_name = "Sherpa_CT10_lllnu_WZ" input_dir = "/var/tmp/susynt_dev/data/ntup_susy/" # input_dir = '/var/tmp/susynt_dev/data/ntup_common/' input_files = glob.glob(os.path.join(input_dir, "*.root*")) chain = r.TChain("susyNt") for f in input_files: chain.Add(f) num_entries = chain.GetEntries() num_entries_to_process = num_entries if num_entries < 1e4 else int(1e4) print "About to loop on %d entries" % num_entries_to_process run_with_chain(chain, num_entries_to_process)
def main(): utils.load_packages() utils.generate_dicts() utils.import_SUSYDefs_enums() parser = optparse.OptionParser(usage=usage) parser.add_option('-f', '--fill', action='store_true', default=False, help='force filling histos') parser.add_option('-l', '--labels', help='labels for plots (comma separated)') parser.add_option('-n', '--normalize', action='store_true', help='normalize histograms to unity') parser.add_option('-N', '--num-entries', help='loop only over N entries') parser.add_option('-s', '--sample', help='sample name') parser.add_option('-v', '--verbose', action='store_true', default=False) (opts, args) = parser.parse_args() print 'labels ', opts.labels if not args_are_valid(args): parser.error('invalid arguments') verbose = opts.verbose input1 = args[0] input2 = args[1] out_dir = args[2] histo_cache_filename = os.path.join(out_dir, 'plot_comparison.root') fill_histos = opts.fill or not os.path.exists(histo_cache_filename) allOpts = [x.dest for x in parser._get_all_options()[1:]] if opts.verbose: print '\nUsing the following opts:\n' print '\n'.join("%s : %s" % (o, str(getattr(opts, o))) for o in allOpts) print '\n'.join([ "%s : %s" % (o, str(eval(o))) for o in ['histo_cache_filename', 'fill_histos'] ]) if fill_histos: run_fill(input1, input2, histo_cache_filename, opts) run_plot(histo_cache_filename, out_dir, opts)
"--input", help="Input directory containing text files", default="") (options, args) = parser.parse_args() input_directory = options.input if input_directory == "": print "ERROR Provided directory is \"\"" sys.exit() if not os.path.isdir(input_directory): print "ERROR Provided directory does not exist" sys.exit() # load in the C++/RootCore packages utils.load_packages() filelists = get_filelists(input_directory) first_files = get_first_files(filelists) n_total = len(first_files) n_checked = 0 bad_datasets = [] for f in first_files: n_checked += 1 check = check_weights(f, n_checked, n_total) if not (check == ""): bad_datasets.append(check) print 60 * "-"
if __name__ == "__main__" : parser = OptionParser() parser.add_option("-i", "--input", help="Input directory containing text files", default = "") (options,args) = parser.parse_args() input_directory = options.input if input_directory == "" : print "ERROR Provided directory is \"\"" sys.exit() if not os.path.isdir(input_directory) : print "ERROR Provided directory does not exist" sys.exit() # load in the C++/RootCore packages utils.load_packages() filelists = get_filelists(input_directory) first_files = get_first_files(filelists) n_total = len(first_files) n_checked = 0 bad_datasets = [] for f in first_files : n_checked+=1 check = check_weights(f, n_checked, n_total) if not (check=="") : bad_datasets.append(check) print 60*"-"