h_srl  = ROOT.TH2F('h_srl', 'h_srl', gl_bins, gl_min, gl_max, n1_bins, n1_min, n1_max)
h_srh  = ROOT.TH2F('h_srh', 'h_srh', gl_bins, gl_min, gl_max, n1_bins, n1_min, n1_max)
h_sril = ROOT.TH2F('h_sril', 'h_sril', gl_bins, gl_min, gl_max, n1_bins, n1_min, n1_max)
h_srih = ROOT.TH2F('h_srih', 'h_srih', gl_bins, gl_min, gl_max, n1_bins, n1_min, n1_max)

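# detach the histograms from the current ROOT directory so they stay in memory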
h_srl.SetDirectory(0)
h_srh.SetDirectory(0)
h_sril.SetDirectory(0)
h_srih.SetDirectory(0)

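# hand ownership to ROOT so Python's garbage collector does not delete them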
ROOT.SetOwnership(h_srl, False)
ROOT.SetOwnership(h_srh, False)
ROOT.SetOwnership(h_sril, False)
ROOT.SetOwnership(h_srih, False)


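# loop over the GGM (M3, mu) signal grid and extract per-point yields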
for (m3, mu) in sorted(mass_dict):

    mgl, mn1 = mass_dict[(m3, mu)]

    name = 'GGM_M3_mu_%i_%i' % (m3, mu)

    # total events
    ds = get_sample_datasets(name)[0]
    total_events = get_sumw(ds)

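    # skip grid points with no generated events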
    if total_events == 0:
        continue

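    # expected signal yields in SR_L and SR_H scaled to the data luminosity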
    srl_events_scaled = get_events(name, selection=SR_L, lumi='data').mean
    srh_events_scaled = get_events(name, selection=SR_H, lumi='data').mean
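    # NOTE: the snippet ends before the histograms are filled; a minimal
    # sketch of the assumed filling step for the computed SR_L/SR_H yields:
    h_srl.Fill(mgl, mn1, srl_events_scaled)
    h_srh.Fill(mgl, mn1, srh_events_scaled)

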
def main():

    parser = argparse.ArgumentParser(description='run limit (batch)')

    parser.add_argument('-i', dest='histfile', help='Input file with histograms', required=True)
    parser.add_argument('-o', dest='output', help='Output directory for results', required=True)
    parser.add_argument('-c', dest='configfile', default=os.path.join(os.environ['SUSY_ANALYSIS'], 'lib/PhotonMet_HistFitter_config.py'), help='HF configfile')
    parser.add_argument('--sr', dest='region', help='SRL, SRH, SRinclL or SRinclH', required=True)
    parser.add_argument('--hf',  dest='hf_options', help='HF extra options')
    parser.add_argument('--queue',  default='8nh', help='Batch queue (8nh|1nd|...)')
    parser.add_argument('--nosyst', action='store_true', help='No systematics')
    parser.add_argument('--data', default='data', help='data|data15|data16')
    parser.add_argument('--asimov',  action='store_true', help='Use Asimov approximation')
    parser.add_argument('--ntoys',  default='5000', help='Number of toys (toys are used by default)')
    parser.add_argument('--dry', action='store_true', help='Dry run (not submit to batch)')
    parser.add_argument('--sigxs',  action='store_true', help='Forward --sigxs to the limit job')
    parser.add_argument('--only', help='Comma-separated points to run, e.g. 1600_450')
    parser.add_argument('--exclude', help='Comma-separated points to skip, e.g. 1600_450')
    parser.add_argument('--failed', help='File with failed points to re-run (one per line)')

    args = parser.parse_args()


    histfile = os.path.abspath(args.histfile)

    outdir = os.path.abspath(args.output)

    jobdir     = os.path.join(outdir, 'jobs')
    logdir     = os.path.join(outdir, 'logs')
    resultsdir = os.path.join(outdir, 'results')

    for d in (jobdir, logdir, resultsdir):
        if not os.path.isdir(d):
            os.makedirs(d)

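    # assemble the option string forwarded to the limit-setting job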
    syst_opt = '' if args.nosyst else ' --syst'

    if args.asimov:
        options = '-i %s --sr %s --asimov%s --data %s' % (histfile, args.region, syst_opt, args.data)
    else:
        options = '-i %s --sr %s --ntoys %s%s --data %s' % (histfile, args.region, args.ntoys, syst_opt, args.data)

    if args.sigxs:
        options += ' --sigxs'

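    # optional filtering: run only requested points and/or skip excluded ones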
    requested_points = []
    excluded_points  = []
    if args.only is not None:
        requested_points = args.only.split(',')
    if args.exclude is not None:
        excluded_points = args.exclude.split(',')

    if args.failed is not None:
        with open(args.failed) as f:
            requested_points.extend(l.strip() for l in f if l.strip())

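    # submit one batch job per remaining grid point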
    for (m3, mu) in mass_dict:

        point = '%d_%d' % (m3, mu)

        if requested_points and point not in requested_points:
            continue

        if point in excluded_points:
            continue

        write_and_submit_job(jobdir, logdir, resultsdir, args.configfile, options, point, args.region, args.queue, not args.dry)