configure_pat_tuple
import argparse
import fnmatch
import os
import sys

from FinalStateAnalysis.MetaData.datadefs import datadefs
# fsa_version supplies the framework version string printed below; this import
# path is an assumption.
from FinalStateAnalysis.Utilities.version import fsa_version

parser = argparse.ArgumentParser(description='Build PAT Tuple CRAB submission')
parser.add_argument('jobid', help='Job ID identifier')
parser.add_argument('--samples', nargs='+', type=str, required=False,
                    help='Filter samples using a list of patterns (shell style)')
# --glite and --nowhitelist are referenced when the crab.cfg is written below
parser.add_argument('--glite', action='store_true',
                    help='Use the glite scheduler instead of glidein')
parser.add_argument('--nowhitelist', action='store_true',
                    help='Do not apply the US T2/T3 SE white list')
args = parser.parse_args()
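# Example invocation (job ID and sample patterns are illustrative):
#   python configure_pat_tuple.py 2012-05-29-PatTuple --samples 'Zjets*' 'data_*'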

cfg = 'patTuple_cfg.py'
jobId = args.jobid

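# The prints below emit a shell script on stdout (a '#'-prefixed comment line
# and an 'export TERMCAP=screen'), presumably meant to be redirected or piped
# to bash.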
print " # Job ID: %s Version: %s" % (jobId, fsa_version())
print 'export TERMCAP=screen'
for sample in sorted(datadefs.keys()):
    sample_info = datadefs[sample]

    passes_filter = True

    # Filter by sample wildcards
    if args.samples:
        passes_wildcard = False
        for pattern in args.samples:
            if fnmatch.fnmatchcase(sample, pattern):
                passes_wildcard = True
        passes_filter = passes_wildcard and passes_filter
    if not passes_filter:
        continue
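    # fnmatch.fnmatchcase does case-sensitive, shell-style matching, e.g.
    # fnmatchcase('Zjets_M50', 'Zjets*') is True (the sample name is illustrative).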

    submit_dir_base = "/scratch/{logname}/{jobid}/{sample}".format(
'''

Print out the size and number of events for each sample, and a summary for each
responsible person.


Author: Evan K. Friis, UW Madison

'''

from FinalStateAnalysis.MetaData.datadefs import datadefs
from FinalStateAnalysis.MetaData.datatools import query_das
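# query_das looks up the dataset path in the CMS Data Aggregation System (DAS);
# the result is assumed to be a dict carrying the 'nfiles', 'nevents', and
# 'size' (in GB) keys used below.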

people = {}

for dataset in sorted(datadefs.keys()):
    dataset_info = datadefs[dataset]
    result = query_das(dataset_info['datasetpath'])
    print " ".join([
        dataset,
        'Files: %i' % result['nfiles'],
        'Events: %s' % result['nevents'],
        'Size: %0.0f GB' % result['size'],
        'Resp: %s' % dataset_info['responsible']
    ])
    # Accumulate the total size attributed to each responsible person
    people.setdefault(dataset_info['responsible'], 0)
    people[dataset_info['responsible']] += result['size']

print ""
print "Job summary"
print "==========="
# Open the base crab.cfg (path assumed, mirroring the multicrab.cfg open below)
f = open('%s/crab.cfg' % jobId, 'w')
# The scheduler is glidein unless --glite is given
f.write('[CRAB]\njobtype = cmssw\nscheduler = %s\nuse_server = 1\n' %
        ('glidein' if not args.glite else 'glite'))
f.write('[USER]\nreturn_data = 0\ncopy_data = 1\nstorage_element = T2_US_Wisconsin\n')
f.write('publish_data = 1\ndbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_ph_analysis_01_writer/servlet/DBSServlet\n')
f.write('[GRID]\nrb = CERN\nmaxtarballsize = 250\n')
if not args.nowhitelist:
    f.write('se_white_list = T2_US, T3_US\n')
f.close()
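
# For reference, the writes above produce a crab.cfg along these lines (the
# se_white_list entry appears only when --nowhitelist is not given):
#   [CRAB]
#   jobtype = cmssw
#   scheduler = glidein   (glite when --glite is passed)
#   use_server = 1
#   [USER]
#   return_data = 0
#   copy_data = 1
#   storage_element = T2_US_Wisconsin
#   publish_data = 1
#   dbs_url_for_publication = https://cmsdbsprod.cern.ch:8443/cms_dbs_ph_analysis_01_writer/servlet/DBSServlet
#   [GRID]
#   rb = CERN
#   maxtarballsize = 250
#   se_white_list = T2_US, T3_US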

# Make multicrab.cfg
f = open('%s/multicrab.cfg' % jobId, 'w')
f.write('[MULTICRAB]\ncfg = crab.cfg\n')
f.write('[COMMON]\nCMSSW.get_edm_output = 1\n\n')
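# [COMMON] options are applied to every per-sample section that follows;
# CMSSW.get_edm_output = 1 tells CRAB the job produces EDM output to retrieve.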

# Loop over samples
for sample in sorted(datadefs.keys()):
    sample_info = datadefs[sample]
    passes_filter = True
    # Filter by sample wildcards
    if args.samples:
        passes_wildcard = False
        for pattern in args.samples:
            if fnmatch.fnmatchcase(sample, pattern):
                passes_wildcard = True
        passes_filter = passes_wildcard and passes_filter
    if not passes_filter:
        continue

    f.write('[%s]\n' % sample)