Code Example #1
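# Merges each process's ROOT tuple files with hadd, 200 input files per output.
# Assumed context for this sketch (not shown in the original snippet): "dataset"
# is the project's dataset-catalog module, and ds_generation and out_dir are
# defined elsewhere in the script.
import sys
from subprocess import Popen, PIPE

import dataset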
def main():
	result = dataset.get_datasets(generation=ds_generation, set_info=True)
	result = {k: v for k, v in result.iteritems() if k == "qcdmg"}  # Keep only the "qcdmg" process.
	for process, dss in result.iteritems():
		print process
		
		files = []
		for ds in dss:
			if ds.tuple_path:
				files += ds.tuple_path
		
		n = 200  # Number of input files per hadd group (one output file per group).
		groups = [files[i:i+n] for i in xrange(0, len(files), n)]
		if len(groups) > 500:
			print "ERROR: So many files!"
			sys.exit()
		if groups:
			for i, group in enumerate(groups):
				print i, len(group)
				if i > 21:  # Only (re)process groups with index above 21; an earlier pass targeted indices [14, 19].
					out_file = "{}/{}_tuple_{}.root".format(out_dir, process, i)
					cmd = "hadd -f {}".format(out_file)            # "-f" forces overwrite of output file.
					for f in group:
						if "_1405.root" not in f and "_1323.root" not in f and "_1443.root" not in f and "_1199.root" not in f and "_1464.root" not in f:
							cmd += " root://cmseos.fnal.gov/{}".format(f)
					# Run hadd through the shell and capture (stdout, stderr).
					raw_output = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
					if raw_output[1]:
						print raw_output[0]
						print raw_output[1]
						sys.exit()
					else:
						print out_file
		else:
			print "There were no tuple files to combine."
Code Example #2
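# Copies locally produced *_tuple.root files into each dataset's EOS area.
# Assumed context for this sketch: "dataset" and "eos" are project-local helper
# modules, and dir_local and dir_eos are defined elsewhere in the script.
import os

import dataset
import eos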
def main():
	files = [f for f in os.listdir(dir_local) if ".root" in f]
	tuples = [f for f in files if "_tuple" in f]
	tuples = [t for t in tuples if "QCD" not in t]
	if tuples:
		for t in tuples:
			name = t.split("_tuple")[0]
			ds = dataset.get_datasets(name=name).values()[0][0]  # First dataset of the first (only) process returned for this name.
			if ds:
				dir_store = "{}/{}/{}_tuples".format(dir_eos, ds.name, ds.subprocess)
				eos.mkdir(dir_store)
				eos.cp("{}/{}".format(dir_local, t), dir_store)
				print "{} went into {}".format(t, dir_store)
			else:
				print "I don't know what DS this belongs to."
	else:
		print "There aren't any tuples to organize in {}.".format(dir_local)
	return True
Code Example #3
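# Writes a ROOT macro (load_trees.cc) that builds one TChain per process and
# adds every analyzable tuple file to it.
# Assumed context for this sketch: "dataset" is the project's dataset-catalog
# module, and "cmslpc" is a boolean defined elsewhere that controls whether the
# xrootd redirector prefix is prepended to each file path.
import dataset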
def make_load_trees():
    script_name = "load_trees.cc"

    # Make the script:
    script = "{\n"
    print "Making {} ...".format(script_name)
    dsd = dataset.get_datasets(set_info=True)
    for process, dss in dsd.iteritems():
        script += '\tTChain* {} = new TChain("analyzer/events");\n'.format(process)
        for ds in dss:
            if ds.analyze:
                for f in ds.tuple_path:
                    script += '\t{}->Add("{}{}");\n'.format(process, "root://cmsxrootd.fnal.gov/" if cmslpc else "", f)
        script += "\t\n"
    script += "//\tgROOT->SetBatch();\n"
    script += "}"

    # Output the script:
    with open(script_name, "w") as out:
        out.write(script)
Code Example #4
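# Writes a shell script that copies the "sqto2j" and "sqto4j" tuples from EOS
# to a local directory, rsyncs them to lxplus, and removes the local copies.
# Assumed context for this sketch: "dataset" is the project's dataset-catalog
# module, and dir_local is defined elsewhere in the script.
import dataset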
def make_send_to_lxplus():
    script_name = "send_to_lxplus.sh"
    script = ""

    # Make the script:
    print "Making {} ...".format(script_name)
    dsd = dataset.get_datasets(category=["sqto2j", "sqto4j"])
    # 	print dsd
    for process, dss in dsd.iteritems():
        for ds in dss:
            # 			print ds.name, ds.tuple_path
            script += "xrdcp -f root://cmseos.fnal.gov/{} {} &&\n".format(ds.tuple_path, dir_local)
            script += "rsync -rlthv --progress {}/{} [email protected]:/afs/cern.ch/user/e/elhughes/work/data/fat &&\n".format(
                dir_local, ds.tuple_path.split("/")[-1]
            )
            script += "rm {}/{} &&\n".format(dir_local, ds.tuple_path.split("/")[-1])
    script += "ls {}".format(dir_local)

    # 	script = "rsync -rlthv --progress --append-verify {}/*_tuple.root [email protected]:/afs/cern.ch/user/e/elhughes/work/data/fat\n".format(dataset.tuple_dir)
    # 	script = "rsync -rlthv --progress --append-verify {}/*anatuples.root [email protected]:/afs/cern.ch/user/e/elhughes/work/data/fat\n".format(dataset.tuple_dir)

    # Output the script:
    with open(script_name, "w") as out:
        out.write(script)
Code Example #5
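# Fragment of a CMSSW (cmsRun) configuration: registers command-line options
# and resolves the input MiniAOD files from the dataset catalog.
# Assumed context for this sketch: earlier in the file, "options" is a
# VarParsing("analysis") object, "process" is a cms.Process, and sys, re, and
# the project's "dataset" module are imported.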
options.register('doTrim',
	True,
	VarParsing.multiplicity.singleton,
	VarParsing.varType.bool,
	"Set to false to not perform trimming"
)

options.parseArguments()
options.cmssw = ''.join([d for d in list((re.search("/?([^/]*)$", options.cmssw)).group(1)) if d.isdigit()])		# Format the "cmssw" option, so "CMSSW_7_3_2_patch2" turns into "7322" if necessary.
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(options.maxEvents))		# Set up the number of events to run over.

### Input:
in_files = ["{0}/{1}".format(options.inDir, f) for f in options.inFile]
if (not in_files) and (not options.crab):
	assert options.dataset
	result = dataset.get_datasets(name=options.dataset, generation=options.generation, set_info=True)
	print result
	ds = result.values()[0][0]  # First dataset of the first (only) matching process.
	print ds.miniaod_path
	in_files = ds.miniaod_path
	assert hasattr(ds, "subprocess")
	options.subprocess = ds.subprocess

if (not in_files) and (not options.crab):
	print "ERROR: You're not running over any files!"
	sys.exit()

### Output:
if not options.outFile:
	options.outFile = "{0}_{1}_jets.root".format(options.subprocess, options.cmssw)
# /SET UP