default='IT81', help='Detector configuration') p.add_argument('-d', '--date', dest='date', help='Date to run over (optional)') p.add_argument('-b', '--bintype', dest='bintype', default='logdist', help='ShowerLLH binning used [standard|nozenith|logdist]') p.add_argument('--test', dest='test', default=False, action='store_true', help='Run off cluster to test') p.add_argument('--overwrite', dest='overwrite', default=False, action='store_true', help='Overwrite existing merged files') args = p.parse_args() my.setupGlobals(verbose=False) my.setupShowerLLH(verbose=False) prefix = '%s/%s_data/files' % (my.llh_data, args.config) hdfMerge = '%s/build/hdfwriter/resources/scripts/merge.py' % my.offline tempDir = '${_CONDOR_SCRATCH_DIR}/data-llh' masterList = glob.glob('%s/DataLLH_*_%s_0*.hdf5' % (prefix, args.bintype)) masterList.sort() dates = [os.path.basename(f).split('_')[1] for f in masterList] dates = sorted(list(set(dates))) if args.date: dates = [ymd for ymd in dates if args.date in ymd] exList, jobIDs = [],[]
def py_submit(exelines, jobID=None, sublines=None, test=False):
    """Write an executable job script and submit it to HTCondor.

    Parameters
    ----------
    exelines : list of str
        Shell commands to execute inside the job.
    jobID : str, optional
        Base name for the exec/log/out/error files. A random
        'npx4-NNNNN' id is generated when omitted.
    sublines : list of str, optional
        Extra lines inserted into the condor submission script
        just before the 'Queue' statement.
    test : bool
        If True, run the single command locally instead of submitting.

    Raises
    ------
    SystemExit
        If test=True and more than one command is given.
    """
    # Option for testing off cluster
    if test:
        if len(exelines) > 1:
            raise SystemExit('Multiple exelines not available in test')
        os.system(exelines[0])
        return

    # Setup global variables
    my.setupGlobals(verbose=False)

    if jobID is None:
        # BUG FIX: np.random.uniform(100000) returns a float, which made
        # the '{:05d}' integer format raise ValueError. Draw an integer
        # in [0, 100000) instead so the zero-padded 5-digit id works.
        randint = np.random.randint(0, 100000)
        jobID = 'npx4-{:05d}'.format(randint)
    outexe = '{}/npx4-execs/{}.sh'.format(my.npx4, jobID)
    condor_script = '%s/2sub.sub' % my.npx4

    # Run eval statement as it doesn't run by default when fed to script
    setPath = "echo eval `/cvmfs/icecube.opensciencegrid.org/py2-v1/setup.sh`"
    p = subprocess.Popen(setPath, stdout=subprocess.PIPE, shell=True)
    path, err = p.communicate()
    path = path.strip()

    # Setup execution script
    lines = [
        "#!/bin/bash",
        "date",
        "hostname",
        "",
        "%s" % path,
        ""
    ]
    lines += exelines
    lines += ["date"]
    lines = [l + '\n' for l in lines]
    with open(outexe, 'w') as f:
        f.writelines(lines)

    # Make file executable
    st = os.stat(outexe)
    os.chmod(outexe, st.st_mode | stat.S_IEXEC)

    # Condor submission script
    lines = [
        "Universe = vanilla\n",
        "Executable = %s/npx4-execs/%s.sh\n" % (my.npx4, jobID),
        "Log = %s/npx4-logs/%s.log\n" % (my.npx4, jobID),
        "Output = %s/npx4-out/%s.out\n" % (my.npx4, jobID),
        "Error = %s/npx4-error/%s.error\n" % (my.npx4, jobID),
        "Notification = NEVER\n",
        "Queue\n"
    ]
    # Option for additional lines to submission script (kept before 'Queue')
    if sublines is not None:
        for l in sublines:
            lines.insert(-1, '%s\n' % l)
    with open(condor_script, 'w') as f:
        f.writelines(lines)

    os.system('condor_submit %s' % condor_script)
def py_submit(exelines, jobID=None, sublines=None, test=False):
    """Write an executable job script and submit it to HTCondor.

    Newer variant of py_submit: relies on 'getenv = True' in the condor
    description to propagate the current environment instead of sourcing
    the cvmfs setup script inside the job.

    Parameters
    ----------
    exelines : list of str
        Shell commands to execute inside the job.
    jobID : str, optional
        Base name for the exec/log/out/error files. A random
        'npx4-NNNNN' id is generated when omitted.
    sublines : list of str, optional
        Extra lines inserted into the condor submission script
        just before the 'queue' statement.
    test : bool
        If True, run the single command locally instead of submitting.

    Raises
    ------
    SystemExit
        If test=True and more than one command is given.
    """
    # Option for testing off cluster
    if test:
        if len(exelines) > 1:
            raise SystemExit('Multiple exelines not available in test')
        os.system(exelines[0])
        return

    # Setup global variables
    my.setupGlobals(verbose=False)

    if jobID is None:
        # BUG FIX: '{:05f}' formatted the integer id as '42137.000000';
        # use '{:05d}' for the intended zero-padded 5-digit id, and cap
        # the draw at 99999 so the id never exceeds five digits.
        randint = random.randint(0, 99999)
        jobID = 'npx4-{:05d}'.format(randint)
    outexe = '{}/npx4-execs/{}.sh'.format(my.npx4, jobID)
    condor_script = '{}/submit-desc.sub'.format(my.npx4)

    # NOTE: the cvmfs eval/setPath subprocess from the older py_submit was
    # removed here -- its result was already commented out of the script
    # body and is superseded by 'getenv = True' below.

    # Setup execution script
    lines = [
        "#!/bin/bash",
        "date",
        "hostname",
        ""
    ]
    lines += exelines
    lines += ["date"]
    lines = [l + '\n' for l in lines]
    with open(outexe, 'w') as f:
        f.writelines(lines)

    # Make file executable
    st = os.stat(outexe)
    os.chmod(outexe, st.st_mode | stat.S_IEXEC)

    # Condor submission script
    lines = [
        "universe = vanilla\n",
        "executable = {}/npx4-execs/{}.sh\n".format(my.npx4, jobID),
        "log = {}/npx4-logs/{}.log\n".format(my.npx4, jobID),
        "output = {}/npx4-out/{}.out\n".format(my.npx4, jobID),
        "error = {}/npx4-error/{}.error\n".format(my.npx4, jobID),
        "notification = Never\n",
        # use the current metaproject environment
        "getenv = True\n",
        "queue\n"
    ]
    # Option for additional lines to submission script (kept before 'queue')
    if sublines is not None:
        for l in sublines:
            lines.insert(-1, '%s\n' % l)
    with open(condor_script, 'w') as f:
        f.writelines(lines)

    os.system('condor_submit %s' % condor_script)
return [] #0 jobs; 0 completed, 0 removed, 0 idle, 0 running, 0 held, 0 suspended # Get a list of files running running = [] for line in output: jobID = line.strip().split(' ')[-1] if jobID[-3:] != '.sh': continue running.append(jobID) # Get a list of files submitted submittedFiles = glob.glob('%s/npx4-execs/*.sh' % prefix) submittedFiles = [os.path.basename(f) for f in submittedFiles] # Find out which submitted files have finished finished = [] for jobID in submittedFiles: if jobID not in running and jobID != '': finished.append(jobID) return finished if __name__ == "__main__": my.setupGlobals(verbose=False) finished = getFinished(my.npx4) print len(finished), 'finished files:' print finished