def testResubmit(self):
    """Exercise the resubmit command: missing -d option, a successful run,
    and a nonexistent task directory.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` in unittest;
    use the canonical name.
    """
    # 1) missing required -d option
    s = resubmit(self.logger, [])
    expRes = CommandResult(2001, 'ERROR: Task option is required')
    res = s()
    self.assertEqual(expRes, res)
    # 2) correct execution
    analysisDir = self.reqarea
    s = resubmit(self.logger, self.maplistopt + ["-d", analysisDir])
    res = s()
    expRes = CommandResult(0, '')
    self.assertEqual(expRes, res)
    # 3) wrong -d option: a directory that does not exist must raise
    analysisDir = os.path.join(os.path.dirname(__file__), 'crab_XXX')
    self.assertRaises(TaskNotFoundException, resubmit, self.logger,
                      self.maplistopt + ["-d", analysisDir])
def testResubmit(self):
    """Check resubmit behavior for three cases: no -d option, a valid task
    directory, and a missing task directory.

    Fix: replace the deprecated `assertEquals` alias with `assertEqual`.
    """
    # 1) missing required -d option
    s = resubmit(self.logger, [])
    expRes = CommandResult(2001, 'ERROR: Task option is required')
    res = s()
    self.assertEqual(expRes, res)
    # 2) correct execution
    analysisDir = self.reqarea
    s = resubmit(self.logger, self.maplistopt + ["-d", analysisDir])
    res = s()
    expRes = CommandResult(0, '')
    self.assertEqual(expRes, res)
    # 3) wrong -d option must raise TaskNotFoundException
    analysisDir = os.path.join(os.path.dirname(__file__), 'crab_XXX')
    self.assertRaises(TaskNotFoundException, resubmit, self.logger,
                      self.maplistopt + ["-d", analysisDir])
def resubmit_crab(args):
    '''Resubmit jobs'''
    # Bail out early if the CRAB3 client environment was not sourced.
    if not crabLoaded:
        logging.error('You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh')
        return
    # Collect crab task directories: either everything under the job name's
    # work area, or the explicit directory globs given on the command line.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.directories:
        for d in args.directories:
            crab_dirs += glob.glob(d)
    else:
        # Argument parsing should make one of the branches above mandatory.
        log.error("Shouldn't be possible to get here")
    # Quiet the CRAB client's own loggers; we report through `log` instead.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)
    resubmitMap = {}  # task directory -> crab resubmit result dict
    for d in crab_dirs:
        if os.path.exists(d):
            statusArgs = ['--dir',d]
            resubmitArgs = ['--dir',d]
            try:
                # Query the task status; resubmit only when at least one
                # job is in the 'failed' state.
                summary = crabClientStatus.status(logger,statusArgs)()
                resubmit = False
                total = 0
                failed = 0
                allJobStatus = {}  # per-state job counts for the summary line
                if 'jobs' in summary:
                    for j,job in summary['jobs'].iteritems():
                        total += 1
                        if job['State'] not in allJobStatus:
                            allJobStatus[job['State']] = 0
                        allJobStatus[job['State']] += 1
                        if job['State'] in ['failed']:
                            failed += 1
                            resubmit = True
                if resubmit:
                    log.info('Resubmitting {0}'.format(d))
                    log.info('{0} of {1} jobs failed'.format(failed,total))
                    # NOTE(review): `allowedStates` is defined elsewhere in
                    # this module; it fixes the display order of job states.
                    log.info(' '.join(['{0}: {1}'.format(state,allJobStatus[state]) for state in allowedStates if state in allJobStatus]))
                    resubmitMap[d] = crabClientResubmit.resubmit(logger,resubmitArgs)()
            except HTTPException as hte:
                log.warning("Submission for input directory {0} failed: {1}".format(d, hte.headers))
            except ClientException as cle:
                log.warning("Submission for input directory {0} failed: {1}".format(d, cle))
    # Report any resubmissions that did not come back SUCCESS.
    for d,statMap in resubmitMap.iteritems():
        if statMap['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(statMap['status'],d))
def resubmit_crab(args):
    '''Resubmit jobs'''
    # Build the list of crab task directories to inspect: either all tasks
    # under the job name's work area, or explicit directory globs.
    task_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        task_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.crabDirectories:
        for pattern in args.crabDirectories:
            task_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")
    # Silence the CRAB client loggers; reporting happens through `log`.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)
    outcomes = {}  # task directory -> crab resubmit result
    for directory in task_dirs:
        if not os.path.exists(directory):
            continue
        statusArgs = ['--dir', directory]
        resubmitArgs = ['--dir', directory]
        try:
            summary = crabClientStatus.status(logger, statusArgs)()
            # Count jobs and flag the task if any job is in 'failed' state.
            total = 0
            failed = 0
            needs_resubmit = False
            if 'jobs' in summary:
                for _, jobInfo in summary['jobs'].iteritems():
                    total += 1
                    if jobInfo['State'] in ['failed']:
                        failed += 1
                        needs_resubmit = True
            if needs_resubmit:
                log.info('Resubmitting {0}'.format(directory))
                log.info('{0} of {1} jobs failed'.format(failed, total))
                outcomes[directory] = crabClientResubmit.resubmit(logger, resubmitArgs)()
        except HTTPException as hte:
            log.warning("Submission for input directory {0} failed: {1}".format(directory, hte.headers))
        except ClientException as cle:
            log.warning("Submission for input directory {0} failed: {1}".format(directory, cle))
    # Report any resubmissions that did not succeed.
    for directory, outcome in outcomes.iteritems():
        if outcome['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(outcome['status'], directory))
def resubmit_crab(args):
    '''Resubmit jobs'''

    def _failure_counts(summary):
        # Tally total jobs and how many are in the 'failed' state.
        n_total, n_failed = 0, 0
        if 'jobs' in summary:
            for _, info in summary['jobs'].iteritems():
                n_total += 1
                if info['State'] in ['failed']:
                    n_failed += 1
        return n_total, n_failed

    # Candidate crab directories: the work area for a job name, or the
    # explicit directory globs passed on the command line.
    dirs_to_check = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        dirs_to_check += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.crabDirectories:
        for spec in args.crabDirectories:
            dirs_to_check += glob.glob(spec)
    else:
        log.error("Shouldn't be possible to get here")
    # Keep the CRAB client loggers quiet; use our own `log` for output.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)
    results = {}  # task directory -> crab resubmit result
    for task_dir in dirs_to_check:
        if not os.path.exists(task_dir):
            continue
        try:
            summary = crabClientStatus.status(logger, ['--dir', task_dir])()
            n_total, n_failed = _failure_counts(summary)
            if n_failed > 0:
                log.info('Resubmitting {0}'.format(task_dir))
                log.info('{0} of {1} jobs failed'.format(n_failed, n_total))
                results[task_dir] = crabClientResubmit.resubmit(logger, ['--dir', task_dir])()
        except HTTPException as hte:
            log.warning("Submission for input directory {0} failed: {1}".format(task_dir, hte.headers))
        except ClientException as cle:
            log.warning("Submission for input directory {0} failed: {1}".format(task_dir, cle))
    # Surface any resubmissions whose status is not SUCCESS.
    for task_dir, result in results.iteritems():
        if result['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(result['status'], task_dir))
print ' Task ' + shortname + ' status is ' + res['status'] if res['status'] == 'SUBMITTED' or res['status'] == 'QUEUED': if KILL: print ' Killing jobs..' try: killobj = kill(logger, ['--dir', taskdir]) killobj() except: print ' Failed to kill ' + shortname else: print ' Resubmitting potential failed jobs..' try: resubmitobj = resubmit(logger, ['--dir', taskdir]) resubmitobj() except: print ' Failed to resubmit ' + shortname elif res['status'] == 'COMPLETED': print ' Clearing.' shutil.rmtree(taskdir) elif res['status'] in ['KILLED', 'KILLFAILED', 'FAILED', 'SUBMITFAILED', 'RESUBMITFAILED']: print ' Obtaining the list of lumis not analyzed.' try: reportobj = report(logger, ['--dir', taskdir]) reportobj() except: print ' Failed to fetch the lumi list.'
def resubmit_crab(args):
    '''Resubmit jobs'''
    # Refuse to run without a sourced CRAB3 client environment.
    if not crabLoaded:
        logging.error(
            'You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh'
        )
        return
    # Gather task directories from the job name's work area or from the
    # explicit directory globs supplied on the command line.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.crabDirectories:
        for d in args.crabDirectories:
            crab_dirs += glob.glob(d)
    else:
        # Argument parsing should guarantee one of the branches above.
        log.error("Shouldn't be possible to get here")
    # Quiet the CRAB client loggers; this module reports via `log`.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)
    resubmitMap = {}  # task directory -> crab resubmit result dict
    for d in crab_dirs:
        if os.path.exists(d):
            statusArgs = ['--dir', d]
            resubmitArgs = ['--dir', d]
            try:
                # Resubmit only when at least one job is in 'failed' state.
                summary = crabClientStatus.status(logger, statusArgs)()
                resubmit = False
                total = 0
                failed = 0
                allJobStatus = {}  # per-state job counts for the summary line
                if 'jobs' in summary:
                    for j, job in summary['jobs'].iteritems():
                        total += 1
                        if job['State'] not in allJobStatus:
                            allJobStatus[job['State']] = 0
                        allJobStatus[job['State']] += 1
                        if job['State'] in ['failed']:
                            failed += 1
                            resubmit = True
                if resubmit:
                    log.info('Resubmitting {0}'.format(d))
                    log.info('{0} of {1} jobs failed'.format(failed, total))
                    # NOTE(review): `allowedStates` comes from elsewhere in
                    # the module and fixes the display order of job states.
                    log.info(' '.join([
                        '{0}: {1}'.format(state, allJobStatus[state])
                        for state in allowedStates if state in allJobStatus
                    ]))
                    resubmitMap[d] = crabClientResubmit.resubmit(
                        logger, resubmitArgs)()
            except HTTPException as hte:
                log.warning(
                    "Submission for input directory {0} failed: {1}".format(
                        d, hte.headers))
            except ClientException as cle:
                log.warning(
                    "Submission for input directory {0} failed: {1}".format(
                        d, cle))
    # Report any resubmissions that did not come back SUCCESS.
    for d, statMap in resubmitMap.iteritems():
        if statMap['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(statMap['status'], d))