示例#1
0
    def testStatus(self):
        """Exercise the ``status`` command end to end.

        Covers: missing required task option, a successful run against a
        valid task directory, the request-details printer, a missing
        ``.requestcache`` file, and a non-existent task directory.
        """
        s = status(self.logger, self.maplistopt)

        # 1) missing required -d option
        expRes = CommandResult(2001, 'ERROR: Task option is required')
        res = s()
        # assertEqual: assertEquals is a deprecated alias (removed in py3.12)
        self.assertEqual(expRes, res)

        # 2) correct execution
        analysisDir = self.reqarea
        s = status(self.logger, self.maplistopt + ["-d", analysisDir])
        res = s()
        expRes = CommandResult(0, None)
        self.assertEqual(expRes, res)

        # 3) Print request details
        s = status(self.logger, self.maplistopt + ["-d", analysisDir])
        s._printRequestDetails({u'requestDetails': {u'RequestMessages': [[u'No blocks pass white/blacklist']], 'RequestStatus': 'failed'}})

        # 4) .requestcache file does not exist
        os.remove(os.path.join(analysisDir, ".requestcache"))
        self.assertRaises(CachefileNotFoundException, status, self.logger, self.maplistopt + ["-d", analysisDir])

        # 5) wrong -d option
        analysisDir = os.path.join(os.path.dirname(__file__), 'crab_XXX')
        self.assertRaises(TaskNotFoundException, status, self.logger, self.maplistopt + ["-d", analysisDir])
示例#2
0
    def testStatus(self):
        """Exercise the ``status`` command end to end.

        Covers: missing required task option, a successful run against a
        valid task directory, the request-details printer, a missing
        ``.requestcache`` file, and a non-existent task directory.
        """
        s = status(self.logger, self.maplistopt)

        # 1) missing required -d option
        expRes = CommandResult(2001, 'ERROR: Task option is required')
        res = s()
        # assertEqual: assertEquals is a deprecated alias (removed in py3.12)
        self.assertEqual(expRes, res)

        # 2) correct execution
        analysisDir = self.reqarea
        s = status(self.logger, self.maplistopt + ["-d", analysisDir])
        res = s()
        expRes = CommandResult(0, None)
        self.assertEqual(expRes, res)

        # 3) Print request details
        s = status(self.logger, self.maplistopt + ["-d", analysisDir])
        s._printRequestDetails({
            u'requestDetails': {
                u'RequestMessages': [[u'No blocks pass white/blacklist']],
                'RequestStatus': 'failed'
            }
        })

        # 4) .requestcache file does not exist
        os.remove(os.path.join(analysisDir, ".requestcache"))
        self.assertRaises(CachefileNotFoundException, status, self.logger,
                          self.maplistopt + ["-d", analysisDir])

        # 5) wrong -d option
        analysisDir = os.path.join(os.path.dirname(__file__), 'crab_XXX')
        self.assertRaises(TaskNotFoundException, status, self.logger,
                          self.maplistopt + ["-d", analysisDir])
示例#3
0
def status_crab(args):
    '''Check jobs'''
    # Collect the crab task directories to query: either everything under
    # the job name's work area, or the explicit directory globs.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.crabDirectories:
        for pattern in args.crabDirectories:
            crab_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")

    # Quiet the crab client loggers down to warnings only.
    tblogger, logger, memhandler = initLoggers()
    for handle in (tblogger, logger, memhandler):
        handle.setLevel(logging.WARNING)

    statusMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        #if args.verbose: statusArgs += ['--long']
        try:
            log.info('Retrieving status of {0}'.format(taskdir))
            statusMap[taskdir] = crabClientStatus.status(logger, statusArgs)()
        except HTTPException as hte:
            log.warning("Status for input directory {0} failed: {1}".format(taskdir, hte.headers))
        except ClientException as cle:
            log.warning("Status for input directory {0} failed: {1}".format(taskdir, cle))

    parse_crab_status(args, statusMap)
示例#4
0
def status_crab(args):
    '''Check jobs'''
    # Resolve which crab task directories to query.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.crabDirectories:
        for pattern in args.crabDirectories:
            crab_dirs += glob.glob(pattern)
    else:
        log.error("Shouldn't be possible to get here")

    # Suppress crab client chatter below WARNING.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)

    # Query each existing directory; collect per-directory summaries.
    statusMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        #if args.verbose: statusArgs += ['--long']
        try:
            log.info('Retrieving status of {0}'.format(taskdir))
            statusMap[taskdir] = crabClientStatus.status(logger, statusArgs)()
        except HTTPException as hte:
            msg = "Status for input directory {0} failed: {1}"
            log.warning(msg.format(taskdir, hte.headers))
        except ClientException as cle:
            msg = "Status for input directory {0} failed: {1}"
            log.warning(msg.format(taskdir, cle))

    parse_crab_status(args, statusMap)
示例#5
0
def status_crab(args):
    '''Check jobs'''
    # A crab environment must be sourced before the client can be used.
    if not crabLoaded:
        logging.error('You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh')
        return
    # Resolve the task directories to query.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.directories:
        for pattern in args.directories:
            crab_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")

    # Quiet the crab client loggers down to warnings only.
    tblogger, logger, memhandler = initLoggers()
    for handle in (tblogger, logger, memhandler):
        handle.setLevel(logging.WARNING)

    statusMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        #if args.verbose: statusArgs += ['--long']
        try:
            log.info('Retrieving status of {0}'.format(taskdir))
            statusMap[taskdir] = crabClientStatus.status(logger, statusArgs)()
            if args.verbose:
                print_single_status(args, statusMap[taskdir])
        except HTTPException as hte:
            log.warning("Status for input directory {0} failed: {1}".format(taskdir, hte.headers))
        except ClientException as cle:
            log.warning("Status for input directory {0} failed: {1}".format(taskdir, cle))

    parse_crab_status(args, statusMap)
def purge_crab(args):
    '''Purge the cache of crab tasks whose jobs have all finished.'''
    # A crab environment must be sourced before the client can be used.
    if not crabLoaded:
        logging.error(
            'You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh'
        )
        return
    # Resolve the task directories to consider.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.directories:
        for pattern in args.directories:
            crab_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")

    # Quiet the crab client loggers down to warnings only.
    tblogger, logger, memhandler = initLoggers()
    for handle in (tblogger, logger, memhandler):
        handle.setLevel(logging.WARNING)

    purgeMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        purgeArgs = ['--cache', '--dir', taskdir]
        try:
            summary = crabClientStatus.status(logger, statusArgs)()
            # Tally job states; purge only when every job is 'finished'.
            total = 0
            finished = 0
            allJobStatus = {}
            if 'jobs' in summary:
                for job in summary['jobs'].itervalues():
                    total += 1
                    state = job['State']
                    allJobStatus[state] = allJobStatus.get(state, 0) + 1
                    if state == 'finished':
                        finished += 1
            if total and finished == total:
                log.info('Purging {0}'.format(taskdir))
                log.info(' '.join([
                    '{0}: {1}'.format(state, allJobStatus[state])
                    for state in allowedStates if state in allJobStatus
                ]))
                purgeMap[taskdir] = crabClientPurge.purge(logger, purgeArgs)()
        except HTTPException as hte:
            log.warning(
                "Submission for input directory {0} failed: {1}".format(
                    taskdir, hte.headers))
        except ClientException as cle:
            log.warning(
                "Submission for input directory {0} failed: {1}".format(
                    taskdir, cle))
示例#7
0
def resubmit_crab(args):
    '''Resubmit jobs'''
    # A crab environment must be sourced before the client can be used.
    if not crabLoaded:
        logging.error('You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh')
        return
    # Resolve the task directories to consider.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.directories:
        for pattern in args.directories:
            crab_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")

    # Quiet the crab client loggers down to warnings only.
    tblogger, logger, memhandler = initLoggers()
    for handle in (tblogger, logger, memhandler):
        handle.setLevel(logging.WARNING)

    resubmitMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        resubmitArgs = ['--dir', taskdir]
        try:
            summary = crabClientStatus.status(logger, statusArgs)()
            # Tally per-state counts; any 'failed' job triggers a resubmit.
            total = 0
            failed = 0
            allJobStatus = {}
            if 'jobs' in summary:
                for job in summary['jobs'].itervalues():
                    total += 1
                    state = job['State']
                    allJobStatus[state] = allJobStatus.get(state, 0) + 1
                    if state == 'failed':
                        failed += 1
            if failed:
                log.info('Resubmitting {0}'.format(taskdir))
                log.info('{0} of {1} jobs failed'.format(failed, total))
                log.info(' '.join(['{0}: {1}'.format(state, allJobStatus[state])
                                   for state in allowedStates if state in allJobStatus]))
                resubmitMap[taskdir] = crabClientResubmit.resubmit(logger, resubmitArgs)()
        except HTTPException as hte:
            log.warning("Submission for input directory {0} failed: {1}".format(taskdir, hte.headers))
        except ClientException as cle:
            log.warning("Submission for input directory {0} failed: {1}".format(taskdir, cle))

    # Report any resubmission that did not succeed.
    for taskdir, statMap in resubmitMap.iteritems():
        if statMap['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(statMap['status'], taskdir))
示例#8
0
def resubmit_crab(args):
    '''Resubmit jobs'''
    # Resolve the task directories to consider.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs.extend(sorted(glob.glob('{0}/*'.format(workArea))))
    elif args.crabDirectories:
        for pattern in args.crabDirectories:
            crab_dirs.extend(glob.glob(pattern))
    else:
        log.error("Shouldn't be possible to get here")

    # Quiet the crab client loggers down to warnings only.
    tblogger, logger, memhandler = initLoggers()
    for handle in (tblogger, logger, memhandler):
        handle.setLevel(logging.WARNING)

    resubmitMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        resubmitArgs = ['--dir', taskdir]
        try:
            summary = crabClientStatus.status(logger, statusArgs)()
            # Count failed jobs; any failure triggers a resubmit.
            total = 0
            failed = 0
            if 'jobs' in summary:
                jobs = summary['jobs']
                total = len(jobs)
                failed = sum(1 for job in jobs.itervalues()
                             if job['State'] == 'failed')
            if failed:
                log.info('Resubmitting {0}'.format(taskdir))
                log.info('{0} of {1} jobs failed'.format(failed, total))
                resubmitMap[taskdir] = crabClientResubmit.resubmit(
                    logger, resubmitArgs)()
        except HTTPException as hte:
            log.warning(
                "Submission for input directory {0} failed: {1}".format(
                    taskdir, hte.headers))
        except ClientException as cle:
            log.warning(
                "Submission for input directory {0} failed: {1}".format(
                    taskdir, cle))

    # Report any resubmission that did not succeed.
    for taskdir, statMap in resubmitMap.iteritems():
        if statMap['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(statMap['status'], taskdir))
示例#9
0
def resubmit_crab(args):
    '''Resubmit jobs'''
    # Resolve the task directories to consider.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.crabDirectories:
        for pattern in args.crabDirectories:
            crab_dirs += glob.glob(pattern)
    else:
        log.error("Shouldn't be possible to get here")

    # Suppress crab client chatter below WARNING.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)

    resubmitMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        resubmitArgs = ['--dir', taskdir]
        try:
            summary = crabClientStatus.status(logger, statusArgs)()
            # Count failed jobs; any failure triggers a resubmit.
            total = 0
            failed = 0
            if 'jobs' in summary:
                for job in summary['jobs'].itervalues():
                    total += 1
                    if job['State'] == 'failed':
                        failed += 1
            if failed:
                log.info('Resubmitting {0}'.format(taskdir))
                log.info('{0} of {1} jobs failed'.format(failed, total))
                resubmitMap[taskdir] = crabClientResubmit.resubmit(logger, resubmitArgs)()
        except HTTPException as hte:
            log.warning("Submission for input directory {0} failed: {1}".format(taskdir, hte.headers))
        except ClientException as cle:
            log.warning("Submission for input directory {0} failed: {1}".format(taskdir, cle))

    # Report any resubmission that did not succeed.
    for taskdir, statMap in resubmitMap.iteritems():
        if statMap['status'] != 'SUCCESS':
            log.info('Status: {0} - {1}'.format(statMap['status'], taskdir))
示例#10
0
def status_crab(args):
    '''Check jobs'''
    # A crab environment must be sourced before the client can be used.
    if not crabLoaded:
        logging.error(
            'You must source a crab environment to submit to crab.\nsource /cvmfs/cms.cern.ch/crab3/crab.sh'
        )
        return
    # Resolve the task directories to query.
    crab_dirs = []
    if args.jobName:
        workArea = get_crab_workArea(args)
        crab_dirs += sorted(glob.glob('{0}/*'.format(workArea)))
    elif args.directories:
        for pattern in args.directories:
            crab_dirs += glob.glob(pattern)
    else:
        log.error("Shouldn't be possible to get here")

    # Suppress crab client chatter below WARNING.
    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.WARNING)
    logger.setLevel(logging.WARNING)
    memhandler.setLevel(logging.WARNING)

    # Query each existing directory; optionally print each result.
    statusMap = {}
    for taskdir in crab_dirs:
        if not os.path.exists(taskdir):
            continue
        statusArgs = ['--dir', taskdir]
        #if args.verbose: statusArgs += ['--long']
        try:
            log.info('Retrieving status of {0}'.format(taskdir))
            statusMap[taskdir] = crabClientStatus.status(logger, statusArgs)()
            if args.verbose:
                print_single_status(args, statusMap[taskdir])
        except HTTPException as hte:
            msg = "Status for input directory {0} failed: {1}"
            log.warning(msg.format(taskdir, hte.headers))
        except ClientException as cle:
            msg = "Status for input directory {0} failed: {1}"
            log.warning(msg.format(taskdir, cle))

    parse_crab_status(args, statusMap)
示例#11
0
文件: jobsit.py 项目: yiiyama/metscan
    query = 'UPDATE `scanstatus` SET `status` = \'failed\' WHERE `status` LIKE \'scanning\' AND (`run`, `lumi`) IN (%s)' % (', '.join(allLumis))

    dbcursor.execute(query)
    shutil.rmtree(config.installdir + '/jobs/' + timestamp + '/' + jobdir)


timestamps = sorted(os.listdir(config.installdir + '/jobs'))
for timestamp in timestamps:
    jobdirs = [d for d in os.listdir(config.installdir + '/jobs/' + timestamp) if d.startswith('crab_')]
    for jobdir in jobdirs:
        taskdir = config.installdir + '/jobs/' + timestamp + '/' + jobdir
        shortname = timestamp + '/' + jobdir

        try:
            statusobj = status(logger, ['--dir', taskdir])
            res = statusobj()
        except:
            print ' CRAB directory ' + shortname + ' is corrupted. Deleting.'
            cleanup(timestamp, jobdir)
            continue

        print ' Task ' + shortname + ' status is ' + res['status']

        if res['status'] == 'SUBMITTED' or res['status'] == 'QUEUED':
            if KILL:
                print ' Killing jobs..'
                try:
                    killobj = kill(logger, ['--dir', taskdir])
                    killobj()
                except: