Example #1
    def run(self):
        url = self.url
        dataset = self.dataset
        reasons = []
        ## any workflow still in one of these statuses keeps the dataset in use
        statuses = [
            'assignment-approved', 'assigned', 'acquired', 'running-open',
            'running-closed', 'completed', 'force-complete', 'closed-out'
        ]

        ## ask ReqMgr which workflows reference this dataset
        actors = getWorkflowByInput(url, dataset, details=True)
        using_actors = [
            actor for actor in actors if actor['RequestStatus'] in statuses
        ]
        if len(using_actors):
            reasons.append('input')

        actors = getWorkflowByOutput(url, dataset, details=True)
        using_actors = [
            actor for actor in actors if actor['RequestStatus'] in statuses
        ]
        if len(using_actors):
            reasons.append('output')

        actors = getWorkflowByMCPileup(url, dataset, details=True)
        using_actors = [
            actor for actor in actors if actor['RequestStatus'] in statuses
        ]
        if len(using_actors):
            reasons.append('pileup')

        self.reasons = reasons
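A minimal sketch of driving such a checker, assuming the `run()` above lives on a `threading.Thread` subclass; the class name `CheckDatasetUse` and its constructor are hypothetical, only `run()` appears in the snippet:

    ## hypothetical class and dataset name, for illustration only
    checker = CheckDatasetUse(url='cmsweb.cern.ch',
                              dataset='/SomePD/SomeEra-v1/AODSIM')
    checker.start()
    checker.join()
    if checker.reasons:
        print "dataset still in use as:", ",".join(checker.reasons)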
Example #3
            usors = getWorkflowByMCPileup(url, dataset, details=True)
            ## find the latest request date using that dataset in secondary
            for usor in usors:
                d = time.mktime(time.strptime(
                    "-".join(map(str, usor['RequestDate'])), "%Y-%m-%d-%H-%M-%S"))
                secondary_timeout[dataset] = max(secondary_timeout[dataset], d)

        if secondary_timeout[dataset]:  ## non-zero: some request used it as pileup
            delay_days = 30
            delay = delay_days * 24 * 60 * 60  ## 30 days in seconds
            if (now - secondary_timeout[dataset]) > delay:
                print "unlocking secondary input after", delay_days, "days"
                unlock = True


        tier = dataset.split('/')[-1]
        creators = getWorkflowByOutput(url, dataset, details=True)
        if not creators and tier != 'RAW':
            ds_status = getDatasetStatus(dataset)
            if '-v0/' not in dataset and ds_status is not None:
                sendEmail('failing get by output', '%s has not been produced by anything?' % dataset)
                newly_locking.add(dataset)
                continue
            else:
                ## does not matter, cannot be an OK dataset
                unlock = True
                bad_ds = True
        creators_status = [r['RequestStatus'] for r in creators]
        print "Statuses of workflows that made the dataset", dataset, "are", creators_status
        if all([status in ['failed', 'aborted', 'rejected',
                           'aborted-archived', 'rejected-archived']
                for status in creators_status]):
            ## every workflow that produced the dataset failed: unlock it
            print "\tunlocking", dataset, "for bad workflow statuses"
Example #4
                            now - odb.date
                        ) / (24 * 60 * 60), "[days] since announcement, limit is", delay_days, "[days]"
                    else:
                        unlock = False
                        print "re-locking", dataset, "because the", delay_days, "[days] expiration delay has not passed, now:", now, "announced", odb.date, ":", (
                            now - odb.date) / (24 * 60 * 60), "[days]"
                else:
                    print "re-locking", dataset, "because of special tier needing double check"
                    unlock = False
                time_point("Checked to keep on disk for 30 days", sub_lap=True)

        if unlock:
            print "\tunlocking", dataset
            LI.release(dataset)
            ##would like to pass to *-unlock, or even destroy from local db
            creators = getWorkflowByOutput(url, dataset, details=True)
            for creator in creators:
                for wfo in session.query(Workflow).filter(
                        Workflow.name == creator['RequestName']).all():
                    if 'unlock' not in wfo.status and any([
                            wfo.status.startswith(key)
                            for key in ['done', 'forget']
                    ]):
                        wfo.status += '-unlock'
                        print "setting", wfo.name, "to", wfo.status
            session.commit()
        else:
            print "\nrelocking", dataset
            newly_locking.add(dataset)

        time_point("Checked all")
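The expiration test above works in epoch seconds but reports ages in days; a small helper along these lines (names hypothetical) keeps that conversion in one place:

    SECONDS_PER_DAY = 24 * 60 * 60

    def age_in_days(announced_epoch, now_epoch):
        ## elapsed time between two epoch timestamps, expressed in days
        return (now_epoch - announced_epoch) / float(SECONDS_PER_DAY)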
Example #6
input_json = defaultdict(list)
input_rl = []
output_json = {}
output_rl = defaultdict(list)
missing_rl = defaultdict(list)
errors_by_lb = defaultdict(lambda: defaultdict(set))
dbsapi = DbsApi(url='https://cmsweb.cern.ch/dbs/prod/global/DBSReader')
ecode_ban = []  ## banned exit codes, e.g. 99999, 139, 134, 92

## try to get more workflows by their outputs
outs = set()
for wf in wfs:
    outs.update(wf['OutputDatasets'])

for out in outs:
    o_wfs = getWorkflowByOutput(url, out, details=True)
    wfns = [wf['RequestName'] for wf in wfs]
    for o_wf in o_wfs:
        if o_wf['RequestName'] not in wfns:
            print "got also", o_wf['RequestName']
            wfs.append(o_wf)

for wf in wfs:
    wfi = workflowInfo(url, wf['RequestName'], request=wf)

    date = ''.join(map(lambda s: '%02d' % s, wf['RequestDate']))

    if wf['RequestStatus'] not in [
            'completed', 'announced', 'normal-archived'
    ]:
        continue
    print "Checking on", wf['RequestName']

    ## create the input json
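Example #6 pre-declares several `defaultdict` accumulators; a minimal sketch of how a nested one like `errors_by_lb` is typically filled (the run, lumi-block, and exit-code values here are illustrative assumptions):

    from collections import defaultdict

    errors_by_lb = defaultdict(lambda: defaultdict(set))

    ## hypothetical accumulation: record which exit codes were seen
    ## for each (run, lumi-block) pair
    for run, lumi, ecode in [(316000, 12, 139), (316000, 12, 134),
                             (316001, 3, 99999)]:
        errors_by_lb[run][lumi].add(ecode)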