Example #1
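This snippet walks a list of workflows, looks up the blocks of each output dataset in PhEDEx, and records which disk sites hold blocks that still need a subscription. The helper functions (getWorkLoad, getDatasetBlockAndSite, getDatasetOnGoingDeletion, sendEmail) are assumed to come from the project's own utils module; url, wfs, max_wf and one_status are defined upstream of this excerpt.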
import json
import random
from collections import defaultdict
# cap the number of workflows to inspect, then randomize the order
if max_wf: wfs = wfs[:max_wf]

random.shuffle(wfs)
all_blocks_at_sites = defaultdict(set)

# previously handled blocks (not used further in this excerpt)
done = json.loads(open('myblock_done.json').read())

print len(wfs), "to look at the output of"
for iw, wfo in enumerate(wfs):
    print "%s/%s:" % (iw, len(wfs)), wfo.name
    #wfi = workflowInfo(url, wfo.name)
    #outs= wfi.request['OutputDatasets']
    wl = getWorkLoad(url, wfo.name)
    outs = wl['OutputDatasets']
    for out in outs:
        blocks_at_sites = getDatasetBlockAndSite(url, out, group="")
        deletions = getDatasetOnGoingDeletion(url, out)
        if len(deletions):
            print "\t\tshould not subscribe with on-going deletions", out
            continue
        for site, blocks in blocks_at_sites.items():
            # skip tape/staging endpoints
            if 'Buffer' in site or 'Export' in site or 'MSS' in site: continue
            all_blocks_at_sites[site].update(blocks)
        print "\t", out
        print "\t\t", len(blocks_at_sites), "sites", sorted(blocks_at_sites.keys()), "with unsubscribed blocks"

if not all_blocks_at_sites and len(wfs):
    ## no subscription to be done at this time, let me know
    sendEmail('no unsubscribed blocks', 'while catching up %s does not need to be there anymore' % one_status)

print len(all_blocks_at_sites), "sites to subscribe things at"
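For orientation, here is a minimal, self-contained sketch of the accumulation pattern used above, with made-up site and block names standing in for a real PhEDEx answer:

from collections import defaultdict

# hypothetical PhEDEx answer for one output dataset: site -> blocks held there
blocks_at_sites = {
    'T2_CH_CERN': ['/A/B-v1/AOD#111', '/A/B-v1/AOD#222'],
    'T1_US_FNAL_MSS': ['/A/B-v1/AOD#111'],  # tape endpoint, vetoed below
}

all_blocks_at_sites = defaultdict(set)
for site, blocks in blocks_at_sites.items():
    # same veto as above: ignore Buffer/Export/MSS endpoints
    if 'Buffer' in site or 'Export' in site or 'MSS' in site: continue
    all_blocks_at_sites[site].update(blocks)

print dict(all_blocks_at_sites)
# {'T2_CH_CERN': set(['/A/B-v1/AOD#111', '/A/B-v1/AOD#222'])}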
Example #2
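This excerpt comes from deeper inside a loop over global-queue elements (wqe): it extracts the campaign from the request name, then compares each input block's true PhEDEx location with what the global queue believes. si is the site-information object providing the CE_to_SE / SE_to_CE name mappings; block_locations and s_block_locations are caches defined upstream of the excerpt.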
            continue
        try:
            # the prep-id is the underscore-separated token carrying exactly
            # two dashes, e.g. pdmvserv_<pwg>-<campaign>-<number>_...; the
            # campaign is its middle field
            pid = filter(lambda w: w.count('-') == 2,
                         wqe['RequestName'].split('_'))[0]
            camp = pid.split('-')[1]
        except Exception:
            camp = None
        if not camp: continue
        #print json.dumps( wqe, indent=2)
        for b in wqe['Inputs']:
            if '#' not in b: continue
            # b is a block name of the form <dataset>#<uuid>
            ds = b.split('#')[0]
            # cache phedex block locations per dataset; block_locations is
            # assumed to be a defaultdict(lambda: defaultdict(list))
            if ds not in block_locations:
                s_block_locations[ds] = getDatasetBlockAndSite(url,
                                                               ds,
                                                               complete='y')
                for s in s_block_locations[ds]:
                    for bl in s_block_locations[ds][s]:
                        block_locations[ds][bl].append(s)

            if b not in block_locations[ds]:
                print b, "is not to be found in phedex"
                continue
            #block_ce = [si.SE_to_CE(s) for s in block_locations[ds][b]]
            #wqe_ce = [s for s in wqe['Inputs'][b]]
            ## true location of the data
            block_se = block_locations[ds][b]
            ## what the global queue thinks about the block location
            wqe_se = [si.CE_to_SE(s) for s in wqe['Inputs'][b]]
            ## where the wf is set to be run at
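The excerpt cuts off just after noting where the workflow is set to run, but the two lists it builds are meant to be checked for overlap. A minimal sketch, with a hypothetical stand-in for the si.CE_to_SE mapping (the real mapping lives in the project's site-information object):

# hypothetical stand-in for si.CE_to_SE: processing (CE) site names map onto
# storage (SE) names, with Tier-1 disk storage carrying a _Disk suffix
def CE_to_SE(site):
    if site.startswith('T1') and not site.endswith('_Disk'):
        return site + '_Disk'
    return site

block_se = ['T1_US_FNAL_Disk']                    # where phedex has the block
wqe_se = [CE_to_SE(s) for s in ['T1_US_FNAL']]    # what the global queue thinks
print bool(set(wqe_se) & set(block_se))           # True: the two views agree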
Example #3
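This excerpt extends the same block-location check to ACDC workflows: when NoInputUpdate is False and some of the sites holding the ACDC input (acdc_location) are missing from the workflow's site whitelist, the workflow is flagged as not runnable. wqe, si, wf and wfi are defined upstream of the excerpt.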
                    not_runable_acdc.add(wf['RequestName'])
                    #not_processable.add( b )
                se_whitelist = sorted(set([si.CE_to_SE(s) for s in wqe['SiteWhitelist'] if s in si.sites_ready]))
                missing_in_whitelist = sorted([si.SE_to_CE(s) for s in (set(acdc_location) - set(se_whitelist))])
                if wqe['NoInputUpdate'] == False and missing_in_whitelist:
                    print "Should have", missing_in_whitelist, "also in the whitelist, or have xrootd enabled"
                    print sorted(acdc_location), "for the ACDC location"
                    print sorted(se_whitelist), "for the whitelist"
                    not_runable_acdc.add(wf['RequestName'])
                
                continue
            # b is a block name of the form <dataset>#<uuid>
            ds = b.split('#')[0]
            if ds not in block_locations:
                s_block_locations[ds] = getDatasetBlockAndSite(url, ds, complete='y', vetoes=[])
                for s in s_block_locations[ds]:
                    for bl in s_block_locations[ds][s]:
                        block_locations[ds][bl].append(s)

            if b not in block_locations[ds]:
                print b, "is not to be found in phedex, needed by", wfi.request['RequestName']
                ## should send a critical log
                continue

            #block_ce = [si.SE_to_CE(s) for s in block_locations[ds][b]]
            #wqe_ce = [s for s in wqe['Inputs'][b]]
            ## true location of the data
            block_se = block_locations[ds][b]

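Both Example #2 and Example #3 append into block_locations[ds][bl] without initializing the inner containers, which only works if the cache is a nested defaultdict. A minimal sketch of that structure:

from collections import defaultdict

# dataset -> block -> list of storage sites holding the block
block_locations = defaultdict(lambda: defaultdict(list))
block_locations['/A/B-v1/AOD']['/A/B-v1/AOD#111'].append('T2_CH_CERN')

print '/A/B-v1/AOD#111' in block_locations['/A/B-v1/AOD']  # True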
Example #4
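Example #4 is a cleaned-up variant of Example #1: it accepts either workflow names or preloaded workload dictionaries, skips entries whose workload cannot be fetched, and has the notification email commented out.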
# as in Example #1, accumulate unsubscribed blocks per site
all_blocks_at_sites = defaultdict(set)
print len(wfs), "to look at the output of"

for iw, wfn in enumerate(wfs):
    if isinstance(wfn, dict):
        wl = wfn
        wfn = wl['RequestName']
    else:
        wl = getWorkLoad(url, wfn)

    print "%s/%s:" % (iw, len(wfs)), wfn

    if not wl:
        continue
    outs = wl['OutputDatasets']
    for out in outs:
        blocks_at_sites = getDatasetBlockAndSite(url, out, group="")
        deletions = getDatasetOnGoingDeletion(url, out)
        if len(deletions):
            print "\t\tshould not subscribe with on-going deletions", out
            continue
        for site, blocks in blocks_at_sites.items():
            # skip tape/staging endpoints
            if 'Buffer' in site or 'Export' in site or 'MSS' in site: continue
            all_blocks_at_sites[site].update(blocks)
        print "\t", out
        print "\t\t", len(blocks_at_sites), "sites", sorted(
            blocks_at_sites.keys()), "with unsubscribed blocks"

if not all_blocks_at_sites and len(wfs):
    ## no subscription to be done at this time, let me know
    #sendEmail('no unsubscribed blocks','while catching up %s does not need to be there anymore'%( one_status ))
    pass