Example #1
0
def get_lfc_host(dq2_site_id):
    '''
    Gets the LFC host of a site registered in TiersOfATLAS.

    Looks up the site's local catalog URL and returns its network-location
    component with the final character stripped (presumably a trailing ':'
    in catalog URLs of the form lfc://<host>:/<path> -- TODO confirm).
    Returns None when the site has no local catalog registered.
    '''
    catalog_url = TiersOfATLAS.getLocalCatalog(dq2_site_id)
    if not catalog_url:
        return None
    # URL parts: ['lfc:', '', '<host>:', ...] -> take netloc, drop last char
    netloc = catalog_url.split('/')[2]
    return netloc[:-1]
Example #2
0
def get_lfc_host(dq2_site_id):
    '''
    Gets the LFC host of a site registered in TiersOfATLAS.

    Returns the host part of the site's local catalog URL (last character
    dropped -- presumably a trailing ':'; verify against ToA catalog
    entries), or None if no catalog URL is registered for the site.
    '''
    url = TiersOfATLAS.getLocalCatalog(dq2_site_id)
    # split('/')[2] picks the netloc of an lfc://host:/path style URL
    return url.split('/')[2][:-1] if url else None
Example #3
0
    locnum = 0
    for i in locrange:
        locnum += len(locations[i])
    if locnum == 0:
        print 'ERROR no location'
        sys.exit(0)

    lfn_guid = {}
    for guid, info in contents.iteritems():
        lfn_guid[info['lfn']] = guid

    allLFCs = []

    for i in locrange:
        for location in locations[i]:
            l = TiersOfATLAS.getLocalCatalog(location)
            if l and l not in allLFCs and l.startswith(
                    'lfc') and l not in removefromlfclist:
                allLFCs.append(l)

    status, guidReplicas, guidSizes, guidmd5sum = getinputreplicas(
        lfn_guid, allLFCs)

    print guidReplicas
    locations_srm = {}
    for i in locrange:
        for location in locations[i]:
            try:
                if 'srm' in TiersOfATLAS.ToACache.sites[location]:
                    tempsrm = TiersOfATLAS.ToACache.sites[location]['srm']
                    tempsrm = re.sub('token:*\w*:', '', tempsrm)
Example #4
0
    locnum = 0
    for i in locrange:
        locnum += len(locations[i])
    if locnum == 0:
        print "ERROR no location"
        sys.exit(0)

    lfn_guid = {}
    for guid, info in contents.iteritems():
        lfn_guid[info["lfn"]] = guid

    allLFCs = []

    for i in locrange:
        for location in locations[i]:
            l = TiersOfATLAS.getLocalCatalog(location)
            if l and l not in allLFCs and l.startswith("lfc") and l not in removefromlfclist:
                allLFCs.append(l)

    status, guidReplicas, guidSizes, guidmd5sum = getinputreplicas(lfn_guid, allLFCs)

    print guidReplicas
    locations_srm = {}
    for i in locrange:
        for location in locations[i]:
            try:
                if "srm" in TiersOfATLAS.ToACache.sites[location]:
                    tempsrm = TiersOfATLAS.ToACache.sites[location]["srm"]
                    tempsrm = re.sub("token:*\w*:", "", tempsrm)
                    tempsrm = re.sub(":*\d*/srm/managerv2\?SFN=", "", tempsrm)
                    print tempsrm
Example #5
0
 def findLostFiles(self,datasetName,fileMap):
     methodName = 'findLostFiles'
     methodName += ' <datasetName={0}>'.format(datasetName)
     tmpLog = MsgWrapper(logger,methodName)
     tmpLog.info('start')
     try:
         # get replicas
         tmpStat,tmpOut = self.listDatasetReplicas(datasetName)
         if tmpStat != self.SC_SUCCEEDED:
             tmpLog.error('faild to get dataset replicas with {0}'.format(tmpOut))
             raise tmpStat,tmpOut
         # check if complete replica is available
         hasCompReplica = False
         datasetReplicaMap = tmpOut
         for tmpEndPoint in datasetReplicaMap.keys():
             if datasetReplicaMap[tmpEndPoint][-1]['found'] != None and \
                     datasetReplicaMap[tmpEndPoint][-1]['total'] == datasetReplicaMap[tmpEndPoint][-1]['found']:
                 hasCompReplica = True
                 break
         # no lost files
         if hasCompReplica:
             tmpLog.info('done with no lost files')
             self.SC_SUCCEEDED,{}
         # get LFNs and scopes
         lfnMap = {}
         scopeMap = {}
         for tmpGUID in fileMap.keys():
             tmpLFN = fileMap[tmpGUID]['lfn']
             lfnMap[tmpGUID] = tmpLFN
             scopeMap[tmpLFN] = fileMap[tmpGUID]['scope']
         # get LFC and SE
         lfcSeMap = {}
         for tmpEndPoint in datasetReplicaMap.keys():
             # get LFC
             lfc = TiersOfATLAS.getLocalCatalog(tmpEndPoint)
             # add map
             if not lfcSeMap.has_key(lfc):
                 lfcSeMap[lfc] = []
             # get SE
             seStr = TiersOfATLAS.getSiteProperty(tmpEndPoint, 'srm')
             tmpMatch = re.search('://([^:/]+):*\d*/',seStr)
             if tmpMatch != None:
                 se = tmpMatch.group(1)
                 if not se in lfcSeMap[lfc]:
                     lfcSeMap[lfc].append(se)
         # get SURLs
         for lfcHost,seList in lfcSeMap.iteritems():
             tmpStat,tmpRetMap = self.getSURLsFromLFC(lfnMap,lfcHost,seList,scopes=scopeMap)
             if tmpStat != self.SC_SUCCEEDED:
                 tmpLog.error('faild to get SURLs with {0}'.format(tmpRetMap))
                 raise tmpStat,tmpRetMap
             # look for missing files
             newLfnMap = {}
             for tmpGUID,tmpLFN in lfnMap.iteritems():
                 if not tmpLFN in tmpRetMap:
                     newLfnMap[tmpGUID] = tmpLFN
             lfnMap = newLfnMap
         tmpLog.info('done with lost '+','.join(str(tmpLFN) for tmpLFN in lfnMap.values()))
         return self.SC_SUCCEEDED,lfnMap
     except:
         errtype,errvalue = sys.exc_info()[:2]
         errCode = self.checkError(errtype)
         errMsg = '{0} {1}'.format(errtype.__name__,errvalue)
         tmpLog.error(errMsg)
         return errCode,'{0} : {1}'.format(methodName,errMsg)
Example #6
0
 def getAvailableFiles(self,datasetSpec,siteEndPointMap,siteMapper,ngGroup=[],checkLFC=False):
     '''
     Determine where the files of a dataset are physically available.

     datasetSpec     : dataset spec with .datasetName, .datasetID and
                       .Files (file specs carrying GUID/lfn/scope)
     siteEndPointMap : {siteName: [endpoint name or '*' wildcard pattern]}
     siteMapper      : resolves a siteName to its site spec
     ngGroup         : group IDs selecting endpoint-name patterns to exclude
     checkLFC        : when True, always scan the LFC even if a complete
                       replica exists
     Returns (SC_SUCCEEDED, {siteName: {'localdisk':[fileSpec,...],
     'localtape':[...], 'cache':[...], 'remote':[...]}}) or
     (SC_FAILED, error message).
     NOTE(review): ngGroup defaults to a mutable list; callers do not
     appear to rely on that, but it is shared across calls -- confirm.
     '''
     # make logger
     methodName = 'getAvailableFiles'
     methodName += ' <datasetID={0}>'.format(datasetSpec.datasetID)
     tmpLog = MsgWrapper(logger,methodName)
     tmpLog.info('start datasetName={0}'.format(datasetSpec.datasetName))
     try:
         # list of NG (no-good) endpoint patterns to exclude, per group
         ngEndPoints = []
         if 1 in ngGroup:
             ngEndPoints += ['_SCRATCHDISK$','_LOCALGROUPDISK$','_LOCALGROUPTAPE$','_USERDISK$',
                            '_DAQ$','_TMPDISK$','_TZERO$','_GRIDFTP$','MOCKTEST$']
         if 2 in ngGroup:
             ngEndPoints += ['_LOCALGROUPTAPE$',
                            '_DAQ$','_TMPDISK$','_TZERO$','_GRIDFTP$','MOCKTEST$']
         # get all associated endpoints
         siteAllEndPointsMap = {}
         for siteName,endPointPattList in siteEndPointMap.iteritems():
             # get all endpoints matching with patterns
             allEndPointList = []
             for endPointPatt in endPointPattList:
                 if '*' in endPointPatt:
                     # wildcard: translate shell-style '*' to regex '.*'
                     endPointPatt = endPointPatt.replace('*','.*')
                     for endPointToA in TiersOfATLAS.getAllDestinationSites():
                         if re.search('^'+endPointPatt+'$',endPointToA) != None:
                             if not endPointToA in allEndPointList:
                                 allEndPointList.append(endPointToA)
                 else:
                     # normal endpoint
                     if endPointPatt in TiersOfATLAS.getAllDestinationSites() and \
                            not endPointPatt in allEndPointList:
                         allEndPointList.append(endPointPatt)
             # get associated endpoints
             siteAllEndPointsMap[siteName] = []
             for endPoint in allEndPointList:
                 # append unless excluded or already present
                 if not self.checkNGEndPoint(endPoint,ngEndPoints) and \
                         not endPoint in siteAllEndPointsMap[siteName]:
                     siteAllEndPointsMap[siteName].append(endPoint)
                 else:
                     # already checked
                     continue
                 # get alternate name and pull in endpoints of the same GOC site
                 altName = TiersOfATLAS.getSiteProperty(endPoint,'alternateName')
                 if altName != None and altName != ['']:
                     for assEndPoint in TiersOfATLAS.resolveGOC({altName[0]:None})[altName[0]]:
                         if not assEndPoint in siteAllEndPointsMap[siteName] and \
                                not self.checkNGEndPoint(assEndPoint,ngEndPoints):
                             siteAllEndPointsMap[siteName].append(assEndPoint)
         # get replica map
         tmpStat,tmpOut = self.listDatasetReplicas(datasetSpec.datasetName)
         if tmpStat != self.SC_SUCCEEDED:
             tmpLog.error('faild to get dataset replicas with {0}'.format(tmpOut))
             raise tmpStat,tmpOut
         datasetReplicaMap = tmpOut
         # collect SE, LFC hosts, storage path, storage type
         lfcSeMap = {}
         storagePathMap = {}
         completeReplicaMap = {}
         siteHasCompleteReplica = False
         for siteName,allEndPointList in siteAllEndPointsMap.iteritems():
             tmpLfcSeMap = {}
             tmpStoragePathMap = {}
             tmpSiteSpec = siteMapper.getSite(siteName)
             for tmpEndPoint in allEndPointList:
                 # storage type
                 if TiersOfATLAS.isTapeSite(tmpEndPoint):
                     storageType = 'localtape'
                 else:
                     storageType = 'localdisk'
                 # no scan when site has complete replicas
                 # (total == found with found not None means complete)
                 if datasetReplicaMap.has_key(tmpEndPoint) and datasetReplicaMap[tmpEndPoint][-1]['found'] != None \
                    and datasetReplicaMap[tmpEndPoint][-1]['total'] == datasetReplicaMap[tmpEndPoint][-1]['found']:
                     completeReplicaMap[tmpEndPoint] = storageType
                     siteHasCompleteReplica = True
                 # no LFC scan for many-time datasets
                 if datasetSpec.isManyTime():
                     continue
                 # get LFC
                 lfc = TiersOfATLAS.getLocalCatalog(tmpEndPoint)
                 # add map
                 if not tmpLfcSeMap.has_key(lfc):
                     tmpLfcSeMap[lfc] = []
                 # get SE hostname from the srm URL
                 seStr = TiersOfATLAS.getSiteProperty(tmpEndPoint, 'srm')
                 tmpMatch = re.search('://([^:/]+):*\d*/',seStr)
                 if tmpMatch != None:
                     se = tmpMatch.group(1)
                     if not se in tmpLfcSeMap[lfc]:
                         tmpLfcSeMap[lfc].append(se)
                 else:
                     tmpLog.error('faild to extract SE from %s for %s:%s' % \
                                  (seStr,siteName,tmpEndPoint))
                 # get SE + path
                 seStr = TiersOfATLAS.getSiteProperty(tmpEndPoint, 'srm')
                 tmpMatch = re.search('(srm://.+)$',seStr)
                 if tmpMatch == None:
                     tmpLog.error('faild to extract SE+PATH from %s for %s:%s' % \
                                  (seStr,siteName,tmpEndPoint))
                     continue
                 # add full path to storage map
                 tmpSePath = tmpMatch.group(1)
                 tmpStoragePathMap[tmpSePath] = {'siteName':siteName,'storageType':storageType}
                 # add compact path (without the :port/srm/...?SFN= infix)
                 tmpSePath = re.sub('(:\d+)*/srm/[^\?]+\?SFN=','',tmpSePath)
                 tmpStoragePathMap[tmpSePath] = {'siteName':siteName,'storageType':storageType}
             # add to map to trigger LFC scan if complete replica is missing at the site
             if DataServiceUtils.isCachedFile(datasetSpec.datasetName,tmpSiteSpec):
                 pass
             elif not siteHasCompleteReplica or checkLFC:
                 for tmpKey,tmpVal in tmpLfcSeMap.iteritems():
                     if not lfcSeMap.has_key(tmpKey):
                         lfcSeMap[tmpKey] = []
                     lfcSeMap[tmpKey] += tmpVal
                 for tmpKey,tmpVal in tmpStoragePathMap.iteritems():
                     storagePathMap[tmpKey] = tmpVal
         # collect GUIDs and LFNs
         fileMap        = {}
         lfnMap         = {}
         lfnFileSepcMap = {}
         scopeMap       = {}
         for tmpFile in datasetSpec.Files:
             fileMap[tmpFile.GUID] = tmpFile.lfn
             lfnMap[tmpFile.lfn] = tmpFile
             lfnFileSepcMap[tmpFile.lfn] = tmpFile
             scopeMap[tmpFile.lfn] = tmpFile.scope
         # get SURLs from every relevant LFC
         surlMap = {}
         for lfcHost,seList in lfcSeMap.iteritems():
             tmpLog.debug('lookup in LFC:{0} for {1}'.format(lfcHost,str(seList)))
             tmpStat,tmpRetMap = self.getSURLsFromLFC(fileMap,lfcHost,seList,scopes=scopeMap)
             tmpLog.debug(str(tmpStat))
             if tmpStat != self.SC_SUCCEEDED:
                 raise RuntimeError,tmpRetMap
             for lfn,surls in tmpRetMap.iteritems():
                 if not surlMap.has_key(lfn):
                     surlMap[lfn] = surls
                 else:
                     surlMap[lfn] += surls
         # make return
         returnMap = {}
         for siteName,allEndPointList in siteAllEndPointsMap.iteritems():
             # set default return values
             if not returnMap.has_key(siteName):
                 returnMap[siteName] = {'localdisk':[],'localtape':[],'cache':[],'remote':[]}
             # loop over all files
             tmpSiteSpec = siteMapper.getSite(siteName)
             # check if the file is cached
             if DataServiceUtils.isCachedFile(datasetSpec.datasetName,tmpSiteSpec):
                 for tmpFileSpec in datasetSpec.Files:
                     # add to cached file list
                     returnMap[siteName]['cache'].append(tmpFileSpec)
             # complete replicas: every file of the dataset is available there
             if not checkLFC:
                 for tmpEndPoint in allEndPointList:
                     if completeReplicaMap.has_key(tmpEndPoint):
                         storageType = completeReplicaMap[tmpEndPoint]
                         returnMap[siteName][storageType] += datasetSpec.Files
         # loop over all available LFNs and match SURLs to storage paths
         avaLFNs = surlMap.keys()
         avaLFNs.sort()
         for tmpLFN in avaLFNs:
             tmpFileSpec = lfnFileSepcMap[tmpLFN]
             # loop over all SURLs
             for tmpSURL in surlMap[tmpLFN]:
                 for tmpSePath in storagePathMap.keys():
                     # check SURL prefix against known storage paths
                     if tmpSURL.startswith(tmpSePath):
                         # add
                         siteName = storagePathMap[tmpSePath]['siteName']
                         storageType = storagePathMap[tmpSePath]['storageType']
                         if not tmpFileSpec in returnMap[siteName][storageType]:
                             returnMap[siteName][storageType].append(tmpFileSpec)
                         break
         # dump per-site file counts for debugging
         dumpStr = ''
         for siteName,storageTypeFile in returnMap.iteritems():
             dumpStr += '{0}:('.format(siteName)
             for storageType,fileList in storageTypeFile.iteritems():
                 dumpStr += '{0}:{1},'.format(storageType,len(fileList))
             # drop trailing comma
             dumpStr = dumpStr[:-1]
             dumpStr += ') '
         # drop trailing space
         dumpStr= dumpStr[:-1]
         tmpLog.debug(dumpStr)
         # return
         tmpLog.info('done')
         return self.SC_SUCCEEDED,returnMap
     except:
         errtype,errvalue = sys.exc_info()[:2]
         errMsg = 'failed with {0} {1}'.format(errtype.__name__,errvalue)
         tmpLog.error(errMsg)
         return self.SC_FAILED,'{0}.{1} {2}'.format(self.__class__.__name__,methodName,errMsg)