def getSEsForSites(ids):
    '''Retrieve the SEs for a site'''
    _refreshToACache()

    # sites can be a colon separated list like 'CERN:LYON:ASGC'
    re_srm = re.compile(r'srm://([^/]+)(/.+)')
    se_dict = {}
    for id in ids:
        # split on ':' so both single names and colon separated lists resolve
        sites = _resolveSites(id.upper().split(':'))
        if not sites:
            logger.error('Site %s not found in TiersOfATLAS', id)
            continue
        for site in sites:
            site_info = ToACache.sites[site]
            if 'srm' not in site_info:
                logger.error('Site %s has no srm info in TiersOfATLAS', site)
                continue
            sitesrm = site_info['srm']
            if sitesrm == '':
                logger.debug('Site %s has no srm info in TiersOfATLAS', site)
                continue
            # strip the space token prefix and the ':port/srm/managerv2?SFN='
            # part so that only srm://host/path remains
            sitesrm = re.sub(r'token:*\w*:', '', sitesrm)
            sitesrm = re.sub(r':*\d*/srm/managerv2\?SFN=', '', sitesrm)
            match = re_srm.match(sitesrm)
            if not match:
                logger.debug('Cannot extract host from %s', sitesrm)
                continue
            se_dict[match.group(1)] = True

    return se_dict.keys()
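# Worked example of the host extraction above (the endpoint string is
# hypothetical): a ToA srm entry such as
#   'token:ATLASDATADISK:srm://srm.example.org:8443/srm/managerv2?SFN=/pnfs/data'
# is reduced by the two substitutions to 'srm://srm.example.org/pnfs/data',
# so getSEsForSites would report 'srm.example.org' as the SE host.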
def getAllSites(only_lcg=True, resolve=False, excluded_clouds=[], excluded_sites=[], blacklist=True):
    '''list all sites defined in TiersOfATLAS'''
    _refreshToACache()
    _refreshCESEInfo()

    sites = []
    if 'TO' not in excluded_clouds:
        sites += getSites('CERN')
    if 'IT' not in excluded_clouds:
        sites += getSites('ITALYSITES')
    if 'ES' not in excluded_clouds:
        sites += getSites('SPAINSITES')
    if 'FR' not in excluded_clouds:
        sites += getSites('FRANCESITES')
    if 'UK' not in excluded_clouds:
        sites += getSites('UKSITES')
    if 'DE' not in excluded_clouds:
        sites += getSites('FZKSITES')
    if 'NL' not in excluded_clouds:
        sites += getSites('NLSITES')
    if 'TW' not in excluded_clouds:
        sites += getSites('TAIWANSITES')
    if 'CA' not in excluded_clouds:
        sites += getSites('CANADASITES')
    if not only_lcg:
        sites += getSites('USASITES')
        sites += getSites('NDGF')

    # exclude sites
    for site in excluded_sites:
        if site in sites:
            sites.remove(site)

    if resolve:
        sites = _resolveSites(sites)

    # exclude sites - check again after site resolution
    for site in excluded_sites:
        if site in sites:
            sites.remove(site)

    sites.sort()

    if blacklist:
        for bad_site in CESEInfo['blacklist']:
            try:
                sites.remove(bad_site)
            except ValueError:
                pass

    return sites
def _resolveSites(sites):
    '''Recursively expand ToA topology aliases into individual site names.'''
    new_sites = []
    for site in sites:
        if site in ToACache.topology:
            new_sites += _resolveSites(ToACache.topology[site])
        else:
            new_sites.append(site)

    return new_sites
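# Illustration of the recursion above with a hypothetical topology (the real
# alias mappings live in ToACache.topology):
#   {'FZKSITES': ['FZK', 'FZKSITES2'], 'FZKSITES2': ['DESY-HH', 'DESY-ZN']}
# would make _resolveSites(['FZKSITES', 'CERN']) expand nested aliases depth
# first and return ['FZK', 'DESY-HH', 'DESY-ZN', 'CERN'].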
def getCEsForSites(ids, excluded_ids=[], CREAM=False, cputime=0):
    '''Retrieve the CEs for a site'''
    _refreshToACache()
    _refreshCESEInfo()

    re_srm = re.compile(r'srm://([^/]+)(/.+)')
    ce_dict = {}
    for id in ids:
        # allow the full use of all ToA names as FZKSITES etc.
        sites = _resolveSites([id.upper()])
        if not sites:
            logger.error('Site %s not found in TiersOfATLAS', id)
            continue

        # remove excluded sites
        try:
            excluded_sites = config['ExcludedSites'].split()
        except ConfigError:
            excluded_sites = []
        if excluded_ids:
            excluded_sites = excluded_ids + excluded_sites
        for site in excluded_sites:
            if site in sites:
                logger.warning('Site %s has been excluded.', site)
                sites.remove(site)

        # try to find CEs associated to a site via srm tag and/or associated names tag
        for site in sites:
            site_info = ToACache.sites[site]
            ces = []
            if 'srm' in site_info:
                sitesrm = site_info['srm']
                if sitesrm == '':
                    logger.debug('Site %s has no srm info in TiersOfATLAS', site)
                    continue
                sitesrm = re.sub(r'token:*\w*:', '', sitesrm)
                sitesrm = re.sub(r':*\d*/srm/managerv2\?SFN=', '', sitesrm)
                match = re_srm.match(sitesrm)
                if not match:
                    logger.debug('Cannot extract host from %s', sitesrm)
                else:
                    try:
                        if CREAM:
                            ces = CESEInfo['se_info'][match.group(1)]['close_creamce']
                            if cputime > 0:
                                # prefer a CREAM CE whose published queue cputime
                                # is long enough for the requested job
                                for ces_tmp in ces:
                                    cescputime = CESEInfo['creamce_info'][ces_tmp]['cputime']
                                    if cescputime:
                                        cescputime = int(cescputime)
                                        if cputime < cescputime:
                                            ces = [ces_tmp]
                                            break
                        else:
                            ces = CESEInfo['se_info'][match.group(1)]['close_ce']
                    except KeyError:
                        logger.debug('Did not find CE-SE association for %s', match.group(1))

            if not ces:
                try:
                    lcg_site = site_info['alternateName'][-1].upper()
                    ces = CESEInfo['lcg_site_info'][lcg_site]
                except Exception:
                    logger.debug('No CE information on site %s. Maybe it fails the SAM test.', site)

            for ce in ces:
                ce_dict[ce] = True

    return ce_dict.keys()
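# A minimal usage sketch for the helpers above. It assumes the module is loaded
# inside a configured Ganga/ATLAS session, i.e. logger, config, ToACache and
# CESEInfo are the module-level globals refreshed by _refreshToACache() and
# _refreshCESEInfo(); the site names below are only illustrative ToA identifiers.
if __name__ == '__main__':
    lcg_sites = getAllSites(only_lcg=True, resolve=True)
    print('Resolved LCG sites: %d' % len(lcg_sites))

    # CEs and SEs for a cloud-level ToA alias and a single site name
    print('CEs for FZKSITES: %s' % getCEsForSites(['FZKSITES']))
    print('SEs for CERN: %s' % getSEsForSites(['CERN']))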