class ResourcesTestCase( unittest.TestCase ):
  """ Integration tests for the Resources() CS helper.

  NOTE(review): these tests talk to a live Configuration Service (CS is only
  disabled for the local cache via Script.disableCS) and reference concrete
  sites/CEs (CERN, CPPM, ...) — they are environment-dependent, not unit tests.
  """

  def setUp( self ):
    # Disable use of the local CS cache and parse the command line so that
    # the Resources helper talks to the configured service.
    Script.disableCS( )
    Script.parseCommandLine()
    self.resources = Resources()

  def test_getSites( self ):
    """ getSites and getEligibleSites must agree for the same selection dict. """
    print
    result = self.resources.getSites( {'Name':['CERN','CPPM','PNPI']} )
    self.assertTrue( result['OK'], 'getSites' )
    sites = result['Value']
    print sites
    result = self.resources.getEligibleSites( {'Name':['CERN','CPPM','PNPI']} )
    self.assertTrue( result['OK'], 'getEligibleSites' )
    eligibleSites = result['Value']
    # With no extra eligibility constraints both calls should return the same set.
    self.assertEqual(sites, eligibleSites, 'sites and eligible sites are the same')

  def test_getResources( self ):
    """ Fetch the Storage resources defined for CERN. """
    print
    result = self.resources.getResources( 'CERN', 'Storage' )
    self.assertTrue( result['OK'], 'getResources' )
    ses = result['Value']
    print ses

  def test_getNodes( self ):
    """ Fetch the Queue nodes of a concrete CE ( CERN::ce130 ). """
    print
    result = self.resources.getNodes( 'CERN::ce130', 'Queue' )
    self.assertTrue( result['OK'], 'getNodes' )
    nodes = result['Value']
    print nodes

  def test_getEligibleResources( self ):
    """ Eligible Computing resources for a site list with Direct submission. """
    print
    result = self.resources.getEligibleResources( 'Computing', { 'Site':['CERN','CPPM','Zurich'],'SubmissionMode':'Direct' } )
    self.assertTrue( result['OK'], 'getEligibleResources' )
    ces = result['Value']
    print ces

  def test_getEligibleNodes( self ):
    """ Eligible AccessProtocol nodes filtered by site and by srm protocol. """
    print
    result = self.resources.getEligibleNodes( 'AccessProtocol', { 'Site':['CERN','CPPM','Zurich'] }, { 'Protocol':'srm' } )
    self.assertTrue( result['OK'], 'getEligibleNodes' )
    aps = result['Value']
    print aps

  def test_getEligibleComputingElements( self ):
    """ Eligible gLite CEs in a site mask, resolving each CE's Host value. """
    siteMask = ['LCG.CERN.ch','LCG.CPPM.fr']
    result = self.resources.getEligibleResources( 'Computing', {'Site':siteMask, 'SubmissionMode':'gLite', 'CEType':['LCG','CREAM']} )
    self.assertTrue( result['OK'], 'getEligibleResources' )
    print
    for ce in result['Value']:
      # 'unknown' is the fall-back when the Host option is absent in the CS.
      ceHost = self.resources.getComputingElementValue( ce, 'Host', 'unknown' )
      print ce, ceHost
class Synchronizer(object):
    '''
    Every time there is a successful write on the CS, Synchronizer().sync() is
    executed. It updates the database with the values on the CS.
    '''

    def __init__(self):
        """
        Constructor: instantiates the clients and helpers used by the sync.

        examples:
          >>> s = Synchronizer()
        """
        self.log = gLogger.getSubLogger(self.__class__.__name__)
        self.operations = Operations()
        self.resources = Resources()
        self.rStatus = ResourceStatusClient.ResourceStatusClient()
        self.rssConfig = RssConfiguration()
        self.diracAdmin = DiracAdmin()

    def sync(self, _eventName, _params):
        '''
        Main synchronizer method. It synchronizes the three types of elements:
        Sites, Resources and Nodes. Each _syncX method returns a dictionary with
        the additions and deletions.

        examples:
          >>> s.sync( None, None )
              S_OK()

        :Parameters:
          **_eventName** - any
            this parameter is ignored, but needed by caller function.
          **_params** - any
            this parameter is ignored, but needed by caller function.

        :return: S_OK
        '''
        defSyncResult = {'added': [], 'deleted': []}

        # Sites
        syncSites = self._syncSites()
        if not syncSites['OK']:
            self.log.error(syncSites['Message'])
        # On failure fall back to an empty added/deleted summary.
        syncSites = (syncSites['OK'] and syncSites['Value']) or defSyncResult

        # Resources
        syncResources = self._syncResources()
        if not syncResources['OK']:
            self.log.error(syncResources['Message'])
        syncResources = (syncResources['OK'] and syncResources['Value']) or defSyncResult

        # Nodes
        syncNodes = self._syncNodes()
        if not syncNodes['OK']:
            self.log.error(syncNodes['Message'])
        syncNodes = (syncNodes['OK'] and syncNodes['Value']) or defSyncResult

        # Notify via email the summary of the synchronization.
        self.notify(syncSites, syncResources, syncNodes)

        return S_OK()

    def notify(self, syncSites, syncResources, syncNodes):
        """
        Method sending email notification with the result of the synchronization.
        Email is sent to Operations( EMail/Production ) email address.

        examples:
          >>> s.notify( {}, {}, {} )
          >>> s.notify( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } }, {}, {} )

        :Parameters:
          **syncSites** - dict() ( keys: added, deleted )
            dictionary with the sites added and deleted from the DB
          **syncResources** - dict() ( keys: added, deleted )
            dictionary with the resources added and deleted from the DB
          **syncNodes** - dict() ( keys: added, deleted )
            dictionary with the nodes added and deleted from the DB

        :return: S_OK
        """
        # Human readable summary
        msgBody = self.getBody(syncSites, syncResources, syncNodes)
        self.log.info(msgBody)

        # Email addresses
        toAddress = self.operations.getValue('EMail/Production', '')
        fromAddress = self.rssConfig.getConfigFromAddress('')

        # Only send an email when both addresses are configured and there is
        # actually something to report.
        if toAddress and fromAddress and msgBody:
            # Subject of the email
            setup = gConfig.getValue('DIRAC/Setup')
            subject = '[RSS](%s) CS Synchronization' % setup
            self.diracAdmin.sendMail(toAddress, subject, msgBody, fromAddress=fromAddress)

    def getBody(self, syncSites, syncResources, syncNodes):
        """
        Method that given the outputs of the three synchronization methods builds
        a human readable string. Returns '' when there is nothing to report.

        examples:
          >>> s.getBody( {}, {}, {} )
              ''

        :Parameters:
          **syncSites** - dict() ( keys: added, deleted )
            dictionary with the sites added and deleted from the DB
          **syncResources** - dict() ( keys: added, deleted )
            dictionary with the resources added and deleted from the DB
          **syncNodes** - dict() ( keys: added, deleted )
            dictionary with the nodes added and deleted from the DB

        :return: str
        """
        syncMsg = ''

        for element, syncResult in [('SITES', syncSites), ('RESOURCES', syncResources),
                                    ('NODES', syncNodes)]:

            elementsMsg = ''

            for elementType, elements in syncResult.items():

                elementMsg = ''
                if elements['added']:
                    elementMsg += '\n %s added: %d \n' % (elementType, len(elements['added']))
                    elementMsg += ' ' + '\n '.join(elements['added'])
                if elements['deleted']:
                    elementMsg += '\n %s deleted: %d \n' % (elementType, len(elements['deleted']))
                    elementMsg += ' ' + '\n '.join(elements['deleted'])

                if elementMsg:
                    elementsMsg += '\n\n%s:\n' % elementType
                    elementsMsg += elementMsg

            if elementsMsg:
                syncMsg += '\n\n%s:' % element + elementsMsg

        return syncMsg

    #...........................................................................
    # Sync methods: Site, Resource & Node

    def _syncSites(self):
        """
        Method that synchronizes sites ( using their canonical name: CERN.ch )
        with elementType = 'Site'. It gets from the CS the eligible site names and
        then synchronizes them with the DB. If not on the DB, they are added. If
        in the DB but not on the CS, they are deleted.

        examples:
          >> s._syncSites()
             S_OK( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } } )

        :return: S_OK( { 'Site' : { 'added' : [], 'deleted' : [] }} ) | S_ERROR
        """
        # Get site names from the CS
        foundSites = self.resources.getEligibleSites()
        if not foundSites['OK']:
            return foundSites

        sites = {}

        # Synchronize with the DB
        resSync = self.__dbSync('Site', 'Site', foundSites['Value'])
        if not resSync['OK']:
            self.log.error('Error synchronizing Sites')
            self.log.error(resSync['Message'])
        else:
            sites = resSync['Value']

        return S_OK({'Site': sites})

    def _syncResources(self):
        """
        Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING
        dictionary keys. It makes one sync round per key ( elementType ). Gets
        from the CS the eligible Resource/<elementType> names and then
        synchronizes them with the DB. If not on the DB, they are added. If in
        the DB but not on the CS, they are deleted.

        examples:
          >>> s._syncResources()
              S_OK( { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] },
                      'Storage'   : { 'added' : [], 'deleted' : [] }, ... } )

        :return: S_OK( { 'RESOURCE_NODE_MAPPINGKey1' : { 'added' : [], 'deleted' : [] }, ...} )
        """
        resources = {}

        # Iterate over the different elementTypes for Resource ( Computing, Storage... )
        for elementType in RESOURCE_NODE_MAPPING.keys():

            # Get Resource / <elementType> names from CS
            foundResources = self.resources.getEligibleResources(elementType)
            if not foundResources['OK']:
                self.log.error(foundResources['Message'])
                continue

            # Translate CS result into a list
            foundResources = foundResources['Value']

            # Synchronize with the DB
            resSync = self.__dbSync('Resource', elementType, foundResources)
            if not resSync['OK']:
                self.log.error('Error synchronizing %s %s' % ('Resource', elementType))
                self.log.error(resSync['Message'])
            else:
                resources[elementType] = resSync['Value']

        return S_OK(resources)

    def _syncNodes(self):
        """
        Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING
        dictionary values. It makes one sync round per key ( elementType ). Gets
        from the CS the eligible Node/<elementType> names and then synchronizes
        them with the DB. If not on the DB, they are added. If in the DB but not
        on the CS, they are deleted.

        examples:
          >>> s._syncNodes()
              S_OK( { 'Queue' : { 'added' : [], 'deleted' : [] }, ... } )

        :return: S_OK( { 'RESOURCE_NODE_MAPPINGValue1' : { 'added' : [], 'deleted' : [] }, ...} )
        """
        nodes = {}

        # Iterate over the different elementTypes for Node ( Queue, AccessProtocol... )
        for elementType in RESOURCE_NODE_MAPPING.values():

            # Get Node / <elementType> names from CS
            foundNodes = self.resources.getEligibleNodes(elementType)
            if not foundNodes['OK']:
                # BUGFIX: the error path used to log foundNodes[ 'Value' ], but a
                # failed result carries its description under 'Message'.
                self.log.error(foundNodes['Message'])
                continue

            # Translate CS result into a list: maps NodeName to SiteName<>NodeName
            # to avoid duplicates
            foundNodes = [
                '%s<>%s' % (key, item)
                for key, subDict in foundNodes['Value'].items()
                for subList in subDict.values()
                for item in subList
            ]

            # Synchronize with the DB
            resSync = self.__dbSync('Node', elementType, foundNodes)
            if not resSync['OK']:
                self.log.error('Error synchronizing %s %s' % ('Node', elementType))
                self.log.error(resSync['Message'])
            else:
                nodes[elementType] = resSync['Value']

        return S_OK(nodes)

    #...........................................................................
    # DB sync actions

    def __dbSync(self, elementFamily, elementType, elementsCS):
        """
        Method synchronizing CS and DB. Compares <elementsCS> with <elementsDB>
        given the elementFamily and elementType ( e.g. Resource / Computing ).
        If there are missing elements in the DB, they are inserted. If there are
        missing elements in the CS, they are deleted from the DB. Note that the
        logs from the RSS DB are kept ! ( just in case ).

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>
          **elementsCS** - list
            list with the elements for <elementFamily>/<elementType> found in the CS

        :return: S_OK( { 'added' : [], 'deleted' : [] } ) | S_ERROR
        """
        # deleted, added default response
        syncRes = {
            'deleted': [],
            'added': [],
        }

        # Gets <elementFamily>/<elementType> elements from DB
        elementsDB = self.rStatus.selectStatusElement(
            elementFamily, 'Status',
            elementType=elementType,
            meta={'columns': ['name']})
        if not elementsDB['OK']:
            return elementsDB
        elementsDB = [elementDB[0] for elementDB in elementsDB['Value']]

        # Elements in DB but not in CS -> to be deleted
        toBeDeleted = list(set(elementsDB).difference(set(elementsCS)))
        if toBeDeleted:
            resDelete = self.__dbDelete(elementFamily, elementType, toBeDeleted)
            if not resDelete['OK']:
                return resDelete
            else:
                syncRes['deleted'] = toBeDeleted

        # Elements in CS but not in DB -> to be added
        toBeAdded = list(set(elementsCS).difference(set(elementsDB)))
        if toBeAdded:
            resInsert = self.__dbInsert(elementFamily, elementType, toBeAdded)
            if not resInsert['OK']:
                return resInsert
            else:
                syncRes['added'] = toBeAdded

        return S_OK(syncRes)

    def __dbDelete(self, elementFamily, elementType, toBeDeleted):
        """
        Method that given the elementFamily and elementType, deletes all entries
        in the History and Status tables for the given elements in toBeDeleted
        ( all their status Types ).

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>, just used for
            logging purposes.
          **toBeDeleted** - list
            list with the elements to be deleted

        :return: S_OK | S_ERROR
        """
        self.log.info('Deleting %s %s:' % (elementFamily, elementType))
        self.log.info(toBeDeleted)

        return self.rStatus._extermineStatusElement(elementFamily, toBeDeleted)

    def __dbInsert(self, elementFamily, elementType, toBeAdded):
        """
        Method that given the elementFamily and elementType, adds all elements in
        toBeAdded with their respective statusTypes, obtained from the CS. They
        are synchronized with status 'Unknown' and reason 'Synchronized'.

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>
          **toBeAdded** - list
            list with the elements to be added

        :return: S_OK | S_ERROR
        """
        self.log.info('Adding %s %s:' % (elementFamily, elementType))
        self.log.info(toBeAdded)

        statusTypes = self.rssConfig.getConfigStatusType(elementType)

        for element in toBeAdded:
            for statusType in statusTypes:
                resInsert = self.rStatus.addIfNotThereStatusElement(
                    elementFamily, 'Status',
                    name=element,
                    statusType=statusType,
                    status='Unknown',
                    elementType=elementType,
                    reason='Synchronized')
                if not resInsert['OK']:
                    return resInsert

        return S_OK()

# NOTE: a large commented-out _syncUsers() implementation that used to trail this
# class was removed as dead code; recover it from version control if needed.
class StorageFactory:
    """ Builds Storage plug-in objects for DIRAC Storage Elements, either from
    explicit parameters ( getStorage ) or from the CS ( getStorages ).
    """

    def __init__(self, useProxy=False, vo=None):
        self.valid = True
        # When True every storage is instantiated through the 'Proxy' plug-in.
        # ( the original code first set self.proxy = False and immediately
        #   overwrote it with useProxy — the dead assignment is removed )
        self.proxy = useProxy
        self.resourceStatus = ResourceStatus()
        self.resourcesHelper = Resources(vo=vo)

    ###########################################################################################
    #
    # Below are public methods for obtaining storage objects
    #

    def getStorageName(self, initialName):
        """ Resolve a (possibly aliased) SE name to its canonical CS name. """
        return self._getConfigStorageName(initialName)

    def getStorage(self, parameterDict):
        """ This instantiates a single storage for the details provided and doesn't
        check the CS.

        :param parameterDict: dict with at least 'StorageName' and 'ProtocolName';
                              'Protocol', 'Port', 'Host', 'Path', 'SpaceToken' and
                              'WSUrl' default to ''.
        :return: S_OK( storage object ) | S_ERROR
        """
        # The storage name must be supplied.
        if 'StorageName' not in parameterDict:
            errStr = "StorageFactory.getStorage: StorageName must be supplied"
            gLogger.error(errStr)
            return S_ERROR(errStr)
        storageName = parameterDict['StorageName']

        # ProtocolName must be supplied otherwise nothing with work.
        if 'ProtocolName' not in parameterDict:
            errStr = "StorageFactory.getStorage: ProtocolName must be supplied"
            gLogger.error(errStr)
            return S_ERROR(errStr)
        protocolName = parameterDict['ProtocolName']

        # The other options need not always be specified
        protocol = parameterDict.get('Protocol', '')
        port = parameterDict.get('Port', '')
        host = parameterDict.get('Host', '')
        path = parameterDict.get('Path', '')
        spaceToken = parameterDict.get('SpaceToken', '')
        wsPath = parameterDict.get('WSUrl', '')

        return self.__generateStorageObject(storageName, protocolName, protocol,
                                            path, host, port, spaceToken, wsPath)

    def getStorages(self, storageName, protocolList=None):
        """ Get an instance of a Storage based on the DIRAC SE name based on the CS
        entries.

        :param storageName: the DIRAC SE name i.e. 'CERN-RAW'
        :param protocolList: optional list of protocols if a sub-set is desired
                             i.e ['SRM2','SRM1'] ( default: all protocols )
        :return: S_OK( dict ) with keys StorageName, StorageOptions,
                 StorageObjects, LocalProtocols, RemoteProtocols,
                 ProtocolOptions, TurlProtocols | S_ERROR
        """
        # Avoid the shared mutable default argument.
        if protocolList is None:
            protocolList = []
        self.remoteProtocols = []
        self.localProtocols = []
        self.name = ''
        self.options = {}
        self.protocolDetails = []
        self.storages = []

        # Get the name of the storage provided
        res = self._getConfigStorageName(storageName)
        if not res['OK']:
            self.valid = False
            return res
        storageName = res['Value']
        self.name = storageName

        # Get the options defined in the CS for this storage
        res = self._getConfigStorageOptions(storageName)
        if not res['OK']:
            self.valid = False
            return res
        self.options = res['Value']

        # Get the protocol specific details
        res = self._getConfigStorageProtocols(storageName)
        if not res['OK']:
            self.valid = False
            return res
        self.protocolDetails = res['Value']

        requestedLocalProtocols = []
        requestedRemoteProtocols = []
        requestedProtocolDetails = []
        turlProtocols = []

        # Generate the protocol specific plug-ins
        self.storages = []
        for protocolDict in self.protocolDetails:
            protocolName = protocolDict['ProtocolName']
            # An empty protocolList means "all protocols requested".
            if protocolList and protocolName not in protocolList:
                continue
            protocol = protocolDict['Protocol']
            host = protocolDict['Host']
            path = protocolDict['Path']
            port = protocolDict['Port']
            spaceToken = protocolDict['SpaceToken']
            wsUrl = protocolDict['WSUrl']
            res = self.__generateStorageObject(storageName, protocolName, protocol,
                                               path=path, host=host, port=port,
                                               spaceToken=spaceToken, wsUrl=wsUrl)
            if res['OK']:
                self.storages.append(res['Value'])
                if protocolName in self.localProtocols:
                    turlProtocols.append(protocol)
                    requestedLocalProtocols.append(protocolName)
                if protocolName in self.remoteProtocols:
                    requestedRemoteProtocols.append(protocolName)
                requestedProtocolDetails.append(protocolDict)
            else:
                gLogger.info(res['Message'])

        if len(self.storages) > 0:
            resDict = {}
            resDict['StorageName'] = self.name
            resDict['StorageOptions'] = self.options
            resDict['StorageObjects'] = self.storages
            resDict['LocalProtocols'] = requestedLocalProtocols
            resDict['RemoteProtocols'] = requestedRemoteProtocols
            resDict['ProtocolOptions'] = requestedProtocolDetails
            resDict['TurlProtocols'] = turlProtocols
            return S_OK(resDict)
        else:
            errStr = "StorageFactory.getStorages: Failed to instantiate any storage protocols."
            gLogger.error(errStr, self.name)
            return S_ERROR(errStr)

    ###########################################################################################
    #
    # Below are internal methods for obtaining section/option/value configuration
    #

    def _getConfigStorageName(self, storageName):
        """ This gets the name of the storage the configuration service. If the
        storage is an alias for another the resolution is performed.

        :param storageName: the storage section to check in the CS
        :return: S_OK( resolved name ) | S_ERROR
        """
        result = self.resourcesHelper.getStorageElementOptionsDict(storageName)
        if not result['OK']:
            errStr = "StorageFactory._getConfigStorageName: Failed to get storage options"
            gLogger.error(errStr, result['Message'])
            return S_ERROR(errStr)
        if not result['Value']:
            errStr = "StorageFactory._getConfigStorageName: Supplied storage doesn't exist."
            gLogger.error(errStr, storageName)
            return S_ERROR(errStr)
        seConfig = result['Value']
        # Follow the Alias option if present, otherwise keep the supplied name.
        resolvedName = seConfig.get('Alias', storageName)
        return S_OK(resolvedName)

    def _getConfigStorageOptions(self, storageName):
        """ Get the options associated to the StorageElement as defined in the CS,
        merged with its current RSS status.
        """
        result = self.resourcesHelper.getStorageElementOptionsDict(storageName)
        if not result['OK']:
            errStr = "StorageFactory._getStorageOptions: Failed to get storage options."
            gLogger.error(errStr, "%s: %s" % (storageName, result['Message']))
            return S_ERROR(errStr)
        optionsDict = result['Value']

        result = self.resourceStatus.getStorageElementStatus(storageName)
        if not result['OK']:
            errStr = "StorageFactory._getStorageOptions: Failed to get storage status"
            gLogger.error(errStr, "%s: %s" % (storageName, result['Message']))
            return S_ERROR(errStr)
        optionsDict.update(result['Value'][storageName])

        return S_OK(optionsDict)

    def _getConfigStorageProtocols(self, storageName):
        """ Protocol specific information is present as sections in the Storage
        configuration.
        """
        result = getSiteForResource('Storage', storageName)
        if not result['OK']:
            return result
        site = result['Value']
        result = self.resourcesHelper.getEligibleNodes('AccessProtocol',
                                                       {'Site': site, 'Resource': storageName})
        if not result['OK']:
            return result
        nodesDict = result['Value']
        # Flatten the { site : { se : [ protocols ] } } structure.
        protocols = []
        for site in nodesDict:
            for se in nodesDict[site]:
                protocols.extend(nodesDict[site][se])
        sortedProtocols = sortList(protocols)
        protocolDetails = []
        for protocol in sortedProtocols:
            result = self._getConfigStorageProtocolDetails(storageName, protocol)
            if not result['OK']:
                return result
            protocolDetails.append(result['Value'])
        self.protocols = self.localProtocols + self.remoteProtocols
        return S_OK(protocolDetails)

    def _getConfigStorageProtocolDetails(self, storageName, protocol):
        """ Parse the contents of the protocol block.

        :return: S_OK( dict ) always containing the keys Access, Host, Path,
                 Port, Protocol, ProtocolName, SpaceToken, WSUrl | S_ERROR
        """
        # First obtain the options that are available
        result = getSiteForResource('Storage', storageName)
        if not result['OK']:
            return result
        site = result['Value']
        result = self.resourcesHelper.getNodeOptionsDict(site, 'Storage', storageName, protocol)
        if not result['OK']:
            return result
        optionsDict = result['Value']

        # We must have certain values internally even if not supplied in CS
        protocolDict = {'Access': '', 'Host': '', 'Path': '', 'Port': '',
                        'Protocol': '', 'ProtocolName': '', 'SpaceToken': '', 'WSUrl': ''}
        for option in optionsDict:
            protocolDict[option] = optionsDict[option]

        # Now update the local and remote protocol lists.
        # A warning will be given if the Access option is not set.
        if protocolDict['Access'] == 'remote':
            self.remoteProtocols.append(protocolDict['ProtocolName'])
        elif protocolDict['Access'] == 'local':
            self.localProtocols.append(protocolDict['ProtocolName'])
        else:
            errStr = "StorageFactory.__getProtocolDetails: The 'Access' option for %s:%s is neither 'local' or 'remote'." % (
                storageName, protocol)
            gLogger.warn(errStr)

        # The ProtocolName option must be defined
        if not protocolDict['ProtocolName']:
            errStr = "StorageFactory.__getProtocolDetails: 'ProtocolName' option is not defined."
            gLogger.error(errStr, "%s: %s" % (storageName, protocol))
            return S_ERROR(errStr)
        return S_OK(protocolDict)

    ###########################################################################################
    #
    # Below is the method for obtaining the object instantiated for a provided storage configuration
    #

    def __generateStorageObject(self, storageName, protocolName, protocol, path=None,
                                host=None, port=None, spaceToken=None, wsUrl=None):
        """ Import the <protocolName>Storage plug-in from the installed extensions
        and instantiate it.

        :return: S_OK( storage instance ) | S_ERROR
        """
        storageType = protocolName
        if self.proxy:
            storageType = 'Proxy'

        moduleRootPaths = getInstalledExtensions()
        path = path.rstrip('/')
        if not path:
            path = '/'
        for moduleRootPath in moduleRootPaths:
            gLogger.verbose("Trying to load from root path %s" % moduleRootPath)
            moduleFile = os.path.join(rootPath, moduleRootPath, "Resources", "Storage",
                                      "%sStorage.py" % storageType)
            gLogger.verbose("Looking for file %s" % moduleFile)
            if not os.path.isfile(moduleFile):
                continue
            # This enforces the convention that the plug-in must be named after the protocol
            moduleName = "%sStorage" % storageType
            try:
                storageModule = __import__('%s.Resources.Storage.%s' % (moduleRootPath, moduleName),
                                           globals(), locals(), [moduleName])
            except Exception as x:
                errStr = "StorageFactory._generateStorageObject: Failed to import %s: %s" % (storageName, x)
                gLogger.exception(errStr)
                return S_ERROR(errStr)
            try:
                # Instantiate through getattr instead of the original eval() string.
                storageClass = getattr(storageModule, moduleName)
                storage = storageClass(storageName, protocol, path, host, port, spaceToken, wsUrl)
                if not storage.isOK():
                    errStr = "StorageFactory._generateStorageObject: Failed to instantiate storage plug in."
                    gLogger.error(errStr, "%s" % (moduleName))
                    return S_ERROR(errStr)
            except Exception as x:
                errStr = "StorageFactory._generateStorageObject: Failed to instantiate %s(): %s" % (moduleName, x)
                gLogger.exception(errStr)
                return S_ERROR(errStr)

            # If use proxy, keep the original protocol name
            if self.proxy:
                storage.protocolName = protocolName
            return S_OK(storage)

        # BUGFIX: previously falling out of the loop without finding a module file
        # left 'storage' unbound and raised UnboundLocalError instead of S_ERROR.
        errStr = "StorageFactory._generateStorageObject: Failed to find %s plug-in in any extension." % storageType
        gLogger.error(errStr, storageName)
        return S_ERROR(errStr)
class StorageFactory:
    """Builds Storage plug-in objects for DIRAC Storage Elements, either from
    explicit parameters (getStorage) or from the CS (getStorages)."""

    def __init__(self, useProxy=False, vo=None):
        self.valid = True
        # When True every storage is instantiated through the 'Proxy' plug-in.
        # (the original first set self.proxy = False and immediately overwrote
        #  it with useProxy — the dead assignment is removed)
        self.proxy = useProxy
        self.resourceStatus = ResourceStatus()
        self.resourcesHelper = Resources(vo=vo)

    ###########################################################################################
    #
    # Below are public methods for obtaining storage objects
    #

    def getStorageName(self, initialName):
        """Resolve a (possibly aliased) SE name to its canonical CS name."""
        return self._getConfigStorageName(initialName)

    def getStorage(self, parameterDict):
        """This instantiates a single storage for the details provided and doesn't
        check the CS.

        :param parameterDict: dict with at least 'StorageName' and 'ProtocolName';
                              'Protocol', 'Port', 'Host', 'Path', 'SpaceToken' and
                              'WSUrl' default to ''. The whole dict is forwarded
                              to the plug-in as extra parameters.
        :return: S_OK( storage object ) | S_ERROR
        """
        # The storage name must be supplied.
        if "StorageName" not in parameterDict:
            errStr = "StorageFactory.getStorage: StorageName must be supplied"
            gLogger.error(errStr)
            return S_ERROR(errStr)
        storageName = parameterDict["StorageName"]

        # ProtocolName must be supplied otherwise nothing with work.
        if "ProtocolName" not in parameterDict:
            errStr = "StorageFactory.getStorage: ProtocolName must be supplied"
            gLogger.error(errStr)
            return S_ERROR(errStr)
        protocolName = parameterDict["ProtocolName"]

        # The other options need not always be specified
        protocol = parameterDict.get("Protocol", "")
        port = parameterDict.get("Port", "")
        host = parameterDict.get("Host", "")
        path = parameterDict.get("Path", "")
        spaceToken = parameterDict.get("SpaceToken", "")
        wsPath = parameterDict.get("WSUrl", "")

        return self.__generateStorageObject(
            storageName, protocolName, protocol, path, host, port, spaceToken, wsPath, parameterDict
        )

    def getStorages(self, storageName, protocolList=None):
        """Get an instance of a Storage based on the DIRAC SE name based on the CS
        entries.

        :param storageName: the DIRAC SE name i.e. 'CERN-RAW'
        :param protocolList: optional list of protocols if a sub-set is desired
                             i.e ['SRM2','SRM1'] (default: all protocols)
        :return: S_OK( dict ) with keys StorageName, StorageOptions,
                 StorageObjects, LocalProtocols, RemoteProtocols,
                 ProtocolOptions, TurlProtocols | S_ERROR
        """
        # Avoid the shared mutable default argument.
        if protocolList is None:
            protocolList = []
        self.remoteProtocols = []
        self.localProtocols = []
        self.name = ""
        self.options = {}
        self.protocolDetails = []
        self.storages = []

        # Get the name of the storage provided
        res = self._getConfigStorageName(storageName)
        if not res["OK"]:
            self.valid = False
            return res
        storageName = res["Value"]
        self.name = storageName

        # Get the options defined in the CS for this storage
        res = self._getConfigStorageOptions(storageName)
        if not res["OK"]:
            self.valid = False
            return res
        self.options = res["Value"]

        # Get the protocol specific details
        res = self._getConfigStorageProtocols(storageName)
        if not res["OK"]:
            self.valid = False
            return res
        self.protocolDetails = res["Value"]

        requestedLocalProtocols = []
        requestedRemoteProtocols = []
        requestedProtocolDetails = []
        turlProtocols = []

        # Generate the protocol specific plug-ins
        self.storages = []
        for protocolDict in self.protocolDetails:
            protocolName = protocolDict["ProtocolName"]
            # An empty protocolList means "all protocols requested".
            if protocolList and protocolName not in protocolList:
                continue
            protocol = protocolDict["Protocol"]
            host = protocolDict["Host"]
            path = protocolDict["Path"]
            port = protocolDict["Port"]
            spaceToken = protocolDict["SpaceToken"]
            wsUrl = protocolDict["WSUrl"]
            res = self.__generateStorageObject(
                storageName,
                protocolName,
                protocol,
                path=path,
                host=host,
                port=port,
                spaceToken=spaceToken,
                wsUrl=wsUrl,
                parameters=protocolDict,
            )
            if res["OK"]:
                self.storages.append(res["Value"])
                if protocolName in self.localProtocols:
                    turlProtocols.append(protocol)
                    requestedLocalProtocols.append(protocolName)
                if protocolName in self.remoteProtocols:
                    requestedRemoteProtocols.append(protocolName)
                requestedProtocolDetails.append(protocolDict)
            else:
                gLogger.info(res["Message"])

        if len(self.storages) > 0:
            resDict = {}
            resDict["StorageName"] = self.name
            resDict["StorageOptions"] = self.options
            resDict["StorageObjects"] = self.storages
            resDict["LocalProtocols"] = requestedLocalProtocols
            resDict["RemoteProtocols"] = requestedRemoteProtocols
            resDict["ProtocolOptions"] = requestedProtocolDetails
            resDict["TurlProtocols"] = turlProtocols
            return S_OK(resDict)
        else:
            errStr = "StorageFactory.getStorages: Failed to instantiate any storage protocols."
            gLogger.error(errStr, self.name)
            return S_ERROR(errStr)

    ###########################################################################################
    #
    # Below are internal methods for obtaining section/option/value configuration
    #

    def _getConfigStorageName(self, storageName):
        """This gets the name of the storage the configuration service. If the
        storage is an alias for another the resolution is performed.

        :param storageName: the storage section to check in the CS
        :return: S_OK( resolved name ) | S_ERROR
        """
        result = self.resourcesHelper.getStorageElementOptionsDict(storageName)
        if not result["OK"]:
            errStr = "StorageFactory._getConfigStorageName: Failed to get storage options"
            gLogger.error(errStr, result["Message"])
            return S_ERROR(errStr)
        if not result["Value"]:
            errStr = "StorageFactory._getConfigStorageName: Supplied storage doesn't exist."
            gLogger.error(errStr, storageName)
            return S_ERROR(errStr)
        # BUGFIX: the original tested `"Alias" in res["Value"]` but the variable
        # holding the options is `result` — `res` was undefined (NameError).
        if "Alias" in result["Value"]:
            # NOTE(review): self.rootConfigPath is not assigned in __init__ —
            # confirm it is provided elsewhere before relying on this branch.
            configPath = "%s/%s/Alias" % (self.rootConfigPath, storageName)
            aliasName = gConfig.getValue(configPath)
            result = self._getConfigStorageName(aliasName)
            if not result["OK"]:
                errStr = "StorageFactory._getConfigStorageName: Supplied storage doesn't exist."
                gLogger.error(errStr, configPath)
                return S_ERROR(errStr)
            resolvedName = result["Value"]
        else:
            resolvedName = storageName
        return S_OK(resolvedName)

    def _getConfigStorageOptions(self, storageName):
        """Get the options associated to the StorageElement as defined in the CS."""
        result = self.resourcesHelper.getStorageElementOptionsDict(storageName)
        if not result["OK"]:
            errStr = "StorageFactory._getStorageOptions: Failed to get storage options."
            gLogger.error(errStr, "%s: %s" % (storageName, result["Message"]))
            return S_ERROR(errStr)
        optionsDict = result["Value"]

        result = self.resourceStatus.getStorageStatus(storageName, "ReadAccess")
        if not result["OK"]:
            errStr = "StorageFactory._getStorageOptions: Failed to get storage status"
            gLogger.error(errStr, "%s: %s" % (storageName, result["Message"]))
            return S_ERROR(errStr)
        # NOTE(review): the RSS status is deliberately NOT merged into the
        # options here (the update line was disabled upstream) — confirm.
        # optionsDict.update( result[ 'Value' ][ storageName ] )

        return S_OK(optionsDict)

    def _getConfigStorageProtocols(self, storageName):
        """Protocol specific information is present as sections in the Storage
        configuration."""
        result = getSiteForResource(storageName)
        if not result["OK"]:
            return result
        site = result["Value"]
        result = self.resourcesHelper.getEligibleNodes("AccessProtocol", {"Site": site, "Resource": storageName})
        if not result["OK"]:
            return result
        # Each eligible node is a protocol entry.
        protocols = list(result["Value"])
        protocolDetails = []
        for protocol in protocols:
            result = self._getConfigStorageProtocolDetails(protocol)
            if not result["OK"]:
                return result
            protocolDetails.append(result["Value"])
        self.protocols = self.localProtocols + self.remoteProtocols
        return S_OK(protocolDetails)

    def _getConfigStorageProtocolDetails(self, protocol):
        """Parse the contents of the protocol block.

        :return: S_OK( dict ) always containing the keys Access, Host, Path,
                 Port, Protocol, ProtocolName, SpaceToken, WSUrl | S_ERROR
        """
        result = self.resourcesHelper.getAccessProtocolOptionsDict(protocol)
        if not result["OK"]:
            return result
        optionsDict = result["Value"]

        # We must have certain values internally even if not supplied in CS
        protocolDict = {
            "Access": "",
            "Host": "",
            "Path": "",
            "Port": "",
            "Protocol": "",
            "ProtocolName": "",
            "SpaceToken": "",
            "WSUrl": "",
        }
        for option in optionsDict:
            protocolDict[option] = optionsDict[option]

        # Now update the local and remote protocol lists.
        # A warning will be given if the Access option is not set.
        if protocolDict["Access"] == "remote":
            self.remoteProtocols.append(protocolDict["ProtocolName"])
        elif protocolDict["Access"] == "local":
            self.localProtocols.append(protocolDict["ProtocolName"])
        else:
            errStr = (
                "StorageFactory.__getProtocolDetails: The 'Access' option for %s is neither 'local' or 'remote'."
                % protocol
            )
            gLogger.warn(errStr)

        # The ProtocolName option must be defined
        if not protocolDict["ProtocolName"]:
            errStr = "StorageFactory.__getProtocolDetails: 'ProtocolName' option is not defined."
            gLogger.error(errStr, "%s" % protocol)
            return S_ERROR(errStr)
        return S_OK(protocolDict)

    ###########################################################################################
    #
    # Below is the method for obtaining the object instantiated for a provided storage configuration
    #

    def __generateStorageObject(
        self,
        storageName,
        protocolName,
        protocol,
        path=None,
        host=None,
        port=None,
        spaceToken=None,
        wsUrl=None,
        parameters=None,
    ):
        """Import the <protocolName>Storage plug-in from the installed extensions
        and instantiate it, forwarding any extra parameters.

        :return: S_OK( storage instance ) | S_ERROR
        """
        # Avoid the shared mutable default argument.
        if parameters is None:
            parameters = {}
        storageType = protocolName
        if self.proxy:
            storageType = "Proxy"

        moduleRootPaths = getInstalledExtensions()
        path = path.rstrip("/")
        if not path:
            path = "/"
        for moduleRootPath in moduleRootPaths:
            gLogger.verbose("Trying to load from root path %s" % moduleRootPath)
            moduleFile = os.path.join(rootPath, moduleRootPath, "Resources", "Storage", "%sStorage.py" % storageType)
            gLogger.verbose("Looking for file %s" % moduleFile)
            if not os.path.isfile(moduleFile):
                continue
            # This enforces the convention that the plug-in must be named after the protocol
            moduleName = "%sStorage" % storageType
            try:
                storageModule = __import__(
                    "%s.Resources.Storage.%s" % (moduleRootPath, moduleName), globals(), locals(), [moduleName]
                )
            except Exception as x:
                errStr = "StorageFactory._generateStorageObject: Failed to import %s: %s" % (storageName, x)
                gLogger.exception(errStr)
                return S_ERROR(errStr)
            try:
                # Instantiate through getattr instead of the original eval() string.
                storageClass = getattr(storageModule, moduleName)
                storage = storageClass(storageName, protocol, path, host, port, spaceToken, wsUrl)
                if not storage.isOK():
                    errStr = "StorageFactory._generateStorageObject: Failed to instantiate storage plug in."
                    gLogger.error(errStr, "%s" % (moduleName))
                    return S_ERROR(errStr)
            except Exception as x:
                errStr = "StorageFactory._generateStorageObject: Failed to instantiate %s(): %s" % (moduleName, x)
                gLogger.exception(errStr)
                return S_ERROR(errStr)

            # Set extra parameters if any
            if parameters:
                result = storage.setParameters(parameters)
                if not result["OK"]:
                    return result

            # If use proxy, keep the original protocol name
            if self.proxy:
                storage.protocolName = protocolName
            return S_OK(storage)

        # BUGFIX: previously falling out of the loop without finding a module file
        # left 'storage' unbound and raised UnboundLocalError instead of S_ERROR.
        errStr = "StorageFactory._generateStorageObject: Failed to find %s plug-in in any extension." % storageType
        gLogger.error(errStr, storageName)
        return S_ERROR(errStr)
class Synchronizer( object ):
  '''
  Every time there is a successful write on the CS, Synchronizer().sync() is
  executed. It updates the database with the values on the CS.
  '''

  def __init__( self ):
    """ Constructor.

        examples:
          >>> s = Synchronizer()
    """

    self.log        = gLogger.getSubLogger( self.__class__.__name__ )
    self.operations = Operations()
    self.resources  = Resources()
    self.rStatus    = ResourceStatusClient.ResourceStatusClient()
    self.rssConfig  = RssConfiguration()
    self.diracAdmin = DiracAdmin()

  def sync( self, _eventName, _params ):
    ''' Main synchronizer method. It synchronizes the three types of elements:
        Sites, Resources and Nodes. Each _syncX method returns a dictionary with
        the additions and deletions.

        examples:
          >>> s.sync( None, None )
              S_OK()

        :Parameters:
          **_eventName** - any
            this parameter is ignored, but needed by caller function.
          **_params** - any
            this parameter is ignored, but needed by caller function.

        :return: S_OK
    '''

    defSyncResult = { 'added' : [], 'deleted' : [] }

    # Sites
    syncSites = self._syncSites()
    if not syncSites[ 'OK' ]:
      self.log.error( syncSites[ 'Message' ] )
    # On failure fall back to the empty added/deleted summary so that the
    # notification below still has a well-formed structure to report.
    syncSites = ( syncSites[ 'OK' ] and syncSites[ 'Value' ] ) or defSyncResult

    # Resources
    syncResources = self._syncResources()
    if not syncResources[ 'OK' ]:
      self.log.error( syncResources[ 'Message' ] )
    syncResources = ( syncResources[ 'OK' ] and syncResources[ 'Value' ] ) or defSyncResult

    # Nodes
    syncNodes = self._syncNodes()
    if not syncNodes[ 'OK' ]:
      self.log.error( syncNodes[ 'Message' ] )
    syncNodes = ( syncNodes[ 'OK' ] and syncNodes[ 'Value' ] ) or defSyncResult

    # Notify via email to :
    self.notify( syncSites, syncResources, syncNodes )

    return S_OK()

  def notify( self, syncSites, syncResources, syncNodes ):
    """ Method sending email notification with the result of the synchronization.
        Email is sent to Operations( EMail/Production ) email address.

        examples:
          >>> s.notify( {}, {}, {} )
          >>> s.notify( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } }, {}, {} )
          >>> s.notify( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } },
                        { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] } }, {} )

        :Parameters:
          **syncSites** - dict() ( keys: added, deleted )
            dictionary with the sites added and deleted from the DB
          **syncResources** - dict() ( keys: added, deleted )
            dictionary with the resources added and deleted from the DB
          **syncNodes** - dict() ( keys: added, deleted )
            dictionary with the nodes added and deleted from the DB

        :return: S_OK
    """

    # Human readable summary
    msgBody = self.getBody( syncSites, syncResources, syncNodes )
    self.log.info( msgBody )

    # Email addresses
    toAddress = self.operations.getValue( 'EMail/Production', '' )
    fromAddress = self.rssConfig.getConfigFromAddress( '' )

    # Only send a mail if both addresses are configured and there is something
    # to report ( getBody returns '' when nothing changed ).
    if toAddress and fromAddress and msgBody:
      # Subject of the email
      setup = gConfig.getValue( 'DIRAC/Setup' )
      subject = '[RSS](%s) CS Synchronization' % setup
      self.diracAdmin.sendMail( toAddress, subject, msgBody, fromAddress = fromAddress )

  def getBody( self, syncSites, syncResources, syncNodes ):
    """ Method that given the outputs of the three synchronization methods
        builds a human readable string.

        examples:
          >>> s.getBody( {}, {}, {} )
              ''
          >>> s.getBody( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } }, {}, {} )
              '''
              SITES:
              Site:
                deleted:1
                  RubbishSite
              '''
          >>> s.getBody( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } },
                         { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] } }, {} )
              '''
              SITES:
              Site:
                deleted:1
                  RubbishSite
              RESOURCES:
              Computing:
                added:2
                  newCE01
                  newCE02
              '''

        :Parameters:
          **syncSites** - dict() ( keys: added, deleted )
            dictionary with the sites added and deleted from the DB
          **syncResources** - dict() ( keys: added, deleted )
            dictionary with the resources added and deleted from the DB
          **syncNodes** - dict() ( keys: added, deleted )
            dictionary with the nodes added and deleted from the DB

        :return: str
    """

    syncMsg = ''

    # One section per element family, one sub-section per elementType, and only
    # families / types that actually changed make it into the message.
    for element, syncResult in [ ( 'SITES', syncSites ), ( 'RESOURCES', syncResources ),
                                 ( 'NODES', syncNodes ) ]:

      elementsMsg = ''

      for elementType, elements in syncResult.items():

        elementMsg = ''
        if elements[ 'added' ]:
          elementMsg += '\n %s added: %d \n' % ( elementType, len( elements[ 'added' ] ) )
          elementMsg += ' ' + '\n '.join( elements[ 'added' ] )
        if elements[ 'deleted' ]:
          elementMsg += '\n %s deleted: %d \n' % ( elementType, len( elements[ 'deleted' ] ) )
          elementMsg += ' ' + '\n '.join( elements[ 'deleted' ] )

        if elementMsg:
          elementsMsg += '\n\n%s:\n' % elementType
          elementsMsg += elementMsg

      if elementsMsg:
        syncMsg += '\n\n%s:' % element + elementsMsg

    return syncMsg

  #.............................................................................
  # Sync methods: Site, Resource & Node

  def _syncSites( self ):
    """ Method that synchronizes sites ( using their canonical name: CERN.ch ) with
        elementType = 'Site'. It gets from the CS the eligible site names and then
        synchronizes them with the DB. If not on the DB, they are added. If in the DB
        but not on the CS, they are deleted.

        examples:
          >>> s._syncSites()
              S_OK( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } } )

        :return: S_OK( { 'Site' : { 'added' : [], 'deleted' : [] }} ) | S_ERROR
    """

    # Get site names from the CS
    foundSites = self.resources.getEligibleSites()
    if not foundSites[ 'OK' ]:
      return foundSites

    sites = {}

    # Synchronize with the DB
    resSync = self.__dbSync( 'Site', 'Site', foundSites[ 'Value' ] )
    if not resSync[ 'OK' ]:
      self.log.error( 'Error synchronizing Sites' )
      self.log.error( resSync[ 'Message' ] )
    else:
      sites = resSync[ 'Value' ]

    return S_OK( { 'Site' : sites } )

  def _syncResources( self ):
    """ Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING dictionary
        keys. It makes one sync round per key ( elementType ). Gets from the CS the
        eligible Resource/<elementType> names and then synchronizes them with the DB.
        If not on the DB, they are added. If in the DB but not on the CS, they are deleted.

        examples:
          >>> s._syncResources()
              S_OK( { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] },
                      'Storage'   : { 'added' : [], 'deleted' : [] },
                      ... } )

        :return: S_OK( { 'RESOURCE_NODE_MAPPINGKey1' : { 'added' : [], 'deleted' : [] }, ...} )
    """

    resources = {}

    # Iterate over the different elementTypes for Resource ( Computing, Storage... )
    for elementType in RESOURCE_NODE_MAPPING.keys():

      # Get Resource / <elementType> names from CS
      foundResources = self.resources.getEligibleResources( elementType )
      if not foundResources[ 'OK' ]:
        self.log.error( foundResources[ 'Message' ] )
        continue

      # Translate CS result into a list
      foundResources = foundResources[ 'Value' ]

      # Synchronize with the DB
      resSync = self.__dbSync( 'Resource', elementType, foundResources )
      if not resSync[ 'OK' ]:
        self.log.error( 'Error synchronizing %s %s' % ( 'Resource', elementType ) )
        self.log.error( resSync[ 'Message' ] )
      else:
        resources[ elementType ] = resSync[ 'Value' ]

    return S_OK( resources )

  def _syncNodes( self ):
    """ Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING dictionary
        values. It makes one sync round per key ( elementType ). Gets from the CS the
        eligible Node/<elementType> names and then synchronizes them with the DB.
        If not on the DB, they are added. If in the DB but not on the CS, they are deleted.

        examples:
          >>> s._syncNodes()
              S_OK( { 'Queue' : { 'added' : [], 'deleted' : [] }, ... } )

        :return: S_OK( { 'RESOURCE_NODE_MAPPINGValue1' : { 'added' : [], 'deleted' : [] }, ...} )
    """

    nodes = {}

    # Iterate over the different elementTypes for Node ( Queue, AccessProtocol... )
    for elementType in RESOURCE_NODE_MAPPING.values():

      # Get Node / <elementType> names from CS
      foundNodes = self.resources.getEligibleNodes( elementType )
      if not foundNodes[ 'OK' ]:
        # FIX: was foundNodes[ 'Value' ], which does not exist on an S_ERROR
        # result and raised KeyError instead of logging the failure.
        self.log.error( foundNodes[ 'Message' ] )
        continue

      # Translate CS result into a list : maps NodeName to SiteName<>NodeName to
      # avoid duplicates
      # Looong list comprehension, sorry !
      foundNodes = [ '%s<>%s' % ( key, item ) for key, subDict in foundNodes[ 'Value' ].items()
                     for subList in subDict.values() for item in subList ]

      # Synchronize with the DB
      resSync = self.__dbSync( 'Node', elementType, foundNodes )
      if not resSync[ 'OK' ]:
        self.log.error( 'Error synchronizing %s %s' % ( 'Node', elementType ) )
        self.log.error( resSync[ 'Message' ] )
      else:
        nodes[ elementType ] = resSync[ 'Value' ]

    return S_OK( nodes )

  #.............................................................................
  # DB sync actions

  def __dbSync( self, elementFamily, elementType, elementsCS ):
    """ Method synchronizing CS and DB. Compares <elementsCS> with <elementsDB>
        given the elementFamily and elementType ( e.g. Resource / Computing ).
        Elements missing in the DB are inserted; elements missing in the CS are
        deleted from the DB. Note that the logs from the RSS DB are kept ! ( just in case ).

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>
          **elementsCS** - list
            list with the elements for <elementFamily>/<elementType> found in the CS

        :return: S_OK( { 'added' : [], 'deleted' : [] } ) | S_ERROR
    """

    # deleted, added default response
    syncRes = { 'deleted' : [], 'added' : [], }

    # Gets <elementFamily>/<elementType> elements from DB
    elementsDB = self.rStatus.selectStatusElement( elementFamily, 'Status',
                                                   elementType = elementType,
                                                   meta = { 'columns' : [ 'name' ] } )
    if not elementsDB[ 'OK' ]:
      return elementsDB
    # selectStatusElement returns rows; keep only the single 'name' column
    elementsDB = [ elementDB[ 0 ] for elementDB in elementsDB[ 'Value' ] ]

    # Elements in DB but not in CS -> to be deleted
    toBeDeleted = list( set( elementsDB ).difference( set( elementsCS ) ) )
    if toBeDeleted:
      resDelete = self.__dbDelete( elementFamily, elementType, toBeDeleted )
      if not resDelete[ 'OK' ]:
        return resDelete
      else:
        syncRes[ 'deleted' ] = toBeDeleted

    # Elements in CS but not in DB -> to be added
    toBeAdded = list( set( elementsCS ).difference( set( elementsDB ) ) )
    if toBeAdded:
      resInsert = self.__dbInsert( elementFamily, elementType, toBeAdded )
      if not resInsert[ 'OK' ]:
        return resInsert
      else:
        syncRes[ 'added' ] = toBeAdded

    return S_OK( syncRes )

  def __dbDelete( self, elementFamily, elementType, toBeDeleted ):
    """ Method that given the elementFamily and elementType, deletes all entries
        in the History and Status tables for the given elements in toBeDeleted
        ( all their status Types ).

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>, just used for logging
            purposes.
          **toBeDeleted** - list
            list with the elements to be deleted

        :return: S_OK | S_ERROR
    """

    self.log.info( 'Deleting %s %s:' % ( elementFamily, elementType ) )
    self.log.info( toBeDeleted )

    return self.rStatus._extermineStatusElement( elementFamily, toBeDeleted )

  def __dbInsert( self, elementFamily, elementType, toBeAdded ):
    """ Method that given the elementFamily and elementType, adds all elements in
        toBeAdded with their respective statusTypes, obtained from the CS. They
        are synchronized with status 'Unknown' and reason 'Synchronized'.

        :Parameters:
          **elementFamily** - str
            any of the valid element families : Site, Resource, Node
          **elementType** - str
            any of the valid element types for <elementFamily>
          **toBeAdded** - list
            list with the elements to be added

        :return: S_OK | S_ERROR
    """

    self.log.info( 'Adding %s %s:' % ( elementFamily, elementType ) )
    self.log.info( toBeAdded )

    statusTypes = self.rssConfig.getConfigStatusType( elementType )

    # Every element is registered once per status type defined for its type;
    # addIfNotThereStatusElement keeps existing rows ( and their logs ) intact.
    for element in toBeAdded:
      for statusType in statusTypes:
        resInsert = self.rStatus.addIfNotThereStatusElement( elementFamily, 'Status',
                                                             name = element,
                                                             statusType = statusType,
                                                             status = 'Unknown',
                                                             elementType = elementType,
                                                             reason = 'Synchronized' )
        if not resInsert[ 'OK' ]:
          return resInsert

    return S_OK()

#...............................................................................
# # def _syncUsers( self ): # ''' # Sync Users: compares CS with DB and does the necessary modifications. # ''' # # gLogger.verbose( '-- Synchronizing users --') # # usersCS = CSHelpers.getRegistryUsers() # if not usersCS[ 'OK' ]: # return usersCS # usersCS = usersCS[ 'Value' ] # # gLogger.verbose( '%s users found in CS' % len( usersCS ) ) # # usersDB = self.rManagement.selectUserRegistryCache( meta = { 'columns' : [ 'login' ] } ) # if not usersDB[ 'OK' ]: # return usersDB # usersDB = [ userDB[0] for userDB in usersDB[ 'Value' ] ] # # # Users that are in DB but not in CS # toBeDeleted = list( set( usersDB ).difference( set( usersCS.keys() ) ) ) # gLogger.verbose( '%s users to be deleted' % len( toBeDeleted ) ) # # # Delete users # # FIXME: probably it is not needed since there is a DatabaseCleanerAgent # for userLogin in toBeDeleted: # # deleteQuery = self.rManagement.deleteUserRegistryCache( login = userLogin ) # # gLogger.verbose( '... %s' % userLogin ) # if not deleteQuery[ 'OK' ]: # return deleteQuery # # # AddOrModify Users # for userLogin, userDict in usersCS.items(): # # _name = userDict[ 'DN' ].split( '=' )[ -1 ] # _email = userDict[ 'Email' ] # # query = self.rManagement.addOrModifyUserRegistryCache( userLogin, _name, _email ) # gLogger.verbose( '-> %s' % userLogin ) # if not query[ 'OK' ]: # return query # # return S_OK() ################################################################################ #EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
class ResourcesTestCase(unittest.TestCase): def setUp(self): Script.disableCS() Script.parseCommandLine() self.resources = Resources() def test_getSites(self): print result = self.resources.getSites({'Name': ['CERN', 'CPPM', 'PNPI']}) self.assertTrue(result['OK'], 'getSites') sites = result['Value'] print sites result = self.resources.getEligibleSites( {'Name': ['CERN', 'CPPM', 'PNPI']}) self.assertTrue(result['OK'], 'getEligibleSites') eligibleSites = result['Value'] self.assertEqual(sites, eligibleSites, 'sites and eligible sites are the same') def test_getResources(self): print result = self.resources.getResources('CERN', 'Storage') self.assertTrue(result['OK'], 'getResources') ses = result['Value'] print ses def test_getNodes(self): print result = self.resources.getNodes('CERN::ce130', 'Queue') self.assertTrue(result['OK'], 'getNodes') nodes = result['Value'] print nodes def test_getEligibleResources(self): print result = self.resources.getEligibleResources( 'Computing', { 'Site': ['CERN', 'CPPM', 'Zurich'], 'SubmissionMode': 'Direct' }) self.assertTrue(result['OK'], 'getEligibleResources') ces = result['Value'] print ces def test_getEligibleNodes(self): print result = self.resources.getEligibleNodes( 'AccessProtocol', {'Site': ['CERN', 'CPPM', 'Zurich']}, {'Protocol': 'srm'}) self.assertTrue(result['OK'], 'getEligibleNodes') aps = result['Value'] print aps def test_getEligibleComputingElements(self): siteMask = ['LCG.CERN.ch', 'LCG.CPPM.fr'] result = self.resources.getEligibleResources( 'Computing', { 'Site': siteMask, 'SubmissionMode': 'gLite', 'CEType': ['LCG', 'CREAM'] }) self.assertTrue(result['OK'], 'getEligibleResources') print for ce in result['Value']: ceHost = self.resources.getComputingElementValue( ce, 'Host', 'unknown') print ce, ceHost