def doCommand(self): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getServiceStats` :params: :attr:`args`: a tuple - args[1]: a ValidElement - args[0]: should be the name of the Site :returns: {'Active':xx, 'Probing':yy, 'Banned':zz, 'Total':xyz} """ super(ServiceStats_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs["ResourceStatusClient"].getServiceStats( self.args[1] ) # , statusType = None )# self.args[0], self.args[1] )['Value'] except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand(self): """ Return getJobsEff from Jobs Client :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string: should be the name of the ValidElement returns: { 'JobsEff': X } """ super(JobsEff_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = self.APIs[ 'JobsClient' ].getJobsEff( self.args[0], self.args[1], self.args[2] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getResourceStats` :params: :attr:`args`: a tuple - `args[0]` string, a ValidElement. Should be in ('Site', 'Service') - `args[1]` should be the name of the Site or Service :returns: """ super( ResourceStats_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = self.APIs[ 'ResourceStatusClient' ].getResourceStats( self.args[0], self.args[1], statusType = None ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getServiceStats` :params: :attr:`args`: a tuple - args[1]: a ValidElement - args[0]: should be the name of the Site :returns: {'Active':xx, 'Probing':yy, 'Banned':zz, 'Total':xyz} """ super( ServiceStats_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = self.APIs[ 'ResourceStatusClient' ].getServiceStats( self.args[1] )#, statusType = None )# self.args[0], self.args[1] )['Value'] except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getStorageElementStats` :params: :attr:`args`: a tuple - `args[0]` should be in ['Site', 'Resource'] - `args[1]` should be the name of the Site or Resource :returns: """ super( StorageElementsStats_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if self.args[0] == 'Service': granularity = 'Site' name = self.args[1].split( '@' )[1] elif self.args[0] in [ 'Site', 'Resource' ]: granularity = self.args[0] name = self.args[1] else: return { 'Result' : S_ERROR( '%s is not a valid granularity' % self.args[ 0 ] ) } res = self.APIs[ 'ResourceStatusClient' ].getStorageElementStats( granularity, name, statusType = None ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Return getJobsEff from Jobs Client :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string: should be the name of the ValidElement returns: { 'JobsEff': X } """ super(JobsEff_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs['JobsClient'].getJobsEff(self.args[0], self.args[1], self.args[2]) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand(self): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getResourceStats` :params: :attr:`args`: a tuple - `args[0]` string, a ValidElement. Should be in ('Site', 'Service') - `args[1]` should be the name of the Site or Service :returns: """ super(ResourceStats_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs["ResourceStatusClient"].getResourceStats(self.args[0], self.args[1], statusType=None) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand(self): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getStorageElementStats` :params: :attr:`args`: a tuple - `args[0]` should be in ['Site', 'Resource'] - `args[1]` should be the name of the Site or Resource :returns: """ super(StorageElementsStats_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: if self.args[0] == "Service": granularity = "Site" name = self.args[1].split("@")[1] elif self.args[0] in ["Site", "Resource"]: granularity = self.args[0] name = self.args[1] else: return {"Result": S_ERROR("%s is not a valid granularity" % self.args[0])} res = self.APIs["ResourceStatusClient"].getStorageElementStats(granularity, name, statusType=None) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def _executeCheck(self, _arg):
  '''
  Method executed by the threads in the pool. Picks one element from the
  common queue, and enforces policies on that element.
  '''
  # Init the APIs beforehand, and reuse them.
  __APIs__ = ['ResourceStatusClient', 'ResourceManagementClient', 'GGUSTicketsClient']
  clients = knownAPIs.initAPIs(__APIs__, {})

  pep = PEP(clients=clients)

  while True:

    toBeChecked = self.sitesToBeChecked.get()

    pepDict = {'granularity': toBeChecked[0],
               'name': toBeChecked[1],
               'statusType': toBeChecked[2],
               'status': toBeChecked[3],
               'formerStatus': toBeChecked[4],
               'siteType': toBeChecked[5],
               'tokenOwner': toBeChecked[6]}

    try:

      self.log.info("Checking Site %s, with type/status: %s/%s" %
                    (pepDict['name'], pepDict['statusType'], pepDict['status']))

      pepRes = pep.enforce(**pepDict)
      if 'PolicyCombinedResult' in pepRes and 'Status' in pepRes['PolicyCombinedResult']:
        pepStatus = pepRes['PolicyCombinedResult']['Status']
        if pepStatus != pepDict['status']:
          self.log.info('Updated Site %s (%s) from %s to %s' %
                        (pepDict['name'], pepDict['statusType'], pepDict['status'], pepStatus))

      # remove from InCheck list
      self.siteNamesInCheck.remove((pepDict['name'], pepDict['statusType']))

    except Exception:
      self.log.exception("SSInspector._executeCheck Checking Site %s, with type/status: %s/%s" %
                         (pepDict['name'], pepDict['statusType'], pepDict['status']))
      try:
        # list.remove raises ValueError when the element is already gone
        self.siteNamesInCheck.remove((pepDict['name'], pepDict['statusType']))
      except (ValueError, IndexError):
        pass
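# Illustrative only (generic stand-in, not the agent's own thread pool): the
# producer/consumer shape behind _executeCheck. A queue is filled with
# (granularity, name, statusType, status, formerStatus, siteType, tokenOwner)
# tuples and a fixed number of worker threads drain it, which is what the
# method above does with self.sitesToBeChecked.
def _exampleQueueConsumption(workerCount=2):
  import Queue
  import threading

  toBeChecked = Queue.Queue()
  # hypothetical work item, mirroring the tuple layout unpacked into pepDict above
  toBeChecked.put(('Site', 'LCG.Example.org', '', 'Active', 'Active', 'T1', 'RS_SVC'))

  def worker():
    while True:
      _item = toBeChecked.get()
      # a real worker would build pepDict from _item and call pep.enforce(**pepDict)
      toBeChecked.task_done()

  for _ in range(workerCount):
    thread = threading.Thread(target=worker)
    thread.setDaemon(True)
    thread.start()

  toBeChecked.join()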
def doCommand(self): """ Returns simple pilots efficiency :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement returns: { 'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad' } """ super(PilotsEffSimpleCached_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: if self.args[0] == 'Service': name = self.APIs['ResourceStatusClient'].getGeneralName( self.args[0], self.args[1], 'Site') name = name['Value'][0] granularity = 'Site' elif self.args[0] == 'Site': name = self.args[1] granularity = self.args[0] else: return { 'Result': S_ERROR('%s is not a valid granularity' % self.args[0]) } clientDict = { 'name': name, 'commandName': 'PilotsEffSimpleEverySites', 'value': 'PE_S', 'opt_ID': 'NULL', 'meta': { 'columns': 'Result' } } res = self.APIs['ResourceManagementClient'].getClientCache( **clientDict) if res['OK']: res = res['Value'] if res == None or res == []: res = S_OK('Idle') else: res = S_OK(res[0]) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand( self ): """ Return getQuality from DIRAC's accounting ReportsClient `args`: a tuple - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement - args[2]: optional dateTime object: a "from" date - args[3]: optional dateTime object: a "to" date :returns: {'Result': None | a float between 0.0 and 100.0} """ super( TransferQuality_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) self.APIs[ 'ReportsClient' ].rpcClient = self.APIs[ 'ReportGenerator' ] try: if self.args[2] is None: fromD = datetime.utcnow()-timedelta(hours = 2) else: fromD = self.args[2] if self.args[3] is None: toD = datetime.utcnow() else: toD = self.args[3] res = self.APIs[ 'ReportsClient' ].getReport( 'DataOperation', 'Quality', fromD, toD, { 'OperationType': 'putAndRegister', 'Destination' : [ self.args[1] ] }, 'Channel' ) if res['OK']: pr_q_d = res[ 'Value' ][ 'data' ] values = [] if len( pr_q_d ) == 1: for k in pr_q_d.keys(): for n in pr_q_d[ k ].values(): values.append( n ) res = S_OK( sum( values ) / len( values ) ) else: for n in pr_q_d['Total'].values(): values.append(n) res = S_OK( sum( values ) / len( values ) ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self, CEs=None):
  """
  Returns successful pilots using the DIRAC accounting system for every CE
  for the last self.args[0] hours

  :params:
    :attr:`CEs`: list of CEs (when not given, take every CE)

  :returns:
  """

  super(SuccessfullPilotsByCESplitted_Command, self).doCommand()
  self.APIs = initAPIs(self.__APIs__, self.APIs)

  try:

    if CEs is None:
      meta = {'columns': 'ResourceName'}
      CEs = self.APIs['ResourceStatusClient'].getResource(resourceType=['CE', 'CREAMCE'], meta=meta)
      if not CEs['OK']:
        return {'Result': CEs}
      CEs = [ce[0] for ce in CEs['Value']]

    if not CEs:
      return {'Result': S_ERROR('CEs is empty')}

    self.APIs['ReportsClient'].rpcClient = self.APIs['ReportGenerator']

    fromD = datetime.utcnow() - timedelta(hours=self.args[0])
    toD = datetime.utcnow()

    succ_pilots = self.APIs['ReportsClient'].getReport('Pilot', 'NumberOfPilots', fromD, toD,
                                                       {'GridStatus': ['Done'], 'GridCE': CEs},
                                                       'GridCE')
    if not succ_pilots['OK']:
      return {'Result': succ_pilots}
    succ_pilots = succ_pilots['Value']

    listOfCEs = succ_pilots['data'].keys()
    plotGran = succ_pilots['granularity']

    singlePlots = {}

    for CE in listOfCEs:
      if CE in CEs:
        plot = {}
        plot['data'] = {CE: succ_pilots['data'][CE]}
        plot['granularity'] = plotGran
        singlePlots[CE] = plot

    res = S_OK({'Pilot': singlePlots})

  except Exception, e:
    _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e)
    gLogger.exception(_msg)
    return {'Result': S_ERROR(_msg)}

  return {'Result': res}
def doCommand(self, sites = None): """ Returns running and runned jobs, querying the WMSHistory for the last self.args[0] hours :params: :attr:`sites`: list of sites (when not given, take every sites) :returns: """ super( RunningJobsBySiteSplitted_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if sites is None: sites = self.APIs[ 'ResourceStatusClient' ].getSite( meta = {'columns': 'SiteName'} ) if not sites['OK']: return { 'Result' : sites } sites = [ si[0] for si in sites['Value'] ] if not sites: return { 'Result' : S_ERROR( 'Sites is empty' ) } self.APIs[ 'ReportsClient' ].rpcClient = self.APIs[ 'ReportGenerator' ] fromD = datetime.utcnow()-timedelta(hours = self.args[0]) toD = datetime.utcnow() run_jobs = self.APIs[ 'ReportsClient' ].getReport('WMSHistory', 'NumberOfJobs', fromD, toD, {}, 'Site') if not run_jobs['OK']: return { 'Result' : run_jobs } run_jobs = run_jobs['Value'] listOfSites = run_jobs['data'].keys() plotGran = run_jobs['granularity'] singlePlots = {} for site in listOfSites: if site in sites: plot = {} plot['data'] = {site: run_jobs['data'][site]} plot['granularity'] = plotGran singlePlots[site] = plot res = S_OK( { 'WMSHistory' : singlePlots } ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self, resources=None):
  """
  Returns downtimes information for all the resources in input.

  :params:
    :attr:`resources`: list of resource names (when not given, take every resource)

  :returns:
    {'ResourceName': {'SEVERITY': 'OUTAGE'|'AT_RISK', 'StartDate': 'aDate', ...} ... }
  """

  self.APIs = initAPIs(self.__APIs__, self.APIs)

  try:

    if resources is None:
      meta = {'columns': 'ResourceName'}
      resources = self.APIs['ResourceStatusClient'].getResource(meta=meta)
      if not resources['OK']:
        return {'Result': resources}
      resources = [re[0] for re in resources['Value']]

    resGOC = self.APIs['GOCDBClient'].getStatus('Resource', resources, None, 120)
    if not resGOC['OK']:
      return {'Result': resGOC}
    resGOC = resGOC['Value']

    if resGOC is None:
      resGOC = []

    res = {}

    for dt_ID in resGOC:
      dt = {}
      dt['ID'] = dt_ID
      dt['StartDate'] = resGOC[dt_ID]['FORMATED_START_DATE']
      dt['EndDate'] = resGOC[dt_ID]['FORMATED_END_DATE']
      dt['Severity'] = resGOC[dt_ID]['SEVERITY']
      dt['Description'] = resGOC[dt_ID]['DESCRIPTION'].replace('\'', '')
      dt['Link'] = resGOC[dt_ID]['GOCDB_PORTAL_URL']
      res[dt_ID] = dt

    res = S_OK(res)

  except Exception, e:
    _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e)
    gLogger.exception(_msg)
    return {'Result': S_ERROR(_msg)}

  return {'Result': res}
def doCommand(self, sites = None): """ Returns failed jobs using the DIRAC accounting system for every site for the last self.args[0] hours :params: :attr:`sites`: list of sites (when not given, take every site) :returns: """ super( FailedPilotsBySiteSplitted_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if sites is None: sites = self.APIs[ 'ResourceStatusClient' ].getSite( meta = {'columns': 'SiteName'} ) if not sites['OK']: return { 'Result' : sites } sites = [ si[0] for si in sites['Value'] ] if not sites: return { 'Result' : S_ERROR( 'Sites is empty' ) } self.APIs[ 'ReportsClient' ].rpcClient = self.APIs[ 'ReportGenerator' ] fromD = datetime.utcnow()-timedelta(hours = self.args[0]) toD = datetime.utcnow() failed_pilots = self.APIs[ 'ReportsClient' ].getReport('Pilot', 'NumberOfPilots', fromD, toD, {'GridStatus':['Aborted'], 'Site':sites}, 'Site') if not failed_pilots['OK']: return { 'Result' : failed_pilots } failed_pilots = failed_pilots['Value'] listOfSites = failed_pilots['data'].keys() plotGran = failed_pilots['granularity'] singlePlots = {} for site in listOfSites: if site in sites: plot = {} plot['data'] = { site: failed_pilots['data'][site] } plot['granularity'] = plotGran singlePlots[site] = plot res = S_OK( { 'Pilot': singlePlots } ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Returns transfer quality from the plot cached in the accounting cache. :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement :returns: {'Result': None | a float between 0.0 and 100.0} """ super(TransferQualityFromCachedPlot_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: name = self.args[1] plotType = self.args[2] plotName = self.args[3] accountingDict = { 'name' : name, 'plotType' : plotType, 'plotName' : plotName } kwargs = { 'meta' : { 'columns' : 'Result' } } accountingDict.update( kwargs ) res = self.APIs[ 'ResourceManagementClient' ].getAccountingCache( **accountingDict ) if res['OK']: res = res[ 'Value'] if res == []: res = S_OK( None ) else: res = eval(res[0][0]) s,n = 0,0 SE = res[ 'data' ].keys()[ 0 ] n = n + len(res['data'][SE]) s = s + sum(res['data'][SE].values()) meanQuality = s/n res = S_OK( meanQuality ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ): """ Returns transfer quality plot as it is cached in the accounting cache. :attr:`args`: - args[0]: string - should be a ValidElement - args[1]: string - should be the name of the ValidElement - args[2]: string - should be the plot type - args[3]: string - should be the plot name :returns: a plot """ super( CachedPlot_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: granularity = self.args[0] name = self.args[1] plotType = self.args[2] plotName = self.args[3] if granularity == 'Service': name = name.split('@')[1] accountingDict = { 'name' : name, 'plotType' : plotType, 'plotName' : plotName } kwargs = { 'meta' : { 'columns' : 'Result' } } accountingDict.update( kwargs ) res = self.APIs[ 'ResourceManagementClient' ].getAccountingCache( **accountingDict ) if res[ 'OK' ]: res = res[ 'Value' ] if res == []: res = S_OK( { 'data' : {}, 'granularity' : 900 } ) else: res = S_OK( eval( res[0] ) ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Returns simple pilots efficiency :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement returns: { 'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad' } """ super(PilotsEffSimpleCached_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: if self.args[0] == "Service": name = self.APIs["ResourceStatusClient"].getGeneralName(self.args[0], self.args[1], "Site") name = name["Value"][0] granularity = "Site" elif self.args[0] == "Site": name = self.args[1] granularity = self.args[0] else: return {"Result": S_ERROR("%s is not a valid granularity" % self.args[0])} clientDict = { "name": name, "commandName": "PilotsEffSimpleEverySites", "value": "PE_S", "opt_ID": "NULL", "meta": {"columns": "Result"}, } res = self.APIs["ResourceManagementClient"].getClientCache(**clientDict) if res["OK"]: res = res["Value"] if res == None or res == []: res = S_OK("Idle") else: res = S_OK(res[0]) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand( self, resources = None ): """ Returns downtimes information for all the resources in input. :params: :attr:`sites`: list of resource names (when not given, take every resource) :returns: {'ResourceName': {'SEVERITY': 'OUTAGE'|'AT_RISK', 'StartDate': 'aDate', ...} ... } """ self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if resources is None: meta = { 'columns' : 'ResourceName' } resources = self.APIs[ 'ResourceStatusClient' ].getResource( meta = meta ) if not resources['OK']: return { 'Result' : resources } resources = [ re[0] for re in resources['Value'] ] resGOC = self.APIs[ 'GOCDBClient' ].getStatus( 'Resource', resources, None, 120 ) if not resGOC['OK']: return { 'Result' : resGOC } resGOC = resGOC['Value'] if resGOC == None: resGOC = [] res = {} for dt_ID in resGOC: dt = {} dt['ID'] = dt_ID dt['StartDate'] = resGOC[dt_ID]['FORMATED_START_DATE'] dt['EndDate'] = resGOC[dt_ID]['FORMATED_END_DATE'] dt['Severity'] = resGOC[dt_ID]['SEVERITY'] dt['Description'] = resGOC[dt_ID]['DESCRIPTION'].replace( '\'', '' ) dt['Link'] = resGOC[dt_ID]['GOCDB_PORTAL_URL'] res[dt_ID] = dt res = S_OK( res ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Returns simple jobs efficiency :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement returns: { 'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad' } """ super(JobsEffSimpleCached_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if self.args[0] == 'Service': name = self.APIs[ 'ResourceStatusClient' ].getGeneralName( self.args[0], self.args[1], 'Site' ) name = name[ 'Value' ][ 0 ] granularity = 'Site' elif self.args[0] == 'Site': name = self.args[1] granularity = self.args[0] else: return { 'Result' : S_ERROR( '%s is not a valid granularity' % self.args[ 0 ] ) } clientDict = { 'name' : name, 'commandName' : 'JobsEffSimpleEveryOne', 'value' : 'JE_S', 'opt_ID' : 'NULL', 'meta' : { 'columns' : 'Result' } } res = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict ) if res[ 'OK' ]: res = res[ 'Value' ] if res == None or res == []: res = S_OK( 'Idle' ) else: res = S_OK( res[ 0 ] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def _executeCheck(self, _arg):
  '''
  Method executed by the threads in the pool. Picks one element from the
  common queue, and enforces policies on that element.
  '''
  # Init the APIs beforehand, and reuse them.
  __APIs__ = ['ResourceStatusClient', 'ResourceManagementClient']
  clients = knownAPIs.initAPIs(__APIs__, {})

  pep = PEP(clients=clients)

  while True:

    toBeChecked = self.resourcesToBeChecked.get()

    pepDict = {'granularity': toBeChecked[0],
               'name': toBeChecked[1],
               'statusType': toBeChecked[2],
               'status': toBeChecked[3],
               'formerStatus': toBeChecked[4],
               'siteType': toBeChecked[5],
               'resourceType': toBeChecked[6],
               'tokenOwner': toBeChecked[7]}

    try:

      self.log.info("Checking Resource %s, with type/status: %s/%s" %
                    (pepDict['name'], pepDict['statusType'], pepDict['status']))

      pepRes = pep.enforce(**pepDict)
      if 'PolicyCombinedResult' in pepRes and 'Status' in pepRes['PolicyCombinedResult']:
        pepStatus = pepRes['PolicyCombinedResult']['Status']
        if pepStatus != pepDict['status']:
          self.log.info('Updated Resource %s (%s) from %s to %s' %
                        (pepDict['name'], pepDict['statusType'], pepDict['status'], pepStatus))

      # remove from InCheck list
      self.resourceNamesInCheck.remove((pepDict['name'], pepDict['statusType']))

    except Exception:
      self.log.exception("RSInspector._executeCheck Checking Resource %s, with type/status: %s/%s" %
                         (pepDict['name'], pepDict['statusType'], pepDict['status']))
      try:
        # list.remove raises ValueError when the element is already gone
        self.resourceNamesInCheck.remove((pepDict['name'], pepDict['statusType']))
      except (ValueError, IndexError):
        pass
def doCommand(self): """ Return getPilotsEff from Pilots Client """ super(PilotsEff_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs["PilotsClient"].getPilotsEff(self.args[0], self.args[1], self.args[2]) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand(self, RSClientIn=None):
  """
  Returns simple pilots efficiency

  :attr:`args`:
    - args[0]: string - should be a ValidElement
    - args[1]: string - should be the name of the ValidElement

  returns:
    {'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad'}
  """

  super(PilotsEffSimple_Command, self).doCommand()
  self.APIs = initAPIs(self.__APIs__, self.APIs)

  try:

    if self.args[0] == 'Service':
      name = self.APIs['ResourceStatusClient'].getGeneralName(self.args[0], self.args[1], 'Site')
      name = name['Value'][0]
      granularity = 'Site'
    elif self.args[0] in ['Site', 'Resource']:
      name = self.args[1]
      granularity = self.args[0]
    else:
      return {'Result': S_ERROR('%s is not a valid granularity' % self.args[0])}

    res = self.APIs['PilotsClient'].getPilotsSimpleEff(granularity, name)

    if res is None:
      res = 'Idle'
    elif res[name] is None:
      res = 'Idle'
    else:
      res = res[name]

  except Exception, e:
    _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e)
    gLogger.exception(_msg)
    return {'Result': S_ERROR(_msg)}

  return {'Result': res}
def doCommand(self): """ Return getPilotsEff from Pilots Client """ super(PilotsEff_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs['PilotsClient'].getPilotsEff( self.args[0], self.args[1], self.args[2]) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand(self): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getMonitoredStatus` :params: :attr:`args`: a tuple - `args[0]`: string - should be a ValidElement - `args[1]`: string - should be the name of the ValidElement - `args[2]`: optional string - a ValidElement (get status of THIS ValidElement for name in args[1], will call getGeneralName) :returns: {'MonitoredStatus': 'Active'|'Probing'|'Banned'} """ super(MonitoredStatus_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: validElements = RssConfiguration.getValidElements() if len(self.args) == 3: if validElements.index(self.args[2]) >= validElements.index(self.args[0]): return {"Result": S_ERROR("Error in MonitoredStatus_Command")} toBeFound = self.APIs["ResourceStatusClient"].getGeneralName(self.args[0], self.args[1], self.args[2])[ "Value" ] else: toBeFound = self.args[1] res = self.APIs["ResourceStatusClient"].getMonitoredStatus(self.args[2], toBeFound) if res["OK"]: res = res["Value"] if res: res = S_OK(res[0][0]) else: res = S_OK(None) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand( self ): """ Uses :meth:`DIRAC.ResourceStatusSystem.Client.ResourceStatusClient.getMonitoredStatus` :params: :attr:`args`: a tuple - `args[0]`: string - should be a ValidElement - `args[1]`: string - should be the name of the ValidElement - `args[2]`: optional string - a ValidElement (get status of THIS ValidElement for name in args[1], will call getGeneralName) :returns: {'MonitoredStatus': 'Active'|'Probing'|'Banned'} """ super( MonitoredStatus_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: validElements = RssConfiguration.getValidElements() if len( self.args ) == 3: if validElements.index( self.args[2] ) >= validElements.index( self.args[0] ): return { 'Result' : S_ERROR( 'Error in MonitoredStatus_Command' ) } toBeFound = self.APIs[ 'ResourceStatusClient' ].getGeneralName( self.args[0], self.args[1], self.args[2] )[ 'Value' ] else: toBeFound = self.args[1] res = self.APIs[ 'ResourceStatusClient' ].getMonitoredStatus( self.args[2], toBeFound ) if res[ 'OK' ]: res = res[ 'Value' ] if res: res = S_OK( res[ 0 ][ 0 ] ) else: res = S_OK( None ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self, RSClientIn=None): """ Returns simple pilots efficiency :attr:`args`: - args[0]: string - should be a ValidElement - args[1]: string - should be the name of the ValidElement returns: { 'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad' } """ super(PilotsEffSimple_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: if self.args[0] == "Service": name = self.APIs["ResourceStatusClient"].getGeneralName(self.args[0], self.args[1], "Site") name = name["Value"][0] granularity = "Site" elif self.args[0] in ["Site", "Resource"]: name = self.args[1] granularity = self.args[0] else: return {"Result": S_ERROR("%s is not a valid granularity" % self.args[0])} res = self.APIs["PilotsClient"].getPilotsSimpleEff(granularity, name) if res is None: res = "Idle" elif res[name] is None: res = "Idle" else: res = res[name] except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand(self ): """ Returns simple jobs efficiency :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement returns: { 'Result': 'Good'|'Fair'|'Poor'|'Idle'|'Bad' } """ super (JobsEffSimple_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if self.args[0] == 'Service': name = self.APIs[ 'ResourceStatusClient' ].getGeneralName( self.args[0], self.args[1], 'Site' ) name = name[ 'Value' ][ 0 ] granularity = 'Site' elif self.args[0] == 'Site': name = self.args[1] granularity = self.args[0] else: return { 'Result' : S_ERROR( '%s is not a valid granularity' % self.args[ 0 ] ) } res = self.APIs[ 'JobsClient' ].getJobsSimpleEff( name ) if res == None: res = S_OK( 'Idle' ) else: res = S_OK( res[ name ] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ): """ Return getTicketsList from GGUSTickets Client `args`: - args[0]: string: should be the name of the site """ super( GGUSTickets_Open, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = callClient( self.args[1], self.APIs[ 'GGUSTicketsClient' ] ) if res[ 'OK' ]: res = S_OK( res[ 'Value' ][ 0 ][ 'open' ] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Returns last hour system charge, and the system charge of an hour before returns: { 'LastHour': n_lastHour 'anHourBefore': n_anHourBefore } """ super(SystemCharge_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = self.APIs[ 'JobsClient' ].getSystemCharge() except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Return getTicketsList from GGUSTickets Client `args`: - args[0]: string: should be the name of the site """ super(GGUSTickets_Open, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = callClient(self.args[1], self.APIs['GGUSTicketsClient']) if res['OK']: res = S_OK(res['Value'][0]['open']) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand(self): """ Returns last hour system charge, and the system charge of an hour before returns: { 'LastHour': n_lastHour 'anHourBefore': n_anHourBefore } """ super(SystemCharge_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs['JobsClient'].getSystemCharge() except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def _executeCheck(self):
  '''
  Method executed by the threads in the pool. Picks one element from the
  common queue, and enforces policies on that element.
  '''
  # Init the APIs beforehand, and reuse them.
  __APIs__ = ['ResourceStatusClient', 'ResourceManagementClient']
  clients = knownAPIs.initAPIs(__APIs__, {})

  pep = PEP(clients=clients)

  while True:

    toBeChecked = self.queue.get()

    pepDict = {'granularity': toBeChecked[0],
               'name': toBeChecked[1],
               'statusType': toBeChecked[2],
               'status': toBeChecked[3],
               'formerStatus': toBeChecked[4],
               'siteType': toBeChecked[5],
               'serviceType': toBeChecked[6],
               'tokenOwner': toBeChecked[7]}

    try:

      self.log.info("Checking Service %s, with type/status: %s/%s" %
                    (pepDict['name'], pepDict['statusType'], pepDict['status']))

      pepRes = pep.enforce(**pepDict)
      if 'PolicyCombinedResult' in pepRes and 'Status' in pepRes['PolicyCombinedResult']:
        pepStatus = pepRes['PolicyCombinedResult']['Status']
        if pepStatus != pepDict['status']:
          self.log.info('Updated %s %s from %s/%s to %s/%s' %
                        (pepDict['granularity'], pepDict['name'], pepDict['statusType'],
                         pepDict['status'], pepDict['statusType'], pepStatus))

    except Exception:
      self.log.exception("SeSInspector._executeCheck Checking Service %s, with type/status: %s/%s" %
                         (pepDict['name'], pepDict['statusType'], pepDict['status']))
def doCommand(self, sites=None):
  """
  Returns simple jobs efficiency for all the sites in input.

  :params:
    :attr:`sites`: list of site names (when not given, take every site)

  :returns:
    {'SiteName': {'JE_S': 'Good'|'Fair'|'Poor'|'Idle'|'Bad'}, ...}
  """

  self.APIs = initAPIs(self.__APIs__, self.APIs)

  try:

    if sites is None:
      sites = self.APIs['ResourceStatusClient'].getSite(meta={'columns': 'SiteName'})
      if not sites['OK']:
        return {'Result': sites}
      sites = [si[0] for si in sites['Value']]

    res = self.APIs['JobsClient'].getJobsSimpleEff(sites, self.APIs['WMSAdministrator'])

    if res is None:
      res = []

    resToReturn = {}

    for site in res:
      resToReturn[site] = {'JE_S': res[site]}

    res = S_OK(resToReturn)

  except Exception, e:
    _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e)
    gLogger.exception(_msg)
    return {'Result': S_ERROR(_msg)}

  return {'Result': res}
def doCommand(self): """ Use callClient to get GGUS info :attr:`args`: - args[0]: string: should be the name of the site """ super( GGUSTickets_Info, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = callClient( self.args[ 1 ], self.APIs[ 'GGUSTicketsClient' ] ) # if openTickets == 'Unknown': # return { 'GGUS_Info' : 'Unknown' } if res[ 'OK' ]: res = S_OK( res[ 'Value' ][ 2 ] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Use CallClient to get GGUS link :attr:`args`: - args[0]: string: should be the name of the site """ super(GGUSTickets_Link, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = callClient(self.args[1], self.APIs['GGUSTicketsClient']) #if openTickets == 'Unknown': # return { 'GGUS_Link':'Unknown' } if res['OK']: res = S_OK(res['Value'][1]) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand(self): """ Return getPeriods from ResourceStatus Client - args[0] should be a ValidElement - args[1] should be the name of the ValidElement - args[2] should be the present status - args[3] are the number of hours requested """ super(RSPeriods_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: res = self.APIs["ResourceStatusClient"].getPeriods(self.args[0], self.args[1], self.args[2], self.args[3]) except Exception, e: _msg = "%s (%s): %s" % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {"Result": S_ERROR(_msg)}
def doCommand( self ): """ Return getPeriods from ResourceStatus Client - args[0] should be a ValidElement - args[1] should be the name of the ValidElement - args[2] should be the present status - args[3] are the number of hours requested """ super( RSPeriods_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: res = self.APIs[ 'ResourceStatusClient' ].getPeriods( self.args[0], self.args[1], self.args[2], self.args[3] ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self, sources=None, SEs=None):
  """
  Returns failed transfers using the DIRAC accounting system for every SE
  for the last self.args[0] hours

  :params:
    :attr:`sources`: list of source sites (when not given, take every site)

    :attr:`SEs`: list of storage elements (when not given, take every SE)

  :returns:
  """

  super(FailedTransfersBySourceSplitted_Command, self).doCommand()
  self.APIs = initAPIs(self.__APIs__, self.APIs)

  try:

    if SEs is None:
      meta = {'columns': 'StorageElementName'}
      SEs = self.APIs['ResourceStatusClient'].getStorageElement(meta=meta)
      if not SEs['OK']:
        return {'Result': SEs}
      SEs = [se[0] for se in SEs['Value']]

    if sources is None:
      meta = {'columns': 'SiteName'}
      sources = self.APIs['ResourceStatusClient'].getSite(meta=meta)
      if not sources['OK']:
        return {'Result': sources}
      sources = [si[0] for si in sources['Value']]

    if not sources + SEs:
      return {'Result': S_ERROR('Sources + SEs is empty')}

    self.APIs['ReportsClient'].rpcClient = self.APIs['ReportGenerator']

    fromD = datetime.utcnow() - timedelta(hours=self.args[0])
    toD = datetime.utcnow()

    ft_source = self.APIs['ReportsClient'].getReport('DataOperation', 'FailedTransfers',
                                                     fromD, toD,
                                                     {'OperationType': 'putAndRegister',
                                                      'Source': sources + SEs,
                                                      'Destination': sources + SEs,
                                                      'FinalStatus': ['Failed']},
                                                     'Source')
    if not ft_source['OK']:
      return {'Result': ft_source}
    ft_source = ft_source['Value']

    listOfSources = ft_source['data'].keys()
    plotGran = ft_source['granularity']

    singlePlots = {}

    for source in listOfSources:
      if source in sources:
        plot = {}
        plot['data'] = {source: ft_source['data'][source]}
        plot['granularity'] = plotGran
        singlePlots[source] = plot

    res = S_OK({'DataOperation': singlePlots})

  except Exception, e:
    _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e)
    gLogger.exception(_msg)
    return {'Result': S_ERROR(_msg)}

  return {'Result': res}
def initialize(self):
  # Attribute defined outside __init__
  # pylint: disable-msg=W0201

  try:

    self.rmClient = ResourceManagementClient()
    self.clientsInvoker = ClientsInvoker()

    commandsListClientsCache = [
        ('ClientsCache_Command', 'JobsEffSimpleEveryOne_Command'),
        ('ClientsCache_Command', 'PilotsEffSimpleEverySites_Command'),
        ('ClientsCache_Command', 'DTEverySites_Command'),
        ('ClientsCache_Command', 'DTEveryResources_Command')
    ]

    commandsListAccountingCache = [
        ('AccountingCache_Command', 'TransferQualityByDestSplitted_Command', (2, ), 'Always'),
        ('AccountingCache_Command', 'FailedTransfersBySourceSplitted_Command', (2, ), 'Always'),
        ('AccountingCache_Command', 'TransferQualityByDestSplittedSite_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'SuccessfullJobsBySiteSplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'FailedJobsBySiteSplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'SuccessfullPilotsBySiteSplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'FailedPilotsBySiteSplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'SuccessfullPilotsByCESplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'FailedPilotsByCESplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'RunningJobsBySiteSplitted_Command', (24, ), 'Hourly'),
        ('AccountingCache_Command', 'RunningJobsBySiteSplitted_Command', (168, ), 'Hourly'),
        ('AccountingCache_Command', 'RunningJobsBySiteSplitted_Command', (720, ), 'Daily'),
        ('AccountingCache_Command', 'RunningJobsBySiteSplitted_Command', (8760, ), 'Daily'),
    ]

    commandsVOBOXAvailability = ('VOBOXAvailabilityCommand', 'VOBOXAvailabilityCommand', )
    commandsSpaceTokenOccupancy = ('SpaceTokenOccupancyCommand', 'SpaceTokenOccupancyCommand', )

    self.commandObjectsListClientsCache = []
    self.commandObjectsListAccountingCache = []
    self.commandObjectsVOBOXAvailability = []
    self.commandObjectsSpaceTokenOccupancy = []

    cc = CommandCaller()

    # We know beforehand which APIs we are going to need, so we initialize them
    # first, making everything faster.
    knownAPIs = ['ResourceStatusClient', 'WMSAdministrator', 'ReportGenerator',
                 'JobsClient', 'PilotsClient', 'GOCDBClient', 'ReportsClient']
    knownAPIs = initAPIs(knownAPIs, {})

    for command in commandsListClientsCache:

      cObj = cc.setCommandObject(command)
      for apiName, apiInstance in knownAPIs.items():
        cc.setAPI(cObj, apiName, apiInstance)

      self.commandObjectsListClientsCache.append((command, cObj))

    for command in commandsListAccountingCache:

      cObj = cc.setCommandObject(command)
      for apiName, apiInstance in knownAPIs.items():
        cc.setAPI(cObj, apiName, apiInstance)

      cArgs = command[2]

      self.commandObjectsListAccountingCache.append((command, cObj, cArgs))

    for cArgs in self.__getVOBOXAvailabilityCandidates():

      cObj = cc.setCommandObject(commandsVOBOXAvailability)
      self.commandObjectsVOBOXAvailability.append((commandsVOBOXAvailability, cObj, cArgs))

    for cArgs in self.__getSpaceTokenOccupancyCandidates():

      cObj = cc.setCommandObject(commandsSpaceTokenOccupancy)
      self.commandObjectsSpaceTokenOccupancy.append((commandsSpaceTokenOccupancy, cObj, cArgs))

    return S_OK()

  except Exception:
    errorStr = "CacheFeederAgent initialization"
    self.log.exception(errorStr)
    return S_ERROR(errorStr)
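def _exampleFeedOneList(self):
  """
  Illustrative only, not part of the original agent: how execute() might drive
  the command objects built in initialize(). It assumes ClientsInvoker exposes
  setCommand()/doCommand() and that command objects take their arguments via
  setArgs(); both names are assumptions here, not taken from this snippet.
  """
  for command, cObj, cArgs in self.commandObjectsListAccountingCache:
    cObj.setArgs(cArgs)                   # assumed setter on the base Command
    self.clientsInvoker.setCommand(cObj)  # assumed ClientsInvoker API
    result = self.clientsInvoker.doCommand()
    self.log.verbose('%s( %s ) -> %s' % (command[1], cArgs, result))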
def doCommand(self, sites=None): """ Returns downtimes information for all the sites in input. :params: :attr:`sites`: list of site names (when not given, take every site) :returns: {'SiteName': {'SEVERITY': 'OUTAGE'|'AT_RISK', 'StartDate': 'aDate', ...} ... } """ self.APIs = initAPIs(self.__APIs__, self.APIs) try: if sites is None: GOC_sites = self.APIs['ResourceStatusClient'].getGridSite( meta={'columns': 'GridSiteName'}) if not GOC_sites['OK']: return {'Result': GOC_sites} GOC_sites = [gs[0] for gs in GOC_sites['Value']] else: GOC_sites = [getGOCSiteName(x)['Value'] for x in sites] resGOC = self.APIs['GOCDBClient'].getStatus( 'Site', GOC_sites, None, 120) if not resGOC['OK']: return {'Result': resGOC} resGOC = resGOC['Value'] if resGOC == None: resGOC = [] res = {} for dt_ID in resGOC: try: dt = {} dt['ID'] = dt_ID dt['StartDate'] = resGOC[dt_ID]['FORMATED_START_DATE'] dt['EndDate'] = resGOC[dt_ID]['FORMATED_END_DATE'] dt['Severity'] = resGOC[dt_ID]['SEVERITY'] dt['Description'] = resGOC[dt_ID]['DESCRIPTION'].replace( '\'', '') dt['Link'] = resGOC[dt_ID]['GOCDB_PORTAL_URL'] DIRACnames = getDIRACSiteName(res[dt_ID]['SITENAME']) if not DIRACnames['OK']: return {'Result': DIRACnames} for DIRACname in DIRACnames['Value']: res[dt_ID.split()[0] + ' ' + DIRACname] = dt except KeyError: continue res = S_OK(res) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand(self): """ Return getStatus from GOC DB Client :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string: should be the name of the ValidElement - args[2]: string: optional, number of hours in which the down time is starting """ timeFormat = "%Y-%m-%d %H:%M" super(GOCDBStatus_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: granularity = self.args[0] name = self.args[1] if len(self.args) > 2: hours = self.args[2] else: hours = None if granularity == 'Site': name = getGOCSiteName(name)['Value'] res = self.APIs['GOCDBClient'].getStatus(granularity, name, None, hours) if not res['OK']: return {'Result': res} res = res['Value'] if res is None or res == {}: return {'Result': S_OK({'DT': None})} DT_dict_result = {} now = datetime.utcnow().replace(microsecond=0, second=0) if len(res) > 1: #there's more than one DT resDT = None for dt_ID in res: #looking for an ongoing one startSTR = res[dt_ID]['FORMATED_START_DATE'] start_datetime = datetime.strptime(startSTR, timeFormat) if start_datetime < now: resDT = res[dt_ID] break #if I'm here, there's no OnGoing DT if resDT is None: resDT = res[res.keys()[0]] res = resDT else: res = res[res.keys()[0]] DT_dict_result['DT'] = res['SEVERITY'] DT_dict_result['EndDate'] = res['FORMATED_END_DATE'] startSTR = res['FORMATED_START_DATE'] start_datetime = datetime.strptime(startSTR, timeFormat) if start_datetime > now: diff = convertTime(start_datetime - now, 'hours') DT_dict_result['DT'] = DT_dict_result['DT'] + " in " + str( diff) + ' hours' res = S_OK(DT_dict_result) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
def doCommand( self ): """ Returns jobs accounting info for sites in the last 24h `args`: - args[0]: string - should be a ValidElement - args[1]: string - should be the name of the ValidElement - args[2]: string - should be 'Job' or 'Pilot' or 'DataOperation' or 'WMSHistory' (??) or 'SRM' (??) - args[3]: string - should be the plot to generate (e.g. CPUEfficiency) - args[4]: dictionary - e.g. {'Format': 'LastHours', 'hours': 24} - args[5]: string - should be the grouping - args[6]: dictionary - optional conditions """ super( DIRACAccounting_Command, self ).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) self.APIs[ 'ReportsClient' ].rpcClient = self.APIs[ 'ReportGenerator' ] try: granularity = self.args[0] name = self.args[1] accounting = self.args[2] plot = self.args[3] period = self.args[4] grouping = self.args[5] if period[ 'Format' ] == 'LastHours': fromT = datetime.utcnow() - timedelta( hours = period[ 'hours' ] ) toT = datetime.utcnow() elif period[ 'Format' ] == 'Periods': #TODO pass if self.args[6] is not None: conditions = self.args[6] else: conditions = {} if accounting == 'Job' or accounting == 'Pilot': if granularity == 'Resource': conditions[ 'GridCE' ] = [ name ] elif granularity == 'Service': conditions[ 'Site' ] = [ name.split('@').pop() ] elif granularity == 'Site': conditions[ 'Site' ] = [ name ] else: return { 'Result' : S_ERROR( '%s is not a valid granularity' % granularity ) } elif accounting == 'DataOperation': conditions[ 'Destination' ] = [ name ] res = self.APIs[ 'ReportsClient' ].getReport( accounting, plot, fromT, toT, conditions, grouping ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand(self): """ Returns DT info that are cached. :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string should be the name of the ValidElement - args[2]: string: optional, number of hours in which the down time is starting """ timeFormat = "%Y-%m-%d %H:%M" super(DTInfo_Cached_Command, self).doCommand() self.APIs = initAPIs(self.__APIs__, self.APIs) try: granularity = self.args[0] name = self.args[1] now = datetime.utcnow().replace(microsecond=0, second=0) if granularity == 'Site': commandName = 'DTEverySites' elif granularity == 'Resource': commandName = 'DTEveryResources' meta = {'columns': 'opt_ID'} res = self.APIs['ResourceManagementClient'].getClientCache( name=name, commandName=commandName, meta=meta) if not res['OK']: return {'Result': res} res = res['Value'] #CachedResult clientDict = { 'name': name, 'commandName': commandName, 'value': None, 'opt_ID': None, 'meta': { 'columns': 'Result' } } if len(res) > 1: #there's more than one DT dt_ID_startingSoon = res[0] clientDict['value'] = 'StartDate' clientDict['optID'] = dt_ID_startingSoon clientDict['meta'] = {'columns': 'Result'} startSTR_startingSoon = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if startSTR_startingSoon: startSTR_startingSoon = startSTR_startingSoon[0][0] clientDict['value'] = 'EndDate' clientDict['optID'] = dt_ID_startingSoon clientDict['meta'] = {'columns': 'Result'} endSTR_startingSoon = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if endSTR_startingSoon: endSTR_startingSoon = endSTR_startingSoon[0][0] start_datetime_startingSoon = datetime.strptime( startSTR_startingSoon, timeFormat) end_datetime_startingSoon = datetime.strptime( endSTR_startingSoon, timeFormat) DT_ID = None if start_datetime_startingSoon < now: if end_datetime_startingSoon > now: #ongoing downtime found! DT_ID = dt_ID_startingSoon if DT_ID is None: for dt_ID in res[1:]: #looking for an ongoing one clientDict['value'] = 'StartDate' clientDict['optID'] = dt_ID clientDict['meta'] = {'columns': 'Result'} startSTR = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if startSTR: startSTR = startSTR[0][0] clientDict['value'] = 'EndDate' clientDict['optID'] = dt_ID clientDict['meta'] = {'columns': 'Result'} endSTR = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if endSTR: endSTR = endSTR[0][0] start_datetime = datetime.strptime( startSTR, timeFormat) end_datetime = datetime.strptime(endSTR, timeFormat) if start_datetime < now: if end_datetime > now: #ongoing downtime found! 
DT_ID = dt_ID break if start_datetime < start_datetime_startingSoon: #the DT starts before the former considered one dt_ID_startingSoon = dt_ID if DT_ID is None: #if I'm here, there's no OnGoing DT DT_ID = dt_ID_startingSoon else: DT_ID = res[0] DT_dict_result = {} clientDict['value'] = 'EndDate' clientDict['optID'] = DT_ID clientDict['meta'] = {'columns': 'Result'} endSTR = self.APIs['ResourceManagementClient'].getClientCache( **clientDict)['Value'] if endSTR: endSTR = endSTR[0][0] end_datetime = datetime.strptime(endSTR, timeFormat) if end_datetime < now: return {'Result': S_OK({'DT': None})} DT_dict_result['EndDate'] = endSTR clientDict['value'] = 'Severity' clientDict['optID'] = DT_ID clientDict['meta'] = {'columns': 'Result'} DT_dict_result['DT'] = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if DT_dict_result['DT']: DT_dict_result['DT'] = DT_dict_result['DT'][0][0] clientDict['value'] = 'StartDate' clientDict['optID'] = DT_ID clientDict['meta'] = {'columns': 'Result'} DT_dict_result['StartDate'] = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if DT_dict_result['StartDate']: DT_dict_result['StartDate'] = DT_dict_result['StartDate'][0][0] clientDict['value'] = 'Description' clientDict['optID'] = DT_ID clientDict['meta'] = {'columns': 'Result'} DT_dict_result['Description'] = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if DT_dict_result['Description']: DT_dict_result['Description'] = DT_dict_result['Description'][ 0][0] clientDict['value'] = 'Link' clientDict['optID'] = DT_ID clientDict['meta'] = {'columns': 'Result'} DT_dict_result['Link'] = self.APIs[ 'ResourceManagementClient'].getClientCache( **clientDict)['Value'] if DT_dict_result['Link']: DT_dict_result['Link'] = DT_dict_result['Link'][0][0] start_datetime = datetime.strptime(DT_dict_result['StartDate'], timeFormat) if start_datetime > now: self.args[2] diff = convertTime(start_datetime - now, 'hours') if diff > self.args[2]: return {'Result': S_OK({'DT': None})} DT_dict_result['DT'] = DT_dict_result['DT'] + " in " + str( diff) + ' hours' res = S_OK(DT_dict_result) except Exception, e: _msg = '%s (%s): %s' % (self.__class__.__name__, self.args, e) gLogger.exception(_msg) return {'Result': S_ERROR(_msg)}
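def _cachedDowntimeColumn(self, name, commandName, dtID, column):
  """
  Illustrative helper, not part of the original class: factors out the repeated
  getClientCache() lookups in DTInfo_Cached_Command.doCommand above, where only
  the column ('StartDate', 'EndDate', 'Severity', ...) and the downtime ID vary.
  It assumes 'opt_ID' is the key the client expects; doCommand above fills both
  'opt_ID' and 'optID'. Returns the cached string or None.
  """
  clientDict = {'name': name,
                'commandName': commandName,
                'value': column,
                'opt_ID': dtID,
                'meta': {'columns': 'Result'}}
  cached = self.APIs['ResourceManagementClient'].getClientCache(**clientDict)['Value']
  if cached:
    return cached[0][0]
  return None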
def doCommand(self): """ Return getStatus from GOC DB Client :attr:`args`: - args[0]: string: should be a ValidElement - args[1]: string: should be the name of the ValidElement - args[2]: string: optional, number of hours in which the down time is starting """ timeFormat = "%Y-%m-%d %H:%M" super(GOCDBStatus_Command, self).doCommand() self.APIs = initAPIs( self.__APIs__, self.APIs ) try: granularity = self.args[0] name = self.args[1] if len( self.args ) > 2: hours = self.args[2] else: hours = None if granularity == 'Site': name = getGOCSiteName( name )[ 'Value' ] res = self.APIs[ 'GOCDBClient' ].getStatus( granularity, name, None, hours ) if not res['OK']: return { 'Result' : res } res = res['Value'] if res is None or res == {}: return { 'Result' : S_OK( { 'DT' : None } ) } DT_dict_result = {} now = datetime.utcnow().replace( microsecond = 0, second = 0 ) if len( res ) > 1: #there's more than one DT resDT = None for dt_ID in res: #looking for an ongoing one startSTR = res[ dt_ID ][ 'FORMATED_START_DATE' ] start_datetime = datetime.strptime( startSTR, timeFormat ) if start_datetime < now: resDT = res[ dt_ID ] break #if I'm here, there's no OnGoing DT if resDT is None: resDT = res[res.keys()[0]] res = resDT else: res = res[res.keys()[0]] DT_dict_result['DT'] = res['SEVERITY'] DT_dict_result['EndDate'] = res['FORMATED_END_DATE'] startSTR = res['FORMATED_START_DATE'] start_datetime = datetime.strptime( startSTR, timeFormat ) if start_datetime > now: diff = convertTime( start_datetime - now, 'hours' ) DT_dict_result[ 'DT' ] = DT_dict_result['DT'] + " in " + str( diff ) + ' hours' res = S_OK( DT_dict_result ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self, sites = None ): """ Returns downtimes information for all the sites in input. :params: :attr:`sites`: list of site names (when not given, take every site) :returns: {'SiteName': {'SEVERITY': 'OUTAGE'|'AT_RISK', 'StartDate': 'aDate', ...} ... } """ self.APIs = initAPIs( self.__APIs__, self.APIs ) try: if sites is None: GOC_sites = self.APIs[ 'ResourceStatusClient' ].getGridSite( meta = { 'columns' : 'GridSiteName' }) if not GOC_sites['OK']: return { 'Result' : GOC_sites } GOC_sites = [ gs[0] for gs in GOC_sites['Value'] ] else: GOC_sites = [ getGOCSiteName( x )['Value'] for x in sites ] resGOC = self.APIs[ 'GOCDBClient' ].getStatus( 'Site', GOC_sites, None, 120 ) if not resGOC['OK']: return { 'Result' : resGOC } resGOC = resGOC['Value'] if resGOC == None: resGOC = [] res = {} for dt_ID in resGOC: try: dt = {} dt['ID'] = dt_ID dt['StartDate'] = resGOC[dt_ID]['FORMATED_START_DATE'] dt['EndDate'] = resGOC[dt_ID]['FORMATED_END_DATE'] dt['Severity'] = resGOC[dt_ID]['SEVERITY'] dt['Description'] = resGOC[dt_ID]['DESCRIPTION'].replace( '\'', '' ) dt['Link'] = resGOC[dt_ID]['GOCDB_PORTAL_URL'] DIRACnames = getDIRACSiteName( res[dt_ID]['SITENAME'] ) if not DIRACnames['OK']: return { 'Result' : DIRACnames } for DIRACname in DIRACnames['Value']: res[dt_ID.split()[0] + ' ' + DIRACname] = dt except KeyError: continue res = S_OK( res ) except Exception, e: _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e ) gLogger.exception( _msg ) return { 'Result' : S_ERROR( _msg ) }
def doCommand( self ):
  """
  Returns DT info that are cached.

  :attr:`args`:
    - args[0]: string: should be a ValidElement

    - args[1]: string: should be the name of the ValidElement

    - args[2]: string: optional, number of hours in which the downtime is starting
  """

  timeFormat = "%Y-%m-%d %H:%M"

  super( DTInfo_Cached_Command, self ).doCommand()
  self.APIs = initAPIs( self.__APIs__, self.APIs )

  try:

    granularity = self.args[0]
    name        = self.args[1]

    now = datetime.utcnow().replace( microsecond = 0, second = 0 )

    if granularity == 'Site':
      commandName = 'DTEverySites'
    elif granularity == 'Resource':
      commandName = 'DTEveryResources'
    else:
      return { 'Result' : S_ERROR( '%s is not a valid granularity' % granularity ) }

    meta = { 'columns' : 'opt_ID' }
    res  = self.APIs[ 'ResourceManagementClient' ].getClientCache( name = name,
                                                                   commandName = commandName,
                                                                   meta = meta )
    if not res[ 'OK' ]:
      return { 'Result' : res }
    res = res[ 'Value' ]

    # template for the repeated cache lookups; the key is kept as 'opt_ID'
    # to match the column name queried above
    clientDict = { 'name'        : name,
                   'commandName' : commandName,
                   'value'       : None,
                   'opt_ID'      : None,
                   'meta'        : { 'columns' : 'Result' } }

    if len( res ) > 1:
      # there is more than one DT

      dt_ID_startingSoon = res[0]

      clientDict[ 'value' ]  = 'StartDate'
      clientDict[ 'opt_ID' ] = dt_ID_startingSoon
      startSTR_startingSoon = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
      if startSTR_startingSoon:
        startSTR_startingSoon = startSTR_startingSoon[0][0]

      clientDict[ 'value' ]  = 'EndDate'
      clientDict[ 'opt_ID' ] = dt_ID_startingSoon
      endSTR_startingSoon = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
      if endSTR_startingSoon:
        endSTR_startingSoon = endSTR_startingSoon[0][0]

      start_datetime_startingSoon = datetime.strptime( startSTR_startingSoon, timeFormat )
      end_datetime_startingSoon   = datetime.strptime( endSTR_startingSoon, timeFormat )

      DT_ID = None

      if start_datetime_startingSoon < now and end_datetime_startingSoon > now:
        # ongoing downtime found
        DT_ID = dt_ID_startingSoon

      if DT_ID is None:

        for dt_ID in res[1:]:
          # looking for an ongoing one

          clientDict[ 'value' ]  = 'StartDate'
          clientDict[ 'opt_ID' ] = dt_ID
          startSTR = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
          if startSTR:
            startSTR = startSTR[0][0]

          clientDict[ 'value' ]  = 'EndDate'
          clientDict[ 'opt_ID' ] = dt_ID
          endSTR = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
          if endSTR:
            endSTR = endSTR[0][0]

          start_datetime = datetime.strptime( startSTR, timeFormat )
          end_datetime   = datetime.strptime( endSTR, timeFormat )

          if start_datetime < now and end_datetime > now:
            # ongoing downtime found
            DT_ID = dt_ID
            break

          if start_datetime < start_datetime_startingSoon:
            # this DT starts before the one considered so far
            dt_ID_startingSoon = dt_ID

        if DT_ID is None:
          # no ongoing DT: take the one starting soonest
          DT_ID = dt_ID_startingSoon

    else:
      DT_ID = res[0]

    DT_dict_result = {}

    clientDict[ 'value' ]  = 'EndDate'
    clientDict[ 'opt_ID' ] = DT_ID
    endSTR = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
    if endSTR:
      endSTR = endSTR[0][0]
    end_datetime = datetime.strptime( endSTR, timeFormat )
    if end_datetime < now:
      return { 'Result' : S_OK( { 'DT' : None } ) }
    DT_dict_result[ 'EndDate' ] = endSTR

    clientDict[ 'value' ]  = 'Severity'
    clientDict[ 'opt_ID' ] = DT_ID
    DT_dict_result[ 'DT' ] = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
    if DT_dict_result[ 'DT' ]:
      DT_dict_result[ 'DT' ] = DT_dict_result[ 'DT' ][0][0]

    clientDict[ 'value' ]  = 'StartDate'
    clientDict[ 'opt_ID' ] = DT_ID
    DT_dict_result[ 'StartDate' ] = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
    if DT_dict_result[ 'StartDate' ]:
      DT_dict_result[ 'StartDate' ] = DT_dict_result[ 'StartDate' ][0][0]

    clientDict[ 'value' ]  = 'Description'
    clientDict[ 'opt_ID' ] = DT_ID
    DT_dict_result[ 'Description' ] = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
    if DT_dict_result[ 'Description' ]:
      DT_dict_result[ 'Description' ] = DT_dict_result[ 'Description' ][0][0]

    clientDict[ 'value' ]  = 'Link'
    clientDict[ 'opt_ID' ] = DT_ID
    DT_dict_result[ 'Link' ] = self.APIs[ 'ResourceManagementClient' ].getClientCache( **clientDict )[ 'Value' ]
    if DT_dict_result[ 'Link' ]:
      DT_dict_result[ 'Link' ] = DT_dict_result[ 'Link' ][0][0]

    start_datetime = datetime.strptime( DT_dict_result[ 'StartDate' ], timeFormat )

    if start_datetime > now and len( self.args ) > 2:
      # the downtime is in the future: report it only if it starts within
      # the requested number of hours (args[2], optional)
      diff = convertTime( start_datetime - now, 'hours' )
      if diff > self.args[2]:
        return { 'Result' : S_OK( { 'DT' : None } ) }
      DT_dict_result[ 'DT' ] = DT_dict_result[ 'DT' ] + " in " + str( diff ) + ' hours'

    res = S_OK( DT_dict_result )

  except Exception as e:
    _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e )
    gLogger.exception( _msg )
    return { 'Result' : S_ERROR( _msg ) }

  return { 'Result' : res }
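# Sketch (not part of the original module): the "DT ... in N hours" decision above
# expressed in plain datetime arithmetic. convertTime( delta, 'hours' ) is assumed
# to behave like the helper below, i.e. to express a timedelta in fractional hours.
def _hoursUntil( start_datetime, now ):
  delta = start_datetime - now
  return delta.days * 24 + delta.seconds / 3600.0

# e.g. a downtime starting in 5 hours is reported when the caller asked for a
# 12-hour horizon, and suppressed when the horizon is only 2 hours:
#   _hoursUntil( start, now ) <= hoursThreshold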
def doCommand( self, sources = None, SEs = None ):
  """
  Returns transfer quality using the DIRAC accounting system for every SE
  of a single site, for the last self.args[0] hours.

  :params:
    :attr:`sources`: list of source sites (when not given, take every site)

    :attr:`SEs`: list of storage elements (when not given, take every SE)

  :returns:
  """

  super( TransferQualityByDestSplittedSite_Command, self ).doCommand()
  self.APIs = initAPIs( self.__APIs__, self.APIs )

  try:

    if SEs is None:
      SEs = self.APIs[ 'ResourceStatusClient' ].getStorageElement( meta = { 'columns' : 'StorageElementName' } )
      if not SEs[ 'OK' ]:
        return { 'Result' : SEs }
      SEs = [ se[0] for se in SEs[ 'Value' ] ]

    if sources is None:
      sources = self.APIs[ 'ResourceStatusClient' ].getSite( meta = { 'columns' : 'SiteName' } )
      if not sources[ 'OK' ]:
        return { 'Result' : sources }
      sources = [ si[0] for si in sources[ 'Value' ] ]

    if not sources + SEs:
      return { 'Result' : S_ERROR( 'Sources + SEs is empty' ) }

    self.APIs[ 'ReportsClient' ].rpcClient = self.APIs[ 'ReportGenerator' ]

    fromD = datetime.utcnow() - timedelta( hours = self.args[0] )
    toD   = datetime.utcnow()

    qualityAll = self.APIs[ 'ReportsClient' ].getReport( 'DataOperation', 'Quality', fromD, toD,
                                                         { 'OperationType' : 'putAndRegister',
                                                           'Source'        : sources + SEs,
                                                           'Destination'   : sources + SEs },
                                                         'Destination' )
    if not qualityAll[ 'OK' ]:
      return { 'Result' : qualityAll }
    qualityAll = qualityAll[ 'Value' ]

    listOfDest = qualityAll[ 'data' ].keys()

    storSitesWeb = self.APIs[ 'ResourceStatusClient' ].getMonitoredsStatusWeb( 'StorageElement',
                                                                               { 'StorageElementName' : listOfDest },
                                                                               0, 300 )
    if not storSitesWeb[ 'OK' ]:
      return { 'Result' : storSitesWeb }
    storSitesWeb = storSitesWeb[ 'Value' ][ 'Records' ]

    SESiteMapping = {}
    siteSEMapping = {}

    for r in storSitesWeb:
      sites                 = r[2].split( ' ' )[ :-1 ]
      SESiteMapping[ r[0] ] = sites

    # invert the SE -> sites mapping into a site -> SEs mapping
    for SE in SESiteMapping.keys():
      for site in SESiteMapping[ SE ]:
        siteSEMapping.setdefault( site, [] ).append( SE )

    plotGran = qualityAll[ 'granularity' ]

    singlePlots = {}

    for site in siteSEMapping.keys():
      plot           = {}
      plot[ 'data' ] = {}
      for SE in siteSEMapping[ site ]:
        plot[ 'data' ][ SE ] = qualityAll[ 'data' ][ SE ]
      plot[ 'granularity' ] = plotGran
      singlePlots[ site ]   = plot

    res = S_OK( { 'DataOperation' : singlePlots } )

  except Exception as e:
    _msg = '%s (%s): %s' % ( self.__class__.__name__, self.args, e )
    gLogger.exception( _msg )
    return { 'Result' : S_ERROR( _msg ) }

  return { 'Result' : res }
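# Sketch (not part of the original module): the per-site split performed above,
# reduced to plain dictionaries. Given an SE -> quality-series mapping and an
# SE -> sites mapping, it groups the series by site. All names are illustrative.
def _splitQualityBySite( qualityData, seToSites, granularity ):
  singlePlots = {}
  for se, sites in seToSites.items():
    for site in sites:
      plot = singlePlots.setdefault( site, { 'data' : {}, 'granularity' : granularity } )
      plot[ 'data' ][ se ] = qualityData.get( se, {} )
  return singlePlots

# e.g.
#   _splitQualityBySite( { 'SE1' : { '12:00' : 0.99 } },
#                        { 'SE1' : [ 'LCG.SiteA.ch' ] }, 3600 )
#   -> { 'LCG.SiteA.ch' : { 'data' : { 'SE1' : { '12:00' : 0.99 } }, 'granularity' : 3600 } }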