Code Example #1
 def getPlotData( self ):
   retVal = self.__parseFormParams()
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   params = retVal[ 'Value' ]
   repClient = ReportsClient( rpcClient = getRPCClient( "Accounting/ReportGenerator" ) )
   retVal = repClient.getReport( *params )
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   rawData = retVal[ 'Value' ]
   groupKeys = rawData[ 'data' ].keys()
   groupKeys.sort()
   if 'granularity' in rawData:
     granularity = rawData[ 'granularity' ]
     data = rawData['data']
     tS = int( Time.toEpoch( params[2] ) )
     timeStart = tS - tS % granularity
     strData = "epoch,%s\n" % ",".join( groupKeys )
     for timeSlot in range( timeStart, int( Time.toEpoch( params[3] ) ), granularity ):
       lineData = [ str( timeSlot ) ]
       for key in groupKeys:
         if timeSlot in data[ key ]:
           lineData.append( str( data[ key ][ timeSlot ] ) )
         else:
           lineData.append( "" )
       strData += "%s\n" % ",".join( lineData )
   else:
     strData = "%s\n" % ",".join( groupKeys )
     strData += ",".join( [ str( rawData[ 'data' ][ k ] ) for k in groupKeys ] )
   response.headers['Content-type'] = 'text/csv'
   response.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % md5( str( params ) ).hexdigest()
   response.headers['Content-Length'] = len( strData )
   return strData
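The handler above turns the report returned by ReportsClient.getReport into a CSV attachment. Below is a minimal, standalone sketch of the gap-filling it applies when the report carries a 'granularity' key: one row per time slot, one column per group key, and an empty cell where a group has no value for that slot. The function name and sample data are illustrative, not part of the original handler.

def report_to_csv(data, granularity, epoch_start, epoch_end):
    # 'data' maps group key -> {epoch slot: value}, as in rawData['data'] above.
    group_keys = sorted(data)
    time_start = epoch_start - epoch_start % granularity
    lines = ["epoch,%s" % ",".join(group_keys)]
    for slot in range(time_start, epoch_end, granularity):
        row = [str(slot)]
        for key in group_keys:
            row.append(str(data[key][slot]) if slot in data[key] else "")
        lines.append(",".join(row))
    return "\n".join(lines) + "\n"

print(report_to_csv({"SiteA": {0: 3, 300: 5}, "SiteB": {300: 1}}, 300, 0, 600))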
Code Example #2
File: Test_ReportsClient.py  Project: DIRACGrid/DIRAC
def test_addAndRemove():

  # just inserting one record
  record = createAccountingRecord()
  record.setStartTime()
  record.setEndTime()
  res = gDataStoreClient.addRegister(record)
  assert res['OK']
  res = gDataStoreClient.commit()
  assert res['OK']

  rc = ReportsClient()

  res = rc.listReports('DataOperation')
  assert res['OK']

  res = rc.listUniqueKeyValues('DataOperation')
  assert res['OK']

  res = rc.getReport('DataOperation', 'Successful transfers',
                     datetime.datetime.utcnow(), datetime.datetime.utcnow(),
                     {}, 'Destination')
  assert res['OK']

  # now removing that record
  res = gDataStoreClient.remove(record)
  assert res['OK']
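All the snippets in this listing rely on the DIRAC result-dictionary convention that the asserts above check: every client call returns a dict with an 'OK' flag, a 'Value' payload on success, and a 'Message' on failure. A small sketch of that convention, using a hypothetical unwrap helper that is not part of DIRAC:

def unwrap(result):
    # Raise if the DIRAC-style result dict reports failure, else return its payload.
    if not result['OK']:
        raise RuntimeError(result.get('Message', 'unknown error'))
    return result['Value']

print(unwrap({'OK': True, 'Value': ['Successful transfers']}))  # -> the payload
# unwrap({'OK': False, 'Message': 'no such report'})            # would raise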
Code Example #3
def test_addAndRemoveStorageOccupancy():

    # just inserting one record
    record = createStorageOccupancyAccountingRecord()
    record.setStartTime()
    record.setEndTime()
    res = gDataStoreClient.addRegister(record)
    assert res['OK']
    res = gDataStoreClient.commit()
    assert res['OK']

    rc = ReportsClient()

    res = rc.listReports('StorageOccupancy')
    assert res['OK']

    res = rc.listUniqueKeyValues('StorageOccupancy')
    assert res['OK']

    res = rc.getReport('StorageOccupancy', 'Free and Used Space',
                       datetime.datetime.utcnow(), datetime.datetime.utcnow(),
                       {}, 'StorageElement')
    assert res['OK']

    # now removing that record
    res = gDataStoreClient.remove(record)
    assert res['OK']
Code Example #4
File: acct.py  Project: atsareg/BESDIRAC
 def getPlotData( self ):
   retVal = self.__parseFormParams()
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   params = retVal[ 'Value' ]
   repClient = ReportsClient( rpcClient = getRPCClient( "Accounting/ReportGenerator" ) )
   retVal = repClient.getReport( *params )
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   rawData = retVal[ 'Value' ]
   groupKeys = rawData[ 'data' ].keys()
   groupKeys.sort()
   if 'granularity' in rawData:
     granularity = rawData[ 'granularity' ]
     data = rawData['data']
     tS = int( Time.toEpoch( params[2] ) )
     timeStart = tS - tS % granularity
     strData = "epoch,%s\n" % ",".join( groupKeys )
     for timeSlot in range( timeStart, int( Time.toEpoch( params[3] ) ), granularity ):
       lineData = [ str( timeSlot ) ]
       for key in groupKeys:
         if timeSlot in data[ key ]:
           lineData.append( str( data[ key ][ timeSlot ] ) )
         else:
           lineData.append( "" )
       strData += "%s\n" % ",".join( lineData )
   else:
     strData = "%s\n" % ",".join( groupKeys )
     strData += ",".join( [ str( rawData[ 'data' ][ k ] ) for k in groupKeys ] )
   response.headers['Content-type'] = 'text/csv'
   response.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % md5( str( params ) ).hexdigest()
   response.headers['Content-Length'] = len( strData )
   return strData
Code Example #5
def test_addAndRemoveDataOperation():

    # just inserting one record
    record = createDataOperationAccountingRecord()
    record.setStartTime()
    record.setEndTime()
    res = gDataStoreClient.addRegister(record)
    assert res['OK']
    res = gDataStoreClient.commit()
    assert res['OK']

    rc = ReportsClient()

    res = rc.listReports('DataOperation')
    assert res['OK']

    res = rc.listUniqueKeyValues('DataOperation')
    assert res['OK']

    res = rc.getReport('DataOperation', 'Successful transfers',
                       datetime.datetime.utcnow(), datetime.datetime.utcnow(),
                       {}, 'Destination')
    assert res['OK']

    # now removing that record
    res = gDataStoreClient.remove(record)
    assert res['OK']
Code Example #6
File: WMSHistoryCorrector.py  Project: TaykYoku/DIRAC
    def _getHistoryData(self, timeSpan, groupToUse):
        """Get history data from Accounting WMSHistory database

        :param int timeSpan: time span
        :param str groupToUse: requested user group
        :return: dictionary with history data
        """
        reportsClient = ReportsClient()

        reportCondition = {"Status": ["Running"]}
        if not groupToUse:
            reportGrouping = "UserGroup"
        else:
            reportGrouping = "User"
            reportCondition = {"UserGroup": groupToUse}
        now = datetime.datetime.utcnow()
        result = reportsClient.getReport(
            "WMSHistory",
            "AverageNumberOfJobs",
            now - datetime.timedelta(seconds=timeSpan),
            now,
            reportCondition,
            reportGrouping,
            {"lastSeconds": timeSpan},
        )
        return result
Code Example #7
    def web_getPlotData(self):
        callback = {}
        retVal = self.__parseFormParams()
        if not retVal[ 'OK' ]:
            callback = {"success":"false", "error":retVal[ 'Message' ]}
            self.finish( callback )

        params = retVal[ 'Value' ]
        '''self.finish({'success' : 'true', 'result' : params})'''
        repClient = ReportsClient( rpcClient = RPCClient( "Accounting/ReportGenerator" ) )
        retVal = repClient.getReport(*params)
        if not retVal[ 'OK' ]:
            callback = {"success":"false", "error":retVal[ 'Message' ]}
            self.finish( callback )
        rawData = retVal[ 'Value' ]
        groupKeys = rawData[ 'data' ].keys()
        self.finish({'success' : 'true', 'result' : groupKeys})
Code Example #8
def getJobsHistory():
  result = gOAManager.authorize()
  if not result[ 'OK' ]:
    bottle.abort( 401, result[ 'Message' ] )
  condDict = {}
  if 'allOwners' not in bottle.request.params:
    condDict[ 'User' ] = gOAData.userName
  timeSpan = 86400
  if 'timeSpan' in bottle.request.params:
    try:
      timeSpan = max( 86400, int( bottle.request.params[ 'timeSpan' ] ) )
    except ValueError:
      bottle.abort( 400, "timeSpan has to be an integer!" )
  print "[DEBUG] condDict is %s" % condDict
  rpg = ReportsClient( rpcClient = getRPCClient("Accounting/ReportGenerator"), transferClient = getTransferClient("Accounting/ReportGenerator") )
  end = datetime.datetime.utcnow()
  start = end - datetime.timedelta( seconds = timeSpan )
  result = rpg.getReport( "WMSHistory", "NumberOfJobs", start, end, condDict, 'Status' )
  if not result[ 'OK' ]:
    bottle.abort( 500, "Server Error: %s" % result[ 'Message' ] )
  return result[ 'Value' ]  
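A standalone sketch of the timeSpan handling above: an optional request parameter is parsed as an integer, defaults to one day, and is never allowed to drop below one day (the max() call enforces the minimum). The plain dict stands in for bottle.request.params and the helper name is illustrative.

def resolve_time_span(params, default=86400):
    time_span = default
    if 'timeSpan' in params:
        try:
            time_span = max(default, int(params['timeSpan']))
        except ValueError:
            raise ValueError("timeSpan has to be an integer!")
    return time_span

print(resolve_time_span({'timeSpan': '7200'}))    # -> 86400, clamped to the minimum
print(resolve_time_span({'timeSpan': '172800'}))  # -> 172800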
Code Example #9
  def _getHistoryData(self, timeSpan, groupToUse):
    """ Get history data from Accounting WMSHistory database

        :param int timeSpan: time span
        :param str groupToUse: requested user group
        :return: dictionary with history data
    """
    reportsClient = ReportsClient()

    reportCondition = {'Status': ['Running']}
    if not groupToUse:
      reportGrouping = 'UserGroup'
    else:
      reportGrouping = 'User'
      reportCondition = {'UserGroup': groupToUse}
    now = Time.dateTime()
    result = reportsClient.getReport('WMSHistory', 'AverageNumberOfJobs',
                                     now - datetime.timedelta(seconds=timeSpan), now,
                                     reportCondition, reportGrouping,
                                     {'lastSeconds': timeSpan})
    return result
Code Example #10
    def _getHistoryData(self, timeSpan, groupToUse):
        """ Get history data from Accounting WMSHistory database

        :param int timeSpan: time span
        :param str groupToUse: requested user group
        :return: dictionary with history data
    """
        reportsClient = ReportsClient()

        reportCondition = {'Status': ['Running']}
        if not groupToUse:
            reportGrouping = 'UserGroup'
        else:
            reportGrouping = 'User'
            reportCondition = {'UserGroup': groupToUse}
        now = Time.dateTime()
        result = reportsClient.getReport(
            'WMSHistory', 'AverageNumberOfJobs',
            now - datetime.timedelta(seconds=timeSpan), now, reportCondition,
            reportGrouping, {'lastSeconds': timeSpan})
        return result
Code Example #11
class FailedPilotsByCESplitted_Command(Command):
    def doCommand(self, CEs=None):
        """ 
    Returns failed pilots using the DIRAC accounting system for every CE 
    for the last self.args[0] hours 
        
    :params:
      :attr:`CEs`: list of CEs (when not given, take every CE)

    :returns:
      
    """

        if CEs is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            CEs = RPC_RSS.getCEsList()
            if not CEs['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + CEs['Message']
            else:
                CEs = CEs['Value']

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        fromD = datetime.datetime.utcnow() - datetime.timedelta(
            hours=self.args[0])
        toD = datetime.datetime.utcnow()

        try:
            failed_pilots = self.client.getReport(
                'Pilot', 'NumberOfPilots', fromD, toD, {
                    'GridStatus': ['Aborted'],
                    'GridCE': CEs
                }, 'GridCE')
            if not failed_pilots['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + failed_pilots['Message']
            else:
                failed_pilots = failed_pilots['Value']

        except:
            gLogger.exception(
                "Exception when calling FailedPilotsByCESplitted_Command")
            return {}

        listOfCEs = failed_pilots['data'].keys()

        plotGran = failed_pilots['granularity']

        singlePlots = {}

        for CE in listOfCEs:
            if CE in CEs:
                plot = {}
                plot['data'] = {CE: failed_pilots['data'][CE]}
                plot['granularity'] = plotGran
                singlePlots[CE] = plot

        resToReturn = {'Pilot': singlePlots}

        return resToReturn

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code Example #12
class FailedTransfersBySourceSplitted_Command(Command):
  
  def doCommand(self, sources = None, SEs = None):
    """ 
    Returns failed transfer using the DIRAC accounting system for every SE 
    for the last self.args[0] hours 
        
    :params:
      :attr:`sources`: list of source sites (when not given, take every site)
    
      :attr:`SEs`: list of storage elements (when not given, take every SE)

    :returns:
      
    """

    if SEs is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      SEs = RPC_RSS.getStorageElementsList()
      if not SEs['OK']:
        raise RSSException, where(self, self.doCommand) + " " + SEs['Message'] 
      else:
        SEs = SEs['Value']
    
    if sources is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      sources = RPC_RSS.getSitesList()
      if not sources['OK']:
        raise RSSException, where(self, self.doCommand) + " " + sources['Message'] 
      else:
        sources = sources['Value']
    
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = self.args[0])
    toD = datetime.datetime.utcnow()

    try:
      ft_source = self.client.getReport('DataOperation', 'FailedTransfers', 
                                         fromD, toD, 
                                         {'OperationType':'putAndRegister', 
                                          'Source':sources + SEs, 'Destination':sources + SEs,
                                          'FinalStatus':['Failed']}, 
                                         'Source')
      if not ft_source['OK']:
        raise RSSException, where(self, self.doCommand) + " " + ft_source['Message'] 
      else:
        ft_source = ft_source['Value']

    except:
      gLogger.exception("Exception when calling FailedTransfersBySourceSplitted_Command")
      return {}
    
    listOfSources = ft_source['data'].keys()
    
    plotGran = ft_source['granularity']
    
    singlePlots = {}
    
    for source in listOfSources:
      if source in sources:
        plot = {}
        plot['data'] = {source: ft_source['data'][source]}
        plot['granularity'] = plotGran
        singlePlots[source] = plot
    
    resToReturn = {'DataOperation': singlePlots}

    return resToReturn


  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
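The last part of doCommand above splits one aggregated accounting report into a per-source dictionary of single plots, all sharing the report's granularity. A standalone sketch of that splitting, with plain dicts standing in for the accounting payload:

def split_plots(report, wanted):
    # 'report' mirrors the getReport payload: {'granularity': ..., 'data': {name: series}}.
    single_plots = {}
    for name, series in report['data'].items():
        if name in wanted:
            single_plots[name] = {'data': {name: series},
                                  'granularity': report['granularity']}
    return single_plots

report = {'granularity': 3600, 'data': {'SiteA': {0: 2}, 'SiteB': {0: 7}}}
print(split_plots(report, ['SiteA']))  # only the requested sources are kept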
Code Example #13
class TransferQualityCommand( Command ):

  def __init__( self, args = None, clients = None ):
    
    super( TransferQualityCommand, self ).__init__( args, clients )
    
    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis[ 'ReportGenerator' ]
    else:
      self.rgClient = RPCClient( 'Accounting/ReportGenerator' ) 

    if 'ReportsClient' in self.apis:
      self.rClient = self.apis[ 'ReportsClient' ]
    else:
      self.rClient = ReportsClient() 

    self.rClient.rpcClient = self.rgClient

  def doCommand( self ):
    """ 
    Return getQuality from DIRAC's accounting ReportsClient
    
    `args`: a tuple
      - args[0]: string: should be a ValidElement

      - args[1]: string should be the name of the ValidElement

      - args[2]: optional dateTime object: a "from" date
    
      - args[3]: optional dateTime object: a "to" date
      
    :returns:
      {'Result': None | a float between 0.0 and 100.0}
    """

    if not 'fromDate' in self.args:
      fromDate = datetime.utcnow() - timedelta( hours = 2 )
    else:  
      fromDate = self.args[ 'fromDate' ]

    if not 'toDate' in self.args:
      toDate = datetime.utcnow()
    else:
      toDate = self.args[ 'toDate' ]

    if not 'name' in self.args:
      return S_ERROR( 'name not specified' )
    name = self.args[ 'name' ]

    results = self.rClient.getReport( 'DataOperation', 'Quality', fromDate, toDate, 
                                      { 'OperationType' : 'putAndRegister', 
                                        'Destination'   : [ name ] 
                                      }, 'Channel' )
      
    if not results[ 'OK' ]:
      return results
    
    pr_q_d = results[ 'Value' ][ 'data' ]
    
    #FIXME: WHAT the hell is this doing ?
    values = []
    if len( pr_q_d ) == 1:
      
      for k in pr_q_d.keys():
        for n in pr_q_d[ k ].values():
          values.append( n )
      res = sum( values ) / len( values )    

    else:
      for n in pr_q_d[ 'Total' ].values():
        values.append(n)
      res = sum( values ) / len( values )

    return S_OK( res )      
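The averaging flagged by the FIXME above does the following: if the report contains a single series, it averages that series' values; otherwise it averages the 'Total' series. A standalone sketch, with a plain dict standing in for results['Value']['data']:

def mean_quality(data):
    if len(data) == 1:
        values = [v for series in data.values() for v in series.values()]
    else:
        values = list(data['Total'].values())
    return sum(values) / len(values)

print(mean_quality({'CERN -> PIC': {0: 100.0, 3600: 90.0}}))  # -> 95.0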
Code Example #14
class TransferQualityBySourceSplittedSite_Command(Command):
    def doCommand(self, sources=None, SEs=None):
        """ 
    Returns transfer quality using the DIRAC accounting system for every SE
    of a single site and for the site itself for the last self.args[0] hours 
        
    :params:
      :attr:`dests`: list of destinations (when not given, take everything)
    
      :attr:`SEs`: list of storage elements (when not given, take every SE)

    :returns:
      
    """

        if SEs is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            SEs = RPC_RSS.getStorageElementsList('Read')
            if not SEs['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + SEs['Message']
            else:
                SEs = SEs['Value']

        if sources is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            sources = RPC_RSS.getSitesList()
            if not sources['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + sources['Message']
            else:
                sources = sources['Value']

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        fromD = datetime.datetime.utcnow() - datetime.timedelta(
            hours=self.args[0])
        toD = datetime.datetime.utcnow()

        try:
            qualityAll = self.client.getReport(
                'DataOperation', 'Quality', fromD, toD, {
                    'OperationType': 'putAndRegister',
                    'Source': sources + SEs,
                    'Destination': sources + SEs
                }, 'Destination')
            if not qualityAll['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + qualityAll['Message']
            else:
                qualityAll = qualityAll['Value']

        except:
            gLogger.exception(
                "Exception when calling TransferQualityByDestSplittedSite_Command"
            )
            return {}

        listOfDest = qualityAll['data'].keys()

        try:
            storSitesWeb = RPC_RSS.getStorageElementsStatusWeb(
                {'StorageElementName': listOfDest}, [], 0, 300, 'Read')
        except NameError:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            storSitesWeb = RPC_RSS.getStorageElementsStatusWeb(
                {'StorageElementName': listOfDest}, [], 0, 300, 'Read')

        if not storSitesWeb['OK']:
            raise RSSException, where(
                self, self.doCommand) + " " + storSitesWeb['Message']
        else:
            storSitesWeb = storSitesWeb['Value']['Records']

        SESiteMapping = {}
        siteSEMapping = {}

        for r in storSitesWeb:
            sites = r[2].split(' ')[:-1]
            SESiteMapping[r[0]] = sites

        for SE in SESiteMapping.keys():
            for site in SESiteMapping[SE]:
                try:
                    l = siteSEMapping[site]
                    l.append(SE)
                    siteSEMapping[site] = l
                except KeyError:
                    siteSEMapping[site] = [SE]

        plotGran = qualityAll['granularity']

        singlePlots = {}

        for site in siteSEMapping.keys():
            plot = {}
            plot['data'] = {}
            for SE in siteSEMapping[site]:
                plot['data'][SE] = qualityAll['data'][SE]
            plot['granularity'] = plotGran

            singlePlots[site] = plot

        resToReturn = {'DataOperation': singlePlots}

        return resToReturn

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code Example #15
class FailedPilotsByCESplittedCommand(Command):

  def __init__(self, args=None, clients=None):

    super(FailedPilotsByCESplittedCommand, self).__init__(args, clients)

    if 'ReportsClient' in self.apis:
      self.rClient = self.apis['ReportsClient']
    else:
      self.rClient = ReportsClient()

    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis['ReportGenerator']
    else:
      self.rgClient = RPCClient('Accounting/ReportGenerator')

    self.rClient.rpcClient = self.rgClient

  def doCommand(self):
    """
    Returns failed pilots using the DIRAC accounting system for every CE
    for the last self.args[0] hours

    :params:
      :attr:`CEs`: list of CEs (when not given, take every CE)

    :returns:

    """

    if 'hours' not in self.args:
      return S_ERROR('Number of hours not specified')
    hours = self.args['hours']

    ces = None
    if 'ces' in self.args:
      ces = self.args['ces']
    if ces is None:
      # FIXME: pointing to the CSHelper instead
      #      meta = {'columns':'ResourceName'}
      #      CEs = self.rsClient.getResource( resourceType = [ 'CE','CREAMCE' ], meta = meta )
      #      if not CEs['OK']:
      #        return CEs
      #      CEs = [ ce[0] for ce in CEs['Value'] ]

      ces = CSHelpers.getComputingElements()
      if not ces['OK']:
        return ces
      ces = ces['Value']

    if not ces:
      return S_ERROR('CEs is empty')

    fromD = datetime.utcnow() - timedelta(hours=hours)
    toD = datetime.utcnow()

    failedPilots = self.rClient.getReport('Pilot', 'NumberOfPilots', fromD, toD,
                                          {'GridStatus': ['Aborted'],
                                           'GridCE': ces
                                           }, 'GridCE')
    if not failedPilots['OK']:
      return failedPilots
    failedPilots = failedPilots['Value']

    if 'data' not in failedPilots:
      return S_ERROR('Missing data key')
    if 'granularity' not in failedPilots:
      return S_ERROR('Missing granularity key')

    singlePlots = {}

    for ce, value in failedPilots['data'].items():
      if ce in ces:
        plot = {}
        plot['data'] = {ce: value}
        plot['granularity'] = failedPilots['granularity']
        singlePlots[ce] = plot

    return S_OK(singlePlots)
Code Example #16
class WMSHistoryCorrector( BaseCorrector ):

  _GLOBAL_MAX_CORRECTION = 'MaxGlobalCorrection'
  _SLICE_TIME_SPAN = 'TimeSpan'
  _SLICE_WEIGHT = 'Weight'
  _SLICE_MAX_CORRECTION = 'MaxCorrection'

  def initialize( self ):
    self.__log = gLogger.getSubLogger( "WMSHistoryCorrector" )
    self.__reportsClient = ReportsClient()
    self.__usageHistory = {}
    self.__slices = {}
    self.__lastHistoryUpdate = 0
    self.__globalCorrectionFactor = 5
    self._fillSlices()
    return S_OK()

#   def _applyHistoryCorrections( self, entityShares, baseSection = "" ):
#     if baseSection not in self.__historyForCorrections or not self.__historyForCorrections[ baseSection ]:
#       return entityShares

  def _fillSlices( self ):
    self.__log.info( "Filling time slices..." )
    self.__slices = {}
    self.__globalCorrectionFactor =self.getCSOption( self._GLOBAL_MAX_CORRECTION, 5 )
    result = self.getCSSections()
    if not result[ 'OK' ]:
      self.__log.error( "Cound not get configured time slices", result[ 'Message' ] )
      return
    timeSlices = result[ 'Value' ]
    for timeSlice in timeSlices:
      self.__slices[ timeSlice ] = {}
      for key, defaultValue in ( ( self._SLICE_TIME_SPAN, 604800 ),
                                 ( self._SLICE_WEIGHT, 1 ),
                                 ( self._SLICE_MAX_CORRECTION, 3 ) ):
        self.__slices[ timeSlice ][ key ] = self.getCSOption( "%s/%s" % ( timeSlice, key ), defaultValue )
    #Weight has to be normalized to sum 1
    weightSum = 0
    for timeSlice in self.__slices:
      weightSum += self.__slices[ timeSlice ][ self._SLICE_WEIGHT ]
    for timeSlice in self.__slices:
      self.__slices[ timeSlice ][ self._SLICE_WEIGHT ] /= float( weightSum )
    self.__log.info( "Found %s time slices" % len( self.__slices ) )

  def updateHistoryKnowledge( self ):
    updatePeriod = self.getCSOption( 'UpdateHistoryPeriod', 900 )
    now = nativetime.time()
    if self.__lastHistoryUpdate + updatePeriod > now:
      self.__log.verbose( "Skipping history update. Last update was less than %s secs ago" % updatePeriod)
      return
    self.__lastHistoryUpdate = now
    self.__log.info( "Updating history knowledge" )
    self.__usageHistory = {}
    for timeSlice in self.__slices:
      result = self._getUsageHistoryForTimeSpan( self.__slices[ timeSlice ][ self._SLICE_TIME_SPAN ],
                                                 self.getGroup() )
      if not result[ 'OK' ]:
        self.__usageHistory = {}
        self.__log.error( "Could not get history for slice", "%s: %s" % ( timeSlice, result[ 'Message' ] ) )
        return
      self.__usageHistory[ timeSlice ] = result[ 'Value' ]
      self.__log.info( "Got history for slice %s (%s entities in slice)" % ( timeSlice, len( self.__usageHistory[ timeSlice ] ) ) )
    self.__log.info( "Updated history knowledge" )

  def _getUsageHistoryForTimeSpan( self, timeSpan, groupToUse = "" ):
    reportCondition = { 'Status' : [ 'Running' ] }
    if not groupToUse:
      reportGrouping = 'UserGroup'
    else:
      reportGrouping = 'User'
      reportCondition = { 'UserGroup' : groupToUse }
    now = Time.dateTime()
    result = self.__reportsClient.getReport( 'WMSHistory', 'AverageNumberOfJobs',
                                             now - datetime.timedelta( seconds = timeSpan ), now,
                                             reportCondition, reportGrouping,
                                             { 'lastSeconds' : timeSpan } )
    if not result[ 'OK' ]:
      self.__log.error( "Cannot get history from Accounting", result[ 'Message' ] )
      return result
    data = result['Value'].get( 'data', [] )
    if not data:
      message = "Empty history data from Accounting"
      self.__log.error( message )
      return S_ERROR( message )

    #Map the usernames to DNs
    if groupToUse:
      mappedData = {}
      for userName in data:
        result = CS.getDNForUsername( userName )
        if not result[ 'OK' ]:
          self.__log.error( "User does not have any DN assigned", "%s :%s" % ( userName, result[ 'Message' ] ) )
          continue
        for userDN in result[ 'Value' ]:
          mappedData[ userDN ] = data[ userName ]
      data = mappedData

    return S_OK( data )

  def __normalizeShares( self, entityShares ):
    totalShare = 0.0
    normalizedShares = {}
    #Normalize shares
    for entity in entityShares:
      totalShare += entityShares[ entity ]
    self.__log.verbose( "Total share for given entities is %.3f" % totalShare )
    for entity in entityShares:
      normalizedShare = entityShares[ entity ] / totalShare
      normalizedShares[ entity ] = normalizedShare
      self.__log.verbose( "Normalized share for %s: %.3f" % ( entity, normalizedShare ) )

    return normalizedShares

  def applyCorrection( self, entitiesExpectedShare ):
    #Normalize expected shares
    normalizedShares = self.__normalizeShares( entitiesExpectedShare )

    if not self.__usageHistory:
      self.__log.verbose( "No history knowledge available. Correction is 1 for all entities" )
      return entitiesExpectedShare

    entitiesSliceCorrections = dict( [ ( entity, [] ) for entity in entitiesExpectedShare ] )
    for timeSlice in self.__usageHistory:
      self.__log.verbose( "Calculating correction for time slice %s" % timeSlice )
      sliceTotal = 0.0
      sliceHistory = self.__usageHistory[ timeSlice ]
      for entity in entitiesExpectedShare:
        if entity in sliceHistory:
          sliceTotal += sliceHistory[ entity ]
          self.__log.verbose( "Usage for %s: %.3f" % ( entity, sliceHistory[ entity ] ) )
      self.__log.verbose( "Total usage for slice %.3f" % sliceTotal )
      if sliceTotal == 0.0:
        self.__log.verbose( "Slice usage is 0, skeeping slice" )
        continue
      maxSliceCorrection = self.__slices[ timeSlice ][ self._SLICE_MAX_CORRECTION ]
      minSliceCorrection = 1.0/maxSliceCorrection
      for entity in entitiesExpectedShare:
        if entity in sliceHistory:
          normalizedSliceUsage = sliceHistory[ entity ] / sliceTotal
          self.__log.verbose( "Entity %s is present in slice %s (normalized usage %.2f)" % ( entity,
                                                                                             timeSlice,
                                                                                             normalizedSliceUsage ) )
          sliceCorrectionFactor = normalizedShares[ entity ] / normalizedSliceUsage
          sliceCorrectionFactor = min( sliceCorrectionFactor, maxSliceCorrection )
          sliceCorrectionFactor = max( sliceCorrectionFactor, minSliceCorrection )
          sliceCorrectionFactor *= self.__slices[ timeSlice ][ self._SLICE_WEIGHT ]
        else:
          self.__log.verbose( "Entity %s is not present in slice %s" % ( entity, timeSlice ) )
          sliceCorrectionFactor = maxSliceCorrection
        self.__log.verbose( "Slice correction factor for entity %s is %.3f" % ( entity, sliceCorrectionFactor ) )
        entitiesSliceCorrections[ entity ].append( sliceCorrectionFactor )

    correctedEntityShare = {}
    maxGlobalCorrectionFactor = self.__globalCorrectionFactor
    minGlobalCorrectionFactor = 1.0/maxGlobalCorrectionFactor
    for entity in entitiesSliceCorrections:
      entityCorrectionFactor = 0.0
      slicesCorrections = entitiesSliceCorrections[ entity ]
      if not slicesCorrections:
        self.__log.verbose( "Entity does not have any correction %s" % entity )
        correctedEntityShare[ entity ] = entitiesExpectedShare[ entity ]
      else:
        for cF in entitiesSliceCorrections[ entity ]:
          entityCorrectionFactor += cF
        entityCorrectionFactor = min( entityCorrectionFactor, maxGlobalCorrectionFactor )
        entityCorrectionFactor = max( entityCorrectionFactor, minGlobalCorrectionFactor )
        correctedShare = entitiesExpectedShare[ entity ] * entityCorrectionFactor
        correctedEntityShare[ entity ] = correctedShare
        self.__log.verbose( "Final correction factor for entity %s is %.3f\n Final share is %.3f" % ( entity,
                                                                                                      entityCorrectionFactor,
                                                                                                      correctedShare ) )
    self.__log.verbose( "Initial shares:\n  %s" % "\n  ".join( [ "%s : %.2f" % ( en, entitiesExpectedShare[ en ] ) \
                                                                for en in entitiesExpectedShare ] ) )
    self.__log.verbose( "Corrected shares:\n  %s" % "\n  ".join( [ "%s : %.2f" % ( en, correctedEntityShare[ en ] ) \
                                                                  for en in correctedEntityShare ] ) )
    return correctedEntityShare
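The per-slice correction computed in applyCorrection above boils down to: take the ratio of the expected (normalized) share to the observed (normalized) usage, clamp it to [1/maxCorrection, maxCorrection], and scale it by the slice weight. A standalone sketch of that single step, with illustrative names:

def slice_correction(expected_share, observed_usage, max_correction, weight):
    factor = expected_share / observed_usage
    factor = min(factor, max_correction)
    factor = max(factor, 1.0 / max_correction)
    return factor * weight

# Expected 50% of the slice but only used 10%: the raw factor of 5 is capped at 3,
# then weighted by 0.25.
print(slice_correction(0.5, 0.1, 3.0, 0.25))  # -> 0.75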
Code Example #17
class FailedPilotsByCESplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(FailedPilotsByCESplittedCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

        if "ReportGenerator" in self.apis:
            self.rgClient = self.apis["ReportGenerator"]
        else:
            self.rgClient = Client(url="Accounting/ReportGenerator")

        self.rClient.rpcClient = self.rgClient

    def doCommand(self):
        """
        Returns failed pilots using the DIRAC accounting system for every CE
        for the last self.args[0] hours

        :params:
          :attr:`CEs`: list of CEs (when not given, take every CE)

        :returns:

        """

        if "hours" not in self.args:
            return S_ERROR("Number of hours not specified")
        hours = self.args["hours"]

        ces = None
        if "ces" in self.args:
            ces = self.args["ces"]
        if ces is None:
            res = getCESiteMapping()
            if not res["OK"]:
                return res
            ces = list(res["Value"])

        if not ces:
            return S_ERROR("CEs is empty")

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        failedPilots = self.rClient.getReport("Pilot", "NumberOfPilots", fromD,
                                              toD, {
                                                  "GridStatus": ["Aborted"],
                                                  "GridCE": ces
                                              }, "GridCE")
        if not failedPilots["OK"]:
            return failedPilots
        failedPilots = failedPilots["Value"]

        if "data" not in failedPilots:
            return S_ERROR("Missing data key")
        if "granularity" not in failedPilots:
            return S_ERROR("Missing granularity key")

        failedPilots["data"] = {
            site: strToIntDict(value)
            for site, value in failedPilots["data"].items()
        }

        singlePlots = {}

        for ce, value in failedPilots["data"].items():
            if ce in ces:
                plot = {}
                plot["data"] = {ce: value}
                plot["granularity"] = failedPilots["granularity"]
                singlePlots[ce] = plot

        return S_OK(singlePlots)
Code Example #18
class SuccessfullJobsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(SuccessfullJobsBySiteSplittedCommand,
              self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
        Returns successfull jobs using the DIRAC accounting system for every site
        for the last self.args[0] hours

        :params:
          :attr:`sites`: list of sites (when not given, take every site)

        :returns:

        """

        if "hours" not in self.args:
            return S_ERROR("Number of hours not specified")
        hours = self.args["hours"]

        sites = None
        if "sites" in self.args:
            sites = self.args["sites"]
        if sites is None:
            sites = getSites()
            if not sites["OK"]:
                return sites
            sites = sites["Value"]

        if not sites:
            return S_ERROR("Sites is empty")

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        successfulJobs = self.rClient.getReport("Job", "NumberOfJobs", fromD,
                                                toD, {
                                                    "FinalStatus": ["Done"],
                                                    "Site": sites
                                                }, "Site")
        if not successfulJobs["OK"]:
            return successfulJobs
        successfulJobs = successfulJobs["Value"]

        if "data" not in successfulJobs:
            return S_ERROR("Missing data key")
        if "granularity" not in successfulJobs:
            return S_ERROR("Missing granularity key")

        singlePlots = {}

        successfulJobs["data"] = {
            site: strToIntDict(value)
            for site, value in successfulJobs["data"].items()
        }

        for site, value in successfulJobs["data"].items():
            if site in sites:
                plot = {}
                plot["data"] = {site: value}
                plot["granularity"] = successfulJobs["granularity"]
                singlePlots[site] = plot

        return S_OK(singlePlots)
Code Example #19
class TransferQualityCommand(Command):
    def __init__(self, args=None, clients=None):

        super(TransferQualityCommand, self).__init__(args, clients)

        if "ReportGenerator" in self.apis:
            self.rgClient = self.apis["ReportGenerator"]
        else:
            self.rgClient = RPCClient("Accounting/ReportGenerator")

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

        self.rClient.rpcClient = self.rgClient

    def doCommand(self):
        """
    Return getQuality from DIRAC's accounting ReportsClient

    `args`: a tuple
      - args[0]: string: should be a ValidElement

      - args[1]: string should be the name of the ValidElement

      - args[2]: optional dateTime object: a "from" date

      - args[3]: optional dateTime object: a "to" date

    :returns:
      {'Result': None | a float between 0.0 and 100.0}
    """

        if "fromDate" not in self.args:
            fromDate = datetime.utcnow() - timedelta(hours=2)
        else:
            fromDate = self.args["fromDate"]

        if "toDate" not in self.args:
            toDate = datetime.utcnow()
        else:
            toDate = self.args["toDate"]

        if "name" not in self.args:
            return S_ERROR("name not specified")
        name = self.args["name"]

        results = self.rClient.getReport(
            "DataOperation",
            "Quality",
            fromDate,
            toDate,
            {"OperationType": "putAndRegister", "Destination": [name]},
            "Channel",
        )

        if not results["OK"]:
            return results

        pr_q_d = results["Value"]["data"]

        # FIXME: WHAT the hell is this doing ?
        values = []
        if len(pr_q_d) == 1:

            for k in pr_q_d.keys():
                for n in pr_q_d[k].values():
                    values.append(n)
            res = sum(values) / len(values)

        else:
            for n in pr_q_d["Total"].values():
                values.append(n)
            res = sum(values) / len(values)

        return S_OK(res)
Code Example #20
class TransferQualityEverySEs_Command( Command ):

  def doCommand( self, SEs = None ):
    """ 
    Returns transfer quality using the DIRAC accounting system for every SE 
        
    :params:
      :attr:`SEs`: list of storage elements (when not given, take every SE)
    
    :returns:
      {'SiteName': {TQ : 'Good'|'Fair'|'Poor'|'Idle'|'Bad'} ...}
    """

    if SEs is None:
#      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient( "ResourceStatus/ResourceStatus" )
      SEs = RPC_RSS.getStorageElementsList()
      if not SEs['OK']:
        raise RSSException, where( self, self.doCommand ) + " " + SEs['Message']
      else:
        SEs = SEs['Value']

    if self.RPC is None:
#      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient( "Accounting/ReportGenerator", timeout = self.timeout )

    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient( rpcClient = self.RPC )

    fromD = datetime.datetime.utcnow() - datetime.timedelta( hours = 2 )
    toD = datetime.datetime.utcnow()

    try:
      qualityAll = self.client.getReport( 'DataOperation', 'Quality', fromD, toD,
                                         {'OperationType':'putAndRegister',
                                          'Destination':SEs}, 'Channel' )
      if not qualityAll['OK']:
        raise RSSException, where( self, self.doCommand ) + " " + qualityAll['Message']
      else:
        qualityAll = qualityAll['Value']['data']

    except:
      gLogger.exception( "Exception when calling TransferQualityEverySEs_Command" )
      return {}

    listOfDestSEs = []

    for k in qualityAll.keys():
      try:
        key = k.split( ' -> ' )[1]
        if key not in listOfDestSEs:
          listOfDestSEs.append( key )
      except:
        continue

    meanQuality = {}

    for destSE in listOfDestSEs:
      s = 0
      n = 0
      for k in qualityAll.keys():
        try:
          if k.split( ' -> ' )[1] == destSE:
            n = n + len( qualityAll[k] )
            s = s + sum( qualityAll[k].values() )
        except:
          continue
      meanQuality[destSE] = s / n

    resToReturn = {}

    for se in meanQuality:
      resToReturn[se] = {'TQ': meanQuality[se]}

    return resToReturn


  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
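The command above averages transfer quality per destination SE by parsing channel names of the form 'Source -> Destination'. A standalone sketch of that grouping and averaging, with a plain dict standing in for qualityAll:

def mean_quality_by_destination(quality):
    sums, counts = {}, {}
    for channel, series in quality.items():
        parts = channel.split(' -> ')
        if len(parts) != 2:
            continue
        dest = parts[1]
        sums[dest] = sums.get(dest, 0.0) + sum(series.values())
        counts[dest] = counts.get(dest, 0) + len(series)
    return {dest: sums[dest] / counts[dest] for dest in sums}

print(mean_quality_by_destination({'CERN -> PIC': {0: 100.0, 1: 80.0},
                                   'CNAF -> PIC': {0: 60.0}}))  # -> {'PIC': 80.0}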
Code Example #21
class RunningJobsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(RunningJobsBySiteSplittedCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

        if "ReportGenerator" in self.apis:
            self.rgClient = self.apis["ReportGenerator"]
        else:
            self.rgClient = Client(url="Accounting/ReportGenerator")

        self.rClient.rpcClient = self.rgClient

    def doCommand(self):
        """
        Returns running and runned jobs, querying the WMSHistory
        for the last self.args[0] hours

        :params:
          :attr:`sites`: list of sites (when not given, take every sites)

        :returns:

        """

        if "hours" not in self.args:
            return S_ERROR("Number of hours not specified")
        hours = self.args["hours"]

        sites = None
        if "sites" in self.args:
            sites = self.args["sites"]
        if sites is None:
            # FIXME: pointing to the CSHelper instead
            #      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
            #      if not sources[ 'OK' ]:
            #        return sources
            #      sources = [ si[0] for si in sources[ 'Value' ] ]
            sites = getSites()
            if not sites["OK"]:
                return sites
            sites = sites["Value"]

        if not sites:
            return S_ERROR("Sites is empty")

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        runJobs = self.rClient.getReport("WMSHistory", "NumberOfJobs", fromD,
                                         toD, {}, "Site")
        if not runJobs["OK"]:
            return runJobs
        runJobs = runJobs["Value"]

        if "data" not in runJobs:
            return S_ERROR("Missing data key")
        if "granularity" not in runJobs:
            return S_ERROR("Missing granularity key")

        runJobs["data"] = {
            site: strToIntDict(value)
            for site, value in runJobs["data"].items()
        }

        singlePlots = {}

        for site, value in runJobs["data"].items():
            if site in sites:
                plot = {}
                plot["data"] = {site: value}
                plot["granularity"] = runJobs["granularity"]
                singlePlots[site] = plot

        return S_OK(singlePlots)
Code Example #22
class FailedJobsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(FailedJobsBySiteSplittedCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
        Returns failed jobs using the DIRAC accounting system for every site
        for the last self.args[0] hours

        :params:
          :attr:`sites`: list of sites (when not given, take every site)

        :returns:

        """

        if "hours" not in self.args:
            return S_ERROR("Number of hours not specified")
        hours = self.args["hours"]

        sites = None
        if "sites" in self.args:
            sites = self.args["sites"]
        if sites is None:
            # FIXME: pointing to the CSHelper instead
            #      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
            #      if not sources[ 'OK' ]:
            #        return sources
            #      sources = [ si[0] for si in sources[ 'Value' ] ]
            sites = getSites()
            if not sites["OK"]:
                return sites
            sites = sites["Value"]

        if not sites:
            return S_ERROR("Sites is empty")

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        failedJobs = self.rClient.getReport("Job", "NumberOfJobs", fromD, toD,
                                            {
                                                "FinalStatus": ["Failed"],
                                                "Site": sites
                                            }, "Site")
        if not failedJobs["OK"]:
            return failedJobs
        failedJobs = failedJobs["Value"]

        if "data" not in failedJobs:
            return S_ERROR("Missing data key")
        if "granularity" not in failedJobs:
            return S_ERROR("Missing granularity key")

        failedJobs["data"] = {
            site: strToIntDict(value)
            for site, value in failedJobs["data"].items()
        }

        singlePlots = {}

        for site, value in failedJobs["data"].items():
            if site in sites:
                plot = {}
                plot["data"] = {site: value}
                plot["granularity"] = failedJobs["granularity"]
                singlePlots[site] = plot

        return S_OK(singlePlots)
Code Example #23
class SuccessfullJobsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(SuccessfullJobsBySiteSplittedCommand,
              self).__init__(args, clients)

        if 'ReportsClient' in self.apis:
            self.rClient = self.apis['ReportsClient']
        else:
            self.rClient = ReportsClient()

        if 'ReportGenerator' in self.apis:
            self.rgClient = self.apis['ReportGenerator']
        else:
            self.rgClient = RPCClient('Accounting/ReportGenerator')

        self.rClient.rpcClient = self.rgClient

    def doCommand(self):
        """ 
    Returns successfull jobs using the DIRAC accounting system for every site 
    for the last self.args[0] hours 
        
    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:
      
    """

        if not 'hours' in self.args:
            return S_ERROR('Number of hours not specified')
        hours = self.args['hours']

        sites = None
        if 'sites' in self.args:
            sites = self.args['sites']
        if sites is None:
            #FIXME: pointing to the CSHelper instead
            #      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
            #      if not sources[ 'OK' ]:
            #        return sources
            #      sources = [ si[0] for si in sources[ 'Value' ] ]
            sites = Resources.getSites()
            if not sites['OK']:
                return sites
            sites = sites['Value']

        if not sites:
            return S_ERROR('Sites is empty')

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        successfulJobs = self.rClient.getReport('Job', 'NumberOfJobs', fromD,
                                                toD, {
                                                    'FinalStatus': ['Done'],
                                                    'Site': sites
                                                }, 'Site')
        if not successfulJobs['OK']:
            return successfulJobs
        successfulJobs = successfulJobs['Value']

        if not 'data' in successfulJobs:
            return S_ERROR('Missing data key')
        if not 'granularity' in successfulJobs:
            return S_ERROR('Missing granularity key')

        singlePlots = {}

        for site, value in successfulJobs['data'].items():
            if site in sites:
                plot = {}
                plot['data'] = {site: value}
                plot['granularity'] = successfulJobs['granularity']
                singlePlots[site] = plot

        return S_OK(singlePlots)
コード例 #24
0
class DIRACAccounting_Command(Command):
    def doCommand(self):
        """ 
    Returns jobs accounting info for sites in the last 24h
    `args`: 
       - args[0]: string - should be a ValidRes
       
       - args[1]: string - should be the name of the ValidRes
       
       - args[2]: string - should be 'Job' or 'Pilot' or 'DataOperation'
         or 'WMSHistory' (??) or 'SRM' (??)
       
       - args[3]: string - should be the plot to generate (e.g. CPUEfficiency) 
       
       - args[4]: dictionary - e.g. {'Format': 'LastHours', 'hours': 24}
       
       - args[5]: string - should be the grouping
       
       - args[6]: dictionary - optional conditions
    """
        super(DIRACAccounting_Command, self).doCommand()

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        granularity = self.args[0]
        name = self.args[1]
        accounting = self.args[2]
        plot = self.args[3]
        period = self.args[4]
        if period['Format'] == 'LastHours':
            fromT = datetime.datetime.utcnow() - datetime.timedelta(
                hours=period['hours'])
            toT = datetime.datetime.utcnow()
        elif period['Format'] == 'Periods':
            #TODO
            pass
        grouping = self.args[5]
        try:
            if self.args[6] is not None:
                conditions = self.args[6]
            else:
                raise Exception
        except:
            conditions = {}
            if accounting == 'Job' or accounting == 'Pilot':
                if granularity == 'Resource':
                    conditions['GridCE'] = [name]
                elif granularity == 'Service':
                    conditions['Site'] = [name.split('@').pop()]
                elif granularity == 'Site':
                    conditions['Site'] = [name]
                else:
                    raise InvalidRes, where(self, self.doCommand)
            elif accounting == 'DataOperation':
                conditions['Destination'] = [name]

        try:

            res = self.client.getReport(accounting, plot, fromT, toT,
                                        conditions, grouping)

            if res['OK']:
                return {'Result': res['Value']}
            else:
                raise RSSException, where(
                    self, self.doCommand) + ' ' + res['Message']

        except:
            gLogger.exception("Exception when calling ReportsClient for " +
                              granularity + " " + name)
            return {'Result': 'Unknown'}

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code Example #25
class SuccessfullJobsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(SuccessfullJobsBySiteSplittedCommand,
              self).__init__(args, clients)

        if 'ReportsClient' in self.apis:
            self.rClient = self.apis['ReportsClient']
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
    Returns successfull jobs using the DIRAC accounting system for every site
    for the last self.args[0] hours

    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:

    """

        if 'hours' not in self.args:
            return S_ERROR('Number of hours not specified')
        hours = self.args['hours']

        sites = None
        if 'sites' in self.args:
            sites = self.args['sites']
        if sites is None:
            sites = getSites()
            if not sites['OK']:
                return sites
            sites = sites['Value']

        if not sites:
            return S_ERROR('Sites is empty')

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        successfulJobs = self.rClient.getReport('Job', 'NumberOfJobs', fromD,
                                                toD, {
                                                    'FinalStatus': ['Done'],
                                                    'Site': sites
                                                }, 'Site')
        if not successfulJobs['OK']:
            return successfulJobs
        successfulJobs = successfulJobs['Value']

        if 'data' not in successfulJobs:
            return S_ERROR('Missing data key')
        if 'granularity' not in successfulJobs:
            return S_ERROR('Missing granularity key')

        singlePlots = {}

        successfulJobs['data'] = {
            site: strToIntDict(value)
            for site, value in successfulJobs['data'].items()
        }

        for site, value in successfulJobs['data'].items():
            if site in sites:
                plot = {}
                plot['data'] = {site: value}
                plot['granularity'] = successfulJobs['granularity']
                singlePlots[site] = plot

        return S_OK(singlePlots)
Code Example #26
class TransferCommand(Command):
    '''
    Transfer "master" Command
  '''
    def __init__(self, args=None, clients=None):

        super(TransferCommand, self).__init__(args, clients)

        if 'ReportsClient' in self.apis:
            self.rClient = self.apis['ReportsClient']
        else:
            self.rClient = ReportsClient()

        if 'ReportGenerator' in self.apis:
            self.rgClient = self.apis['ReportGenerator']
        else:
            self.rgClient = RPCClient('Accounting/ReportGenerator')

        self.rClient.rpcClient = self.rgClient

        if 'ResourceManagementClient' in self.apis:
            self.rmClient = self.apis['ResourceManagementClient']
        else:
            self.rmClient = ResourceManagementClient()

    def _storeCommand(self, results):
        '''
      Stores the results of doNew method on the database.    
    '''

        for result in results:

            resQuery = self.rmClient.addOrModifyTransferCache(
                result['SourceName'], result['DestinationName'],
                result['Metric'], result['Value'])
            if not resQuery['OK']:
                return resQuery
        return S_OK()

    def _prepareCommand(self):
        '''
      TransferChannelCommand requires four arguments:
      - hours       : <int>
      - direction   : Source | Destination
      - elementName : <str>
      - metric      : Quality | FailedTransfers
      
      GGUSTickets are associated with gocDB names, so we have to transform the
      diracSiteName into a gocSiteName.    
    '''

        if not 'hours' in self.args:
            return S_ERROR('Number of hours not specified')
        hours = self.args['hours']

        if not 'direction' in self.args:
            return S_ERROR('direction is missing')
        direction = self.args['direction']

        if direction not in ['Source', 'Destination']:
            return S_ERROR('direction is not Source nor Destination')

        if not 'name' in self.args:
            return S_ERROR('"name" is missing')
        name = self.args['name']

        if not 'metric' in self.args:
            return S_ERROR('metric is missing')
        metric = self.args['metric']

        if metric not in ['Quality', 'FailedTransfers']:
            return S_ERROR('metric is not Quality nor FailedTransfers')

        return S_OK((hours, name, direction, metric))

    def doNew(self, masterParams=None):
        '''
        Gets the parameters to run, either from the master method or from its
        own arguments.

        For every elementName (it cannot process bulk queries) it contacts the
        accounting client. It returns dictionaries like { 'X -> Y' : { id: 100%.. } }

        If there are GGUS tickets, they are recorded and then returned.
        '''

        if masterParams is not None:
            hours, name, direction, metric = masterParams

        else:
            params = self._prepareCommand()
            if not params['OK']:
                return params
            hours, name, direction, metric = params['Value']

        toD = datetime.utcnow()
        fromD = toD - timedelta(hours=hours)

        # dictionary with conditions for the accounting
        transferDict = {'OperationType': 'putAndRegister', direction: name}

        if metric == 'FailedTransfers':
            transferDict['FinalStatus'] = ['Failed']

        transferResults = self.rClient.getReport('DataOperation', metric,
                                                 fromD, toD, transferDict,
                                                 'Channel')

        if not transferResults['OK']:
            return transferResults
        transferResults = transferResults['Value']

        if not 'data' in transferResults:
            return S_ERROR('Missing data key')
        transferResults = transferResults['data']

        uniformResult = []

        for channel, elementDict in transferResults.items():

            try:
                source, destination = channel.split(' -> ')
            except ValueError:
                continue

            channelDict = {}
            channelDict['SourceName'] = source
            channelDict['DestinationName'] = destination
            channelDict['Metric'] = metric
            channelDict['Value'] = sum(elementDict.values()) / len(
                elementDict.values())

            uniformResult.append(channelDict)

        storeRes = self._storeCommand(uniformResult)
        if not storeRes['OK']:
            return storeRes

        # Compute mean of all transfer channels
        value = 0
        for channelDict in uniformResult:
            value += channelDict['Value']

        if uniformResult:
            value = float(value) / len(uniformResult)
        else:
            value = None

        return S_OK({'Mean': value, 'Name': name})

    def doCache(self):
        '''
        Method that reads from the cache table. It will return a list of
        dictionaries if there are results.
        '''

        params = self._prepareCommand()
        if not params['OK']:
            return params
        _hours, name, direction, metric = params['Value']

        sourceName, destinationName = None, None
        if direction == 'Source':
            sourceName = name
        if direction == 'Destination':
            destinationName = name

        result = self.rmClient.selectTransferCache(sourceName, destinationName,
                                                   metric)
        if not result['OK']:
            return result

        result = [dict(zip(result['Columns'], res)) for res in result['Value']]

        # Compute mean of all transfer channels
        value = 0
        for channelDict in result:
            value += channelDict['Value']

        if result:
            value = float(value) / len(result)
        else:
            value = None

        return S_OK({'Mean': value, 'Name': name})

    def doMaster(self):
        '''
        Master method, which looks a little bit like spaghetti code, sorry!
        - It gets all Sites.
        - It gets all StorageElements.

        As there is no bulk query, it compares with what we have in the database
        and queries a portion of them.
        '''

        sites = CSHelpers.getSites()
        if not sites['OK']:
            return sites
        sites = sites['Value']

        ses = CSHelpers.getStorageElements()
        if not ses['OK']:
            return ses
        ses = ses['Value']

        elementNames = sites + ses

        #    sourceQuery = self.rmClient.selectTransferCache( meta = { 'columns' : [ 'SourceName' ] } )
        #    if not sourceQuery[ 'OK' ]:
        #      return sourceQuery
        #    sourceQuery = [ element[0] for element in sourceQuery[ 'Value' ] ]
        #
        #    sourceElementsToQuery = list( set( elementNames ).difference( set( sourceQuery ) ) )
        gLogger.info('Processing %s' % ', '.join(elementNames))

        for metric in ['Quality', 'FailedTransfers']:
            for direction in ['Source', 'Destination']:
                # 2 hours of window
                result = self.doNew((2, elementNames, direction, metric))
                if not result['OK']:
                    self.metrics['failed'].append(result)

        return S_OK(self.metrics)


################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
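
A rough usage sketch for the TransferCommand above; the argument values, the site name and the standalone driver are assumptions for illustration only, and the clients could equally be injected through the clients dictionary:

if __name__ == '__main__':
    from DIRAC.Core.Base import Script
    Script.parseCommandLine()

    # hypothetical arguments: a 2-hour window for a made-up destination site
    command = TransferCommand(args={'hours': 2,
                                    'direction': 'Destination',
                                    'name': 'LCG.CERN.ch',
                                    'metric': 'Quality'})
    fresh = command.doNew()      # queries the accounting and refreshes TransferCache
    cached = command.doCache()   # reads the stored mean back from TransferCache
    print(fresh, cached)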
Code example #27
0
class FailedPilotsByCESplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(FailedPilotsByCESplittedCommand, self).__init__(args, clients)

        if 'ReportsClient' in self.apis:
            self.rClient = self.apis['ReportsClient']
        else:
            self.rClient = ReportsClient()

        if 'ReportGenerator' in self.apis:
            self.rgClient = self.apis['ReportGenerator']
        else:
            self.rgClient = RPCClient('Accounting/ReportGenerator')

        self.rClient.rpcClient = self.rgClient

    def doCommand(self):
        """
    Returns failed pilots using the DIRAC accounting system for every CE
    for the last self.args['hours'] hours

    :params:
      :attr:`CEs`: list of CEs (when not given, take every CE)

    :returns:

    """

        if 'hours' not in self.args:
            return S_ERROR('Number of hours not specified')
        hours = self.args['hours']

        ces = None
        if 'ces' in self.args:
            ces = self.args['ces']
        if ces is None:
            res = getCESiteMapping()
            if not res['OK']:
                return res
            ces = list(res['Value'])

        if not ces:
            return S_ERROR('CEs is empty')

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        failedPilots = self.rClient.getReport('Pilot', 'NumberOfPilots', fromD,
                                              toD, {
                                                  'GridStatus': ['Aborted'],
                                                  'GridCE': ces
                                              }, 'GridCE')
        if not failedPilots['OK']:
            return failedPilots
        failedPilots = failedPilots['Value']

        if 'data' not in failedPilots:
            return S_ERROR('Missing data key')
        if 'granularity' not in failedPilots:
            return S_ERROR('Missing granularity key')

        failedPilots['data'] = {
            site: strToIntDict(value)
            for site, value in failedPilots['data'].items()
        }

        singlePlots = {}

        for ce, value in failedPilots['data'].items():
            if ce in ces:
                plot = {}
                plot['data'] = {ce: value}
                plot['granularity'] = failedPilots['granularity']
                singlePlots[ce] = plot

        return S_OK(singlePlots)
Code example #28
0
File: WMSHistoryCorrector.py Project: ptakha/DIRAC-1
class WMSHistoryCorrector(BaseCorrector):

    _GLOBAL_MAX_CORRECTION = 'MaxGlobalCorrection'
    _SLICE_TIME_SPAN = 'TimeSpan'
    _SLICE_WEIGHT = 'Weight'
    _SLICE_MAX_CORRECTION = 'MaxCorrection'

    def initialize(self):
        self.__log = gLogger.getSubLogger("WMSHistoryCorrector")
        self.__reportsClient = ReportsClient()
        self.__usageHistory = {}
        self.__slices = {}
        self.__lastHistoryUpdate = 0
        self.__globalCorrectionFactor = 5
        self._fillSlices()
        return S_OK()


#   def _applyHistoryCorrections( self, entityShares, baseSection = "" ):
#     if baseSection not in self.__historyForCorrections or not self.__historyForCorrections[ baseSection ]:
#       return entityShares

    def _fillSlices(self):
        self.__log.info("Filling time slices...")
        self.__slices = {}
        self.__globalCorrectionFactor = self.getCSOption(
            self._GLOBAL_MAX_CORRECTION, 5)
        result = self.getCSSections()
        if not result['OK']:
            self.__log.error("Cound not get configured time slices",
                             result['Message'])
            return
        timeSlices = result['Value']
        for timeSlice in timeSlices:
            self.__slices[timeSlice] = {}
            for key, defaultValue in ((self._SLICE_TIME_SPAN,
                                       604800), (self._SLICE_WEIGHT, 1),
                                      (self._SLICE_MAX_CORRECTION, 3)):
                self.__slices[timeSlice][key] = self.getCSOption(
                    "%s/%s" % (timeSlice, key), defaultValue)
        # Weights have to be normalized to sum to 1
        weightSum = 0
        for timeSlice in self.__slices:
            weightSum += self.__slices[timeSlice][self._SLICE_WEIGHT]
        for timeSlice in self.__slices:
            self.__slices[timeSlice][self._SLICE_WEIGHT] /= float(weightSum)
        self.__log.info("Found %s time slices" % len(self.__slices))

    def updateHistoryKnowledge(self):
        updatePeriod = self.getCSOption('UpdateHistoryPeriod', 900)
        now = nativetime.time()
        if self.__lastHistoryUpdate + updatePeriod > now:
            self.__log.verbose(
                "Skipping history update. Last update was less than %s secs ago"
                % updatePeriod)
            return
        self.__lastHistoryUpdate = now
        self.__log.info("Updating history knowledge")
        self.__usageHistory = {}
        for timeSlice in self.__slices:
            result = self._getUsageHistoryForTimeSpan(
                self.__slices[timeSlice][self._SLICE_TIME_SPAN],
                self.getGroup())
            if not result['OK']:
                self.__usageHistory = {}
                self.__log.error("Could not get history for slice",
                                 "%s: %s" % (timeSlice, result['Message']))
                return
            self.__usageHistory[timeSlice] = result['Value']
            self.__log.info("Got history for slice %s (%s entities in slice)" %
                            (timeSlice, len(self.__usageHistory[timeSlice])))
        self.__log.info("Updated history knowledge")

    def _getUsageHistoryForTimeSpan(self, timeSpan, groupToUse=""):
        reportCondition = {'Status': ['Running']}
        if not groupToUse:
            reportGrouping = 'UserGroup'
        else:
            reportGrouping = 'User'
            reportCondition = {'UserGroup': groupToUse}
        now = Time.dateTime()
        result = self.__reportsClient.getReport(
            'WMSHistory', 'AverageNumberOfJobs',
            now - datetime.timedelta(seconds=timeSpan), now, reportCondition,
            reportGrouping, {'lastSeconds': timeSpan})
        if not result['OK']:
            self.__log.error("Cannot get history from Accounting",
                             result['Message'])
            return result
        data = result['Value']['data']

        #Map the usernames to DNs
        if groupToUse:
            mappedData = {}
            for userName in data:
                result = CS.getDNForUsername(userName)
                if not result['OK']:
                    self.__log.error("User does not have any DN assigned",
                                     "%s :%s" % (userName, result['Message']))
                    continue
                for userDN in result['Value']:
                    mappedData[userDN] = data[userName]
            data = mappedData

        return S_OK(data)

    def __normalizeShares(self, entityShares):
        totalShare = 0.0
        normalizedShares = {}
        #Normalize shares
        for entity in entityShares:
            totalShare += entityShares[entity]
        self.__log.verbose("Total share for given entities is %.3f" %
                           totalShare)
        for entity in entityShares:
            normalizedShare = entityShares[entity] / totalShare
            normalizedShares[entity] = normalizedShare
            self.__log.verbose("Normalized share for %s: %.3f" %
                               (entity, normalizedShare))

        return normalizedShares

    def applyCorrection(self, entitiesExpectedShare):
        #Normalize expected shares
        normalizedShares = self.__normalizeShares(entitiesExpectedShare)

        if not self.__usageHistory:
            self.__log.verbose(
                "No history knowledge available. Correction is 1 for all entities"
            )
            return entitiesExpectedShare

        entitiesSliceCorrections = dict([(entity, [])
                                         for entity in entitiesExpectedShare])
        for timeSlice in self.__usageHistory:
            self.__log.verbose("Calculating correction for time slice %s" %
                               timeSlice)
            sliceTotal = 0.0
            sliceHistory = self.__usageHistory[timeSlice]
            for entity in entitiesExpectedShare:
                if entity in sliceHistory:
                    sliceTotal += sliceHistory[entity]
                    self.__log.verbose("Usage for %s: %.3f" %
                                       (entity, sliceHistory[entity]))
            self.__log.verbose("Total usage for slice %.3f" % sliceTotal)
            if sliceTotal == 0.0:
                self.__log.verbose("Slice usage is 0, skeeping slice")
                continue
            maxSliceCorrection = self.__slices[timeSlice][
                self._SLICE_MAX_CORRECTION]
            minSliceCorrection = 1.0 / maxSliceCorrection
            for entity in entitiesExpectedShare:
                if entity in sliceHistory:
                    normalizedSliceUsage = sliceHistory[entity] / sliceTotal
                    self.__log.verbose(
                        "Entity %s is present in slice %s (normalized usage %.2f)"
                        % (entity, timeSlice, normalizedSliceUsage))
                    sliceCorrectionFactor = normalizedShares[
                        entity] / normalizedSliceUsage
                    sliceCorrectionFactor = min(sliceCorrectionFactor,
                                                maxSliceCorrection)
                    sliceCorrectionFactor = max(sliceCorrectionFactor,
                                                minSliceCorrection)
                    sliceCorrectionFactor *= self.__slices[timeSlice][
                        self._SLICE_WEIGHT]
                else:
                    self.__log.verbose("Entity %s is not present in slice %s" %
                                       (entity, timeSlice))
                    sliceCorrectionFactor = maxSliceCorrection
                self.__log.verbose(
                    "Slice correction factor for entity %s is %.3f" %
                    (entity, sliceCorrectionFactor))
                entitiesSliceCorrections[entity].append(sliceCorrectionFactor)

        correctedEntityShare = {}
        maxGlobalCorrectionFactor = self.__globalCorrectionFactor
        minGlobalCorrectionFactor = 1.0 / maxGlobalCorrectionFactor
        for entity in entitiesSliceCorrections:
            entityCorrectionFactor = 0.0
            slicesCorrections = entitiesSliceCorrections[entity]
            if not slicesCorrections:
                self.__log.verbose("Entity does not have any correction %s" %
                                   entity)
                correctedEntityShare[entity] = entitiesExpectedShare[entity]
            else:
                for cF in entitiesSliceCorrections[entity]:
                    entityCorrectionFactor += cF
                entityCorrectionFactor = min(entityCorrectionFactor,
                                             maxGlobalCorrectionFactor)
                entityCorrectionFactor = max(entityCorrectionFactor,
                                             minGlobalCorrectionFactor)
                correctedShare = entitiesExpectedShare[
                    entity] * entityCorrectionFactor
                correctedEntityShare[entity] = correctedShare
                self.__log.verbose(
                    "Final correction factor for entity %s is %.3f\n Final share is %.3f"
                    % (entity, entityCorrectionFactor, correctedShare))
        self.__log.verbose("Initial shares:\n  %s" % "\n  ".join([
            "%s : %.2f" % (en, entitiesExpectedShare[en])
            for en in entitiesExpectedShare
        ]))
        self.__log.verbose("Corrected shares:\n  %s" % "\n  ".join([
            "%s : %.2f" % (en, correctedEntityShare[en])
            for en in correctedEntityShare
        ]))
        return correctedEntityShare
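
The heart of WMSHistoryCorrector.applyCorrection is the per-slice factor: the entity's normalized expected share divided by its normalized usage in that slice, clamped to [1/MaxCorrection, MaxCorrection] and weighted by the slice weight (entities absent from a slice simply get the maximum factor). A standalone sketch of that step, with made-up numbers and outside any DIRAC class:

def sliceCorrectionFactor(expectedShare, sliceUsage, sliceTotal,
                          maxCorrection=3.0, weight=1.0):
    # sketch of the factor computed inside applyCorrection() above
    if sliceUsage is None:                      # entity not present in this slice
        return maxCorrection
    factor = expectedShare / (sliceUsage / sliceTotal)
    factor = min(factor, maxCorrection)         # cap the boost
    factor = max(factor, 1.0 / maxCorrection)   # cap the penalty
    return factor * weight

# An entity entitled to 40% of the share but using only 10 out of 100 running
# jobs in this slice gets the maximum (capped) boost of 3.0:
assert sliceCorrectionFactor(0.4, 10.0, 100.0) == 3.0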
Code example #29
0
class RunningJobsBySiteSplitted_Command(Command):
  
  def doCommand(self, sites = None):
    """ 
    Returns running and previously run jobs, querying the WMSHistory
    for the last self.args[0] hours 
        
    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:
      
    """

    if sites is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      sites = RPC_RSS.getSitesList()
      if not sites['OK']:
        raise RSSException, where(self, self.doCommand) + " " + sites['Message'] 
      else:
        sites = sites['Value']
    
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = self.args[0])
    toD = datetime.datetime.utcnow()

    try:
      run_jobs = self.client.getReport('WMSHistory', 'NumberOfJobs', fromD, toD, 
                                       {}, 'Site')
      if not run_jobs['OK']:
        raise RSSException, where(self, self.doCommand) + " " + run_jobs['Message'] 
      else:
        run_jobs = run_jobs['Value']

    except:
      gLogger.exception("Exception when calling RunningJobsBySiteSplitted_Command")
      return {}
    
    listOfSites = run_jobs['data'].keys()
    
    plotGran = run_jobs['granularity']
    
    singlePlots = {}
    
    for site in listOfSites:
      if site in sites:
        plot = {}
        plot['data'] = {site: run_jobs['data'][site]}
        plot['granularity'] = plotGran
        singlePlots[site] = plot
    
    resToReturn = {'WMSHistory': singlePlots}

    return resToReturn


  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code example #30
0
class RunningJobsBySiteSplittedCommand(Command):

  def __init__(self, args=None, clients=None):

    super(RunningJobsBySiteSplittedCommand, self).__init__(args, clients)

    if 'ReportsClient' in self.apis:
      self.rClient = self.apis['ReportsClient']
    else:
      self.rClient = ReportsClient()

    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis['ReportGenerator']
    else:
      self.rgClient = RPCClient('Accounting/ReportGenerator')

    self.rClient.rpcClient = self.rgClient

  def doCommand(self):
    """
    Returns running and previously run jobs, querying the WMSHistory
    for the last self.args['hours'] hours

    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:

    """

    if 'hours' not in self.args:
      return S_ERROR('Number of hours not specified')
    hours = self.args['hours']

    sites = None
    if 'sites' in self.args:
      sites = self.args['sites']
    if sites is None:
      # FIXME: pointing to the CSHelper instead
      #      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
      #      if not sources[ 'OK' ]:
      #        return sources
      #      sources = [ si[0] for si in sources[ 'Value' ] ]
      sites = getSites()
      if not sites['OK']:
        return sites
      sites = sites['Value']

    if not sites:
      return S_ERROR('Sites is empty')

    fromD = datetime.utcnow() - timedelta(hours=hours)
    toD = datetime.utcnow()

    runJobs = self.rClient.getReport('WMSHistory', 'NumberOfJobs', fromD, toD,
                                     {}, 'Site')
    if not runJobs['OK']:
      return runJobs
    runJobs = runJobs['Value']

    if 'data' not in runJobs:
      return S_ERROR('Missing data key')
    if 'granularity' not in runJobs:
      return S_ERROR('Missing granularity key')

    singlePlots = {}

    for site, value in runJobs['data'].items():
      if site in sites:
        plot = {}
        plot['data'] = {site: value}
        plot['granularity'] = runJobs['granularity']
        singlePlots[site] = plot

    return S_OK(singlePlots)
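
Because every one of these commands takes its clients from self.apis when they are injected, they are easy to exercise without a live Accounting service. A minimal sketch, assuming the base Command stores the clients argument as self.apis; the stub, the site name and the numbers are made up:

from DIRAC import S_OK

class FakeReportsClient(object):
    # stub mimicking the shape of ReportsClient.getReport() results for this command
    def getReport(self, *args, **kwargs):
        return S_OK({'granularity': 3600,
                     'data': {'LCG.CERN.ch': {1600000000: 5, 1600003600: 7}}})

command = RunningJobsBySiteSplittedCommand(args={'hours': 2, 'sites': ['LCG.CERN.ch']},
                                           clients={'ReportsClient': FakeReportsClient(),
                                                    'ReportGenerator': None})
print(command.doCommand())   # S_OK with one single-site plot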
Code example #31
0
class DIRACAccounting_Command(Command):
  
  def doCommand(self):
    """ 
    Returns jobs accounting info for sites in the last 24h
    `args`: 
       - args[0]: string - should be a ValidRes
       
       - args[1]: string - should be the name of the ValidRes
       
       - args[2]: string - should be 'Job' or 'Pilot' or 'DataOperation'
         or 'WMSHistory' (??) or 'SRM' (??)
       
       - args[3]: string - should be the plot to generate (e.g. CPUEfficiency) 
       
       - args[4]: dictionary - e.g. {'Format': 'LastHours', 'hours': 24}
       
       - args[5]: string - should be the grouping
       
       - args[6]: dictionary - optional conditions
    """
    super(DIRACAccounting_Command, self).doCommand()
    
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    granularity = self.args[0]
    name = self.args[1]
    accounting = self.args[2]
    plot = self.args[3]
    period = self.args[4]
    if period['Format'] == 'LastHours':
      fromT = datetime.datetime.utcnow()-datetime.timedelta(hours = period['hours'])
      toT = datetime.datetime.utcnow()
    elif period['Format'] == 'Periods':
      #TODO
      pass
    grouping = self.args[5]
    try:
      if self.args[6] is not None:
        conditions = self.args[6]
      else:
        raise Exception
    except:
      conditions = {}
      if accounting == 'Job' or accounting == 'Pilot':
        if granularity == 'Resource':
          conditions['GridCE'] = [name]
        elif granularity == 'Service':
          conditions['Site'] = [name.split('@').pop()]
        elif granularity == 'Site':
          conditions['Site'] = [name]
        else:
          raise InvalidRes, where(self, self.doCommand)
      elif accounting == 'DataOperation':
        conditions['Destination'] = [name]
          
    try:

      res = self.client.getReport(accounting, plot, fromT, toT, conditions, grouping)
          
      if res['OK']:
        return {'Result':res['Value']}
      else:
        raise RSSException, where(self, self.doCommand) + ' ' + res['Message'] 

    except:
      gLogger.exception("Exception when calling ReportsClient for " + granularity + " " + name )
      return {'Result':'Unknown'}

  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code example #32
0
class DIRACAccountingCommand( Command ):
  
  def __init__( self, args = None, clients = None ):
    
    super( DIRACAccountingCommand, self ).__init__( args, clients )
    
    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis[ 'ReportGenerator' ]
    else:
      self.rgClient = RPCClient( 'Accounting/ReportGenerator' ) 

    if 'ReportsClient' in self.apis:
      self.rClient = self.apis[ 'ReportsClient' ]
    else:
      self.rClient = ReportsClient() 

    self.rClient.rpcClient = self.rgClient
  
  def doCommand( self ):
    """ 
    Returns jobs accounting info for sites in the last 24h
    `args`: 
       - args[0]: string - should be a ValidElement
       
       - args[1]: string - should be the name of the ValidElement
       
       - args[2]: string - should be 'Job' or 'Pilot' or 'DataOperation'
         or 'WMSHistory' (??) or 'SRM' (??)
       
       - args[3]: string - should be the plot to generate (e.g. CPUEfficiency) 
       
       - args[4]: dictionary - e.g. {'Format': 'LastHours', 'hours': 24}
       
       - args[5]: string - should be the grouping
       
       - args[6]: dictionary - optional conditions
    """

    granularity = self.args[0]
    name        = self.args[1]
    accounting  = self.args[2]
    plot        = self.args[3]
    period      = self.args[4]
    grouping    = self.args[5]
   
    if period[ 'Format' ] == 'LastHours':
      fromT = datetime.utcnow() - timedelta( hours = period[ 'hours' ] )
      toT   = datetime.utcnow()
    elif period[ 'Format' ] == 'Periods':
      #TODO
      pass
        
    if self.args[6] is not None:
      conditions = self.args[6]
    else:
      conditions = {}
      if accounting == 'Job' or accounting == 'Pilot':
        if granularity == 'Resource':
          conditions[ 'GridCE' ] = [ name ]
        elif granularity == 'Service':
          conditions[ 'Site' ] = [ name.split('@').pop() ]
        elif granularity == 'Site':
          conditions[ 'Site' ] = [ name ]
        else:
          return S_ERROR( '%s is not a valid granularity' % granularity )
      elif accounting == 'DataOperation':
        conditions[ 'Destination' ] = [ name ]

    return self.rClient.getReport( accounting, plot, fromT, toT, conditions, grouping )
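
A hedged sketch of the positional argument tuple DIRACAccountingCommand expects, assuming the base Command keeps args as given; the element name, plot and grouping are illustrative values, not a definitive list:

# hypothetical argument tuple; index 6 (conditions) may be None, in which case
# defaults are derived from the granularity/name pair as in doCommand() above
args = ('Site',                               # ValidElement
        'LCG.CERN.ch',                        # name of the ValidElement
        'Job',                                # accounting type
        'CPUEfficiency',                      # plot to generate
        {'Format': 'LastHours', 'hours': 24}, # period
        'JobType',                            # grouping
        None)                                 # optional conditions

command = DIRACAccountingCommand(args)
result = command.doCommand()   # forwards to ReportsClient.getReport(...)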
Code example #33
0
class TransferQualityCommand(Command):
    def __init__(self, args=None, clients=None):

        super(TransferQualityCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
        Return getQuality from DIRAC's accounting ReportsClient

        `args`: a tuple
          - args[0]: string: should be a ValidElement

          - args[1]: string should be the name of the ValidElement

          - args[2]: optional dateTime object: a "from" date

          - args[3]: optional dateTime object: a "to" date

        :returns:
          {'Result': None | a float between 0.0 and 100.0}
        """

        if "fromDate" not in self.args:
            fromDate = datetime.utcnow() - timedelta(hours=2)
        else:
            fromDate = self.args["fromDate"]

        if "toDate" not in self.args:
            toDate = datetime.utcnow()
        else:
            toDate = self.args["toDate"]

        if "name" not in self.args:
            return S_ERROR("name not specified")
        name = self.args["name"]

        results = self.rClient.getReport(
            "DataOperation",
            "Quality",
            fromDate,
            toDate,
            {
                "OperationType": "putAndRegister",
                "Destination": [name]
            },
            "Channel",
        )

        if not results["OK"]:
            return results

        pr_q_d = {
            channel: strToIntDict(value)
            for channel, value in results["Value"]["data"].items()
        }

        # FIXME: WHAT the hell is this doing ?
        values = []
        if len(pr_q_d) == 1:

            for k in pr_q_d:
                for n in pr_q_d[k].values():
                    values.append(n)
            res = sum(values) / len(values)  # FIXME: should convert to int?

        else:
            for n in pr_q_d["Total"].values():
                values.append(n)
            res = sum(values) / len(values)  # FIXME: should convert to int?

        return S_OK(res)
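
The averaging at the end of TransferQualityCommand.doCommand can be summarised in plain Python: with a single channel its per-bin qualities are averaged, otherwise the pre-aggregated 'Total' entry is used. A minimal sketch with made-up numbers:

def meanQuality(qualityByChannel):
    # mirrors the branch above; assumes a 'Total' entry when several channels are returned
    key = next(iter(qualityByChannel)) if len(qualityByChannel) == 1 else 'Total'
    values = list(qualityByChannel[key].values())
    return sum(values) / len(values)

print(meanQuality({'CERN -> PIC': {1600000000: 100, 1600003600: 90}}))   # 95.0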
Code example #34
0
class TransferCommand( Command ):
  '''
    Transfer "master" Command
  '''
  
  def __init__( self, args = None, clients = None ):
    
    super( TransferCommand, self ).__init__( args, clients )
    
    if 'ReportsClient' in self.apis:
      self.rClient = self.apis[ 'ReportsClient' ]
    else:
      self.rClient = ReportsClient() 

    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis[ 'ReportGenerator' ]
    else:
      self.rgClient = RPCClient( 'Accounting/ReportGenerator' )       
    
    self.rClient.rpcClient = self.rgClient  

    if 'ResourceManagementClient' in self.apis:
      self.rmClient = self.apis[ 'ResourceManagementClient' ]
    else:
      self.rmClient = ResourceManagementClient()
  
  def _storeCommand( self, results ):
    '''
      Stores the results of the doNew method in the database.
    '''

    for result in results:
      
      resQuery = self.rmClient.addOrModifyTransferCache( result[ 'SourceName' ], 
                                                         result[ 'DestinationName' ], 
                                                         result[ 'Metric' ], 
                                                         result[ 'Value' ] )
      if not resQuery[ 'OK' ]:
        return resQuery
    return S_OK()  
  
  def _prepareCommand( self ):
    '''
      TransferChannelCommand requires four arguments:
      - hours       : <int>
      - direction   : Source | Destination
      - elementName : <str>
      - metric      : Quality | FailedTransfers
      
      GGUSTickets are associated with gocDB names, so we have to transform the
      diracSiteName into a gocSiteName.    
    '''

    if not 'hours' in self.args:
      return S_ERROR( 'Number of hours not specified' )
    hours = self.args[ 'hours' ]

    if not 'direction' in self.args:
      return S_ERROR( 'direction is missing' )
    direction = self.args[ 'direction' ]

    if direction not in [ 'Source', 'Destination' ]:
      return S_ERROR( 'direction is not Source nor Destination' )

    if not 'name' in self.args:
      return S_ERROR( '"name" is missing' )
    name = self.args[ 'name' ]

    if not 'metric' in self.args:
      return S_ERROR( 'metric is missing' )
    metric = self.args[ 'metric' ]
    
    if metric not in [ 'Quality', 'FailedTransfers' ]:
      return S_ERROR( 'metric is not Quality nor FailedTransfers' )

    return S_OK( ( hours, name, direction, metric ) )
  
  def doNew( self, masterParams = None ):
    '''
      Gets the parameters to run, either from the master method or from its
      own arguments.

      For every elementName (it cannot process bulk queries) it contacts the
      accounting client. It returns dictionaries like { 'X -> Y' : { id: 100%.. } }

      If there are GGUS tickets, they are recorded and then returned.
    '''
    
    if masterParams is not None:
      hours, name, direction, metric = masterParams
      
    else:
      params = self._prepareCommand()
      if not params[ 'OK' ]:
        return params
      hours, name, direction, metric = params[ 'Value' ] 
    
    toD   = datetime.utcnow()    
    fromD = toD - timedelta( hours = hours )
        
    # dictionary with conditions for the accounting
    transferDict = { 
                     'OperationType' : 'putAndRegister',
                     direction       : name
                     }

    if metric == 'FailedTransfers':
      transferDict[ 'FinalStatus' ] = [ 'Failed' ]

    transferResults = self.rClient.getReport( 'DataOperation', metric, fromD, 
                                              toD, transferDict, 'Channel' )
    
    if not transferResults[ 'OK' ]:
      return transferResults
    transferResults = transferResults[ 'Value' ]
    
    if not 'data' in transferResults:
      return S_ERROR( 'Missing data key' )
    transferResults = transferResults[ 'data' ]

    uniformResult = []
           
    for channel, elementDict in transferResults.items():
      
      try:
        source, destination = channel.split( ' -> ' )
      except ValueError:
        continue  
      
      channelDict = {}
      channelDict[ 'SourceName' ]      = source
      channelDict[ 'DestinationName' ] = destination
      channelDict[ 'Metric' ]          = metric
      channelDict[ 'Value' ]           = sum( elementDict.values() ) / len( elementDict.values() )
      
      uniformResult.append( channelDict )
                 
    storeRes = self._storeCommand( uniformResult )
    if not storeRes[ 'OK' ]:
      return storeRes
    
    # Compute mean of all transfer channels
    value = 0
    for channelDict in uniformResult:
      value += channelDict[ 'Value' ]  

    if uniformResult:
      value = float( value ) / len( uniformResult )
    else:
      value = None              
           
    return S_OK( { 'Mean' : value, 'Name' : name } )  
    
  def doCache( self ):
    '''
      Method that reads from the cache table. It will return a list of
      dictionaries if there are results.
    '''
    
    params = self._prepareCommand()
    if not params[ 'OK' ]:
      return params
    _hours, name, direction, metric = params[ 'Value' ] 
    
    sourceName, destinationName = None, None
    if direction == 'Source':
      sourceName = name
    if direction == 'Destination':  
      destinationName = name
      
    result = self.rmClient.selectTransferCache( sourceName, destinationName, metric )  
    if not result[ 'OK' ]:
      return result
    
    result = [ dict( zip( result[ 'Columns' ], res ) ) for res in result[ 'Value' ] ]

    # Compute mean of all transfer channels
    value = 0
    for channelDict in result:
      value += channelDict[ 'Value' ]  

    if result:
      value = float( value ) / len( result )
    else:
      value = None              
           
    return S_OK( { 'Mean' : value, 'Name' : name } )  

  def doMaster( self ):
    '''
      Master method, which looks a little bit like spaghetti code, sorry!
      - It gets all Sites.
      - It gets all StorageElements.

      As there is no bulk query, it compares with what we have in the database
      and queries a portion of them.
    '''

    sites = CSHelpers.getSites()
    if not sites[ 'OK' ]:
      return sites
    sites = sites[ 'Value' ]
  
    ses = CSHelpers.getStorageElements()
    if not ses[ 'OK' ]:
      return ses
    ses = ses[ 'Value' ]
      
    elementNames = sites + ses   

#    sourceQuery = self.rmClient.selectTransferCache( meta = { 'columns' : [ 'SourceName' ] } )
#    if not sourceQuery[ 'OK' ]:
#      return sourceQuery
#    sourceQuery = [ element[0] for element in sourceQuery[ 'Value' ] ]
#    
#    sourceElementsToQuery = list( set( elementNames ).difference( set( sourceQuery ) ) )
    gLogger.info( 'Processing %s' % ', '.join( elementNames ) )
 
    for metric in [ 'Quality', 'FailedTransfers' ]:
      for direction in [ 'Source', 'Destination' ]: 
        # 2 hours of window
        result = self.doNew( ( 2, elementNames, direction, metric )  ) 
        if not result[ 'OK' ]:
          self.metrics[ 'failed' ].append( result )
       
    return S_OK( self.metrics )
  
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
Code example #35
0
File: TransferCommand.py Project: TaykYoku/DIRAC
class TransferCommand(Command):
    """
    Transfer "master" Command
    """

    def __init__(self, args=None, clients=None):

        super(TransferCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

        if "ResourceManagementClient" in self.apis:
            self.rmClient = self.apis["ResourceManagementClient"]
        else:
            self.rmClient = ResourceManagementClient()

    def _storeCommand(self, results):
        """
        Stores the results of doNew method on the database.
        """

        for result in results:

            resQuery = self.rmClient.addOrModifyTransferCache(
                result["SourceName"], result["DestinationName"], result["Metric"], result["Value"]
            )
            if not resQuery["OK"]:
                return resQuery
        return S_OK()

    def _prepareCommand(self):
        """
        TransferChannelCommand requires four arguments:
        - hours       : <int>
        - direction   : Source | Destination
        - elementName : <str>
        - metric      : Quality | FailedTransfers

        GGUSTickets are associated with gocDB names, so we have to transform the
        diracSiteName into a gocSiteName.
        """

        if "hours" not in self.args:
            return S_ERROR("Number of hours not specified")
        hours = self.args["hours"]

        if "direction" not in self.args:
            return S_ERROR("direction is missing")
        direction = self.args["direction"]

        if direction not in ["Source", "Destination"]:
            return S_ERROR("direction is not Source nor Destination")

        if "name" not in self.args:
            return S_ERROR('"name" is missing')
        name = self.args["name"]

        if "metric" not in self.args:
            return S_ERROR("metric is missing")
        metric = self.args["metric"]

        if metric not in ["Quality", "FailedTransfers"]:
            return S_ERROR("metric is not Quality nor FailedTransfers")

        return S_OK((hours, name, direction, metric))

    def doNew(self, masterParams=None):
        """
        Gets the parameters to run, either from the master method or from its
        own arguments.

        For every elementName ( cannot process bulk queries.. ) contacts the
        accounting client. It reurns dictionaries like { 'X -> Y' : { id: 100%.. } }

        If there are ggus tickets, are recorded and then returned.
        """

        if masterParams is not None:
            hours, name, direction, metric = masterParams

        else:
            params = self._prepareCommand()
            if not params["OK"]:
                return params
            hours, name, direction, metric = params["Value"]

        toD = datetime.utcnow()
        fromD = toD - timedelta(hours=hours)

        # dictionary with conditions for the accounting
        transferDict = {"OperationType": "putAndRegister", direction: name}

        if metric == "FailedTransfers":
            transferDict["FinalStatus"] = ["Failed"]

        transferResults = self.rClient.getReport("DataOperation", metric, fromD, toD, transferDict, "Channel")

        if not transferResults["OK"]:
            return transferResults
        transferResults = transferResults["Value"]

        if "data" not in transferResults:
            return S_ERROR("Missing data key")

        transferResults = {channel: strToIntDict(value) for channel, value in transferResults["data"].items()}

        uniformResult = []

        for channel, elementDict in transferResults.items():

            try:
                source, destination = channel.split(" -> ")
            except ValueError:
                continue

            channelDict = {}
            channelDict["SourceName"] = source
            channelDict["DestinationName"] = destination
            channelDict["Metric"] = metric
            channelDict["Value"] = int(sum(elementDict.values()) / len(elementDict.values()))

            uniformResult.append(channelDict)

        storeRes = self._storeCommand(uniformResult)
        if not storeRes["OK"]:
            return storeRes

        # Compute mean of all transfer channels
        value = 0
        for channelDict in uniformResult:
            value += channelDict["Value"]

        if uniformResult:
            value = float(value) / len(uniformResult)
        else:
            value = None

        return S_OK({"Mean": value, "Name": name})

    def doCache(self):
        """
        Method that reads the cache table and tries to read from it. It will
        return a list of dictionaries if there are results.
        """

        params = self._prepareCommand()
        if not params["OK"]:
            return params
        _hours, name, direction, metric = params["Value"]

        sourceName, destinationName = None, None
        if direction == "Source":
            sourceName = name
        if direction == "Destination":
            destinationName = name

        result = self.rmClient.selectTransferCache(sourceName, destinationName, metric)
        if not result["OK"]:
            return result

        result = [dict(zip(result["Columns"], res)) for res in result["Value"]]

        # Compute mean of all transfer channels
        value = 0
        for channelDict in result:
            value += channelDict["Value"]

        if result:
            value = float(value) / len(result)
        else:
            value = None

        return S_OK({"Mean": value, "Name": name})

    def doMaster(self):
        """
        Master method, which looks little bit spaguetti code, sorry !
        - It gets all Sites.
        - It gets all StorageElements

        As there is no bulk query, it compares with what we have on the database.
        It queries a portion of them.
        """

        sites = getSites()
        if not sites["OK"]:
            return sites
        sites = sites["Value"]

        elementNames = sites + DMSHelpers().getStorageElements()

        #    sourceQuery = self.rmClient.selectTransferCache( meta = { 'columns' : [ 'SourceName' ] } )
        #    if not sourceQuery[ 'OK' ]:
        #      return sourceQuery
        #    sourceQuery = [ element[0] for element in sourceQuery[ 'Value' ] ]
        #
        #    sourceElementsToQuery = list( set( elementNames ).difference( set( sourceQuery ) ) )
        self.log.info("Processing %s" % ", ".join(elementNames))

        for metric in ["Quality", "FailedTransfers"]:
            for direction in ["Source", "Destination"]:
                # 2 hours of window
                result = self.doNew((2, elementNames, direction, metric))
                if not result["OK"]:
                    self.metrics["failed"].append(result)

        return S_OK(self.metrics)
Code example #36
0
class FailedTransfersBySourceSplitted_Command(Command):
    def doCommand(self, sources=None, SEs=None):
        """ 
    Returns failed transfers using the DIRAC accounting system for every SE
    for the last self.args[0] hours 
        
    :params:
      :attr:`sources`: list of source sites (when not given, take every site)
    
      :attr:`SEs`: list of storage elements (when not given, take every SE)

    :returns:
      
    """

        if SEs is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            SEs = RPC_RSS.getStorageElementsList('Read')
            if not SEs['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + SEs['Message']
            else:
                SEs = SEs['Value']

        if sources is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            sources = RPC_RSS.getSitesList()
            if not sources['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + sources['Message']
            else:
                sources = sources['Value']

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        fromD = datetime.datetime.utcnow() - datetime.timedelta(
            hours=self.args[0])
        toD = datetime.datetime.utcnow()

        try:
            ft_source = self.client.getReport(
                'DataOperation', 'FailedTransfers', fromD, toD, {
                    'OperationType': 'putAndRegister',
                    'Source': sources + SEs,
                    'Destination': sources + SEs,
                    'FinalStatus': ['Failed']
                }, 'Source')
            if not ft_source['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + ft_source['Message']
            else:
                ft_source = ft_source['Value']

        except:
            gLogger.exception(
                "Exception when calling FailedTransfersBySourceSplitted_Command"
            )
            return {}

        listOfSources = ft_source['data'].keys()

        plotGran = ft_source['granularity']

        singlePlots = {}

        for source in listOfSources:
            if source in sources:
                plot = {}
                plot['data'] = {source: ft_source['data'][source]}
                plot['granularity'] = plotGran
                singlePlots[source] = plot

        resToReturn = {'DataOperation': singlePlots}

        return resToReturn

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code example #37
0
class FailedPilotsBySiteSplittedCommand( Command ):

  def __init__( self, args = None, clients = None ):

    super( FailedPilotsBySiteSplittedCommand, self ).__init__( args, clients )

    if 'ReportsClient' in self.apis:
      self.rClient = self.apis[ 'ReportsClient' ]
    else:
      self.rClient = ReportsClient()

    if 'ReportGenerator' in self.apis:
      self.rgClient = self.apis[ 'ReportGenerator' ]
    else:
      self.rgClient = RPCClient( 'Accounting/ReportGenerator' )

    self.rClient.rpcClient = self.rgClient

  def doCommand( self ):
    """
    Returns failed pilots using the DIRAC accounting system for every site
    for the last self.args['hours'] hours

    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:

    """

    if 'hours' not in self.args:
      return S_ERROR( 'Number of hours not specified' )
    hours = self.args[ 'hours' ]

    sites = None
    if 'sites' in self.args:
      sites = self.args[ 'sites' ]
    if sites is None:
#FIXME: pointing to the CSHelper instead
#      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
#      if not sources[ 'OK' ]:
#        return sources
#      sources = [ si[0] for si in sources[ 'Value' ] ]
      sites = CSHelpers.getSites()
      if not sites[ 'OK' ]:
        return sites
      sites = sites[ 'Value' ]

    if not sites:
      return S_ERROR( 'Sites is empty' )

    fromD = datetime.utcnow() - timedelta( hours = hours )
    toD   = datetime.utcnow()

    failedPilots = self.rClient.getReport( 'Pilot', 'NumberOfPilots', fromD, toD,
                                           { 'GridStatus' : [ 'Aborted' ],
                                              'Site'       : sites
                                           }, 'Site' )
    if not failedPilots[ 'OK' ]:
      return failedPilots
    failedPilots = failedPilots[ 'Value' ]

    if not 'data' in failedPilots:
      return S_ERROR( 'Missing data key' )
    if not 'granularity' in failedPilots:
      return S_ERROR( 'Missing granularity key' )

    singlePlots = {}

    for site, value in failedPilots[ 'data' ].items():
      if site in sites:
        plot                  = {}
        plot[ 'data' ]        = { site: value }
        plot[ 'granularity' ] = failedPilots[ 'granularity' ]
        singlePlots[ site ]   = plot

    return S_OK( singlePlots )
Code example #38
0
class RunningJobsBySiteSplitted_Command(Command):
    def doCommand(self, sites=None):
        """ 
    Returns running and previously run jobs, querying the WMSHistory
    for the last self.args[0] hours 
        
    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:
      
    """

        if sites is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            sites = RPC_RSS.getSitesList()
            if not sites['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + sites['Message']
            else:
                sites = sites['Value']

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        fromD = datetime.datetime.utcnow() - datetime.timedelta(
            hours=self.args[0])
        toD = datetime.datetime.utcnow()

        try:
            run_jobs = self.client.getReport('WMSHistory', 'NumberOfJobs',
                                             fromD, toD, {}, 'Site')
            if not run_jobs['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + run_jobs['Message']
            else:
                run_jobs = run_jobs['Value']

        except:
            gLogger.exception(
                "Exception when calling RunningJobsBySiteSplitted_Command")
            return {}

        listOfSites = run_jobs['data'].keys()

        plotGran = run_jobs['granularity']

        singlePlots = {}

        for site in listOfSites:
            if site in sites:
                plot = {}
                plot['data'] = {site: run_jobs['data'][site]}
                plot['granularity'] = plotGran
                singlePlots[site] = plot

        resToReturn = {'WMSHistory': singlePlots}

        return resToReturn

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code example #39
0
File: AccountingCacheCommand.py Project: rob-c/DIRAC
class FailedPilotsBySiteSplittedCommand(Command):
    def __init__(self, args=None, clients=None):

        super(FailedPilotsBySiteSplittedCommand, self).__init__(args, clients)

        if 'ReportsClient' in self.apis:
            self.rClient = self.apis['ReportsClient']
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
    Returns failed pilots using the DIRAC accounting system for every site
    for the last self.args['hours'] hours

    :params:
      :attr:`sites`: list of sites (when not given, take every site)

    :returns:

    """

        if 'hours' not in self.args:
            return S_ERROR('Number of hours not specified')
        hours = self.args['hours']

        sites = None
        if 'sites' in self.args:
            sites = self.args['sites']
        if sites is None:
            # FIXME: pointing to the CSHelper instead
            #      sources = self.rsClient.getSite( meta = {'columns': 'SiteName'} )
            #      if not sources[ 'OK' ]:
            #        return sources
            #      sources = [ si[0] for si in sources[ 'Value' ] ]
            sites = getSites()
            if not sites['OK']:
                return sites
            sites = sites['Value']

        if not sites:
            return S_ERROR('Sites is empty')

        fromD = datetime.utcnow() - timedelta(hours=hours)
        toD = datetime.utcnow()

        failedPilots = self.rClient.getReport('Pilot', 'NumberOfPilots', fromD,
                                              toD, {
                                                  'GridStatus': ['Aborted'],
                                                  'Site': sites
                                              }, 'Site')
        if not failedPilots['OK']:
            return failedPilots
        failedPilots = failedPilots['Value']

        if 'data' not in failedPilots:
            return S_ERROR('Missing data key')
        if 'granularity' not in failedPilots:
            return S_ERROR('Missing granularity key')

        failedPilots['data'] = {
            site: strToIntDict(value)
            for site, value in failedPilots['data'].items()
        }

        singlePlots = {}

        for site, value in failedPilots['data'].items():
            if site in sites:
                plot = {}
                plot['data'] = {site: value}
                plot['granularity'] = failedPilots['granularity']
                singlePlots[site] = plot

        return S_OK(singlePlots)
Code example #40
0
class FailedPilotsByCESplitted_Command(Command):
  
  def doCommand(self, CEs = None):
    """ 
    Returns failed pilots using the DIRAC accounting system for every CE 
    for the last self.args[0] hours 
        
    :params:
      :attr:`CEs`: list of CEs (when not given, take every CE)

    :returns:
      
    """

    if CEs is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      CEs = RPC_RSS.getCEsList()
      if not CEs['OK']:
        raise RSSException, where(self, self.doCommand) + " " + CEs['Message'] 
      else:
        CEs = CEs['Value']
    
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = self.args[0])
    toD = datetime.datetime.utcnow()

    try:
      failed_pilots = self.client.getReport('Pilot', 'NumberOfPilots', fromD, toD, 
                                            {'GridStatus':['Aborted'], 'GridCE':CEs}, 'GridCE')
      if not failed_pilots['OK']:
        raise RSSException, where(self, self.doCommand) + " " + failed_pilots['Message'] 
      else:
        failed_pilots = failed_pilots['Value']

    except:
      gLogger.exception("Exception when calling FailedPilotsByCESplitted_Command")
      return {}
    
    listOfCEs = failed_pilots['data'].keys()
    
    plotGran = failed_pilots['granularity']
    
    singlePlots = {}

    for CE in listOfCEs:
      if CE in CEs:
        plot = {}
        plot['data'] = {CE: failed_pilots['data'][CE]}
        plot['granularity'] = plotGran
        singlePlots[CE] = plot
    
    resToReturn = {'Pilot': singlePlots}

    return resToReturn

  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code example #41
0
class TransferQuality_Command(Command):
    def doCommand(self):
        """ 
    Return getQuality from DIRAC's accounting ReportsClient
    
    `args`: a tuple
      - args[0]: string: should be a ValidRes

      - args[1]: string should be the name of the ValidRes

      - args[2]: optional dateTime object: a "from" date
    
      - args[3]: optional dateTime object: a "to" date
      
    :returns:
      {'Result': None | a float between 0.0 and 100.0}
    """
        super(TransferQuality_Command, self).doCommand()

        if self.RPC is None:
            from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        try:
            if self.args[2] is None:
                fromD = datetime.datetime.utcnow() - datetime.timedelta(
                    hours=2)
            else:
                fromD = self.args[2]
        except:
            fromD = datetime.datetime.utcnow() - datetime.timedelta(hours=2)
        try:
            if self.args[3] is None:
                toD = datetime.datetime.utcnow()
            else:
                toD = self.args[3]
        except:
            toD = datetime.datetime.utcnow()

        try:
            pr_quality = self.client.getReport(
                'DataOperation', 'Quality', fromD, toD, {
                    'OperationType': 'putAndRegister',
                    'Destination': [self.args[1]]
                }, 'Channel')

            if not pr_quality['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + pr_quality['Message']

        except:
            gLogger.exception(
                "Exception when calling ReportsClient for %s %s" %
                (self.args[0], self.args[1]))
            return {'Result': 'Unknown'}

        pr_q_d = pr_quality['Value']['data']

        if pr_q_d == {}:
            return {'Result': None}
        else:
            if len(pr_q_d) == 1:
                values = []
                for k in pr_q_d.keys():
                    for n in pr_q_d[k].values():
                        values.append(n)
                return {'Result': sum(values) / len(values)}
            else:
                values = []
                for n in pr_q_d['Total'].values():
                    values.append(n)
                return {'Result': sum(values) / len(values)}

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
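A hedged usage sketch of the args tuple described in the docstring above; assigning the args attribute directly (rather than through whatever setters the Command base class provides) and the SE name 'CERN-USER' are assumptions for illustration.

import datetime

cmd = TransferQuality_Command()
cmd.args = ('StorageElement',                                          # args[0]: a ValidRes
            'CERN-USER',                                               # args[1]: name of the ValidRes (hypothetical)
            datetime.datetime.utcnow() - datetime.timedelta(hours=2),  # args[2]: "from" date
            datetime.datetime.utcnow())                                # args[3]: "to" date
res = cmd.doCommand()
# res is {'Result': None}, {'Result': <float between 0.0 and 100.0>} or {'Result': 'Unknown'}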
Code Example #42
class TransferQualityByDestSplittedSite_Command(Command):
  
  def doCommand(self, sources = None, SEs = None):
    """ 
    Returns transfer quality, from the DIRAC accounting system, for every SE,
    grouped by site, for the last self.args[0] hours

    :params:
      :attr:`sources`: list of source sites (when not given, take every site)

      :attr:`SEs`: list of storage elements (when not given, take every SE)

    :returns:
      {'DataOperation': {site: {'data': {SE: timeSeries}, 'granularity': plotGranularity}}}
    """

    if SEs is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      SEs = RPC_RSS.getStorageElementsList()
      if not SEs['OK']:
        raise RSSException, where(self, self.doCommand) + " " + SEs['Message'] 
      else:
        SEs = SEs['Value']
    
    if sources is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      sources = RPC_RSS.getSitesList()
      if not sources['OK']:
        raise RSSException, where(self, self.doCommand) + " " + sources['Message'] 
      else:
        sources = sources['Value']
    
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = self.args[0])
    toD = datetime.datetime.utcnow()

    try:
      qualityAll = self.client.getReport('DataOperation', 'Quality', fromD, toD, 
                                         {'OperationType':'putAndRegister', 
                                          'Source':sources + SEs, 'Destination':sources + SEs}, 
                                          'Destination')
      if not qualityAll['OK']:
        raise RSSException, where(self, self.doCommand) + " " + qualityAll['Message'] 
      else:
        qualityAll = qualityAll['Value']

    except:
      gLogger.exception("Exception when calling TransferQualityByDestSplittedSite_Command")
      return {}
    
    listOfDest = qualityAll['data'].keys()
    
    try:
      storSitesWeb = RPC_RSS.getStorageElementsStatusWeb({'StorageElementName':listOfDest}, [], 0, 300)
    except NameError:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
      storSitesWeb = RPC_RSS.getStorageElementsStatusWeb({'StorageElementName':listOfDest}, [], 0, 300)
    
    if not storSitesWeb['OK']:
      raise RSSException, where(self, self.doCommand) + " " + storSitesWeb['Message'] 
    else:
      storSitesWeb = storSitesWeb['Value']['Records']
    
    SESiteMapping = {}
    siteSEMapping = {}
    
    for r in storSitesWeb:
      sites = r[2].split(' ')[:-1]
      SESiteMapping[r[0]] = sites
      
    for SE in SESiteMapping.keys():
      for site in SESiteMapping[SE]:
        siteSEMapping.setdefault(site, []).append(SE)
   
    
    plotGran = qualityAll['granularity']
    
    singlePlots = {}
    
    for site in siteSEMapping.keys():
      plot = {}
      plot['data'] = {}
      for SE in siteSEMapping[site]:
        plot['data'][SE] = qualityAll['data'][SE]
      plot['granularity'] = plotGran
    
      singlePlots[site] = plot
    
    resToReturn = {'DataOperation': singlePlots}

    return resToReturn

  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
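A hedged sketch showing how the per-site plots built above might be reduced to one mean quality per site; `result` stands for the {'DataOperation': singlePlots} return value and is an assumption for illustration.

def meanQualityPerSite(result):
  means = {}
  for site, plot in result.get('DataOperation', {}).items():
    values = []
    for series in plot['data'].values():
      values.extend(series.values())
    # average over every SE and time slot of the site; None when the site had no data
    means[site] = sum(values) / float(len(values)) if values else None
  return means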
Code Example #43
class TransferQualityEverySEs_Command(Command):
    def doCommand(self, SEs=None):
        """ 
    Returns transfer quality using the DIRAC accounting system for every SE 
        
    :params:
      :attr:`SEs`: list of storage elements (when not given, take every SE)
    
    :returns:
      {'SEName': {'TQ': mean transfer quality as a float} ...}
    """

        if SEs is None:
            #      from DIRAC.Core.DISET.RPCClient import RPCClient
            RPC_RSS = RPCClient("ResourceStatus/ResourceStatus")
            SEs = RPC_RSS.getStorageElementsList()
            if not SEs['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + SEs['Message']
            else:
                SEs = SEs['Value']

        if self.RPC is None:
            #      from DIRAC.Core.DISET.RPCClient import RPCClient
            self.RPC = RPCClient("Accounting/ReportGenerator",
                                 timeout=self.timeout)

        if self.client is None:
            from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
            self.client = ReportsClient(rpcClient=self.RPC)

        fromD = datetime.datetime.utcnow() - datetime.timedelta(hours=2)
        toD = datetime.datetime.utcnow()

        try:
            qualityAll = self.client.getReport(
                'DataOperation', 'Quality', fromD, toD, {
                    'OperationType': 'putAndRegister',
                    'Destination': SEs
                }, 'Channel')
            if not qualityAll['OK']:
                raise RSSException, where(
                    self, self.doCommand) + " " + qualityAll['Message']
            else:
                qualityAll = qualityAll['Value']['data']

        except:
            gLogger.exception(
                "Exception when calling TransferQualityEverySEs_Command")
            return {}

        listOfDestSEs = []

        for k in qualityAll.keys():
            try:
                key = k.split(' -> ')[1]
                if key not in listOfDestSEs:
                    listOfDestSEs.append(key)
            except:
                continue

        meanQuality = {}

        for destSE in listOfDestSEs:
            s = 0
            n = 0
            for k in qualityAll.keys():
                try:
                    if k.split(' -> ')[1] == destSE:
                        n = n + len(qualityAll[k])
                        s = s + sum(qualityAll[k].values())
                except:
                    continue
            meanQuality[destSE] = s / n

        resToReturn = {}

        for se in meanQuality:
            resToReturn[se] = {'TQ': meanQuality[se]}

        return resToReturn

    doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
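A hedged sketch that maps the per-SE mean quality returned above onto coarse labels; the 90/75 thresholds are illustrative assumptions, not DIRAC policy values.

def labelQuality(perSE):
  labels = {}
  for se, info in perSE.items():
    quality = info['TQ']
    # thresholds below are illustrative only
    if quality >= 90:
      labels[se] = 'Good'
    elif quality >= 75:
      labels[se] = 'Fair'
    else:
      labels[se] = 'Poor'
  return labels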
Code Example #44
class TransferQuality_Command(Command):

  def doCommand(self):
    """ 
    Return getQuality from DIRAC's accounting ReportsClient
    
    `args`: a tuple
      - args[0]: string: should be a ValidRes

      - args[1]: string: should be the name of the ValidRes

      - args[2]: optional dateTime object: a "from" date
    
      - args[3]: optional dateTime object: a "to" date
      
    :returns:
      {'Result': None | a float between 0.0 and 100.0}
    """
    super(TransferQuality_Command, self).doCommand()
   
    if self.RPC is None:
      from DIRAC.Core.DISET.RPCClient import RPCClient
      self.RPC = RPCClient("Accounting/ReportGenerator", timeout = self.timeout)
      
    if self.client is None:
      from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
      self.client = ReportsClient(rpcClient = self.RPC)

    try:
      if self.args[2] is None:
        fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = 2)
      else:
        fromD = self.args[2]
    except:
      fromD = datetime.datetime.utcnow()-datetime.timedelta(hours = 2)
    try:
      if self.args[3] is None:
        toD = datetime.datetime.utcnow()
      else:
        toD = self.args[3]
    except:
      toD = datetime.datetime.utcnow()

    try:
      pr_quality = self.client.getReport('DataOperation', 'Quality', fromD, toD, 
                                         {'OperationType':'putAndRegister', 
                                          'Destination':[self.args[1]]}, 'Channel')
      
      if not pr_quality['OK']:
        raise RSSException, where(self, self.doCommand) + " " + pr_quality['Message'] 

    except:
      gLogger.exception("Exception when calling ReportsClient for %s %s" %(self.args[0], self.args[1]))
      return {'Result':'Unknown'}
    
    pr_q_d = pr_quality['Value']['data']
    
    if pr_q_d == {}:
      return {'Result':None}
    else:
      if len(pr_q_d) == 1:
        values = []
        for k in pr_q_d.keys():
          for n in pr_q_d[k].values():
            values.append(n)
        return {'Result':sum(values)/len(values)}
      else:
        values = []
        for n in pr_q_d['Total'].values():
          values.append(n)
        return {'Result':sum(values)/len(values)} 
  
  doCommand.__doc__ = Command.doCommand.__doc__ + doCommand.__doc__
Code Example #45
class DIRACAccountingCommand(Command):
    def __init__(self, args=None, clients=None):

        super(DIRACAccountingCommand, self).__init__(args, clients)

        if "ReportsClient" in self.apis:
            self.rClient = self.apis["ReportsClient"]
        else:
            self.rClient = ReportsClient()

    def doCommand(self):
        """
        Returns accounting info for the given element and plot over the requested period

        Args:
           - args[0]: string - should be a ValidElement

           - args[1]: string - should be the name of the ValidElement

           - args[2]: string - should be 'Job' or 'Pilot' or 'DataOperation'
             or 'WMSHistory' (??)

           - args[3]: string - should be the plot to generate (e.g. CPUEfficiency)

           - args[4]: dictionary - e.g. {'Format': 'LastHours', 'hours': 24}

           - args[5]: string - should be the grouping

           - args[6]: dictionary - optional conditions
        """

        granularity = self.args[0]
        name = self.args[1]
        accounting = self.args[2]
        plot = self.args[3]
        period = self.args[4]
        grouping = self.args[5]

        if period["Format"] == "LastHours":
            fromT = datetime.utcnow() - timedelta(hours=period["hours"])
            toT = datetime.utcnow()
        elif period["Format"] == "Periods":
            # TODO
            pass

        if self.args[6] is not None:
            conditions = self.args[6]
        else:
            conditions = {}
            if accounting == "Job" or accounting == "Pilot":
                if granularity == "Resource":
                    conditions["GridCE"] = [name]
                elif granularity == "Service":
                    conditions["Site"] = [name.split("@").pop()]
                elif granularity == "Site":
                    conditions["Site"] = [name]
                else:
                    return S_ERROR("%s is not a valid granularity" %
                                   granularity)
            elif accounting == "DataOperation":
                conditions["Destination"] = [name]

        return self.rClient.getReport(accounting, plot, fromT, toT, conditions,
                                      grouping)
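A hedged usage sketch assembling the args tuple documented above; the site name 'LCG.CERN.cern' is hypothetical, ReportsClient is assumed to be imported as in this example's module, and it is assumed that the Command base class stores args as given so that index access works.

cmd = DIRACAccountingCommand(
    args=('Site',                                  # args[0]: ValidElement
          'LCG.CERN.cern',                         # args[1]: element name (hypothetical)
          'Job',                                   # args[2]: accounting type
          'CPUEfficiency',                         # args[3]: plot to generate
          {'Format': 'LastHours', 'hours': 24},    # args[4]: period
          'Site',                                  # args[5]: grouping
          None),                                   # args[6]: optional conditions
    clients={'ReportsClient': ReportsClient()})
res = cmd.doCommand()                              # S_OK with the report data, or S_ERROR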