Example #1
    def export_removeRegisters(self, entriesList):
        """
      Remove a record for a type
    """
        setup = self.serviceInfoDict["clientSetup"]
        expectedTypes = [basestring, datetime.datetime, datetime.datetime, list]
        for entry in entriesList:
            if len(entry) != 4:
                return S_ERROR("Invalid records")
            for i in range(len(entry)):
                if not isinstance(entry[i], expectedTypes[i]):
                    return S_ERROR("%s field in the records should be %s" % (i, expectedTypes[i]))
        ok = 0
        for entry in entriesList:
            startTime = int(Time.toEpoch(entry[1]))
            endTime = int(Time.toEpoch(entry[2]))
            record = entry[3]
            result = self.__acDB.deleteRecord(
                setup, entry[0], startTime, endTime, record
            )  # pylint: disable=too-many-function-args,no-member
            if not result["OK"]:
                return S_OK(ok)
            ok += 1

        return S_OK(ok)
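For orientation, a minimal sketch of the entry shape this handler validates: each element of entriesList is a 4-item sequence of (type name, start datetime, end datetime, record list). The type name and record payload below are invented for illustration, not taken from DIRAC.

import datetime

# One register entry in the shape export_removeRegisters checks:
# [str, datetime.datetime, datetime.datetime, list] -- anything else
# short-circuits with S_ERROR("Invalid records").
end = datetime.datetime.utcnow()
start = end - datetime.timedelta(hours=1)
entriesList = [("WMSHistory", start, end, ["Site", "User", 42])]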
Example #2
 def web_getHistoryValues(self):
     try:
         dbVars = [ str(f) for f in json.loads(self.request.arguments[ 'vars' ][0]) ]
     except:
         dbVars = [ 'Load', 'Jobs', 'TransferredFiles' ]
     try:
         timespan = int(self.request.arguments[ 'timespan' ][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getHistoryValues(3600, {}, dbVars, timespan)
     if not result[ 'OK' ]:
         callback = {"success":"false", "error":result[ 'Message' ]}
         return self.write(callback)
     svcData = result[ 'Value' ]
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData[ 'Records' ]:
         rL = []
         for iP in range(len(svcData[ 'ParameterNames' ])):
             param = svcData[ 'ParameterNames' ][iP]
             if param == 'Update':
                 rL.append(Time.toEpoch(record[iP]))
             else:
                 rL.append(record[iP])
         if rL[0] < olderThan:
             data.append(rL)
     callback = {"success":"true", 'data': data, 'fields' : svcData[ 'ParameterNames' ]}
     return self.write(callback)
Example #3
 def getPlotData( self ):
   retVal = self.__parseFormParams()
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   params = retVal[ 'Value' ]
   repClient = ReportsClient( rpcClient = getRPCClient( "Accounting/ReportGenerator" ) )
   retVal = repClient.getReport( *params )
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   rawData = retVal[ 'Value' ]
   groupKeys = rawData[ 'data' ].keys()
   groupKeys.sort()
   if 'granularity' in rawData:
     granularity = rawData[ 'granularity' ]
     data = rawData['data']
     tS = int( Time.toEpoch( params[2] ) )
     timeStart = tS - tS % granularity
     strData = "epoch,%s\n" % ",".join( groupKeys )
     for timeSlot in range( timeStart, int( Time.toEpoch( params[3] ) ), granularity ):
       lineData = [ str( timeSlot ) ]
       for key in groupKeys:
         if timeSlot in data[ key ]:
           lineData.append( str( data[ key ][ timeSlot ] ) )
         else:
           lineData.append( "" )
       strData += "%s\n" % ",".join( lineData )
   else:
     strData = "%s\n" % ",".join( groupKeys )
     strData += ",".join( [ str( rawData[ 'data' ][ k ] ) for k in groupKeys ] )
   response.headers['Content-type'] = 'text/csv'
   response.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % md5( str( params ) ).hexdigest()
   response.headers['Content-Length'] = len( strData )
   return strData
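The time-bucketed CSV assembly above can be illustrated with a self-contained toy (standard library only; the data layout mirrors rawData['data'], but the values are invented):

def to_csv(data, start, end, granularity):
    # data maps group key -> {bucketEpoch: value}; missing buckets
    # become empty CSV cells, as in getPlotData above.
    keys = sorted(data)
    t0 = start - start % granularity  # align start to the bucket grid
    lines = ["epoch," + ",".join(keys)]
    for t in range(t0, end, granularity):
        lines.append(",".join([str(t)] + [str(data[k][t]) if t in data[k] else "" for k in keys]))
    return "\n".join(lines) + "\n"

print(to_csv({"siteA": {0: 5}}, 30, 200, 100))
# epoch,siteA
# 0,5
# 100,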
Example #4
    def export_removeRegisters(self, entriesList):
        """
      Remove a record for a type
    """
        setup = self.serviceInfoDict['clientSetup']
        expectedTypes = [
            six.string_types, datetime.datetime, datetime.datetime, list
        ]
        for entry in entriesList:
            if len(entry) != 4:
                return S_ERROR("Invalid records")
            for i in range(len(entry)):
                if not isinstance(entry[i], expectedTypes[i]):
                    return S_ERROR("%s field in the records should be %s" %
                                   (i, expectedTypes[i]))
        ok = 0
        for entry in entriesList:
            startTime = int(Time.toEpoch(entry[1]))
            endTime = int(Time.toEpoch(entry[2]))
            record = entry[3]
            result = self.__acDB.deleteRecord(  # pylint: disable=no-member
                setup, entry[0], startTime, endTime, record)
            if not result['OK']:
                return S_OK(ok)
            ok += 1

        return S_OK(ok)
Example #5
 def getHistoryValues(self):
     try:
         dbVars = [str(f) for f in simplejson.loads(request.params['vars'])]
     except:
         dbVars = ['Load', 'Jobs', 'TransferredFiles']
     try:
         timespan = int(request.params['timespan'])
     except:
         timespan = 86400
     rpcClient = getRPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getHistoryValues(3600, {}, dbVars, timespan)
     if not result['OK']:
         return S_ERROR(result['Message'])
     svcData = result['Value']
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData['Records']:
         rL = []
         for iP in range(len(svcData['ParameterNames'])):
             param = svcData['ParameterNames'][iP]
             if param == 'Update':
                 rL.append(Time.toEpoch(record[iP]))
             else:
                 rL.append(record[iP])
         if rL[0] < olderThan:
             data.append(rL)
     return S_OK({'data': data, 'fields': svcData['ParameterNames']})
Example #6
 def web_getHistoryValues(self):
     try:
         dbVars = [
             str(f) for f in json.loads(self.request.arguments['vars'][0])
         ]
     except:
         dbVars = ['Load', 'Jobs', 'TransferredFiles']
     try:
         timespan = int(self.request.arguments['timespan'][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getHistoryValues(3600, {}, dbVars, timespan)
     if not result['OK']:
         callback = {"success": "false", "error": result['Message']}
         return self.write(callback)
     svcData = result['Value']
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData['Records']:
         rL = []
         for iP in range(len(svcData['ParameterNames'])):
             param = svcData['ParameterNames'][iP]
             if param == 'Update':
                 rL.append(Time.toEpoch(record[iP]))
             else:
                 rL.append(record[iP])
         if rL[0] < olderThan:
             data.append(rL)
     callback = {
         "success": "true",
         'data': data,
         'fields': svcData['ParameterNames']
     }
     return self.write(callback)
Example #7
    def export_removeRegisters(self, entriesList):
        """
      Remove a record for a type
    """
        setup = self.serviceInfoDict['clientSetup']
        expectedTypes = [
            types.StringType, Time._dateTimeType, Time._dateTimeType,
            types.ListType
        ]
        for entry in entriesList:
            if len(entry) != 4:
                return S_ERROR("Invalid records")
            for i in range(len(entry)):
                if type(entry[i]) != expectedTypes[i]:
                    return S_ERROR("%s field in the records should be %s" %
                                   (i, expectedTypes[i]))
        ok = 0
        for entry in entriesList:
            typeName = "%s_%s" % (setup, entry[0])
            startTime = int(Time.toEpoch(entry[1]))
            endTime = int(Time.toEpoch(entry[2]))
            record = entry[3]
            result = gAccountingDB.deleteRecord(typeName, startTime, endTime,
                                                record)
            if not result['OK']:
                return S_OK(ok)
            ok += 1

        return S_OK(ok)
Example #8
def generateDocs(data, withTimeStamp=True):
    """Generator for fast bulk indexing, yields docs

    :param list data: list of dictionaries
    :param bool withTimeStamp: add the timestamps to the docs

    :return: doc
    """
    for doc in copy.deepcopy(data):
        if "_type" not in doc:
            doc["_type"] = "_doc"
        if withTimeStamp:
            if "timestamp" not in doc:
                sLog.warn("timestamp is not given")

            # if the timestamp is not provided, we use the current utc time.
            timestamp = doc.get("timestamp", int(Time.toEpoch()))
            try:
                if isinstance(timestamp, datetime):
                    doc["timestamp"] = int(timestamp.strftime("%s")) * 1000
                elif isinstance(timestamp, six.string_types):
                    timeobj = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f")
                    doc["timestamp"] = int(timeobj.strftime("%s")) * 1000
                else:  # we assume the timestamp is a unix epoch time (integer).
                    doc["timestamp"] = timestamp * 1000
            except (TypeError, ValueError) as e:
                # in case we are not able to convert the timestamp to epoch time....
                sLog.error("Wrong timestamp", e)
                doc["timestamp"] = int(Time.toEpoch()) * 1000

        sLog.debug("yielding %s" % doc)
        yield doc
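The timestamp normalisation in generateDocs can be restated as a standalone helper; note the original relies on strftime('%s'), a platform-specific (glibc) directive, which calendar.timegm avoids. This is a hedged sketch, not DIRAC API:

import calendar
import time
from datetime import datetime

def to_epoch_millis(timestamp):
    # Accept a datetime, a "%Y-%m-%d %H:%M:%S.%f" string, or epoch seconds,
    # and return epoch milliseconds; fall back to "now" on bad input.
    try:
        if isinstance(timestamp, datetime):
            return calendar.timegm(timestamp.timetuple()) * 1000
        if isinstance(timestamp, str):
            obj = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f")
            return calendar.timegm(obj.timetuple()) * 1000
        return int(timestamp) * 1000
    except (TypeError, ValueError):
        return int(time.time()) * 1000

print(to_epoch_millis("2021-03-01 12:00:00.000"))  # 1614600000000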
Example #9
 def getPlotData( self ):
   retVal = self.__parseFormParams()
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   params = retVal[ 'Value' ]
   repClient = ReportsClient( rpcClient = getRPCClient( "Accounting/ReportGenerator" ) )
   retVal = repClient.getReport( *params )
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   rawData = retVal[ 'Value' ]
   groupKeys = rawData[ 'data' ].keys()
   groupKeys.sort()
   if 'granularity' in rawData:
     granularity = rawData[ 'granularity' ]
     data = rawData['data']
     tS = int( Time.toEpoch( params[2] ) )
     timeStart = tS - tS % granularity
     strData = "epoch,%s\n" % ",".join( groupKeys )
     for timeSlot in range( timeStart, int( Time.toEpoch( params[3] ) ), granularity ):
       lineData = [ str( timeSlot ) ]
       for key in groupKeys:
         if timeSlot in data[ key ]:
           lineData.append( str( data[ key ][ timeSlot ] ) )
         else:
           lineData.append( "" )
       strData += "%s\n" % ",".join( lineData )
   else:
     strData = "%s\n" % ",".join( groupKeys )
     strData += ",".join( [ str( rawData[ 'data' ][ k ] ) for k in groupKeys ] )
   response.headers['Content-type'] = 'text/csv'
   response.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % md5( str( params ) ).hexdigest()
   response.headers['Content-Length'] = len( strData )
   return strData
Example #10
 def web_getRunningInstancesByImageHistory(self):
     try:
         bucketSize = int(self.request.arguments['bucketSize'][0])
     except:
         bucketSize = 900
     try:
         timespan = int(self.request.arguments['timespan'][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getRunningInstancesByImageHistory(
         timespan, bucketSize)
     if not result['OK']:
         callback = {"success": "false", "error": result['Message']}
         return self.write(callback)
     svcData = result['Value']
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData:
         eTime = Time.toEpoch(record[0])
         if eTime < olderThan:
             data.append([eTime, record[1], int(record[2])])
     callback = {"success": "true", 'data': data}
     return self.write(callback)
Example #11
 def web_getRunningInstancesHistory(self):
     try:
         bucketSize = int(self.request.arguments[ 'bucketSize' ][0])
     except:
         bucketSize = 900
     try:
         timespan = int(self.request.arguments[ 'timespan' ][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getRunningInstancesHistory(timespan, bucketSize)
     if not result[ 'OK' ]:
         callback = {"success":"false", "error":result[ 'Message' ]}
         return self.write(callback)
     svcData = result[ 'Value' ]
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData:
         eTime = Time.toEpoch(record[0])
         if eTime < olderThan:
             data.append([ eTime, int(record[1]) ])
     callback = {"success":"true", 'data': data, "timespan": timespan}
     return self.write(callback)
Example #12
 def getHistoryValues( self ):
   try:
     dbVars = [ str( f ) for f in simplejson.loads( request.params[ 'vars' ] ) ]
   except:
     dbVars = [ 'Load', 'Jobs', 'TransferredFiles' ]
   try:
     timespan = int( request.params[ 'timespan' ] )
   except:
     timespan = 86400
   rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
   result = rpcClient.getHistoryValues( 3600, {}, dbVars, timespan )
   if not result[ 'OK' ]:
     return S_ERROR( result[ 'Message' ] )
   svcData = result[ 'Value' ]
   data = []
   olderThan = Time.toEpoch() - 400
   for record in svcData[ 'Records' ]:
     rL = []
     for iP in range( len( svcData[ 'ParameterNames' ] ) ):
       param = svcData[ 'ParameterNames' ][iP]
       if param == 'Update':
         rL.append( Time.toEpoch( record[iP] ) )
       else:
         rL.append( record[iP] )
     if rL[0] < olderThan:
       data.append( rL )
   return S_OK( { 'data': data, 'fields' : svcData[ 'ParameterNames' ] } )
Example #13
 def export_commitRegisters(self, entriesList):
     """
   Add a record for a type
 """
     setup = self.serviceInfoDict['clientSetup']
     expectedTypes = [
         six.string_types, datetime.datetime, datetime.datetime, list
     ]
     for entry in entriesList:
         if len(entry) != 4:
             return S_ERROR("Invalid records")
         for i, _ in enumerate(entry):
             if not isinstance(entry[i], expectedTypes[i]):
                 self.log.error(
                     "Unexpected type in report",
                     ": field %d in the records should be %s (and it is %s)"
                     % (i, expectedTypes[i], type(entry[i])))
                 return S_ERROR("Unexpected type in report")
     records = []
     for entry in entriesList:
         startTime = int(Time.toEpoch(entry[1]))
         endTime = int(Time.toEpoch(entry[2]))
         self.log.debug("inserting", entry)
         records.append((setup, entry[0], startTime, endTime, entry[3]))
     return self.__acDB.insertRecordBundleThroughQueue(records)
Example #14
 def export_commit( self, typeName, startTime, endTime, valuesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.insertRecordThroughQueue( setup, typeName, startTime, endTime, valuesList )
Example #15
 def export_commit( self, typeName, startTime, endTime, valuesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.insertRecordThroughQueue( setup, typeName, startTime, endTime, valuesList ) #pylint: disable=too-many-function-args,no-member
Example #16
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.deleteRecord( setup, typeName, startTime, endTime, valuesList )
Example #17
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.deleteRecord( setup, typeName, startTime, endTime, valuesList ) #pylint: disable=too-many-function-args,no-member
Example #18
  def bulk_index(self, indexprefix, doc_type='_doc', data=None, mapping=None, period=None):
    """
     :param str indexprefix: index name.
     :param str doc_type: the type of the document
     :param list data: contains a list of dictionaries
     :param dict mapping: the mapping used by elasticsearch
     :param str period: We can specify which kind of indices will be created.
                        Currently only daily and monthly indexes are supported.
    """
    sLog.verbose("Bulk indexing", "%d records will be inserted" % len(data))
    if mapping is None:
      mapping = {}

    indexName = self.generateFullIndexName(indexprefix, period)
    sLog.debug("Bulk indexing into %s/%s of %s" % (indexName, doc_type, data))

    if not self.exists(indexName):
      retVal = self.createIndex(indexprefix, mapping, period)
      if not retVal['OK']:
        return retVal
    docs = []
    for row in data:
      body = {
          '_index': indexName,
          '_type': doc_type,
          '_source': {}
      }
      body['_source'] = row

      if 'timestamp' not in row:
        sLog.warn("timestamp is not given! Note: the actual time is used!")

      # if the timestamp is not provided, we use the current utc time.
      timestamp = row.get('timestamp', int(Time.toEpoch()))
      try:
        if isinstance(timestamp, datetime):
          body['_source']['timestamp'] = int(timestamp.strftime('%s')) * 1000
        elif isinstance(timestamp, six.string_types):
          timeobj = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
          body['_source']['timestamp'] = int(timeobj.strftime('%s')) * 1000
        else:  # we assume the timestamp is a unix epoch time (integer).
          body['_source']['timestamp'] = timestamp * 1000
      except (TypeError, ValueError) as e:
        # in case we are not able to convert the timestamp to epoch time....
        sLog.error("Wrong timestamp", e)
        body['_source']['timestamp'] = int(Time.toEpoch()) * 1000
      docs += [body]
    try:
      res = bulk(self.__client, docs, chunk_size=self.__chunk_size)
    except BulkIndexError as e:
      return S_ERROR(e)

    if res[0] == len(docs):
      # we have inserted all documents...
      return S_OK(len(docs))
    return S_ERROR(res)
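For reference, the docs fed to elasticsearch.helpers.bulk above follow the standard bulk-action shape, and bulk() returns a (successCount, errors) pair, which is why the code compares res[0] with len(docs). A hedged, inert illustration (index name and payload are invented):

docs = [{
    "_index": "wmshistory-2020-05",     # invented index name
    "_type": "_doc",
    "_source": {"Status": "Running", "Jobs": 12, "timestamp": 1588889840000},
}]
# from elasticsearch import Elasticsearch
# from elasticsearch.helpers import bulk
# res = bulk(Elasticsearch(), docs, chunk_size=500)   # -> (inserted, errors)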
Example #19
 def export_commit(self, typeName, startTime, endTime, valuesList):
     """
   Add a record for a type
 """
     setup = self.serviceInfoDict['clientSetup']
     startTime = int(Time.toEpoch(startTime))
     endTime = int(Time.toEpoch(endTime))
     return self.__acDB.insertRecordThroughQueue(setup, typeName, startTime,
                                                 endTime, valuesList)  #pylint: disable=too-many-function-args,no-member
Example #20
 def export_remove(self, typeName, startTime, endTime, valuesList):
     """
   Remove a record for a type
 """
     setup = self.serviceInfoDict['clientSetup']
     startTime = int(Time.toEpoch(startTime))
     endTime = int(Time.toEpoch(endTime))
     return self.__acDB.deleteRecord(setup, typeName, startTime, endTime,
                                     valuesList)  #pylint: disable=too-many-function-args,no-member
Example #21
  def bulk_index(self, indexprefix, doc_type, data, mapping=None, period=None):
    """
     :param str indexprefix: index name.
     :param str doc_type: the type of the document
     :param list data: contains a list of dictionaries
     :param dict mapping: the mapping used by elasticsearch
     :param str period: We can specify which kind of indices will be created.
                        Currently only daily and monthly indexes are supported.
    """
    gLogger.info("%d records will be inserted into %s" % (len(data), doc_type))
    if mapping is None:
      mapping = {}

    indexName = generateFullIndexName(indexprefix, period)
    gLogger.debug("inserting data into %s index" % indexName)
    if not self.exists(indexName):
      retVal = self.createIndex(indexprefix, mapping, period)
      if not retVal['OK']:
        return retVal
    docs = []
    for row in data:
      body = {
          '_index': indexName,
          '_type': doc_type,
          '_source': {}
      }
      body['_source'] = row

      if 'timestamp' not in row:
        gLogger.warn("timestamp is not given! Note: the actual time is used!")

      # if the timestamp is not provided, we use the current utc time.
      timestamp = row.get('timestamp', int(Time.toEpoch()))
      try:
        if isinstance(timestamp, datetime):
          body['_source']['timestamp'] = int(timestamp.strftime('%s')) * 1000
        elif isinstance(timestamp, basestring):
          timeobj = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
          body['_source']['timestamp'] = int(timeobj.strftime('%s')) * 1000
        else:  # we assume the timestamp is a unix epoch time (integer).
          body['_source']['timestamp'] = timestamp * 1000
      except (TypeError, ValueError) as e:
        # in case we are not able to convert the timestamp to epoch time....
        gLogger.error("Wrong timestamp", e)
        body['_source']['timestamp'] = int(Time.toEpoch()) * 1000
      docs += [body]
    try:
      res = bulk(self.__client, docs, chunk_size=self.__chunk_size)
    except BulkIndexError as e:
      return S_ERROR(e)

    if res[0] == len(docs):
      # we have inserted all documents...
      return S_OK(len(docs))
    return S_ERROR(res)
Example #22
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   typeName = "%s_%s" % ( setup, typeName )
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return gAccountingDB.deleteRecord( typeName, startTime, endTime, valuesList )
Example #23
    def bulk_index(self, indexprefix, doc_type, data, mapping=None):
        """
    :param str indexPrefix: it is the index name.
    :param str doc_type: the type of the document
    :param data: contains a list of dictionary
    :type data: python:list
    """
        gLogger.info("%d records will be inserted into %s" % (len(data), doc_type))
        if mapping is None:
            mapping = {}

        indexName = generateFullIndexName(indexprefix)
        gLogger.debug("inserting data into %s index" % indexName)
        if not self.exists(indexName):
            retVal = self.createIndex(indexprefix, mapping)
            if not retVal['OK']:
                return retVal
        docs = []
        for row in data:
            body = {'_index': indexName, '_type': doc_type, '_source': {}}
            body['_source'] = row

            if 'timestamp' not in row:
                gLogger.warn(
                    "timestamp is not given! Note: the actual time is used!")

            timestamp = row.get(
                'timestamp', int(Time.toEpoch())
            )  #if the timestamp is not provided, we use the current utc time.
            try:
                if isinstance(timestamp, datetime):
                    body['_source']['timestamp'] = int(
                        timestamp.strftime('%s')) * 1000
                elif isinstance(timestamp, basestring):
                    timeobj = datetime.strptime(timestamp,
                                                '%Y-%m-%d %H:%M:%S.%f')
                    body['_source']['timestamp'] = int(
                        timeobj.strftime('%s')) * 1000
                else:  # we assume the timestamp is a unix epoch time (integer).
                    body['_source']['timestamp'] = timestamp * 1000
            except (TypeError, ValueError) as e:
                # in case we are not able to convert the timestamp to epoch time....
                gLogger.error("Wrong timestamp", e)
                body['_source']['timestamp'] = int(Time.toEpoch()) * 1000
            docs += [body]
        try:
            res = bulk(self.__client, docs, chunk_size=self.__chunk_size)
        except BulkIndexError as e:
            return S_ERROR(e)

        if res[0] == len(docs):
            # we have inserted all documents...
            return S_OK(len(docs))
        return S_ERROR(res)
Example #24
 def export_remove(self, typeName, startTime, endTime, valuesList):
     """
   Remove a record for a type
 """
     setup = self.serviceInfoDict['clientSetup']
     typeName = "%s_%s" % (setup, typeName)
     startTime = int(Time.toEpoch(startTime))
     endTime = int(Time.toEpoch(endTime))
     return gAccountingDB.deleteRecord(typeName, startTime, endTime,
                                       valuesList)
Example #25
 def plotView(self):
     """
 Plot a saved view
 """
     plotRequest = {}
     try:
         webRequest = simplejson.loads(request.params['plotRequest'])
         if 'id' not in webRequest:
             return S_ERROR("Missing viewID in plot request")
         plotRequest['id'] = webRequest['id']
         if 'size' not in webRequest:
             return S_ERROR("Missing plotsize in plot request")
         plotRequest['size'] = webRequest['size']
         if 'time' not in webRequest:
             return S_ERROR("Missing time span in plot request")
         timeReq = webRequest['time']
         if timeReq['timespan'] < 0:
             toSecs = self.__dateToSecs(str(timeReq['toDate']))
             fromSecs = self.__dateToSecs(str(timeReq['fromDate']))
         else:
             toSecs = int(Time.toEpoch())
             fromSecs = toSecs - timeReq['timespan']
         plotRequest['fromSecs'] = fromSecs
         plotRequest['toSecs'] = toSecs
         if 'varData' in webRequest:
             plotRequest['varData'] = webRequest['varData']
      except Exception as e:
         return self.__translateToExpectedExtResult(
             S_ERROR("Error while processing plot parameters: %s" % str(e)))
Example #26
 def tryView(self):
     """
 Try plotting graphs for a view
 """
     try:
         plotRequest = simplejson.loads(request.params['plotRequest'])
         if 'timeLength' in request.params:
             timeLength = str(request.params['timeLength'])
             toSecs = int(Time.toEpoch())
             if timeLength == "hour":
                 fromSecs = toSecs - 3600
             elif timeLength == "day":
                 fromSecs = toSecs - 86400
             elif timeLength == "month":
                 fromSecs = toSecs - 2592000
              elif timeLength == "year":
                  fromSecs = toSecs - 31104000
             else:
                 return S_ERROR("Time length value not valid")
         else:
             fromDate = str(request.params['fromDate'])
             toDate = str(request.params['toDate'])
             fromSecs = self.__dateToSecs(fromDate)
             toSecs = self.__dateToSecs(toDate)
      except Exception as e:
         return S_ERROR("Error while processing plot parameters: %s" %
                        str(e))
Example #27
    def getActivitiesList(self):
        try:
            start = int(request.params['start'])
        except:
            start = 0
        try:
            limit = int(request.params['limit'])
        except:
            limit = 0

        try:
            sortField = str(request.params['sortField']).replace("_", ".")
            sortDir = str(request.params['sortDirection'])
            sort = [(sortField, sortDir)]
        except:
            sort = []
        rpcClient = getRPCClient("Framework/Monitoring")
        retVal = rpcClient.getActivitiesContents({}, sort, start, limit)
        if not retVal['OK']:
            return retVal
        svcData = retVal['Value']
        data = {'numActivities': svcData['TotalRecords'], 'activities': []}
        now = Time.toEpoch()
        for record in svcData['Records']:
            formatted = {}
            for i in range(len(svcData['Fields'])):
                formatted[svcData['Fields'][i].replace(".", "_")] = record[i]
            if 'activities_lastUpdate' in formatted:
                formatted['activities_lastUpdate'] = now - int(
                    formatted['activities_lastUpdate'])
            data['activities'].append(formatted)
        return data
Example #28
  def web_plotView( self ):

    plotRequest = {}
    try:
      if 'id' not in self.request.arguments:
        self.finish( { 'success' : "false", 'error' : "Missing viewID in plot request" } )
        return
      plotRequest[ 'id' ] = self.request.arguments[ 'id' ][0]
      if 'size' not in self.request.arguments:
        self.finish( { 'success' : "false", 'error' : "Missing plotsize in plot request" } )
        return
      plotRequest[ 'size' ] = int( self.request.arguments[ 'size' ][0] )

      timespan = int( self.request.arguments[ 'timespan' ][0] )
      if timespan < 0:
        toSecs = self.__dateToSecs( str( self.request.arguments[ 'toDate' ][0] ) )
        fromSecs = self.__dateToSecs( str( self.request.arguments[ 'fromDate' ][0] ) )
      else:
        toSecs = int( Time.toEpoch() )
        fromSecs = toSecs - timespan
      plotRequest[ 'fromSecs' ] = fromSecs
      plotRequest[ 'toSecs' ] = toSecs
      if 'varData' in self.request.arguments:
        plotRequest[ 'varData' ] = dict( json.loads( self.request.arguments[ 'varData' ][0] ) )
    except Exception as e:
      self.finish( { 'success' : "false", 'error' : "Error while processing plot parameters: %s" % str( e ) } )
      return
Example #29
    def __consolidateMarks(self, allData):
        """
        Copies all marks except last step ones and consolidates them.

        :type allData: bool
        :param allData: This is used to indicate whether all the data is present or not.
        :return: dictionary of consolidatedMarks.
        """
        consolidatedMarks = {}
        remainderMarks = {}
        for key in self.activitiesMarks:
            if allData:
                lastStepToSend = int(Time.toEpoch())
            else:
                lastStepToSend = self.__UTCStepTime(key)
            consolidatedMarks[key] = {}
            remainderMarks[key] = {}
            for markTime in self.activitiesMarks[key]:
                markValue = self.activitiesMarks[key][markTime]
                if markTime >= lastStepToSend:
                    remainderMarks[key][markTime] = markValue
                else:
                    consolidatedMarks[key][markTime] = markValue
                    # Consolidate the copied ones
                    totalValue = 0
                    for mark in consolidatedMarks[key][markTime]:
                        totalValue += mark
                    if self.activitiesDefinitions[key]["type"] == self.OP_MEAN:
                        totalValue /= len(consolidatedMarks[key][markTime])
                    consolidatedMarks[key][markTime] = totalValue
            if len(consolidatedMarks[key]) == 0:
                del consolidatedMarks[key]
        self.activitiesMarks = remainderMarks
        return consolidatedMarks
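A self-contained toy of the consolidation rule above, assuming the same data shape ({activity: {markTime: [values]}}): buckets at or after the last complete step are held back, older buckets collapse to a sum, or to a mean for OP_MEAN activities.

OP_MEAN = "mean"

def consolidate(marks, definitions, lastStepToSend):
    consolidated, remainder = {}, {}
    for key, buckets in marks.items():
        consolidated[key], remainder[key] = {}, {}
        for markTime, values in buckets.items():
            if markTime >= lastStepToSend:
                remainder[key][markTime] = values   # keep for the next cycle
            else:
                total = sum(values)
                if definitions[key]["type"] == OP_MEAN:
                    total /= len(values)
                consolidated[key][markTime] = total
        if not consolidated[key]:
            del consolidated[key]
    return consolidated, remainder

marks = {"cpu": {100: [2, 4], 200: [8]}}
print(consolidate(marks, {"cpu": {"type": OP_MEAN}}, lastStepToSend=150))
# ({'cpu': {100: 3.0}}, {'cpu': {200: [8]}})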
Example #30
 def _mbReceivedMsg(self, trid, msgObj):
     result = self._authorizeProposal(
         ("Message", msgObj.getName()), trid,
         self._transportPool.get(trid).getConnectingCredentials())
     if not result["OK"]:
         return result
     result = self._instantiateHandler(trid)
     if not result["OK"]:
         return result
     handlerObj = result["Value"]
     response = handlerObj._rh_executeMessageCallback(msgObj)
     if self.activityMonitoring and response["OK"]:
          self.activityMonitoringReporter.addRecord({
              "timestamp": int(Time.toEpoch()),
              "host": Network.getFQDN(),
              "componentType": "service",
              "component": "_".join(self._name.split("/")),
              "componentLocation": self._cfg.getURL(),
              "ServiceResponseTime": response["Value"][1],
          })
     if response["OK"]:
         return response["Value"][0]
     else:
         return response
Example #31
    def registerActivity(self, sourceId, acName, acDict):
        """
    Register an activity.

    :type sourceId: string
    :param sourceId: The source id.
    :type acName: string
    :param acName: name of the activity.
    :type acDict: dictionary
    :param acDict: The activity dictionary containing information about 'category', 'description', 'bucketLength',
                                                                        'type', 'unit'.
    :return: a list of values.
    """
        m = hashlib.md5()
        acDict['name'] = acName
        acDict['sourceId'] = sourceId
        m.update(str(acDict).encode())
        retList = self.__select("filename", "activities", acDict)
        if len(retList) > 0:
            return retList[0][0]
        else:
            acDict['lastUpdate'] = int(Time.toEpoch() - 86000)
            filePath = m.hexdigest()
            filePath = "%s/%s.rrd" % (filePath[:2], filePath)
            self.log.info("Registering activity", str(acDict))
            # This is basically called by the ServiceInterface inside registerActivities method and then all the activity
            # information is stored in the sqlite3 db using the __insert method.

            if self.__insert("activities", {
                    'id': 'NULL',
                    'filename': "'%s'" % filePath,
            }, acDict) == 0:
                return -1
            return self.__select("filename", "activities", acDict)[0][0]
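The RRD file path above is derived purely from the md5 of the activity dictionary's string form, sharded by its first two hex characters; a minimal sketch (the dictionary contents are invented):

import hashlib

acDict = {"name": "cpu", "sourceId": "agent-1"}   # invented activity
digest = hashlib.md5(str(acDict).encode()).hexdigest()
filePath = "%s/%s.rrd" % (digest[:2], digest)
print(filePath)   # e.g. "3f/3f8e...rrd"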
Example #32
  def web_plotView(self):

    plotRequest = {}
    try:
      if 'id' not in self.request.arguments:
        self.finish({ 'success' : "false", 'error' : "Missing viewID in plot request" })
        return
      plotRequest[ 'id' ] = self.request.arguments[ 'id' ][0]
      if 'size' not in self.request.arguments:
        self.finish({ 'success' : "false", 'error' : "Missing plotsize in plot request" })
        return
      plotRequest[ 'size' ] = int(self.request.arguments[ 'size' ][0])

      timespan = int(self.request.arguments[ 'timespan' ][0])
      if timespan < 0:
        toSecs = self.__dateToSecs(str(self.request.arguments[ 'toDate' ][0]))
        fromSecs = self.__dateToSecs(str(self.request.arguments[ 'fromDate' ][0]))
      else:
        toSecs = int(Time.toEpoch())
        fromSecs = toSecs - timespan
      plotRequest[ 'fromSecs' ] = fromSecs
      plotRequest[ 'toSecs' ] = toSecs
      if 'varData' in self.request.arguments:
        plotRequest[ 'varData' ] = self.request.arguments[ 'varData' ][0]
    except Exception as e:
      self.finish({ 'success' : "false", 'error' : "Error while processing plot parameters: %s" % str(e) })
      return
Example #33
 def web_tryView(self):
   """
   Try plotting graphs for a view
   """
   try:
     plotRequest = json.loads(self.request.arguments[ 'plotRequest' ][0])
     if 'timeLength' in self.request.arguments:
       timeLength = str(self.request.arguments[ 'timeLength' ][0])
       toSecs = int(Time.toEpoch())
       if timeLength == "hour":
         fromSecs = toSecs - 3600
       elif timeLength == "day":
         fromSecs = toSecs - 86400
       elif timeLength == "month":
         fromSecs = toSecs - 2592000
        elif timeLength == "year":
          fromSecs = toSecs - 31104000
       else:
         self.finish({"success":"false", "error":"Time length value not valid"})
         return
     else:
       fromDate = str(self.request.arguments[ 'fromDate' ][0])
       toDate = str(self.request.arguments[ 'toDate' ][0])
       fromSecs = self.__dateToSecs(fromDate)
       toSecs = self.__dateToSecs(toDate)
    except Exception as e:
     self.finish({"success":"false", "error":"Error while processing plot parameters: %s" % str(e)})
     return
Example #34
 def tryView( self ):
   """
   Try plotting graphs for a view
   """
   try:
     plotRequest = simplejson.loads( request.params[ 'plotRequest' ] )
     if 'timeLength' in request.params:
       timeLength = str( request.params[ 'timeLength' ] )
       toSecs = int( Time.toEpoch() )
       if timeLength == "hour":
         fromSecs = toSecs - 3600
       elif timeLength == "day":
         fromSecs = toSecs - 86400
       elif timeLength == "month":
         fromSecs = toSecs - 2592000
        elif timeLength == "year":
          fromSecs = toSecs - 31104000
       else:
         return S_ERROR( "Time length value not valid" )
     else:
       fromDate = str( request.params[ 'fromDate' ] )
       toDate = str( request.params[ 'toDate' ] )
       fromSecs = self.__dateToSecs( fromDate )
       toSecs = self.__dateToSecs( toDate )
    except Exception as e:
     return S_ERROR( "Error while processing plot parameters: %s" % str( e ) )
Example #35
 def __consolidateMarks(self, allData):
     """
   Copies all marks except last step ones
   and consolidates them
 """
     consolidatedMarks = {}
     remainderMarks = {}
     for key in self.activitiesMarks:
         if allData:
             lastStepToSend = int(Time.toEpoch())
         else:
             lastStepToSend = self.__UTCStepTime(key)
         consolidatedMarks[key] = {}
         remainderMarks[key] = {}
         for markTime in self.activitiesMarks[key]:
             markValue = self.activitiesMarks[key][markTime]
             if markTime >= lastStepToSend:
                 remainderMarks[key][markTime] = markValue
             else:
                 consolidatedMarks[key][markTime] = markValue
                 # Consolidate the copied ones
                 totalValue = 0
                 for mark in consolidatedMarks[key][markTime]:
                     totalValue += mark
                 if self.activitiesDefinitions[key]['type'] == self.OP_MEAN:
                     totalValue /= len(consolidatedMarks[key][markTime])
                 consolidatedMarks[key][markTime] = totalValue
         if len(consolidatedMarks[key]) == 0:
             del (consolidatedMarks[key])
     self.activitiesMarks = remainderMarks
     return consolidatedMarks
Example #36
  def web_tryView( self ):
   """
   Try plotting graphs for a view
   """
   try:
     plotRequest = json.loads( self.request.arguments[ 'plotRequest' ][0] )
     if 'timeLength' in self.request.arguments:
       timeLength = str( self.request.arguments[ 'timeLength' ][0] )
       toSecs = int( Time.toEpoch() )
       if timeLength == "hour":
         fromSecs = toSecs - 3600
       elif timeLength == "day":
         fromSecs = toSecs - 86400
       elif timeLength == "month":
         fromSecs = toSecs - 2592000
        elif timeLength == "year":
          fromSecs = toSecs - 31104000
       else:
         self.finish( {"success":"false", "error":"Time length value not valid"} )
         return
     else:
       fromDate = str( self.request.arguments[ 'fromDate' ][0] )
       toDate = str( self.request.arguments[ 'toDate' ][0] )
       fromSecs = self.__dateToSecs( fromDate )
       toSecs = self.__dateToSecs( toDate )
    except Exception as e:
     self.finish( {"success":"false", "error":"Error while processing plot parameters: %s" % str( e )} )
     return
Example #37
def filterOngoing(selectOutput):
  """
    Selects all the ongoing downtimes
  """

  downtimes = selectOutput
  downtimesFiltered = []
  currentDate = Time.toEpoch(Time.dateTime())

  for dt in downtimes:
    dtStart = Time.toEpoch(dt['startDate'])
    dtEnd = Time.toEpoch(dt['endDate'])
    if (dtStart <= currentDate) and (dtEnd >= currentDate):
      downtimesFiltered.append(dt)

  return downtimesFiltered
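The same filter with only the standard library, as a hedged sketch (Time.toEpoch in DIRAC converts a datetime to unix seconds, so the comparison can equally stay in datetime space):

from datetime import datetime, timedelta

def filter_ongoing(downtimes, now=None):
    # Keep downtimes whose [startDate, endDate] window contains "now".
    now = now or datetime.utcnow()
    return [dt for dt in downtimes if dt["startDate"] <= now <= dt["endDate"]]

dts = [{"startDate": datetime.utcnow() - timedelta(hours=1),
        "endDate": datetime.utcnow() + timedelta(hours=1)}]
print(len(filter_ongoing(dts)))   # 1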
Example #38
 def __consolidateMarks( self, allData ):
   """
     Copies all marks except last step ones
     and consolidates them
   """
   consolidatedMarks = {}
   remainderMarks = {}
   for key in self.activitiesMarks:
     if allData:
       lastStepToSend = int( Time.toEpoch() )
     else:
       lastStepToSend = self.__UTCStepTime( key )
     consolidatedMarks[ key ] = {}
     remainderMarks [ key ] = {}
     for markTime in self.activitiesMarks[ key ]:
       markValue = self.activitiesMarks[ key ][ markTime ]
       if markTime >= lastStepToSend:
         remainderMarks[ key ][ markTime ] = markValue
       else:
         consolidatedMarks[ key ][ markTime ] = markValue
         # Consolidate the copied ones
         totalValue = 0
         for mark in consolidatedMarks[ key ][ markTime ]:
           totalValue += mark
         if self.activitiesDefinitions[ key ][ 'type' ] == self.OP_MEAN:
           totalValue /= len( consolidatedMarks[ key ][ markTime ] )
         consolidatedMarks[ key ][ markTime ] = totalValue
     if len( consolidatedMarks[ key ] ) == 0:
       del( consolidatedMarks[ key ] )
   self.activitiesMarks = remainderMarks
   return consolidatedMarks
Example #39
 def plotView( self ):
   """
   Plot a saved view
   """
   plotRequest = {}
   try:
     webRequest = simplejson.loads( request.params[ 'plotRequest' ] )
     if 'id' not in webRequest:
       return S_ERROR( "Missing viewID in plot request" )
     plotRequest[ 'id' ] = webRequest[ 'id' ]
     if 'size' not in webRequest:
       return S_ERROR( "Missing plotsize in plot request" )
     plotRequest[ 'size' ] = webRequest[ 'size' ]
     if 'time' not in webRequest:
       return S_ERROR( "Missing time span in plot request" )
     timeReq = webRequest[ 'time' ]
     if timeReq[ 'timespan' ] < 0:
       toSecs = self.__dateToSecs( str( timeReq[ 'toDate' ] ) )
       fromSecs = self.__dateToSecs( str( timeReq[ 'fromDate' ] ) )
     else:
       toSecs = int( Time.toEpoch() )
       fromSecs = toSecs - timeReq[ 'timespan' ]
     plotRequest[ 'fromSecs' ] = fromSecs
     plotRequest[ 'toSecs' ] = toSecs
     if 'varData' in webRequest:
       plotRequest[ 'varData' ] = webRequest[ 'varData' ]
    except Exception as e:
     return self.__translateToExpectedExtResult( S_ERROR( "Error while processing plot parameters: %s" % str( e ) ) )
Example #40
  def getActivitiesList( self ):
    try:
      start = int( request.params[ 'start' ] )
    except:
      start = 0
    try:
      limit = int( request.params[ 'limit' ] )
    except:
      limit = 0

    try:
      sortField = str( request.params[ 'sortField' ] ).replace( "_", "." )
      sortDir = str( request.params[ 'sortDirection' ] )
      sort = [ ( sortField, sortDir ) ]
    except:
      sort = []
    rpcClient = getRPCClient( "Framework/Monitoring" )
    retVal = rpcClient.getActivitiesContents( {}, sort, start, limit )
    if not retVal[ 'OK' ]:
      return retVal
    svcData = retVal[ 'Value' ]
    data = { 'numActivities' : svcData[ 'TotalRecords' ], 'activities' : [] }
    now = Time.toEpoch()
    for record in svcData[ 'Records' ]:
      formatted = {}
      for i in range( len( svcData[ 'Fields' ] ) ):
        formatted[ svcData[ 'Fields' ][i].replace( ".", "_" ) ] = record[i]
      if 'activities_lastUpdate' in formatted:
        formatted[ 'activities_lastUpdate' ] = now - int( formatted[ 'activities_lastUpdate' ] )
      data[ 'activities' ].append( formatted )
    return data
Example #41
def filterOngoing( selectOutput ):
  '''
    Selects all the ongoing downtimes
  '''

  downtimes = selectOutput
  downtimesFiltered = []
  currentDate = Time.toEpoch( Time.dateTime() )

  for dt in downtimes:
    dtStart = Time.toEpoch( dt[ 'startDate' ] )
    dtEnd = Time.toEpoch( dt[ 'endDate' ] )
    if ( dtStart <= currentDate ) and ( dtEnd >= currentDate ):
      downtimesFiltered.append( dt )

  return downtimesFiltered
Example #42
    def getUniqueValue(self, indexName, key, orderBy=False):
        """
    :param str indexName: the name of the index which will be used for the query
    :param dict orderBy: it is a dictionary in case we want to order the result {key:'desc'} or {key:'asc'}
    :returns: a list of unique value for a certain key from the dictionary.
    """

        query = self._Search(indexName)

        endDate = datetime.utcnow()

        startDate = endDate - timedelta(days=30)

        timeFilter = self._Q('range',
                             timestamp={
                                 'lte': int(Time.toEpoch(endDate)) * 1000,
                                 'gte': int(Time.toEpoch(startDate)) * 1000,
                             })
        query = query.filter('bool', must=timeFilter)
        if orderBy:
            query.aggs.bucket(key,
                              'terms',
                              field=key,
                              size=self.RESULT_SIZE,
                              order=orderBy).metric(key,
                                                    'cardinality',
                                                    field=key)
        else:
            query.aggs.bucket(key, 'terms', field=key,
                              size=self.RESULT_SIZE).metric(key,
                                                            'cardinality',
                                                            field=key)

        try:
            query = query.extra(
                size=self.RESULT_SIZE)  # do not need the raw data.
            sLog.debug("Query", query.to_dict())
            result = query.execute()
        except TransportError as e:
            return S_ERROR(e)

        values = []
        for bucket in result.aggregations[key].buckets:
            values += [bucket['key']]
        del query
        sLog.debug("Nb of unique rows retrieved", len(values))
        return S_OK(values)
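_Search and _Q here wrap elasticsearch_dsl's Search and Q; a hedged standalone equivalent of the unordered branch (connection, index, and field names are placeholders) would be:

from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search, Q

client = Elasticsearch()                    # placeholder connection
s = Search(using=client, index="my_index-*")
s = s.filter("bool", must=Q("range", timestamp={"gte": 1588000000000,
                                                "lte": 1590600000000}))
s.aggs.bucket("Site", "terms", field="Site", size=10000)
s = s.extra(size=0)                         # aggregations only, no raw hits
# result = s.execute()
# values = [b["key"] for b in result.aggregations["Site"].buckets]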
Example #43
  def getUniqueValue(self, indexName, key, orderBy=False):
    """
     :param str indexName: the name of the index which will be used for the query
     :param dict orderBy: a dictionary in case we want to order the result {key:'desc'} or {key:'asc'}
     :returns: a list of unique values for a certain key from the dictionary.
    """

    query = self._Search(indexName)

    endDate = datetime.utcnow()

    startDate = endDate - timedelta(days=30)

    timeFilter = self._Q('range',
                         timestamp={'lte': int(Time.toEpoch(endDate)) * 1000,
                                    'gte': int(Time.toEpoch(startDate)) * 1000, })
    query = query.filter('bool', must=timeFilter)
    if orderBy:
      query.aggs.bucket(key,
                        'terms',
                        field=key,
                        size=self.RESULT_SIZE,
                        order=orderBy).metric(key,
                                              'cardinality',
                                              field=key)
    else:
      query.aggs.bucket(key,
                        'terms',
                        field=key,
                        size=self.RESULT_SIZE).metric(key,
                                                      'cardinality',
                                                      field=key)

    try:
      query = query.extra(size=self.RESULT_SIZE)  # do not need the raw data.
      gLogger.debug("Query", query.to_dict())
      result = query.execute()
    except TransportError as e:
      return S_ERROR(e)

    values = []
    for bucket in result.aggregations[key].buckets:
      values += [bucket['key']]
    del query
    gLogger.debug("Nb of unique rows retrieved", len(values))
    return S_OK(values)
Example #44
 def export_commitRegisters( self, entriesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   expectedTypes = [ types.StringType, Time._dateTimeType, Time._dateTimeType, types.ListType ]
   for entry in entriesList:
     if len( entry ) != 4:
       return S_ERROR( "Invalid records" )
     for i in range( len( entry ) ):
       if type( entry[i] ) != expectedTypes[i]:
         return S_ERROR( "%s field in the records should be %s" % ( i, expectedTypes[i] ) )
   records = []
   for entry in entriesList:
     startTime = int( Time.toEpoch( entry[1] ) )
     endTime = int( Time.toEpoch( entry[2] ) )
     records.append( ( setup, entry[0], startTime, endTime, entry[3] ) )
   return self.__acDB.insertRecordBundleThroughQueue( records )
Example #45
    def execute(self):
        """ Main execution method
    """
        # Get the WMS Snapshot!
        result = JobDB().getSummarySnapshot(self.__jobDBFields)
        now = Time.dateTime()
        if not result['OK']:
            self.log.error(
                "Can't get the JobDB summary",
                "%s: won't commit at this cycle" % result['Message'])
            return S_ERROR()

        # Now we try to commit
        values = result['Value'][1]

        self.log.info("Start sending records")
        for record in values:
            record = record[1:]
            rD = {}
            for fV in self.__summaryDefinedFields:
                rD[fV[0]] = fV[1]
            for iP in range(len(self.__summaryKeyFieldsMapping)):
                fieldName = self.__summaryKeyFieldsMapping[iP]
                rD[self.__renameFieldsMapping.get(fieldName,
                                                  fieldName)] = record[iP]
            record = record[len(self.__summaryKeyFieldsMapping):]
            for iP in range(len(self.__summaryValueFieldsMapping)):
                rD[self.__summaryValueFieldsMapping[iP]] = int(record[iP])

            for backend in self.datastores:
                if backend.lower() == 'monitoring':
                    rD['timestamp'] = int(Time.toEpoch(now))
                    self.datastores['Monitoring'].addRecord(rD)

                elif backend.lower() == 'accounting':
                    acWMS = WMSHistory()
                    acWMS.setStartTime(now)
                    acWMS.setEndTime(now)
                    acWMS.setValuesFromDict(rD)
                    retVal = acWMS.checkValues()
                    if not retVal['OK']:
                        self.log.error("Invalid accounting record ",
                                       "%s -> %s" % (retVal['Message'], rD))
                    else:
                        self.datastores['Accounting'].addRegister(acWMS)

        for backend, datastore in self.datastores.items():
            self.log.info("Committing to %s backend" % backend)
            result = datastore.commit()
            if not result['OK']:
                self.log.error("Couldn't commit WMS history to %s" % backend,
                               result['Message'])
                return S_ERROR()
            self.log.verbose("Done committing to %s backend" % backend)

        return S_OK()
Example #46
 def export_commitRegisters( self, entriesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   expectedTypes = [ basestring, datetime.datetime, datetime.datetime, list ]
   for entry in entriesList:
     if len( entry ) != 4:
       return S_ERROR( "Invalid records" )
     for i in range( len( entry ) ):
       if not isinstance(entry[i], expectedTypes[i]):
         gLogger.error( "Unexpected type in report",
                        ": field %d in the records should be %s (and it is %s)" % ( i, expectedTypes[i], type(entry[i])) )
         return S_ERROR( "Unexpected type in report" )
   records = []
   for entry in entriesList:
     startTime = int( Time.toEpoch( entry[1] ) )
     endTime = int( Time.toEpoch( entry[2] ) )
     records.append( ( setup, entry[0], startTime, endTime, entry[3] ) )
   return self.__acDB.insertRecordBundleThroughQueue( records )
Example #47
 def web_getCsvPlotData(self):
     callback = {}
     retVal = self.__parseFormParams()
      if not retVal['OK']:
          callback = {"success": "false", "error": retVal['Message']}
          self.finish(callback)
          return
     params = retVal['Value']
     repClient = ReportsClient(
         rpcClient=RPCClient("Accounting/ReportGenerator"))
     retVal = yield self.threadTask(repClient.getReport, *params)
      if not retVal['OK']:
          callback = {"success": "false", "error": retVal['Message']}
          self.finish(callback)
          return
     rawData = retVal['Value']
     groupKeys = rawData['data'].keys()
     groupKeys.sort()
     #     print rawData['data']
     if 'granularity' in rawData:
         granularity = rawData['granularity']
         data = rawData['data']
         tS = int(Time.toEpoch(params[2]))
         timeStart = tS - tS % granularity
         strData = "epoch,%s\n" % ",".join(groupKeys)
         for timeSlot in range(timeStart, int(Time.toEpoch(params[3])),
                               granularity):
             lineData = [str(timeSlot)]
             for key in groupKeys:
                 if timeSlot in data[key]:
                     lineData.append(str(data[key][timeSlot]))
                 else:
                     lineData.append("")
             strData += "%s\n" % ",".join(lineData)
     else:
         strData = "%s\n" % ",".join(groupKeys)
         strData += ",".join([str(rawData['data'][k]) for k in groupKeys])
     self.set_header('Content-type', 'text/csv')
     self.set_header(
         'Content-Disposition',
         'attachment; filename="%s.csv"' % md5(str(params)).hexdigest())
     self.set_header('Content-Length', len(strData))
     self.finish(strData)
Example #48
 def getRunningInstancesHistory(self):
     try:
         bucketSize = int(request.params['bucketSize'])
     except:
         bucketSize = 900
     try:
         timespan = int(request.params['timespan'])
     except:
         timespan = 86400
     rpcClient = getRPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getRunningInstancesHistory(timespan, bucketSize)
     if not result['OK']:
         return S_ERROR(result['Message'])
     svcData = result['Value']
     data = []
     olderThan = Time.toEpoch() - 400
      for record in svcData:
          eTime = Time.toEpoch(record[0])
          if eTime < olderThan:
              data.append([eTime, int(record[1])])
     return S_OK(data)
Example #49
 def getRunningInstancesHistory( self ):
   try:
     bucketSize = int( request.params[ 'bucketSize' ] )
   except:
     bucketSize = 900
   try:
     timespan = int( request.params[ 'timespan' ] )
   except:
     timespan = 86400
   rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
   result = rpcClient.getRunningInstancesHistory( timespan, bucketSize )
   if not result[ 'OK' ]:
     return S_ERROR( result[ 'Message' ] )
   svcData = result[ 'Value' ]
   data = []
   olderThan = Time.toEpoch() - 400
    for record in svcData:
      eTime = Time.toEpoch( record[0] )
      if eTime < olderThan:
        data.append( [ eTime, int( record[1] ) ] )
   return S_OK( data )
Example #50
  def export_removeRegisters( self, entriesList ):
    """
      Remove a record for a type
    """
    setup = self.serviceInfoDict[ 'clientSetup' ]
    expectedTypes = [ types.StringType, Time._dateTimeType, Time._dateTimeType, types.ListType ]
    for entry in entriesList:
      if len( entry ) != 4:
        return S_ERROR( "Invalid records" )
      for i in range( len( entry ) ):
        if type( entry[i] ) != expectedTypes[i]:
          return S_ERROR( "%s field in the records should be %s" % ( i, expectedTypes[i] ) )
    ok = 0
    for entry in entriesList:
      startTime = int( Time.toEpoch( entry[1] ) )
      endTime = int( Time.toEpoch( entry[2] ) )
      record = entry[3]
      result = self.__acDB.deleteRecord( setup, entry[0], startTime, endTime, record )
      if not result[ 'OK' ]:
        return S_OK( ok )
      ok += 1

    return S_OK( ok )
Example #51
  def web_getCsvPlotData(self):
    callback = {}
    retVal = self.__parseFormParams()
    if not retVal['OK']:
      callback = {"success": "false", "error": retVal['Message']}
      self.finish(callback)
      return
    params = retVal['Value']
    repClient = ReportsClient(rpcClient=RPCClient("Accounting/ReportGenerator"))
    retVal = yield self.threadTask(repClient.getReport, *params)
    if not retVal['OK']:
      callback = {"success": "false", "error": retVal['Message']}
      self.finish(callback)
      return
    rawData = retVal['Value']
    groupKeys = rawData['data'].keys()
    groupKeys.sort()
#     print rawData['data']
    if 'granularity' in rawData:
      granularity = rawData['granularity']
      data = rawData['data']
      tS = int(Time.toEpoch(params[2]))
      timeStart = tS - tS % granularity
      strData = "epoch,%s\n" % ",".join(groupKeys)
      for timeSlot in range(timeStart, int(Time.toEpoch(params[3])), granularity):
        lineData = [str(timeSlot)]
        for key in groupKeys:
          if timeSlot in data[key]:
            lineData.append(str(data[key][timeSlot]))
          else:
            lineData.append("")
        strData += "%s\n" % ",".join(lineData)
    else:
      strData = "%s\n" % ",".join(groupKeys)
      strData += ",".join([str(rawData['data'][k]) for k in groupKeys])
    self.set_header('Content-type', 'text/csv')
    self.set_header('Content-Disposition', 'attachment; filename="%s.csv"' % md5(str(params)).hexdigest())
    self.set_header('Content-Length', len(strData))
    self.finish(strData)
Exemplo n.º 52
0
 def export_commitMarks( self, sourceId, activitiesDict, componentExtraInfo = {} ):
   """
   Adds marks for activities
   """
   nowEpoch = Time.toEpoch()
   maxEpoch = nowEpoch + 7200
   minEpoch = nowEpoch - 86400
   invalidActivities = []
   for acName in activitiesDict:
     for time in activitiesDict[ acName ]:
       if time > maxEpoch or time < minEpoch:
         gLogger.info( "Time %s  ( [%s,%s] ) is invalid for activity %s" % ( time, minEpoch, maxEpoch, acName ) )
         invalidActivities.append( acName )
         break
   for acName in invalidActivities:
     gLogger.info( "Not commiting activity %s" % acName )
     del( activitiesDict[ acName ] )
   return gServiceInterface.commitMarks( sourceId, activitiesDict, componentExtraInfo )
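For reference, a hypothetical marks payload accepted by export_commitMarks: each activity name maps epoch timestamps to measured values. The activity names and values below are illustrative, not taken from the snippet above.

import time

nowEpoch = int(time.time())
# Timestamps must fall inside [now - 86400, now + 7200]; an activity with
# any mark outside that window is dropped wholesale by the checks above.
activitiesDict = {
    "CPUUsage": {nowEpoch - 60: 0.75, nowEpoch: 0.80},
    "MemoryUsage": {nowEpoch: 512.0},
}
sourceId = 42  # illustrative component identifier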
Exemplo n.º 53
0
  def __checkPlotRequest( self, reportRequest ):
    """
    It check the plot attributes. We have to make sure that all attributes which are needed are provided.

    :param dict reportRequest contains the plot attributes.

    """
    # If extraArgs is not there add it
    if 'extraArgs' not in reportRequest:
      reportRequest[ 'extraArgs' ] = {}
    if not isinstance( reportRequest[ 'extraArgs' ], self.__reportRequestDict[ 'extraArgs' ] ):
      return S_ERROR( "Extra args has to be of type %s" % self.__reportRequestDict[ 'extraArgs' ] )
    reportRequestExtra = reportRequest[ 'extraArgs' ]

    # Check sliding plots
    if 'lastSeconds' in reportRequestExtra:
      try:
        lastSeconds = long( reportRequestExtra[ 'lastSeconds' ] )
      except ValueError:
        gLogger.error( "lastSeconds key must be a number" )
        return S_ERROR( "Value Error" )
      if lastSeconds < 3600:
        return S_ERROR( "lastSeconds must be more than 3600" )
      now = Time.dateTime()  # this is a UTC time
      reportRequest[ 'endTime' ] = now
      reportRequest[ 'startTime' ] = now - datetime.timedelta( seconds = lastSeconds )
    else:
      # if end date is not there, just set it to now
      if not reportRequest.get( 'endTime' ):
        # check the existence of the endTime it can be present and empty
        reportRequest[ 'endTime' ] = Time.dateTime()
    # Check keys
    for key in self.__reportRequestDict:
      if key not in reportRequest:
        return S_ERROR( 'Missing mandatory field %s in plot request' % key )

      if not isinstance( reportRequest[ key ], self.__reportRequestDict[ key ] ):
        return S_ERROR( "Type mismatch for field %s (%s), required one of %s" % ( key,
                                                                                  str( type( reportRequest[ key ] ) ),
                                                                                  str( self.__reportRequestDict[ key ] ) ) )
      if key in ( 'startTime', 'endTime' ):
        reportRequest[ key ] = int( Time.toEpoch( reportRequest[ key ] ) )

    return S_OK( reportRequest )
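A hedged example of a request that passes this validation. The concrete key set comes from __reportRequestDict, which is not shown in this snippet, so the field names below are assumptions for illustration:

import datetime

reportRequest = {
    "typeName": "Job",                            # illustrative
    "reportName": "CPUEfficiency",                # illustrative
    "startTime": datetime.datetime(2017, 1, 1),
    "endTime": datetime.datetime(2017, 1, 2),
    "condDict": {},
    "grouping": "Site",
    # With 'lastSeconds' set, startTime/endTime are overwritten with a
    # sliding window ending now, then converted to integer epochs.
    "extraArgs": {"lastSeconds": 86400},
}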
Exemplo n.º 54
0
 def __checkPlotRequest(self, reportRequest):
     # If extraArgs is not there add it
     if "extraArgs" not in reportRequest:
         reportRequest["extraArgs"] = {}
     if type(reportRequest["extraArgs"]) != self.__reportRequestDict["extraArgs"]:
         return S_ERROR("Extra args has to be of type %s" % self.__reportRequestDict["extraArgs"])
     reportRequestExtra = reportRequest["extraArgs"]
     # Check sliding plots
     if "lastSeconds" in reportRequestExtra:
         try:
             lastSeconds = long(reportRequestExtra["lastSeconds"])
          except (ValueError, TypeError):
             return S_ERROR("lastSeconds key must be a number")
         if lastSeconds < 3600:
             return S_ERROR("lastSeconds must be more than 3600")
         now = Time.dateTime()
         reportRequest["endTime"] = now
         reportRequest["startTime"] = now - datetime.timedelta(seconds=lastSeconds)
     else:
          # if end date is not there, just set it to now
         if not reportRequest.get("endTime", False):
             reportRequest["endTime"] = Time.dateTime()
     # Check keys
     for key in self.__reportRequestDict:
          if key not in reportRequest:
              return S_ERROR("Missing mandatory field %s in plot request" % key)
         requestKeyType = type(reportRequest[key])
         if key in ("startTime", "endTime"):
             if requestKeyType not in self.__reportRequestDict[key]:
                 return S_ERROR(
                     "Type mismatch for field %s (%s), required one of %s"
                     % (key, str(requestKeyType), str(self.__reportRequestDict[key]))
                 )
             reportRequest[key] = int(Time.toEpoch(reportRequest[key]))
         else:
             if requestKeyType != self.__reportRequestDict[key]:
                 return S_ERROR(
                     "Type mismatch for field %s (%s), required %s"
                     % (key, str(requestKeyType), str(self.__reportRequestDict[key]))
                 )
     return S_OK(reportRequest)
Exemplo n.º 55
0
 def execute( self ):
   """ Main execution method
   """
   result = gConfig.getSections( "/DIRAC/Setups" )
   if not result[ 'OK' ]:
     return result
   validSetups = result[ 'Value' ]
   self.log.info( "Valid setups for this cycle are %s" % ", ".join( validSetups ) )
   # Get the WMS Snapshot!
   result = self.jobDB.getSummarySnapshot( self.__jobDBFields )
   now = Time.dateTime()
   if not result[ 'OK' ]:
     self.log.error( "Can't get the jobdb summary", result[ 'Message' ] )
   else:
     values = result[ 'Value' ][1]
     self.log.info( "Start sending records!" )
     for record in values:
       recordSetup = record[0]
       if recordSetup not in validSetups:
         self.log.error( "Setup %s is not valid" % recordSetup )
         continue
       record = record[1:]
       rD = {}
       for fV in self.__summaryDefinedFields:
         rD[ fV[0] ] = fV[1]
       for iP in range( len( self.__summaryKeyFieldsMapping ) ):
         fieldName = self.__summaryKeyFieldsMapping[iP]
         rD[ self.__renameFieldsMapping.get( fieldName, fieldName ) ] = record[iP]
       record = record[ len( self.__summaryKeyFieldsMapping ): ]
       for iP in range( len( self.__summaryValueFieldsMapping ) ):
         rD[ self.__summaryValueFieldsMapping[iP] ] = int( record[iP] )
       rD['timestamp'] = int( Time.toEpoch( now ) )       
       self.monitoringReporter.addRecord( rD )
     retVal = self.monitoringReporter.commit()
     if retVal['OK']:
       self.log.info( "The records are successfully sent to the Store!" )
     else:
       self.log.warn( "Faild to insert the records! It will be retried in the next iteration", retVal['Message'] )
       
   return S_OK()
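To make the field-mapping loop above concrete, a hedged walk-through of a single snapshot row. The field names come from __jobDBFields and the mapping attributes, which are not shown in this snippet, so everything below is purely illustrative:

# Suppose __summaryDefinedFields = [("OwnerDN", "unknown")],
# __summaryKeyFieldsMapping = ["Site", "Status"],
# __summaryValueFieldsMapping = ["Jobs"], no renames, and the row is
# ("Production", "LCG.CERN.ch", "Running", 25). The loop then builds:
rD = {
    "OwnerDN": "unknown",       # defined (constant) fields first
    "Site": "LCG.CERN.ch",      # key fields, after optional renaming
    "Status": "Running",
    "Jobs": 25,                 # value fields, cast to int
    "timestamp": 1483228800,    # int(Time.toEpoch(now))
}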
Exemplo n.º 56
0
 def registerActivity( self, sourceId, acName, acDict ):
   """
   Register an activity
   """
   m = hashlib.md5()
   acDict[ 'name' ] = acName
   acDict[ 'sourceId' ] = sourceId
   m.update( str( acDict ) )  # hash taken before 'lastUpdate' is added, so the file path stays stable
   retList = self.__select( "filename", "activities", acDict )
   if len( retList ) > 0:
     return retList[0][0]
   else:
     acDict[ 'lastUpdate' ] = int( Time.toEpoch() - 86000 )
     filePath = m.hexdigest()
     filePath = "%s/%s.rrd" % ( filePath[:2], filePath )
     self.log.info( "Registering activity", str( acDict ) )
     if self.__insert( "activities", { 'id' : 'NULL',
                                       'filename' : "'%s'" % filePath,
                                     },
                       acDict ) == 0:
       return -1
     return self.__select( "filename", "activities", acDict )[0][0]
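The file-path scheme used above, sketched standalone (Python 2 style to match the snippet; the activity keys are illustrative): the md5 of the activity dict is sharded into a two-character subdirectory, and because it is computed before 'lastUpdate' is set, re-registering the same activity yields the same .rrd path.

import hashlib

acDict = {"name": "CPUUsage", "sourceId": "agent-1"}  # illustrative keys
digest = hashlib.md5(str(acDict)).hexdigest()
filePath = "%s/%s.rrd" % (digest[:2], digest)
print(filePath)  # e.g. "8c/8c1f...rrd"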
Exemplo n.º 57
0
 def execute( self ):
   """ Main execution method
   """
   result = gConfig.getSections( "/DIRAC/Setups" )
   if not result[ 'OK' ]:
     return result
   validSetups = result[ 'Value' ]
   gLogger.info( "Valid setups for this cycle are %s" % ", ".join( validSetups ) )
   # Get the WMS Snapshot!
   result = self.jobDB.getSummarySnapshot( self.__jobDBFields )
   now = Time.dateTime()
   if not result[ 'OK' ]:
     gLogger.error( "Can't the the jobdb summary", result[ 'Message' ] )
   else:
     values = result[ 'Value' ][1]
     gLogger.info( "Start sending records!" )
     for record in values:
       recordSetup = record[0]
       if recordSetup not in validSetups:
         gLogger.error( "Setup %s is not valid" % recordSetup )
         continue
       record = record[1:]
       rD = {}
       for fV in self.__summaryDefinedFields:
         rD[ fV[0] ] = fV[1]
       for iP in range( len( self.__summaryKeyFieldsMapping ) ):
         fieldName = self.__summaryKeyFieldsMapping[iP]
         rD[ self.__renameFieldsMapping.get( fieldName, fieldName ) ] = record[iP]
       record = record[ len( self.__summaryKeyFieldsMapping ): ]
       for iP in range( len( self.__summaryValueFieldsMapping ) ):
         rD[ self.__summaryValueFieldsMapping[iP] ] = int( record[iP] )
       rD['startTime'] = int( Time.toEpoch( now ) )       
       rD['metric'] = 'WMSHistory'
       message = json.dumps( rD )
       self.sendRecords( message )
     gLogger.info( "The records are successfully sent!" )
       
   return S_OK()