def test_03_api( self ):
    """ DataLoggingDB API

    :param self: self reference
    """
    
    lfns = [ '/Test/00001234/File1', '/Test/00001234/File2' ] 
    fileTuples = tuple( [ ( lfn, "TestStatus", "MinorStatus", Time.toString(), Time.dateTime(), "Somewhere" ) 
                          for lfn in lfns ] )
    
    result = self.__db.addFileRecord( lfns, "TestStatus", date = '2012-04-28 09:49:02.545466' )
    self.assertEqual( result["OK"], True ) 
    self.assertEqual( result["Value"], 2 )
    self.assertEqual( result["lastRowId"], 2 )

    result = self.__db.addFileRecords( fileTuples )
    self.assertEqual( result["OK"], True )

    result = self.__db.getFileLoggingInfo( lfns[0] )
    self.assertEqual( result["OK"], True )
    self.assertEqual( len( result["Value"] ), 2 )

    result = self.__db.getFileLoggingInfo( lfns[1] )
    self.assertEqual( result["OK"], True )
    self.assertEqual( len( result["Value"] ), 2 )

    result = self.__db.getUniqueStates()
    self.assertEqual( result["OK"], True )
    self.assertEqual( result["Value"], [ "TestStatus" ] )

    result = self.__db._update( 'DROP TABLE `%s`' % self.__db.tableName )
    self.assertEqual( result["OK"], True )
Example #2
def getToken( key ):
  '''
    Gets the userName or the proxy expiration time from the proxy info
  '''

  proxyInfo = getProxyInfo()
  if not proxyInfo[ 'OK' ]:
    error( str( proxyInfo ) )

  if key.lower() == 'owner':
    userName = proxyInfo[ 'Value' ][ 'username' ]
    tokenOwner = S_OK( userName )
    if not tokenOwner[ 'OK' ]:
      error( tokenOwner[ 'Message' ] )
    return tokenOwner[ 'Value' ]

  elif key.lower() == 'expiration':
    expiration = proxyInfo[ 'Value' ][ 'secondsLeft' ]
    tokenExpiration = S_OK( expiration )
    if not tokenExpiration[ 'OK' ]:
      error( tokenExpiration[ 'Message' ] )

    now = Time.dateTime()
    #datetime.datetime.utcnow()
    expirationDate = now + datetime.timedelta( seconds=tokenExpiration['Value'] )
    expirationDate = Time.toString( expirationDate )
    expirationDate = expirationDate.split('.')[0]
    return expirationDate
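
A minimal usage sketch (hypothetical values; assumes the script context above, where error() reports the problem and exits):

# Hypothetical calls; 'owner' and 'expiration' are the only keys handled above.
tokenOwner = getToken( 'owner' )         # e.g. 'jdoe'
tokenExpiry = getToken( 'expiration' )   # e.g. '2012-04-28 09:49:02'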
Example #3
 def web_getHistoryValues(self):
     try:
         dbVars = [ str(f) for f in json.loads(self.request.arguments[ 'vars' ][0]) ]
     except:
         dbVars = [ 'Load', 'Jobs', 'TransferredFiles' ]
     try:
         timespan = int(self.request.arguments[ 'timespan' ][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getHistoryValues(3600, {}, dbVars, timespan)
     if not result[ 'OK' ]:
         callback = {"success":"false", "error":result[ 'Message' ]}
         return self.write(callback)
     svcData = result[ 'Value' ]
     data = []
     olderThan = Time.toEpoch() - 400
     for record in svcData[ 'Records' ]:
         rL = []
         for iP in range(len(svcData[ 'ParameterNames' ])):
             param = svcData[ 'ParameterNames' ][iP]
             if param == 'Update':
                 rL.append(Time.toEpoch(record[iP]))
             else:
                 rL.append(record[iP])
         if rL[0] < olderThan:
             data.append(rL)
     callback = {"success":"true", 'data': data, 'fields' : svcData[ 'ParameterNames' ]}
     return self.write(callback)
Example #4
def getToken(key):
    """
    Gets the userName or the proxy expiration time from the proxy info
    """

    proxyInfo = getProxyInfo()
    if not proxyInfo["OK"]:
        error(str(proxyInfo))

    if key.lower() == "owner":
        userName = proxyInfo["Value"]["username"]
        tokenOwner = S_OK(userName)
        if not tokenOwner["OK"]:
            error(tokenOwner["Message"])
        return tokenOwner["Value"]

    elif key.lower() == "expiration":
        expiration = proxyInfo["Value"]["secondsLeft"]
        tokenExpiration = S_OK(expiration)
        if not tokenExpiration["OK"]:
            error(tokenExpiration["Message"])

        now = Time.dateTime()
        # datetime.datetime.utcnow()
        expirationDate = now + datetime.timedelta(seconds=tokenExpiration["Value"])
        expirationDate = Time.toString(expirationDate)
        expirationDate = expirationDate.split(".")[0]
        return expirationDate
Example #5
    def export_removeRegisters(self, entriesList):
        """
        Remove a record for a type
        """
        setup = self.serviceInfoDict["clientSetup"]
        expectedTypes = [basestring, datetime.datetime, datetime.datetime, list]
        for entry in entriesList:
            if len(entry) != 4:
                return S_ERROR("Invalid records")
            for i in range(len(entry)):
                if not isinstance(entry[i], expectedTypes[i]):
                    return S_ERROR("%s field in the records should be %s" % (i, expectedTypes[i]))
        ok = 0
        for entry in entriesList:
            startTime = int(Time.toEpoch(entry[1]))
            endTime = int(Time.toEpoch(entry[2]))
            record = entry[3]
            result = self.__acDB.deleteRecord(
                setup, entry[0], startTime, endTime, record
            )  # pylint: disable=too-many-function-args,no-member
            if not result["OK"]:
                return S_OK(ok)
            ok += 1

        return S_OK(ok)
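
For reference, each entry in entriesList must match the four types checked above. A sketch of a valid call (handler and values are illustrative, not from the source):

import datetime
entries = [ ( 'DataOperation',                             # type name (basestring)
              datetime.datetime( 2012, 4, 28, 9, 0, 0 ),   # start time (datetime)
              datetime.datetime( 2012, 4, 28, 10, 0, 0 ),  # end time (datetime)
              [ 'valueA', 'valueB' ] ) ]                   # record values (list)
result = handler.export_removeRegisters( entries )         # -> S_OK( numberRemoved )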
Example #6
 def getPlotData( self ):
   retVal = self.__parseFormParams()
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   params = retVal[ 'Value' ]
   repClient = ReportsClient( rpcClient = getRPCClient( "Accounting/ReportGenerator" ) )
   retVal = repClient.getReport( *params )
   if not retVal[ 'OK' ]:
     c.error = retVal[ 'Message' ]
     return render( "/error.mako" )
   rawData = retVal[ 'Value' ]
   groupKeys = rawData[ 'data' ].keys()
   groupKeys.sort()
   if 'granularity' in rawData:
     granularity = rawData[ 'granularity' ]
     data = rawData['data']
     tS = int( Time.toEpoch( params[2] ) )
     timeStart = tS - tS % granularity
     strData = "epoch,%s\n" % ",".join( groupKeys )
     for timeSlot in range( timeStart, int( Time.toEpoch( params[3] ) ), granularity ):
       lineData = [ str( timeSlot ) ]
       for key in groupKeys:
         if timeSlot in data[ key ]:
           lineData.append( str( data[ key ][ timeSlot ] ) )
         else:
           lineData.append( "" )
       strData += "%s\n" % ",".join( lineData )
   else:
     strData = "%s\n" % ",".join( groupKeys )
     strData += ",".join( [ str( rawData[ 'data' ][ k ] ) for k in groupKeys ] )
   response.headers['Content-type'] = 'text/csv'
   response.headers['Content-Disposition'] = 'attachment; filename="%s.csv"' % md5( str( params ) ).hexdigest()
   response.headers['Content-Length'] = len( strData )
   return strData
Example #7
  def export_ping( self ):
    dInfo = {}
    dInfo[ 'version' ] = DIRAC.version
    dInfo[ 'time' ] = Time.dateTime()
    #Uptime
    try:
      oFD = file( "/proc/uptime" )
      iUptime = long( float( oFD.readline().split()[0].strip() ) )
      oFD.close()
      dInfo[ 'host uptime' ] = iUptime
    except:
      pass
    startTime = self.serviceInfoDict[ 'serviceStartTime' ]
    dInfo[ 'service start time' ] = self.serviceInfoDict[ 'serviceStartTime' ]
    serviceUptime = Time.dateTime() - startTime
     dInfo[ 'service uptime' ] = serviceUptime.days * 86400 + serviceUptime.seconds
    #Load average
    try:
      oFD = file( "/proc/loadavg" )
      sLine = oFD.readline()
      oFD.close()
      dInfo[ 'load' ] = " ".join( sLine.split()[:3] )
    except:
      pass
    dInfo[ 'name' ] = self.serviceInfoDict[ 'serviceName' ]
    stTimes = os.times()
    dInfo[ 'cpu times' ] = { 'user time' : stTimes[0],
                             'system time' : stTimes[1],
                             'children user time' : stTimes[2],
                             'children system time' : stTimes[3],
                             'elapsed real time' : stTimes[4]
                           }

    return S_OK( dInfo )
Example #8
 def getHistoryValues( self ):
   try:
     dbVars = [ str( f ) for f in simplejson.loads( request.params[ 'vars' ] ) ]
   except:
     dbVars = [ 'Load', 'Jobs', 'TransferredFiles' ]
   try:
     timespan = int( request.params[ 'timespan' ] )
   except:
     timespan = 86400
   rpcClient = getRPCClient( "WorkloadManagement/VirtualMachineManager" )
   result = rpcClient.getHistoryValues( 3600, {}, dbVars, timespan )
   if not result[ 'OK' ]:
     return S_ERROR( result[ 'Message' ] )
   svcData = result[ 'Value' ]
   data = []
   olderThan = Time.toEpoch() - 400
   for record in svcData[ 'Records' ]:
     rL = []
     for iP in range( len( svcData[ 'ParameterNames' ] ) ):
       param = svcData[ 'ParameterNames' ][iP]
       if param == 'Update':
         rL.append( Time.toEpoch( record[iP] ) )
       else:
         rL.append( record[iP] )
     if rL[0] < olderThan:
       data.append( rL )
   return S_OK( { 'data': data, 'fields' : svcData[ 'ParameterNames' ] } )
Example #9
 def web_getRunningInstancesHistory(self):
     try:
         bucketSize = int(self.request.arguments[ 'bucketSize' ][0])
     except:
         bucketSize = 900
     try:
         timespan = int(self.request.arguments[ 'timespan' ][0])
     except:
         timespan = 86400
     rpcClient = RPCClient("WorkloadManagement/VirtualMachineManager")
     result = rpcClient.getRunningInstancesHistory(timespan, bucketSize)
     if not result[ 'OK' ]:
         callback = {"success":"false", "error":result[ 'Message' ]}
         return self.write(callback)
     svcData = result[ 'Value' ]
     data = []
     olderThan = Time.toEpoch() - 400
     rL = []
     for record in svcData:
         eTime = Time.toEpoch(record[0])
         if eTime < olderThan:
             rL = [ eTime, int(record[1]) ]
         data.append(rL)
     callback = {"success":"true", 'data': data, "timespan": timespan}
     return self.write(callback)
Example #10
  def setExecutionTime(self,time):
    """ Set the execution time to the current date and time
    """

    if time.lower() == "now":
      self.attributes['ExecutionTime'] = Time.toString(Time.dateTime())
    else:
      self.attributes['ExecutionTime'] = time
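
Usage is either the literal string "now" or a preformatted time string (obj stands in for whatever object carries self.attributes):

obj.setExecutionTime( 'now' )                  # stamps Time.toString( Time.dateTime() )
obj.setExecutionTime( '2012-04-28 09:49:02' )  # stored verbatim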
Example #11
 def export_commit( self, typeName, startTime, endTime, valuesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.insertRecordThroughQueue( setup, typeName, startTime, endTime, valuesList ) #pylint: disable=too-many-function-args,no-member
Example #12
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.deleteRecord( setup, typeName, startTime, endTime, valuesList ) #pylint: disable=too-many-function-args,no-member
Example #13
 def export_commit( self, typeName, startTime, endTime, valuesList ):
   """
     Add a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.insertRecordThroughQueue( setup, typeName, startTime, endTime, valuesList )
Example #14
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return self.__acDB.deleteRecord( setup, typeName, startTime, endTime, valuesList )
Example #15
 def export_remove( self, typeName, startTime, endTime, valuesList ):
   """
     Remove a record for a type
   """
   setup = self.serviceInfoDict[ 'clientSetup' ]
   typeName = "%s_%s" % ( setup, typeName )
   startTime = int( Time.toEpoch( startTime ) )
   endTime = int( Time.toEpoch( endTime ) )
   return gAccountingDB.deleteRecord( typeName, startTime, endTime, valuesList )
Example #16
  def bulk_index(self, indexprefix, doc_type, data, mapping=None, period=None):
    """
    :param str indexprefix: index name.
    :param str doc_type: the type of the document
    :param list data: contains a list of dictionary
    :param dict mapping: the mapping used by elasticsearch
    :param str period: We can specify which kind of indices will be created.
                       Currently only daily and monthly indexes are supported.
    """
    gLogger.info("%d records will be inserted into %s" % (len(data), doc_type))
    if mapping is None:
      mapping = {}

    indexName = generateFullIndexName(indexprefix, period)
    gLogger.debug("inserting data into %s index" % indexName)
    if not self.exists(indexName):
      retVal = self.createIndex(indexprefix, mapping, period)
      if not retVal['OK']:
        return retVal
    docs = []
    for row in data:
      body = {
          '_index': indexName,
          '_type': doc_type,
          '_source': {}
      }
      body['_source'] = row

      if 'timestamp' not in row:
        gLogger.warn("timestamp is not given! Note: the actual time is used!")

      # if the timestamp is not provided, we use the current utc time.
      timestamp = row.get('timestamp', int(Time.toEpoch()))
      try:
        if isinstance(timestamp, datetime):
          body['_source']['timestamp'] = int(timestamp.strftime('%s')) * 1000
        elif isinstance(timestamp, basestring):
          timeobj = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
          body['_source']['timestamp'] = int(timeobj.strftime('%s')) * 1000
        else:  # we assume  the timestamp is an unix epoch time (integer).
          body['_source']['timestamp'] = timestamp * 1000
      except (TypeError, ValueError) as e:
        # in case we are not able to convert the timestamp to epoch time....
        gLogger.error("Wrong timestamp", e)
        body['_source']['timestamp'] = int(Time.toEpoch()) * 1000
      docs += [body]
    try:
      res = bulk(self.__client, docs, chunk_size=self.__chunk_size)
    except BulkIndexError as e:
      return S_ERROR(e)

    if res[0] == len(docs):
      # we have inserted all documents...
      return S_OK(len(docs))
    else:
      return S_ERROR(res)
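
A sketch of feeding bulk_index (the db client, index prefix and field names are illustrative; per the conversion above, 'timestamp' may be a datetime, a '%Y-%m-%d %H:%M:%S.%f' string, or an epoch integer, and defaults to the current UTC time when missing):

records = [ { 'timestamp': int( Time.toEpoch() ), 'Site': 'LCG.CERN.ch', 'Jobs': 12 },
            { 'Site': 'LCG.CNAF.it', 'Jobs': 7 } ]  # no timestamp -> current time is used
result = db.bulk_index( 'wmshistory_index', 'WMSHistory', records, period = 'day' )  # 'day' assumed as the daily-period value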
Example #17
  def addLoggingRecord(self,
                       jobID,
                       status='idem',
                       minor='idem',
                       application='idem',
                       date='',
                       source='Unknown'):
    """ Add a new entry to the JobLoggingDB table. One, two or all the three status
        components (major, minor, application) can be specified.
        Optionally, the time stamp of the status can be provided as a string
        in the format '%Y-%m-%d %H:%M:%S' or as a datetime.datetime object.
        If the time stamp is not provided, the current UTC time is used.
    """

    event = 'status/minor/app=%s/%s/%s' % (status, minor, application)
    self.gLogger.info("Adding record for job " + str(jobID) + ": '" + event + "' from " + source)

    if not date:
      # Make the UTC datetime string and float
      _date = Time.dateTime()
      epoc = time.mktime(_date.timetuple()) + _date.microsecond / 1000000. - MAGIC_EPOC_NUMBER
      time_order = round(epoc, 3)
    else:
      try:
        if isinstance(date, basestring):
          # The date is provided as a string in UTC
          _date = Time.fromString(date)
          epoc = time.mktime(_date.timetuple()) + _date.microsecond / 1000000. - MAGIC_EPOC_NUMBER
          time_order = round(epoc, 3)
        elif isinstance(date, Time._dateTimeType):
          _date = date
          epoc = time.mktime(_date.timetuple()) + _date.microsecond / 1000000. - \
              MAGIC_EPOC_NUMBER  # pylint: disable=no-member
          time_order = round(epoc, 3)
        else:
          self.gLogger.error('Incorrect date for the logging record')
          _date = Time.dateTime()
          epoc = time.mktime(_date.timetuple()) - MAGIC_EPOC_NUMBER
          time_order = round(epoc, 3)
      except BaseException:
        self.gLogger.exception('Exception while date evaluation')
        _date = Time.dateTime()
        epoc = time.mktime(_date.timetuple()) - MAGIC_EPOC_NUMBER
        time_order = round(epoc, 3)

    cmd = "INSERT INTO LoggingInfo (JobId, Status, MinorStatus, ApplicationStatus, " + \
          "StatusTime, StatusTimeOrder, StatusSource) VALUES (%d,'%s','%s','%s','%s',%f,'%s')" % \
        (int(jobID), status, minor, application[:255],
         str(_date), time_order, source)

    return self._update(cmd)
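
Hypothetical calls showing the two accepted date forms (jobID and status values are illustrative):

result = db.addLoggingRecord( 12345, status = 'Running', minor = 'Application', source = 'JobWrapper' )  # current UTC time
result = db.addLoggingRecord( 12345, status = 'Done', date = '2012-04-28 09:49:02' )                     # explicit UTC string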
Example #18
 def _checkJobLastUpdateTime( self, joblist , StalledDays ):
   timeLimitToConsider = Time.dateTime() - Time.day * StalledDays 
   ret = False
   for JobID in joblist:
     result = self.jobDB.getJobAttributes(int(JobID))
     if result['OK']:
        if result['Value'].has_key('LastUpdateTime'):
          LastUpdateTime = result['Value']['LastUpdateTime']
          if Time.fromString(LastUpdateTime) > timeLimitToConsider:
            ret = True
            self.log.debug('Since '+str(JobID)+' updates LastUpdateTime on '+str(LastUpdateTime)+', this does not need to be deleted.')
            break
     else:
       self.log.error("Error taking job info from DB: %s" % str( result['Message'] ) )
   return ret
 def _checkJobLastUpdateTime( self, joblist , StalledDays ):
   timeLimitToConsider = Time.dateTime() - Time.day * StalledDays 
   ret = False
   for jobID in joblist:
     result = self.jobDB.getJobAttributes( int( jobID ) )
     if result['OK']:
       if 'LastUpdateTime' in result['Value']:
         lastUpdateTime = result['Value']['LastUpdateTime']
         if Time.fromString( lastUpdateTime ) > timeLimitToConsider:
           ret = True
            self.log.debug( 'Since %s updates LastUpdateTime on %s this does not need to be deleted.' % ( str( jobID ), str( lastUpdateTime ) ) )
           break
     else:
       self.log.error( "Error taking job info from DB", result['Message'] )
   return ret
Example #20
 def __consolidateMarks( self, allData ):
   """
     Copies all marks except last step ones
     and consolidates them
   """
   consolidatedMarks = {}
   remainderMarks = {}
   for key in self.activitiesMarks:
     if allData:
       lastStepToSend = int( Time.toEpoch() )
     else:
       lastStepToSend = self.__UTCStepTime( key )
     consolidatedMarks[ key ] = {}
     remainderMarks [ key ] = {}
     for markTime in self.activitiesMarks[ key ]:
       markValue = self.activitiesMarks[ key ][ markTime ]
       if markTime >= lastStepToSend:
         remainderMarks[ key ][ markTime ] = markValue
       else:
         consolidatedMarks[ key ][ markTime ] = markValue
         # Consolidate the copied ones
         totalValue = 0
         for mark in consolidatedMarks[ key ][ markTime ]:
           totalValue += mark
         if self.activitiesDefinitions[ key ][ 'type' ] == self.OP_MEAN:
           totalValue /= len( consolidatedMarks[ key ][ markTime ] )
         consolidatedMarks[ key ][ markTime ] = totalValue
     if len( consolidatedMarks[ key ] ) == 0:
       del( consolidatedMarks[ key ] )
   self.activitiesMarks = remainderMarks
   return consolidatedMarks
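
A worked illustration of the consolidation above: for an activity of type OP_MEAN with a mark { 1000: [ 2, 4 ] } falling before lastStepToSend, the values are summed to 6 and divided by the number of marks, so consolidatedMarks[ key ][ 1000 ] becomes 3; any mark at or after lastStepToSend stays in remainderMarks for the next cycle.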
Example #21
 def submit(self):
   pagestart = time()
   RPC = getRPCClient("WorkloadManagement/JobMonitoring")
   user = str(credentials.getUsername())
   result = RPC.getOwners()
   if result["OK"]:
     defaultGroup = gConfig.getValue("/Registry/DefaultGroup","")
     if defaultGroup == "":
       return {"success":"false","error":"Option /Registry/DefaultGroup is undefined, please set the default group in the CS"}
     group = str(credentials.getSelectedGroup())
     groupProperty = credentials.getProperties(group)
      if user not in result["Value"] and "JobAdministrator" not in groupProperty and "JobSharing" not in groupProperty:
       c.result = {"success":"false","error":"You don't have any jobs in the DIRAC system"}
       return c.result
   else:
     c.result = {"success":"false","error":result["Message"]}
     return c.result
   req = self.__request()
   gLogger.always("getJobPageSummaryWeb(%s,%s,%s,%s)" % (req,globalSort,pageNumber,numberOfJobs))
   result = RPC.getJobPageSummaryWeb(req,globalSort,pageNumber,numberOfJobs)
    gLogger.always(" - REZ: %s" % result)
   if result["OK"]:
     result = result["Value"]
     gLogger.info("ReS",result)
     if result.has_key("TotalRecords"):
       if  result["TotalRecords"] > 0:
         if result.has_key("ParameterNames") and result.has_key("Records"):
           if len(result["ParameterNames"]) > 0:
             if len(result["Records"]) > 0:
               c.result = []
               jobs = result["Records"]
               head = result["ParameterNames"]
               headLength = len(head)
               for i in jobs:
                 tmp = {}
                 for j in range(0,headLength):
                   tmp[head[j]] = i[j]
                 c.result.append(tmp)
               total = result["TotalRecords"]
               timestamp = Time.dateTime().strftime("%Y-%m-%d %H:%M [UTC]")
               if result.has_key("Extras"):
                 st = self.__dict2string(req)
                 extra = result["Extras"]
                 c.result = {"success":"true","result":c.result,"total":total,"extra":extra,"request":st,"date":timestamp}
               else:
                 c.result = {"success":"true","result":c.result,"total":total,"date":timestamp}
             else:
               c.result = {"success":"false","result":"","error":"There are no data to display"}
           else:
             c.result = {"success":"false","result":"","error":"ParameterNames field is missing"}
         else:
           c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
       else:
         c.result = {"success":"false","result":"","error":"There were no data matching your selection"}
     else:
       c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
   else:
     c.result = {"success":"false","error":result["Message"]}
   gLogger.info("\033[0;31mJOB SUBMIT REQUEST:\033[0m %s" % (time() - pagestart))
   return c.result
Example #22
    def _getHistoryData(self, timeSpan, groupToUse):
        """Get history data from ElasticSearch Monitoring database

        :param int timeSpan: time span
        :param str groupToUse: requested user group
        :return: dictionary with history data
        """

        monitoringClient = MonitoringClient()

        reportCondition = {"Status": ["Running"]}
        if not groupToUse:
            reportGrouping = "UserGroup"
            reportCondition["grouping"] = ["UserGroup"]
        else:
            reportGrouping = "User"
            reportCondition["UserGroup"] = groupToUse
            reportCondition["grouping"] = ["User"]

        now = Time.dateTime()
        result = monitoringClient.getReport(
            "WMSHistory",
            "AverageNumberOfJobs",
            now - datetime.timedelta(seconds=timeSpan),
            now,
            reportCondition,
            reportGrouping,
            {"lastSeconds": timeSpan},
        )
        return result
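
A hypothetical call (timeSpan is in seconds; obj stands in for the owning class instance; an empty groupToUse switches the grouping to UserGroup, as above):

result = obj._getHistoryData( 86400, 'dirac_user' )  # average running jobs per User in that group, last day
result = obj._getHistoryData( 86400, None )          # average running jobs grouped by UserGroup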
Example #23
 def execute( self ):
   """ Remove jobs in various status
   """
   #Delete jobs in "Deleted" state
   result = self.removeJobsByStatus( { 'Status' : 'Deleted' } )
   if not result[ 'OK' ]:
     return result
   #Get all the Job types that can be cleaned
   result = self.__getAllowedJobTypes()
   if not result[ 'OK' ]:
     return result
   
   # No jobs in the system subject to removal
   if not result['Value']:
     return S_OK()
   
   baseCond = { 'JobType' : result[ 'Value' ] }
   # Remove jobs with final status
   for status in self.removeStatusDelay:
     delay = self.removeStatusDelay[ status ]
     condDict = dict( baseCond )
     condDict[ 'Status' ] = status
     delTime = str( Time.dateTime() - delay * Time.day )
     result = self.removeJobsByStatus( condDict, delTime )
     if not result['OK']:
       gLogger.warn( 'Failed to remove jobs in status %s' % status )
   return S_OK()
Example #24
def filterOngoing( selectOutput ):
  '''
    Selects all the ongoing downtimes
  '''

  downtimes = selectOutput
  downtimesFiltered = []
  currentDate = Time.toEpoch( Time.dateTime() )

  for dt in downtimes:
    dtStart = Time.toEpoch( dt[ 'startDate' ] )
    dtEnd = Time.toEpoch( dt[ 'endDate' ] )
    if ( dtStart <= currentDate ) and ( dtEnd >= currentDate ):
      downtimesFiltered.append( dt )

  return downtimesFiltered
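
A sketch of the expected input (only startDate and endDate are read, and both must be accepted by Time.toEpoch; extra fields are ignored):

downtimes = [ { 'startDate': Time.dateTime() - Time.day, 'endDate': Time.dateTime() + Time.day },      # ongoing -> kept
              { 'startDate': Time.dateTime() - 3 * Time.day, 'endDate': Time.dateTime() - Time.day } ] # past -> dropped
ongoing = filterOngoing( downtimes )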
Example #25
    def export_getCurrentPilotCounters(cls, attrDict={}):
        """Get pilot counters per Status with attrDict selection. Final statuses are given for
        the last day.
        """

        result = cls.pilotAgentsDB.getCounters("PilotAgents", ["Status"], attrDict, timeStamp="LastUpdateTime")
        if not result["OK"]:
            return result
        last_update = Time.dateTime() - Time.day
        resultDay = cls.pilotAgentsDB.getCounters(
            "PilotAgents", ["Status"], attrDict, newer=last_update, timeStamp="LastUpdateTime"
        )
        if not resultDay["OK"]:
            return resultDay

        resultDict = {}
        for statusDict, count in result["Value"]:
            status = statusDict["Status"]
            resultDict[status] = count
            if status in PilotStatus.PILOT_FINAL_STATES:
                resultDict[status] = 0
                for statusDayDict, ccount in resultDay["Value"]:
                    if status == statusDayDict["Status"]:
                        resultDict[status] = ccount
                        break

        return S_OK(resultDict)
Example #26
    def registerActivity(self, sourceId, acName, acDict):
        """
        Register an activity.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :type acDict: dictionary
        :param acDict: The activity dictionary containing information about 'category',
                       'description', 'bucketLength', 'type', 'unit'.
        :return: the rrd filename of the activity, or -1 if the insert fails.
        """
        m = hashlib.md5()
        acDict['name'] = acName
        acDict['sourceId'] = sourceId
        m.update(str(acDict).encode())
        retList = self.__select("filename", "activities", acDict)
        if len(retList) > 0:
            return retList[0][0]
        else:
            acDict['lastUpdate'] = int(Time.toEpoch() - 86000)
            filePath = m.hexdigest()
            filePath = "%s/%s.rrd" % (filePath[:2], filePath)
            self.log.info("Registering activity", str(acDict))
            # This is basically called by the ServiceInterface inside registerActivities method and then all the activity
            # information is stored in the sqlite3 db using the __insert method.

            if self.__insert("activities", {
                    'id': 'NULL',
                    'filename': "'%s'" % filePath,
            }, acDict) == 0:
                return -1
            return self.__select("filename", "activities", acDict)[0][0]
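
A sketch of registering an activity (db and the field values are illustrative, mirroring the keys named in the docstring):

acDict = { 'category': 'WMS', 'description': 'Jobs processed', 'bucketLength': 60,
           'type': 'sum', 'unit': 'jobs' }
rrdFile = db.registerActivity( 'source-01', 'JobsProcessed', acDict )  # returns the stored .rrd filename, or -1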
Example #27
 def storeHashTagById( self, userIds, tagName, hashTag = False, connObj = False ):
   """
   Store a hash tag for a profile, generating one if not provided
   """
   if not hashTag:
     hashTag = md5.md5()
     hashTag.update( "%s;%s;%s" % ( Time.dateTime(), userIds, tagName ) )
     hashTag = hashTag.hexdigest()
   hashTagUnescaped = hashTag
   result = self._escapeString( hashTag )
   if not result[ 'OK' ]:
     return result
   hashTag = result[ 'Value' ]
   result = self._escapeString( tagName )
   if not result[ 'OK' ]:
     return result
   tagName = result[ 'Value' ]
   insertSQL = "INSERT INTO `up_HashTags` ( UserId, GroupId, VOId, TagName, HashTag ) VALUES ( %s, %s, %s, %s, %s )" % ( userIds[0], userIds[1], userIds[2], tagName, hashTag )
   result = self._update( insertSQL, conn = connObj )
   if result[ 'OK' ]:
     return S_OK( hashTagUnescaped )
   #If error and not duplicate -> real error
   if result[ 'Message' ].find( "Duplicate entry" ) == -1:
     return result
   updateSQL = "UPDATE `up_HashTags` set HashTag=%s WHERE UserId = %s AND GroupId = %s AND VOId = %s AND TagName = %s" % ( hashTag, userIds[0], userIds[1], userIds[2], tagName )
   result = self._update( updateSQL, conn = connObj )
   if not result[ 'OK' ]:
     return result
   return S_OK( hashTagUnescaped )
Example #28
  def initialize( self, request ):
    """ Set default values to attributes and parameters
    """
    if type( request ) == NoneType:
      # Set some defaults
      for name in self.attributeNames:
        self.attributes[name] = 'Unknown'
      self.attributes['CreationTime'] = str( Time.dateTime() )
      self.attributes['Status'] = "New"
      result = getProxyInfo()
      if result['OK']:
        proxyDict = result[ 'Value' ]
        self.attributes['OwnerDN'] = proxyDict[ 'identity' ]
        if 'group' in proxyDict:
          self.attributes['OwnerGroup'] = proxyDict[ 'group' ]
      self.attributes['DIRACSetup'] = gConfig.getValue( '/DIRAC/Setup', 'Unknown' )
    elif isinstance( request, RequestContainer ):
      for attr in self.attributeNames:
        self.attributes[attr] = request.attributes[attr]

    # initialize request from an XML string
    if type( request ) in StringTypes:
      for name in self.attributeNames:
        self.attributes[name] = 'Unknown'
      self.parseRequest( request )

    # Initialize request from another request
    elif isinstance( request, RequestContainer ):
      self.subRequests = copy.deepcopy( request.subrequests )
Example #29
    def __init__(self, serviceData):
        """
        Init the variables for the service

        :param serviceData: dict with modName, standalone, loadName, moduleObj, classObj. e.g.:
          {'modName': 'Framework/serviceName',
          'standalone': True,
          'loadName': 'Framework/serviceName',
          'moduleObj': <module 'serviceNameHandler' from '/home/DIRAC/FrameworkSystem/Service/serviceNameHandler.pyo'>,
          'classObj': <class 'serviceNameHandler.serviceHandler'>}

          Standalone is true if there is only one service started
          If it's false, every service is linked to a different MonitoringClient
        """
        self._svcData = serviceData
        self._name = serviceData["modName"]
        self._startTime = Time.dateTime()
        self._validNames = [serviceData["modName"]]
        if serviceData["loadName"] not in self._validNames:
            self._validNames.append(serviceData["loadName"])
        self._cfg = ServiceConfiguration(list(self._validNames))
        self._standalone = serviceData["standalone"]
        self.__monitorLastStatsUpdate = time.time()
        self._stats = {"queries": 0, "connections": 0}
        self._authMgr = AuthManager(
            "%s/Authorization" %
            PathFinder.getServiceSection(serviceData["loadName"]))
        self._transportPool = getGlobalTransportPool()
        self.__cloneId = 0
        self.__maxFD = 0
Example #30
    def export_getCurrentPilotCounters(cls, attrDict={}):
        """ Get pilot counters per Status with attrDict selection. Final statuses are given for
        the last day.
        """

        result = PilotAgentsDB().getCounters('PilotAgents', ['Status'],
                                             attrDict,
                                             timeStamp='LastUpdateTime')
        if not result['OK']:
            return result
        last_update = Time.dateTime() - Time.day
        resultDay = PilotAgentsDB().getCounters('PilotAgents', ['Status'],
                                                attrDict,
                                                newer=last_update,
                                                timeStamp='LastUpdateTime')
        if not resultDay['OK']:
            return resultDay

        resultDict = {}
        for statusDict, count in result['Value']:
            status = statusDict['Status']
            resultDict[status] = count
            if status in FINAL_STATES:
                resultDict[status] = 0
                for statusDayDict, ccount in resultDay['Value']:
                    if status == statusDayDict['Status']:
                        resultDict[status] = ccount
                        break

        return S_OK(resultDict)
Example #31
 def _mbReceivedMsg(self, trid, msgObj):
     result = self._authorizeProposal(
         ("Message", msgObj.getName()), trid,
         self._transportPool.get(trid).getConnectingCredentials())
     if not result["OK"]:
         return result
     result = self._instantiateHandler(trid)
     if not result["OK"]:
         return result
     handlerObj = result["Value"]
     response = handlerObj._rh_executeMessageCallback(msgObj)
     if self.activityMonitoring and response["OK"]:
         self.activityMonitoringReporter.addRecord({
             "timestamp":
             int(Time.toEpoch()),
             "host":
             Network.getFQDN(),
             "componentType":
             "service",
             "component":
             "_".join(self._name.split("/")),
             "componentLocation":
             self._cfg.getURL(),
             "ServiceResponseTime":
             response["Value"][1],
         })
     if response["OK"]:
         return response["Value"][0]
     else:
         return response
Example #32
    def __getLoggingInfo(self, transid):

        callback = {}
        tsClient = TransformationClient()
        result = tsClient.getTransformationLogging(transid)
        if result["OK"]:
            result = result["Value"]
            if len(result) > 0:
                callback = []
                resultUser = gConfig.getSections("/Security/Users")
                if resultUser["OK"]:
                    users = resultUser["Value"]
                    dndb = {}
                    for j in users:
                        dndb[gConfig.getValue("/Security/Users/%s/DN" % j)] = j
                else:
                    dndb = {}
                for i in result:
                    DN = i["AuthorDN"]
                    if DN in dndb:
                        i["AuthorDN"] = dndb[DN]
                    else:
                        i["AuthorDN"] = DN  # "Owner Unknown"
                    date = Time.toString(i["MessageDate"])
                    callback.append([i["Message"], date, i["AuthorDN"]])
                callback = {"success": "true", "result": callback}
            else:
                callback = {"success": "false", "error": "Nothing to display"}
        else:
            callback = {"success": "false", "error": result["Message"]}
        gLogger.info("PRODUCTION LOG:", transid)
        return callback
Example #33
  def export_getCurrentJobCounters(attrDict=None):
    """ Get job counters per Status with attrDict selection. Final statuses are given for
        the last day.
    """

    if not attrDict:
      attrDict = {}
    result = gJobDB.getCounters('Jobs', ['Status'], attrDict, timeStamp='LastUpdateTime')
    if not result['OK']:
      return result
    last_update = Time.dateTime() - Time.day
    resultDay = gJobDB.getCounters('Jobs', ['Status'], attrDict, newer=last_update,
                                   timeStamp='LastUpdateTime')
    if not resultDay['OK']:
      return resultDay

    resultDict = {}
    for statusDict, count in result['Value']:
      status = statusDict['Status']
      resultDict[status] = count
      if status in FINAL_STATES:
        resultDict[status] = 0
        for statusDayDict, ccount in resultDay['Value']:
          if status == statusDayDict['Status']:
            resultDict[status] = ccount
            break

    return S_OK(resultDict)
Example #34
    def clearWaitingPilots(self, condDict):
        """ Clear pilots in the faulty Waiting state
        """

        last_update = Time.dateTime() - MAX_WAITING_STATE_LENGTH * Time.hour
        clearDict = {
            "Status": "Waiting",
            "OwnerDN": condDict["OwnerDN"],
            "OwnerGroup": condDict["OwnerGroup"],
            "GridType": condDict["GridType"],
            "Broker": condDict["Broker"],
        }
        result = self.pilotDB.selectPilots(clearDict, older=last_update)
        if not result["OK"]:
            self.log.warn("Failed to get the Pilot Agents for Waiting state")
            return result
        if not result["Value"]:
            return S_OK()
        refList = result["Value"]

        for pilotRef in refList:
            self.log.info("Setting Waiting pilot to Stalled: %s" % pilotRef)
            result = self.pilotDB.setPilotStatus(pilotRef, "Stalled", statusReason="Exceeded max waiting time")

        return S_OK()
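
The condDict argument must carry the four selection keys read above (values are illustrative; agent stands in for whatever object holds pilotDB):

condDict = { 'OwnerDN': '/DC=ch/DC=cern/CN=jdoe', 'OwnerGroup': 'dirac_user',
             'GridType': 'DIRAC', 'Broker': 'Unknown' }
result = agent.clearWaitingPilots( condDict )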
Example #35
 def submit(self):
   pagestart = time()
   RPC = getRPCClient("WorkloadManagement/JobMonitoring")
   user = str(credentials.getUsername())
   result = RPC.getOwners()
   if result["OK"]:
     defaultGroup = gConfig.getValue("/Registry/DefaultGroup","")
     if defaultGroup == "":
       return {"success":"false","error":"Option /Registry/DefaultGroup is undefined, please set the default group in the CS"}
     group = str(credentials.getSelectedGroup())
     groupProperty = credentials.getProperties(group)
      if user not in result["Value"] and "JobAdministrator" not in groupProperty and "JobSharing" not in groupProperty:
       c.result = {"success":"false","error":"You don't have any jobs in the DIRAC system"}
       return c.result
   else:
     c.result = {"success":"false","error":result["Message"]}
     return c.result
   req = self.__request()
   gLogger.always("getJobPageSummaryWeb(%s,%s,%s,%s)" % (req,globalSort,pageNumber,numberOfJobs))
   result = RPC.getJobPageSummaryWeb(req,globalSort,pageNumber,numberOfJobs)
    gLogger.always(" - REZ: %s" % result)
   if result["OK"]:
     result = result["Value"]
     gLogger.info("ReS",result)
     if result.has_key("TotalRecords"):
       if  result["TotalRecords"] > 0:
         if result.has_key("ParameterNames") and result.has_key("Records"):
           if len(result["ParameterNames"]) > 0:
             if len(result["Records"]) > 0:
               c.result = []
               jobs = result["Records"]
               head = result["ParameterNames"]
               headLength = len(head)
               for i in jobs:
                 tmp = {}
                 for j in range(0,headLength):
                   tmp[head[j]] = i[j]
                 c.result.append(tmp)
               total = result["TotalRecords"]
               timestamp = Time.dateTime().strftime("%Y-%m-%d %H:%M [UTC]")
               if result.has_key("Extras"):
                 st = self.__dict2string(req)
                 extra = result["Extras"]
                 c.result = {"success":"true","result":c.result,"total":total,"extra":extra,"request":st,"date":timestamp}
               else:
                 c.result = {"success":"true","result":c.result,"total":total,"date":timestamp}
             else:
               c.result = {"success":"false","result":"","error":"There are no data to display"}
           else:
             c.result = {"success":"false","result":"","error":"ParameterNames field is missing"}
         else:
           c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
       else:
         c.result = {"success":"false","result":"","error":"There were no data matching your selection"}
     else:
       c.result = {"success":"false","result":"","error":"Data structure is corrupted"}
   else:
     c.result = {"success":"false","error":result["Message"]}
   gLogger.info("\033[0;31mJOB SUBMIT REQUEST:\033[0m %s" % (time() - pagestart))
   return c.result
Example #36
 def am_createStopAgentFile(self):
     try:
         fd = open(self.am_getStopAgentFile(), 'w')
         fd.write('Dirac site agent Stopped at %s' % Time.toString())
         fd.close()
     except Exception:
         pass
Example #37
  def getActivitiesList( self ):
    try:
      start = int( request.params[ 'start' ] )
    except:
      start = 0
    try:
      limit = int( request.params[ 'limit' ] )
    except:
      limit = 0

    try:
      sortField = str( request.params[ 'sortField' ] ).replace( "_", "." )
      sortDir = str( request.params[ 'sortDirection' ] )
      sort = [ ( sortField, sortDir ) ]
    except:
      sort = []
    rpcClient = getRPCClient( "Framework/Monitoring" )
    retVal = rpcClient.getActivitiesContents( {}, sort, start, limit )
    if not retVal[ 'OK' ]:
      return retVal
    svcData = retVal[ 'Value' ]
    data = { 'numActivities' : svcData[ 'TotalRecords' ], 'activities' : [] }
    now = Time.toEpoch()
    for record in svcData[ 'Records' ]:
      formatted = {}
      for i in range( len( svcData[ 'Fields' ] ) ):
        formatted[ svcData[ 'Fields' ][i].replace( ".", "_" ) ] = record[i]
      if 'activities_lastUpdate' in formatted:
        formatted[ 'activities_lastUpdate' ] = now - int( formatted[ 'activities_lastUpdate' ] )
      data[ 'activities' ].append( formatted )
    return data
Example #38
 def plotView( self ):
   """
   Plot a saved view
   """
   plotRequest = {}
   try:
     webRequest = simplejson.loads( request.params[ 'plotRequest' ] )
     if 'id' not in webRequest:
       return S_ERROR( "Missing viewID in plot request" )
     plotRequest[ 'id' ] = webRequest[ 'id' ]
     if 'size' not in webRequest:
       return S_ERROR( "Missing plotsize in plot request" )
     plotRequest[ 'size' ] = webRequest[ 'size' ]
     if 'time' not in webRequest:
       return S_ERROR( "Missing time span in plot request" )
     timeReq = webRequest[ 'time' ]
     if timeReq[ 'timespan' ] < 0:
       toSecs = self.__dateToSecs( str( timeReq[ 'toDate' ] ) )
       fromSecs = self.__dateToSecs( str( timeReq[ 'fromDate' ] ) )
     else:
       toSecs = int( Time.toEpoch() )
       fromSecs = toSecs - timeReq[ 'timespan' ]
     plotRequest[ 'fromSecs' ] = fromSecs
     plotRequest[ 'toSecs' ] = toSecs
     if 'varData' in webRequest:
       plotRequest[ 'varData' ] = webRequest[ 'varData' ]
   except Exception, e:
     return self.__translateToExpectedExtResult( S_ERROR( "Error while processing plot parameters: %s" % str( e ) ) )
Example #39
    def setJobStatus(self,
                     status='',
                     minorStatus='',
                     applicationStatus='',
                     sendFlag=True,
                     minor=None,
                     application=None):
        """ Send job status information to the JobState service for jobID
        """
        # Backward compatibility
        # FIXME: to remove in next version
        if minor or application:
            gLogger.warn("Use deprecated argument to setJobStatus()",
                         "minor=%s, application=%s" % (minor, application))
        if minor is not None:
            minorStatus = minor
        if application is not None:
            applicationStatus = application

        timeStamp = Time.toString()
        # add job status record
        self.jobStatusInfo.append(
            (status.replace("'", ''), minorStatus.replace("'", ''), timeStamp))
        if applicationStatus:
            self.appStatusInfo.append(
                (applicationStatus.replace("'", ''), timeStamp))
        if sendFlag and self.jobID:
            # and send
            return self.sendStoredStatusInfo()

        return S_OK()
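
A hypothetical call using the current keyword names (the deprecated minor=/application= aliases are still accepted, with a warning):

report = JobReport( 12345 )  # assuming a JobReport-style owner of jobStatusInfo/appStatusInfo
result = report.setJobStatus( status = 'Running', minorStatus = 'Application', applicationStatus = 'Executing' )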
Example #40
 def tryView( self ):
   """
   Try plotting graphs for a view
   """
   try:
     plotRequest = simplejson.loads( request.params[ 'plotRequest' ] )
     if 'timeLength' in request.params:
       timeLength = str( request.params[ 'timeLength' ] )
       toSecs = int( Time.toEpoch() )
       if timeLength == "hour":
         fromSecs = toSecs - 3600
       elif timeLength == "day":
         fromSecs = toSecs - 86400
       elif timeLength == "month":
         fromSecs = toSecs - 2592000
        elif timeLength == "year":
          fromSecs = toSecs - 31104000
       else:
         return S_ERROR( "Time length value not valid" )
     else:
       fromDate = str( request.params[ 'fromDate' ] )
       toDate = str( request.params[ 'toDate' ] )
       fromSecs = self.__dateToSecs( fromDate )
       toSecs = self.__dateToSecs( toDate )
   except Exception, e:
     return S_ERROR( "Error while processing plot parameters: %s" % str( e ) )
Example #41
 def storeHashTagById(self, userIds, tagName, hashTag=False, connObj=False):
     """
     Store a hash tag for a profile, generating one if not provided
     """
     if not hashTag:
         hashTag = md5.md5()
         hashTag.update("%s;%s;%s" % (Time.dateTime(), userIds, tagName))
         hashTag = hashTag.hexdigest()
     hashTagUnescaped = hashTag
     result = self._escapeString(hashTag)
     if not result['OK']:
         return result
     hashTag = result['Value']
     result = self._escapeString(tagName)
     if not result['OK']:
         return result
     tagName = result['Value']
     insertSQL = "INSERT INTO `up_HashTags` ( UserId, GroupId, VOId, TagName, HashTag ) VALUES ( %s, %s, %s, %s, %s )" % (
         userIds[0], userIds[1], userIds[2], tagName, hashTag)
     result = self._update(insertSQL, conn=connObj)
     if result['OK']:
         return S_OK(hashTagUnescaped)
     #If error and not duplicate -> real error
     if result['Message'].find("Duplicate entry") == -1:
         return result
     updateSQL = "UPDATE `up_HashTags` set HashTag=%s WHERE UserId = %s AND GroupId = %s AND VOId = %s AND TagName = %s" % (
         hashTag, userIds[0], userIds[1], userIds[2], tagName)
     result = self._update(updateSQL, conn=connObj)
     if not result['OK']:
         return result
     return S_OK(hashTagUnescaped)