def __setJobStatus(cls, jobID, status=None, minorStatus=None, appStatus=None, source=None, datetime=None):
    """Update the provided job statuses (major, minor and application).

    Optionally records the source and a time stamp (the current time when
    none is given). Delegates the actual write to the bulk update method.
    """
    updates = {}
    # Collect only the status fields that were actually supplied.
    for field, value in (('Status', status),
                         ('MinorStatus', minorStatus),
                         ('ApplicationStatus', appStatus)):
        if value:
            updates[field] = value
    if not updates:
        # Nothing to change: report success without touching the DB.
        return S_OK()
    if source:
        updates['Source'] = source
    if not datetime:
        datetime = Time.toString()
    return cls.__setJobStatusBulk(jobID, {datetime: updates})
def test_03_api(self):
    """ DataLoggingDB API

    :param self: self reference
    """
    # Two LFNs and their full file-status tuples used throughout the test.
    lfns = ['/Test/00001234/File1', '/Test/00001234/File2']
    fileTuples = tuple([(lfn, "TestStatus", "MinorStatus", Time.toString(), Time.dateTime(), "Somewhere") for lfn in lfns])
    # Insert one record per LFN with an explicit date string.
    result = self.__db.addFileRecord(lfns, "TestStatus", date='2012-04-28 09:49:02.545466')
    self.assertEqual(result["OK"], True)
    self.assertEqual(result["Value"], 2)
    self.assertEqual(result["lastRowId"], 2)
    # Insert a second record per LFN via the bulk API.
    result = self.__db.addFileRecords(fileTuples)
    self.assertEqual(result["OK"], True)
    # Each LFN should now have two logging entries.
    result = self.__db.getFileLoggingInfo(lfns[0])
    self.assertEqual(result["OK"], True)
    self.assertEqual(len(result["Value"]), 2)
    result = self.__db.getFileLoggingInfo(lfns[1])
    self.assertEqual(result["OK"], True)
    self.assertEqual(len(result["Value"]), 2)
    # Only one distinct status was ever written.
    result = self.__db.getUniqueStates()
    self.assertEqual(result["OK"], True)
    self.assertEqual(result["Value"], ["TestStatus"])
    # Clean up: drop the test table.
    result = self.__db._update('DROP TABLE `%s`' % self.__db.tableName)
    self.assertEqual(result["OK"], True)
def am_createStopAgentFile( self ):
    """Drop a marker file recording when this agent was stopped.

    Failures to write the file are deliberately ignored (best effort).
    """
    try:
        with open( self.am_getStopAgentFile(), 'w' ) as fd:
            fd.write( 'Dirac site agent Stopped at %s' % Time.toString() )
    except Exception:
        pass
def __logProduction(self, prodid):
    """Return the logging records of a transformation/production.

    :param prodid: production identifier (string or int)
    :return: dict with 'success' and either 'result' rows or 'error'
    """
    prodID = int(prodid)  # renamed local: 'id' shadowed the builtin
    RPC = getRPCClient("Transformation/TransformationManager")
    result = RPC.getTransformationLogging(prodID)
    if result["OK"]:
        result = result["Value"]
        if len(result) > 0:
            c.result = []
            # Build a DN -> user-name map so log authors are shown by name.
            resultUser = gConfig.getSections("/Security/Users")
            if resultUser["OK"]:
                users = resultUser["Value"]
                dndb = {}
                for j in users:
                    dndb[gConfig.getValue("/Security/Users/%s/DN" % j)] = j
            else:
                dndb = {}
            for i in result:
                DN = i["AuthorDN"]
                # Bug fix: dict.has_key() was removed in Python 3.
                if DN in dndb:
                    i["AuthorDN"] = dndb[DN]
                else:
                    i["AuthorDN"] = DN  # "Owner Unknown"
                date = Time.toString(i["MessageDate"])
                c.result.append([i["Message"], i["AuthorDN"], date])
            c.result = {"success": "true", "result": c.result}
        else:
            c.result = {"success": "false", "error": "Nothing to display"}
    else:
        c.result = {"success": "false", "error": result["Message"]}
    gLogger.info("PRODUCTION LOG:", prodID)
    return c.result
def __getLoggingInfo(self, transid):
    """Fetch the logging records of transformation *transid*.

    :param transid: transformation identifier
    :return: dict with 'success' and either 'result' rows or 'error'
    """
    callback = {}
    tsClient = TransformationClient()
    result = tsClient.getTransformationLogging(transid)
    if result["OK"]:
        result = result["Value"]
        if len(result) > 0:
            callback = []
            # Map author DNs to user names when the CS knows them.
            resultUser = gConfig.getSections("/Security/Users")
            if resultUser["OK"]:
                users = resultUser["Value"]
                dndb = {}
                for j in users:
                    dndb[gConfig.getValue("/Security/Users/%s/DN" % j)] = j
            else:
                dndb = {}
            for i in result:
                DN = i["AuthorDN"]
                if DN in dndb:
                    i["AuthorDN"] = dndb[DN]
                else:
                    i["AuthorDN"] = DN  # "Owner Unknown"
                date = Time.toString(i["MessageDate"])
                callback.append([i["Message"], date, i["AuthorDN"]])
            callback = {"success": "true", "result": callback}
        else:
            callback = {"success": "false", "error": "Nothing to display"}
    else:
        callback = {"success": "false", "error": result["Message"]}
    # Bug fix: this used to log the builtin function `id`; log the
    # transformation id that was actually requested.
    gLogger.info("PRODUCTION LOG:", transid)
    return callback
def setJobStatus(self, status='', minorStatus='', applicationStatus='', sendFlag=True, minor=None, application=None):
    """Accumulate a job status record and optionally push it to the service.

    ``minor``/``application`` are deprecated aliases kept for backward
    compatibility; when given they override ``minorStatus`` and
    ``applicationStatus``.
    """
    # Backward compatibility
    # FIXME: to remove in next version
    if minor or application:
        gLogger.warn("Use deprecated argument to setJobStatus()",
                     "minor=%s, application=%s" % (minor, application))
        if minor is not None:
            minorStatus = minor
        if application is not None:
            applicationStatus = application
    timeStamp = Time.toString()
    # Single quotes are stripped to keep the DB layer happy.
    statusRecord = (status.replace("'", ''), minorStatus.replace("'", ''), timeStamp)
    self.jobStatusInfo.append(statusRecord)
    if applicationStatus:
        self.appStatusInfo.append((applicationStatus.replace("'", ''), timeStamp))
    if not (sendFlag and self.jobID):
        return S_OK()
    return self.sendStoredStatusInfo()
def getToken(key):
    """Return proxy information selected by *key*.

    :param key: "owner" -> the proxy user name;
                "expiration" -> the proxy expiration date (second precision).
    :return: the requested value, or None for an unrecognised key
             (same as the original behaviour).
    """
    proxyInfo = getProxyInfo()
    if not proxyInfo["OK"]:
        error(str(proxyInfo))
    if key.lower() == "owner":
        # Fix: wrapping the value in S_OK() and then testing ["OK"] was dead
        # code -- S_OK() always succeeds -- so the value is returned directly.
        return proxyInfo["Value"]["username"]
    elif key.lower() == "expiration":
        secondsLeft = proxyInfo["Value"]["secondsLeft"]
        now = Time.dateTime()  # datetime.datetime.utcnow()
        expirationDate = Time.toString(now + datetime.timedelta(seconds=secondsLeft))
        # Drop the microseconds part.
        return expirationDate.split(".")[0]
def am_createStopAgentFile(self):
    """Write a stop-marker file with the stop time; failures are ignored."""
    try:
        with open(self.am_getStopAgentFile(), 'w') as stopFile:
            stopFile.write('Dirac site agent Stopped at %s' % Time.toString())
    except Exception:
        # Best effort only: the agent is stopping anyway.
        pass
def test_03_api( self ):
    """ DataLoggingDB API

    :param self: self reference
    """
    # Two LFNs and their full file-status tuples used throughout the test.
    lfns = [ '/Test/00001234/File1', '/Test/00001234/File2' ]
    fileTuples = tuple( [ ( lfn, "TestStatus", "MinorStatus", Time.toString(), Time.dateTime(), "Somewhere" ) for lfn in lfns ] )
    # Insert one record per LFN with an explicit date string.
    result = self.__db.addFileRecord( lfns, "TestStatus", date = '2012-04-28 09:49:02.545466' )
    self.assertEqual( result["OK"], True )
    self.assertEqual( result["Value"], 2 )
    self.assertEqual( result["lastRowId"], 2 )
    # Insert a second record per LFN via the bulk API.
    result = self.__db.addFileRecords( fileTuples )
    self.assertEqual( result["OK"], True )
    # Each LFN should now have two logging entries.
    result = self.__db.getFileLoggingInfo( lfns[0] )
    self.assertEqual( result["OK"], True )
    self.assertEqual( len( result["Value"] ), 2 )
    result = self.__db.getFileLoggingInfo( lfns[1] )
    self.assertEqual( result["OK"], True )
    self.assertEqual( len( result["Value"] ), 2 )
    # Only one distinct status was ever written.
    result = self.__db.getUniqueStates()
    self.assertEqual( result["OK"], True )
    self.assertEqual( result["Value"], [ "TestStatus" ] )
    # Clean up: drop the test table.
    result = self.__db._update( 'DROP TABLE `%s`' % self.__db.tableName )
    self.assertEqual( result["OK"], True )
def __logProduction(self, prodid):
    """Return the logging records of a transformation/production.

    :param prodid: production identifier (string or int)
    :return: dict with 'success' and either 'result' rows or 'error'
    """
    prodID = int(prodid)  # renamed local: 'id' shadowed the builtin
    RPC = getRPCClient("Transformation/TransformationManager")
    result = RPC.getTransformationLogging(prodID)
    if result["OK"]:
        result = result["Value"]
        if len(result) > 0:
            c.result = []
            # Build a DN -> user-name map so log authors are shown by name.
            resultUser = gConfig.getSections("/Security/Users")
            if resultUser["OK"]:
                users = resultUser["Value"]
                dndb = {}
                for j in users:
                    dndb[gConfig.getValue("/Security/Users/%s/DN" % j)] = j
            else:
                dndb = {}
            for i in result:
                DN = i["AuthorDN"]
                # Bug fix: dict.has_key() was removed in Python 3.
                if DN in dndb:
                    i["AuthorDN"] = dndb[DN]
                else:
                    i["AuthorDN"] = DN  # "Owner Unknown"
                date = Time.toString(i["MessageDate"])
                c.result.append([i["Message"], i["AuthorDN"], date])
            c.result = {"success": "true", "result": c.result}
        else:
            c.result = {"success": "false", "error": "Nothing to display"}
    else:
        c.result = {"success": "false", "error": result["Message"]}
    gLogger.info("PRODUCTION LOG:", prodID)
    return c.result
def getToken( key ):
    '''
      Return proxy information selected by *key*.

      "owner"      -> the proxy user name
      "expiration" -> the proxy expiration date (second precision)
      anything else returns None (unchanged behaviour).
    '''
    proxyInfo = getProxyInfo()
    if not proxyInfo[ 'OK' ]:
        error( str( proxyInfo ) )
    if key.lower() == 'owner':
        # Fix: S_OK() always carries OK=True, so the old error check on the
        # wrapped value was dead code; return the value directly.
        return proxyInfo[ 'Value' ][ 'username' ]
    elif key.lower() == 'expiration':
        secondsLeft = proxyInfo[ 'Value' ][ 'secondsLeft' ]
        now = Time.dateTime()  # datetime.datetime.utcnow()
        expirationDate = Time.toString( now + datetime.timedelta( seconds = secondsLeft ) )
        # Drop the microseconds part.
        return expirationDate.split( '.' )[0]
def setExecutionTime(self, time):
    """Set the execution time; the literal "now" means the current date/time."""
    if time.lower() != "now":
        self.attributes['ExecutionTime'] = time
    else:
        self.attributes['ExecutionTime'] = Time.toString(Time.dateTime())
def setExecutionTime(self,time):
    """Set the execution time; "now" is replaced by the current date/time."""
    wantsNow = time.lower() == "now"
    self.attributes['ExecutionTime'] = Time.toString(Time.dateTime()) if wantsNow else time
def __graphTimeComment(self, fromEpoch, toEpoch):
    """Build the rrdtool COMMENT arguments: generation time plus data range.

    Colons are escaped because rrdtool treats ':' as a field separator.
    """
    generatedAt = Time.toString().replace(":", r"\:").split(".")[0]
    rangeText = "From %s to %s" % (Time.fromEpoch(fromEpoch), Time.fromEpoch(toEpoch))
    parts = [" 'COMMENT:Generated on %s UTC'" % generatedAt,
             " 'COMMENT:%s'" % rangeText.replace(":", r"\:")]
    return "".join(parts)
def toTuple(self):
    """Return the message fields as a tuple, with the time rendered as a string."""
    fields = (self.systemName,
              self.level,
              Time.toString(self.time),
              self.msgText,
              self.variableText,
              self.frameInfo,
              self.subSystemName)
    return fields
def toTuple( self ):
    """Serialise the message into a plain tuple (time as string)."""
    renderedTime = Time.toString( self.time )
    return ( self.systemName, self.level, renderedTime, self.msgText,
             self.variableText, self.frameInfo, self.subSystemName )
def setJobParameter(self, par_name, par_value, sendFlag=True):
    """ Send job parameter for jobID """
    # Store the parameter together with the current time stamp.
    self.jobParameters[par_name] = (par_value, Time.toString())
    if sendFlag and self.jobID:
        # and send
        return self.sendStoredJobParameters()
    return S_OK()
def setApplicationStatus(self, appStatus, sendFlag=True):
    """Send application status information to the JobState service for jobID"""
    timeStamp = Time.toString()
    # Non-string statuses are stored via their repr.
    if not isinstance(appStatus, str):
        appStatus = repr(appStatus)
    # Single quotes would break the DB layer downstream; strip them.
    self.appStatusInfo.append((appStatus.replace("'", ""), timeStamp))
    if not (sendFlag and self.jobID):
        return S_OK()
    return self.sendStoredStatusInfo()
def setApplicationStatus( self, appStatus, sendFlag = True ):
    """ Send application status information to the JobState service for jobID """
    if not self.jobID:
        # Nothing to report for local (jobless) execution.
        return S_OK( 'Local execution, jobID is null.' )
    # Strip single quotes to keep the DB layer happy.
    record = ( appStatus.replace( "'", '' ), Time.toString() )
    self.appStatusInfo.append( record )
    return self.sendStoredStatusInfo() if sendFlag else S_OK()
def setJobParameter( self, par_name, par_value, sendFlag = True ):
    """ Send job parameter for jobID """
    if not self.jobID:
        return S_OK( 'Local execution, jobID is null.' )
    # Store the parameter with the current time stamp.
    self.jobParameters[ par_name ] = ( par_value, Time.toString() )
    if not sendFlag:
        return S_OK()
    return self.sendStoredJobParameters()
def setJobParameter(self, par_name, par_value, sendFlag=True):
    """ Send job parameter for jobID """
    if not self.jobID:
        return S_OK('Local execution, jobID is null.')
    # Record the parameter value together with when it was set.
    self.jobParameters[par_name] = (par_value, Time.toString())
    return self.sendStoredJobParameters() if sendFlag else S_OK()
def setApplicationStatus(self, appStatus, sendFlag=True):
    """ Send application status information to the JobState service for jobID """
    if not self.jobID:
        return S_OK('Local execution, jobID is null.')
    # Quotes are stripped because the status ends up in an SQL statement.
    self.appStatusInfo.append((appStatus.replace("'", ''), Time.toString()))
    if sendFlag:
        # and send
        return self.sendStoredStatusInfo()
    return S_OK()
def __sanitizeForJSON(self, val):
    """Recursively convert *val* into JSON-serialisable values.

    Time-like objects become strings; dicts, lists and tuples are
    sanitised element by element (dicts and lists in place).
    """
    vType = type(val)
    if vType in Time._allTypes:
        return Time.toString(val)
    elif vType == types.DictType:
        for k in val:
            val[k] = self.__sanitizeForJSON(val[k])
    elif vType == types.ListType:
        for iP in range(len(val)):
            val[iP] = self.__sanitizeForJSON(val[iP])
    elif vType == types.TupleType:
        # Bug fix: assigning nt[iP] on an empty list raised IndexError for
        # any non-empty tuple; build the new contents with append instead.
        nt = []
        for iP in range(len(val)):
            nt.append(self.__sanitizeForJSON(val[iP]))
        val = tuple(nt)
    return val
def sanitizeForJSON(self, val):
    """Recursively convert *val* into JSON-serialisable values.

    Time-like objects become strings; dicts, lists and tuples are
    sanitised element by element (dicts and lists in place).
    """
    vType = type(val)
    if vType in Time._allTypes:
        return Time.toString(val)
    elif vType == types.DictType:
        for k in val:
            val[k] = self.sanitizeForJSON(val[k])
    elif vType == types.ListType:
        for iP in range(len(val)):
            # Bug fix: the loop index is iP; `val[i]` referenced an
            # undefined name and raised NameError.
            val[iP] = self.sanitizeForJSON(val[iP])
    elif vType == types.TupleType:
        # Bug fix: nt was empty, so nt[iP] = ... raised IndexError;
        # append the sanitised elements instead.
        nt = []
        for iP in range(len(val)):
            nt.append(self.sanitizeForJSON(val[iP]))
        val = tuple(nt)
    return val
def handleOldPilots(self, connection):
    """
    select all pilots that have not been updated in the last N days and declared them
    Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString(Time.dateTime() - Time.day * self.pilotStalledDays)
    result = self.pilotDB.selectPilots(
        {"Status": PilotStatus.PILOT_TRANSIENT_STATES},
        older=timeLimitToConsider,
        timeStamp="LastUpdateTime")
    if not result["OK"]:
        self.log.error("Failed to get the Pilot Agents")
        return result
    if not result["Value"]:
        # No stalled pilots: nothing to do.
        return S_OK()
    refList = result["Value"]
    result = self.pilotDB.getPilotInfo(refList)
    if not result["OK"]:
        self.log.error("Failed to get Info for Pilot Agents")
        return result
    pilotsDict = result["Value"]
    for pRef in pilotsDict:
        # Spare pilots whose jobs still show recent activity.
        if pilotsDict[pRef].get("Jobs") and self._checkJobLastUpdateTime(
                pilotsDict[pRef]["Jobs"], self.pilotStalledDays):
            self.log.debug(
                "%s should not be deleted since one job of %s is running." %
                (str(pRef), str(pilotsDict[pRef]["Jobs"])))
            continue
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict["Status"] = PilotStatus.DELETED
        deletedJobDict["StatusDate"] = Time.dateTime()
        pilotsToAccount[pRef] = deletedJobDict
        if len(pilotsToAccount) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots(pilotsToAccount, connection)
            self._killPilots(pilotsToAccount)
            pilotsToAccount = {}
    # Flush the remaining pilots.
    self.accountPilots(pilotsToAccount, connection)
    self._killPilots(pilotsToAccount)
    return S_OK()
def handleOldPilots(self, connection):
    """
    select all pilots that have not been updated in the last N days and declared them
    Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString(Time.dateTime() - Time.day * self.pilotStalledDays)
    result = self.pilotDB.selectPilots({'Status': self.queryStateList},
                                       older=timeLimitToConsider,
                                       timeStamp='LastUpdateTime')
    if not result['OK']:
        self.log.error('Failed to get the Pilot Agents')
        return result
    if not result['Value']:
        # No stalled pilots: nothing to do.
        return S_OK()
    refList = result['Value']
    result = self.pilotDB.getPilotInfo(refList)
    if not result['OK']:
        self.log.error('Failed to get Info for Pilot Agents')
        return result
    pilotsDict = result['Value']
    for pRef in pilotsDict:
        # Spare pilots whose jobs still show recent activity.
        if pilotsDict[pRef].get('Jobs') and self._checkJobLastUpdateTime(
                pilotsDict[pRef]['Jobs'], self.pilotStalledDays):
            self.log.debug(
                '%s should not be deleted since one job of %s is running.' %
                (str(pRef), str(pilotsDict[pRef]['Jobs'])))
            continue
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict['Status'] = 'Deleted'
        deletedJobDict['StatusDate'] = Time.dateTime()
        pilotsToAccount[pRef] = deletedJobDict
        if len(pilotsToAccount) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots(pilotsToAccount, connection)
            self._killPilots(pilotsToAccount)
            pilotsToAccount = {}
    # Flush the remaining pilots.
    self.accountPilots(pilotsToAccount, connection)
    self._killPilots(pilotsToAccount)
    return S_OK()
def tabularPrint(table):
    """Pretty-print a list of row dicts as a table via printTable.

    datetime values are rendered with Time.toString(); None becomes "".

    :param table: non-empty list of dicts, all with the same keys
    """
    columns_names = table[0].keys()
    records = []
    for row in table:
        record = []
        for k, v in row.items():
            # isinstance instead of type comparison: also accepts
            # datetime subclasses and is the idiomatic check.
            if isinstance(v, datetime.datetime):
                record.append(Time.toString(v))
            elif v is None:
                record.append("")
            else:
                record.append(v)
        records.append(record)
    output = printTable(columns_names, records, numbering=False,
                        columnSeparator=" | ", printOut=False)
    subLogger.notice(output)
def __init__( self, rpcStub = None, executionOrder = 0 ):
    """Instantiates the Workflow object and some default parameters.

    :param rpcStub: optional RPC stub; when given, it is DEncoded into
                    'Arguments' and its second element becomes 'Operation'
    :param executionOrder: position of this sub-request in the execution chain
    """
    self.subAttributeNames = ['Status', 'SubRequestID', 'Operation', 'ExecutionOrder', 'CreationTime', 'LastUpdate', 'Arguments']
    self.subAttributes = {}
    # Start every attribute as "Unknown", then override the known ones.
    for attr in self.subAttributeNames:
        self.subAttributes[attr] = "Unknown"
    # Some initial values
    self.subAttributes['Status'] = "Waiting"
    self.subAttributes['SubRequestID'] = makeGuid()
    self.subAttributes['CreationTime'] = Time.toString()
    self.subAttributes['ExecutionOrder'] = executionOrder
    if rpcStub:
        self.subAttributes['Arguments'] = DEncode.encode( rpcStub )
        self.subAttributes['Operation'] = rpcStub[1]
def tabularPrint(table):
    """Render a list of row dicts as a table and emit it via subLogger."""
    def _cell(value):
        # datetimes are stringified; None is shown as an empty cell.
        if isinstance(value, datetime.datetime):
            return Time.toString(value)
        return "" if value is None else value

    columns_names = list(table[0])
    records = [[_cell(value) for value in row.values()] for row in table]
    output = printTable(columns_names, records, numbering=False,
                        columnSeparator=" | ", printOut=False)
    subLogger.notice(output)
def handleOldPilots( self, connection ):
    """
      select all pilots that have not been updated in the last N days and declared them
      Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString( Time.dateTime() - Time.day * self.pilotStalledDays )
    # A.T. Below looks to be a bug
    # result = self.pilotDB.selectPilots( {'Status':self.queryStateList} , older=None, timeStamp='LastUpdateTime' )
    result = self.pilotDB.selectPilots( { 'Status':self.queryStateList} , older = timeLimitToConsider,
                                        timeStamp = 'LastUpdateTime' )
    if not result['OK']:
        self.log.error( 'Failed to get the Pilot Agents' )
        return result
    if not result['Value']:
        return S_OK()
    refList = result['Value']
    result = self.pilotDB.getPilotInfo( refList )
    if not result['OK']:
        self.log.error( 'Failed to get Info for Pilot Agents' )
        return result
    pilotsDict = result['Value']
    for pRef in pilotsDict:
        # Bug fix: dict.has_key() was removed in Python 3; .get() covers
        # both the missing-key and the empty-list cases in one test.
        if pilotsDict[pRef].get( 'Jobs' ) and self._checkJobLastUpdateTime( pilotsDict[pRef]['Jobs'], self.pilotStalledDays ):
            self.log.debug( '%s should not be deleted since one job of %s is running.' % ( str( pRef ), str( pilotsDict[pRef]['Jobs'] ) ) )
            continue
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict['Status'] = 'Deleted'
        deletedJobDict['StatusDate'] = Time.dateTime()
        pilotsToAccount[ pRef ] = deletedJobDict
        if len( pilotsToAccount ) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots( pilotsToAccount, connection )
            self._killPilots( pilotsToAccount )
            pilotsToAccount = {}
    self.accountPilots( pilotsToAccount, connection )
    self._killPilots( pilotsToAccount )
    return S_OK()
def setJobStatus(self, status="", minorStatus="", applicationStatus="", sendFlag=True):
    """Accumulate and possibly send job status information to the JobState service"""
    timeStamp = Time.toString()
    # Single quotes are stripped before the record reaches the DB layer.
    cleaned = (status.replace("'", ""), minorStatus.replace("'", ""), timeStamp)
    self.jobStatusInfo.append(cleaned)
    if applicationStatus:
        self.appStatusInfo.append((applicationStatus.replace("'", ""), timeStamp))
    if not (sendFlag and self.jobID):
        return S_OK()
    return self.sendStoredStatusInfo()
def composeString(self, messageObject):
    """Render *messageObject* as text: one header-prefixed line per message
    line, or the bare message when headers are disabled."""
    from DIRAC.Core.Utilities import Time
    # If not headers, just show lines
    if not self._optionsDictionary['showHeaders']:
        return messageObject.getMessage()
    # Do the full header
    messageName = "%s" % messageObject.getName()
    if messageObject.getSubSystemName():
        messageName = "%s/%s" % (messageName, messageObject.getSubSystemName())
    if self._showCallingFrame and messageObject.getFrameInfo():
        messageName = "%s[%s]" % (messageName, messageObject.getFrameInfo())
    # Second precision is enough for log output.
    timeToShow = Time.toString(messageObject.getTime()).split('.')[0]
    level = messageObject.getLevel().rjust(5)
    lines = ["%s UTC %s %s: %s" % (timeToShow, messageName, level, lineString)
             for lineString in messageObject.getMessage().split("\n")]
    return "\n".join(lines)
def setJobStatus( self, JobID, status ):
    """ Insert HadoopID

    Updates the job's status and last-update time in the DB, records the
    change in the history table and propagates it to the JobDB.
    :param JobID: job identifier
    :param status: new BdJobStatus value
    """
    resultInfo = self.getJobIDInfo( JobID )
    tableName, _validStates, idName = self.__getTypeTuple( 'job' )
    # NOTE(review): the SQL below is built by string interpolation; if
    # 'status' or 'JobID' can carry untrusted input this is an injection
    # risk -- consider parameterized queries.
    sqlUpdate = 'UPDATE %s SET BdJobStatus= "%s" WHERE %s = %s' % ( tableName, status, idName, JobID )
    self._update( sqlUpdate )
    sqlUpdate = 'UPDATE %s SET BdJobLastUpdate= "%s" WHERE %s = %s' % ( tableName, Time.toString(), idName, JobID )
    self._update( sqlUpdate )
    # Mirror the change into the history table.
    job_his = self.insertHistoryJob( str( JobID ), resultInfo[0][0], status, resultInfo[0][1], Time.toString(), resultInfo[0][2], resultInfo[0][3], resultInfo[0][4], resultInfo[0][5], resultInfo[0][6] )
    if not job_his['OK']:
        return S_ERROR( 'Failed to insert Big Data Job in history table' )
    result = self.setIntoJobDBStatus( JobID, status, resultInfo[0][1], resultInfo[0][2], resultInfo[0][7].strip() )
    return S_OK( result )
def composeString( self, messageObject ):
    """Format a log message: optional header (time, source, level) followed
    by the message text, one output line per message line."""
    from DIRAC.Core.Utilities import Time
    # If not headers, just show lines
    if not self._optionsDictionary[ 'showHeaders' ]:
        return messageObject.getMessage()
    # Do the full header
    messageName = "%s" % messageObject.getName()
    subSystem = messageObject.getSubSystemName()
    if subSystem:
        messageName = "%s/%s" % ( messageName, subSystem )
    if self._showCallingFrame and messageObject.getFrameInfo():
        messageName = "%s[%s]" % ( messageName, messageObject.getFrameInfo() )
    # Drop the sub-second part of the time stamp.
    timeToShow = Time.toString( messageObject.getTime() ).split( '.' )[0]
    level = messageObject.getLevel().rjust( 5 )
    lines = [ "%s UTC %s %s: %s" % ( timeToShow, messageName, level, lineString )
              for lineString in messageObject.getMessage().split( "\n" ) ]
    return "\n".join( lines )
def __init__(self, rpcStub=None, executionOrder=0):
    """Instantiates the Workflow object and some default parameters. """
    self.subAttributeNames = [
        'Status',
        'SubRequestID',
        'Operation',
        'ExecutionOrder',
        'CreationTime',
        'LastUpdate',
        'Arguments'
    ]
    # Every attribute starts as "Unknown"; known values are set below.
    self.subAttributes = dict.fromkeys(self.subAttributeNames, "Unknown")
    self.subAttributes.update({
        'Status': "Waiting",
        'SubRequestID': makeGuid(),
        'CreationTime': Time.toString(),
        'ExecutionOrder': executionOrder,
    })
    if rpcStub:
        self.subAttributes['Arguments'] = DEncode.encode(rpcStub)
        self.subAttributes['Operation'] = rpcStub[1]
def initialize(self,script):
    """ Set default values to attributes,parameters

    :param script: optional request script; when given it is parsed to
                   populate the object
    """
    for name in self.attributeNames:
        self.attributes[name] = 'Unknown'
    # Set some defaults
    self.attributes['DIRACSetup'] = "LHCb-Development"
    # NOTE(review): 'commands' is Python-2 only and the exit status of
    # uuidgen is captured but never checked.
    status,self.attributes['RequestID'] = commands.getstatusoutput('uuidgen')
    self.attributes['CreationTime'] = Time.toString(Time.dateTime())
    self.attributes['Status'] = "New"
    for name in self.parameterNames:
        self.parameters[name] = 'Unknown'
    for name in self.subrequestTypes:
        self.subrequests[name] = []
    if script:
        self.parseRequest(script)
def handleOldPilots(self, connection):
    """
    select all pilots that have not been updated in the last N days and declared them
    Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString(Time.dateTime() - Time.day * self.pilotStalledDays)
    # A.T. Below looks to be a bug
    # result = self.pilotDB.selectPilots( {'Status':self.queryStateList} , older=None, timeStamp='LastUpdateTime' )
    result = self.pilotDB.selectPilots({'Status': self.queryStateList},
                                       older=timeLimitToConsider,
                                       timeStamp='LastUpdateTime')
    if not result['OK']:
        self.log.error('Failed to get the Pilot Agents')
        return result
    if not result['Value']:
        # No stalled pilots: nothing to do.
        return S_OK()
    refList = result['Value']
    result = self.pilotDB.getPilotInfo(refList)
    if not result['OK']:
        self.log.error('Failed to get Info for Pilot Agents')
        return result
    pilotsDict = result['Value']
    for pRef in pilotsDict:
        # Mark every selected pilot Deleted with the current UTC time.
        statusDate = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict['Status'] = 'Deleted'
        deletedJobDict['StatusDate'] = statusDate
        pilotsToAccount[pRef] = deletedJobDict
        if len(pilotsToAccount) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots(pilotsToAccount, connection)
            pilotsToAccount = {}
    self.accountPilots(pilotsToAccount, connection)
    return S_OK()
def initialize(self, script):
    """ Set default values to attributes,parameters

    :param script: optional request script; when given it is parsed to
                   populate the object
    """
    for name in self.attributeNames:
        self.attributes[name] = 'Unknown'
    # Set some defaults
    self.attributes['DIRACSetup'] = "LHCb-Development"
    # NOTE(review): 'commands' is Python-2 only and the uuidgen exit
    # status is captured but never checked.
    status, self.attributes['RequestID'] = commands.getstatusoutput(
        'uuidgen')
    self.attributes['CreationTime'] = Time.toString(Time.dateTime())
    self.attributes['Status'] = "New"
    for name in self.parameterNames:
        self.parameters[name] = 'Unknown'
    for name in self.subrequestTypes:
        self.subrequests[name] = []
    if script:
        self.parseRequest(script)
def handleOldPilots( self, connection ):
    """
      select all pilots that have not been updated in the last N days and declared them
      Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString( Time.dateTime() - Time.day * self.pilotStalledDays )
    # A.T. Below looks to be a bug
    # result = self.pilotDB.selectPilots( {'Status':self.queryStateList} , older=None, timeStamp='LastUpdateTime' )
    result = self.pilotDB.selectPilots( { 'Status':self.queryStateList} , older = timeLimitToConsider,
                                        timeStamp = 'LastUpdateTime' )
    if not result['OK']:
        self.log.error( 'Failed to get the Pilot Agents' )
        return result
    if not result['Value']:
        # No stalled pilots: nothing to do.
        return S_OK()
    refList = result['Value']
    result = self.pilotDB.getPilotInfo( refList )
    if not result['OK']:
        self.log.error( 'Failed to get Info for Pilot Agents' )
        return result
    pilotsDict = result['Value']
    for pRef in pilotsDict:
        # Mark every selected pilot Deleted with the current UTC time.
        statusDate = time.strftime( '%Y-%m-%d %H:%M:%S', time.gmtime() )
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict['Status'] = 'Deleted'
        deletedJobDict['StatusDate'] = statusDate
        pilotsToAccount[ pRef ] = deletedJobDict
        if len( pilotsToAccount ) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots( pilotsToAccount, connection )
            pilotsToAccount = {}
    self.accountPilots( pilotsToAccount, connection )
    return S_OK()
def printTable( table ):
    ''' Prints query output on a tabular

    Renders a non-empty list of row dicts (all sharing the same keys) as an
    ASCII table via subLogger. datetimes are stringified and None becomes ''.
    NOTE(review): rows are mutated in place, and the column order relies on
    every row dict iterating its keys in the same order as table[0].keys().
    '''
    columns_names = table[0].keys()
    # One list per column, seeded with the column header.
    columns = [ [c] for c in columns_names ]
    for row in table:
        for j, key in enumerate( row ):
            if type( row[key] ) == datetime.datetime:
                row[key] = Time.toString( row[key] )
            if row[key] is None:
                row[key] = ''
            columns[j].append( row[key] )
    # Width of each column = widest value (header included).
    columns_width = []
    for column in columns:
        columns_width.append( max( [ len( str( value ) ) for value in column ] ) )
    columns_separator = True
    for i in range( len( table ) + 1 ):
        row = ''
        for j in range( len( columns ) ):
            row = row + "{:{}}".format( columns[j][i], columns_width[j] ) + " | "
        row = "| " + row
        line = "-" * ( len( row ) - 1 )
        # The header row (first iteration) is framed by separator lines.
        if columns_separator:
            subLogger.notice( line )
        subLogger.notice( row )
        if columns_separator:
            subLogger.notice( line )
        columns_separator = False
    # Closing separator (uses the last computed line width).
    subLogger.notice( line )
def submitBigDataJobs( self, endpoint, numBigDataJobsAllowed, runningSiteName, NameNode,
                       BigDataSoftware, BigDataSoftwareVersion, HLLName, HLLVersion,
                       PublicIP, Port, jobIds , runningEndPointName, JobName, User,
                       dataset, UsePilot, IsInteractive ):
    """ Big Data job submission with all the parameters of SITE and Job

    Registers the job in BigDataDB, submits it to the NameNode and records
    the resulting big-data job id and status.
    """
    self.log.info( 'Director:submitBigDataJobs:JobSubmisionProcess' )
    if ( numBigDataJobsAllowed <= 0 ):
        # NOTE(review): S_ERROR is called with two arguments here; verify
        # this matches the S_ERROR signature in use.
        return S_ERROR( "Number of slots reached for %s in the NameNode " % runningSiteName, NameNode )
    if NameNode not in self.runningEndPoints[endpoint]['NameNode']:
        return S_ERROR( 'Unknown NameNode: %s' % NameNode )
    # NOTE(review): newJob is never used afterwards.
    newJob = BigDataDB.insertBigDataJob( jobIds, JobName, Time.toString(), NameNode,
                                         runningSiteName, PublicIP, "", "", "",
                                         BigDataSoftware, BigDataSoftwareVersion, HLLName,
                                         HLLVersion, "Submitted" )
    self.log.info( 'Director:submitBigDataJobs:SubmitJob' )
    dictBDJobSubmitted = self._submitBigDataJobs( NameNode, Port, jobIds, PublicIP,
                                                  runningEndPointName, User, JobName,
                                                  dataset, UsePilot, IsInteractive )
    if not dictBDJobSubmitted[ 'OK' ]:
        return dictBDJobSubmitted
    bdjobID = dictBDJobSubmitted['Value']
    result = BigDataDB.setHadoopID( jobIds, bdjobID )
    if not result[ 'OK' ]:
        # NOTE(review): the S_ERROR below is created but not returned, so
        # the failure is silently ignored -- confirm whether that is intended.
        S_ERROR( "BigData ID not updated" )
    result = BigDataDB.setIntoJobDBStatus( jobIds, "Submitted", "", runningSiteName, bdjobID )
    if not result[ 'OK' ]:
        # NOTE(review): same here -- error result is dropped.
        S_ERROR( "JobDB of BigData Soft not updated" )
    self.log.info( 'Director:submitBigDataJobs:JobSubmitted' )
    return S_OK( "OK" )
def handleOldPilots(self, connection):
    """
    select all pilots that have not been updated in the last N days and declared them
    Deleted, accounting for them.
    """
    pilotsToAccount = {}
    # Pilots not updated since this cut-off are considered stalled.
    timeLimitToConsider = Time.toString(Time.dateTime() - Time.day * self.pilotStalledDays)
    # A.T. Below looks to be a bug
    # result = self.pilotDB.selectPilots( {'Status':self.queryStateList} , older=None, timeStamp='LastUpdateTime' )
    result = self.pilotDB.selectPilots(
        {"Status": self.queryStateList}, older=timeLimitToConsider, timeStamp="LastUpdateTime"
    )
    if not result["OK"]:
        self.log.error("Failed to get the Pilot Agents")
        return result
    if not result["Value"]:
        # No stalled pilots: nothing to do.
        return S_OK()
    refList = result["Value"]
    result = self.pilotDB.getPilotInfo(refList)
    if not result["OK"]:
        self.log.error("Failed to get Info for Pilot Agents")
        return result
    pilotsDict = result["Value"]
    for pRef in pilotsDict:
        deletedJobDict = pilotsDict[pRef]
        deletedJobDict["Status"] = "Deleted"
        deletedJobDict["StatusDate"] = Time.dateTime()
        pilotsToAccount[pRef] = deletedJobDict
        if len(pilotsToAccount) > 100:
            # Flush in batches to keep each accounting call bounded.
            self.accountPilots(pilotsToAccount, connection)
            pilotsToAccount = {}
    self.accountPilots(pilotsToAccount, connection)
    return S_OK()
def main():
    """List proxies stored in the ProxyDB whose remaining lifetime exceeds
    the requested threshold, grouped per user."""
    params = Params()
    params.registerCLISwitches()
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()  # NOTE(review): args is unused
    result = gProxyManager.getDBContents()
    if not result['OK']:
        print("Can't retrieve list of users: %s" % result['Message'])
        DIRAC.exit(1)
    keys = result['Value']['ParameterNames']  # NOTE(review): keys is unused
    records = result['Value']['Records']
    dataDict = {}
    now = Time.dateTime()
    for record in records:
        # Keep only proxies with more seconds left than the threshold.
        expirationDate = record[3]
        dt = expirationDate - now
        secsLeft = dt.days * 86400 + dt.seconds
        if secsLeft > params.proxyLifeTime:
            userName, userDN, userGroup, _, persistent = record
            if userName not in dataDict:
                dataDict[userName] = []
            dataDict[userName].append(
                (userDN, userGroup, expirationDate, persistent))
    # Pretty-print the surviving proxies grouped by user.
    for userName in dataDict:
        print("* %s" % userName)
        for iP in range(len(dataDict[userName])):
            data = dataDict[userName][iP]
            print(" DN : %s" % data[0])
            print(" group : %s" % data[1])
            print(" not after : %s" % Time.toString(data[2]))
            print(" persistent : %s" % data[3])
            if iP < len(dataDict[userName]) - 1:
                print(" -")
    DIRAC.exit(0)
def insertMessage( self, message, site, nodeFQDN, userDN, userGroup, remoteAddress ):
    """ This function inserts the Log message into the DB

    Each auxiliary dimension (user DN, site, client IP, system, sub-system,
    fixed text) is resolved -- or created -- in its own table and the
    resulting foreign keys are collected for the final MessageRepository
    insert.
    """
    messageDate = Time.toString( message.getTime() )
    # Keep second precision only (strip the fractional part).
    messageDate = messageDate[:messageDate.find( '.' )]
    messageName = message.getName()
    messageSubSystemName = message.getSubSystemName()
    fieldsList = [ 'MessageTime', 'VariableText' ]
    messageList = [ messageDate, message.getVariableMessage() ]
    # Owner (DN + group) -> UserDNID
    inValues = [ userDN, userGroup ]
    inFields = [ 'OwnerDN', 'OwnerGroup' ]
    outFields = [ 'UserDNID' ]
    result = self.__insertIntoAuxiliaryTable( 'UserDNs', outFields, inFields, inValues )
    if not result['OK']:
        return result
    messageList.append( result['Value'] )
    fieldsList.extend( outFields )
    # Site -> SiteID (used below as foreign key of the client IP record)
    if not site:
        site = 'Unknown'
    inFields = [ 'SiteName']
    inValues = [ site ]
    outFields = [ 'SiteID' ]
    result = self.__insertIntoAuxiliaryTable( 'Sites', outFields, inFields, inValues )
    if not result['OK']:
        return result
    siteIDKey = result['Value']
    # Client address -> ClientIPNumberID
    inFields = [ 'ClientIPNumberString' , 'ClientFQDN', 'SiteID' ]
    inValues = [ remoteAddress, nodeFQDN, siteIDKey ]
    outFields = [ 'ClientIPNumberID' ]
    result = self.__insertIntoAuxiliaryTable( 'ClientIPs', outFields, inFields, inValues )
    if not result['OK']:
        return result
    messageList.append( result['Value'] )
    fieldsList.extend( outFields )
    messageList.append( message.getLevel() )
    fieldsList.append( 'LogLevel' )
    # System name -> SystemID (foreign key of the sub-system record)
    if not messageName:
        messageName = 'Unknown'
    inFields = [ 'SystemName' ]
    inValues = [ messageName ]
    outFields = [ 'SystemID' ]
    result = self.__insertIntoAuxiliaryTable( 'Systems', outFields, inFields, inValues )
    if not result['OK']:
        return result
    systemIDKey = result['Value']
    # Sub-system name -> SubSystemID (foreign key of the fixed-text record)
    if not messageSubSystemName:
        messageSubSystemName = 'Unknown'
    inFields = [ 'SubSystemName', 'SystemID' ]
    inValues = [ messageSubSystemName, systemIDKey ]
    outFields = [ 'SubSystemID' ]
    result = self.__insertIntoAuxiliaryTable( 'SubSystems', outFields, inFields, inValues )
    if not result['OK']:
        return result
    subSystemIDKey = result['Value']
    # Fixed message text -> FixedTextID
    inFields = [ 'FixedTextString' , 'SubSystemID' ]
    inValues = [ message.getFixedMessage(), subSystemIDKey ]
    outFields = [ 'FixedTextID' ]
    result = self.__insertIntoAuxiliaryTable( 'FixedTextMessages', outFields, inFields, inValues )
    if not result['OK']:
        return result
    messageList.append( result['Value'] )
    fieldsList.extend( outFields )
    # Final insert with all collected foreign keys.
    return self.insertFields( 'MessageRepository', fieldsList, messageList )
def generateNewVersion(self):
    """Stamp the object with a fresh, time-based version string and persist it."""
    newVersion = Time.toString()
    self.setVersion(newVersion)
    self.sync()
    gLogger.info("Generated new version %s" % self.getVersion())
def export_setJobStatusBulk(self, jobID, statusDict):
    """ Set various status fields for job specified by its JobId.
        Set only the last status in the JobDB, updating all the status
        logging information in the JobLoggingDB. The statusDict has datetime
        as a key and status information dictionary as values

        :param jobID: job identifier; coerced to int below
        :param statusDict: {dateString: {'Status': ..., 'MinorStatus': ...,
                                         'ApplicationStatus': ..., 'Source': ...}}
        :return: S_OK() or S_ERROR structure
    """
    status = ''
    minor = ''
    application = ''
    appCounter = ''
    jobID = int(jobID)
    result = jobDB.getJobAttributes(jobID, ['Status', 'StartExecTime', 'EndExecTime'])
    if not result['OK']:
        return result
    if not result['Value']:
        # if there is no matching Job it returns an empty dictionary
        return S_ERROR('No Matching Job')
    # If the current status is Stalled and we get an update, it should probably be "Running"
    if result['Value']['Status'] == JobStatus.STALLED:
        status = JobStatus.RUNNING
    startTime = result['Value'].get('StartExecTime', '')
    endTime = result['Value'].get('EndExecTime', '')
    # Get the latest WN time stamps of status updates
    result = logDB.getWMSTimeStamps(int(jobID))
    if not result['OK']:
        return result
    # 'LastTime' is a summary entry, not an epoch stamp: exclude it from the max
    lastTime = max([float(t) for s, t in result['Value'].items() if s != 'LastTime'])
    lastTime = Time.toString(Time.fromEpoch(lastTime))
    # Keys are date strings; lexicographic sort equals chronological sort here
    dates = sorted(statusDict)
    # Pick up start and end times from all updates, if they don't exist
    for date in dates:
        sDict = statusDict[date]
        status = sDict.get('Status', status)
        if status in JobStatus.JOB_FINAL_STATES and not endTime:
            endTime = date
        minor = sDict.get('MinorStatus', minor)
        # Pick up the start date
        if minor == "Application" and status == JobStatus.RUNNING and not startTime:
            startTime = date
    # We should only update the status if its time stamp is more recent than the last update
    if dates[-1] >= lastTime:
        # Get the last status values
        for date in [date for date in dates if date >= lastTime]:
            sDict = statusDict[date]
            status = sDict.get('Status', status)
            minor = sDict.get('MinorStatus', minor)
            application = sDict.get('ApplicationStatus', application)
            appCounter = sDict.get('ApplicationCounter', appCounter)
        attrNames = []
        attrValues = []
        if status:
            attrNames.append('Status')
            attrValues.append(status)
        if minor:
            attrNames.append('MinorStatus')
            attrValues.append(minor)
        if application:
            attrNames.append('ApplicationStatus')
            attrValues.append(application)
        if appCounter:
            attrNames.append('ApplicationCounter')
            attrValues.append(appCounter)
        result = jobDB.setJobAttributes(jobID, attrNames, attrValues, update=True)
        if not result['OK']:
            return result
    # Update start and end time if needed
    # NOTE(review): the results of these two calls are not checked
    if endTime:
        result = jobDB.setEndExecTime(jobID, endTime)
    if startTime:
        result = jobDB.setStartExecTime(jobID, startTime)
    # Update the JobLoggingDB records ('idem' marks an unchanged field)
    for date in dates:
        sDict = statusDict[date]
        status = sDict['Status'] if sDict['Status'] else 'idem'
        minor = sDict['MinorStatus'] if sDict['MinorStatus'] else 'idem'
        application = sDict['ApplicationStatus'] if sDict['ApplicationStatus'] else 'idem'
        source = sDict['Source']
        result = logDB.addLoggingRecord(jobID, status, minor, application, date, source)
        if not result['OK']:
            return result
    return S_OK()
def export_setJobStatusBulk(self, jobID, statusDict):
    """ Set various status fields for job specified by its JobId.
        Set only the last status in the JobDB, updating all the status
        logging information in the JobLoggingDB. The statusDict has datetime
        as a key and status information dictionary as values

        :param jobID: job identifier; coerced to int below
        :param statusDict: {dateString: {'Status': ..., 'MinorStatus': ...,
                                         'ApplicationStatus': ..., 'Source': ...}}
        :return: S_OK() or S_ERROR structure
    """
    status = ""
    minor = ""
    application = ""
    appCounter = ""
    endDate = ''
    startDate = ''
    startFlag = ''
    jobID = int(jobID)
    result = jobDB.getJobAttributes(jobID, ['Status'])
    if not result['OK']:
        return result
    if not result['Value']:
        # if there is no matching Job it returns an empty dictionary
        return S_ERROR('No Matching Job')
    # A Stalled job receiving an update is assumed to be Running again
    new_status = result['Value']['Status']
    if new_status == "Stalled":
        status = 'Running'
    # Get the latest WN time stamps of status updates
    result = logDB.getWMSTimeStamps(int(jobID))
    if not result['OK']:
        return result
    # 'LastTime' is a summary entry, not an epoch stamp: exclude it from the max
    lastTime = max(
        [float(t) for s, t in result['Value'].items() if s != 'LastTime'])
    lastTime = Time.toString(Time.fromEpoch(lastTime))
    # Get the last status values (keys are date strings, sorted chronologically)
    dates = sorted(statusDict)
    # We should only update the status if its time stamp is more recent than the last update
    for date in [date for date in dates if date >= lastTime]:
        sDict = statusDict[date]
        if sDict['Status']:
            status = sDict['Status']
            if status in JOB_FINAL_STATES:
                endDate = date
            if status == "Running":
                startFlag = 'Running'
        if sDict['MinorStatus']:
            minor = sDict['MinorStatus']
            # First "Application" minor status after Running marks the start time
            if minor == "Application" and startFlag == 'Running':
                startDate = date
        if sDict['ApplicationStatus']:
            application = sDict['ApplicationStatus']
        counter = sDict.get('ApplicationCounter')
        if counter:
            appCounter = counter
    # Collect only the attributes that actually changed
    attrNames = []
    attrValues = []
    if status:
        attrNames.append('Status')
        attrValues.append(status)
    if minor:
        attrNames.append('MinorStatus')
        attrValues.append(minor)
    if application:
        attrNames.append('ApplicationStatus')
        attrValues.append(application)
    if appCounter:
        attrNames.append('ApplicationCounter')
        attrValues.append(appCounter)
    result = jobDB.setJobAttributes(jobID, attrNames,
                                    attrValues, update=True)
    if not result['OK']:
        return result
    # NOTE(review): the results of these two calls are not checked
    if endDate:
        result = jobDB.setEndExecTime(jobID, endDate)
    if startDate:
        result = jobDB.setStartExecTime(jobID, startDate)
    # Update the JobLoggingDB records ('idem' marks an unchanged field)
    for date in dates:
        sDict = statusDict[date]
        status = sDict['Status']
        if not status:
            status = 'idem'
        minor = sDict['MinorStatus']
        if not minor:
            minor = 'idem'
        application = sDict['ApplicationStatus']
        if not application:
            application = 'idem'
        source = sDict['Source']
        result = logDB.addLoggingRecord(jobID, status, minor, application,
                                        date, source)
        if not result['OK']:
            return result
    return S_OK()
def test():
  """ Some test cases for DataLoggingDB: creates the table, inserts and reads
      back a few records, then drops the table. Exits the process with
      status 1 on the first failed assertion (Python 2 script-style test).
  """
  # building up some fake CS values
  gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
  gConfig.setOptionValue( '/DIRAC/Setups/Test/DataManagement', 'Test' )
  host = '127.0.0.1'
  user = '******'
  pwd = 'Dirac'
  db = 'AccountingDB'
  gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Host', host )
  gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/DBName', db )
  gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/User', user )
  gConfig.setOptionValue( '/Systems/DataManagement/Test/Databases/DataLoggingDB/Password', pwd )
  # `db` is rebound from the DB name string to the DB object here
  db = DataLoggingDB()
  assert db._connect()['OK']
  lfns = ['/Test/00001234/File1', '/Test/00001234/File2']
  status = 'TestStatus'
  minor = 'MinorStatus'
  # One record with a string date, one with a datetime object
  date1 = Time.toString()
  date2 = Time.dateTime()
  source = 'Somewhere'
  fileTuples = ( ( lfns[0], status, minor, date1, source ), ( lfns[1], status, minor, date2, source ) )
  try:
    gLogger.info( '\n Creating Table\n' )
    # Make sure it is there and it has been created for this test
    result = db._checkTable()
    assert result['OK']
    # Second call must report that the table already exists
    result = db._checkTable()
    assert not result['OK']
    assert result['Message'] == 'The requested table already exist'
    gLogger.info( '\n Inserting some records\n' )
    result = db.addFileRecord( lfns, status, date = '2012-04-28 09:49:02.545466' )
    assert result['OK']
    assert result['Value'] == 2
    assert result['lastRowId'] == 2
    result = db.addFileRecords( fileTuples )
    assert result['OK']
    gLogger.info( '\n Retrieving some records\n' )
    # Each LFN now has two records: one from addFileRecord, one from addFileRecords
    result = db.getFileLoggingInfo( lfns[0] )
    assert result['OK']
    assert len( result['Value'] ) == 2
    result = db.getFileLoggingInfo( lfns[1] )
    assert result['OK']
    assert len( result['Value'] ) == 2
    result = db.getUniqueStates()
    assert result['OK']
    assert result['Value'] == [status]
    gLogger.info( '\n Removing Table\n' )
    result = db._update( 'DROP TABLE `%s`' % db.tableName )
    assert result['OK']
    gLogger.info( '\n OK\n' )
  except AssertionError:
    # Report the last DB result that triggered the failure, then bail out
    print 'ERROR ',
    if not result['OK']:
      print result['Message']
    else:
      print result
    sys.exit( 1 )
def insertMessage(self, message, site, nodeFQDN, userDN, userGroup, remoteAddress):
    """ Insert a single log Message into the MessageRepository table.

        The variable parts of the record (owner DN/group, site, client IP,
        system and subsystem names, fixed message text) are first normalised
        into their auxiliary tables; the foreign keys returned by those
        inserts are accumulated and used for the final insert.

        :param message: Message object providing time, level and text parts
        :param site: site name; falls back to 'Unknown' when empty
        :param nodeFQDN: fully qualified domain name of the client host
        :param userDN: DN of the message owner
        :param userGroup: DIRAC group of the message owner
        :param remoteAddress: address the message was received from
        :return: S_OK/S_ERROR structure from the final insertFields call
    """
    # Strip the fractional seconds from the time stamp before storing it
    messageDate = Time.toString(message.getTime())
    messageDate = messageDate[:messageDate.find('.')]
    messageName = message.getName()
    messageSubSystemName = message.getSubSystemName()
    fieldsList = ['MessageTime', 'VariableText']
    messageList = [messageDate, message.getVariableMessage()]
    # Owner DN/group -> UserDNs auxiliary table
    inValues = [userDN, userGroup]
    inFields = ['OwnerDN', 'OwnerGroup']
    outFields = ['UserDNID']
    result = self.__insertIntoAuxiliaryTable('UserDNs', outFields, inFields, inValues)
    if not result['OK']:
        return result
    messageList.append(result['Value'])
    fieldsList.extend(outFields)
    # Site -> Sites auxiliary table; its key is also needed for ClientIPs
    if not site:
        site = 'Unknown'
    inFields = ['SiteName']
    inValues = [site]
    outFields = ['SiteID']
    result = self.__insertIntoAuxiliaryTable('Sites', outFields, inFields, inValues)
    if not result['OK']:
        return result
    siteIDKey = result['Value']
    # Client address -> ClientIPs auxiliary table
    inFields = ['ClientIPNumberString', 'ClientFQDN', 'SiteID']
    inValues = [remoteAddress, nodeFQDN, siteIDKey]
    outFields = ['ClientIPNumberID']
    result = self.__insertIntoAuxiliaryTable('ClientIPs', outFields, inFields, inValues)
    if not result['OK']:
        return result
    messageList.append(result['Value'])
    fieldsList.extend(outFields)
    messageList.append(message.getLevel())
    fieldsList.append('LogLevel')
    # System name -> Systems auxiliary table; its key is needed for SubSystems
    if not messageName:
        messageName = 'Unknown'
    inFields = ['SystemName']
    inValues = [messageName]
    outFields = ['SystemID']
    result = self.__insertIntoAuxiliaryTable('Systems', outFields, inFields, inValues)
    if not result['OK']:
        return result
    systemIDKey = result['Value']
    # SubSystem name -> SubSystems auxiliary table; key used for FixedTextMessages
    if not messageSubSystemName:
        messageSubSystemName = 'Unknown'
    inFields = ['SubSystemName', 'SystemID']
    inValues = [messageSubSystemName, systemIDKey]
    outFields = ['SubSystemID']
    result = self.__insertIntoAuxiliaryTable('SubSystems', outFields, inFields, inValues)
    if not result['OK']:
        return result
    subSystemIDKey = result['Value']
    # Fixed (non-variable) message text -> FixedTextMessages auxiliary table
    inFields = ['FixedTextString', 'SubSystemID']
    inValues = [message.getFixedMessage(), subSystemIDKey]
    outFields = ['FixedTextID']
    result = self.__insertIntoAuxiliaryTable('FixedTextMessages', outFields, inFields, inValues)
    if not result['OK']:
        return result
    messageList.append(result['Value'])
    fieldsList.extend(outFields)
    # Finally insert the message row with all the collected foreign keys
    return self.insertFields('MessageRepository', fieldsList, messageList)
def testSystemLoggingDB():
    """ Some test cases for SystemLoggingDB: creates the schema, inserts a
        batch of messages, queries them back, then drops the tables. Exits
        the process with status 1 on the first failed assertion
        (Python 2 script-style test).
    """
    # building up some fake CS values
    gConfig.setOptionValue('DIRAC/Setup', 'Test')
    gConfig.setOptionValue('/DIRAC/Setups/Test/Framework', 'Test')
    host = '127.0.0.1'
    user = '******'
    pwd = 'Dirac'
    db = 'AccountingDB'
    gConfig.setOptionValue(
        '/Systems/Framework/Test/Databases/SystemLoggingDB/Host', host)
    gConfig.setOptionValue(
        '/Systems/Framework/Test/Databases/SystemLoggingDB/DBName', db)
    gConfig.setOptionValue(
        '/Systems/Framework/Test/Databases/SystemLoggingDB/User', user)
    gConfig.setOptionValue(
        '/Systems/Framework/Test/Databases/SystemLoggingDB/Password', pwd)
    from DIRAC.FrameworkSystem.private.logging.Message import tupleToMessage
    systemName = 'TestSystem'
    subSystemName = 'TestSubSystem'
    level = 10
    time = Time.toString()
    msgTest = 'Hello'
    variableText = time
    frameInfo = ""
    message = tupleToMessage((systemName, level, time, msgTest, variableText,
                              frameInfo, subSystemName))
    site = 'somewehere'
    # Longer than the SiteName column: the insert with this site must fail
    longSite = 'somewehere1234567890123456789012345678901234567890123456789012345678901234567890'
    nodeFQDN = '127.0.0.1'
    userDN = 'Yo'
    userGroup = 'Us'
    remoteAddress = 'elsewhere'
    records = 10
    # `db` is rebound from the DB name string to the DB object here
    db = SystemLoggingDB()
    assert db._connect()['OK']
    try:
        # Disabled clean-up branch, kept for manual debugging
        if False:
            for tableName in db.tableDict.keys():
                result = db._update('DROP TABLE IF EXISTS `%s`' % tableName)
                assert result['OK']
        gLogger.info('\n Creating Table\n')
        # Make sure it is there and it has been created for this test
        result = db._checkTable()
        assert result['OK']
        # Second call must report that the table already exists
        result = db._checkTable()
        assert not result['OK']
        assert result['Message'] == 'The requested table already exist'
        gLogger.info('\n Inserting some records\n')
        for k in range(records):
            result = db.insertMessage(message, site, nodeFQDN, userDN,
                                      userGroup, remoteAddress)
            assert result['OK']
            assert result['lastRowId'] == k + 1
            assert result['Value'] == 1
        # Over-long site name: the insert is expected to fail
        result = db.insertMessage(message, longSite, nodeFQDN, userDN,
                                  userGroup, remoteAddress)
        assert not result['OK']
        result = db._queryDB(showFieldList=['SiteName'])
        assert result['OK']
        assert result['Value'][0][0] == site
        result = db._queryDB(showFieldList=['SystemName'])
        assert result['OK']
        assert result['Value'][0][0] == systemName
        result = db._queryDB(showFieldList=['SubSystemName'])
        assert result['OK']
        assert result['Value'][0][0] == subSystemName
        result = db._queryDB(showFieldList=['OwnerGroup'])
        assert result['OK']
        assert result['Value'][0][0] == userGroup
        result = db._queryDB(showFieldList=['FixedTextString'])
        assert result['OK']
        assert result['Value'][0][0] == msgTest
        # Grouped count: all `records` inserts share the same VariableText/site
        result = db._queryDB(showFieldList=['VariableText', 'SiteName'],
                             count=True, groupColumn='VariableText')
        assert result['OK']
        assert result['Value'][0][1] == site
        assert result['Value'][0][2] == records
        gLogger.info('\n Removing Table\n')
        for tableName in [
            'MessageRepository', 'FixedTextMessages', 'SubSystems', 'Systems',
            'AgentPersistentData', 'ClientIPs', 'Sites', 'UserDNs'
        ]:
            result = db._update('DROP TABLE `%s`' % tableName)
            assert result['OK']
        gLogger.info('\n OK\n')
    except AssertionError:
        # Report the last DB result that triggered the failure, then bail out
        print 'ERROR ',
        if not result['OK']:
            print result['Message']
        else:
            print result
        sys.exit(1)
# Script tail: group proxy records by user name and print them.
# NOTE(review): `records`, `params` and `dataDict` are defined earlier in the
# script, outside this excerpt.
now = Time.dateTime()
for record in records:
  # presumably record = ( userDN, userGroup, expirationDate, persistent ) — see indexing below
  expirationDate = record[ 2 ]
  dt = expirationDate - now
  secsLeft = dt.days * 86400 + dt.seconds
  # Keep only proxies that still live longer than the requested lifetime
  if secsLeft > params.proxyLifeTime:
    userDN = record[ 0 ]
    userGroup = record[ 1 ]
    persistent = record[ 3 ]
    retVal = CS.getUsernameForDN( userDN )
    if retVal[ 'OK' ]:
      userName = retVal[ 'Value' ]
      if not userName in dataDict:
        dataDict[ userName ] = []
      dataDict[ userName ].append( ( userDN, userGroup, expirationDate, persistent ) )
# Print one section per user, separating multiple proxies with " -"
for userName in dataDict:
  print "* %s" % userName
  for iP in range( len( dataDict[ userName ] ) ):
    data = dataDict[ userName ][ iP ]
    print " DN : %s" % data[0]
    print " group : %s" % data[1]
    print " not after : %s" % Time.toString( data[2] )
    print " persistent : %s" % data[3]
    if iP < len( dataDict[ userName ] ) - 1:
      print " -"
DIRAC.exit( 0 )
def __graphTimeComment( self, fromEpoch, toEpoch ):
  """ Build the rrdtool COMMENT arguments for a graph: generation time stamp
      and the covered time range. Colons must be escaped as '\\:' in rrdtool
      COMMENT strings, hence the replace calls.

      :param fromEpoch: start of the plotted range (epoch seconds)
      :param toEpoch: end of the plotted range (epoch seconds)
      :return: string with the two quoted COMMENT arguments
  """
  # "\\:" (an explicit backslash) replaces the former "\:" literal, whose
  # unrecognised escape sequence raises warnings on modern Python; the
  # resulting string is byte-identical. split('.') drops fractional seconds.
  comStr = " 'COMMENT:Generated on %s UTC'" % Time.toString().replace( ":", "\\:" ).split( "." )[0]
  comStr += " 'COMMENT:%s'" % ( "From %s to %s" % ( Time.fromEpoch( fromEpoch ), Time.fromEpoch( toEpoch ) ) ).replace( ":", "\\:" )
  return comStr
def execute( self ):
  """Main Agent code:
      1.- Query TaskQueueDB for existing TQs
      2.- Add their Priorities
      3.- Submit pilots

      :return: S_OK() / S_ERROR structure
  """
  self.__checkSubmitPools()
  # Build the matching criteria for the Matcher service
  self.directorDict = {}
  self.directorDict['Setup'] = gConfig.getValue( '/DIRAC/Setup', 'None' )
  self.directorDict['CPUTime'] = 9999999
  #Add all submit pools
  self.directorDict[ 'SubmitPool' ] = self.am_getOption( "SubmitPools" )
  #Add all DIRAC platforms if not specified otherwise
  if not 'Platform' in self.directorDict:
    result = getDIRACPlatforms()
    if result['OK']:
      self.directorDict['Platform'] = result['Value']
  rpcMatcher = RPCClient( "WorkloadManagement/Matcher" )
  result = rpcMatcher.getMatchingTaskQueues( self.directorDict )
  if not result['OK']:
    self.log.error( 'Could not retrieve TaskQueues from TaskQueueDB', result['Message'] )
    return result
  taskQueueDict = result['Value']
  self.log.info( 'Found %s TaskQueues' % len( taskQueueDict ) )
  if not taskQueueDict:
    self.log.info( 'No TaskQueue to Process' )
    return S_OK()
  # Accumulate total priority and number of waiting jobs over all TQs
  prioritySum = 0
  waitingJobs = 0
  for taskQueueID in taskQueueDict:
    taskQueueDict[taskQueueID]['TaskQueueID'] = taskQueueID
    prioritySum += taskQueueDict[taskQueueID]['Priority']
    waitingJobs += taskQueueDict[taskQueueID]['Jobs']
  self.log.info( 'Sum of Priorities %s' % prioritySum )
  if waitingJobs == 0:
    self.log.info( 'No waiting Jobs' )
    return S_OK( 'No waiting Jobs' )
  if prioritySum <= 0:
    return S_ERROR( 'Wrong TaskQueue Priorities' )
  # Per-priority-unit and per-job pilot rates for this iteration
  self.pilotsPerPriority = self.am_getOption( 'pilotsPerIteration' ) / prioritySum
  self.pilotsPerJob = self.am_getOption( 'pilotsPerIteration' ) / waitingJobs
  # submittedPilots is updated from submission callbacks, hence the lock
  self.callBackLock.acquire()
  self.submittedPilots = 0
  self.callBackLock.release()
  self.toSubmitPilots = 0
  # Pilots still counted as waiting, but only if recent enough
  waitingStatusList = ['Submitted', 'Ready', 'Scheduled', 'Waiting']
  timeLimitToConsider = Time.toString( Time.dateTime() - Time.hour * self.am_getOption( "maxPilotWaitingHours" ) )
  for taskQueueID in taskQueueDict:
    self.log.verbose( 'Processing TaskQueue', taskQueueID )
    result = pilotAgentsDB.countPilots( { 'TaskQueueID': taskQueueID,
                                          'Status': waitingStatusList}, None, timeLimitToConsider )
    if not result['OK']:
      # Best effort: on failure assume no pilot is waiting for this TQ
      self.log.error( 'Fail to get Number of Waiting pilots', result['Message'] )
      waitingPilots = 0
    else:
      waitingPilots = result['Value']
    self.log.verbose( 'Waiting Pilots for TaskQueue %s:' % taskQueueID, waitingPilots )
    result = self.submitPilotsForTaskQueue( taskQueueDict[taskQueueID], waitingPilots )
    if result['OK']:
      self.toSubmitPilots += result['Value']
  self.log.info( 'Number of pilots to be Submitted %s' % self.toSubmitPilots )
  # Now wait until all Jobs in the Default ThreadPool are proccessed
  if 'Default' in self.pools:
    # only for those in "Default' thread Pool
    # for pool in self.pools:
    self.pools['Default'].processAllResults()
  self.log.info( 'Number of pilots Submitted %s' % self.submittedPilots )
  return S_OK()
def generateNewVersion( self ):
  """ Stamp the object with a fresh, time-based version string and persist it. """
  stamp = Time.toString()
  self.setVersion( stamp )
  self.sync()
  gLogger.info( "Generated new version %s" % self.getVersion() )
def export_setJobStatusBulk( self, jobID, statusDict ):
  """ Set various status fields for job specified by its JobId.
      Set only the last status in the JobDB, updating all the status
      logging information in the JobLoggingDB. The statusDict has datetime
      as a key and status information dictionary as values

      :param jobID: job identifier; coerced to int below
      :param statusDict: {dateString: {'Status': ..., 'MinorStatus': ...,
                                       'ApplicationStatus': ..., 'Source': ...}}
      :return: S_OK() or S_ERROR structure
  """
  status = ""
  minor = ""
  application = ""
  appCounter = ""
  endDate = ''
  startDate = ''
  startFlag = ''
  jobID = int( jobID )
  result = jobDB.getJobAttributes( jobID, ['Status'] )
  if not result['OK']:
    return result
  if not result['Value']:
    # if there is no matching Job it returns an empty dictionary
    return S_ERROR( 'No Matching Job' )
  # A Stalled job receiving an update is assumed to be Running again
  new_status = result['Value']['Status']
  if new_status == "Stalled":
    status = 'Running'
  # Get the latest WN time stamps of status updates
  result = logDB.getWMSTimeStamps( int( jobID ) )
  if not result['OK']:
    return result
  # 'LastTime' is a summary entry, not an epoch stamp: exclude it from the max
  lastTime = max( [float( t ) for s, t in result['Value'].items() if s != 'LastTime'] )
  lastTime = Time.toString( Time.fromEpoch( lastTime ) )
  # Get the last status values (keys are date strings, sorted chronologically)
  dates = sorted( statusDict )
  # We should only update the status if its time stamp is more recent than the last update
  for date in [date for date in dates if date >= lastTime]:
    sDict = statusDict[date]
    if sDict['Status']:
      status = sDict['Status']
      if status in JOB_FINAL_STATES:
        endDate = date
      if status == "Running":
        startFlag = 'Running'
    if sDict['MinorStatus']:
      minor = sDict['MinorStatus']
      # First "Application" minor status after Running marks the start time
      if minor == "Application" and startFlag == 'Running':
        startDate = date
    if sDict['ApplicationStatus']:
      application = sDict['ApplicationStatus']
    counter = sDict.get( 'ApplicationCounter' )
    if counter:
      appCounter = counter
  # Collect only the attributes that actually changed
  attrNames = []
  attrValues = []
  if status:
    attrNames.append( 'Status' )
    attrValues.append( status )
  if minor:
    attrNames.append( 'MinorStatus' )
    attrValues.append( minor )
  if application:
    attrNames.append( 'ApplicationStatus' )
    attrValues.append( application )
  if appCounter:
    attrNames.append( 'ApplicationCounter' )
    attrValues.append( appCounter )
  result = jobDB.setJobAttributes( jobID, attrNames, attrValues, update = True )
  if not result['OK']:
    return result
  # NOTE(review): the results of these two calls are not checked
  if endDate:
    result = jobDB.setEndExecTime( jobID, endDate )
  if startDate:
    result = jobDB.setStartExecTime( jobID, startDate )
  # Update the JobLoggingDB records ('idem' marks an unchanged field)
  for date in dates:
    sDict = statusDict[date]
    status = sDict['Status']
    if not status:
      status = 'idem'
    minor = sDict['MinorStatus']
    if not minor:
      minor = 'idem'
    application = sDict['ApplicationStatus']
    if not application:
      application = 'idem'
    source = sDict['Source']
    result = logDB.addLoggingRecord( jobID, status, minor, application, date, source )
    if not result['OK']:
      return result
  return S_OK()
def testSystemLoggingDB():
  """ Some test cases for SystemLoggingDB: creates the schema, inserts a
      batch of messages, queries them back, then drops the tables. Exits the
      process with status 1 on the first failed assertion
      (Python 2 script-style test).
  """
  # building up some fake CS values
  gConfig.setOptionValue( 'DIRAC/Setup', 'Test' )
  gConfig.setOptionValue( '/DIRAC/Setups/Test/Framework', 'Test' )
  host = '127.0.0.1'
  user = '******'
  pwd = 'Dirac'
  db = 'AccountingDB'
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/Host', host )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/DBName', db )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/User', user )
  gConfig.setOptionValue( '/Systems/Framework/Test/Databases/SystemLoggingDB/Password', pwd )
  from DIRAC.FrameworkSystem.private.logging.Message import tupleToMessage
  systemName = 'TestSystem'
  subSystemName = 'TestSubSystem'
  level = 10
  time = Time.toString()
  msgTest = 'Hello'
  variableText = time
  frameInfo = ""
  message = tupleToMessage( ( systemName, level, time, msgTest, variableText, frameInfo, subSystemName ) )
  site = 'somewehere'
  # Longer than the SiteName column: the insert with this site must fail
  longSite = 'somewehere1234567890123456789012345678901234567890123456789012345678901234567890'
  nodeFQDN = '127.0.0.1'
  userDN = 'Yo'
  userGroup = 'Us'
  remoteAddress = 'elsewhere'
  records = 10
  # `db` is rebound from the DB name string to the DB object here
  db = SystemLoggingDB()
  assert db._connect()['OK']
  try:
    # Disabled clean-up branch, kept for manual debugging
    if False:
      for tableName in db.tableDict.keys():
        result = db._update( 'DROP TABLE IF EXISTS `%s`' % tableName )
        assert result['OK']
    gLogger.info( '\n Creating Table\n' )
    # Make sure it is there and it has been created for this test
    result = db._checkTable()
    assert result['OK']
    # Second call must report that the table already exists
    result = db._checkTable()
    assert not result['OK']
    assert result['Message'] == 'The requested table already exist'
    gLogger.info( '\n Inserting some records\n' )
    for k in range( records ):
      result = db.insertMessage( message, site, nodeFQDN, userDN, userGroup, remoteAddress )
      assert result['OK']
      assert result['lastRowId'] == k + 1
      assert result['Value'] == 1
    # Over-long site name: the insert is expected to fail
    result = db.insertMessage( message, longSite, nodeFQDN, userDN, userGroup, remoteAddress )
    assert not result['OK']
    result = db._queryDB( showFieldList = [ 'SiteName' ] )
    assert result['OK']
    assert result['Value'][0][0] == site
    result = db._queryDB( showFieldList = [ 'SystemName' ] )
    assert result['OK']
    assert result['Value'][0][0] == systemName
    result = db._queryDB( showFieldList = [ 'SubSystemName' ] )
    assert result['OK']
    assert result['Value'][0][0] == subSystemName
    result = db._queryDB( showFieldList = [ 'OwnerGroup' ] )
    assert result['OK']
    assert result['Value'][0][0] == userGroup
    result = db._queryDB( showFieldList = [ 'FixedTextString' ] )
    assert result['OK']
    assert result['Value'][0][0] == msgTest
    # Grouped count: all `records` inserts share the same VariableText/site
    result = db._queryDB( showFieldList = [ 'VariableText', 'SiteName' ],
                          count = True, groupColumn = 'VariableText' )
    assert result['OK']
    assert result['Value'][0][1] == site
    assert result['Value'][0][2] == records
    gLogger.info( '\n Removing Table\n' )
    for tableName in [ 'MessageRepository', 'FixedTextMessages', 'SubSystems', 'Systems',
                       'AgentPersistentData', 'ClientIPs', 'Sites', 'UserDNs' ]:
      result = db._update( 'DROP TABLE `%s`' % tableName )
      assert result['OK']
    gLogger.info( '\n OK\n' )
  except AssertionError:
    # Report the last DB result that triggered the failure, then bail out
    print 'ERROR ',
    if not result['OK']:
      print result['Message']
    else:
      print result
    sys.exit( 1 )
def __generateUniqueClientName(self):
    """Return a pseudo-unique hex digest identifying this client instance.

    The seed mixes the current time stamp, a random number, the host FQDN
    and the logger name, so two clients are very unlikely to collide.
    """
    seedParts = [Time.toString(),
                 str(random.random()),
                 Network.getFQDN(),
                 gLogger.getName()]
    seed = ":".join(seedParts)
    return md5(seed).hexdigest()
def test():
    """ Some test cases for DataLoggingDB: creates the table, inserts and
        reads back a few records, then drops the table. Exits the process
        with status 1 on the first failed assertion (Python 2 script-style
        test).
    """
    # building up some fake CS values
    gConfig.setOptionValue('DIRAC/Setup', 'Test')
    gConfig.setOptionValue('/DIRAC/Setups/Test/DataManagement', 'Test')
    host = '127.0.0.1'
    user = '******'
    pwd = 'Dirac'
    db = 'AccountingDB'
    gConfig.setOptionValue(
        '/Systems/DataManagement/Test/Databases/DataLoggingDB/Host', host)
    gConfig.setOptionValue(
        '/Systems/DataManagement/Test/Databases/DataLoggingDB/DBName', db)
    gConfig.setOptionValue(
        '/Systems/DataManagement/Test/Databases/DataLoggingDB/User', user)
    gConfig.setOptionValue(
        '/Systems/DataManagement/Test/Databases/DataLoggingDB/Password', pwd)
    # `db` is rebound from the DB name string to the DB object here
    db = DataLoggingDB()
    assert db._connect()['OK']
    lfns = ['/Test/00001234/File1', '/Test/00001234/File2']
    status = 'TestStatus'
    minor = 'MinorStatus'
    # One record with a string date, one with a datetime object
    date1 = Time.toString()
    date2 = Time.dateTime()
    source = 'Somewhere'
    fileTuples = ((lfns[0], status, minor, date1, source),
                  (lfns[1], status, minor, date2, source))
    try:
        gLogger.info('\n Creating Table\n')
        # Make sure it is there and it has been created for this test
        result = db._checkTable()
        assert result['OK']
        # Second call must report that the table already exists
        result = db._checkTable()
        assert not result['OK']
        assert result['Message'] == 'The requested table already exist'
        gLogger.info('\n Inserting some records\n')
        result = db.addFileRecord(lfns, status,
                                  date='2012-04-28 09:49:02.545466')
        assert result['OK']
        assert result['Value'] == 2
        assert result['lastRowId'] == 2
        result = db.addFileRecords(fileTuples)
        assert result['OK']
        gLogger.info('\n Retrieving some records\n')
        # Each LFN now has two records: one per insert call above
        result = db.getFileLoggingInfo(lfns[0])
        assert result['OK']
        assert len(result['Value']) == 2
        result = db.getFileLoggingInfo(lfns[1])
        assert result['OK']
        assert len(result['Value']) == 2
        result = db.getUniqueStates()
        assert result['OK']
        assert result['Value'] == [status]
        gLogger.info('\n Removing Table\n')
        result = db._update('DROP TABLE `%s`' % db.tableName)
        assert result['OK']
        gLogger.info('\n OK\n')
    except AssertionError:
        # Report the last DB result that triggered the failure, then bail out
        print 'ERROR ',
        if not result['OK']:
            print result['Message']
        else:
            print result
        sys.exit(1)