def __init__(self, transClient=None, logger=None, requestClient=None, requestClass=None,
             requestValidator=None, ownerDN=None, ownerGroup=None):
    """c'tor

    The requestClass is by default Request.
    If extensions want to use an extended type, they can pass it as a parameter.
    This is the same behavior as WorfkloTasks and jobClass.

    :param transClient: TransformationClient instance, handed to the base class
    :param logger: logger instance; defaults to a 'RequestTasks' sub-logger
    :param requestClient: ReqClient instance; created here when None
    :param requestClass: Request class (or an extension of it) used to build requests
    :param requestValidator: RequestValidator instance; created here when None
    :param str ownerDN: DN the ReqClient calls are delegated to
    :param str ownerGroup: group the ReqClient calls are delegated to
    """
    if not logger:
        logger = gLogger.getSubLogger('RequestTasks')

    super(RequestTasks, self).__init__(transClient, logger)

    # Use certificates (with delegation) only when BOTH a DN and a group are supplied.
    # The redundant "True if ... else False" ternary is replaced by the boolean itself.
    useCertificates = bool(ownerDN) and bool(ownerGroup)

    if not requestClient:
        self.requestClient = ReqClient(useCertificates=useCertificates,
                                       delegatedDN=ownerDN,
                                       delegatedGroup=ownerGroup)
    else:
        self.requestClient = requestClient

    if not requestClass:
        self.requestClass = Request
    else:
        self.requestClass = requestClass

    if not requestValidator:
        self.requestValidator = RequestValidator()
    else:
        self.requestValidator = requestValidator
def main():
    """List the content of the request proxies' caches, fully or as a count."""
    Script.registerSwitch('', 'Full', ' Print full list of requests')
    from DIRAC.Core.Base.Script import parseCommandLine
    parseCommandLine()
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient

    # --Full asks for the complete request listing instead of only a count
    fullPrint = any(sw[0] == 'Full' for sw in Script.getUnprocessedSwitches())

    reqClient = ReqClient()
    for server, rpcClient in reqClient.requestProxies().items():
        DIRAC.gLogger.always("Checking request cache at %s" % server)
        reqCache = rpcClient.listCacheDir()
        if not reqCache['OK']:
            DIRAC.gLogger.error("Cannot list request cache", reqCache)
            continue
        reqCache = reqCache['Value']
        if fullPrint:
            DIRAC.gLogger.always("List of requests", reqCache)
        else:
            DIRAC.gLogger.always("Number of requests in the cache", len(reqCache))

    DIRAC.exit(0)
def setUp(self):
    """Set up fixtures: two files, a ReplicateAndRegister operation and a request
    wrapping them, plus the client and the stress-test parameters."""
    gLogger.setLevel('NOTICE')
    # # first test file
    self.file = File()
    self.file.LFN = "/lhcb/user/c/cibak/testFile"
    self.file.Checksum = "123456"
    self.file.ChecksumType = "ADLER32"
    # # second test file
    self.file2 = File()
    self.file2.LFN = "/lhcb/user/f/fstagni/testFile"
    self.file2.Checksum = "654321"
    self.file2.ChecksumType = "ADLER32"
    # # replication operation carrying both files
    self.operation = Operation()
    self.operation.Type = "ReplicateAndRegister"
    self.operation.TargetSE = "CERN-USER"
    self.operation.addFile(self.file)
    self.operation.addFile(self.file2)
    # # request wrapping the operation, with hard-coded owner credentials
    self.request = Request()
    self.request.RequestName = "RequestManagerHandlerTests"
    self.request.OwnerDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=cibak/CN=605919/CN=Krzysztof Ciba"
    self.request.OwnerGroup = "dirac_user"
    self.request.JobID = 123
    self.request.addOperation(self.operation)
    # # JSON representation of a whole request
    self.jsonStr = self.request.toJSON()['Value']
    # # request client
    self.requestClient = ReqClient()
    # # number of requests used by the stress test
    self.stressRequests = 1000
    # # bulk size used by the bulk-put test
    self.bulkRequest = 1000
def reqClient(self):
    """Return RequestClient, creating and caching it on first access."""
    if self._reqClient:
        return self._reqClient
    # deferred import: pull in the DIRAC client machinery only when needed
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
    self._reqClient = ReqClient()
    return self._reqClient
def initialize(self):
    """Agent initialisation: read the config options and instantiate the clients.

    :param self: self reference
    :return: S_OK()
    """
    # # shifter proxy
    # See cleanContent method: this proxy will be used ALSO when the file catalog used
    # is the DIRAC File Catalog (DFC).
    # This is possible because of unset of the "UseServerCertificate" option
    self.shifterProxy = self.am_getOption("shifterProxy", self.shifterProxy)
    # # transformations types (class attributes provide the defaults)
    self.dataProcTTypes = Operations().getValue(
        "Transformations/DataProcessing", self.dataProcTTypes)
    self.dataManipTTypes = Operations().getValue(
        "Transformations/DataManipulation", self.dataManipTTypes)
    # an explicit agent-level list of types overrides the Operations defaults
    agentTSTypes = self.am_getOption("TransformationTypes", [])
    if agentTSTypes:
        self.transformationTypes = sorted(agentTSTypes)
    else:
        self.transformationTypes = sorted(self.dataProcTTypes + self.dataManipTTypes)
    self.log.info("Will consider the following transformation types: %s" %
                  str(self.transformationTypes))
    # # directory locations
    self.directoryLocations = sorted(
        self.am_getOption("DirectoryLocations", self.directoryLocations))
    self.log.info(
        "Will search for directories in the following locations: %s" %
        str(self.directoryLocations))
    # # transformation metadata tag name
    self.transfidmeta = self.am_getOption("TransfIDMeta", self.transfidmeta)
    self.log.info("Will use %s as metadata tag name for TransformationID" %
                  self.transfidmeta)
    # # archive period in days
    self.archiveAfter = self.am_getOption("ArchiveAfter", self.archiveAfter)  # days
    self.log.info("Will archive Completed transformations after %d days" %
                  self.archiveAfter)
    # # transformation log SEs
    self.logSE = Operations().getValue("/LogStorage/LogSE", self.logSE)
    self.log.info("Will remove logs found on storage element: %s" % self.logSE)
    # # transformation client
    self.transClient = TransformationClient()
    # # wms client
    self.wmsClient = WMSClient()
    # # request client
    self.reqClient = ReqClient()
    # # file catalog client
    self.metadataClient = FileCatalogClient()
    # # job monitoring client
    self.jobMonitoringClient = JobMonitoringClient()
    return S_OK()
def __init__( self, *args, **kwargs ):
    """c'tor

    Instantiates the clients used by the agent and declares placeholders
    for the options that initialize() later reads from the CS.
    """
    AgentModule.__init__( self, *args, **kwargs )
    # # replica manager
    self.replicaManager = ReplicaManager()
    # # transformation client
    self.transClient = TransformationClient()
    # # wms client
    self.wmsClient = WMSClient()
    # # request client
    self.reqClient = ReqClient()
    # # file catalog client
    self.metadataClient = FileCatalogClient()
    # # placeholders for CS options (populated in initialize)
    # # transformations types
    self.transformationTypes = None
    # # directory locations
    self.directoryLocations = None
    # # transformation metadata
    self.transfidmeta = None
    # # archive period in days
    self.archiveAfter = None
    # # active SEs
    self.activeStorages = None
    # # transformation log SEs
    self.logSE = None
    # # enable/disable execution
    self.enableFlag = None
def __submitRMSOp(self, target_se, lfns_chunk_dict, whichRMSOp='ReplicateAndRegister'):
    """Build and submit one RMS request for a chunk of LFNs.

    :param str target_se: SE name to which to replicate
    :param dict lfns_chunk_dict: dict keyed by LFN; each value holds 'Size' and
                                 optionally 'Checksum'
    :param str whichRMSOp: RMS operation type - ReplicateAndRegister,
                           ReplicateAndRemove, PutAndRegister
    :return: S_OK() when the request was put, S_ERROR otherwise
    """
    # # Setup request; timestamp in the name keeps it unique per submission
    request = Request()
    request.RequestName = "DDM_" + str(target_se) + datetime.datetime.now().strftime("_%Y%m%d_%H%M%S")

    myOp = Operation()
    myOp.Type = whichRMSOp
    myOp.TargetSE = target_se

    # # Add LFNs to the operation; iterate items() to avoid one dict lookup per field
    for lfn, metadata in lfns_chunk_dict.items():
        opFile = File()
        opFile.LFN = lfn
        opFile.Size = metadata['Size']
        if "Checksum" in metadata:
            opFile.Checksum = metadata['Checksum']
            opFile.ChecksumType = 'ADLER32'
        # # Add file to operation
        myOp.addFile(opFile)

    request.addOperation(myOp)
    reqClient = ReqClient()
    putRequest = reqClient.putRequest(request)
    if not putRequest["OK"]:
        gLogger.error("Unable to put request '%s': %s" % (request.RequestName, putRequest["Message"]))
        return S_ERROR("Problem submitting to RMS.")
    # previously the success path fell through and returned None implicitly;
    # return an explicit S_OK so callers can check the result structure
    return S_OK()
def setUp(self):
    """Set up fixtures: two files, a ReplicateAndRegister operation and a request
    owned by the identity/group of the current proxy."""
    gLogger.setLevel('INFO')
    # # first test file
    self.file = File()
    self.file.LFN = "/lhcb/user/c/cibak/testFile"
    self.file.Checksum = "123456"
    self.file.ChecksumType = "ADLER32"
    # # second test file
    self.file2 = File()
    self.file2.LFN = "/lhcb/user/f/fstagni/testFile"
    self.file2.Checksum = "654321"
    self.file2.ChecksumType = "ADLER32"
    # # replication operation carrying both files
    self.operation = Operation()
    self.operation.Type = "ReplicateAndRegister"
    self.operation.TargetSE = "CERN-USER"
    self.operation.addFile(self.file)
    self.operation.addFile(self.file2)
    # assumes a valid proxy is available — ['Value'] would raise KeyError otherwise
    proxyInfo = getProxyInfo()['Value']
    self.request = Request()
    self.request.RequestName = "RequestManagerHandlerTests"
    self.request.OwnerDN = proxyInfo['identity']
    self.request.OwnerGroup = proxyInfo['group']
    self.request.JobID = 123
    self.request.addOperation(self.operation)
    # # JSON representation of a whole request
    self.jsonStr = self.request.toJSON()['Value']
    # # request client
    self.requestClient = ReqClient()
def myRequest():
    """Create a RemoveFile request for a test LFN and put it to the ReqDB.

    :raises RuntimeError: when the built request does not pass validation
    """
    request = Request()
    request.RequestName = 'myAwesomeRemovalRequest.xml'
    request.JobID = 0
    request.SourceComponent = "myScript"

    remove = Operation()
    remove.Type = "RemoveFile"

    lfn = "/ilc/user/s/sailer/test.txt"
    rmFile = File()
    rmFile.LFN = lfn
    remove.addFile(rmFile)

    request.addOperation(remove)

    # validate before submission: an invalid request would be rejected server-side
    isValid = RequestValidator().validate(request)
    if not isValid['OK']:
        raise RuntimeError("Failover request is not valid: %s" % isValid['Message'])
    # print() calls (not the py2-only print statement) so the script runs on
    # both Python 2 and Python 3
    print("It is a GOGOGO")
    requestClient = ReqClient()
    result = requestClient.putRequest(request)
    print(result)
def putRequest(self, userName, userDN, userGroup, sourceSE, targetSE1, targetSE2):
    """Build a request for the given user and put it to the ReqDB, after
    deleting any leftover request with the same name."""
    request = self.buildRequest(userName, userGroup, sourceSE, targetSE1, targetSE2)
    request.RequestName = "test%s-%s" % (userName, userGroup)
    request.OwnerDN = userDN
    request.OwnerGroup = userGroup

    gLogger.always("putRequest: request '%s'" % request.RequestName)
    for operation in request:
        gLogger.always("putRequest: => %s %s %s" % (operation.Order, operation.Type, operation.TargetSE))
        for opFile in operation:
            gLogger.always("putRequest: ===> file %s" % opFile.LFN)

    reqClient = ReqClient()
    # clean up a possible leftover from a previous run first
    deleteResult = reqClient.deleteRequest(request.RequestName)
    if not deleteResult["OK"]:
        gLogger.error("putRequest: %s" % deleteResult["Message"])
        return deleteResult

    putResult = reqClient.putRequest(request)
    if not putResult["OK"]:
        gLogger.error("putRequest: %s" % putResult["Message"])
    return putResult
def _sendFailoverRequest(self, request):
    """Send the failover request for a job.

    This request would basically be a DISET request for setting the job status.
    Failures are only logged.

    :param Request request: Request() object
    :return: None
    """
    if not len(request):
        return
    self.log.info("Trying to send the failover request")
    # The request is ready, send it now
    isValid = RequestValidator().validate(request)
    if isValid["OK"]:
        requestClient = ReqClient()
        result = requestClient.putRequest(request)
        if not result["OK"]:
            self.log.error("Failed to set failover request", result["Message"])
        return
    # validation failed: log it and dump the request content for debugging
    self.log.error("Failover request is not valid", isValid["Message"])
    self.log.error("Printing out the content of the request")
    reqToJSON = request.toJSON()
    if reqToJSON["OK"]:
        print(str(reqToJSON["Value"]))
    else:
        self.log.error(
            "Something went wrong creating the JSON from request",
            reqToJSON["Message"])
def __init__(self, transClient=None, logger=None, requestClient=None, requestClass=None, requestValidator=None):
    """c'tor

    The requestClass is by default Request.
    If extensions want to use an extended type, they can pass it as a parameter.
    This is the same behavior as WorfkloTasks and jobClass.
    """
    super(RequestTasks, self).__init__(transClient, logger or gLogger.getSubLogger('RequestTasks'))
    # fall back to fresh instances / the base class when nothing was injected
    self.requestClient = requestClient if requestClient else ReqClient()
    self.requestClass = requestClass if requestClass else Request
    self.requestValidator = requestValidator if requestValidator else RequestValidator()
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN):
    """Delete a sandbox stored on an external storage element.

    When DelayedExternalDeletion is enabled (the default) a PhysicalRemoval
    request is queued in the RMS; otherwise the file is removed synchronously
    through the StorageElement.

    :param str SEName: storage element holding the sandbox
    :param str SEPFN: PFN of the sandbox on that SE
    :return: S_OK/S_ERROR from ReqClient().putRequest or StorageElement.removeFile
    """
    if self.getCSOption("DelayedExternalDeletion", True):
        gLogger.info("Setting deletion request")
        try:
            request = Request()
            # SE, PFN and a timestamp in the name keep the request unique
            request.RequestName = "RemoteSBDeletion:%s|%s:%s" % (
                SEName, SEPFN, time.time())
            physicalRemoval = Operation()
            physicalRemoval.Type = "PhysicalRemoval"
            physicalRemoval.TargetSE = SEName
            fileToRemove = File()
            fileToRemove.PFN = SEPFN
            physicalRemoval.addFile(fileToRemove)
            request.addOperation(physicalRemoval)
            return ReqClient().putRequest(request)
        except Exception as e:
            gLogger.exception("Exception while setting deletion request")
            return S_ERROR("Cannot set deletion request: %s" % str(e))
    else:
        gLogger.info("Deleting external Sandbox")
        try:
            return StorageElement(SEName).removeFile(SEPFN)
        except Exception:  # the bound exception variable was never used; dropped
            gLogger.exception(
                "RM raised an exception while trying to delete a remote sandbox"
            )
            return S_ERROR(
                "RM raised an exception while trying to delete a remote sandbox"
            )
def initialize(self): """ agent initialisation reading and setting confing opts :param self: self reference """ # # shifter proxy self.am_setOption('shifterProxy', 'DataManager') # # transformations types self.dataProcTTypes = Operations().getValue( 'Transformations/DataProcessing', self.dataProcTTypes) self.dataManipTTypes = Operations().getValue( 'Transformations/DataManipulation', self.dataManipTTypes) agentTSTypes = self.am_getOption('TransformationTypes', []) if agentTSTypes: self.transformationTypes = sorted(agentTSTypes) else: self.transformationTypes = sorted(self.dataProcTTypes + self.dataManipTTypes) self.log.info("Will consider the following transformation types: %s" % str(self.transformationTypes)) # # directory locations self.directoryLocations = sorted( self.am_getOption('DirectoryLocations', ['TransformationDB', 'MetadataCatalog'])) self.log.info( "Will search for directories in the following locations: %s" % str(self.directoryLocations)) # # transformation metadata self.transfidmeta = self.am_getOption('TransfIDMeta', "TransformationID") self.log.info("Will use %s as metadata tag name for TransformationID" % self.transfidmeta) # # archive periof in days self.archiveAfter = self.am_getOption('ArchiveAfter', 7) # days self.log.info("Will archive Completed transformations after %d days" % self.archiveAfter) # # active SEs self.activeStorages = sorted(self.am_getOption('ActiveSEs', [])) self.log.info("Will check the following storage elements: %s" % str(self.activeStorages)) # # transformation log SEs self.logSE = Operations().getValue('/LogStorage/LogSE', 'LogSE') self.log.info("Will remove logs found on storage element: %s" % self.logSE) # # enable/disable execution, should be using CS option Status?? with default value as 'Active'?? 
self.enableFlag = self.am_getOption('EnableFlag', 'True') # # transformation client self.transClient = TransformationClient() # # wms client self.wmsClient = WMSClient() # # request client self.reqClient = ReqClient() # # file catalog client self.metadataClient = FileCatalogClient() return S_OK()
def __init__( self, requestJSON, handlersDict, csPath, agentName,
              standalone=False, requestClient=None, rmsMonitoring=False):
    """c'tor

    :param self: self reference
    :param str requestJSON: request serialized to JSON
    :param dict handlersDict: operation handlers
    :param str csPath: CS path of the owning agent
    :param str agentName: name of the owning agent
    :param bool standalone: flag kept on the instance for the execution mode
    :param requestClient: ReqClient instance; created here when None
    :param bool rmsMonitoring: use the RMSMonitoring reporter instead of gMonitor
    """
    self.request = Request(requestJSON)
    # # csPath
    self.csPath = csPath
    # # agent name
    self.agentName = agentName
    # # standalone flag
    self.standalone = standalone
    # # handlers dict
    self.handlersDict = handlersDict
    # # handlers class def
    self.handlers = {}
    # # own sublogger, tagged with the pid and the request name
    self.log = gLogger.getSubLogger("pid_%s/%s" % (os.getpid(), self.request.RequestName))
    # # get shifters info
    self.__managersDict = {}
    shifterProxies = self.__setupManagerProxies()
    if not shifterProxies["OK"]:
        # failure is logged but not fatal: execution continues without the proxies
        self.log.error("Cannot setup shifter proxies", shifterProxies["Message"])

    # This flag which is set and sent from the RequestExecutingAgent and is False by default.
    self.rmsMonitoring = rmsMonitoring

    if self.rmsMonitoring:
        self.rmsMonitoringReporter = MonitoringReporter(monitoringType="RMSMonitoring")
    else:
        # # initialize gMonitor
        gMonitor.setComponentType(gMonitor.COMPONENT_AGENT)
        gMonitor.setComponentName(self.agentName)
        gMonitor.initialize()
        # # own gMonitor activities
        gMonitor.registerActivity("RequestAtt", "Requests processed",
                                  "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)
        gMonitor.registerActivity("RequestFail", "Requests failed",
                                  "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)
        gMonitor.registerActivity("RequestOK", "Requests done",
                                  "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)

    if requestClient is None:
        self.requestClient = ReqClient()
    else:
        self.requestClient = requestClient
def _sendToFailover(rpcStub):
    """Encode the RPC stub into a ForwardDISET operation and put it to the ReqDB."""
    request = Request()
    # timestamp + random suffix keep the request name unique
    request.RequestName = "Accounting.DataStore.%s.%s" % (time.time(), random.random())
    forwardDISETOp = Operation()
    forwardDISETOp.Type = "ForwardDISET"
    forwardDISETOp.Arguments = DEncode.encode(rpcStub)
    request.addOperation(forwardDISETOp)
    return ReqClient().putRequest(request)
def callback(self):
    """Trigger the callback once all the FTS interactions are done.

    Updates the status of the Operation to 'Finished' when the callback succeeds.
    """
    self.reqClient = ReqClient()
    result = self._callback()
    if result['OK']:
        self.status = 'Finished'
    return result
def export_putRequest(self, requestJSON):
    """Insert a new request into RequestDB, prepending a WMSSecureOutputData
    operation and switching ownership to the ProductionManager credentials."""
    request = Request(json.loads(requestJSON))
    # # prepend the secure-output-data operation
    secureOutputOp = Operation()
    secureOutputOp.Type = "WMSSecureOutputData"
    request.insertBefore(secureOutputOp, request[0])
    userDN, userGroup, _ = self.__getOwnerGroupDN('ProductionManager')
    request.OwnerDN = userDN
    request.OwnerGroup = userGroup
    return ReqClient().putRequest(request)
def commitRequest(self):
    """Send the accumulated failover request to the Request Management Service.

    :return: S_OK() when there is nothing to send or the put result,
             S_ERROR when the request fails validation
    """
    if self.request.isEmpty():
        return S_OK()

    # validate before submission: an invalid request would be rejected server-side
    isValid = RequestValidator().validate(self.request)
    if not isValid["OK"]:
        return S_ERROR("Failover request is not valid: %s" % isValid["Message"])

    # redundant else-after-return and temporary variable removed
    return ReqClient().putRequest(self.request)
def _sendToFailover(rpcStub):
    """Queue the RPC stub as a ForwardDISET request so it can be replayed later."""
    # We catch all the exceptions, because it should never crash
    try:
        failoverRequest = Request()
        # timestamp + random suffix keep the request name unique
        failoverRequest.RequestName = "Accounting.DataStore.%s.%s" % (time.time(), random.random())
        disetOp = Operation()
        disetOp.Type = "ForwardDISET"
        disetOp.Arguments = DEncode.encode(rpcStub)
        failoverRequest.addOperation(disetOp)
        return ReqClient().putRequest(failoverRequest)
    except Exception as exc:  # pylint: disable=broad-except
        return S_ERROR(ERMSUKN, "Exception sending accounting failover request: %s" % repr(exc))
def export_putRequest(self, requestJSON):
    """Put a new request into RequestDB.

    A WMSSecureOutputData operation is prepended and ownership is switched to
    the ProductionManager credentials before insertion.

    :param str requestJSON: request serialized to JSON
    :return: result structure of ReqClient().putRequest
    """
    requestDict = json.loads(requestJSON)
    # the previously-computed requestName and userName locals were never used
    # (both carried pylint unused-variable pragmas) and have been removed
    request = Request(requestDict)
    # # create new operation
    operation = Operation()
    operation.Type = "WMSSecureOutputData"
    request.insertBefore(operation, request[0])
    userDN, userGroup, _ = self.__getOwnerGroupDN('ProductionManager')
    request.OwnerDN = userDN
    request.OwnerGroup = userGroup
    return ReqClient().putRequest(request)
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN):
    """Delete a sandbox stored on an external SE, either via a delayed RMS
    request owned by the sandbox owner, or synchronously via the StorageElement.

    :param str SEName: storage element holding the sandbox
    :param str SEPFN: PFN of the sandbox on that SE
    """
    if self.getCSOption("DelayedExternalDeletion", True):
        gLogger.info("Setting deletion request")
        try:
            # We need the hostDN used in order to pass these credentials to the
            # SandboxStoreDB..
            hostCertLocation, _ = Locations.getHostCertificateAndKeyLocation(
            )
            hostCert = X509Certificate.X509Certificate()
            hostCert.loadFromFile(hostCertLocation)
            hostDN = hostCert.getSubjectDN().get("Value")

            # use the host authentication to fetch the data
            result = self.sandboxDB.getSandboxOwner(
                SEName, SEPFN, hostDN, "hosts")
            if not result["OK"]:
                return result
            _owner, ownerDN, ownerGroup = result["Value"]

            request = Request()
            # SE, PFN and a timestamp in the name keep the request unique
            request.RequestName = "RemoteSBDeletion:%s|%s:%s" % (
                SEName, SEPFN, time.time())
            # the removal request is put on behalf of the sandbox owner
            request.OwnerDN = ownerDN
            request.OwnerGroup = ownerGroup
            physicalRemoval = Operation()
            physicalRemoval.Type = "PhysicalRemoval"
            physicalRemoval.TargetSE = SEName
            fileToRemove = File()
            fileToRemove.PFN = SEPFN
            physicalRemoval.addFile(fileToRemove)
            request.addOperation(physicalRemoval)
            return ReqClient().putRequest(request)
        except Exception as e:
            gLogger.exception("Exception while setting deletion request")
            return S_ERROR(f"Cannot set deletion request: {e}")
    else:
        gLogger.info("Deleting external Sandbox")
        try:
            return StorageElement(SEName).removeFile(SEPFN)
        except Exception:
            gLogger.exception(
                "RM raised an exception while trying to delete a remote sandbox"
            )
            return S_ERROR(
                "RM raised an exception while trying to delete a remote sandbox"
            )
def initialize(self):
    """Set up the clients and the list of transformation types to handle."""
    self.am_setOption('shifterProxy', 'ProductionManager')
    self.transClient = TransformationClient()
    self.reqClient = ReqClient()
    self.consChecks = ConsistencyChecks(interactive=False, transClient=self.transClient)

    # consider all data-processing types except the extendable ones
    dataProcessingTypes = Operations().getValue(
        'Transformations/DataProcessing', [])
    extendableTypes = Operations().getValue(
        'Transformations/ExtendableTransfTypes', ['MCSimulation'])
    self.transformationTypes = list(set(dataProcessingTypes) - set(extendableTypes))

    return S_OK()
def __setRemovalRequest(self, lfn, ownerDN, ownerGroup):
    """ Set removal request with the given credentials """
    removalRequest = Request()
    removalRequest.OwnerDN = ownerDN
    removalRequest.OwnerGroup = ownerGroup
    removalRequest.RequestName = os.path.basename(lfn).strip() + '_removal_request.xml'
    removalRequest.SourceComponent = 'JobCleaningAgent'

    removeFileOp = Operation()
    removeFileOp.Type = 'RemoveFile'
    fileToRemove = File()
    fileToRemove.LFN = lfn
    removeFileOp.addFile(fileToRemove)

    removalRequest.addOperation(removeFileOp)
    return ReqClient().putRequest(removalRequest)
def __init__(self, requestJSON, handlersDict, csPath, agentName):
    """c'tor

    :param self: self reference
    :param str requestJSON: request serialized to JSON
    :param dict handlersDict: operation handlers
    :param str csPath: CS path of the owning agent
    :param str agentName: name of the owning agent
    """
    self.request = Request(requestJSON)
    # # csPath
    self.csPath = csPath
    # # agent name
    self.agentName = agentName
    # # handlers dict
    self.handlersDict = handlersDict
    # # handlers class def
    self.handlers = {}
    # # own sublogger, named after the request
    self.log = gLogger.getSubLogger(self.request.RequestName)
    # # get shifters info
    self.__managersDict = {}
    shifterProxies = self.__setupManagerProxies()
    if not shifterProxies["OK"]:
        # failure is logged but not fatal: execution continues without the proxies
        self.log.error(shifterProxies["Message"])
    # # initialize gMonitor
    gMonitor.setComponentType(gMonitor.COMPONENT_AGENT)
    gMonitor.setComponentName(self.agentName)
    gMonitor.initialize()
    # # own gMonitor activities
    gMonitor.registerActivity("RequestAtt", "Requests processed",
                              "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)
    gMonitor.registerActivity("RequestFail", "Requests failed",
                              "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)
    gMonitor.registerActivity("RequestOK", "Requests done",
                              "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM)
    self.requestClient = ReqClient()
def __setRemovalRequest(self, lfn, ownerDN, ownerGroup):
    """Create a RemoveFile request for the LFN and put it with the owner's credentials."""
    removalRequest = Request()
    removalRequest.OwnerDN = ownerDN
    removalRequest.OwnerGroup = ownerGroup
    removalRequest.RequestName = os.path.basename(
        lfn).strip() + "_removal_request.xml"
    removalRequest.SourceComponent = "JobCleaningAgent"

    removeFileOp = Operation()
    removeFileOp.Type = "RemoveFile"
    fileToRemove = File()
    fileToRemove.LFN = lfn
    removeFileOp.addFile(fileToRemove)
    removalRequest.addOperation(removeFileOp)

    # put the request with the owner certificate to make sure it's still a valid DN
    return ReqClient(useCertificates=True, delegatedDN=ownerDN, delegatedGroup=ownerGroup).putRequest(removalRequest)
def __init__( self, requestJSON, handlersDict, csPath, agentName,
              standalone=False, requestClient=None, rmsMonitoring=False ):
    """c'tor

    :param self: self reference
    :param str requestJSON: request serialized to JSON
    :param dict handlersDict: operation handlers
    :param str csPath: CS path of the owning agent
    :param str agentName: name of the owning agent
    :param bool standalone: flag kept on the instance for the execution mode
    :param requestClient: ReqClient instance; created here when None
    :param bool rmsMonitoring: enable the RMSMonitoring reporter
    """
    self.request = Request(requestJSON)
    # # csPath
    self.csPath = csPath
    # # agent name
    self.agentName = agentName
    # # standalone flag
    self.standalone = standalone
    # # handlers dict
    self.handlersDict = handlersDict
    # # handlers class def
    self.handlers = {}
    # # own sublogger, tagged with the pid and the request name
    self.log = gLogger.getSubLogger("pid_%s/%s" % (os.getpid(), self.request.RequestName))
    # # get shifters info
    self.__managersDict = {}
    shifterProxies = self.__setupManagerProxies()
    if not shifterProxies["OK"]:
        # failure is logged but not fatal: execution continues without the proxies
        self.log.error("Cannot setup shifter proxies", shifterProxies["Message"])

    # This flag which is set and sent from the RequestExecutingAgent and is False by default.
    self.rmsMonitoring = rmsMonitoring

    if self.rmsMonitoring:
        self.rmsMonitoringReporter = MonitoringReporter(monitoringType="RMSMonitoring")

    if requestClient is None:
        self.requestClient = ReqClient()
    else:
        self.requestClient = requestClient
def main():
    """Print a per-state summary of the Request database content."""
    Script.parseCommandLine()
    import DIRAC
    from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient

    reqClient = ReqClient()
    dbSummary = reqClient.getDBSummary()
    if not dbSummary["OK"]:
        DIRAC.gLogger.error(dbSummary["Message"])
        DIRAC.exit(-1)

    summary = dbSummary["Value"]
    if not summary:
        DIRAC.gLogger.info("ReqDB is empty!")
        DIRAC.exit(0)

    requestCounts = summary.get("Request", {})
    operationCounts = summary.get("Operation", {})
    fileCounts = summary.get("File", {})

    DIRAC.gLogger.always("Requests:")
    for reqState, reqCount in sorted(requestCounts.items()):
        DIRAC.gLogger.always("- '%s' %s" % (reqState, reqCount))

    DIRAC.gLogger.always("Operations:")
    for opType, opDict in sorted(operationCounts.items()):
        DIRAC.gLogger.always("- '%s':" % opType)
        for opState, opCount in sorted(opDict.items()):
            DIRAC.gLogger.always(" - '%s' %s" % (opState, opCount))

    DIRAC.gLogger.always("Files:")
    for fState, fCount in sorted(fileCounts.items()):
        DIRAC.gLogger.always("- '%s' %s" % (fState, fCount))

    DIRAC.exit(0)
def requestClient(self):
    """Return the cached ReqClient, instantiating it lazily on first use."""
    if self.__requestClient:
        return self.__requestClient
    self.__requestClient = ReqClient()
    return self.__requestClient
def requestClient(cls):
    """Return the class-wide ReqClient, instantiating it lazily on first use."""
    if cls.__requestClient:
        return cls.__requestClient
    cls.__requestClient = ReqClient()
    return cls.__requestClient