def setRequest( self, requestName, requestString, desiredStatus = None ): """ Set request to the database (including all sub-requests) :param self: self reference :param str requestName: request name :param str requestString: serilised request :param mixed desiredState: optional request status, defult = None """ self.log.info( "setRequest: Attempting to set %s." % requestName ) request = RequestContainer( requestString ) requestTypes = request.getSubRequestTypes()['Value'] try: for requestType in requestTypes: subRequestString = request.toXML( desiredType = requestType )['Value'] if subRequestString: if desiredStatus: status = desiredStatus elif not request.isRequestTypeEmpty( requestType )['Value']: status = 'Waiting' else: status = 'Done' subRequestDir = os.path.join( self.root, requestType, status ) if not os.path.exists( subRequestDir ): os.makedirs( subRequestDir ) subRequestPath = os.path.join( subRequestDir, requestName ) subRequestFile = open( subRequestPath, 'w' ) subRequestFile.write( subRequestString ) subRequestFile.close() self.log.info( "setRequest: Successfully set %s." % requestName ) return S_OK() except Exception, error: errStr = "setRequest: Exception while setting request." self.log.exception( errStr, requestName, lException = error ) self.deleteRequest( requestName ) return S_ERROR( errStr )
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup=''):
  """ Turn each transformation task into an XML-serialised failover request.

  :param str transBody: optional "requestType;requestOperation" override
  :param dict taskDict: taskID -> task parameter dict; updated in place with 'TaskObject'
  :param str owner: unused, kept for interface compatibility
  :param str ownerGroup: unused, kept for interface compatibility
  :return: S_OK( taskDict ) with each task carrying its serialised request
  """
  requestType = 'transfer'
  requestOperation = 'replicateAndRegister'
  try:
    requestType, requestOperation = transBody.split(';')
  except (ValueError, AttributeError):
    # transBody empty, malformed or not a string - keep the defaults
    # (narrowed from a bare 'except:' which hid real errors)
    pass
  for taskID in sortList(taskDict.keys()):
    paramDict = taskDict[taskID]
    transID = paramDict['TransformationID']
    oRequest = RequestContainer(init=False)
    subRequestIndex = oRequest.initiateSubRequest(requestType)['Value']
    attributeDict = {
        'Operation': requestOperation,
        'TargetSE': paramDict['TargetSE']
    }
    oRequest.setSubRequestAttributes(subRequestIndex, requestType, attributeDict)
    files = []
    for lfn in paramDict['InputData'].split(';'):
      files.append({'LFN': lfn})
    oRequest.setSubRequestFiles(subRequestIndex, requestType, files)
    # request name encodes transformation and task IDs, zero-padded to 8 digits
    requestName = str(transID).zfill(8) + '_' + str(taskID).zfill(8)
    oRequest.setRequestAttributes({'RequestName': requestName})
    taskDict[taskID]['TaskObject'] = oRequest.toXML()['Value']
  return S_OK(taskDict)
def setRequest(self, requestName, requestString, desiredStatus=None): """ Set request to the database (including all sub-requests) :param self: self reference :param str requestName: request name :param str requestString: serilised request :param mixed desiredState: optional request status, defult = None """ self.log.info("setRequest: Attempting to set %s." % requestName) request = RequestContainer(requestString) requestTypes = request.getSubRequestTypes()["Value"] try: for requestType in requestTypes: subRequestString = request.toXML(desiredType=requestType)["Value"] if subRequestString: if desiredStatus: status = desiredStatus elif not request.isRequestTypeEmpty(requestType)["Value"]: status = "Waiting" else: status = "Done" subRequestDir = os.path.join(self.root, requestType, status) if not os.path.exists(subRequestDir): os.makedirs(subRequestDir) subRequestPath = os.path.join(subRequestDir, requestName) subRequestFile = open(subRequestPath, "w") subRequestFile.write(subRequestString) subRequestFile.close() self.log.info("setRequest: Successfully set %s." % requestName) return S_OK() except Exception, error: errStr = "setRequest: Exception while setting request." self.log.exception(errStr, requestName, lException=error) self.deleteRequest(requestName) return S_ERROR(errStr)
def __getRequestString(self, requestName):
  """ Obtain the string for request (including all sub-requests)

  :param self: self reference
  :param str requestName: request name
  :return: S_OK( xmlString ) carrying an extra 'Request' key with the container, or S_ERROR
  """
  gLogger.info(
      "RequestDBFile.__getRequestString: Attempting to get string for %s." % requestName)
  res = self.__locateRequest(requestName)
  if not res['OK']:
    return res
  subRequestPaths = res['Value']
  try:
    # merge every sub-request file into a single container
    oRequest = RequestContainer(init=False)
    for subRequestPath in subRequestPaths:
      res = self.__readSubRequestString(subRequestPath)
      if not res['OK']:
        return res
      subRequestString = res['Value']
      tempRequest = RequestContainer(subRequestString)
      # top-level attributes are re-applied for each file;
      # NOTE(review): assumes they are identical in every sub-request file - confirm
      oRequest.setRequestAttributes(
          tempRequest.getRequestAttributes()['Value'])
      oRequest.update(tempRequest)
    requestString = oRequest.toXML()['Value']
    gLogger.info(
        "RequestDBFile.__getRequestString: Successfully obtained string for %s." % requestName)
    # piggy-back the container object on the S_OK dict
    result = S_OK(requestString)
    result['Request'] = oRequest
    return result
  except Exception, x:
    errStr = "RequestDBFile.__getRequestString: Exception while obtaining request string."
    gLogger.exception(errStr, requestName, lException=x)
    return S_ERROR(errStr)
def __getRequestString(self, requestName): """ Obtain the string for request (including all sub-requests) :param self: self reference :param str requestName: request name """ self.log.info("__getRequestString: Attempting to get string for %s." % requestName) res = self.__locateRequest(requestName) if not res['OK']: return res subRequestPaths = res['Value'] try: oRequest = RequestContainer(init=False) for subRequestPath in subRequestPaths: res = self.__readSubRequestString(subRequestPath) if not res['OK']: return res subRequestString = res['Value'] tempRequest = RequestContainer(subRequestString) oRequest.setRequestAttributes( tempRequest.getRequestAttributes()['Value']) oRequest.update(tempRequest) requestString = oRequest.toXML()['Value'] self.log.info( "__getRequestString: Successfully obtained string for %s." % requestName) result = S_OK(requestString) result['Request'] = oRequest return result except Exception, error: errStr = "__getRequestString: Exception while obtaining request string." self.log.exception(errStr, requestName, lException=error) return S_ERROR(errStr)
def setRequest(self, requestName, requestString, desiredStatus=None): """ Set request to the database (including all sub-requests) """ gLogger.info("RequestDBFile._setRequest: Attempting to set %s." % requestName) request = RequestContainer(requestString) requestTypes = request.getSubRequestTypes()['Value'] try: for requestType in requestTypes: subRequestString = request.toXML( desiredType=requestType)['Value'] if subRequestString: if desiredStatus: status = desiredStatus elif not request.isRequestTypeEmpty(requestType)['Value']: status = 'ToDo' else: status = 'Done' subRequestDir = os.path.join(self.root, requestType, status) if not os.path.exists(subRequestDir): os.makedirs(subRequestDir) subRequestPath = os.path.join(subRequestDir, requestName) subRequestFile = open(subRequestPath, 'w') subRequestFile.write(subRequestString) subRequestFile.close() gLogger.info("RequestDBFile._setRequest: Successfully set %s." % requestName) return S_OK() except Exception, x: errStr = "RequestDBFile._setRequest: Exception while setting request." gLogger.exception(errStr, requestName, lException=x) self.deleteRequest(requestName) return S_ERROR(errStr)
def setRequest( self, requestName, requestString, desiredStatus = None ): """ Set request to the database (including all sub-requests) """ gLogger.info( "RequestDBFile._setRequest: Attempting to set %s." % requestName ) request = RequestContainer( requestString ) requestTypes = request.getSubRequestTypes()['Value'] try: for requestType in requestTypes: subRequestString = request.toXML( desiredType = requestType )['Value'] if subRequestString: if desiredStatus: status = desiredStatus elif not request.isRequestTypeEmpty( requestType )['Value']: status = 'ToDo' else: status = 'Done' subRequestDir = '%s/%s/%s' % ( self.root, requestType, status ) if not os.path.exists( subRequestDir ): os.makedirs( subRequestDir ) subRequestPath = '%s/%s' % ( subRequestDir, requestName ) subRequestFile = open( subRequestPath, 'w' ) subRequestFile.write( subRequestString ) subRequestFile.close() gLogger.info( "RequestDBFile._setRequest: Successfully set %s." % requestName ) return S_OK() except Exception, x: errStr = "RequestDBFile._setRequest: Exception while setting request." gLogger.exception( errStr, requestName, lException = x ) self.deleteRequest( requestName ) return S_ERROR( errStr )
def _sendToFailover( rpcStub ):
  """ Wrap an accounting rpc stub into a DISET failover request and submit it.

  :param rpcStub: rpc stub of the failed commit
  :return: result of RequestClient().setRequest
  """
  container = RequestContainer()
  container.setDISETRequest( rpcStub )
  xmlStub = container.toXML()['Value']
  # unique name built from wall-clock time plus a random component
  requestName = "Accounting.DataStore.%s.%s" % ( time.time(), random.random() )
  return RequestClient().setRequest( requestName, xmlStub )
def updateRequest( self, requestName, requestString ):
  """ update request given its name and xml serialised string

  Walks every sub-request of every known type, updates its files and its
  Status/Error attributes in the DB, and finally promotes the whole request
  to Done when no sub-request is left Waiting/Assigned.

  :param str requestName: Requests.RequestName
  :param str requestString: request serialised to xml
  """
  request = RequestContainer( request = requestString )
  requestTypes = ['transfer', 'register', 'removal', 'stage', 'diset', 'logupload']
  requestID = request.getRequestID()['Value']
  # any failure below only flags this; the loop keeps going to update as much as possible
  updateRequestFailed = False
  for requestType in requestTypes:
    res = request.getNumSubRequests( requestType )
    if res['OK']:
      numRequests = res['Value']
      for ind in range( numRequests ):
        res = request.getSubRequestAttributes( ind, requestType )
        if res['OK']:
          subRequestDict = res['Value']
          if 'SubRequestID' in subRequestDict:
            subRequestID = res['Value']['SubRequestID']
            res = self.__updateSubRequestFiles( ind, requestType, subRequestID, request )
            if res['OK']:
              # propagate the sub-request's completion state to the DB
              if request.isSubRequestDone( ind, requestType )['Value']:
                res = self._setSubRequestAttribute( requestID, subRequestID, 'Status', 'Done' )
              else:
                res = self._setSubRequestAttribute( requestID, subRequestID, 'Status', 'Waiting' )
              if not res['OK']:
                updateRequestFailed = True
            else:
              updateRequestFailed = True
            # store the sub-request error string if one was reported
            if "Error" in subRequestDict:
              result = self._setSubRequestAttribute( requestID, subRequestID, 'Error', subRequestDict['Error'] )
              if not result['OK']:
                updateRequestFailed = True
          else:
            # sub-request without a SubRequestID cannot be matched to a DB row
            updateRequestFailed = True
        else:
          updateRequestFailed = True
    else:
      updateRequestFailed = True
  if updateRequestFailed:
    errStr = 'Failed to update request %s.' % requestID
    return S_ERROR( errStr )
  else:
    requestStatus = self.getRequestStatus( requestID )
    if not requestStatus["OK"]:
      return requestStatus
    requestStatus = requestStatus["Value"]
    # only close the request once no sub-request is still pending
    if requestStatus["SubRequestStatus"] not in ( "Waiting", "Assigned" ):
      res = self._setRequestAttribute( requestID, 'Status', 'Done' )
      if not res['OK']:
        errStr = 'Failed to update request status of %s to Done.' % requestID
        return S_ERROR( errStr )
    return S_OK()
def submitTaskToExternal(self, request):
  """ Forward a request - either an XML string or a RequestContainer instance -
      to the external RequestClient.

  :param request: XML string or old-style RequestContainer instance
  :return: result of requestClient.setRequest, or S_ERROR for other types
  """
  if type(request) in types.StringTypes:
    # already serialised: parse only to recover the request name
    name = RequestContainer(request).getRequestName()['Value']
  elif type(request) == types.InstanceType:
    # container instance: extract the name, then serialise in place
    name = request.getRequestName()['Value']
    request = request.toXML()['Value']
  else:
    return S_ERROR("Request should be string or request object")
  return self.requestClient.setRequest(name, request)
def submitTaskToExternal(self,request):
  """ Forward a request - either an XML string or a RequestContainer instance -
      to the external RequestClient.

  :param request: XML string or old-style RequestContainer instance
  :return: result of requestClient.setRequest, or S_ERROR for other types
  """
  if type(request) in types.StringTypes:
    # already serialised: parse only to recover the request name
    oRequest = RequestContainer(request)
    name = oRequest.getRequestName()['Value']
  elif type(request) == types.InstanceType:
    # container instance: extract the name, then serialise in place
    name = request.getRequestName()['Value']
    request = request.toXML()['Value']
  else:
    return S_ERROR("Request should be string or request object")
  return self.requestClient.setRequest(name,request)
def generateRequest(self):
  ''' Commit the accumulated records and generate request eventually

  :return: S_OK( request ) where request is a RequestContainer holding the
           rpc stubs of the failed commits, or None when the commit succeeded
  '''
  result = self.commit()
  request = None
  if not result['OK']:
    # Generate Request holding the rpc stubs of every failed commit
    request = RequestContainer()
    # dict.has_key() is deprecated - use the 'in' operator
    if 'FailedResults' in result:
      for res in result['FailedResults']:
        if 'rpcStub' in res:
          request.setDISETRequest(res['rpcStub'])
  return S_OK(request)
def __init__(self, requestObject=False):
  """ Constructor function, can specify request object to instantiate
      FailoverTransfer or a new request object is created.
  """
  self.log = gLogger.getSubLogger("FailoverTransfer")
  self.rm = ReplicaManager()
  if requestObject:
    # reuse the caller-supplied request container
    self.request = requestObject
  else:
    # no container given - start a fresh default one
    self.request = RequestContainer()
    self.request.setRequestName('default_request.xml')
    self.request.setSourceComponent('FailoverTransfer')
def generateRequest(self):
  """ Commit the accumulated records and generate request eventually

  :return: S_OK( request ) where request is a RequestContainer holding the
           rpc stubs of the failed commits, or None when the commit succeeded
  """
  result = self.commit()
  request = None
  if not result['OK']:
    # Generate Request holding the rpc stubs of every failed commit
    request = RequestContainer()
    if result.has_key('FailedResults'):
      for res in result['FailedResults']:
        if res.has_key('rpcStub'):
          request.setDISETRequest(res['rpcStub'])
  return S_OK(request)
def __forwardable( requestString ):
  """ check if request is forwardable

  The sub-requests of type transfer:putAndRegister, removal:physicalRemoval
  and removal:reTransfer are definitely not, they should be executed locally,
  as they are using local fs.

  :param str requestString: XML-serialised request
  """
  localOnlyOperations = ( "putAndRegister", "physicalRemoval", "reTransfer" )
  request = RequestContainer( requestString )
  subRequests = request.getSubRequests( "transfer" )["Value"] + request.getSubRequests( "removal" )["Value"]
  for subRequest in subRequests:
    operation = subRequest["Attributes"]["Operation"]
    if operation in localOnlyOperations:
      return S_ERROR("found operation '%s' that cannot be forwarded" % operation )
  return S_OK()
def __getRequestString(self, requestName):
  """ Obtain the string for request (including all sub-requests)

  :param self: self reference
  :param str requestName: request name
  :return: S_OK( xmlString ) carrying an extra 'Request' key with the container, or S_ERROR
  """
  self.log.info("__getRequestString: Attempting to get string for %s." % requestName)
  res = self.__locateRequest(requestName)
  if not res["OK"]:
    return res
  subRequestPaths = res["Value"]
  try:
    # merge every sub-request file into one container
    oRequest = RequestContainer(init=False)
    for subRequestPath in subRequestPaths:
      res = self.__readSubRequestString(subRequestPath)
      if not res["OK"]:
        return res
      subRequestString = res["Value"]
      tempRequest = RequestContainer(subRequestString)
      # top-level attributes are re-applied per file;
      # NOTE(review): assumes they are identical in every sub-request file - confirm
      oRequest.setRequestAttributes(tempRequest.getRequestAttributes()["Value"])
      oRequest.update(tempRequest)
    requestString = oRequest.toXML()["Value"]
    self.log.info("__getRequestString: Successfully obtained string for %s." % requestName)
    # piggy-back the container object on the S_OK dict
    result = S_OK(requestString)
    result["Request"] = oRequest
    return result
  except Exception, error:
    errStr = "__getRequestString: Exception while obtaining request string."
    self.log.exception(errStr, requestName, lException=error)
    return S_ERROR(errStr)
def __getRequestString( self, requestName ): """ Obtain the string for request (including all sub-requests) """ gLogger.info( "RequestDBFile.__getRequestString: Attempting to get string for %s." % requestName ) res = self.__locateRequest( requestName ) if not res['OK']: return res subRequestPaths = res['Value'] try: oRequest = RequestContainer( init = False ) for subRequestPath in subRequestPaths: res = self.__readSubRequestString( subRequestPath ) if not res['OK']: return res subRequestString = res['Value'] tempRequest = RequestContainer( subRequestString )#,init=False) oRequest.setRequestAttributes( tempRequest.getRequestAttributes()['Value'] ) oRequest.update( tempRequest ) requestString = oRequest.toXML()['Value'] gLogger.info( "RequestDBFile.__getRequestString: Successfully obtained string for %s." % requestName ) result = S_OK( requestString ) result['Request'] = oRequest return result except Exception, x: errStr = "RequestDBFile.__getRequestString: Exception while obtaining request string." gLogger.exception( errStr, requestName, lException = x ) return S_ERROR( errStr )
def applicationSpecificInputs(self):
  """ Resolve all input variables for the module here.

  @return: S_OK() or S_ERROR when no data attributes are available
  """
  # dict.has_key() is deprecated - use the 'in' operator throughout
  if 'DataAttributes' in self.step_commons:
    self.dataattributes = self.step_commons['DataAttributes']
  else:
    return S_ERROR('No data attributes found, cannot proceed with registration in Catalog, ABORT!')
  # copy every declared attribute that is actually present in the step parameters
  for attribute in self.dataattributes.split(";"):
    if attribute in self.step_commons:
      self.attributesdict[attribute] = self.step_commons[attribute]
  if "destination" in self.step_commons:
    self.destination = self.step_commons['destination']
  if 'GENFile' in self.step_commons:
    self.generatorfile = self.step_commons['GENFile']
  if 'MokkaFile' in self.step_commons:
    self.mokkafile = self.step_commons['MokkaFile']
  if 'MarlinFiles' in self.step_commons:
    self.marlinfiles = self.step_commons['MarlinFiles'].split(';')
  if 'SLICFile' in self.step_commons:
    self.slicfile = self.step_commons['SLICFile']
  if 'LCSIMFiles' in self.step_commons:
    self.lcsimfiles = self.step_commons['LCSIMFiles'].split(';')
  if 'Request' in self.workflow_commons:
    self.request = self.workflow_commons['Request']
  else:
    # no failover request yet - create one tagged with this job's ID
    self.request = RequestContainer()
    self.request.setRequestName('job_%s_request.xml' % self.jobID)
    self.request.setJobID(self.jobID)
    self.request.setSourceComponent("Job_%s" % self.jobID)
  return S_OK('Parameters resolved')
def jobexec(jobxml, wfParameters=None):
  """ Execute the workflow described by an XML file.

  :param str jobxml: path to the workflow XML file
  :param dict wfParameters: optional command-line parameters propagated to the workflow
  :return: result of workflow.execute()
  """
  # avoid the mutable-default-argument pitfall: a shared {} default would
  # leak state between calls if a caller ever mutated it
  if wfParameters is None:
    wfParameters = {}
  jobfile = os.path.abspath(jobxml)
  if not os.path.exists(jobfile):
    gLogger.warn('Path to specified workflow %s does not exist' % (jobfile))
    sys.exit(1)
  workflow = fromXMLFile(jobfile)
  gLogger.debug(workflow)
  code = workflow.createCode()
  gLogger.debug(code)
  jobID = 0
  # dict.has_key() is deprecated - use the 'in' operator
  if 'JOBID' in os.environ:
    jobID = os.environ['JOBID']
    gLogger.info('DIRAC JobID %s is running at site %s' % (jobID, DIRAC.siteName()))
  workflow.addTool('JobReport', JobReport(jobID))
  workflow.addTool('AccountingReport', DataStoreClient())
  workflow.addTool('Request', RequestContainer())
  # Propagate the command line parameters to the workflow if any
  for name, value in wfParameters.items():
    workflow.setValue(name, value)
  result = workflow.execute()
  return result
def test_toFile( self ):
  """ Round-trip test: add one dummy sub-request of each type, serialise the
      container to XML and to a file, and check both forms agree.
  """
  lfn = '/lhcb/production/test/case.lfn'
  # Add dummy transfer request
  transferDic = {'Attributes': {'Status': 'Waiting', 'SubRequestID': '7F7C1D94-E452-CD50-204C-EE2E2F1816A9',
                                'Catalogue': '', 'TargetSE': 'CERN-tape', 'Operation': 'MoveAndRegister',
                                'SourceSE': 'RAL-tape'},
                 'Files': [{'LFN': lfn, 'Status': 'Waiting', 'Attempt': 1, 'PFN': '', 'Size': 1231231,
                            'GUID': '7E9CED5A-295B-ED88-CE9A-CF41A62D2175', 'Addler': '', 'Md5': ''}],
                 'Datasets': [{'Dataset1': 'DC06Stripping'}]}
  self.reqContainer.addSubRequest( transferDic, 'transfer' )
  # Add dummy register request
  registerDic = {'Attributes': {'Status': 'Waiting', 'SubRequestID': '7F7C1D94-E452-CD50-204C-EE2E2F1816A9',
                                'Catalogue': '', 'TargetSE': 'CERN-tape', 'Operation': 'RegisterFile'},
                 'Files': [{'LFN': lfn, 'Status': 'Waiting', 'Attempt': 1,
                            'PFN': 'srm://srm.cern.ch/castor/cern.ch/grid/lhcb/production/test/case.lfn',
                            'Size': 1231231, 'GUID': '7E9CED5A-295B-ED88-CE9A-CF41A62D2175',
                            'Addler': 'addler32', 'Md5': 'md5'}],
                 'Datasets': [{'Dataset1': 'DC06Stripping'}]}
  self.reqContainer.addSubRequest( registerDic, 'register' )
  # Add dummy removal request.
  # BUGFIX: 'Catalogue' appeared twice in this literal ('' then 'LFC'); the
  # second silently overrode the first, so only the effective value is kept.
  removalDic = {'Attributes': {'Status': 'Waiting', 'SubRequestID': '7F7C1D94-E452-CD50-204C-EE2E2F1816A9',
                               'TargetSE': 'CERN-tape', 'Operation': 'RemoveReplica',
                               'Catalogue': 'LFC'},
                'Files': [{'LFN': lfn, 'Status': 'Waiting', 'Attempt': 1,
                           'PFN': 'srm://srm.cern.ch/castor/cern.ch/grid/lhcb/production/test/case.lfn',
                           'Size': 1231231, 'GUID': '7E9CED5A-295B-ED88-CE9A-CF41A62D2175',
                           'Addler': 'addler32', 'Md5': 'md5'}],
                'Datasets': [{'Dataset1': 'DC06Stripping'}]}
  self.reqContainer.addSubRequest( removalDic, 'removal' )
  # Add dummy stage request
  stageDic = {'Attributes': {'Status': 'Waiting', 'SubRequestID': '7F7C1D94-E452-CD50-204C-EE2E2F1816A9',
                             'Catalogue': '', 'TargetSE': 'CERN-tape', 'Operation': 'StageAndPin'},
              'Files': [{'LFN': lfn, 'Status': 'Waiting', 'Attempt': 1,
                         'PFN': 'srm://srm.cern.ch/castor/cern.ch/grid/lhcb/production/test/case.lfn',
                         'Size': 1231231, 'GUID': '7E9CED5A-295B-ED88-CE9A-CF41A62D2175',
                         'Addler': 'addler32', 'Md5': 'md5'}],
              'Datasets': [{'Dataset1': 'DC06Stripping'}]}
  self.reqContainer.addSubRequest( stageDic, 'stage' )
  # Get the XML string of the DM request
  string = self.reqContainer.toXML()
  fname = 'testRequest.xml'
  # Write the reqContainer to a file
  self.reqContainer.toFile( fname )
  # Get the file contents
  reqfile = open( fname, 'r' )
  testString = reqfile.read()
  reqfile.close()
  # Check the file contents are what is expected
  self.assertEqual( string['Value'], testString )
  # NOTE(review): passes the S_OK dict rather than string['Value'] - confirm intended
  testReq = RequestContainer( string )
  # Test that what is obtained when parsing the request is the same as what is given.
  transferReqDouble = self.reqContainer.getSubRequest( 0, 'transfer' )
  for key in transferReqDouble.keys():
    if key == 'Files':
      self.assertEqual( transferDic['Files'], transferReqDouble['Files'] )
    elif key == 'Datasets':
      self.assertEqual( transferDic[key], transferReqDouble[key] )
    else:
      for att in transferDic['Attributes'].keys():
        self.assertEqual( transferDic['Attributes'][att], transferReqDouble['Value']['Attributes'][att] )
def test_01_getRequestStatus(self): self.assertEqual( self.requestDB.getRequestStatus("testRequest"), { 'OK': True, 'Value': { 'SubRequestStatus': 'Waiting', 'RequestStatus': 'Waiting' } }) ## get request getRemoval = self.requestDB.getRequest("removal") oRequest = RequestContainer(getRemoval["Value"]["RequestString"]) self.assertEqual( self.requestDB.getRequestStatus("testRequest"), { 'OK': True, 'Value': { 'SubRequestStatus': 'Assigned', 'RequestStatus': 'Waiting' } }) ## make removal Done oRequest.subRequests["removal"][0]["Attributes"]["Status"] = "Done" oRequest.subRequests["removal"][0]["Files"][0]["Status"] = "Done" update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"], oRequest.toXML()["Value"]) ## get status self.assertEqual( self.requestDB.getRequestStatus("testRequest"), { 'OK': True, 'Value': { 'SubRequestStatus': 'Waiting', 'RequestStatus': u'Waiting' } }) ## make transfer Done oRequest.subRequests["transfer"][0]["Attributes"]["Status"] = "Done" oRequest.subRequests["transfer"][0]["Files"][0]["Status"] = "Done" update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"], oRequest.toXML()["Value"]) ## get status self.assertEqual( self.requestDB.getRequestStatus("testRequest"), { 'OK': True, 'Value': { 'SubRequestStatus': 'Done', 'RequestStatus': 'Done' } })
def generateRequest(self):
  """ Generate failover requests for the operations in the internal cache

  :return: S_OK( request ) where request is a RequestContainer with one DISET
           sub-request per failed send, or None when nothing failed
  """
  request = RequestContainer()
  result = self.sendStoredStatusInfo()
  if not result['OK']:
    # dict.has_key() is deprecated - use the 'in' operator
    if 'rpcStub' in result:
      request.addSubRequest(
          DISETSubRequest(result['rpcStub']).getDictionary(), 'diset')
    else:
      return S_ERROR('Could not create job state sub-request')
  result = self.sendStoredJobParameters()
  if not result['OK']:
    if 'rpcStub' in result:
      request.addSubRequest(
          DISETSubRequest(result['rpcStub']).getDictionary(), 'diset')
    else:
      return S_ERROR('Could not create job parameters sub-request')
  # an empty container means nothing needs failing over
  if request.isEmpty()['Value']:
    request = None
  return S_OK(request)
def test_01_getRequestStatus( self ): self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ), {'OK': True, 'Value': {'SubRequestStatus': 'Waiting', 'RequestStatus': 'Waiting'}}) ## get request getRemoval = self.requestDB.getRequest( "removal" ) oRequest = RequestContainer( getRemoval["Value"]["RequestString"] ) self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ), {'OK': True, 'Value': {'SubRequestStatus': 'Assigned', 'RequestStatus': 'Waiting'}} ) ## make removal Done oRequest.subRequests["removal"][0]["Attributes"]["Status"] = "Done" oRequest.subRequests["removal"][0]["Files"][0]["Status"] = "Done" update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"], oRequest.toXML()["Value"] ) ## get status self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ), {'OK': True, 'Value': {'SubRequestStatus': 'Waiting', 'RequestStatus': u'Waiting'}}) ## make transfer Done oRequest.subRequests["transfer"][0]["Attributes"]["Status"] = "Done" oRequest.subRequests["transfer"][0]["Files"][0]["Status"] = "Done" update = self.requestDB.updateRequest( getRemoval["Value"]["RequestName"], oRequest.toXML()["Value"] ) ## get status self.assertEqual( self.requestDB.getRequestStatus( "testRequest" ), {'OK': True, 'Value': {'SubRequestStatus': 'Done', 'RequestStatus': 'Done'}} )
def applicationSpecificInputs(self):
  """ Resolve the module input parameters: control flags, job ID, log file
      locations, experiment and the failover request container.

  :return: S_OK("Parameters resolved") or an error from getLogPath
  """
  # dict.has_key() is deprecated - use the 'in' operator throughout
  if "Enable" in self.step_commons:
    self.enable = self.step_commons["Enable"]
    if not type(self.enable) == type(True):
      self.log.warn("Enable flag set to non-boolean value %s, setting to False" % self.enable)
      self.enable = False
  if "TestFailover" in self.step_commons:
    # BUGFIX: this previously assigned to self.enable, so the TestFailover
    # step parameter clobbered the enable flag and self.failoverTest was
    # never set before the type check below validated it.
    self.failoverTest = self.step_commons["TestFailover"]
    if not type(self.failoverTest) == type(True):
      self.log.warn("Test failover flag set to non-boolean value %s, setting to False" % self.failoverTest)
      self.failoverTest = False
  if "JOBID" in os.environ:
    self.jobID = os.environ["JOBID"]
    self.log.verbose("Found WMS JobID = %s" % self.jobID)
  else:
    self.log.info("No WMS JobID found, disabling module via control flag")
    self.enable = False
  if "LogFilePath" in self.workflow_commons and "LogTargetPath" in self.workflow_commons:
    self.logFilePath = self.workflow_commons["LogFilePath"]
    self.logLFNPath = self.workflow_commons["LogTargetPath"]
  else:
    self.log.info("LogFilePath parameter not found, creating on the fly")
    result = getLogPath(self.workflow_commons)
    if not result["OK"]:
      self.log.error("Could not create LogFilePath", result["Message"])
      return result
    self.logFilePath = result["Value"]["LogFilePath"][0]
    self.logLFNPath = result["Value"]["LogTargetPath"][0]
  # both paths may arrive as single-element lists - unwrap to plain strings
  if not type(self.logFilePath) == type(" "):
    self.logFilePath = self.logFilePath[0]
  if not type(self.logLFNPath) == type(" "):
    self.logLFNPath = self.logLFNPath[0]
  # the LFN prefix of the log path identifies the experiment
  example_file = self.logFilePath
  if "/ilc/prod/clic" in example_file:
    self.experiment = "CLIC"
  elif "/ilc/prod/ilc/sid" in example_file:
    self.experiment = "ILC_SID"
  elif "/ilc/prod/ilc/mc-dbd" in example_file:
    self.experiment = "ILC_ILD"
  else:
    self.log.warn("Failed to determine experiment, reverting to default: %s" % self.experiment)
  if "Request" in self.workflow_commons:
    self.request = self.workflow_commons["Request"]
  else:
    # no failover request yet - create one tagged with this job's ID
    self.request = RequestContainer()
    self.request.setRequestName("job_%s_request.xml" % self.jobID)
    self.request.setJobID(self.jobID)
    self.request.setSourceComponent("Job_%s" % self.jobID)
  return S_OK("Parameters resolved")
def __deleteSandboxFromExternalBackend( self, SEName, SEPFN ): if self.getCSOption( "DelayedExternalDeletion", True ): gLogger.info( "Setting deletion request" ) try: request = RequestContainer() result = request.addSubRequest( { 'Attributes' : { 'Operation' : 'removePhysicalFile', 'TargetSE' : SEName, 'ExecutionOrder' : 1 } }, 'removal' ) index = result['Value'] fileDict = { 'PFN' : SEPFN, 'Status' : 'Waiting' } request.setSubRequestFiles( index, 'removal', [ fileDict ] ) return RequestClient().setRequest( "RemoteSBDeletion:%s|%s:%s" % ( SEName, SEPFN, time.time() ), request.toXML()[ 'Value' ] ) except Exception, e: gLogger.exception( "Exception while setting deletion request" ) return S_ERROR( "Cannot set deletion request: %s" % str( e ) )
def applicationSpecificInputs(self):
  """ By convention the module parameters are resolved here.

  :return: S_OK('Parameters resolved')
  """
  self.log.verbose(self.workflow_commons)
  self.log.verbose(self.step_commons)
  # dict.has_key() is deprecated - use the 'in' operator throughout
  #Earlier modules may have populated the report objects
  if 'JobReport' in self.workflow_commons:
    self.jobReport = self.workflow_commons['JobReport']
  if 'Enable' in self.step_commons:
    self.enable = self.step_commons['Enable']
    if not type(self.enable) == type(True):
      self.log.warn('Enable flag set to non-boolean value %s, setting to False' % self.enable)
      self.enable = False
  if 'TestFailover' in self.step_commons:
    # BUGFIX: this previously assigned to self.enable, so the TestFailover
    # step parameter clobbered the enable flag and self.failoverTest was
    # never set before the type check below validated it.
    self.failoverTest = self.step_commons['TestFailover']
    if not type(self.failoverTest) == type(True):
      self.log.warn('Test failover flag set to non-boolean value %s, setting to False' % self.failoverTest)
      self.failoverTest = False
  if 'JOBID' in os.environ:
    self.jobID = os.environ['JOBID']
    self.log.verbose('Found WMS JobID = %s' % self.jobID)
  else:
    self.log.info('No WMS JobID found, disabling module via control flag')
    self.enable = False
  if 'Request' in self.workflow_commons:
    self.request = self.workflow_commons['Request']
  else:
    # no failover request yet - create one tagged with this job's ID
    self.request = RequestContainer()
    self.request.setRequestName('job_%s_request.xml' % self.jobID)
    self.request.setJobID(self.jobID)
    self.request.setSourceComponent("Job_%s" % self.jobID)
  #Use LHCb utility for local running via dirac-jobexec
  if 'UserOutputData' in self.workflow_commons:
    self.userOutputData = self.workflow_commons['UserOutputData']
    if not type(self.userOutputData) == type([]):
      self.userOutputData = [i.strip() for i in self.userOutputData.split(';')]
  if 'UserOutputSE' in self.workflow_commons:
    specifiedSE = self.workflow_commons['UserOutputSE']
    # NOTE(review): when specifiedSE already is a list, self.userOutputSE is
    # left untouched - confirm this matches the intended behaviour
    if not type(specifiedSE) == type([]):
      self.userOutputSE = [i.strip() for i in specifiedSE.split(';')]
  else:
    self.log.verbose('No UserOutputSE specified, using default value: %s' % (string.join(self.defaultOutputSE, ', ')))
    self.userOutputSE = self.defaultOutputSE
  if 'UserOutputPath' in self.workflow_commons:
    self.userOutputPath = self.workflow_commons['UserOutputPath']
  return S_OK('Parameters resolved')
def __forwardable(requestString):
  """ check if request is forwardable

  The sub-requests of type transfer:putAndRegister, removal:physicalRemoval
  and removal:reTransfer are definitely not, they should be executed locally,
  as they are using local fs.

  :param str requestString: XML-serialised request
  :return: S_OK() when forwardable, S_ERROR naming the blocking operation otherwise
  """
  request = RequestContainer(requestString)
  # only 'transfer' and 'removal' sub-requests can carry local-only operations
  subRequests = request.getSubRequests(
      "transfer")["Value"] + request.getSubRequests("removal")["Value"]
  for subRequest in subRequests:
    if subRequest["Attributes"]["Operation"] in ("putAndRegister", "physicalRemoval",
                                                 "reTransfer"):
      return S_ERROR(
          "found operation '%s' that cannot be forwarded" % subRequest["Attributes"]["Operation"])
  return S_OK()
def readRequestsForJobs( self, jobIDs ):
  """ read requests for jobs

  :param list jobIDs: list with jobIDs
  :return: S_OK( { "Successful" : { jobID1 : RequestContainer, ... },
                   "Failed" : { jobIDn : "Fail reason" } } )
  """
  readReqsForJobs = self.requestManager().readRequestsForJobs( jobIDs )
  if not readReqsForJobs["OK"]:
    return readReqsForJobs
  ret = readReqsForJobs["Value"]
  if not ret:
    return S_ERROR( "No values returned" )
  # # deserialise each successfully read xml string into a RequestContainer
  for jobID, xmlStr in ret.get( "Successful", {} ).items():
    container = RequestContainer( init = False )
    container.parseRequest( request = xmlStr )
    ret["Successful"][jobID] = container
  return S_OK( ret )
def __init__(self,requestObject=False):
  """ Constructor function, can specify request object to instantiate
      FailoverTransfer or a new request object is created.

  :param requestObject: optional existing RequestContainer to reuse
  """
  self.log = gLogger.getSubLogger( "FailoverTransfer" )
  self.rm = ReplicaManager()
  # reuse the caller-supplied request container if any
  self.request = requestObject
  if not self.request:
    # no container given - start a fresh default one
    self.request = RequestContainer()
    self.request.setRequestName('default_request.xml')
    self.request.setSourceComponent('FailoverTransfer')
def generateRequest(self):
  """ Generate failover requests for the operations in the internal cache """
  request = RequestContainer()
  # try each sender in order; on failure either store its rpc stub as a
  # DISET sub-request or bail out with the matching error message
  senders = ((self.sendStoredStatusInfo, 'Could not create job state sub-request'),
             (self.sendStoredJobParameters, 'Could not create job parameters sub-request'))
  for sender, failMsg in senders:
    result = sender()
    if not result['OK']:
      if 'rpcStub' not in result:
        return S_ERROR(failMsg)
      request.addSubRequest(DISETSubRequest(result['rpcStub']).getDictionary(), 'diset')
  # an empty container means nothing needs failing over
  if request.isEmpty()['Value']:
    request = None
  return S_OK(request)
def updateRequest(self, requestName, requestString):
  """ Update a request given its name and XML-serialised string.

  Walks every sub-request of every known type, updates its files and its
  Status/Error attributes in the DB, and finally promotes the whole request
  to Done when the container reports itself done.

  :param str requestName: request name (unused here; the DB key is the request ID)
  :param str requestString: request serialised to XML
  """
  request = RequestContainer(request=requestString)
  requestTypes = [
      'transfer', 'register', 'removal', 'stage', 'diset', 'logupload'
  ]
  requestID = request.getRequestID()['Value']
  # any failure below only flags this; the loop keeps going to update as much as possible
  updateRequestFailed = False
  for requestType in requestTypes:
    res = request.getNumSubRequests(requestType)
    if res['OK']:
      numRequests = res['Value']
      for ind in range(numRequests):
        res = request.getSubRequestAttributes(ind, requestType)
        if res['OK']:
          subRequestDict = res['Value']
          if 'SubRequestID' in subRequestDict:
            subRequestID = res['Value']['SubRequestID']
            res = self.__updateSubRequestFiles(
                ind, requestType, subRequestID, request)
            if res['OK']:
              # propagate the sub-request's completion state to the DB
              if request.isSubRequestDone(
                  ind, requestType)['Value']:
                res = self._setSubRequestAttribute(
                    requestID, subRequestID, 'Status', 'Done')
              else:
                res = self._setSubRequestAttribute(
                    requestID, subRequestID, 'Status', 'Waiting')
              if not res['OK']:
                updateRequestFailed = True
            else:
              updateRequestFailed = True
            # store the sub-request error string if one was reported
            if "Error" in subRequestDict:
              result = self._setSubRequestAttribute(
                  requestID, subRequestID, 'Error', subRequestDict['Error'])
              if not result['OK']:
                updateRequestFailed = True
          else:
            # sub-request without a SubRequestID cannot be matched to a DB row
            updateRequestFailed = True
        else:
          updateRequestFailed = True
    else:
      updateRequestFailed = True
  if updateRequestFailed:
    errStr = 'Failed to update request %s.' % requestID
    return S_ERROR(errStr)
  else:
    # only close the request once the container reports all sub-requests done
    if request.isRequestDone()['Value']:
      res = self._setRequestAttribute(requestID, 'Status', 'Done')
      if not res['OK']:
        errStr = 'Failed to update request status of %s to Done.' % requestID
        return S_ERROR(errStr)
    return S_OK()
def sweeper( cls ):
    """ Move cached requests to the central RequestManager.

    Picks up to 30 of the oldest files in the cache directory, uploads each
    one to the central RequestManager and deletes the local copy only after a
    successful upload.

    :param cls: class reference
    :return: S_OK, or S_ERROR when an unexpected exception is hit
    """
    cacheDir = cls.cacheDir()
    ## cache dir empty?
    if not os.listdir( cacheDir ):
        gLogger.always( "sweeper: CacheDir %s is empty, nothing to do" % cacheDir )
        return S_OK()
    ## read up to 30 cache dir files, the oldest first
    ## (the original comment said 10 but the slice has always been [:30])
    cachedRequests = [ os.path.abspath( requestFile ) for requestFile in
                       sorted( filter( os.path.isfile,
                                       [ os.path.join( cacheDir, requestName )
                                         for requestName in os.listdir( cacheDir ) ] ),
                               key = os.path.getctime ) ][:30]
    ## set cached requests to the central RequestManager
    for cachedFile in cachedRequests:
        try:
            ## 'with' guarantees the handle is closed (the original leaked it)
            with open( cachedFile, "r" ) as requestFile:
                requestString = requestFile.read()
            cachedRequest = RequestContainer( requestString )
            requestName = cachedRequest.getAttribute( "RequestName" )["Value"]
            ## cibak: hack for DISET requests
            if requestName == "Unknown":
                cachedRequest.setAttribute( "RequestName", makeGuid() )
                requestName = cachedRequest.getAttribute( "RequestName" )["Value"]
            setRequest = cls.requestManager().setRequest( requestName, requestString )
            if not setRequest["OK"]:
                gLogger.error( "sweeper: unable to set request '%s' @ RequestManager: %s" % ( requestName,
                                                                                              setRequest["Message"] ) )
                continue
            gLogger.info( "sweeper: successfully set request '%s' @ RequestManager" % requestName )
            os.unlink( cachedFile )
        except Exception as error:
            gLogger.exception( "sweeper: hit by exception %s" % str( error ) )
            return S_ERROR( "sweeper: hit by exception: %s" % str( error ) )
    return S_OK()
def getRequestDict(cls, requestType):
    """ retrieve Request of type requestType from RequestDB

    :param cls: class reference
    :param str requestType: type of request

    :return: S_ERROR on error
    :return: S_OK with request dictionary::

      requestDict = { "requestString" : str,
                      "requestName" : str,
                      "sourceServer" : str,
                      "executionOrder" : list,
                      "requestObj" : RequestContainer,
                      "jobId" : int }
    """
    ## prepare requestDict
    requestDict = {
        "requestString": None,
        "requestName": None,
        "sourceServer": None,
        "executionOrder": None,
        "requestObj": None,
        "jobId": None
    }
    ## get request out of DB
    res = cls.requestClient().getRequest(requestType)
    if not res["OK"]:
        gLogger.error(res["Message"])
        return res
    elif not res["Value"]:
        ## no pending request of this type -- not an error
        msg = "Request of type '%s' not found in RequestDB." % requestType
        gLogger.info(msg)
        return S_OK()
    ## store values
    requestDict["requestName"] = res["Value"]["RequestName"]
    requestDict["requestString"] = res["Value"]["RequestString"]
    requestDict["sourceServer"] = res["Value"]["Server"]
    requestDict["requestObj"] = RequestContainer(
        request=requestDict["requestString"])
    ## get JobID
    ## NOTE(review): JobID is read from the top-level result dict, not from
    ## res["Value"] like the other fields -- verify against requestClient().getRequest
    try:
        requestDict["jobId"] = int(res["JobID"])
    except (ValueError, TypeError, KeyError) as exc:
        ## fixed: a missing key or a None value used to escape the old
        ## 'except ValueError' and crash instead of degrading to jobId = 0
        gLogger.warn(
            "Cannot read JobID for request %s, setting it to 0: %s" %
            (requestDict["requestName"], str(exc)))
        requestDict["jobId"] = 0
    ## fixed: the original fell off the end and implicitly returned None,
    ## although the docstring promises S_OK with the request dictionary
    return S_OK(requestDict)
def applicationSpecificInputs(self):
    """ By convention the module input parameters are resolved here.

    Reads the WMS JobID from the environment, the Enable flag and report
    objects from the workflow/step commons, normalises InputData to a list of
    bare LFNs and makes sure a failover RequestContainer exists.

    :return: S_OK always
    """
    self.log.debug(self.workflow_commons)
    self.log.debug(self.step_commons)

    # 'JOBID' is set by the WMS; without it the module disables itself
    if 'JOBID' in os.environ:
        self.jobID = os.environ['JOBID']
        self.log.verbose('Found WMS JobID = %s' % self.jobID)
    else:
        self.log.info('No WMS JobID found, disabling module via control flag')
        self.enable = False

    if 'Enable' in self.step_commons:
        self.enable = self.step_commons['Enable']
        # guard against workflow definitions carrying e.g. the string "True"
        if not isinstance(self.enable, bool):
            self.log.warn('Enable flag set to non-boolean value %s, setting to False' % self.enable)
            self.enable = False

    #Earlier modules will have populated the report objects
    if 'JobReport' in self.workflow_commons:
        self.jobReport = self.workflow_commons['JobReport']

    if 'FileReport' in self.workflow_commons:
        self.fileReport = self.workflow_commons['FileReport']

    if self.InputData:
        # accept either a ';'-separated string or a ready-made list
        if not isinstance(self.InputData, list):
            self.InputData = self.InputData.split(';')
        self.InputData = [x.replace('LFN:', '') for x in self.InputData]

    if 'Request' in self.workflow_commons:
        self.request = self.workflow_commons['Request']
    # NOTE(review): fallback applied whenever no usable request was obtained,
    # whether the key was absent or its value falsy -- confirm this matches
    # the intended behaviour of the original (indentation was ambiguous)
    if not self.request:
        self.request = RequestContainer()
        self.request.setRequestName('job_%s_request.xml' % self.jobID)
        self.request.setJobID(self.jobID)
        self.request.setSourceComponent("Job_%s" % self.jobID)

    if 'PRODUCTION_ID' in self.workflow_commons:
        self.productionID = self.workflow_commons['PRODUCTION_ID']
    if 'JOB_ID' in self.workflow_commons:
        self.prodJobID = self.workflow_commons['JOB_ID']
    return S_OK('Parameters resolved')
def prepareTransformationTasks( self, transBody, taskDict, owner = '', ownerGroup = '' ):
    """ Build a serialised RequestContainer ('TaskObject') for every task.

    :param str transBody: optional 'requestType;requestOperation' pair;
                          defaults to 'transfer;replicateAndRegister'
    :param dict taskDict: taskID -> parameter dict with 'TransformationID',
                          'TargetSE' and a ';'-separated 'InputData' string
    :param str owner: unused, kept for interface compatibility
    :param str ownerGroup: unused, kept for interface compatibility
    :return: S_OK( taskDict ) with each task extended by a 'TaskObject' XML string
    """
    requestType = 'transfer'
    requestOperation = 'replicateAndRegister'
    try:
        requestType, requestOperation = transBody.split( ';' )
    except ( AttributeError, ValueError ):
        # fixed: was a bare 'except:'; a None/empty or malformed body simply
        # keeps the defaults, everything else now propagates
        pass
    # stdlib sorted() replaces the project helper sortList()
    for taskID in sorted( taskDict ):
        paramDict = taskDict[taskID]
        transID = paramDict['TransformationID']
        oRequest = RequestContainer( init = False )
        subRequestIndex = oRequest.initiateSubRequest( requestType )['Value']
        attributeDict = { 'Operation' : requestOperation,
                          'TargetSE' : paramDict['TargetSE'] }
        oRequest.setSubRequestAttributes( subRequestIndex, requestType, attributeDict )
        files = [ { 'LFN' : lfn } for lfn in paramDict['InputData'].split( ';' ) ]
        oRequest.setSubRequestFiles( subRequestIndex, requestType, files )
        # request name encodes transformation and task ids, zero-padded to 8 digits
        requestName = str( transID ).zfill( 8 ) + '_' + str( taskID ).zfill( 8 )
        oRequest.setRequestAttributes( { 'RequestName' : requestName } )
        taskDict[taskID]['TaskObject'] = oRequest.toXML()['Value']
    return S_OK( taskDict )
def sweeper(cls): """ move cached request to the central request manager :param self: self reference """ cacheDir = cls.cacheDir() ## cache dir empty? if not os.listdir(cacheDir): gLogger.always("sweeper: CacheDir %s is empty, nothing to do" % cacheDir) return S_OK() else: ## read 10 cache dir files, the oldest first cachedRequests = [ os.path.abspath(requestFile) for requestFile in sorted(filter(os.path.isfile, [ os.path.join(cacheDir, requestName) for requestName in os.listdir(cacheDir) ]), key=os.path.getctime) ][:30] ## set cached requests to the central RequestManager for cachedFile in cachedRequests: try: requestString = "".join(open(cachedFile, "r").readlines()) cachedRequest = RequestContainer(requestString) requestName = cachedRequest.getAttribute( "RequestName")["Value"] ## cibak: hack for DISET requests if requestName == "Unknown": cachedRequest.setAttribute("RequestName", makeGuid()) requestName = cachedRequest.getAttribute( "RequestName")["Value"] setRequest = cls.requestManager().setRequest( requestName, requestString) if not setRequest["OK"]: gLogger.error( "sweeper: unable to set request '%s' @ RequestManager: %s" % (requestName, setRequest["Message"])) continue gLogger.info( "sweeper: successfully set request '%s' @ RequestManager" % requestName) os.unlink(cachedFile) except Exception, error: gLogger.exception("sweeper: hit by exception %s" % str(error)) return S_ERROR("sweeper: hit by exception: %s" % str(error)) return S_OK()
def updateRequest( self, requestName, requestString ):
    """ Persist the state of a request (and its sub-requests) parsed from XML.

    Every sub-request of each known type has its files updated and its Status
    set to Done/Waiting according to isSubRequestDone; a present 'Error'
    attribute is stored too. Failures only raise a flag so that all
    sub-requests are still attempted before reporting.

    :param str requestName: request name (identification is by RequestID)
    :param str requestString: serialised request XML
    :return: S_OK on success, S_ERROR if any update step failed
    """
    request = RequestContainer( request = requestString )
    requestTypes = ['transfer', 'register', 'removal', 'stage', 'diset', 'logupload']
    requestID = request.getRequestID()['Value']
    updateRequestFailed = False
    for requestType in requestTypes:
        res = request.getNumSubRequests( requestType )
        if res['OK']:
            numRequests = res['Value']
            for ind in range( numRequests ):
                res = request.getSubRequestAttributes( ind, requestType )
                if res['OK']:
                    subRequestDict = res['Value']
                    if 'SubRequestID' in subRequestDict:
                        subRequestID = res['Value']['SubRequestID']
                        res = self.__updateSubRequestFiles( ind, requestType, subRequestID, request )
                        if res['OK']:
                            # completed sub-requests go to Done, the rest back to Waiting
                            if request.isSubRequestDone( ind, requestType )['Value']:
                                res = self._setSubRequestAttribute( requestID, subRequestID, 'Status', 'Done' )
                            else:
                                res = self._setSubRequestAttribute( requestID, subRequestID, 'Status', 'Waiting' )
                            if not res['OK']:
                                updateRequestFailed = True
                        else:
                            updateRequestFailed = True
                        # persist any error message carried by the sub-request
                        if "Error" in subRequestDict:
                            result = self._setSubRequestAttribute( requestID, subRequestID, 'Error', subRequestDict['Error'] )
                            if not result['OK']:
                                updateRequestFailed = True
                    else:
                        updateRequestFailed = True
                else:
                    updateRequestFailed = True
        else:
            updateRequestFailed = True
    if updateRequestFailed:
        errStr = 'Failed to update request %s.' % requestID
        return S_ERROR( errStr )
    else:
        # the request itself is only closed when everything updated cleanly
        if request.isRequestDone()['Value']:
            res = self._setRequestAttribute( requestID, 'Status', 'Done' )
            if not res['OK']:
                errStr = 'Failed to update request status of %s to Done.' % requestID
                return S_ERROR( errStr )
        return S_OK()
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN):
    """ Schedule deletion of a sandbox physical file via the request system.

    A 'removal' sub-request with a removePhysicalFile operation is created
    and handed to the RequestClient for asynchronous execution.

    :param str SEName: storage element holding the sandbox
    :param str SEPFN: physical file name of the sandbox on that SE

    NOTE(review): when the 'DelayedExternalDeletion' option is False this
    method implicitly returns None -- confirm callers treat that as a no-op.
    """
    if self.getCSOption("DelayedExternalDeletion", True):
        gLogger.info("Setting deletion request")
        try:
            request = RequestContainer()
            # one 'removal' sub-request wrapping a removePhysicalFile operation
            result = request.addSubRequest(
                {
                    'Attributes': {
                        'Operation': 'removePhysicalFile',
                        'TargetSE': SEName,
                        'ExecutionOrder': 1
                    }
                }, 'removal')
            index = result['Value']
            fileDict = {'PFN': SEPFN, 'Status': 'Waiting'}
            request.setSubRequestFiles(index, 'removal', [fileDict])
            # request name encodes SE, PFN and a timestamp for uniqueness
            return RequestClient().setRequest(
                "RemoteSBDeletion:%s|%s:%s" % (SEName, SEPFN, time.time()),
                request.toXML()['Value'])
        except Exception, e:
            gLogger.exception("Exception while setting deletion request")
            return S_ERROR("Cannot set deletion request: %s" % str(e))
def readRequestsForJobs( self, jobIDs ):
    """ read and return Requests for jobs

    :param list jobIDs: list of jobIDs (ints or values castable to int)
    :return: S_ERROR on malformed input or a failed name lookup, otherwise
             S_OK( { "Successful" : { jobID : requestXML },
                     "Failed" : { jobID : reason } } )
    """
    if not isinstance( jobIDs, list ):
        ## fixed: the format string had a %s placeholder but no argument bound to it
        return S_ERROR( "RequestDB: wrong format for jobIDs argument, got %s, expecting a list" % type( jobIDs ) )
    # make sure list is unique and has only non-zero ints
    jobIDs = list( set( [ int( jobID ) for jobID in jobIDs if int( jobID ) != 0 ] ) )
    ## column layouts used to zip query tuples into attribute dicts
    reqCols = [ "RequestID", "RequestName", "JobID", "Status", "OwnerDN", "OwnerGroup",
                "DIRACSetup", "SourceComponent", "CreationTime", "SubmissionTime", "LastUpdate" ]
    subCols = [ "SubRequestID", "Operation", "Arguments", "RequestType", "ExecutionOrder",
                "Error", "SourceSE", "TargetSE", "Catalogue", "CreationTime", "SubmissionTime", "LastUpdate" ]
    fileCols = [ "FileID", "LFN", "Size", "PFN", "GUID", "Md5", "Addler", "Attempt", "Status", "Error" ]
    requestNames = self.getRequestForJobs( jobIDs )
    if not requestNames["OK"]:
        return requestNames
    requestNames = requestNames["Value"]
    ## this will be returned
    retDict = { "Successful" : dict(), "Failed" : dict() }
    for jobID in jobIDs:
        ## missing requests
        if jobID not in requestNames:
            retDict["Failed"][jobID] = "Request not found"
            continue
        requestName = requestNames[jobID]
        ## get request attributes
        ## NOTE: requestName is interpolated straight into SQL -- relies on
        ## trusted, DB-sourced names; parameterise if that ever changes
        queryStr = "SELECT %s FROM Requests WHERE RequestName = '%s';" % ( ",".join( reqCols ), requestName )
        queryRes = self._query( queryStr )
        if not queryRes["OK"]:
            retDict["Failed"][jobID] = queryRes["Message"]
            continue
        queryRes = queryRes["Value"] if queryRes["Value"] else None
        if not queryRes:
            retDict["Failed"][jobID] = "Unable to read request attributes."
            continue
        requestObj = RequestContainer( init = False )
        reqAttrs = dict( zip( reqCols, queryRes[0] ) )
        requestObj.setRequestAttributes( reqAttrs )
        queryStr = "SELECT %s FROM `SubRequests` WHERE `RequestID`=%s;" % ( ",".join( subCols ), reqAttrs["RequestID"] )
        queryRes = self._query( queryStr )
        if not queryRes["OK"]:
            retDict["Failed"][jobID] = queryRes["Message"]
            continue
        queryRes = queryRes["Value"] if queryRes["Value"] else None
        if not queryRes:
            retDict["Failed"][jobID] = "Unable to read subrequest attributes."
            continue
        ## get sub-requests
        for recTuple in queryRes:
            subReqAttrs = dict( zip( subCols, recTuple ) )
            subType = subReqAttrs["RequestType"]
            subReqAttrs["ExecutionOrder"] = int( subReqAttrs["ExecutionOrder"] )
            ## RequestType selects the container slot, it is not a sub-request attribute
            del subReqAttrs["RequestType"]
            index = requestObj.initiateSubRequest( subType )
            index = index["Value"]
            requestObj.setSubRequestAttributes( index, subType, subReqAttrs )
            ## get files
            subFiles = []
            fileQuery = "SELECT %s FROM `Files` WHERE `SubRequestID` = %s ORDER BY `FileID`;" % ( ",".join( fileCols ),
                                                                                                  subReqAttrs["SubRequestID"] )
            fileQueryRes = self._query( fileQuery )
            if fileQueryRes["OK"] and fileQueryRes["Value"]:
                for fileRec in fileQueryRes["Value"]:
                    subFiles.append( dict( zip( fileCols, fileRec ) ) )
            if subFiles:
                requestObj.setSubRequestFiles( index, subType, subFiles )
        retDict["Successful"][jobID] = requestObj.toXML()["Value"]
    return S_OK( retDict )
def getRequest(self, requestType):
    """ Get a request of a given type eligible for execution.

    Selects up to 100 of the oldest requests with Waiting sub-requests of
    the given type, randomly picks one, atomically marks its sub-requests
    'Assigned', then materialises the full request (attributes, files and
    datasets) into a RequestContainer and returns its XML.

    :param str requestType: sub-request type to look for
    :return: S_OK() when nothing eligible, S_ERROR on DB failure, otherwise
             S_OK( { 'RequestName', 'RequestString', 'JobID' } )
    """
    # RG: What if requestType is not given?
    # the first query will return nothing.
    # KC: maybe returning S_ERROR would be enough?
    # alternatively we should check if requestType is known (in 'transfer', 'removal', 'register' and 'diset')
    if not requestType or type(requestType) not in types.StringTypes:
        return S_ERROR("Request type not given.")
    myRequestType = self._escapeString(requestType)
    if not myRequestType:
        return myRequestType
    myRequestType = myRequestType['Value']
    start = time.time()
    dmRequest = RequestContainer(init=False)
    requestID = 0
    subIDList = []
    fields = [
        'RequestID', 'SubRequestID', 'Operation', 'Arguments',
        'ExecutionOrder', 'SourceSE', 'TargetSE', 'Catalogue',
        'CreationTime', 'SubmissionTime', 'LastUpdate'
    ]
    # get the pending SubRequest sorted by ExecutionOrder and LastUpdate
    req = "SELECT RequestID, ExecutionOrder, Status, RequestType, LastUpdate from SubRequests WHERE Status IN ( 'Waiting', 'Assigned' ) ORDER BY ExecutionOrder, LastUpdate"
    # now get sorted list of RequestID (according to the above)
    req = "SELECT * from ( %s ) as T1 GROUP BY RequestID" % req
    # and get the 100 oldest ones of Type requestType
    req = "SELECT RequestID, ExecutionOrder FROM ( %s ) as T2 WHERE RequestType = %s ORDER BY LastUpdate limit 100" % (
        req, myRequestType)
    # and now get all waiting SubRequest for the selected RequestID and ExecutionOrder
    req = "SELECT A.%s FROM SubRequests AS A, ( %s ) AS B WHERE " % (
        ', A.'.join(fields), req)
    req = "%s A.RequestID = B.RequestID AND A.ExecutionOrder = B.ExecutionOrder AND A.Status = 'Waiting' AND A.RequestType = %s;" % (
        req, myRequestType)
    result = self._query(req)
    if not result['OK']:
        err = 'RequestDB._getRequest: Failed to retrieve Requests'
        return S_ERROR('%s\n%s' % (err, result['Message']))
    if not result['Value']:
        return S_OK()
    # We get up to 10 Request candidates, to add some randomness
    # group the returned rows per RequestID
    reqDict = {}
    for row in result['Value']:
        reqDict.setdefault(row[0], [])
        reqDict[row[0]].append(row[1:])
    reqIDList = reqDict.keys()
    random.shuffle(reqIDList)
    for reqID in reqIDList:
        sidList = [x[0] for x in reqDict[reqID]]
        for subID in sidList:
            # claim the sub-request; Value == 0 means another agent won the race
            req = "UPDATE SubRequests SET Status='Assigned' WHERE RequestID=%s AND SubRequestID=%s;" % (
                reqID, subID)
            resAssigned = self._update(req)
            if not resAssigned['OK']:
                if subIDList:
                    self.__releaseSubRequests(reqID, subIDList)
                return S_ERROR('Failed to assign subrequests: %s' % resAssigned['Message'])
            if resAssigned['Value'] == 0:
                # Somebody has assigned this request
                gLogger.warn(
                    'Already assigned subrequest %d of request %d' % (subID, reqID))
            else:
                subIDList.append(subID)
        if subIDList:
            # We managed to get some requests, can continue now
            requestID = reqID
            break
    # Haven't succeeded to get any request
    if not requestID:
        return S_OK()
    dmRequest.setRequestID(requestID)
    fields = [
        'FileID', 'LFN', 'Size', 'PFN', 'GUID', 'Md5', 'Addler', 'Attempt',
        'Status'
    ]
    # fill in every claimed sub-request, with its files and datasets
    for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in reqDict[
            requestID]:
        if not subRequestID in subIDList:
            continue
        res = dmRequest.initiateSubRequest(requestType)
        ind = res['Value']
        subRequestDict = {
            'Status': 'Waiting',
            'SubRequestID': subRequestID,
            'Operation': operation,
            'Arguments': arguments,
            'ExecutionOrder': int(executionOrder),
            'SourceSE': sourceSE,
            'TargetSE': targetSE,
            'Catalogue': catalogue,
            'CreationTime': creationTime,
            'SubmissionTime': submissionTime,
            'LastUpdate': lastUpdate
        }
        res = dmRequest.setSubRequestAttributes(ind, requestType, subRequestDict)
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        req = "SELECT %s FROM Files WHERE SubRequestID = %s ORDER BY FileID;" % (
            ', '.join(fields), subRequestID)
        res = self._query(req)
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % (
                requestID, subRequestID)
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        files = []
        for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res[
                'Value']:
            fileDict = {
                'FileID': fileID,
                'LFN': lfn,
                'Size': size,
                'PFN': pfn,
                'GUID': guid,
                'Md5': md5,
                'Addler': addler,
                'Attempt': attempt,
                'Status': status
            }
            files.append(fileDict)
        res = dmRequest.setSubRequestFiles(ind, requestType, files)
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % (
                requestID, subRequestID)
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
        res = self._query(req)
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % (
                requestID, subRequestID)
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        datasets = []
        for dataset, status in res['Value']:
            # only the dataset name is kept; its status is currently ignored
            datasets.append(dataset)
        res = dmRequest.setSubRequestDatasets(ind, requestType, datasets)
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % (
                requestID, subRequestID)
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
    # finally read the request-level attributes
    fields = [
        'RequestName', 'JobID', 'OwnerDN', 'OwnerGroup', 'DIRACSetup',
        'SourceComponent', 'CreationTime', 'SubmissionTime', 'LastUpdate'
    ]
    req = "SELECT %s from Requests WHERE RequestID = %s;" % (
        ', '.join(fields), requestID)
    res = self._query(req)
    if not res['OK']:
        err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
        self.__releaseSubRequests(requestID, subIDList)
        return S_ERROR('%s\n%s' % (err, res['Message']))
    requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate = res[
        'Value'][0]
    dmRequest.setRequestName(requestName)
    dmRequest.setJobID(jobID)
    dmRequest.setOwnerDN(ownerDN)
    dmRequest.setOwnerGroup(ownerGroup)
    dmRequest.setDIRACSetup(diracSetup)
    dmRequest.setSourceComponent(sourceComponent)
    dmRequest.setCreationTime(str(creationTime))
    dmRequest.setLastUpdate(str(lastUpdate))
    res = dmRequest.toXML()
    if not res['OK']:
        err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % (
            requestID)
        self.__releaseSubRequests(requestID, subIDList)
        return S_ERROR('%s\n%s' % (err, res['Message']))
    requestString = res['Value']
    #still have to manage the status of the dataset properly
    resultDict = {}
    resultDict['RequestName'] = requestName
    resultDict['RequestString'] = requestString
    resultDict['JobID'] = jobID
    return S_OK(resultDict)
def getRegisterRequest( ):
    """ Build a canned 'register' request fixture.

    Returns a dict shaped like what the RequestClient hands to the
    RegistrationAgent: the serialised request plus its bookkeeping fields.
    """
    container = RequestContainer( init = False )
    # request-level attributes
    container.setJobID( 11889410 )
    container.setOwnerGroup( "lhcb_user" )
    container.setDIRACSetup( "LHCb-Production" )
    container.setSourceComponent( None )
    container.setCreationTime( "0000-00-00 00:00:00" )
    container.setLastUpdate( "2011-02-19 04:57:02" )
    container.setStatus( "Waiting" )
    # one 'register' sub-request carrying a single waiting file
    container.initiateSubRequest( "register" )
    subAttrs = { "Status" : "Waiting",
                 "SubRequestID" : 2259916,
                 "Operation" : "registerFile",
                 "Arguments" : None,
                 "ExecutionOrder" : 0,
                 "SourceSE" : None,
                 "TargetSE" : "RAL-USER",
                 "Catalogue" : "LcgFileCatalogCombined",
                 "CreationTime" : "2011-02-19 04:57:02",
                 "SubmissionTime" : "2011-02-19 04:57:02",
                 "LastUpdate" : "2011-08-18 20:14:22" }
    container.setSubRequestAttributes( 0, "register", subAttrs )
    singleFile = { "FileID" : 1610538,
                   "LFN" : "/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
                   "Size" : 153961749,
                   "PFN" : "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
                   "GUID" : "5911A19C-7CDF-7F2A-36ED-089CD410F98A",
                   "Md5" : None,
                   "Addler" : "92b85e26",
                   "Attempt" : 1,
                   "Status" : "Waiting" }
    container.setSubRequestFiles( 0, "register", [ singleFile ] )
    return { "requestName" : "11889410.xml",
             "requestString" : container.toXML()["Value"],
             "jobID" : 11889410,
             "executionOrder" : 0,
             "sourceServer" : "foobarserver",
             "configPath" : "/Systems/DataManagement/Development/Agents/RegistrationAgent" }
def execute( self ):
    """ Agent cycle: fetch one 'integrity' request and process its LFCvsSE
    sub-requests, walking the catalogue namespace from each waiting LFN and
    cross-checking file sizes between the catalogue and the storage elements.
    Mismatches and missing PFNs are reported to the DataIntegrity service.
    """
    res = self.RequestDBClient.getRequest( 'integrity' )
    if not res['OK']:
        gLogger.info( "LFCvsSEAgent.execute: Failed to get request from database." )
        return S_OK()
    elif not res['Value']:
        gLogger.info( "LFCvsSEAgent.execute: No requests to be executed found." )
        return S_OK()
    requestString = res['Value']['RequestString']
    requestName = res['Value']['RequestName']
    sourceServer = res['Value']['Server']
    gLogger.info( "LFCvsSEAgent.execute: Obtained request %s" % requestName )
    oRequest = RequestContainer( request = requestString )
    ################################################
    # Find the number of sub-requests from the request
    res = oRequest.getNumSubRequests( 'integrity' )
    if not res['OK']:
        errStr = "LFCvsSEAgent.execute: Failed to obtain number of integrity subrequests."
        gLogger.error( errStr, res['Message'] )
        return S_OK()
    gLogger.info( "LFCvsSEAgent.execute: Found %s sub requests." % res['Value'] )
    ################################################
    # For all the sub-requests in the request
    for ind in range( res['Value'] ):
        gLogger.info( "LFCvsSEAgent.execute: Processing sub-request %s." % ind )
        subRequestAttributes = oRequest.getSubRequestAttributes( ind, 'integrity' )['Value']
        if subRequestAttributes['Status'] == 'Waiting':
            subRequestFiles = oRequest.getSubRequestFiles( ind, 'integrity' )['Value']
            operation = subRequestAttributes['Operation']
            ################################################
            # If the sub-request is a lfcvsse operation
            if operation == 'LFCvsSE':
                gLogger.info( "LFCvsSEAgent.execute: Attempting to execute %s sub-request." % operation )
                for subRequestFile in subRequestFiles:
                    if subRequestFile['Status'] == 'Waiting':
                        lfn = subRequestFile['LFN']
                        oNamespaceBrowser = NamespaceBrowser( lfn )
                        # Loop over all the directories and sub-directories
                        while ( oNamespaceBrowser.isActive() ):
                            currentDir = oNamespaceBrowser.getActiveDir()
                            gLogger.info( "LFCvsSEAgent.execute: Attempting to get contents of %s." % currentDir )
                            res = self.ReplicaManager.getCatalogDirectoryContents( currentDir )
                            # on any listing failure the directory is re-queued for another pass
                            if not res['OK']:
                                subDirs = [currentDir]
                            elif res['Value']['Failed'].has_key( currentDir ):
                                subDirs = [currentDir]
                            else:
                                subDirs = res['Value']['Successful'][currentDir]['SubDirs']
                                files = res['Value']['Successful'][currentDir]['Files']
                                # build pfn<->lfn maps and group PFNs per storage element
                                # NOTE(review): 'lfn' is reused as the inner loop variable,
                                # clobbering the sub-request LFN -- the final
                                # setSubRequestFileAttributeValue below uses whatever
                                # value 'lfn' last held; confirm this is intended
                                lfnSizeDict = {}
                                pfnLfnDict = {}
                                pfnStatusDict = {}
                                sePfnDict = {}
                                for lfn, lfnDict in files.items():
                                    lfnSizeDict[lfn] = lfnDict['MetaData']['Size']
                                    for se in lfnDict['Replicas'].keys():
                                        pfn = lfnDict['Replicas'][se]['PFN']
                                        status = lfnDict['Replicas'][se]['Status']
                                        pfnStatusDict[pfn] = status
                                        pfnLfnDict[pfn] = lfn
                                        if not sePfnDict.has_key( se ):
                                            sePfnDict[se] = []
                                        sePfnDict[se].append( pfn )
                                # compare catalogue metadata with what each SE reports
                                for storageElementName, physicalFiles in sePfnDict.items():
                                    gLogger.info( "LFCvsSEAgent.execute: Attempting to get metadata for files on %s." % storageElementName )
                                    res = self.ReplicaManager.getStorageFileMetadata( physicalFiles, storageElementName )
                                    if not res['OK']:
                                        gLogger.error( "LFCvsSEAgent.execute: Completely failed to get physical file metadata.", res['Message'] )
                                    else:
                                        # PFNs the SE could not stat: report as missing
                                        for pfn in res['Value']['Failed'].keys():
                                            gLogger.error( "LFCvsSEAgent.execute: Failed to get metadata.", "%s %s" % ( pfn, res['Value']['Failed'][pfn] ) )
                                            lfn = pfnLfnDict[pfn]
                                            fileMetadata = {'Prognosis':'MissingSEPfn', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName, 'Size':lfnSizeDict[lfn]}
                                            IntegrityDB = RPCClient( 'DataManagement/DataIntegrity' )
                                            resInsert = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                                            if resInsert['OK']:
                                                gLogger.info( "LFCvsSEAgent.execute: Successfully added to IntegrityDB." )
                                                gLogger.error( "Change the status in the LFC,ProcDB...." )
                                            else:
                                                gLogger.error( "Shit, f**k, bugger. Add the failover." )
                                        # PFNs the SE could stat: compare sizes
                                        for pfn, pfnDict in res['Value']['Successful'].items():
                                            lfn = pfnLfnDict[pfn]
                                            catalogSize = int( lfnSizeDict[lfn] )
                                            storageSize = int( pfnDict['Size'] )
                                            if int( catalogSize ) == int( storageSize ):
                                                gLogger.info( "LFCvsSEAgent.execute: Catalog and storage sizes match.", "%s %s" % ( pfn, storageElementName ) )
                                                gLogger.info( "Change the status in the LFC" )
                                            else:
                                                gLogger.error( "LFCvsSEAgent.execute: Catalog and storage size mis-match.", "%s %s" % ( pfn, storageElementName ) )
                                                fileMetadata = {'Prognosis':'PfnSizeMismatch', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName}
                                                IntegrityDB = RPCClient( 'DataManagement/DataIntegrity' )
                                                resInsert = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                                                if resInsert['OK']:
                                                    gLogger.info( "LFCvsSEAgent.execute: Successfully added to IntegrityDB." )
                                                    gLogger.error( "Change the status in the LFC,ProcDB...." )
                                                else:
                                                    gLogger.error( "Shit, f**k, bugger. Add the failover." )
                            oNamespaceBrowser.updateDirs( subDirs )
                        oRequest.setSubRequestFileAttributeValue( ind, 'integrity', lfn, 'Status', 'Done' )
            ################################################
            # If the sub-request is none of the above types
            else:
                gLogger.info( "LFCvsSEAgent.execute: Operation not supported.", operation )
            ################################################
            # Determine whether there are any active files
            if oRequest.isSubRequestEmpty( ind, 'integrity' )['Value']:
                oRequest.setSubRequestStatus( ind, 'integrity', 'Done' )
        ################################################
        # If the sub-request is already in terminal state
        else:
            gLogger.info( "LFCvsSEAgent.execute: Sub-request %s is status '%s' and not to be executed." % ( ind, subRequestAttributes['Status'] ) )
    ################################################
    # Generate the new request string after operation
    requestString = oRequest.toXML()['Value']
    res = self.RequestDBClient.updateRequest( requestName, requestString, sourceServer )
    return S_OK()
def setRequest(self, requestName, requestString):
    """ Insert a request (attributes, sub-requests, files and datasets) into
    the DB from its serialised XML string.

    A duplicate request name is overridden only when the incoming request
    carries a meaningful (positive integer) JobID, i.e. it comes from a DIRAC
    job. On any failure during insertion the partially written request is
    deleted again.

    :param str requestName: request name
    :param str requestString: serialised request XML
    :return: S_OK( requestID ) on success, S_ERROR otherwise
    """
    request = RequestContainer(init=True, request=requestString)
    requestTypes = request.getSubRequestTypes()['Value']
    failed = False
    res = self._getRequestID(requestName)
    if not res['OK']:
        # we have a special case here: if request already exists, we override it if it
        # comes from a DIRAC job. This is identified by having a meaningful JobID in
        # the request
        if res['Message'].find('Duplicate') != -1:
            # Duplicate request
            jobID = request.getJobID()['Value']
            if jobID == "Unknown":
                return res
            try:
                jobID = int(jobID)
            except:
                return res
            if jobID > 0:
                # Remove the existing request
                result = self._deleteRequest(requestName)
                if not result['OK']:
                    message = res['Message']
                    return S_ERROR('Failed to set request: ' + message +
                                   ' can not override')
                res = self._getRequestID(requestName)
                if not res['OK']:
                    return res
        else:
            return res
    requestID = res['Value']
    # maps subRequestID -> status hint returned by __setSubRequestAttributes
    subRequestIDs = {}
    res = self.__setRequestAttributes(requestID, request)
    if res['OK']:
        for requestType in requestTypes:
            res = request.getNumSubRequests(requestType)
            numRequests = res['Value']
            for ind in range(numRequests):
                res = self._getSubRequestID(requestID, requestType)
                if res['OK']:
                    subRequestID = res['Value']
                    res = self.__setSubRequestAttributes(
                        requestID, ind, requestType, subRequestID, request)
                    if res['OK']:
                        subRequestIDs[subRequestID] = res['Value']
                        res = self.__setSubRequestFiles(
                            ind, requestType, subRequestID, request)
                        if res['OK']:
                            res = self.__setSubRequestDatasets(
                                ind, requestType, subRequestID, request)
                            if not res['OK']:
                                failed = True
                                message = res['Message']
                        else:
                            failed = True
                            message = res['Message']
                    else:
                        failed = True
                        message = res['Message']
                else:
                    failed = True
                    message = res['Message']
    else:
        failed = True
        message = res['Message']
    # finalise sub-request statuses; missing/falsy status defaults to Waiting
    for subRequestID, status in subRequestIDs.items():
        if not status:
            status = "Waiting"
        res = self._setSubRequestAttribute(requestID, subRequestID, 'Status',
                                           status)
        if not res['OK']:
            failed = True
            message = res['Message']
    res = self._setRequestAttribute(requestID, 'Status', 'Waiting')
    if not res['OK']:
        failed = True
        message = res['Message']
    if failed:
        # roll back the partial insert
        res = self._deleteRequest(requestName)
        return S_ERROR('Failed to set request: ' + message)
    else:
        return S_OK(requestID)
class FailoverTransfer:
    """Upload a file to one of a list of SEs, falling back to setting
    asynchronous transfer/registration/removal requests when the direct
    operation fails. All requests are accumulated in ``self.request``.
    """

    #############################################################################
    def __init__(self, requestObject=False):
        """ Constructor function, can specify request object to instantiate
            FailoverTransfer or a new request object is created.
        """
        self.log = gLogger.getSubLogger("FailoverTransfer")
        self.rm = ReplicaManager()
        self.request = requestObject
        if not self.request:
            self.request = RequestContainer()
            self.request.setRequestName('default_request.xml')
            self.request.setSourceComponent('FailoverTransfer')

    #############################################################################
    def transferAndRegisterFile(self,
                                fileName,
                                localPath,
                                lfn,
                                destinationSEList,
                                fileGUID=None,
                                fileCatalog=None):
        """Performs the transfer and register operation with failover.

        Tries each SE in destinationSEList in turn; on upload success with a
        failed registration, a registration request is set instead.

        :return: S_OK with upload metadata, or S_ERROR if no SE succeeded
        """
        errorList = []
        for se in destinationSEList:
            self.log.info(
                'Attempting rm.putAndRegister("%s","%s","%s",guid="%s",catalog="%s")'
                % (lfn, localPath, se, fileGUID, fileCatalog))
            result = self.rm.putAndRegister(lfn,
                                            localPath,
                                            se,
                                            guid=fileGUID,
                                            catalog=fileCatalog)
            self.log.verbose(result)
            if not result['OK']:
                self.log.error('rm.putAndRegister failed with message',
                               result['Message'])
                errorList.append(result['Message'])
                continue
            if not result['Value']['Failed']:
                self.log.info(
                    'rm.putAndRegister successfully uploaded %s to %s' %
                    (fileName, se))
                return S_OK({'uploadedSE': se, 'lfn': lfn})

            # Now we know something went wrong
            errorDict = result['Value']['Failed'][lfn]
            if 'register' not in errorDict:
                self.log.error('rm.putAndRegister failed with unknown error',
                               str(errorDict))
                errorList.append(
                    'Unknown error while attempting upload to %s' % se)
                continue

            fileDict = errorDict['register']
            # Therefore the registration failed but the upload was successful
            if not fileCatalog:
                fileCatalog = ''
            result = self.__setRegistrationRequest(fileDict['LFN'], se,
                                                   fileCatalog, fileDict)
            if not result['OK']:
                self.log.error(
                    'Failed to set registration request for: SE %s and metadata: \n%s'
                    % (se, fileDict))
                errorList.append(
                    'Failed to set registration request for: SE %s and metadata: \n%s'
                    % (se, fileDict))
                continue
            else:
                self.log.info(
                    'Successfully set registration request for: SE %s and metadata: \n%s'
                    % (se, fileDict))
                metadata = {}
                metadata['filedict'] = fileDict
                metadata['uploadedSE'] = se
                metadata['lfn'] = lfn
                metadata['registration'] = 'request'
                return S_OK(metadata)

        self.log.error(
            'Encountered %s errors during attempts to upload output data' %
            len(errorList))
        return S_ERROR('Failed to upload output data file')

    #############################################################################
    def transferAndRegisterFileFailover(self,
                                        fileName,
                                        localPath,
                                        lfn,
                                        targetSE,
                                        failoverSEList,
                                        fileGUID=None,
                                        fileCatalog=None):
        """Performs the transfer and register operation to failover storage
           and sets the necessary replication and removal requests to recover.
        """
        failover = self.transferAndRegisterFile(fileName, localPath, lfn,
                                                failoverSEList, fileGUID,
                                                fileCatalog)
        if not failover['OK']:
            self.log.error('Could not upload file to failover SEs',
                           failover['Message'])
            return failover

        # set removal requests and replication requests
        result = self.__setFileReplicationRequest(lfn, targetSE)
        if not result['OK']:
            self.log.error('Could not set file replication request',
                           result['Message'])
            return result

        lfn = failover['Value']['lfn']
        failoverSE = failover['Value']['uploadedSE']
        self.log.info(
            'Attempting to set replica removal request for LFN %s at failover SE %s'
            % (lfn, failoverSE))
        result = self.__setReplicaRemovalRequest(lfn, failoverSE)
        if not result['OK']:
            self.log.error('Could not set removal request',
                           result['Message'])
            return result

        return S_OK('%s uploaded to a failover SE' % fileName)

    #############################################################################
    def getRequestObject(self):
        """Returns the potentially modified request object in order to
           propagate changes.
        """
        return S_OK(self.request)

    #############################################################################
    def __setFileReplicationRequest(self, lfn, se):
        """ Sets a replication request for lfn to SE se. """
        self.log.info('Setting replication request for %s to %s' % (lfn, se))
        result = self.request.addSubRequest(
            {
                'Attributes': {
                    'Operation': 'replicateAndRegister',
                    'TargetSE': se,
                    'ExecutionOrder': 0
                }
            }, 'transfer')
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'transfer', [fileDict])
        return S_OK()

    #############################################################################
    def __setRegistrationRequest(self, lfn, se, catalog, fileDict):
        """ Sets a registration request for lfn at SE se in the given catalog. """
        self.log.info('Setting registration request for %s at %s.' % (lfn, se))
        result = self.request.addSubRequest(
            {
                'Attributes': {
                    'Operation': 'registerFile',
                    'ExecutionOrder': 0,
                    'TargetSE': se,
                    'Catalogue': catalog
                }
            }, 'register')
        if not result['OK']:
            return result
        index = result['Value']
        if 'Status' not in fileDict:
            fileDict['Status'] = 'Waiting'
        self.request.setSubRequestFiles(index, 'register', [fileDict])
        return S_OK()

    #############################################################################
    def __setReplicaRemovalRequest(self, lfn, se):
        """ Sets a removal request for a replica. """
        result = self.request.addSubRequest(
            {
                'Attributes': {
                    'Operation': 'replicaRemoval',
                    'TargetSE': se,
                    'ExecutionOrder': 1
                }
            }, 'removal')
        # Check the result before using 'Value' (consistent with the other
        # request setters above; previously an error here raised a KeyError).
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()

    #############################################################################
    def __setFileRemovalRequest(self, lfn, se='', pfn=''):
        """ Sets a removal request for a file including all replicas. """
        result = self.request.addSubRequest(
            {
                'Attributes': {
                    'Operation': 'removeFile',
                    'TargetSE': se,
                    'ExecutionOrder': 1
                }
            }, 'removal')
        # Same defensive check as __setReplicaRemovalRequest.
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'PFN': pfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()
def getRequest(self, requestType):
    """ Obtain a request from the database of a certain type.

    Selects the next pending request of `requestType` from the "ToDo"
    directory (oldest first by ctime), marks it 'Assigned', and returns
    its serialised string, name and JobID.

    :param str requestType: type of request to fetch
    :return: S_OK() when no request is pending, S_OK(resDict) with keys
             'RequestString', 'RequestName', 'JobID' on success, S_ERROR
             on any failure.

    NOTE(review): self.getIdLock is acquired here and released explicitly on
    each exit path, but an unexpected exception between acquire() and
    release() falls through to the outer except, which returns WITHOUT
    releasing the lock — confirm whether a try/finally is needed.
    """
    gLogger.info(
        "RequestDBFile._getRequest: Attempting to get %s type request." %
        requestType)
    try:
        # Determine the request name to be obtained
        candidateRequests = []
        reqDir = os.path.join(self.root, requestType, "ToDo")
        self.getIdLock.acquire()
        if os.path.exists(reqDir):
            # candidate request files, ordered oldest-first by creation time
            candidateRequests = [
                os.path.basename(requestFile)
                for requestFile in sorted(filter(os.path.isfile, [
                    os.path.join(reqDir, requestName)
                    for requestName in os.listdir(reqDir)
                ]), key=os.path.getctime)
            ]
        if not len(candidateRequests) > 0:
            self.getIdLock.release()
            gLogger.info(
                "RequestDBFile._getRequest: No request of type %s found." %
                requestType)
            return S_OK()

        # Select a request
        if not self.lastRequest.has_key(requestType):
            self.lastRequest[requestType] = ('', 0)
        lastRequest, lastRequestIndex = self.lastRequest[requestType]
        res = self.__selectRequestCursor(candidateRequests, lastRequest,
                                         lastRequestIndex)
        if not res['OK']:
            self.getIdLock.release()
            errStr = "RequestDBFile._getRequest: Failed to get request cursor."
            gLogger.error(errStr, res['Message'])
            return S_ERROR(errStr)
        selectedRequestName, selectedRequestIndex = res['Value']

        # Obtain the string for the selected request
        res = self.__getRequestString(selectedRequestName)
        if not res['OK']:
            self.getIdLock.release()
            errStr = "RequestDBFile._getRequest: Failed to get request string for %s." % selectedRequestName
            gLogger.error(errStr, res['Message'])
            return S_ERROR(errStr)
        selectedRequestString = res['Value']

        # Set the request status to assigned
        res = self.setRequestStatus(selectedRequestName, 'Assigned')
        if not res['OK']:
            self.getIdLock.release()
            errStr = "RequestDBFile._getRequest: Failed to set %s status to 'Assigned'." % selectedRequestName
            gLogger.error(errStr, res['Message'])
            return S_ERROR(errStr)

        # Update the request cursor and return the selected request
        self.lastRequest[requestType] = (selectedRequestName,
                                         selectedRequestIndex)
        self.getIdLock.release()
        gLogger.info(
            "RequestDBFile._getRequest: Successfully obtained %s request." %
            selectedRequestName)

        # best-effort JobID extraction; failure leaves 'Unknown'
        jobID = 'Unknown'
        try:
            oRequest = RequestContainer(request=selectedRequestString)
            jobID = oRequest.getJobID()['Value']
        except:
            gLogger.exception('Could not get JobID from Request')
        resDict = {
            'RequestString': selectedRequestString,
            'RequestName': selectedRequestName,
            'JobID': jobID
        }
        return S_OK(resDict)
    except Exception, x:
        errStr = "RequestDBFile._getRequest: Exception while getting request."
        gLogger.exception(errStr, requestType, lException=x)
        return S_ERROR(errStr)
def setUp(self):
    # unittest hook: give every test_* method a fresh RequestContainer
    self.reqContainer = RequestContainer()
def test_addSubRequest(self):
    """Exercise RequestContainer.addSubRequest / setSubRequestFiles and check
    the internal subRequests structure after each mutation:
    defaults filled in, ExecutionOrder 'last' resolution (per container and
    across types), and explicit ExecutionOrder derived from _getLastOrder.
    """
    rc_o = RequestContainer()

    # 1) first sub-request: missing attributes get default values
    op1_Index = rc_o.addSubRequest(
        {'Attributes': {'SubRequestID': 'x',
                        'CreationTime': '2012-06-06 14:53:43.763743',
                        'Operation': 'op1'}}, 'someType')
    op1_Index = op1_Index['Value']
    subRequestExpected = {'someType': [{'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []}]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)

    # 2) second sub-request of the same type is appended, also ExecutionOrder 0
    op2_index = rc_o.addSubRequest(
        {'Attributes': {'SubRequestID': 'x',
                        'CreationTime': '2012-06-06 14:53:43.763743',
                        'Operation': 'op2'}}, 'someType')
    op2_index = op2_index['Value']
    subRequestExpected = {'someType': [{'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op2'},
                                        'Datasets': []}]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)

    # 3) ExecutionOrder 'last' resolves to 1 within the same type,
    #    Operation defaults to ''
    rc_o.addSubRequest(
        {'Attributes': {'SubRequestID': 'x',
                        'CreationTime': '2012-06-06 14:53:43.763743',
                        'ExecutionOrder': 'last'}}, 'someType')
    subRequestExpected = {'someType': [{'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op2'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 1,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': ''},
                                        'Datasets': []}]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)

    # 4) 'last' counts sub-requests across ALL types: new type starts at 2
    rc_o.addSubRequest(
        {'Attributes': {'SubRequestID': 'x',
                        'CreationTime': '2012-06-06 14:53:43.763743',
                        'ExecutionOrder': 'last'}}, 'someOtherType')
    subRequestExpected = {'someType': [{'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op2'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 1,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': ''},
                                        'Datasets': []}],
                          'someOtherType': [{'Files': [],
                                             'Attributes': {'Status': 'Waiting',
                                                            'LastUpdate': '',
                                                            'TargetSE': '',
                                                            'ExecutionOrder': 2,
                                                            'SubRequestID': 'x',
                                                            'CreationTime': '2012-06-06 14:53:43.763743',
                                                            'Catalogue': '',
                                                            'Error': '',
                                                            'Operation': ''},
                                             'Datasets': []},
                                            ]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)

    # 5) attach a file to the first sub-request via its index
    fileDict = {'LFN': 'foo', 'Status': 'Waiting'}
    rc_o.setSubRequestFiles(op1_Index, 'someType', [fileDict])
    subRequestExpected = {'someType': [{'Files': [{'LFN': 'foo', 'Status': 'Waiting'}],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op2'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 1,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': ''},
                                        'Datasets': []}],
                          'someOtherType': [{'Files': [],
                                             'Attributes': {'Status': 'Waiting',
                                                            'LastUpdate': '',
                                                            'TargetSE': '',
                                                            'ExecutionOrder': 2,
                                                            'SubRequestID': 'x',
                                                            'CreationTime': '2012-06-06 14:53:43.763743',
                                                            'Catalogue': '',
                                                            'Error': '',
                                                            'Operation': ''},
                                             'Datasets': []},
                                            ]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)

    # 6) explicit ExecutionOrder computed from the last order of file 'foo'
    fileLastOp = rc_o._getLastOrder('foo')
    rc_o.addSubRequest(
        {'Attributes': {'SubRequestID': 'x',
                        'CreationTime': '2012-06-06 14:53:43.763743',
                        'ExecutionOrder': fileLastOp + 1}}, 'someOtherType')
    subRequestExpected = {'someType': [{'Files': [{'LFN': 'foo', 'Status': 'Waiting'}],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op1'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 0,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': 'op2'},
                                        'Datasets': []},
                                       {'Files': [],
                                        'Attributes': {'Status': 'Waiting',
                                                       'LastUpdate': '',
                                                       'TargetSE': '',
                                                       'ExecutionOrder': 1,
                                                       'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '',
                                                       'Error': '',
                                                       'Operation': ''},
                                        'Datasets': []}],
                          'someOtherType': [{'Files': [],
                                             'Attributes': {'Status': 'Waiting',
                                                            'LastUpdate': '',
                                                            'TargetSE': '',
                                                            'ExecutionOrder': 2,
                                                            'SubRequestID': 'x',
                                                            'CreationTime': '2012-06-06 14:53:43.763743',
                                                            'Catalogue': '',
                                                            'Error': '',
                                                            'Operation': ''},
                                             'Datasets': []},
                                            {'Files': [],
                                             'Attributes': {'Status': 'Waiting',
                                                            'LastUpdate': '',
                                                            'TargetSE': '',
                                                            'ExecutionOrder': 1,
                                                            'SubRequestID': 'x',
                                                            'CreationTime': '2012-06-06 14:53:43.763743',
                                                            'Catalogue': '',
                                                            'Error': '',
                                                            'Operation': ''},
                                             'Datasets': []},
                                            ]}
    self.assertEqual(rc_o.subRequests, subRequestExpected)
def executeRequest(self):
    """ Do the actual work in the Thread.

    Fetches one 'removal' request from the request DB, executes each Waiting
    sub-request whose ExecutionOrder is due (operations: physicalRemoval,
    removeFile, replicaRemoval, reTransfer), marks processed files 'Done',
    and writes the updated request back. On any exception the ORIGINAL
    request string is returned to the server unchanged.
    """
    ################################################
    # Get a request from request DB
    gMonitor.addMark("Iteration", 1)
    res = self.requestDBClient.getRequest('removal')
    if not res['OK']:
        gLogger.info("RemovalAgent.execute: Failed to get request from database.")
        return S_OK()
    elif not res['Value']:
        gLogger.info("RemovalAgent.execute: No requests to be executed found.")
        self.pendingRequests = False
        return S_OK()
    requestString = res['Value']['RequestString']
    requestName = res['Value']['RequestName']
    sourceServer = res['Value']['Server']
    # JobID is optional in the request; default to 0 (no job finalization)
    jobID = 0
    try:
        jobID = int(res['Value']['JobID'])
    except:
        gLogger.warn("RemovalAgent.execute: JobID not present or malformed in request '%s', will use 0 instead." % requestName)
    gLogger.info("RemovalAgent.execute: Obtained request %s" % requestName)

    try:
        result = self.requestDBClient.getCurrentExecutionOrder(requestName, sourceServer)
        if result['OK']:
            currentOrder = result['Value']
        else:
            # cannot determine ordering: hand the request back untouched
            gLogger.error('Can not get the request execution order')
            self.requestDBClient.updateRequest(requestName, requestString, sourceServer)
            return S_OK('Can not get the request execution order')

        oRequest = RequestContainer(request=requestString)

        ################################################
        # Find the number of sub-requests from the request
        res = oRequest.getNumSubRequests('removal')
        if not res['OK']:
            errStr = "RemovalAgent.execute: Failed to obtain number of removal subrequests."
            gLogger.error(errStr, res['Message'])
            return S_OK()
        gLogger.info("RemovalAgent.execute: Found %s sub requests." % res['Value'])

        ################################################
        # For all the sub-requests in the request
        modified = False
        for ind in range(res['Value']):
            gMonitor.addMark("Execute", 1)
            gLogger.info("RemovalAgent.execute: Processing sub-request %s." % ind)
            subRequestAttributes = oRequest.getSubRequestAttributes(ind, 'removal')['Value']
            subExecutionOrder = int(subRequestAttributes['ExecutionOrder'])
            subStatus = subRequestAttributes['Status']
            # only Waiting sub-requests whose order has been reached
            if subStatus == 'Waiting' and subExecutionOrder <= currentOrder:
                subRequestFiles = oRequest.getSubRequestFiles(ind, 'removal')['Value']
                operation = subRequestAttributes['Operation']

                ################################################
                # If the sub-request is a physical removal operation
                if operation == 'physicalRemoval':
                    gLogger.info("RemovalAgent.execute: Attempting to execute %s sub-request." % operation)
                    diracSEs = subRequestAttributes['TargetSE'].split(',')
                    physicalFiles = []
                    pfnToLfn = {}
                    for subRequestFile in subRequestFiles:
                        if subRequestFile['Status'] == 'Waiting':
                            pfn = str(subRequestFile['PFN'])
                            lfn = str(subRequestFile['LFN'])
                            pfnToLfn[pfn] = lfn
                            physicalFiles.append(pfn)
                    gMonitor.addMark('PhysicalRemovalAtt', len(physicalFiles))
                    # failed: pfn -> {SE: error}; errMsg: SE -> message
                    failed = {}
                    errMsg = {}
                    for diracSE in diracSEs:
                        res = self.replicaManager.removeStorageFile(physicalFiles, diracSE)
                        if res['OK']:
                            for pfn in res['Value']['Failed'].keys():
                                if not failed.has_key(pfn):
                                    failed[pfn] = {}
                                failed[pfn][diracSE] = res['Value']['Failed'][pfn]
                        else:
                            # whole-SE failure: mark every pfn as 'Completely' failed
                            errMsg[diracSE] = res['Message']
                            for pfn in physicalFiles:
                                if not failed.has_key(pfn):
                                    failed[pfn] = {}
                                failed[pfn][diracSE] = 'Completely'
                    # Now analyse the results
                    failedPFNs = failed.keys()
                    pfnsOK = [pfn for pfn in physicalFiles if not pfn in failedPFNs]
                    gMonitor.addMark('PhysicalRemovalDone', len(pfnsOK))
                    for pfn in pfnsOK:
                        gLogger.info("RemovalAgent.execute: Successfully removed %s at %s" % (pfn, str(diracSEs)))
                        res = oRequest.setSubRequestFileAttributeValue(ind, 'removal', pfnToLfn[pfn], 'Status', 'Done')
                        if not res['OK']:
                            gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', pfnToLfn[pfn]))
                        modified = True
                    if failed:
                        gMonitor.addMark('PhysicalRemovalFail', len(failedPFNs))
                        for pfn in failedPFNs:
                            for diracSE in failed[pfn].keys():
                                if type(failed[pfn][diracSE]) in StringTypes:
                                    # "no such file" counts as successful removal
                                    if re.search('no such file or directory', failed[pfn][diracSE].lower()):
                                        gLogger.info("RemovalAgent.execute: File did not exist.", pfn)
                                        res = oRequest.setSubRequestFileAttributeValue(ind, 'removal', pfnToLfn[pfn], 'Status', 'Done')
                                        if not res['OK']:
                                            gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', pfnToLfn[pfn]))
                                        modified = True
                                    else:
                                        gLogger.info("RemovalAgent.execute: Failed to remove file.", "%s at %s - %s" % (pfn, diracSE, failed[pfn][diracSE]))
                    if errMsg:
                        for diracSE in errMsg.keys():
                            # NOTE(review): this assigns a TUPLE to errStr (comma, not %) —
                            # the log line prints the tuple; likely an unintended comma.
                            errStr = "RemovalAgent.execute: Completely failed to remove replicas. At %s", diracSE
                            gLogger.error(errStr, errMsg[diracSE])

                ################################################
                # If the sub-request is a physical removal operation
                elif operation == 'removeFile':
                    gLogger.info("RemovalAgent.execute: Attempting to execute %s sub-request." % operation)
                    lfns = []
                    for subRequestFile in subRequestFiles:
                        if subRequestFile['Status'] == 'Waiting':
                            lfn = str(subRequestFile['LFN'])
                            lfns.append(lfn)
                    gMonitor.addMark('RemoveFileAtt', len(lfns))
                    res = self.replicaManager.removeFile(lfns)
                    if res['OK']:
                        gMonitor.addMark('RemoveFileDone', len(res['Value']['Successful'].keys()))
                        for lfn in res['Value']['Successful'].keys():
                            gLogger.info("RemovalAgent.execute: Successfully removed %s." % lfn)
                            result = oRequest.setSubRequestFileAttributeValue(ind, 'removal', lfn, 'Status', 'Done')
                            if not result['OK']:
                                gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', lfn))
                            modified = True
                        gMonitor.addMark('RemoveFileFail', len(res['Value']['Failed'].keys()))
                        for lfn in res['Value']['Failed'].keys():
                            if type(res['Value']['Failed'][lfn]) in StringTypes:
                                # "no such file" counts as successful removal
                                if re.search('no such file or directory', res['Value']['Failed'][lfn].lower()):
                                    gLogger.info("RemovalAgent.execute: File did not exist.", lfn)
                                    result = oRequest.setSubRequestFileAttributeValue(ind, 'removal', lfn, 'Status', 'Done')
                                    if not result['OK']:
                                        gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', lfn))
                                    modified = True
                                else:
                                    gLogger.info("RemovalAgent.execute: Failed to remove file:", "%s %s" % (lfn, res['Value']['Failed'][lfn]))
                    else:
                        gMonitor.addMark('RemoveFileFail', len(lfns))
                        errStr = "RemovalAgent.execute: Completely failed to remove files files."
                        gLogger.error(errStr, res['Message'])

                ################################################
                # If the sub-request is a physical removal operation
                elif operation == 'replicaRemoval':
                    gLogger.info("RemovalAgent.execute: Attempting to execute %s sub-request." % operation)
                    diracSEs = subRequestAttributes['TargetSE'].split(',')
                    lfns = []
                    for subRequestFile in subRequestFiles:
                        if subRequestFile['Status'] == 'Waiting':
                            lfn = str(subRequestFile['LFN'])
                            lfns.append(lfn)
                    gMonitor.addMark('ReplicaRemovalAtt', len(lfns))
                    # failed: lfn -> {SE: error}; errMsg: SE -> message
                    failed = {}
                    errMsg = {}
                    for diracSE in diracSEs:
                        res = self.replicaManager.removeReplica(diracSE, lfns)
                        if res['OK']:
                            for lfn in res['Value']['Failed'].keys():
                                errorMessage = str(res['Value']['Failed'][lfn])
                                if errorMessage.find('Write access not permitted for this credential.') != -1:
                                    # retry with an owner proxy; skip bookkeeping if that worked
                                    if self.__getProxyAndRemoveReplica(diracSE, lfn):
                                        continue
                                if errorMessage.find('seconds timeout for "__gfal_wrapper" call') != -1:
                                    self.timeOutCounter += 1
                                if not failed.has_key(lfn):
                                    failed[lfn] = {}
                                failed[lfn][diracSE] = res['Value']['Failed'][lfn]
                        else:
                            errMsg[diracSE] = res['Message']
                            for lfn in lfns:
                                if not failed.has_key(lfn):
                                    failed[lfn] = {}
                                failed[lfn][diracSE] = 'Completely'
                    # Now analyse the results
                    failedLFNs = failed.keys()
                    lfnsOK = [lfn for lfn in lfns if not lfn in failedLFNs]
                    gMonitor.addMark('ReplicaRemovalDone', len(lfnsOK))
                    for lfn in lfnsOK:
                        gLogger.info("RemovalAgent.execute: Successfully removed %s at %s" % (lfn, str(diracSEs)))
                        res = oRequest.setSubRequestFileAttributeValue(ind, 'removal', lfn, 'Status', 'Done')
                        if not res['OK']:
                            gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', lfn))
                        modified = True
                    if failed:
                        gMonitor.addMark('PhysicalRemovalFail', len(failedLFNs))
                        for lfn in failedLFNs:
                            for diracSE in failed[lfn].keys():
                                if type(failed[lfn][diracSE]) in StringTypes:
                                    if re.search('no such file or directory', failed[lfn][diracSE].lower()):
                                        gLogger.info("RemovalAgent.execute: File did not exist.", lfn)
                                        res = oRequest.setSubRequestFileAttributeValue(ind, 'removal', lfn, 'Status', 'Done')
                                        if not res['OK']:
                                            gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', lfn))
                                        modified = True
                                    else:
                                        gLogger.info("RemovalAgent.execute: Failed to remove file.", "%s at %s - %s" % (lfn, diracSE, failed[lfn][diracSE]))
                    if errMsg:
                        for diracSE in errMsg.keys():
                            # NOTE(review): tuple assignment again (comma, not %)
                            errStr = "RemovalAgent.execute: Completely failed to remove replicas. At %s", diracSE
                            gLogger.error(errStr, errMsg[diracSE])

                ################################################
                # If the sub-request is a request to the online system to retransfer
                elif operation == 'reTransfer':
                    gLogger.info("RemovalAgent.execute: Attempting to execute %s sub-request." % operation)
                    diracSE = subRequestAttributes['TargetSE']
                    for subRequestFile in subRequestFiles:
                        if subRequestFile['Status'] == 'Waiting':
                            pfn = str(subRequestFile['PFN'])
                            lfn = str(subRequestFile['LFN'])
                            res = self.replicaManager.onlineRetransfer(diracSE, pfn)
                            if res['OK']:
                                if res['Value']['Successful'].has_key(pfn):
                                    gLogger.info("RemovalAgent.execute: Successfully requested retransfer of %s." % pfn)
                                    result = oRequest.setSubRequestFileAttributeValue(ind, 'removal', lfn, 'Status', 'Done')
                                    if not result['OK']:
                                        gLogger.error("RemovalAgent.execute: Error setting status to %s for %s" % ('Done', lfn))
                                    modified = True
                                else:
                                    errStr = "RemovalAgent.execute: Failed to request retransfer."
                                    gLogger.error(errStr, "%s %s %s" % (pfn, diracSE, res['Value']['Failed'][pfn]))
                            else:
                                errStr = "RemovalAgent.execute: Completely failed to request retransfer."
                                gLogger.error(errStr, res['Message'])
                        else:
                            gLogger.info("RemovalAgent.execute: File already completed.")

                ################################################
                # If the sub-request is none of the above types
                else:
                    gLogger.error("RemovalAgent.execute: Operation not supported.", operation)

                ################################################
                # Determine whether there are any active files
                if oRequest.isSubRequestEmpty(ind, 'removal')['Value']:
                    oRequest.setSubRequestStatus(ind, 'removal', 'Done')
                    gMonitor.addMark("Done", 1)

            ################################################
            # If the sub-request is already in terminal state
            else:
                gLogger.info("RemovalAgent.execute:", "Sub-request %s is status '%s' and not to be executed." % (ind, subRequestAttributes['Status']))

        ################################################
        # Generate the new request string after operation
        newrequestString = oRequest.toXML()['Value']
    except:
        # if something fails return the original request back to the server
        res = self.requestDBClient.updateRequest(requestName, requestString, sourceServer)
        return S_OK()

    res = self.requestDBClient.updateRequest(requestName, newrequestString, sourceServer)
    if modified and jobID:
        result = self.finalizeRequest(requestName, jobID, sourceServer)
    return S_OK()
def execute(self):
    """Process one 'integrity' request: for each Waiting LFCvsSE sub-request,
    walk the catalog namespace under each file's LFN, compare catalog sizes
    against storage-element sizes, and insert problematic files
    (MissingSEPfn / PfnSizeMismatch) into the DataIntegrity DB.
    The updated request is written back to the server at the end.
    """
    res = self.RequestDBClient.getRequest('integrity')
    if not res['OK']:
        gLogger.info("LFCvsSEAgent.execute: Failed to get request from database.")
        return S_OK()
    elif not res['Value']:
        gLogger.info("LFCvsSEAgent.execute: No requests to be executed found.")
        return S_OK()
    requestString = res['Value']['RequestString']
    requestName = res['Value']['RequestName']
    sourceServer = res['Value']['Server']
    gLogger.info("LFCvsSEAgent.execute: Obtained request %s" % requestName)
    oRequest = RequestContainer(request=requestString)

    ################################################
    # Find the number of sub-requests from the request
    res = oRequest.getNumSubRequests('integrity')
    if not res['OK']:
        errStr = "LFCvsSEAgent.execute: Failed to obtain number of integrity subrequests."
        gLogger.error(errStr, res['Message'])
        return S_OK()
    gLogger.info("LFCvsSEAgent.execute: Found %s sub requests." % res['Value'])

    ################################################
    # For all the sub-requests in the request
    for ind in range(res['Value']):
        gLogger.info("LFCvsSEAgent.execute: Processing sub-request %s." % ind)
        subRequestAttributes = oRequest.getSubRequestAttributes(ind, 'integrity')['Value']
        if subRequestAttributes['Status'] == 'Waiting':
            subRequestFiles = oRequest.getSubRequestFiles(ind, 'integrity')['Value']
            operation = subRequestAttributes['Operation']

            ################################################
            # If the sub-request is a lfcvsse operation
            if operation == 'LFCvsSE':
                gLogger.info("LFCvsSEAgent.execute: Attempting to execute %s sub-request." % operation)
                for subRequestFile in subRequestFiles:
                    if subRequestFile['Status'] == 'Waiting':
                        lfn = subRequestFile['LFN']
                        oNamespaceBrowser = NamespaceBrowser(lfn)
                        # Loop over all the directories and sub-directories
                        while (oNamespaceBrowser.isActive()):
                            currentDir = oNamespaceBrowser.getActiveDir()
                            gLogger.info("LFCvsSEAgent.execute: Attempting to get contents of %s." % currentDir)
                            res = self.ReplicaManager.getCatalogDirectoryContents(currentDir)
                            if not res['OK']:
                                # listing failed: requeue the same directory for retry
                                subDirs = [currentDir]
                            elif res['Value']['Failed'].has_key(currentDir):
                                subDirs = [currentDir]
                            else:
                                subDirs = res['Value']['Successful'][currentDir]['SubDirs']
                                files = res['Value']['Successful'][currentDir]['Files']
                                # index catalog metadata: sizes per LFN and
                                # replica PFNs grouped per storage element
                                lfnSizeDict = {}
                                pfnLfnDict = {}
                                pfnStatusDict = {}
                                sePfnDict = {}
                                # NOTE(review): this loop rebinds `lfn`, which is also used
                                # after the while-loop to flag the sub-request file Done —
                                # confirm that is intended.
                                for lfn, lfnDict in files.items():
                                    lfnSizeDict[lfn] = lfnDict['MetaData']['Size']
                                    for se in lfnDict['Replicas'].keys():
                                        pfn = lfnDict['Replicas'][se]['PFN']
                                        status = lfnDict['Replicas'][se]['Status']
                                        pfnStatusDict[pfn] = status
                                        pfnLfnDict[pfn] = lfn
                                        if not sePfnDict.has_key(se):
                                            sePfnDict[se] = []
                                        sePfnDict[se].append(pfn)
                                # check each SE's replicas against the catalog
                                for storageElementName, physicalFiles in sePfnDict.items():
                                    gLogger.info("LFCvsSEAgent.execute: Attempting to get metadata for files on %s." % storageElementName)
                                    res = self.ReplicaManager.getStorageFileMetadata(physicalFiles, storageElementName)
                                    if not res['OK']:
                                        gLogger.error("LFCvsSEAgent.execute: Completely failed to get physical file metadata.", res['Message'])
                                    else:
                                        # PFNs with no SE metadata -> missing on storage
                                        for pfn in res['Value']['Failed'].keys():
                                            gLogger.error("LFCvsSEAgent.execute: Failed to get metadata.", "%s %s" % (pfn, res['Value']['Failed'][pfn]))
                                            lfn = pfnLfnDict[pfn]
                                            fileMetadata = {'Prognosis': 'MissingSEPfn', 'LFN': lfn, 'PFN': pfn, 'StorageElement': storageElementName, 'Size': lfnSizeDict[lfn]}
                                            IntegrityDB = RPCClient('DataManagement/DataIntegrity')
                                            resInsert = IntegrityDB.insertProblematic(AGENT_NAME, fileMetadata)
                                            if resInsert['OK']:
                                                gLogger.info("LFCvsSEAgent.execute: Successfully added to IntegrityDB.")
                                                gLogger.error("Change the status in the LFC,ProcDB....")
                                            else:
                                                gLogger.error("Shit, f**k, bugger. Add the failover.")
                                        # PFNs with metadata: compare sizes
                                        for pfn, pfnDict in res['Value']['Successful'].items():
                                            lfn = pfnLfnDict[pfn]
                                            catalogSize = int(lfnSizeDict[lfn])
                                            storageSize = int(pfnDict['Size'])
                                            if int(catalogSize) == int(storageSize):
                                                gLogger.info("LFCvsSEAgent.execute: Catalog and storage sizes match.", "%s %s" % (pfn, storageElementName))
                                                gLogger.info("Change the status in the LFC")
                                            else:
                                                gLogger.error("LFCvsSEAgent.execute: Catalog and storage size mis-match.", "%s %s" % (pfn, storageElementName))
                                                fileMetadata = {'Prognosis': 'PfnSizeMismatch', 'LFN': lfn, 'PFN': pfn, 'StorageElement': storageElementName}
                                                IntegrityDB = RPCClient('DataManagement/DataIntegrity')
                                                resInsert = IntegrityDB.insertProblematic(AGENT_NAME, fileMetadata)
                                                if resInsert['OK']:
                                                    gLogger.info("LFCvsSEAgent.execute: Successfully added to IntegrityDB.")
                                                    gLogger.error("Change the status in the LFC,ProcDB....")
                                                else:
                                                    gLogger.error("Shit, f**k, bugger. Add the failover.")
                            # advance the namespace walk with the discovered sub-dirs
                            oNamespaceBrowser.updateDirs(subDirs)
                        oRequest.setSubRequestFileAttributeValue(ind, 'integrity', lfn, 'Status', 'Done')

            ################################################
            # If the sub-request is none of the above types
            else:
                gLogger.info("LFCvsSEAgent.execute: Operation not supported.", operation)

            ################################################
            # Determine whether there are any active files
            if oRequest.isSubRequestEmpty(ind, 'integrity')['Value']:
                oRequest.setSubRequestStatus(ind, 'integrity', 'Done')

        ################################################
        # If the sub-request is already in terminal state
        else:
            gLogger.info("LFCvsSEAgent.execute: Sub-request %s is status '%s' and not to be executed." % (ind, subRequestAttributes['Status']))

    ################################################
    # Generate the new request string after operation
    requestString = oRequest.toXML()['Value']
    res = self.RequestDBClient.updateRequest(requestName, requestString, sourceServer)
    return S_OK()
# NOTE(review): the two prints below arrive mid-chunk — they presumably belong
# to an error branch of preceding (not visible) option-parsing code; confirm
# their indentation against the full script.
print se.errorReason
print Script.showHelp()

from DIRAC.RequestManagementSystem.Client.RequestContainer import RequestContainer
from DIRAC.RequestManagementSystem.Client.RequestClient import RequestClient
requestClient = RequestClient()
requestType = 'removal'
requestOperation = 'replicaRemoval'
# targetSE 'All' means remove the file everywhere, not a single replica
if targetSE == 'All':
    requestOperation = 'removeFile'
# submit one removal request per chunk of 100 LFNs
for lfnList in breakListIntoChunks(lfns, 100):
    oRequest = RequestContainer()
    subRequestIndex = oRequest.initiateSubRequest(requestType)['Value']
    attributeDict = {'Operation': requestOperation, 'TargetSE': targetSE}
    oRequest.setSubRequestAttributes(subRequestIndex, requestType, attributeDict)
    files = []
    for lfn in lfnList:
        files.append({'LFN': lfn})
    oRequest.setSubRequestFiles(subRequestIndex, requestType, files)
    # pseudo-random request name built from md5 of the current time
    requestName = "%s_%s" % (md5(repr(time.time())).hexdigest()[:16],
                             md5(repr(time.time())).hexdigest()[:16])
    oRequest.setRequestAttributes({'RequestName': requestName})
    DIRAC.gLogger.info(oRequest.toXML()['Value'])
    result = requestClient.setRequest(requestName, oRequest.toXML()['Value'])
def execute( self ):
  """ Process one pending 'integrity' request from the RequestDB.

      For every waiting 'SEvsLFC' sub-request the storage element contents
      are walked directory by directory; each physical file is checked
      against the file catalogue (existence, size, replica registration) and
      every inconsistency found is inserted into the
      DataManagement/DataIntegrity service.  The possibly updated request is
      finally written back to the RequestDB.

      Always returns S_OK() — failures are logged, not propagated.
  """
  IntegrityDB = RPCClient( 'DataManagement/DataIntegrity' )
  # Fetch one pending 'integrity' request; nothing to do if none is waiting
  res = self.RequestDBClient.getRequest( 'integrity' )
  if not res['OK']:
    gLogger.info( "SEvsLFCAgent.execute: Failed to get request from database." )
    return S_OK()
  elif not res['Value']:
    gLogger.info( "SEvsLFCAgent.execute: No requests to be executed found." )
    return S_OK()
  requestString = res['Value']['requestString']
  requestName = res['Value']['requestName']
  sourceServer = res['Value']['Server']
  gLogger.info( "SEvsLFCAgent.execute: Obtained request %s" % requestName )
  oRequest = RequestContainer( request = requestString )
  ################################################
  # Find the number of sub-requests from the request
  res = oRequest.getNumSubRequests( 'integrity' )
  if not res['OK']:
    errStr = "SEvsLFCAgent.execute: Failed to obtain number of integrity subrequests."
    gLogger.error( errStr, res['Message'] )
    return S_OK()
  gLogger.info( "SEvsLFCAgent.execute: Found %s sub requests." % res['Value'] )
  ################################################
  # For all the sub-requests in the request
  for ind in range( res['Value'] ):
    gLogger.info( "SEvsLFCAgent.execute: Processing sub-request %s." % ind )
    subRequestAttributes = oRequest.getSubRequestAttributes( ind, 'integrity' )['Value']
    if subRequestAttributes['Status'] == 'Waiting':
      subRequestFiles = oRequest.getSubRequestFiles( ind, 'integrity' )['Value']
      operation = subRequestAttributes['Operation']
      ################################################
      # If the sub-request is a lfcvsse operation
      if operation == 'SEvsLFC':
        gLogger.info( "SEvsLFCAgent.execute: Attempting to execute %s sub-request." % operation )
        storageElementName = subRequestAttributes['StorageElement']
        for subRequestFile in subRequestFiles:
          if subRequestFile['Status'] == 'Waiting':
            # The file's LFN is used as the root of the namespace walk
            lfn = subRequestFile['LFN']
            storageElement = StorageElement( storageElementName )
            res = storageElement.isValid()
            if not res['OK']:
              errStr = "SEvsLFCAgent.execute: Failed to instantiate destination StorageElement."
              gLogger.error( errStr, storageElement )
            else:
              res = storageElement.getPfnForLfn( lfn )
              if not res['OK']:
                gLogger.info( 'shit bugger do something.' )
              else:
                oNamespaceBrowser = NamespaceBrowser( res['Value'] )
                # Loop over all the directories and sub-directories
                while ( oNamespaceBrowser.isActive() ):
                  currentDir = oNamespaceBrowser.getActiveDir()
                  gLogger.info( "SEvsLFCAgent.execute: Attempting to list the contents of %s." % currentDir )
                  res = storageElement.listDirectory( currentDir )
                  if not res['Value']['Successful'].has_key( currentDir ):
                    # Listing failed: re-queue the directory for another pass.
                    # NOTE(review): the lookup res['Value']['Successful']['Failed'][currentDir]
                    # looks suspicious — presumably res['Value']['Failed'][currentDir]
                    # was intended; confirm against the listDirectory() contract.
                    gLogger.error( "SEvsLFCAgent.execute: Failed to list the directory contents.", "%s %s" % ( currentDir, res['Value']['Successful']['Failed'][currentDir] ) )
                    subDirs = [currentDir]
                  else:
                    # Normalise every returned SURL to the SRM2 form (no port)
                    # so it can be compared against catalogue entries
                    subDirs = []
                    files = {}
                    for surl, surlDict in res['Value']['Successful'][currentDir]['Files'].items():
                      pfnRes = storageElement.getPfnForProtocol( surl, 'SRM2', withPort = False )
                      surl = pfnRes['Value']
                      files[surl] = surlDict
                    for surl, surlDict in res['Value']['Successful'][currentDir]['SubDirs'].items():
                      pfnRes = storageElement.getPfnForProtocol( surl, 'SRM2', withPort = False )
                      surl = pfnRes['Value']
                      subDirs.append( surl )
                    #subDirs = res['Value']['Successful'][currentDir]['SubDirs']
                    gLogger.info( "SEvsLFCAgent.execute: Successfully obtained %s sub-directories." % len( subDirs ) )
                    #files = res['Value']['Successful'][currentDir]['Files']
                    gLogger.info( "SEvsLFCAgent.execute: Successfully obtained %s files." % len( files ) )
                    # Derive the LFN for every PFN found on storage; a PFN that
                    # does not map to an LFN is reported as 'NonConventionPfn'
                    selectedLfns = []
                    lfnPfnDict = {}
                    pfnSize = {}
                    for pfn, pfnDict in files.items():
                      res = storageElement.getPfnPath( pfn )
                      if not res['OK']:
                        gLogger.error( "SEvsLFCAgent.execute: Failed to get determine LFN from pfn.", "%s %s" % ( pfn, res['Message'] ) )
                        fileMetadata = {'Prognosis':'NonConventionPfn', 'LFN':'', 'PFN':pfn, 'StorageElement':storageElementName, 'Size':pfnDict['Size']}
                        res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                        if res['OK']:
                          gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                          gLogger.error( "Change the status in the LFC,ProcDB...." )
                        else:
                          gLogger.error( "Shit, f**k, bugger. Add the failover." )
                      else:
                        lfn = res['Value']
                        selectedLfns.append( lfn )
                        lfnPfnDict[lfn] = pfn
                        pfnSize[pfn] = pfnDict['Size']
                    # Cross-check catalogue metadata: a storage file missing from
                    # the catalogue is 'SEPfnNoLfn'; size disagreements are
                    # 'ZeroSizePfn' or 'PfnSizeMismatch'
                    res = self.ReplicaManager.getCatalogFileMetadata( selectedLfns )
                    if not res['OK']:
                      # Metadata query failed: re-queue the whole directory
                      subDirs = [currentDir]
                    else:
                      for lfn in res['Value']['Failed'].keys():
                        gLogger.error( "SEvsLFCAgent.execute: Failed to get metadata.", "%s %s" % ( lfn, res['Value']['Failed'][lfn] ) )
                        pfn = lfnPfnDict[lfn]
                        fileMetadata = {'Prognosis':'SEPfnNoLfn', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName, 'Size':pfnSize[pfn]}
                        res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                        if res['OK']:
                          gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                          gLogger.error( "Change the status in the LFC,ProcDB...." )
                        else:
                          gLogger.error( "Shit, f**k, bugger. Add the failover." )
                      for lfn, lfnDict in res['Value']['Successful'].items():
                        pfn = lfnPfnDict[lfn]
                        storageSize = pfnSize[pfn]
                        catalogSize = lfnDict['Size']
                        if int( catalogSize ) == int( storageSize ):
                          gLogger.info( "SEvsLFCAgent.execute: Catalog and storage sizes match.", "%s %s" % ( pfn, storageElementName ) )
                          gLogger.info( "Change the status in the LFC" )
                        elif int( storageSize ) == 0:
                          gLogger.error( "SEvsLFCAgent.execute: Physical file size is 0.", "%s %s" % ( pfn, storageElementName ) )
                          fileMetadata = {'Prognosis':'ZeroSizePfn', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName}
                          res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                          if res['OK']:
                            gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                            gLogger.error( "Change the status in the LFC,ProcDB...." )
                          else:
                            gLogger.error( "Shit, f**k, bugger. Add the failover." )
                        else:
                          gLogger.error( "SEvsLFCAgent.execute: Catalog and storage size mis-match.", "%s %s" % ( pfn, storageElementName ) )
                          fileMetadata = {'Prognosis':'PfnSizeMismatch', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName}
                          res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                          if res['OK']:
                            gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                            gLogger.error( "Change the status in the LFC,ProcDB...." )
                          else:
                            gLogger.error( "Shit, f**k, bugger. Add the failover." )
                      # Cross-check replica registration: a storage PFN not
                      # registered for its LFN is 'PfnNoReplica'
                      res = self.ReplicaManager.getCatalogReplicas( selectedLfns )
                      if not res['OK']:
                        subDirs = [currentDir]
                      else:
                        for lfn in res['Value']['Failed'].keys():
                          gLogger.error( "SEvsLFCAgent.execute: Failed to get replica information.", "%s %s" % ( lfn, res['Value']['Failed'][lfn] ) )
                          pfn = lfnPfnDict[lfn]
                          fileMetadata = {'Prognosis':'PfnNoReplica', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName, 'Size':pfnSize[pfn]}
                          res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                          if res['OK']:
                            gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                            gLogger.error( "Change the status in the LFC,ProcDB...." )
                          else:
                            gLogger.error( "Shit, f**k, bugger. Add the failover." )
                        for lfn, repDict in res['Value']['Successful'].items():
                          pfn = lfnPfnDict[lfn]
                          registeredPfns = repDict.values()
                          if not pfn in registeredPfns:
                            gLogger.error( "SEvsLFCAgent.execute: SE PFN not registered.", "%s %s" % ( lfn, pfn ) )
                            fileMetadata = {'Prognosis':'PfnNoReplica', 'LFN':lfn, 'PFN':pfn, 'StorageElement':storageElementName}
                            res = IntegrityDB.insertProblematic( AGENT_NAME, fileMetadata )
                            if res['OK']:
                              gLogger.info( "SEvsLFCAgent.execute: Successfully added to IntegrityDB." )
                              gLogger.error( "Change the status in the LFC,ProcDB...." )
                            else:
                              gLogger.error( "Shit, f**k, bugger. Add the failover." )
                          else:
                            gLogger.info( "SEvsLFCAgent.execute: SE Pfn verified.", pfn )
                  # Feed the next directories (or the re-queued one) to the walk
                  oNamespaceBrowser.updateDirs( subDirs )
                # NOTE(review): 'lfn' here is the value last bound by the inner
                # loops above, not necessarily the sub-request file's LFN read
                # at the top of this branch — verify before relying on it
                oRequest.setSubRequestFileAttributeValue( ind, 'integrity', lfn, 'Status', 'Done' )
      ################################################
      # If the sub-request is none of the above types
      else:
        gLogger.info( "SEvsLFCAgent.execute: Operation not supported.", operation )
      ################################################
      # Determine whether there are any active files
      if oRequest.isSubRequestEmpty( ind, 'integrity' )['Value']:
        oRequest.setSubRequestStatus( ind, 'integrity', 'Done' )
    ################################################
    # If the sub-request is already in terminal state
    else:
      gLogger.info( "SEvsLFCAgent.execute: Sub-request %s is status '%s' and not to be executed." % ( ind, subRequestAttributes['Status'] ) )
  ################################################
  # Generate the new request string after operation
  requestString = oRequest.toXML()['Value']
  res = self.RequestDBClient.updateRequest( requestName, requestString, sourceServer )
  return S_OK()
def getRequest( operation ):
  """ Build a canned requestDict for tests.

  :param str operation: sub-request operation attribute
  """
  fake = RequestContainer( init = False )
  # request-level attributes
  fake.setJobID( 1 )
  fake.setOwnerGroup( "lhcb_user" )
  fake.setDIRACSetup( "LHCb-Production" )
  fake.setSourceComponent( None )
  fake.setCreationTime( "0000-00-00 00:00:00" )
  fake.setLastUpdate( "2011-12-01 04:57:02" )
  fake.setStatus( "Waiting" )
  fake.setAttribute( "RequestID", 123456789 )
  # one 'transfer' sub-request carrying the requested operation
  fake.initiateSubRequest( "transfer" )
  fake.setSubRequestAttributes( 0, "transfer", { "Status" : "Waiting",
                                                 "SubRequestID" : 2222222,
                                                 "Operation" : operation,
                                                 "Arguments" : None,
                                                 "ExecutionOrder" : 0,
                                                 "SourceSE" : None,
                                                 "TargetSE" : "CERN-USER",
                                                 "Catalogue" : "LcgFileCatalogCombined",
                                                 "CreationTime" : "2011-12-01 04:57:02",
                                                 "SubmissionTime" : "2011-12-01 04:57:02",
                                                 "LastUpdate" : "2011-12-01 20:14:22" } )
  # a single waiting file attached to that sub-request
  fake.setSubRequestFiles( 0, "transfer", [ { "FileID" : 3333333,
                                              "LFN" : "/lhcb/user/c/cibak/11889/11889410/test.zzz",
                                              "Size" : 44444444,
                                              "PFN" : "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
                                              "GUID" : "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
                                              "Md5" : None,
                                              "Addler" : "92b85e26",
                                              "Attempt" : 1,
                                              "Status" : "Waiting" } ] )
  return { "OK" : True,
           "Value" : { "requestName" : "%s.xml" % operation,
                       "requestString" : fake.toXML_new()["Value"],
                       "requestObj" : fake,
                       "jobID" : 1,
                       "executionOrder" : 0,
                       "sourceServer" : "foobarserver" } }
def getRequest(self, requestType): """ Obtain a request from the database of a :requestType: type :param self: self reference :param str requestType: request type """ self.log.info("getRequest: Attempting to get %s type request." % requestType) try: # Determine the request name to be obtained candidateRequests = [] reqDir = os.path.join(self.root, requestType, "Waiting") self.getIdLock.acquire() if os.path.exists(reqDir): candidateRequests = [ os.path.basename(requestFile) for requestFile in sorted( filter( os.path.isfile, [os.path.join(reqDir, requestName) for requestName in os.listdir(reqDir)] ), key=os.path.getctime, ) ] if not len(candidateRequests) > 0: self.getIdLock.release() self.log.info("getRequest: No request of type %s found." % requestType) return S_OK() # Select a request if requestType not in self.lastRequest: self.lastRequest[requestType] = ("", 0) lastRequest, lastRequestIndex = self.lastRequest[requestType] res = self.__selectRequestCursor(candidateRequests, lastRequest, lastRequestIndex) if not res["OK"]: self.getIdLock.release() errStr = "getRequest: Failed to get request cursor." self.log.error(errStr, res["Message"]) return S_ERROR(errStr) selectedRequestName, selectedRequestIndex = res["Value"] # Obtain the string for the selected request res = self.__getRequestString(selectedRequestName) if not res["OK"]: self.getIdLock.release() errStr = "getRequest: Failed to get request string for %s." % selectedRequestName self.log.error(errStr, res["Message"]) return S_ERROR(errStr) selectedRequestString = res["Value"] # Set the request status to assigned res = self.setRequestStatus(selectedRequestName, "Assigned") if not res["OK"]: self.getIdLock.release() errStr = "getRequest: Failed to set %s status to 'Assigned'." 
% selectedRequestName self.log.error(errStr, res["Message"]) return S_ERROR(errStr) # Update the request cursor and return the selected request self.lastRequest[requestType] = (selectedRequestName, selectedRequestIndex) self.getIdLock.release() self.log.info("getRequest: Successfully obtained %s request." % selectedRequestName) oRequest = RequestContainer(request=selectedRequestString) jobID = oRequest.getJobID() jobID = jobID["Value"] if jobID["OK"] and jobID["Value"] else 0 try: jobID = int(jobID) except (TypeError, ValueError), error: self.log.error("getRequest: could not get JobID from Request, setting it to 0: %s" % str(error)) jobID = 0 return S_OK({"RequestString": selectedRequestString, "RequestName": selectedRequestName, "JobID": jobID})
def getRequest( self, requestType ):
  """ Get a request of a given type eligible for execution.

      Selects up to 100 of the oldest pending requests of :requestType:,
      shuffles them, atomically claims the sub-requests of one of them by
      flipping their Status to 'Assigned', then rebuilds the full request
      (sub-request attributes, files, datasets, request attributes) into a
      RequestContainer and returns its XML serialisation.
  """
  # RG: What if requestType is not given?
  # the first query will return nothing.
  # KC: maybe returning S_ERROR would be enough?
  # alternatively we should check if requestType is known (in 'transfer', 'removal', 'register' and 'diset')
  if not requestType or type( requestType ) not in types.StringTypes:
    return S_ERROR( "Request type not given." )
  myRequestType = self._escapeString( requestType )
  if not myRequestType:
    return myRequestType
  myRequestType = myRequestType['Value']
  # NOTE(review): 'start' is never used in the visible code — confirm whether
  # timing instrumentation was removed or is still intended
  start = time.time()
  dmRequest = RequestContainer( init = False )
  requestID = 0
  subIDList = []
  fields = ['RequestID', 'SubRequestID', 'Operation', 'Arguments', 'ExecutionOrder', 'SourceSE',
            'TargetSE', 'Catalogue', 'CreationTime', 'SubmissionTime', 'LastUpdate', 'Status', 'RequestType']
  # get the pending SubRequest sorted by ExecutionOrder and LastUpdate
  req = "SELECT `RequestID`,`ExecutionOrder`,`Status`,`RequestType`,`LastUpdate` FROM `SubRequests` "\
        "WHERE `Status` IN ( 'Waiting', 'Assigned' ) ORDER BY `ExecutionOrder`,`LastUpdate`"
  # now get sorted list of RequestID (according to the above)
  req = "SELECT * FROM ( %s ) as T1 GROUP BY `RequestID`" % req
  # and get the 100 oldest ones of Type requestType
  req = "SELECT `RequestID`,`ExecutionOrder` FROM ( %s ) as T2 WHERE `RequestType`=%s "\
        "ORDER BY `LastUpdate` LIMIT 100" % ( req, myRequestType )
  # and now get all waiting SubRequest for the selected RequestID and ExecutionOrder
  req = "SELECT A.%s FROM SubRequests AS A, ( %s ) AS B WHERE " % ( ', A.'.join( fields ), req )
  req = "%s A.RequestID=B.RequestID AND A.ExecutionOrder=B.ExecutionOrder" % ( req )
  result = self._query( req )
  if not result['OK']:
    err = 'RequestDB._getRequest: Failed to retrieve Requests'
    return S_ERROR( '%s\n%s' % ( err, result['Message'] ) )
  if not result['Value']:
    return S_OK()
  # We get up to 100 (see LIMIT above) Request candidates, to add some randomness
  reqDict = {}
  for row in result['Value']:
    # row[-1] is RequestType, row[-2] is Status; myRequestType is the escaped
    # (quoted) form, hence the '"%s"' comparison
    if ('"%s"' % row[-1]) != myRequestType:
      continue
    if row[-2] != 'Waiting':
      continue
    reqDict.setdefault( row[0], [] )
    reqDict[row[0]].append( row[1:-2] )
  reqIDList = reqDict.keys()
  random.shuffle( reqIDList )
  # Try to claim one request: mark all its sub-requests 'Assigned'; an UPDATE
  # affecting 0 rows means another consumer got that sub-request first
  for reqID in reqIDList:
    sidList = [ x[0] for x in reqDict[reqID] ]
    for subID in sidList:
      req = "UPDATE SubRequests SET Status='Assigned' WHERE RequestID=%s AND SubRequestID=%s;" % ( reqID, subID )
      resAssigned = self._update( req )
      if not resAssigned['OK']:
        if subIDList:
          self.__releaseSubRequests( reqID, subIDList )
        return S_ERROR( 'Failed to assign subrequests: %s' % resAssigned['Message'] )
      if resAssigned['Value'] == 0:
        # Somebody has assigned this request
        gLogger.warn( 'Already assigned subrequest %d of request %d' % ( subID, reqID ) )
      else:
        subIDList.append( subID )
    if subIDList:
      # We managed to get some requests, can continue now
      requestID = reqID
      break
  # Haven't succeeded to get any request
  if not requestID:
    return S_OK()
  dmRequest.setRequestID( requestID )
  fields = ['FileID', 'LFN', 'Size', 'PFN', 'GUID', 'Md5', 'Addler', 'Attempt', 'Status' ]
  # Rebuild each claimed sub-request (attributes, files, datasets) in memory
  for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, \
      creationTime, submissionTime, lastUpdate in reqDict[requestID]:
    if not subRequestID in subIDList:
      continue
    res = dmRequest.initiateSubRequest( requestType )
    ind = res['Value']
    subRequestDict = { 'Status' : 'Waiting', 'SubRequestID' : subRequestID, 'Operation' : operation,
                       'Arguments' : arguments, 'ExecutionOrder': int( executionOrder ),
                       'SourceSE' : sourceSE, 'TargetSE' : targetSE, 'Catalogue' : catalogue,
                       'CreationTime' : creationTime, 'SubmissionTime': submissionTime,
                       'LastUpdate' : lastUpdate }
    res = dmRequest.setSubRequestAttributes( ind, requestType, subRequestDict )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT %s FROM `Files` WHERE `SubRequestID`=%s ORDER BY `FileID`;" % ( ', '.join( fields ), subRequestID )
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    files = []
    for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
      fileDict = {'FileID':fileID, 'LFN':lfn, 'Size':size, 'PFN':pfn, 'GUID':guid,
                  'Md5':md5, 'Addler':addler, 'Attempt':attempt, 'Status':status}
      files.append( fileDict )
    res = dmRequest.setSubRequestFiles( ind, requestType, files )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    datasets = []
    # NOTE(review): the dataset Status column is read but dropped here
    for dataset, status in res['Value']:
      datasets.append( dataset )
    res = dmRequest.setSubRequestDatasets( ind, requestType, datasets )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  # Finally fetch the request-level attributes
  fields = ['RequestName', 'JobID', 'OwnerDN', 'OwnerGroup',
            'DIRACSetup', 'SourceComponent', 'CreationTime',
            'SubmissionTime', 'LastUpdate']
  req = "SELECT %s FROM `Requests` WHERE `RequestID`=%s;" % ( ', '.join( fields ), requestID )
  res = self._query( req )
  if not res['OK']:
    # NOTE(review): this error text looks copy-pasted from another query
    # ('Failed to retrieve max RequestID') — the query above fetches the
    # request attributes, not a max ID
    err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
    self.__releaseSubRequests( requestID, subIDList )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, \
    creationTime, submissionTime, lastUpdate = res['Value'][0]
  dmRequest.setRequestName( requestName )
  dmRequest.setJobID( jobID )
  dmRequest.setOwnerDN( ownerDN )
  dmRequest.setOwnerGroup( ownerGroup )
  dmRequest.setDIRACSetup( diracSetup )
  dmRequest.setSourceComponent( sourceComponent )
  dmRequest.setCreationTime( str( creationTime ) )
  # NOTE(review): submissionTime is unpacked but never set on dmRequest —
  # confirm whether that is intentional
  dmRequest.setLastUpdate( str( lastUpdate ) )
  res = dmRequest.toXML()
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % ( requestID )
    self.__releaseSubRequests( requestID, subIDList )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestString = res['Value']
  #still have to manage the status of the dataset properly
  resultDict = {}
  resultDict['RequestName'] = requestName
  resultDict['RequestString'] = requestString
  resultDict['JobID'] = jobID
  return S_OK( resultDict )
def getRequest(operation):
    """Fake requestDict factory used by tests.

    :param str operation: sub-request operation attribute
    """
    rc = RequestContainer(init=False)
    rc.setJobID(1)
    rc.setOwnerGroup("lhcb_user")
    rc.setDIRACSetup("LHCb-Production")
    rc.setSourceComponent(None)
    rc.setCreationTime("0000-00-00 00:00:00")
    rc.setLastUpdate("2011-12-01 04:57:02")
    rc.setStatus("Waiting")
    rc.setAttribute("RequestID", 123456789)
    # single 'transfer' sub-request with the caller-chosen operation
    rc.initiateSubRequest("transfer")
    subAttrs = dict(
        Status="Waiting",
        SubRequestID=2222222,
        Operation=operation,
        Arguments=None,
        ExecutionOrder=0,
        SourceSE=None,
        TargetSE="CERN-USER",
        Catalogue="LcgFileCatalogCombined",
        CreationTime="2011-12-01 04:57:02",
        SubmissionTime="2011-12-01 04:57:02",
        LastUpdate="2011-12-01 20:14:22",
    )
    rc.setSubRequestAttributes(0, "transfer", subAttrs)
    # one waiting file attached to the sub-request
    fileRecord = dict(
        FileID=3333333,
        LFN="/lhcb/user/c/cibak/11889/11889410/test.zzz",
        Size=44444444,
        PFN="srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
        GUID="5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
        Md5=None,
        Addler="92b85e26",
        Attempt=1,
        Status="Waiting",
    )
    rc.setSubRequestFiles(0, "transfer", [fileRecord])
    return {
        "OK": True,
        "Value": {
            "requestName": "%s.xml" % operation,
            "requestString": rc.toXML_new()["Value"],
            "requestObj": rc,
            "jobID": 1,
            "executionOrder": 0,
            "sourceServer": "foobarserver",
        },
    }
def setRequest( self, requestName, requestString ):
  """ Insert request :requestName: into the db.

      Parses :requestString: into a RequestContainer and persists the request
      attributes, each sub-request with its files and datasets, then marks
      everything 'Waiting'.  If the name already exists and the request comes
      from a DIRAC job (JobID > 0) the existing request is overridden.
      On any partial failure the whole request is deleted again.

  :param str requestName: Requests.RequestName
  :param str requestString: xml-serialised request
  """
  request = RequestContainer( init = True, request = requestString )
  requestTypes = request.getSubRequestTypes()['Value']
  failed = False
  res = self._getRequestID( requestName )
  if not res['OK']:
    # we have a special case here: if request already exists, we override it if it
    # comes from a DIRAC job. This is identified by having a meaningful JobID in
    # the request
    if res['Message'].find( 'Duplicate' ) != -1:
      # Duplicate request
      jobID = request.getJobID()['Value']
      if jobID == "Unknown":
        return res
      try:
        jobID = int( jobID )
      except:
        # JobID is not numeric: treat as not coming from a job, keep the error
        return res
      if jobID > 0:
        # Remove the existing request
        result = self._deleteRequest( requestName )
        if not result['OK']:
          message = res['Message']
          return S_ERROR( 'Failed to set request: ' + message + ' can not override' )
        res = self._getRequestID( requestName )
        if not res['OK']:
          return res
    else:
      return res
  requestID = res['Value']
  subRequestIDs = {}
  # Persist request attributes, then every sub-request with its files and
  # datasets; any failure sets 'failed' and the loop drains without raising
  res = self.__setRequestAttributes( requestID, request )
  if res['OK']:
    for requestType in requestTypes:
      res = request.getNumSubRequests( requestType )
      numRequests = res['Value']
      for ind in range( numRequests ):
        res = self._getSubRequestID( requestID, requestType )
        if res['OK']:
          subRequestID = res['Value']
          res = self.__setSubRequestAttributes( requestID, ind, requestType, subRequestID, request )
          if res['OK']:
            # remember the status to apply once everything is inserted
            subRequestIDs[subRequestID] = res['Value']
            res = self.__setSubRequestFiles( ind, requestType, subRequestID, request )
            if res['OK']:
              res = self.__setSubRequestDatasets( ind, requestType, subRequestID, request )
              if not res['OK']:
                failed = True
                message = res['Message']
            else:
              failed = True
              message = res['Message']
          else:
            failed = True
            message = res['Message']
        else:
          failed = True
          message = res['Message']
  else:
    failed = True
    message = res['Message']
  # Activate the sub-requests (default status 'Waiting'), then the request
  for subRequestID, status in subRequestIDs.items():
    if not status:
      status = "Waiting"
    res = self._setSubRequestAttribute( requestID, subRequestID, 'Status', status )
    if not res['OK']:
      failed = True
      message = res['Message']
  res = self._setRequestAttribute( requestID, 'Status', 'Waiting' )
  if not res['OK']:
    failed = True
    message = res['Message']
  if failed:
    # roll back: remove the partially inserted request
    res = self._deleteRequest( requestName )
    return S_ERROR( 'Failed to set request: ' + message )
  else:
    return S_OK( requestID )