def __setRemovalRequest(self, lfn, ownerDN, ownerGroup):
    """ Create and submit a single-file removal request owned by the given credentials.

    :param str lfn: LFN of the file to remove
    :param str ownerDN: DN set as the request owner
    :param str ownerGroup: group set as the request owner group
    :return: S_OK/S_ERROR from RequestClient().setRequest (or the first failed step)
    """
    removalRequest = RequestContainer()
    removalRequest.setRequestAttributes({'OwnerDN': ownerDN, 'OwnerGroup': ownerGroup})
    name = os.path.basename(lfn).strip() + '_removal_request.xml'
    removalRequest.setRequestName(name)
    removalRequest.setSourceComponent('JobCleaningAgent')
    subRequest = {'Attributes': {'Operation': 'removeFile',
                                 'TargetSE': '',
                                 'ExecutionOrder': 0}}
    addResult = removalRequest.addSubRequest(subRequest, 'removal')
    if not addResult['OK']:
        return addResult
    subIndex = addResult['Value']
    removalRequest.setSubRequestFiles(subIndex, 'removal',
                                      [{'LFN': lfn, 'PFN': '', 'Status': 'Waiting'}])
    client = RequestClient()
    serialised = removalRequest.toXML()
    if not serialised['OK']:
        return serialised
    return client.setRequest(name, serialised['Value'])
def prepareTransformationTasks(self, transBody, taskDict, owner='', ownerGroup=''):
    """ Build an XML-serialised request per task in taskDict.

    For each task a RequestContainer with one sub-request (one file entry per
    ';'-separated LFN in 'InputData') is stored under
    taskDict[taskID]['TaskObject'].

    :param str transBody: optional "requestType;requestOperation" pair;
                          defaults to "transfer;replicateAndRegister"
    :param dict taskDict: taskID -> task parameter dict; modified in place
    :param str owner: unused here, kept for interface compatibility
    :param str ownerGroup: unused here, kept for interface compatibility
    :return: S_OK( taskDict )
    """
    requestType = 'transfer'
    requestOperation = 'replicateAndRegister'
    try:
        requestType, requestOperation = transBody.split(';')
    except (AttributeError, ValueError):
        # transBody is None/empty or not of the form "type;operation":
        # keep the defaults above (was a bare except, which hid real errors)
        pass
    for taskID in sortList(taskDict.keys()):
        paramDict = taskDict[taskID]
        transID = paramDict['TransformationID']
        oRequest = RequestContainer(init=False)
        subRequestIndex = oRequest.initiateSubRequest(requestType)['Value']
        attributeDict = {'Operation': requestOperation,
                         'TargetSE': paramDict['TargetSE']}
        oRequest.setSubRequestAttributes(subRequestIndex, requestType, attributeDict)
        files = [{'LFN': lfn} for lfn in paramDict['InputData'].split(';')]
        oRequest.setSubRequestFiles(subRequestIndex, requestType, files)
        # request name: <transID>_<taskID>, both zero-padded to 8 digits
        requestName = str(transID).zfill(8) + '_' + str(taskID).zfill(8)
        oRequest.setRequestAttributes({'RequestName': requestName})
        taskDict[taskID]['TaskObject'] = oRequest.toXML()['Value']
    return S_OK(taskDict)
def __setRemovalRequest( self, lfn, ownerDN, ownerGroup ):
    """ Submit a removal request for one LFN under the supplied credentials. """
    req = RequestContainer()
    req.setRequestAttributes( { 'OwnerDN': ownerDN, 'OwnerGroup': ownerGroup } )
    requestName = '%s_removal_request.xml' % os.path.basename( lfn ).strip()
    req.setRequestName( requestName )
    req.setSourceComponent( 'JobCleaningAgent' )
    attributes = { 'Operation': 'removeFile', 'TargetSE': '', 'ExecutionOrder': 0 }
    addition = req.addSubRequest( { 'Attributes': attributes }, 'removal' )
    if not addition['OK']:
        return addition
    req.setSubRequestFiles( addition['Value'], 'removal',
                            [ { 'LFN': lfn, 'PFN': '', 'Status': 'Waiting' } ] )
    client = RequestClient()
    xmlResult = req.toXML()
    if not xmlResult['OK']:
        return xmlResult
    return client.setRequest( requestName, xmlResult['Value'] )
def getRequest():
    """ Helper building a canned transfer request reply (request + XML string). """
    container = RequestContainer(init=False)
    # request-level attributes
    container.setRequestName("00009423_00000118")
    container.setJobID(0)
    container.setOwnerDN("")
    container.setOwnerGroup("")
    container.setDIRACSetup("")
    container.setSourceComponent(None)
    container.setCreationTime("0000-00-00 00:00:00")
    container.setLastUpdate("2011-02-19 04:57:02")
    container.setStatus("Waiting")
    # one 'transfer' sub-request
    container.initiateSubRequest("transfer")
    container.setSubRequestAttributes(0, "transfer", {
        "Status": "Waiting",
        "SubRequestID": 2259916,
        "Operation": "replicateAndRegister",
        "Arguments": None,
        "ExecutionOrder": 0,
        "SourceSE": None,
        "TargetSE": "GRIDKA_MC-DST,GRIDKA_MC_M-DST",
        "Catalogue": None,
        "CreationTime": "2011-02-19 04:57:02",
        "SubmissionTime": "2011-02-19 04:57:02",
        "LastUpdate": "2011-08-18 20:14:22"})
    # single already-scheduled file
    container.setSubRequestFiles(0, "transfer", [{
        "FileID": 1610538,
        "LFN": "/lhcb/MC/MC10/ALLSTREAMS.DST/00009422/0000/00009422_00000171_1.allstreams.dst",
        "Size": None, "PFN": None, "GUID": None, "Md5": None, "Addler": None,
        "Attempt": 1, "Status": "Scheduled"}])
    return {"OK": True,
            "Value": {"RequestName": "00009423_00000118",
                      "RequestString": container.toXML()["Value"],
                      "JobID": 0,
                      "RequestContainer": container}}
def getRegisterRequest():
    """ Helper building a register-type request and its kwargs dict. """
    req = RequestContainer(init=False)
    req.setJobID(11889410)
    # OwnerDN intentionally left unset in this fixture
    req.setOwnerGroup("lhcb_user")
    req.setDIRACSetup("LHCb-Production")
    req.setSourceComponent(None)
    req.setCreationTime("0000-00-00 00:00:00")
    req.setLastUpdate("2011-02-19 04:57:02")
    req.setStatus("Waiting")
    req.initiateSubRequest("register")
    req.setSubRequestAttributes(0, "register", {
        "Status": "Waiting", "SubRequestID": 2259916, "Operation": "registerFile",
        "Arguments": None, "ExecutionOrder": 0, "SourceSE": None,
        "TargetSE": "RAL-USER", "Catalogue": "LcgFileCatalogCombined",
        "CreationTime": "2011-02-19 04:57:02",
        "SubmissionTime": "2011-02-19 04:57:02",
        "LastUpdate": "2011-08-18 20:14:22"})
    req.setSubRequestFiles(0, "register", [{
        "FileID": 1610538,
        "LFN": "/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
        "Size": 153961749,
        "PFN": "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
        "GUID": "5911A19C-7CDF-7F2A-36ED-089CD410F98A",
        "Md5": None, "Addler": "92b85e26", "Attempt": 1, "Status": "Waiting"}])
    return {"requestName": "11889410.xml",
            "requestString": req.toXML()["Value"],
            "jobID": 11889410,
            "executionOrder": 0,
            "sourceServer": "foobarserver",
            "configPath": "/Systems/DataManagement/Development/Agents/RegistrationAgent"}
def getRequest(operation):
    """ Build a fake requestDict for the given sub-request operation.

    :param str operation: sub-request operation attribute
    """
    container = RequestContainer(init=False)
    container.setJobID(1)
    # OwnerDN deliberately not set in this fixture
    container.setOwnerGroup("lhcb_user")
    container.setDIRACSetup("LHCb-Production")
    container.setSourceComponent(None)
    container.setCreationTime("0000-00-00 00:00:00")
    container.setLastUpdate("2011-12-01 04:57:02")
    container.setStatus("Waiting")
    container.setAttribute("RequestID", 123456789)
    container.initiateSubRequest("transfer")
    container.setSubRequestAttributes(0, "transfer", {
        "Status": "Waiting", "SubRequestID": 2222222, "Operation": operation,
        "Arguments": None, "ExecutionOrder": 0, "SourceSE": None,
        "TargetSE": "CERN-USER", "Catalogue": "LcgFileCatalogCombined",
        "CreationTime": "2011-12-01 04:57:02",
        "SubmissionTime": "2011-12-01 04:57:02",
        "LastUpdate": "2011-12-01 20:14:22"})
    container.setSubRequestFiles(0, "transfer", [{
        "FileID": 3333333,
        "LFN": "/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "Size": 44444444,
        "PFN": "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "GUID": "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
        "Md5": None, "Addler": "92b85e26", "Attempt": 1, "Status": "Waiting"}])
    # NOTE: this fixture uses toXML_new(), unlike the sibling helpers
    return {"OK": True,
            "Value": {"requestName": "%s.xml" % operation,
                      "requestString": container.toXML_new()["Value"],
                      "requestObj": container,
                      "jobID": 1,
                      "executionOrder": 0,
                      "sourceServer": "foobarserver"}}
def getRegisterRequest():
    """ Build the kwargs dict for a register-type request fixture. """
    requestContainer = RequestContainer(init=False)
    requestContainer.setJobID(11889410)
    # OwnerDN is not set for this fixture
    requestContainer.setOwnerGroup("lhcb_user")
    requestContainer.setDIRACSetup("LHCb-Production")
    requestContainer.setSourceComponent(None)
    requestContainer.setCreationTime("0000-00-00 00:00:00")
    requestContainer.setLastUpdate("2011-02-19 04:57:02")
    requestContainer.setStatus("Waiting")
    # one 'register' sub-request with a single waiting file
    requestContainer.initiateSubRequest("register")
    subRequestAttributes = {
        "Status": "Waiting",
        "SubRequestID": 2259916,
        "Operation": "registerFile",
        "Arguments": None,
        "ExecutionOrder": 0,
        "SourceSE": None,
        "TargetSE": "RAL-USER",
        "Catalogue": "LcgFileCatalogCombined",
        "CreationTime": "2011-02-19 04:57:02",
        "SubmissionTime": "2011-02-19 04:57:02",
        "LastUpdate": "2011-08-18 20:14:22",
    }
    requestContainer.setSubRequestAttributes(0, "register", subRequestAttributes)
    registerFile = {
        "FileID": 1610538,
        "LFN": "/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
        "Size": 153961749,
        "PFN": "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cblanks/11889/11889410/LDSB.rsQrRL",
        "GUID": "5911A19C-7CDF-7F2A-36ED-089CD410F98A",
        "Md5": None,
        "Addler": "92b85e26",
        "Attempt": 1,
        "Status": "Waiting",
    }
    requestContainer.setSubRequestFiles(0, "register", [registerFile])
    kwargs = {
        "requestName": "11889410.xml",
        "requestString": requestContainer.toXML()["Value"],
        "jobID": 11889410,
        "executionOrder": 0,
        "sourceServer": "foobarserver",
        "configPath": "/Systems/DataManagement/Development/Agents/RegistrationAgent",
    }
    return kwargs
def getRequest():
    """ Helper returning a canned transfer requestContainer wrapped in an S_OK-shaped dict. """
    rc = RequestContainer( init = False )
    ## request attributes
    rc.setRequestName( "00009423_00000118" )
    rc.setJobID( 0 )
    rc.setOwnerDN( "" )
    rc.setOwnerGroup( "" )
    rc.setDIRACSetup( "" )
    rc.setSourceComponent( None )
    rc.setCreationTime( "0000-00-00 00:00:00" )
    rc.setLastUpdate( "2011-02-19 04:57:02" )
    rc.setStatus( "Waiting" )
    ## single transfer sub-request
    rc.initiateSubRequest( "transfer" )
    attrs = { "Status" : "Waiting",
              "SubRequestID" : 2259916,
              "Operation" : "replicateAndRegister",
              "Arguments" : None,
              "ExecutionOrder" : 0,
              "SourceSE" : None,
              "TargetSE" : "GRIDKA_MC-DST,GRIDKA_MC_M-DST",
              "Catalogue" : None,
              "CreationTime" : "2011-02-19 04:57:02",
              "SubmissionTime" : "2011-02-19 04:57:02",
              "LastUpdate" : "2011-08-18 20:14:22" }
    rc.setSubRequestAttributes( 0, "transfer", attrs )
    ## one scheduled file
    transferFile = { "FileID" : 1610538,
                     "LFN" : "/lhcb/MC/MC10/ALLSTREAMS.DST/00009422/0000/00009422_00000171_1.allstreams.dst",
                     "Size" : None,
                     "PFN" : None,
                     "GUID" : None,
                     "Md5" : None,
                     "Addler" : None,
                     "Attempt" : 1,
                     "Status" : "Scheduled" }
    rc.setSubRequestFiles( 0, "transfer", [ transferFile ] )
    return { "OK" : True,
             "Value" : { "RequestName" : "00009423_00000118",
                         "RequestString" : rc.toXML()["Value"],
                         "JobID" : 0,
                         "RequestContainer" : rc } }
def getKwargsRemoveFile():
    """ Helper building the kwargs dict for a removeFile removal request. """
    container = RequestContainer(init=False)
    container.setJobID(11111111)
    # OwnerDN is intentionally not set for this fixture
    container.setOwnerGroup("lhcb_user")
    container.setDIRACSetup("LHCb-Production")
    container.setSourceComponent(None)
    container.setCreationTime("0000-00-00 00:00:00")
    container.setLastUpdate("2011-12-01 04:57:02")
    container.setStatus("Waiting")
    container.initiateSubRequest("removal")
    container.setSubRequestAttributes(0, "removal", {
        "Status": "Waiting", "SubRequestID": 2222222, "Operation": "removeFile",
        "Arguments": None, "ExecutionOrder": 0, "SourceSE": None,
        "TargetSE": "RAL-USER", "Catalogue": "LcgFileCatalogCombined",
        "CreationTime": "2011-12-01 04:57:02",
        "SubmissionTime": "2011-12-01 04:57:02",
        "LastUpdate": "2011-12-01 20:14:22"})
    container.setSubRequestFiles(0, "removal", [{
        "FileID": 3333333,
        "LFN": "/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "Size": 44444444,
        "PFN": "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "GUID": "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
        "Md5": None, "Addler": "92b85e26", "Attempt": 1, "Status": "Waiting"}])
    return {"requestName": "00000001.xml",
            "requestString": container.toXML()["Value"],
            "jobID": 1,
            "executionOrder": 0,
            "sourceServer": "foobarserver",
            "configPath": "/Systems/DataManagement/Development/Agents/RemovalAgent"}
def test__getLastOrder(self):
    """ _getLastOrder: globally, and per-LFN once files are attached. """
    # --- no files: order comes from the sub-request attributes only
    req = RequestContainer()
    self.assertEqual(req._getLastOrder(), 0)
    self.assertEqual(req.subRequests, {})
    req.addSubRequest({'Attributes': {'Operation': 'replicateAndRegister',
                                      'TargetSE': 'SE',
                                      'ExecutionOrder': 0}}, 'transfer')
    self.assertEqual(req._getLastOrder(), 0)
    req.addSubRequest({'Attributes': {'Operation': 'replicateAndRegister',
                                      'TargetSE': 'SE',
                                      'ExecutionOrder': 1}}, 'transfer')
    self.assertEqual(req._getLastOrder(), 1)
    del req
    # --- with files: only sub-requests holding the LFN count
    req = RequestContainer()
    self.assertEqual(req._getLastOrder('foo'), 0)
    req.addSubRequest({'Attributes': {'Operation': 'replicateAndRegister',
                                      'TargetSE': 'SE',
                                      'ExecutionOrder': 1}}, 'transfer')
    self.assertEqual(req._getLastOrder('foo'), 0)
    req.setSubRequestFiles(0, 'transfer', [{'LFN': 'foo', 'Status': 'Waiting'}])
    self.assertEqual(req._getLastOrder('foo'), 1)
    req.addSubRequest({'Attributes': {'Operation': 'replicateAndRegister',
                                      'TargetSE': 'SE',
                                      'ExecutionOrder': 2}}, 'removal')
    self.assertEqual(req._getLastOrder('foo'), 1)
    req.setSubRequestFiles(0, 'removal', [{'LFN': 'foo', 'Status': 'Waiting'}])
    self.assertEqual(req._getLastOrder('foo'), 2)
def test__getLastOrder(self):
    """ Check _getLastOrder with and without an LFN argument. """

    def mkSub(order):
        # sub-request skeleton with the given execution order
        return {"Attributes": {"Operation": "replicateAndRegister",
                               "TargetSE": "SE",
                               "ExecutionOrder": order}}

    # part 1: no files attached
    request = RequestContainer()
    self.assertEqual(request._getLastOrder(), 0)
    self.assertEqual(request.subRequests, {})
    request.addSubRequest(mkSub(0), "transfer")
    self.assertEqual(request._getLastOrder(), 0)
    request.addSubRequest(mkSub(1), "transfer")
    self.assertEqual(request._getLastOrder(), 1)
    del request

    # part 2: orders tracked per-LFN
    request = RequestContainer()
    self.assertEqual(request._getLastOrder("foo"), 0)
    request.addSubRequest(mkSub(1), "transfer")
    self.assertEqual(request._getLastOrder("foo"), 0)
    request.setSubRequestFiles(0, "transfer", [{"LFN": "foo", "Status": "Waiting"}])
    self.assertEqual(request._getLastOrder("foo"), 1)
    request.addSubRequest(mkSub(2), "removal")
    self.assertEqual(request._getLastOrder("foo"), 1)
    request.setSubRequestFiles(0, "removal", [{"LFN": "foo", "Status": "Waiting"}])
    self.assertEqual(request._getLastOrder("foo"), 2)
def getRequest(operation):
    """ Build a RequestContainer fixture for the given operation.

    :param str operation: sub-request operation attribute
    :return: the populated RequestContainer (not wrapped in S_OK)
    """
    req = RequestContainer(init=False)
    req.setJobID(1)
    req.setOwnerDN("/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=cibak/CN=605919/CN=Krzysztof Ciba")
    req.setOwnerGroup("lhcb_user")
    req.setDIRACSetup("LHCb-Production")
    req.setSourceComponent(None)
    req.setCreationTime("0000-00-00 00:00:00")
    req.setLastUpdate("2011-12-01 04:57:02")
    req.setStatus("Waiting")
    req.setAttribute("RequestID", 123456789)
    # one transfer sub-request targeting two SEs
    req.initiateSubRequest("transfer")
    req.setSubRequestAttributes(0, "transfer", {
        "Status": "Waiting",
        "SubRequestID": 2222222,
        "Operation": operation,
        "Arguments": None,
        "ExecutionOrder": 0,
        "SourceSE": None,
        "TargetSE": "CERN-USER,PIC-USER",
        "Catalogue": "LcgFileCatalogCombined",
        "CreationTime": "2011-12-01 04:57:02",
        "SubmissionTime": "2011-12-01 04:57:02",
        "LastUpdate": "2011-12-01 20:14:22",
    })
    req.setSubRequestFiles(0, "transfer", [{
        "FileID": 3333333,
        "LFN": "/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "Size": 44444444,
        "PFN": "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
        "GUID": "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
        "Md5": None,
        "Addler": "92b85e26",
        "Attempt": 1,
        "Status": "Waiting",
    }])
    return req
def __deleteSandboxFromExternalBackend( self, SEName, SEPFN ): if self.getCSOption( "DelayedExternalDeletion", True ): gLogger.info( "Setting deletion request" ) try: request = RequestContainer() result = request.addSubRequest( { 'Attributes' : { 'Operation' : 'removePhysicalFile', 'TargetSE' : SEName, 'ExecutionOrder' : 1 } }, 'removal' ) index = result['Value'] fileDict = { 'PFN' : SEPFN, 'Status' : 'Waiting' } request.setSubRequestFiles( index, 'removal', [ fileDict ] ) return RequestClient().setRequest( "RemoteSBDeletion:%s|%s:%s" % ( SEName, SEPFN, time.time() ), request.toXML()[ 'Value' ] ) except Exception, e: gLogger.exception( "Exception while setting deletion request" ) return S_ERROR( "Cannot set deletion request: %s" % str( e ) )
def getKwargsRemoveFile():
    """ Build the kwargs dict used to drive a removeFile removal request. """
    req = RequestContainer( init = False )
    req.setJobID( 11111111 )
    # OwnerDN is left unset on purpose
    req.setOwnerGroup( "lhcb_user" )
    req.setDIRACSetup( "LHCb-Production" )
    req.setSourceComponent( None )
    req.setCreationTime( "0000-00-00 00:00:00" )
    req.setLastUpdate( "2011-12-01 04:57:02" )
    req.setStatus( "Waiting" )
    ## removal sub-request
    req.initiateSubRequest( "removal" )
    removalAttrs = { "Status" : "Waiting",
                     "SubRequestID" : 2222222,
                     "Operation" : "removeFile",
                     "Arguments" : None,
                     "ExecutionOrder" : 0,
                     "SourceSE" : None,
                     "TargetSE" : "RAL-USER",
                     "Catalogue" : "LcgFileCatalogCombined",
                     "CreationTime" : "2011-12-01 04:57:02",
                     "SubmissionTime" : "2011-12-01 04:57:02",
                     "LastUpdate" : "2011-12-01 20:14:22" }
    req.setSubRequestAttributes( 0, "removal", removalAttrs )
    ## one waiting file
    waitingFile = { "FileID" : 3333333,
                    "LFN" : "/lhcb/user/c/cibak/11889/11889410/test.zzz",
                    "Size" : 44444444,
                    "PFN" : "srm://srm-lhcb.gridpp.rl.ac.uk/castor/ads.rl.ac.uk/prod/lhcb/user/c/cibak/11889/11889410/test.zzz",
                    "GUID" : "5P13RD4L-4J5L-3D21-U5P1-3RD4L4J5P13R",
                    "Md5" : None,
                    "Addler" : "92b85e26",
                    "Attempt" : 1,
                    "Status" : "Waiting" }
    req.setSubRequestFiles( 0, "removal", [ waitingFile ] )
    return { "requestName" : "00000001.xml",
             "requestString" : req.toXML()["Value"],
             "jobID" : 1,
             "executionOrder" : 0,
             "sourceServer" : "foobarserver",
             "configPath" : "/Systems/DataManagement/Development/Agents/RemovalAgent" }
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN): if self.getCSOption("DelayedExternalDeletion", True): gLogger.info("Setting deletion request") try: request = RequestContainer() result = request.addSubRequest( { 'Attributes': { 'Operation': 'removePhysicalFile', 'TargetSE': SEName, 'ExecutionOrder': 1 } }, 'removal') index = result['Value'] fileDict = {'PFN': SEPFN, 'Status': 'Waiting'} request.setSubRequestFiles(index, 'removal', [fileDict]) return RequestClient().setRequest( "RemoteSBDeletion:%s|%s:%s" % (SEName, SEPFN, time.time()), request.toXML()['Value']) except Exception, e: gLogger.exception("Exception while setting deletion request") return S_ERROR("Cannot set deletion request: %s" % str(e))
def prepareTransformationTasks( self, transBody, taskDict, owner = '', ownerGroup = '' ):
    """ Serialise one request per task into taskDict[taskID]['TaskObject'].

    :param str transBody: optional "requestType;requestOperation"; falls back
                          to "transfer;replicateAndRegister"
    :param dict taskDict: taskID -> task parameters; updated in place
    :param str owner: unused, kept for interface compatibility
    :param str ownerGroup: unused, kept for interface compatibility
    :return: S_OK( taskDict )
    """
    requestType = 'transfer'
    requestOperation = 'replicateAndRegister'
    try:
      requestType, requestOperation = transBody.split( ';' )
    except ( AttributeError, ValueError ):
      # transBody missing or not "type;operation" — keep defaults
      # (previously a bare except, which also masked unrelated errors)
      pass
    for taskID in sortList( taskDict.keys() ):
      paramDict = taskDict[taskID]
      transID = paramDict['TransformationID']
      oRequest = RequestContainer( init = False )
      subRequestIndex = oRequest.initiateSubRequest( requestType )['Value']
      attributeDict = { 'Operation' : requestOperation,
                        'TargetSE' : paramDict['TargetSE'] }
      oRequest.setSubRequestAttributes( subRequestIndex, requestType, attributeDict )
      files = []
      for lfn in paramDict['InputData'].split( ';' ):
        files.append( { 'LFN' : lfn } )
      oRequest.setSubRequestFiles( subRequestIndex, requestType, files )
      # request name: zero-padded "<transID>_<taskID>"
      requestName = str( transID ).zfill( 8 ) + '_' + str( taskID ).zfill( 8 )
      oRequest.setRequestAttributes( { 'RequestName' : requestName } )
      taskDict[taskID]['TaskObject'] = oRequest.toXML()['Value']
    return S_OK( taskDict )
def readRequestsForJobs( self, jobIDs ):
    """ Read and return serialised Requests for the given jobs.

    :param list jobIDs: list of job IDs (non-zero ints)
    :return: S_OK( { "Successful" : { jobID : requestXML },
                     "Failed" : { jobID : errorMessage } } )
             or S_ERROR on bad input / failed name lookup
    """
    if type( jobIDs ) != list:
      # was: the "%s" placeholder in this message was never filled in
      return S_ERROR( "RequestDB: wrong format for jobIDs argument, got %s, expecting a list" % type( jobIDs ) )
    # make sure list is unique and has only non-zero ints
    jobIDs = list( set( [ int( jobID ) for jobID in jobIDs if int( jobID ) != 0 ] ) )
    reqCols = [ "RequestID", "RequestName", "JobID", "Status", "OwnerDN", "OwnerGroup",
                "DIRACSetup", "SourceComponent", "CreationTime", "SubmissionTime", "LastUpdate" ]
    subCols = [ "SubRequestID", "Operation", "Arguments", "RequestType", "ExecutionOrder", "Error",
                "SourceSE", "TargetSE", "Catalogue", "CreationTime", "SubmissionTime", "LastUpdate" ]
    fileCols = [ "FileID", "LFN", "Size", "PFN", "GUID", "Md5", "Addler", "Attempt", "Status", "Error" ]
    requestNames = self.getRequestForJobs( jobIDs )
    if not requestNames["OK"]:
      return requestNames
    requestNames = requestNames["Value"]
    ## this will be returned
    retDict = { "Successful" : dict(), "Failed" : dict() }
    for jobID in jobIDs:
      ## missing requests
      if jobID not in requestNames:
        retDict["Failed"][jobID] = "Request not found"
        continue
      requestName = requestNames[jobID]
      ## get request attributes
      ## NOTE(review): requestName is interpolated straight into SQL — fine if
      ## names are DB-generated, but worth parameterising; confirm their origin
      queryStr = "SELECT %s FROM Requests WHERE RequestName = '%s';" % ( ",".join( reqCols ), requestName )
      queryRes = self._query( queryStr )
      if not queryRes["OK"]:
        retDict["Failed"][jobID] = queryRes["Message"]
        continue
      queryRes = queryRes["Value"] if queryRes["Value"] else None
      if not queryRes:
        retDict["Failed"][jobID] = "Unable to read request attributes."
        continue
      requestObj = RequestContainer( init = False )
      reqAttrs = dict( zip( reqCols, queryRes[0] ) )
      requestObj.setRequestAttributes( reqAttrs )
      ## get sub-request attributes
      queryStr = "SELECT %s FROM `SubRequests` WHERE `RequestID`=%s;" % ( ",".join( subCols ), reqAttrs["RequestID"] )
      queryRes = self._query( queryStr )
      if not queryRes["OK"]:
        retDict["Failed"][jobID] = queryRes["Message"]
        continue
      queryRes = queryRes["Value"] if queryRes["Value"] else None
      if not queryRes:
        retDict["Failed"][jobID] = "Unable to read subrequest attributes."
        continue
      ## get sub-requests
      for recTuple in queryRes:
        subReqAttrs = dict( zip( subCols, recTuple ) )
        subType = subReqAttrs["RequestType"]
        subReqAttrs["ExecutionOrder"] = int( subReqAttrs["ExecutionOrder"] )
        # RequestType selects the container slot; it is not a sub-request attribute
        del subReqAttrs["RequestType"]
        index = requestObj.initiateSubRequest( subType )
        index = index["Value"]
        requestObj.setSubRequestAttributes( index, subType, subReqAttrs )
        ## get files
        subFiles = []
        fileQuery = "SELECT %s FROM `Files` WHERE `SubRequestID` = %s ORDER BY `FileID`;" % ( ",".join( fileCols ),
                                                                                              subReqAttrs["SubRequestID"] )
        fileQueryRes = self._query( fileQuery )
        if fileQueryRes["OK"] and fileQueryRes["Value"]:
          for fileRec in fileQueryRes["Value"]:
            subFiles.append( dict( zip( fileCols, fileRec ) ) )
        if subFiles:
          requestObj.setSubRequestFiles( index, subType, subFiles )
      retDict["Successful"][jobID] = requestObj.toXML()["Value"]
    return S_OK( retDict )
def test_addSubRequest( self ):
    """ addSubRequest: appended sub-requests get default attributes filled in
    (Status, TargetSE, Catalogue, Error, ...) and an ExecutionOrder; the 'last'
    keyword and _getLastOrder drive order computation; setSubRequestFiles
    attaches file dicts to an existing sub-request. """
    rc_o = RequestContainer()
    # first sub-request of type 'someType': defaults are filled in,
    # ExecutionOrder comes out as 0
    op1_Index = rc_o.addSubRequest( {'Attributes': {'SubRequestID': 'x',
                                                    'CreationTime': '2012-06-06 14:53:43.763743',
                                                    'Operation': 'op1'}}, 'someType' )
    op1_Index = op1_Index['Value']
    subRequestExpected = {'someType': [{'Files': [],
                                        'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                       'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                       'CreationTime': '2012-06-06 14:53:43.763743',
                                                       'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                        'Datasets': []}]}
    self.assertEqual( rc_o.subRequests, subRequestExpected )
    # second sub-request of the same type also gets ExecutionOrder 0
    op2_index = rc_o.addSubRequest( {'Attributes': {'SubRequestID': 'x',
                                                    'CreationTime': '2012-06-06 14:53:43.763743',
                                                    'Operation': 'op2'}}, 'someType' )
    op2_index = op2_index['Value']
    subRequestExpected = { 'someType': [ { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op2'},
                                           'Datasets': [] } ] }
    self.assertEqual( rc_o.subRequests, subRequestExpected )
    # ExecutionOrder 'last' resolves to (current last order) + 1, i.e. 1 here;
    # Operation defaults to ''
    rc_o.addSubRequest( {'Attributes': {'SubRequestID': 'x',
                                        'CreationTime': '2012-06-06 14:53:43.763743',
                                        'ExecutionOrder': 'last'}}, 'someType' )
    subRequestExpected = { 'someType': [ { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op2'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 1, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': ''},
                                           'Datasets': [] } ] }
    self.assertEqual( rc_o.subRequests, subRequestExpected )
    # 'last' counts across types: adding to 'someOtherType' yields order 2
    rc_o.addSubRequest( {'Attributes': {'SubRequestID': 'x',
                                        'CreationTime': '2012-06-06 14:53:43.763743',
                                        'ExecutionOrder': 'last'}}, 'someOtherType' )
    subRequestExpected = { 'someType': [ { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op2'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 1, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': ''},
                                           'Datasets': [] } ],
                           'someOtherType': [ { 'Files': [],
                                                'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                               'ExecutionOrder': 2, 'SubRequestID': 'x',
                                                               'CreationTime': '2012-06-06 14:53:43.763743',
                                                               'Catalogue': '', 'Error': '', 'Operation': ''},
                                                'Datasets': [] }, ] }
    self.assertEqual( rc_o.subRequests, subRequestExpected )
    # attach a file to the first 'someType' sub-request
    fileDict = {'LFN':'foo', 'Status':'Waiting'}
    rc_o.setSubRequestFiles( op1_Index, 'someType', [fileDict] )
    subRequestExpected = { 'someType': [ { 'Files': [{'LFN':'foo', 'Status':'Waiting'}],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op2'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 1, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': ''},
                                           'Datasets': [] } ],
                           'someOtherType': [ { 'Files': [],
                                                'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                               'ExecutionOrder': 2, 'SubRequestID': 'x',
                                                               'CreationTime': '2012-06-06 14:53:43.763743',
                                                               'Catalogue': '', 'Error': '', 'Operation': ''},
                                                'Datasets': [] }, ] }
    self.assertEqual( rc_o.subRequests, subRequestExpected )
    # per-LFN order for 'foo' is 0 (its sub-request has order 0), so the next
    # sub-request gets ExecutionOrder fileLastOp + 1 == 1
    fileLastOp = rc_o._getLastOrder( 'foo' )
    rc_o.addSubRequest( {'Attributes': {'SubRequestID': 'x',
                                        'CreationTime': '2012-06-06 14:53:43.763743',
                                        'ExecutionOrder': fileLastOp + 1}}, 'someOtherType' )
    subRequestExpected = { 'someType': [ { 'Files': [{'LFN':'foo', 'Status':'Waiting'}],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op1'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 0, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': 'op2'},
                                           'Datasets': [] },
                                         { 'Files': [],
                                           'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                          'ExecutionOrder': 1, 'SubRequestID': 'x',
                                                          'CreationTime': '2012-06-06 14:53:43.763743',
                                                          'Catalogue': '', 'Error': '', 'Operation': ''},
                                           'Datasets': [] } ],
                           'someOtherType': [ { 'Files': [],
                                                'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                               'ExecutionOrder': 2, 'SubRequestID': 'x',
                                                               'CreationTime': '2012-06-06 14:53:43.763743',
                                                               'Catalogue': '', 'Error': '', 'Operation': ''},
                                                'Datasets': [] },
                                              { 'Files': [],
                                                'Attributes': {'Status': 'Waiting', 'LastUpdate': '', 'TargetSE': '',
                                                               'ExecutionOrder': 1, 'SubRequestID': 'x',
                                                               'CreationTime': '2012-06-06 14:53:43.763743',
                                                               'Catalogue': '', 'Error': '', 'Operation': ''},
                                                'Datasets': [] }, ] }
    self.assertEqual( rc_o.subRequests, subRequestExpected )
def getRequest( self, requestType ):
  """ Get a request of a given type eligible for execution.

  Selects the 100 oldest Requests of the given type that have pending
  ('Waiting'/'Assigned') sub-requests, shuffles the candidates to spread load
  between competing agents, atomically marks the 'Waiting' sub-requests of one
  candidate as 'Assigned' and returns the request serialized to XML.

  :param str requestType: request type ('transfer', 'removal', 'register', 'diset')
  :return: S_OK( {'RequestName', 'RequestString', 'JobID'} ) on success,
           S_OK() (no Value) when nothing is eligible, S_ERROR otherwise
  """
  # RG: What if requestType is not given?
  # the first query will return nothing.
  # KC: maybe returning S_ERROR would be enough?
  # alternatively we should check if requestType is known (in 'transfer', 'removal', 'register' and 'diset')
  if not requestType or type( requestType ) not in types.StringTypes:
    return S_ERROR( "Request type not given." )
  myRequestType = self._escapeString( requestType )
  # BUGFIX: the S_OK/S_ERROR result dict is always truthy, so the previous
  # 'if not myRequestType' could never detect an escaping failure -- the 'OK'
  # flag has to be checked instead.
  if not myRequestType['OK']:
    return myRequestType
  myRequestType = myRequestType['Value']
  dmRequest = RequestContainer( init = False )
  requestID = 0
  subIDList = []
  fields = ['RequestID', 'SubRequestID', 'Operation', 'Arguments', 'ExecutionOrder', 'SourceSE',
            'TargetSE', 'Catalogue', 'CreationTime', 'SubmissionTime', 'LastUpdate', 'Status', 'RequestType']
  # get the pending SubRequest sorted by ExecutionOrder and LastUpdate
  req = "SELECT `RequestID`,`ExecutionOrder`,`Status`,`RequestType`,`LastUpdate` FROM `SubRequests` "\
        "WHERE `Status` IN ( 'Waiting', 'Assigned' ) ORDER BY `ExecutionOrder`,`LastUpdate`"
  # now get sorted list of RequestID (according to the above)
  req = "SELECT * FROM ( %s ) as T1 GROUP BY `RequestID`" % req
  # and get the 100 oldest ones of Type requestType
  req = "SELECT `RequestID`,`ExecutionOrder` FROM ( %s ) as T2 WHERE `RequestType`=%s "\
        "ORDER BY `LastUpdate` LIMIT 100" % ( req, myRequestType )
  # and now get all waiting SubRequest for the selected RequestID and ExecutionOrder
  req = "SELECT A.%s FROM SubRequests AS A, ( %s ) AS B WHERE " % ( ', A.'.join( fields ), req )
  req = "%s A.RequestID=B.RequestID AND A.ExecutionOrder=B.ExecutionOrder" % ( req )
  result = self._query( req )
  if not result['OK']:
    err = 'RequestDB._getRequest: Failed to retrieve Requests'
    return S_ERROR( '%s\n%s' % ( err, result['Message'] ) )
  if not result['Value']:
    return S_OK()
  # We get up to 100 Request candidates, to add some randomness
  reqDict = {}
  for row in result['Value']:
    # rows are compared against the escaped (quoted) type string
    if ('"%s"' % row[-1]) != myRequestType:
      continue
    if row[-2] != 'Waiting':
      continue
    reqDict.setdefault( row[0], [] )
    # keep SubRequestID..LastUpdate (strip RequestID, Status, RequestType)
    reqDict[row[0]].append( row[1:-2] )
  reqIDList = reqDict.keys()
  random.shuffle( reqIDList )
  for reqID in reqIDList:
    sidList = [ x[0] for x in reqDict[reqID] ]
    for subID in sidList:
      # the affected-row count of the UPDATE tells us whether we won the race
      req = "UPDATE SubRequests SET Status='Assigned' WHERE RequestID=%s AND SubRequestID=%s;" % ( reqID, subID )
      resAssigned = self._update( req )
      if not resAssigned['OK']:
        if subIDList:
          self.__releaseSubRequests( reqID, subIDList )
        return S_ERROR( 'Failed to assign subrequests: %s' % resAssigned['Message'] )
      if resAssigned['Value'] == 0:
        # Somebody has assigned this request
        gLogger.warn( 'Already assigned subrequest %d of request %d' % ( subID, reqID ) )
      else:
        subIDList.append( subID )
    if subIDList:
      # We managed to get some requests, can continue now
      requestID = reqID
      break
  # Haven't succeeded to get any request
  if not requestID:
    return S_OK()
  dmRequest.setRequestID( requestID )
  fields = ['FileID', 'LFN', 'Size', 'PFN', 'GUID', 'Md5', 'Addler', 'Attempt', 'Status' ]
  for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, \
      creationTime, submissionTime, lastUpdate in reqDict[requestID]:
    if not subRequestID in subIDList:
      continue
    res = dmRequest.initiateSubRequest( requestType )
    ind = res['Value']
    subRequestDict = { 'Status'        : 'Waiting',
                       'SubRequestID'  : subRequestID,
                       'Operation'     : operation,
                       'Arguments'     : arguments,
                       'ExecutionOrder': int( executionOrder ),
                       'SourceSE'      : sourceSE,
                       'TargetSE'      : targetSE,
                       'Catalogue'     : catalogue,
                       'CreationTime'  : creationTime,
                       'SubmissionTime': submissionTime,
                       'LastUpdate'    : lastUpdate }
    res = dmRequest.setSubRequestAttributes( ind, requestType, subRequestDict )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT %s FROM `Files` WHERE `SubRequestID`=%s ORDER BY `FileID`;" % ( ', '.join( fields ), subRequestID )
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    files = []
    for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
      fileDict = {'FileID':fileID, 'LFN':lfn, 'Size':size, 'PFN':pfn, 'GUID':guid,
                  'Md5':md5, 'Addler':addler, 'Attempt':attempt, 'Status':status}
      files.append( fileDict )
    res = dmRequest.setSubRequestFiles( ind, requestType, files )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    datasets = []
    for dataset, status in res['Value']:
      datasets.append( dataset )
    res = dmRequest.setSubRequestDatasets( ind, requestType, datasets )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subIDList )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  fields = ['RequestName', 'JobID', 'OwnerDN', 'OwnerGroup', 'DIRACSetup',
            'SourceComponent', 'CreationTime', 'SubmissionTime', 'LastUpdate']
  req = "SELECT %s FROM `Requests` WHERE `RequestID`=%s;" % ( ', '.join( fields ), requestID )
  res = self._query( req )
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
    self.__releaseSubRequests( requestID, subIDList )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, \
    creationTime, submissionTime, lastUpdate = res['Value'][0]
  dmRequest.setRequestName( requestName )
  dmRequest.setJobID( jobID )
  dmRequest.setOwnerDN( ownerDN )
  dmRequest.setOwnerGroup( ownerGroup )
  dmRequest.setDIRACSetup( diracSetup )
  dmRequest.setSourceComponent( sourceComponent )
  dmRequest.setCreationTime( str( creationTime ) )
  dmRequest.setLastUpdate( str( lastUpdate ) )
  res = dmRequest.toXML()
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % ( requestID )
    self.__releaseSubRequests( requestID, subIDList )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestString = res['Value']
  #still have to manage the status of the dataset properly
  resultDict = {}
  resultDict['RequestName'] = requestName
  resultDict['RequestString'] = requestString
  resultDict['JobID'] = jobID
  return S_OK( resultDict )
def test__getLastOrder(self): # no files req = RequestContainer() res = req._getLastOrder() self.assertEqual(res, 0) self.assertEqual(req.subRequests, {}) req.addSubRequest( { 'Attributes': { 'Operation': 'replicateAndRegister', 'TargetSE': 'SE', 'ExecutionOrder': 0 } }, 'transfer') res = req._getLastOrder() self.assertEqual(res, 0) req.addSubRequest( { 'Attributes': { 'Operation': 'replicateAndRegister', 'TargetSE': 'SE', 'ExecutionOrder': 1 } }, 'transfer') res = req._getLastOrder() self.assertEqual(res, 1) del (req) # with files req = RequestContainer() res = req._getLastOrder('foo') self.assertEqual(res, 0) req.addSubRequest( { 'Attributes': { 'Operation': 'replicateAndRegister', 'TargetSE': 'SE', 'ExecutionOrder': 1 } }, 'transfer') res = req._getLastOrder('foo') self.assertEqual(res, 0) req.setSubRequestFiles(0, 'transfer', [{ 'LFN': 'foo', 'Status': 'Waiting' }]) res = req._getLastOrder('foo') self.assertEqual(res, 1) req.addSubRequest( { 'Attributes': { 'Operation': 'replicateAndRegister', 'TargetSE': 'SE', 'ExecutionOrder': 2 } }, 'removal') res = req._getLastOrder('foo') self.assertEqual(res, 1) req.setSubRequestFiles(0, 'removal', [{ 'LFN': 'foo', 'Status': 'Waiting' }]) res = req._getLastOrder('foo') self.assertEqual(res, 2)
def getRequestForSubRequest(self, itself, subRequestID ):
  """ Select Request given SubRequestID.

  :param self: plugin reference
  :param itself: patient reference for injection
  :param int subRequestID: SubRequests.SubRequestID

  :warn: function has to be injected to RequestDBMySQL instance
  :return: S_OK( { 'RequestName', 'RequestString', 'JobID', 'RequestContainer' } ) or S_ERROR
  """
  ## get RequestID
  requestID = "SELECT RequestID FROM SubRequests WHERE SubRequestID = %s;" % str(subRequestID)
  requestID = self._query( requestID )
  if not requestID["OK"]:
    return requestID
  if not requestID["Value"]:
    ## BUGFIX guard: unknown SubRequestID previously raised an IndexError here
    return S_ERROR( "No RequestID found for SubRequestID %s" % str(subRequestID) )
  ## BUGFIX: _query returns a tuple of row tuples; take the scalar out of the
  ## first row. Previously the whole 1-tuple was kept -- the '%' formats below
  ## only worked by accidental tuple unpacking and setRequestID stored a tuple.
  requestID = requestID["Value"][0][0]
  ## create RequestContainer
  requestContainer = RequestContainer( init = False )
  requestContainer.setRequestID( requestID )
  ## put some basic infos in
  requestInfo = "SELECT RequestName, JobID, OwnerDN, OwnerGroup, DIRACSetup, SourceComponent, CreationTime, SubmissionTime, LastUpdate, Status "
  requestInfo += "FROM Requests WHERE RequestID = %d;" % requestID
  requestInfo = self._query( requestInfo )
  if not requestInfo["OK"]:
    return requestInfo
  if not requestInfo["Value"]:
    ## BUGFIX guard: orphaned sub-request without a parent Request row
    return S_ERROR( "No Request found for RequestID %s" % str(requestID) )
  requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, \
    creationTime, submissionTime, lastUpdate, status = requestInfo['Value'][0]
  requestContainer.setRequestName( requestName )
  requestContainer.setJobID( jobID )
  requestContainer.setOwnerDN( ownerDN )
  requestContainer.setOwnerGroup( ownerGroup )
  requestContainer.setDIRACSetup( diracSetup )
  requestContainer.setSourceComponent( sourceComponent )
  requestContainer.setCreationTime( str( creationTime ) )
  requestContainer.setLastUpdate( str( lastUpdate ) )
  requestContainer.setStatus( status )
  ## get sub-requests
  subRequests = "SELECT SubRequestID, Status, RequestType, Operation, Arguments, ExecutionOrder, SourceSE, "
  subRequests += "TargetSE, Catalogue, CreationTime, SubmissionTime, LastUpdate FROM SubRequests WHERE RequestID=%s;" % requestID
  subRequests = self._query( subRequests )
  if not subRequests["OK"]:
    return subRequests
  ## loop over sub requests
  for subRequestID, status, requestType, operation, arguments, executionOrder, sourceSE, \
      targetSE, catalogue, creationTime, submissionTime, lastUpdate in subRequests["Value"]:
    res = requestContainer.initiateSubRequest( requestType )
    ind = res["Value"]
    subRequestDict = { "Status" : status, "SubRequestID" : subRequestID, "Operation" : operation,
                       "Arguments" : arguments, "ExecutionOrder" : int( executionOrder ),
                       "SourceSE" : sourceSE, "TargetSE" : targetSE, "Catalogue" : catalogue,
                       "CreationTime" : creationTime, "SubmissionTime" : submissionTime,
                       "LastUpdate" : lastUpdate }
    res = requestContainer.setSubRequestAttributes( ind, requestType, subRequestDict )
    if not res["OK"]:
      return res
    ## get files for this subrequest
    req = "SELECT FileID, LFN, Size, PFN, GUID, Md5, Addler, Attempt, Status FROM Files WHERE SubRequestID = %s ORDER BY FileID;" % str(subRequestID)
    res = self._query( req )
    if not res["OK"]:
      return res
    files = []
    for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res["Value"]:
      fileDict = { "FileID" : fileID, "LFN" : lfn, "Size" : size, "PFN" : pfn, "GUID" : guid,
                   "Md5" : md5, "Addler" : addler, "Attempt" : attempt, "Status" : status }
      files.append( fileDict )
    res = requestContainer.setSubRequestFiles( ind, requestType, files )
    if not res["OK"]:
      return res
  ## dump request to XML
  res = requestContainer.toXML()
  if not res["OK"]:
    return res
  requestString = res["Value"]
  ## return dictonary with all info in at least
  return S_OK( { "RequestName" : requestName,
                 "RequestString" : requestString,
                 "JobID" : jobID,
                 "RequestContainer" : requestContainer } )
def getRequest( self, requestType = '' ):
  """ Get a request of a given type.

  Picks up to 50 'Waiting' sub-requests of the given type, shuffles the parent
  RequestIDs for load spreading, marks the sub-requests of one candidate as
  'Assigned' and returns the serialized request.

  :param str requestType: request type; an empty value makes the first query
                          match nothing, so effectively it is required
  :return: S_OK( {'RequestName', 'RequestString', 'JobID'} ), S_OK() (no Value)
           when nothing is eligible, or S_ERROR
  """
  # RG: What if requestType is not given?
  # the first query will return nothing.
  start = time.time()  # NOTE(review): never read afterwards -- candidate for removal
  dmRequest = RequestContainer( init = False )
  requestID = 0
  req = "SELECT RequestID,SubRequestID FROM SubRequests WHERE Status = 'Waiting' AND RequestType = '%s' ORDER BY LastUpdate ASC LIMIT 50;" % requestType
  res = self._query( req )
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  if not res['Value']:
    return S_OK()
  # candidate parent requests, shuffled to spread load between competing agents
  reqIDList = [ x[0] for x in res['Value'] ]
  random.shuffle( reqIDList )
  count = 0  # NOTE(review): incremented but never read
  for reqID in reqIDList:
    count += 1
    if requestType:
      req = "SELECT SubRequestID,Operation,Arguments,ExecutionOrder,SourceSE,TargetSE,Catalogue,CreationTime,SubmissionTime,LastUpdate \
from SubRequests WHERE RequestID=%s AND RequestType='%s' AND Status='%s'" % ( reqID, requestType, 'Waiting' )
    else:
      # RG: What if requestType is not given?
      # we should never get there, and it misses the "AND Status='Waiting'"
      req = "SELECT SubRequestID,Operation,Arguments,ExecutionOrder,SourceSE,TargetSE,Catalogue,CreationTime,SubmissionTime,LastUpdate \
from SubRequests WHERE RequestID=%s" % reqID
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to retrieve SubRequests for RequestID %s' % reqID
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    subIDList = []
    for tuple in res['Value']:  # NOTE(review): 'tuple' shadows the builtin
      subID = tuple[0]
      # RG: We should set the condition "AND Status='Waiting'"
      # if the subrequest has got assigned it will failed
      req = "UPDATE SubRequests SET Status='Assigned' WHERE RequestID=%s AND SubRequestID=%s;" % ( reqID, subID )
      resAssigned = self._update( req )
      if not resAssigned['OK']:
        if subIDList:
          self.__releaseSubRequests( reqID, subIDList )
        return S_ERROR( 'Failed to assign subrequests: %s' % resAssigned['Message'] )
      if resAssigned['Value'] == 0:
        # Somebody has assigned this request
        gLogger.warn( 'Already assigned subrequest %d of request %d' % ( subID, reqID ) )
      else:
        subIDList.append( subID )
    # RG: We need to check that all subRequest with smaller ExecutionOrder are "Done"
    if subIDList:
      # We managed to get some requests, can continue now
      requestID = reqID
      break
  # Haven't succeeded to get any request
  if not requestID:
    return S_OK()
  dmRequest.setRequestID( requestID )
  # RG: We have this list in subIDList, can different queries get part of the subrequets of the same type?
  subRequestIDs = []
  for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in res['Value']:
    if not subRequestID in subIDList:
      continue
    subRequestIDs.append( subRequestID )
    # RG: res['Value'] is the range of the loop and it gets redefined here !!!!!!
    # NOTE(review): the for statement keeps its own reference to the original
    # sequence, so rebinding 'res' below does not break the iteration in CPython
    res = dmRequest.initiateSubRequest( requestType )
    ind = res['Value']
    subRequestDict = { 'Status'        : 'Waiting',
                       'SubRequestID'  : subRequestID,
                       'Operation'     : operation,
                       'Arguments'     : arguments,
                       'ExecutionOrder': int( executionOrder ),
                       'SourceSE'      : sourceSE,
                       'TargetSE'      : targetSE,
                       'Catalogue'     : catalogue,
                       'CreationTime'  : creationTime,
                       'SubmissionTime': submissionTime,
                       'LastUpdate'    : lastUpdate }
    res = dmRequest.setSubRequestAttributes( ind, requestType, subRequestDict )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
      self.__releaseSubRequests( requestID, subRequestIDs )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT FileID,LFN,Size,PFN,GUID,Md5,Addler,Attempt,Status \
from Files WHERE SubRequestID = %s ORDER BY FileID;" % subRequestID
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subRequestIDs )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    files = []
    for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
      fileDict = {'FileID':fileID, 'LFN':lfn, 'Size':size, 'PFN':pfn, 'GUID':guid, 'Md5':md5, 'Addler':addler, 'Attempt':attempt, 'Status':status}
      files.append( fileDict )
    res = dmRequest.setSubRequestFiles( ind, requestType, files )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subRequestIDs )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
    res = self._query( req )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subRequestIDs )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
    datasets = []
    for dataset, status in res['Value']:
      datasets.append( dataset )
    res = dmRequest.setSubRequestDatasets( ind, requestType, datasets )
    if not res['OK']:
      err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % ( requestID, subRequestID )
      self.__releaseSubRequests( requestID, subRequestIDs )
      return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  req = "SELECT RequestName,JobID,OwnerDN,OwnerGroup,DIRACSetup,SourceComponent,CreationTime,SubmissionTime,LastUpdate from Requests WHERE RequestID = %s;" % requestID
  res = self._query( req )
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
    self.__releaseSubRequests( requestID, subRequestIDs )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate = res['Value'][0]
  dmRequest.setRequestName( requestName )
  dmRequest.setJobID( jobID )
  dmRequest.setOwnerDN( ownerDN )
  dmRequest.setOwnerGroup( ownerGroup )
  dmRequest.setDIRACSetup( diracSetup )
  dmRequest.setSourceComponent( sourceComponent )
  dmRequest.setCreationTime( str( creationTime ) )
  dmRequest.setLastUpdate( str( lastUpdate ) )
  res = dmRequest.toXML()
  if not res['OK']:
    err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % ( requestID )
    self.__releaseSubRequests( requestID, subRequestIDs )
    return S_ERROR( '%s\n%s' % ( err, res['Message'] ) )
  requestString = res['Value']
  #still have to manage the status of the dataset properly
  resultDict = {}
  resultDict['RequestName'] = requestName
  resultDict['RequestString'] = requestString
  resultDict['JobID'] = jobID
  return S_OK( resultDict )
class UploadOutputData(ModuleBase):
  """ As name suggest: upload output data. For Production only: See L{UserJobFinalization} for User job upload.
  """
  #############################################################################
  def __init__(self):
    """Module initialization.
    """
    super(UploadOutputData, self).__init__()
    self.version = __RCSID__
    self.log = gLogger.getSubLogger( "UploadOutputData" )
    self.commandTimeOut = 10*60
    self.enable = True
    self.failoverTest = False #flag to put file to failover SE by default
    self.failoverSEs = gConfig.getValue('/Resources/StorageElementGroups/Tier1-Failover', [])
    self.ops = Operations()
    #List all parameters here
    self.outputDataFileMask = ''
    self.outputMode = 'Any' #or 'Local' for reco case
    self.outputList = []
    self.request = None
    self.PRODUCTION_ID = ""
    self.prodOutputLFNs = []
    self.experiment = "CLIC"

  #############################################################################
  def applicationSpecificInputs(self):
    """ By convention the module parameters are resolved here.

    Pulls Enable/TestFailover flags, the request object, and the list of
    production output files out of workflow_commons / step_commons.
    """
    self.log.verbose("Workflow commons:")
    self.log.verbose(self.workflow_commons)
    self.log.verbose("Step commons:")
    self.log.verbose(self.step_commons)

    if self.step_commons.has_key('Enable'):
      self.enable = self.step_commons['Enable']
      if not type(self.enable) == type(True):
        self.log.warn('Enable flag set to non-boolean value %s, setting to False' % self.enable)
        self.enable = False

    if self.step_commons.has_key('TestFailover'):
      #BUGFIX: this used to assign to self.enable (copy-paste), clobbering the
      #Enable flag and never updating self.failoverTest -- the type check just
      #below always inspected self.failoverTest, confirming the intent.
      self.failoverTest = self.step_commons['TestFailover']
      if not type(self.failoverTest) == type(True):
        self.log.warn('Test failover flag set to non-boolean value %s, setting to False' % self.failoverTest)
        self.failoverTest = False

    if self.workflow_commons.has_key("PRODUCTION_ID"):
      self.PRODUCTION_ID = self.workflow_commons["PRODUCTION_ID"]

    if os.environ.has_key('JOBID'):
      self.log.verbose('Found WMS JobID = %s' % self.jobID)
    else:
      #running outside of the WMS: never upload anything for real
      self.log.info('No WMS JobID found, disabling module via control flag')
      self.enable = False

    if self.workflow_commons.has_key('Request'):
      self.request = self.workflow_commons['Request']
    else:
      self.request = RequestContainer()
      self.request.setRequestName('job_%s_request.xml' % self.jobID)
      self.request.setJobID(self.jobID)
      self.request.setSourceComponent("Job_%s" % self.jobID)

    ##This is the thing that is used to establish the list of outpufiles to treat:
    ## Make sure that all that is in the : "listoutput" and also in the ProductionData
    ## is treated properly. Needed as whatever is in listoutput does not contain any reference to the
    ## prodID and task ID. Also if for some reason a step failed, then the corresponding data will not be there
    if self.workflow_commons.has_key('outputList'):
      self.outputList = self.workflow_commons['outputList']
      if self.workflow_commons.has_key('ProductionOutputData'):
        proddata = self.workflow_commons['ProductionOutputData'].split(";")
        self.log.verbose("prod data : %s" % proddata )
        olist = {}
        for obj in self.outputList:
          fname_in_outputlist = obj['outputFile'].lower()
          #extension is stripped so that listoutput names can be matched
          #against the prodID/taskID-decorated ProductionOutputData names
          extension = ''
          if fname_in_outputlist.count("_sim") or fname_in_outputlist.count("_rec") or fname_in_outputlist.count("_dst"):
            extension = ".slcio"
          elif fname_in_outputlist.count("_gen"):
            extension = ".stdhep"
          fname_in_outputlist = fname_in_outputlist.replace(extension,"")
          for prodfile in proddata:
            prodfile = os.path.basename(prodfile)
            extension = ''
            if prodfile.count("_sim") or prodfile.count("_rec") or prodfile.count("_dst"):
              extension = ".slcio"
            elif prodfile.count("_gen"):
              extension = ".stdhep"
            prodfile = prodfile.replace(extension,"")
            if olist.has_key(prodfile):
              ## This has already been treated, no need to come back to it.
              continue
            appdict = {}
            if (fname_in_outputlist.count("_gen")):# and prodfile.lower().count("_gen_")) :
              genf = obj['outputFile'].split("_gen")[0]
              genf += "_gen"
              if (prodfile.count(genf)):
                appdict.update(obj)
                appdict['outputFile'] = prodfile+extension
                olist[prodfile] = appdict
            if (fname_in_outputlist.count("_sim")):
              simf = obj['outputFile'].split("_sim")[0]
              simf += "_sim"
              if (prodfile.count(simf)):
                appdict.update(obj)
                appdict['outputFile'] = prodfile+extension
                olist[prodfile] = appdict
                self.log.verbose('olist %s'%olist)
            if (fname_in_outputlist.count("_rec")):
              recf = obj['outputFile'].split("_rec")[0]
              recf += "_rec"
              if (prodfile.count(recf)):
                appdict.update(obj)
                appdict['outputFile'] = prodfile+extension
                olist[prodfile] = appdict
                break
            if (fname_in_outputlist.count("_dst") and prodfile.lower().count("_dst_")):
              dstf = obj['outputFile'].split("_dst")[0]
              dstf += "_dst"
              if (prodfile.count(dstf)):
                appdict.update(obj)
                appdict['outputFile'] = prodfile+extension
                olist[prodfile] = appdict
                break
        self.outputList = olist.values()
      else:
        olist = []
        for obj in self.outputList:
          appdict = obj
          appdict['outputFile'] = getProdFilename(obj['outputFile'],
                                                  int(self.workflow_commons["PRODUCTION_ID"]),
                                                  int(self.workflow_commons["JOB_ID"]))
          olist.append(appdict)
        self.outputList = olist
      self.log.verbose("OutputList : %s" % self.outputList)

    if self.workflow_commons.has_key('outputMode'):
      self.outputMode = self.workflow_commons['outputMode']

    if self.workflow_commons.has_key('outputDataFileMask'):
      self.outputDataFileMask = self.workflow_commons['outputDataFileMask']
      if not type(self.outputDataFileMask) == type([]):
        self.outputDataFileMask = [i.lower().strip() for i in self.outputDataFileMask.split(';')]

    #result = constructProductionLFNs(self.workflow_commons)
    #if not result['OK']:
    #  self.log.error('Could not create production LFNs',result['Message'])
    #  return result
    #self.prodOutputLFNs=result['Value']['ProductionOutputData']
    if self.workflow_commons.has_key('ProductionOutputData'):
      self.prodOutputLFNs = self.workflow_commons['ProductionOutputData'].split(";")
    else:
      self.prodOutputLFNs = []

    return S_OK('Parameters resolved')

  #############################################################################
  def execute(self):
    """ Main execution function.

    Uploads the candidate output files to their destination SEs, falling back
    to the failover SEs (and setting recovery requests) when needed; cleans
    everything up if even the failover upload fails.
    """
    self.log.info('Initializing %s' % self.version)
    result = self.resolveInputVariables()
    if not result['OK']:
      self.log.error(result['Message'])
      return result
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      self.log.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('No output data upload attempted')

    ##determine the experiment
    #BUGFIX guard: an empty ProductionOutputData list used to raise IndexError here
    example_file = self.prodOutputLFNs[0] if self.prodOutputLFNs else ''
    if "/ilc/prod/clic" in example_file:
      self.experiment = "CLIC"
    elif "/ilc/prod/ilc/sid" in example_file:
      self.experiment = 'ILC_SID'
    elif "/ilc/prod/ilc/mc-dbd" in example_file:
      self.experiment = 'ILC_ILD'
    else:
      self.log.warn("Failed to determine experiment, reverting to default")

    #Determine the final list of possible output files for the
    #workflow and all the parameters needed to upload them.
    result = self.getCandidateFiles(self.outputList, self.prodOutputLFNs, self.outputDataFileMask)
    if not result['OK']:
      self.setApplicationStatus(result['Message'])
      return result
    fileDict = result['Value']
    result = self.getFileMetadata(fileDict)
    if not result['OK']:
      self.setApplicationStatus(result['Message'])
      return result
    if not result['Value']:
      self.log.info('No output data files were determined to be uploaded for this workflow')
      return S_OK()
    fileMetadata = result['Value']

    #Get final, resolved SE list for files
    final = {}
    for fileName, metadata in fileMetadata.items():
      result = getDestinationSEList(metadata['workflowSE'], DIRAC.siteName(), self.outputMode)
      if not result['OK']:
        self.log.error('Could not resolve output data SE', result['Message'])
        self.setApplicationStatus('Failed To Resolve OutputSE')
        return result
      resolvedSE = result['Value']
      final[fileName] = metadata
      final[fileName]['resolvedSE'] = resolvedSE

    self.log.info('The following files will be uploaded: %s' % (string.join(final.keys(), ', ')))
    for fileName, metadata in final.items():
      self.log.info('--------%s--------' % fileName)
      for n, v in metadata.items():
        self.log.info('%s = %s' % (n, v))

    #At this point can exit and see exactly what the module would have uploaded
    if not self.enable:
      self.log.info('Module is disabled by control flag, would have attempted to upload the \
following files %s' % string.join(final.keys(), ', '))
      return S_OK('Module is disabled by control flag')

    #Disable the watchdog check in case the file uploading takes a long time
    self.log.info('Creating DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK in order to disable the Watchdog prior to upload')
    fopen = open('DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK','w')
    fopen.write('%s' % time.asctime())
    fopen.close()

    #Instantiate the failover transfer client with the global request object
    failoverTransfer = FailoverTransfer(self.request)
    catalogs = ['FileCatalog', 'LcgFileCatalog']

    #One by one upload the files with failover if necessary
    failover = {}
    if not self.failoverTest:
      for fileName, metadata in final.items():
        self.log.info("Attempting to store file %s to the following SE(s):\n%s" % (fileName,
                      string.join(metadata['resolvedSE'], ', ')))
        result = failoverTransfer.transferAndRegisterFile(fileName, metadata['localpath'], metadata['lfn'],
                                                          metadata['resolvedSE'], fileGUID = metadata['guid'],
                                                          fileCatalog = catalogs)
        if not result['OK']:
          self.log.error('Could not transfer and register %s with metadata:\n %s' % (fileName, metadata))
          failover[fileName] = metadata
        else:
          lfn = metadata['lfn']
    else:
      #failover-test mode: push everything through the failover path
      failover = final

    self.failoverSEs = self.ops.getValue("Production/%s/FailOverSE" % self.experiment, self.failoverSEs)

    cleanUp = False
    for fileName, metadata in failover.items():
      self.log.info('Setting default catalog for failover transfer to FileCatalog')
      random.shuffle(self.failoverSEs)
      targetSE = metadata['resolvedSE'][0]
      metadata['resolvedSE'] = self.failoverSEs
      result = failoverTransfer.transferAndRegisterFileFailover(fileName, metadata['localpath'], metadata['lfn'],
                                                                targetSE, metadata['resolvedSE'],
                                                                fileGUID = metadata['guid'], fileCatalog = catalogs)
      if not result['OK']:
        self.log.error('Could not transfer and register %s with metadata:\n %s' % (fileName, metadata))
        cleanUp = True
        break #no point continuing if one completely fails

    os.remove("DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK") #cleanup the mess

    #Now after all operations, retrieve potentially modified request object
    result = failoverTransfer.getRequestObject()
    if not result['OK']:
      self.log.error(result)
      return S_ERROR('Could not retrieve modified request')
    self.request = result['Value']

    #If some or all of the files failed to be saved to failover
    if cleanUp:
      lfns = []
      for fileName, metadata in final.items():
        lfns.append(metadata['lfn'])
      result = self.__cleanUp(lfns)
      self.workflow_commons['Request'] = self.request
      return S_ERROR('Failed to upload output data')

    # #Can now register the successfully uploaded files in the BK
    # if not performBKRegistration:
    #   self.log.info('There are no files to perform the BK registration for, all could be saved to failover')
    # else:
    #   rm = ReplicaManager()
    #   result = rm.addCatalogFile(performBKRegistration,catalogs=['BookkeepingDB'])
    #   self.log.verbose(result)
    #   if not result['OK']:
    #     self.log.error(result)
    #     return S_ERROR('Could Not Perform BK Registration')
    #   if result['Value']['Failed']:
    #     for lfn,error in result['Value']['Failed'].items():
    #       self.log.info('BK registration for %s failed with message: "%s" setting failover request' %(lfn,error))
    #       result = self.request.addSubRequest({'Attributes':{'Operation':'registerFile','ExecutionOrder':0, 'Catalogue':'BookkeepingDB'}},'register')
    #       if not result['OK']:
    #         self.log.error('Could not set registerFile request:\n%s' %result)
    #         return S_ERROR('Could Not Set BK Registration Request')
    #       fileDict = {'LFN':lfn,'Status':'Waiting'}
    #       index = result['Value']
    #       self.request.setSubRequestFiles(index,'register',[fileDict])

    self.workflow_commons['Request'] = self.request
    return S_OK('Output data uploaded')

  #############################################################################
  def __cleanUp(self, lfnList):
    """ Clean up uploaded data for the LFNs in the list """
    # Clean up the current request: drop pending transfer/register sub-requests
    # that concern the given LFNs
    for req_type in ['transfer', 'register']:
      for lfn in lfnList:
        result = self.request.getNumSubRequests(req_type)
        if result['OK']:
          nreq = result['Value']
          if nreq:
            # Go through subrequests in reverse order in order not to spoil the numbering
            ind_range = [0]
            if nreq > 1:
              ind_range = range(nreq-1, -1, -1)
            for i in ind_range:
              result = self.request.getSubRequestFiles(i, req_type)
              if result['OK']:
                fileList = result['Value']
                if fileList[0]['LFN'] == lfn:
                  result = self.request.removeSubRequest(i, req_type)
    # Set removal requests just in case
    for lfn in lfnList:
      result = self.request.addSubRequest({'Attributes': {'Operation' : 'removeFile',
                                                          'TargetSE' : '',
                                                          'ExecutionOrder' : 1}}, 'removal')
      index = result['Value']
      fileDict = {'LFN':lfn, 'PFN':'', 'Status':'Waiting'}
      self.request.setSubRequestFiles(index, 'removal', [fileDict])
    return S_OK()

#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient reqClient = ReqClient() requestType = 'transfer' requestOperation = 'replicateAndRegister' for lfnList in breakListIntoChunks( lfns, 100 ): oRequest = RequestContainer() subRequestIndex = oRequest.initiateSubRequest( requestType )['Value'] attributeDict = {'Operation':requestOperation, 'TargetSE':targetSE} oRequest.setSubRequestAttributes( subRequestIndex, requestType, attributeDict ) files = [] for lfn in lfnList: files.append( {'LFN':lfn} ) oRequest.setSubRequestFiles( subRequestIndex, requestType, files ) requestName = "%s_%s" % ( md5( repr( time.time() ) ).hexdigest()[:16], md5( repr( time.time() ) ).hexdigest()[:16] ) oRequest.setRequestAttributes( {'RequestName':requestName} ) DIRAC.gLogger.info( oRequest.toXML()['Value'] ) result = reqClient.setRequest( requestName, oRequest.toXML()['Value'] ) if result['OK']: print 'Submitted Request:', result['Value'] else: print 'Failed to submit Request', result['Message'] if monitor: requestID = result['Value'] while True: result = reqClient.getRequestStatus( requestID ) if not result['OK']:
class FailoverTransfer:
    """Upload files with failover.

    Tries each destination SE in turn; when only the catalog registration step
    fails, or when the upload has to go to a failover SE, the corresponding
    recovery operations (registration, replication, replica removal) are
    recorded as sub-requests in a RequestContainer so they can be executed
    asynchronously later.  Callers retrieve the accumulated request with
    getRequestObject().
    """

    #############################################################################
    def __init__(self, requestObject=False):
        """ Constructor function, can specify request object to instantiate
            FailoverTransfer or a new request object is created.
        """
        self.log = gLogger.getSubLogger("FailoverTransfer")
        self.rm = ReplicaManager()
        # Accumulates all failover sub-requests set by this helper
        self.request = requestObject
        if not self.request:
            self.request = RequestContainer()
            self.request.setRequestName('default_request.xml')
            self.request.setSourceComponent('FailoverTransfer')

    #############################################################################
    def transferAndRegisterFile(self, fileName, localPath, lfn, destinationSEList,
                                fileGUID=None, fileCatalog=None):
        """Performs the transfer and register operation with failover.

        Each SE in destinationSEList is attempted in order:
          - full success returns S_OK({'uploadedSE': se, 'lfn': lfn});
          - upload OK but registration failed: a registration request is set
            and S_OK(metadata) with 'registration': 'request' is returned;
          - any other failure moves on to the next SE.
        Returns S_ERROR only when every SE failed.
        """
        errorList = []
        for se in destinationSEList:
            self.log.info(
                'Attempting rm.putAndRegister("%s","%s","%s",guid="%s",catalog="%s")'
                % (lfn, localPath, se, fileGUID, fileCatalog))
            result = self.rm.putAndRegister(lfn, localPath, se, guid=fileGUID, catalog=fileCatalog)
            self.log.verbose(result)
            if not result['OK']:
                self.log.error('rm.putAndRegister failed with message', result['Message'])
                errorList.append(result['Message'])
                continue
            if not result['Value']['Failed']:
                self.log.info('rm.putAndRegister successfully uploaded %s to %s' % (fileName, se))
                return S_OK({'uploadedSE': se, 'lfn': lfn})
            # Now we know something went wrong
            errorDict = result['Value']['Failed'][lfn]
            # 'in' instead of deprecated dict.has_key() (removed in Python 3)
            if 'register' not in errorDict:
                self.log.error('rm.putAndRegister failed with unknown error', str(errorDict))
                errorList.append('Unknown error while attempting upload to %s' % se)
                continue
            fileDict = errorDict['register']
            # Therefore the registration failed but the upload was successful
            if not fileCatalog:
                fileCatalog = ''
            result = self.__setRegistrationRequest(fileDict['LFN'], se, fileCatalog, fileDict)
            if not result['OK']:
                self.log.error(
                    'Failed to set registration request for: SE %s and metadata: \n%s' % (se, fileDict))
                errorList.append(
                    'Failed to set registration request for: SE %s and metadata: \n%s' % (se, fileDict))
                continue
            else:
                self.log.info(
                    'Successfully set registration request for: SE %s and metadata: \n%s' % (se, fileDict))
                metadata = {}
                metadata['filedict'] = fileDict
                metadata['uploadedSE'] = se
                metadata['lfn'] = lfn
                metadata['registration'] = 'request'
                return S_OK(metadata)
        self.log.error(
            'Encountered %s errors during attempts to upload output data' % len(errorList))
        return S_ERROR('Failed to upload output data file')

    #############################################################################
    def transferAndRegisterFileFailover(self, fileName, localPath, lfn, targetSE,
                                        failoverSEList, fileGUID=None, fileCatalog=None):
        """Performs the transfer and register operation to failover storage and sets the
           necessary replication and removal requests to recover.

        Returns S_OK on success, or the first failing sub-operation's S_ERROR.
        """
        failover = self.transferAndRegisterFile(fileName, localPath, lfn, failoverSEList,
                                                fileGUID, fileCatalog)
        if not failover['OK']:
            self.log.error('Could not upload file to failover SEs', failover['Message'])
            return failover
        # set removal requests and replication requests
        result = self.__setFileReplicationRequest(lfn, targetSE)
        if not result['OK']:
            self.log.error('Could not set file replication request', result['Message'])
            return result
        lfn = failover['Value']['lfn']
        failoverSE = failover['Value']['uploadedSE']
        self.log.info(
            'Attempting to set replica removal request for LFN %s at failover SE %s' % (lfn, failoverSE))
        result = self.__setReplicaRemovalRequest(lfn, failoverSE)
        if not result['OK']:
            self.log.error('Could not set removal request', result['Message'])
            return result
        return S_OK('%s uploaded to a failover SE' % fileName)

    #############################################################################
    def getRequestObject(self):
        """Returns the potentially modified request object in order to propagate changes.
        """
        return S_OK(self.request)

    #############################################################################
    def __setFileReplicationRequest(self, lfn, se):
        """ Sets a replication request for lfn to SE se ('transfer' sub-request).
        """
        self.log.info('Setting replication request for %s to %s' % (lfn, se))
        result = self.request.addSubRequest(
            {'Attributes': {'Operation': 'replicateAndRegister',
                            'TargetSE': se,
                            'ExecutionOrder': 0}},
            'transfer')
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'transfer', [fileDict])
        return S_OK()

    #############################################################################
    def __setRegistrationRequest(self, lfn, se, catalog, fileDict):
        """ Sets a registration request.
        """
        self.log.info('Setting registration request for %s at %s.' % (lfn, se))
        result = self.request.addSubRequest(
            {'Attributes': {'Operation': 'registerFile',
                            'ExecutionOrder': 0,
                            'TargetSE': se,
                            'Catalogue': catalog}},
            'register')
        if not result['OK']:
            return result
        index = result['Value']
        # 'in' instead of deprecated dict.has_key() (removed in Python 3)
        if 'Status' not in fileDict:
            fileDict['Status'] = 'Waiting'
        self.request.setSubRequestFiles(index, 'register', [fileDict])
        return S_OK()

    #############################################################################
    def __setReplicaRemovalRequest(self, lfn, se):
        """ Sets a removal request for a replica.
        """
        result = self.request.addSubRequest(
            {'Attributes': {'Operation': 'replicaRemoval',
                            'TargetSE': se,
                            'ExecutionOrder': 1}},
            'removal')
        # Bug fix: a failed addSubRequest returns S_ERROR (no 'Value' key),
        # which previously raised KeyError here
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()

    #############################################################################
    def __setFileRemovalRequest(self, lfn, se='', pfn=''):
        """ Sets a removal request for a file including all replicas.
        """
        result = self.request.addSubRequest(
            {'Attributes': {'Operation': 'removeFile',
                            'TargetSE': se,
                            'ExecutionOrder': 1}},
            'removal')
        # Bug fix: check 'OK' before accessing 'Value' (see replica removal above)
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN': lfn, 'PFN': pfn, 'Status': 'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()
class FailoverTransfer:
    """Upload files with failover (unformatted duplicate of the FailoverTransfer
    class appearing earlier in this file).

    Tries each destination SE in turn; when only the catalog registration step
    fails, or when the upload has to go to a failover SE, the corresponding
    recovery operations are recorded as sub-requests in a RequestContainer.
    """

    #############################################################################
    def __init__(self, requestObject=False):
        """ Constructor function, can specify request object to instantiate
            FailoverTransfer or a new request object is created.
        """
        self.log = gLogger.getSubLogger( "FailoverTransfer" )
        self.rm = ReplicaManager()
        # Accumulates all failover sub-requests set by this helper
        self.request = requestObject
        if not self.request:
            self.request = RequestContainer()
            self.request.setRequestName('default_request.xml')
            self.request.setSourceComponent('FailoverTransfer')

    #############################################################################
    def transferAndRegisterFile(self, fileName, localPath, lfn, destinationSEList, fileGUID=None, fileCatalog=None):
        """Performs the transfer and register operation with failover.

        Returns S_OK with upload metadata on (possibly partial) success,
        S_ERROR only when every SE in destinationSEList failed.
        """
        errorList = []
        for se in destinationSEList:
            self.log.info('Attempting rm.putAndRegister("%s","%s","%s",guid="%s",catalog="%s")' %(lfn, localPath, se, fileGUID, fileCatalog))
            result = self.rm.putAndRegister(lfn, localPath, se, guid=fileGUID, catalog=fileCatalog)
            self.log.verbose(result)
            if not result['OK']:
                self.log.error('rm.putAndRegister failed with message', result['Message'])
                errorList.append(result['Message'])
                continue
            if not result['Value']['Failed']:
                self.log.info('rm.putAndRegister successfully uploaded %s to %s' %(fileName, se))
                return S_OK({'uploadedSE':se, 'lfn':lfn})
            #Now we know something went wrong
            errorDict = result['Value']['Failed'][lfn]
            # NOTE(review): dict.has_key() is Python-2-only (removed in Python 3)
            if not errorDict.has_key('register'):
                self.log.error('rm.putAndRegister failed with unknown error', str(errorDict))
                errorList.append('Unknown error while attempting upload to %s' %se)
                continue
            fileDict = errorDict['register']
            #Therefore the registration failed but the upload was successful
            if not fileCatalog:
                fileCatalog = ''
            result = self.__setRegistrationRequest(fileDict['LFN'], se, fileCatalog, fileDict)
            if not result['OK']:
                self.log.error('Failed to set registration request for: SE %s and metadata: \n%s' %(se, fileDict))
                errorList.append('Failed to set registration request for: SE %s and metadata: \n%s' %(se, fileDict))
                continue
            else:
                self.log.info('Successfully set registration request for: SE %s and metadata: \n%s' %(se, fileDict))
                metadata = {}
                metadata['filedict'] = fileDict
                metadata['uploadedSE'] = se
                metadata['lfn'] = lfn
                metadata['registration'] = 'request'
                return S_OK(metadata)
        self.log.error('Encountered %s errors during attempts to upload output data' %len(errorList))
        return S_ERROR('Failed to upload output data file')

    #############################################################################
    def transferAndRegisterFileFailover(self, fileName, localPath, lfn, targetSE, failoverSEList, fileGUID=None, fileCatalog=None):
        """Performs the transfer and register operation to failover storage and sets the
           necessary replication and removal requests to recover.
        """
        failover = self.transferAndRegisterFile(fileName, localPath, lfn, failoverSEList, fileGUID, fileCatalog)
        if not failover['OK']:
            self.log.error('Could not upload file to failover SEs', failover['Message'])
            return failover
        #set removal requests and replication requests
        result = self.__setFileReplicationRequest(lfn, targetSE)
        if not result['OK']:
            self.log.error('Could not set file replication request', result['Message'])
            return result
        lfn = failover['Value']['lfn']
        failoverSE = failover['Value']['uploadedSE']
        self.log.info('Attempting to set replica removal request for LFN %s at failover SE %s' %(lfn, failoverSE))
        result = self.__setReplicaRemovalRequest(lfn, failoverSE)
        if not result['OK']:
            self.log.error('Could not set removal request', result['Message'])
            return result
        return S_OK('%s uploaded to a failover SE' %fileName)

    #############################################################################
    def getRequestObject(self):
        """Returns the potentially modified request object in order to propagate changes.
        """
        return S_OK(self.request)

    #############################################################################
    def __setFileReplicationRequest(self, lfn, se):
        """ Sets a replication request ('transfer' sub-request) for lfn to SE se.
        """
        self.log.info('Setting replication request for %s to %s' % (lfn, se))
        result = self.request.addSubRequest({'Attributes':{'Operation':'replicateAndRegister',
                                                           'TargetSE':se, 'ExecutionOrder':0}},
                                            'transfer')
        if not result['OK']:
            return result
        index = result['Value']
        fileDict = {'LFN':lfn, 'Status':'Waiting'}
        self.request.setSubRequestFiles(index, 'transfer', [fileDict])
        return S_OK()

    #############################################################################
    def __setRegistrationRequest(self, lfn, se, catalog, fileDict):
        """ Sets a registration request.
        """
        self.log.info('Setting registration request for %s at %s.' % (lfn, se))
        result = self.request.addSubRequest({'Attributes':{'Operation':'registerFile', 'ExecutionOrder':0,
                                                           'TargetSE':se, 'Catalogue':catalog}},
                                            'register')
        if not result['OK']:
            return result
        index = result['Value']
        # NOTE(review): dict.has_key() is Python-2-only (removed in Python 3)
        if not fileDict.has_key('Status'):
            fileDict['Status'] = 'Waiting'
        self.request.setSubRequestFiles(index, 'register', [fileDict])
        return S_OK()

    #############################################################################
    def __setReplicaRemovalRequest(self, lfn, se):
        """ Sets a removal request for a replica.
        """
        result = self.request.addSubRequest({'Attributes':{'Operation':'replicaRemoval',
                                                           'TargetSE':se, 'ExecutionOrder':1}},
                                            'removal')
        # NOTE(review): result['OK'] is not checked before taking 'Value';
        # a failed addSubRequest (S_ERROR has no 'Value' key) raises KeyError
        index = result['Value']
        fileDict = {'LFN':lfn, 'Status':'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()

    #############################################################################
    def __setFileRemovalRequest(self, lfn, se='', pfn=''):
        """ Sets a removal request for a file including all replicas.
        """
        result = self.request.addSubRequest({'Attributes':{'Operation':'removeFile',
                                                           'TargetSE':se, 'ExecutionOrder':1}},
                                            'removal')
        # NOTE(review): same unchecked result['Value'] access as above
        index = result['Value']
        fileDict = {'LFN':lfn, 'PFN':pfn, 'Status':'Waiting'}
        self.request.setSubRequestFiles(index, 'removal', [fileDict])
        return S_OK()
    def getRequest(self, requestType):
        """ Get a request of a given type eligible for execution

            Selects up to 100 of the oldest requests with Waiting/Assigned
            sub-requests of the given type, picks one at random, marks its
            Waiting sub-requests 'Assigned' (rolling back via
            __releaseSubRequests on any failure), then serialises the whole
            request to XML.

            Returns S_OK({'RequestName', 'RequestString', 'JobID'}),
            S_OK() with no value when nothing is eligible, or S_ERROR.
        """
        # RG: What if requestType is not given?
        # the first query will return nothing.
        # KC: maybe returning S_ERROR would be enough?
        # alternatively we should check if requestType is known (in 'transfer', 'removal', 'register' and 'diset')
        if not requestType or type(requestType) not in types.StringTypes:
            return S_ERROR("Request type not given.")
        myRequestType = self._escapeString(requestType)
        if not myRequestType:
            return myRequestType
        myRequestType = myRequestType['Value']
        # NOTE(review): 'start' is never used after this point
        start = time.time()
        dmRequest = RequestContainer(init=False)
        requestID = 0
        subIDList = []
        fields = ['RequestID', 'SubRequestID', 'Operation', 'Arguments',
                  'ExecutionOrder', 'SourceSE', 'TargetSE', 'Catalogue',
                  'CreationTime', 'SubmissionTime', 'LastUpdate']
        # get the pending SubRequest sorted by ExecutionOrder and LastUpdate
        req = "SELECT RequestID, ExecutionOrder, Status, RequestType, LastUpdate from SubRequests WHERE Status IN ( 'Waiting', 'Assigned' ) ORDER BY ExecutionOrder, LastUpdate"
        # now get sorted list of RequestID (according to the above)
        req = "SELECT * from ( %s ) as T1 GROUP BY RequestID" % req
        # and get the 100 oldest ones of Type requestType
        req = "SELECT RequestID, ExecutionOrder FROM ( %s ) as T2 WHERE RequestType = %s ORDER BY LastUpdate limit 100" % (req, myRequestType)
        # and now get all waiting SubRequest for the selected RequestID and ExecutionOrder
        req = "SELECT A.%s FROM SubRequests AS A, ( %s ) AS B WHERE " % (', A.'.join(fields), req)
        req = "%s A.RequestID = B.RequestID AND A.ExecutionOrder = B.ExecutionOrder AND A.Status = 'Waiting' AND A.RequestType = %s;" % (req, myRequestType)
        result = self._query(req)
        if not result['OK']:
            err = 'RequestDB._getRequest: Failed to retrieve Requests'
            return S_ERROR('%s\n%s' % (err, result['Message']))
        if not result['Value']:
            # Nothing eligible: S_OK without a value
            return S_OK()
        # We get up to 100 Request candidates, to add some randomness
        # Group rows by RequestID: {RequestID: [row-tail, ...]}
        reqDict = {}
        for row in result['Value']:
            reqDict.setdefault(row[0], [])
            reqDict[row[0]].append(row[1:])
        reqIDList = reqDict.keys()
        # NOTE(review): random.shuffle needs a list; in Python 3 dict.keys()
        # is a view, so this line is Python-2-only as written
        random.shuffle(reqIDList)
        for reqID in reqIDList:
            sidList = [x[0] for x in reqDict[reqID]]
            for subID in sidList:
                req = "UPDATE SubRequests SET Status='Assigned' WHERE RequestID=%s AND SubRequestID=%s;" % (reqID, subID)
                resAssigned = self._update(req)
                if not resAssigned['OK']:
                    if subIDList:
                        self.__releaseSubRequests(reqID, subIDList)
                    return S_ERROR('Failed to assign subrequests: %s' % resAssigned['Message'])
                if resAssigned['Value'] == 0:
                    # Somebody has assigned this request
                    gLogger.warn('Already assigned subrequest %d of request %d' % (subID, reqID))
                else:
                    subIDList.append(subID)
            if subIDList:
                # We managed to get some requests, can continue now
                requestID = reqID
                break
        # Haven't succeeded to get any request
        if not requestID:
            return S_OK()
        dmRequest.setRequestID(requestID)
        fields = ['FileID', 'LFN', 'Size', 'PFN', 'GUID', 'Md5', 'Addler',
                  'Attempt', 'Status']
        for subRequestID, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in reqDict[requestID]:
            # Only rebuild the sub-requests we actually managed to assign
            if not subRequestID in subIDList:
                continue
            res = dmRequest.initiateSubRequest(requestType)
            ind = res['Value']
            subRequestDict = {'Status': 'Waiting', 'SubRequestID': subRequestID,
                              'Operation': operation, 'Arguments': arguments,
                              'ExecutionOrder': int(executionOrder),
                              'SourceSE': sourceSE, 'TargetSE': targetSE,
                              'Catalogue': catalogue, 'CreationTime': creationTime,
                              'SubmissionTime': submissionTime,
                              'LastUpdate': lastUpdate}
            res = dmRequest.setSubRequestAttributes(ind, requestType, subRequestDict)
            if not res['OK']:
                err = 'RequestDB._getRequest: Failed to set subRequest attributes for RequestID %s' % requestID
                self.__releaseSubRequests(requestID, subIDList)
                return S_ERROR('%s\n%s' % (err, res['Message']))
            req = "SELECT %s FROM Files WHERE SubRequestID = %s ORDER BY FileID;" % (', '.join(fields), subRequestID)
            res = self._query(req)
            if not res['OK']:
                err = 'RequestDB._getRequest: Failed to get File attributes for RequestID %s.%s' % (requestID, subRequestID)
                self.__releaseSubRequests(requestID, subIDList)
                return S_ERROR('%s\n%s' % (err, res['Message']))
            files = []
            for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res['Value']:
                fileDict = {'FileID': fileID, 'LFN': lfn, 'Size': size,
                            'PFN': pfn, 'GUID': guid, 'Md5': md5,
                            'Addler': addler, 'Attempt': attempt,
                            'Status': status}
                files.append(fileDict)
            res = dmRequest.setSubRequestFiles(ind, requestType, files)
            if not res['OK']:
                err = 'RequestDB._getRequest: Failed to set files into Request for RequestID %s.%s' % (requestID, subRequestID)
                self.__releaseSubRequests(requestID, subIDList)
                return S_ERROR('%s\n%s' % (err, res['Message']))
            req = "SELECT Dataset,Status FROM Datasets WHERE SubRequestID = %s;" % subRequestID
            res = self._query(req)
            if not res['OK']:
                err = 'RequestDB._getRequest: Failed to get Datasets for RequestID %s.%s' % (requestID, subRequestID)
                self.__releaseSubRequests(requestID, subIDList)
                return S_ERROR('%s\n%s' % (err, res['Message']))
            datasets = []
            for dataset, status in res['Value']:
                # Dataset status from the query is intentionally dropped here
                datasets.append(dataset)
            res = dmRequest.setSubRequestDatasets(ind, requestType, datasets)
            if not res['OK']:
                err = 'RequestDB._getRequest: Failed to set datasets into Request for RequestID %s.%s' % (requestID, subRequestID)
                self.__releaseSubRequests(requestID, subIDList)
                return S_ERROR('%s\n%s' % (err, res['Message']))
        fields = ['RequestName', 'JobID', 'OwnerDN', 'OwnerGroup', 'DIRACSetup',
                  'SourceComponent', 'CreationTime', 'SubmissionTime',
                  'LastUpdate']
        req = "SELECT %s from Requests WHERE RequestID = %s;" % (', '.join(fields), requestID)
        res = self._query(req)
        if not res['OK']:
            # NOTE(review): stale error text — this query fetches the Request
            # attributes, not the max RequestID
            err = 'RequestDB._getRequest: Failed to retrieve max RequestID'
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate = res['Value'][0]
        dmRequest.setRequestName(requestName)
        dmRequest.setJobID(jobID)
        dmRequest.setOwnerDN(ownerDN)
        dmRequest.setOwnerGroup(ownerGroup)
        dmRequest.setDIRACSetup(diracSetup)
        dmRequest.setSourceComponent(sourceComponent)
        dmRequest.setCreationTime(str(creationTime))
        dmRequest.setLastUpdate(str(lastUpdate))
        res = dmRequest.toXML()
        if not res['OK']:
            err = 'RequestDB._getRequest: Failed to create XML for RequestID %s' % (requestID)
            self.__releaseSubRequests(requestID, subIDList)
            return S_ERROR('%s\n%s' % (err, res['Message']))
        requestString = res['Value']
        #still have to manage the status of the dataset properly
        resultDict = {}
        resultDict['RequestName'] = requestName
        resultDict['RequestString'] = requestString
        resultDict['JobID'] = jobID
        return S_OK(resultDict)
    def getRequestForSubRequest(self, itself, subRequestID):
        """ Select Request given SubRequestID.

        :param self: plugin reference
        :param itself: patient reference for injection
        :param int subRequestID: SubRequests.SubRequestID

        :warn: function has to be injected to RequestDBMySQL instance

        :return: S_OK with RequestName, RequestString (XML), JobID and the
                 rebuilt RequestContainer, or the first failing S_ERROR.
        """
        ## get RequestID
        requestID = "SELECT RequestID FROM SubRequests WHERE SubRequestID = %s;" % str(subRequestID)
        requestID = self._query(requestID)
        if not requestID["OK"]:
            return requestID
        # NOTE(review): this is the first ROW (a 1-tuple), not a scalar; the
        # later '%d' % requestID only works because Python accepts a 1-tuple
        # as the %-formatting argument — confirm intended
        requestID = requestID["Value"][0]
        ## create RequestContainer
        requestContainer = RequestContainer(init=False)
        requestContainer.setRequestID(requestID)
        ## put some basic infos in
        requestInfo = "SELECT RequestName, JobID, OwnerDN, OwnerGroup, DIRACSetup, SourceComponent, CreationTime, SubmissionTime, LastUpdate, Status "
        requestInfo += "FROM Requests WHERE RequestID = %d;" % requestID
        requestInfo = self._query(requestInfo)
        if not requestInfo["OK"]:
            return requestInfo
        requestName, jobID, ownerDN, ownerGroup, diracSetup, sourceComponent, creationTime, submissionTime, lastUpdate, status = requestInfo['Value'][0]
        requestContainer.setRequestName(requestName)
        requestContainer.setJobID(jobID)
        requestContainer.setOwnerDN(ownerDN)
        requestContainer.setOwnerGroup(ownerGroup)
        requestContainer.setDIRACSetup(diracSetup)
        requestContainer.setSourceComponent(sourceComponent)
        requestContainer.setCreationTime(str(creationTime))
        requestContainer.setLastUpdate(str(lastUpdate))
        requestContainer.setStatus(status)
        ## get sub-requests
        subRequests = "SELECT SubRequestID, Status, RequestType, Operation, Arguments, ExecutionOrder, SourceSE, "
        subRequests += "TargetSE, Catalogue, CreationTime, SubmissionTime, LastUpdate FROM SubRequests WHERE RequestID=%s;" % requestID
        subRequests = self._query(subRequests)
        if not subRequests["OK"]:
            return subRequests
        ## loop over sub requests
        for subRequestID, status, requestType, operation, arguments, executionOrder, sourceSE, targetSE, catalogue, creationTime, submissionTime, lastUpdate in subRequests["Value"]:
            res = requestContainer.initiateSubRequest(requestType)
            ind = res["Value"]
            subRequestDict = {"Status": status, "SubRequestID": subRequestID,
                              "Operation": operation, "Arguments": arguments,
                              "ExecutionOrder": int(executionOrder),
                              "SourceSE": sourceSE, "TargetSE": targetSE,
                              "Catalogue": catalogue,
                              "CreationTime": creationTime,
                              "SubmissionTime": submissionTime,
                              "LastUpdate": lastUpdate}
            res = requestContainer.setSubRequestAttributes(ind, requestType, subRequestDict)
            if not res["OK"]:
                return res
            ## get files for this subrequest
            req = "SELECT FileID, LFN, Size, PFN, GUID, Md5, Addler, Attempt, Status FROM Files WHERE SubRequestID = %s ORDER BY FileID;" % str(subRequestID)
            res = self._query(req)
            if not res["OK"]:
                return res
            files = []
            for fileID, lfn, size, pfn, guid, md5, addler, attempt, status in res["Value"]:
                fileDict = {"FileID": fileID, "LFN": lfn, "Size": size,
                            "PFN": pfn, "GUID": guid, "Md5": md5,
                            "Addler": addler, "Attempt": attempt,
                            "Status": status}
                files.append(fileDict)
            res = requestContainer.setSubRequestFiles(ind, requestType, files)
            if not res["OK"]:
                return res
        ## dump request to XML
        res = requestContainer.toXML()
        if not res["OK"]:
            return res
        requestString = res["Value"]
        ## return dictionary with all info in at least
        return S_OK({"RequestName": requestName,
                     "RequestString": requestString,
                     "JobID": jobID,
                     "RequestContainer": requestContainer})
requestType = 'removal' requestOperation = 'replicaRemoval' if targetSE == 'All': requestOperation = 'removeFile' for lfnList in breakListIntoChunks(lfns, 100): oRequest = RequestContainer() subRequestIndex = oRequest.initiateSubRequest(requestType)['Value'] attributeDict = {'Operation': requestOperation, 'TargetSE': targetSE} oRequest.setSubRequestAttributes(subRequestIndex, requestType, attributeDict) files = [] for lfn in lfnList: files.append({'LFN': lfn}) oRequest.setSubRequestFiles(subRequestIndex, requestType, files) requestName = "%s_%s" % (md5(repr(time.time())).hexdigest()[:16], md5(repr(time.time())).hexdigest()[:16]) oRequest.setRequestAttributes({'RequestName': requestName}) DIRAC.gLogger.info(oRequest.toXML()['Value']) result = requestClient.setRequest(requestName, oRequest.toXML()['Value']) if result['OK']: print 'Submitted Request:', result['Value'] else: print 'Failed to submit Request', result['Message'] if monitor: requestID = result['Value'] while True: result = requestClient.getRequestStatus(requestID)
    def test_addSubRequest(self):
        """Exercise RequestContainer.addSubRequest / setSubRequestFiles /
        _getLastOrder by building up subRequests step by step and comparing
        the full internal structure after each step.
        """
        rc_o = RequestContainer()
        # Step 1: first sub-request of type "someType" -> ExecutionOrder 0
        op1_Index = rc_o.addSubRequest(
            {"Attributes": {"SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Operation": "op1"}},
            "someType",
        )
        op1_Index = op1_Index["Value"]
        subRequestExpected = {
            "someType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
            ]
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)
        # Step 2: second sub-request of the same type, also ExecutionOrder 0
        op2_index = rc_o.addSubRequest(
            {"Attributes": {"SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Operation": "op2"}},
            "someType",
        )
        op2_index = op2_index["Value"]
        subRequestExpected = {
            "someType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op2"},
                 "Datasets": []},
            ]
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)
        # Step 3: ExecutionOrder "last" resolves to max existing order + 1 (= 1)
        rc_o.addSubRequest(
            {
                "Attributes": {
                    "SubRequestID": "x",
                    "CreationTime": "2012-06-06 14:53:43.763743",
                    "ExecutionOrder": "last",
                }
            },
            "someType",
        )
        subRequestExpected = {
            "someType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op2"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 1, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ]
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)
        # Step 4: "last" spans request types -> new "someOtherType" entry gets 2
        rc_o.addSubRequest(
            {
                "Attributes": {
                    "SubRequestID": "x",
                    "CreationTime": "2012-06-06 14:53:43.763743",
                    "ExecutionOrder": "last",
                }
            },
            "someOtherType",
        )
        subRequestExpected = {
            "someType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op2"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 1, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
            "someOtherType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 2, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)
        # Step 5: attach a file to the first "someType" sub-request
        fileDict = {"LFN": "foo", "Status": "Waiting"}
        rc_o.setSubRequestFiles(op1_Index, "someType", [fileDict])
        subRequestExpected = {
            "someType": [
                {"Files": [{"LFN": "foo", "Status": "Waiting"}],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op2"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 1, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
            "someOtherType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 2, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)
        # Step 6: _getLastOrder("foo") gives the file's sub-request order (0);
        # adding with ExecutionOrder 0+1 appends a "someOtherType" entry with 1
        fileLastOp = rc_o._getLastOrder("foo")
        rc_o.addSubRequest(
            {
                "Attributes": {
                    "SubRequestID": "x",
                    "CreationTime": "2012-06-06 14:53:43.763743",
                    "ExecutionOrder": fileLastOp + 1,
                }
            },
            "someOtherType",
        )
        subRequestExpected = {
            "someType": [
                {"Files": [{"LFN": "foo", "Status": "Waiting"}],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op1"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 0, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": "op2"},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 1, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
            "someOtherType": [
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 2, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
                {"Files": [],
                 "Attributes": {"Status": "Waiting", "LastUpdate": "", "TargetSE": "", "ExecutionOrder": 1, "SubRequestID": "x", "CreationTime": "2012-06-06 14:53:43.763743", "Catalogue": "", "Error": "", "Operation": ""},
                 "Datasets": []},
            ],
        }
        self.assertEqual(rc_o.subRequests, subRequestExpected)