def test01CtorSerilization(self): """ c'tor and serialization """ # # empty c'tor req = Request() self.assertEqual(isinstance(req, Request), True) self.assertEqual(req.JobID, 0) self.assertEqual(req.Status, "Waiting") req = Request(self.fromDict) self.assertEqual(isinstance(req, Request), True) self.assertEqual(req.RequestName, "test") self.assertEqual(req.JobID, 12345) self.assertEqual(req.Status, "Waiting") toJSON = req.toJSON() self.assertEqual(toJSON["OK"], True, "JSON serialization failed") fromJSON = toJSON["Value"] req = Request(fromJSON) toSQL = req.toSQL() self.assertEqual(toSQL["OK"], True) toSQL = toSQL["Value"] self.assertEqual(toSQL.startswith("INSERT"), True) req.RequestID = 1 toSQL = req.toSQL() self.assertEqual(toSQL["OK"], True) toSQL = toSQL["Value"] self.assertEqual(toSQL.startswith("UPDATE"), True)
def test_setGet(self):
    """Exercise Transformation attribute setters and the setBody type validation."""
    res = self.transformation.setTransformationName('TestTName')
    self.assertTrue(res['OK'])
    description = 'Test transformation description'
    res = self.transformation.setDescription(description)
    longDescription = 'Test transformation long description'
    res = self.transformation.setLongDescription(longDescription)
    self.assertTrue(res['OK'])
    res = self.transformation.setType('MCSimulation')
    self.assertTrue(res['OK'])
    res = self.transformation.setPlugin('aPlugin')
    self.assertTrue(res['OK'])

    # # Test DataOperation Body: plain strings are stored verbatim
    res = self.transformation.setBody("")
    self.assertTrue(res['OK'])
    self.assertEqual(self.transformation.paramValues["Body"], "")
    res = self.transformation.setBody("_requestType;RemoveReplica")
    self.assertTrue(res['OK'])
    self.assertEqual(self.transformation.paramValues["Body"], "_requestType;RemoveReplica")

    # # Json will turn tuples to lists and strings to unicode
    transBody = [[u"ReplicateAndRegister", {u"SourceSE": u"FOO-SRM", u"TargetSE": u"BAR-SRM"}],
                 [u"RemoveReplica", {u"TargetSE": u"FOO-SRM"}], ]
    res = self.transformation.setBody(transBody)
    self.assertTrue(res['OK'])
    self.assertEqual(self.transformation.paramValues["Body"], json.dumps(transBody))
    # # This is not true if any of the keys or values are not strings, e.g., integers
    self.assertEqual(json.loads(self.transformation.paramValues["Body"]), transBody)

    # # malformed bodies must be rejected with a descriptive exception
    with self.assertRaisesRegexp(TypeError, "Expected list"):
        self.transformation.setBody({"ReplicateAndRegister": {"foo": "bar"}})
    with self.assertRaisesRegexp(TypeError, "Expected tuple"):
        self.transformation.setBody(["ReplicateAndRegister", "RemoveReplica"])
    with self.assertRaisesRegexp(TypeError, "Expected 2-tuple"):
        self.transformation.setBody([("ReplicateAndRegister", "RemoveReplica", "LogUpload")])
    with self.assertRaisesRegexp(TypeError, "Expected string"):
        self.transformation.setBody([(123, "Parameter:Value")])
    with self.assertRaisesRegexp(TypeError, "Expected dictionary"):
        self.transformation.setBody([("ReplicateAndRegister", "parameter=foo")])
    with self.assertRaisesRegexp(TypeError, "Expected string"):
        self.transformation.setBody([("ReplicateAndRegister", {123: "foo"})])
    with self.assertRaisesRegexp(ValueError, "Unknown attribute"):
        self.transformation.setBody([("ReplicateAndRegister", {"Request": Request()})])
    with self.assertRaisesRegexp(TypeError, "Cannot encode"):
        self.transformation.setBody([("ReplicateAndRegister", {"Arguments": Request()})])
def export_putRequest(self, requestJSON):
    """put a new request into RequestDB

    :param self: service handler instance
    :param str requestJSON: request serialized to JSON format
    :return: S_OK/S_ERROR structure from RequestDB.putRequest
    """
    requestDict = json.loads(requestJSON)
    # prefer RequestID for log messages; fall back to RequestName, then a placeholder
    requestName = requestDict.get("RequestID", requestDict.get("RequestName", "***UNKNOWN***"))
    request = Request(requestDict)
    # Check whether the credentials in the Requests are correct and allowed to be set
    isAuthorized = RequestValidator.setAndCheckRequestOwner(request, self.getRemoteCredentials())
    if not isAuthorized:
        return S_ERROR(DErrno.ENOAUTH, "Credentials in the requests are not allowed")
    optimized = request.optimize()
    if optimized.get("Value", False):
        gLogger.debug("putRequest: request was optimized")
    else:
        gLogger.debug("putRequest: request unchanged",
                      optimized.get("Message", "Nothing could be optimized"))
    valid = self.validate(request)
    if not valid["OK"]:
        gLogger.error("putRequest: request %s not valid: %s" % (requestName, valid["Message"]))
        return valid
    # If NotBefore is not set or user defined, we calculate its value
    now = datetime.datetime.utcnow().replace(microsecond=0)
    extraDelay = datetime.timedelta(0)
    if request.Status not in Request.FINAL_STATES and (not request.NotBefore or request.NotBefore < now):
        # We don't delay if it is the first insertion
        if getattr(request, "RequestID", 0):
            # If it is a constant delay, just set it
            if self.constantRequestDelay:
                extraDelay = datetime.timedelta(minutes=self.constantRequestDelay)
            else:
                # If there is a waiting Operation with Files
                op = request.getWaiting().get("Value")
                if op and len(op):
                    attemptList = [opFile.Attempt for opFile in op if opFile.Status == "Waiting"]
                    if attemptList:
                        maxWaitingAttempt = max([opFile.Attempt for opFile in op if opFile.Status == "Waiting"])
                        # In case it is the first attempt, extraDelay is 0
                        # maxWaitingAttempt can be None if the operation has no File, like the ForwardDiset
                        extraDelay = datetime.timedelta(
                            minutes=2 * math.log(maxWaitingAttempt) if maxWaitingAttempt else 0)
        request.NotBefore = now + extraDelay
    gLogger.info("putRequest: request %s not before %s (extra delay %s)" %
                 (request.RequestName, request.NotBefore, extraDelay))
    requestName = request.RequestName
    gLogger.info("putRequest: Attempting to set request '%s'" % requestName)
    return self.__requestDB.putRequest(request)
def export_putRequest(cls, requestJSON):
    """ put a new request into RequestDB

    :param cls: class ref
    :param str requestJSON: request serialized to JSON format
    """
    # NOTE(review): requestJSON is used as a dict here (.get) although the
    # docstring says "serialized to JSON format" - confirm the caller contract
    requestName = requestJSON.get("RequestName", "***UNKNOWN***")
    request = Request(requestJSON)
    # remember the ID before optimize(), which may rebuild the request
    requestID = request.RequestID
    optimized = request.optimize()
    if optimized.get("Value", False):
        if request.RequestID == 0 and requestID != 0:
            # A new request has been created, delete the old one
            delete = cls.__requestDB.deleteRequest(request.RequestName)
            if not delete['OK']:
                return delete
            gLogger.debug("putRequest: request was optimized and removed for a new insertion")
        else:
            gLogger.debug("putRequest: request was optimized")
    else:
        gLogger.debug("putRequest: request unchanged",
                      optimized.get("Message", "Nothing could be optimized"))
    valid = cls.validate(request)
    if not valid["OK"]:
        gLogger.error("putRequest: request %s not valid: %s" % (requestName, valid["Message"]))
        return valid
    requestName = request.RequestName
    gLogger.info("putRequest: Attempting to set request '%s'" % requestName)
    return cls.__requestDB.putRequest(request)
def getBulkRequests(self, numberOfRequest=10, assigned=True):
    """Fetch a bulk of requests from the RequestDB.

    :param self: self reference
    :param int numberOfRequest: size of the bulk (default 10)
    :param bool assigned: whether the fetched requests are marked assigned
    :return: S_OK( {"Successful": {requestID: RequestInstance}, "Failed": message} ) or S_ERROR
    """
    self.log.debug("getRequests: attempting to get request.")
    res = self._getRPC().getBulkRequests(numberOfRequest, assigned)
    if not res["OK"]:
        self.log.error("getRequests: unable to get '%s' requests: %s" % (numberOfRequest, res["Message"]))
        return res
    # Pass the RPC answer through untouched when nothing was returned at all,
    # or when no request could be fetched successfully.
    if not res["Value"] or not res["Value"]["Successful"]:
        return res
    # Deserialize each successful JSON payload back into a Request object.
    serialized = res["Value"]["Successful"]
    hydrated = {reqID: Request(payload) for reqID, payload in serialized.items()}
    return S_OK({"Successful": hydrated, "Failed": res["Value"]["Failed"]})
def setUp(self):
    """Build the fixture: two checksummed files inside one operation inside one request."""
    gLogger.setLevel('INFO')

    def _makeFile(lfn, checksum):
        # Helper producing one ADLER32-checksummed File for the fixture.
        fixtureFile = File()
        fixtureFile.LFN = lfn
        fixtureFile.Checksum = checksum
        fixtureFile.ChecksumType = "ADLER32"
        return fixtureFile

    self.file = _makeFile("/lhcb/user/c/cibak/testFile", "123456")
    self.file2 = _makeFile("/lhcb/user/f/fstagni/testFile", "654321")

    self.operation = Operation()
    self.operation.Type = "ReplicateAndRegister"
    self.operation.TargetSE = "CERN-USER"
    self.operation.addFile(self.file)
    self.operation.addFile(self.file2)

    # owner identity is taken from the current proxy
    proxyInfo = getProxyInfo()['Value']
    self.request = Request()
    self.request.RequestName = "RequestManagerHandlerTests"
    self.request.OwnerDN = proxyInfo['identity']
    self.request.OwnerGroup = proxyInfo['group']
    self.request.JobID = 123
    self.request.addOperation(self.operation)
    # # JSON representation of a whole request
    self.jsonStr = self.request.toJSON()['Value']
    # # request client
    self.requestClient = ReqClient()
def jobexec(jobxml, wfParameters):
    """Load a workflow from an XML file, attach the standard tools, and execute it.

    :param str jobxml: path to the workflow XML description
    :param dict wfParameters: command-line parameters propagated to the workflow
    :return: result structure of workflow.execute()
    """
    jobfile = os.path.abspath(jobxml)
    if not os.path.exists(jobfile):
        gLogger.warn('Path to specified workflow %s does not exist' % (jobfile))
        sys.exit(1)
    workflow = fromXMLFile(jobfile)
    gLogger.debug(workflow)
    code = workflow.createCode()
    gLogger.debug(code)
    jobID = 0
    # dict.has_key() was removed in Python 3; use the "in" operator instead
    if 'JOBID' in os.environ:
        jobID = os.environ['JOBID']
        gLogger.info('DIRAC JobID %s is running at site %s' % (jobID, DIRAC.siteName()))
    workflow.addTool('JobReport', JobReport(jobID))
    workflow.addTool('AccountingReport', DataStoreClient())
    workflow.addTool('Request', Request())
    # Propagate the command line parameters to the workflow if any
    for pName, pValue in wfParameters.items():
        workflow.setValue(pName, pValue)
    result = workflow.execute()
    return result
def test_execute(self, _patch, _patch1):
    """UploadLogFile.execute must still succeed when putStorageDirectory fails."""
    # no errors, no input data
    # for wf_commons in copy.deepcopy( wf_commons ):
    #   for step_commons in step_commons:
    #     self.assertTrue( ulf.execute( prod_id, prod_job_id, wms_job_id,
    #                                   workflowStatus, stepStatus,
    #                                   wf_commons, step_commons,
    #                                   step_number, step_id,
    #                                   dm_mock, self.ft_mock,
    #                                   bkc_mock )['OK'] )
    # putStorageDirectory returns False
    rm_mock = copy.deepcopy(dm_mock)
    rm_mock.putStorageDirectory.return_value = {'OK': False, 'Message': 'bih'}
    ft_mock = copy.deepcopy(self.ft_mock)
    ulf = UploadLogFile(bkClient=bkc_mock, dm=dm_mock)
    ulf.request = Request()
    ulf.failoverTransfer = ft_mock
    # the module is expected to swallow the upload failure and still return OK
    for wf_cs in copy.deepcopy(wf_commons):
        for s_cs in step_commons:
            self.assertTrue(ulf.execute(prod_id, prod_job_id, 0,
                                        workflowStatus, stepStatus,
                                        wf_cs, s_cs,
                                        step_number, step_id)['OK'])
def test_stress(reqDB):
    """Stress test: insert, fetch (timed), and delete STRESS_REQUESTS requests."""
    storedIDs = []
    for idx in range(STRESS_REQUESTS):
        op = Operation({"Type": "RemoveReplica", "TargetSE": "CERN-USER"})
        op += File({"LFN": "/lhcb/user/c/cibak/foo"})
        request = Request({"RequestName": "test-%d" % idx})
        request += op
        put = reqDB.putRequest(request)
        assert put["OK"], put
        storedIDs.append(put["Value"])
    # Time only the read-back phase.
    startTime = time.time()
    for storedID in storedIDs:
        get = reqDB.getRequest(storedID)
        assert get["OK"], get
    endTime = time.time()
    print("getRequest duration %s " % (endTime - startTime))
    # Clean up everything that was inserted.
    for storedID in storedIDs:
        delete = reqDB.deleteRequest(storedID)
        assert delete["OK"], delete
def __submitRMSOp(self, target_se, lfns_chunk_dict, whichRMSOp='ReplicateAndRegister'):
    """Build and submit one RMS request for a chunk of LFNs.

    :param str target_se: SE name to which to replicate
    :param dict lfns_chunk_dict: LFN -> metadata mapping; each value holds 'Size'
                                 and optionally 'Checksum'
    :param str whichRMSOp: RMS operation type - ReplicateAndRegister,
                           ReplicateAndRemove, PutAndRegister
    :return: S_OK() on successful submission, S_ERROR otherwise
    """
    # # Setup request with a unique, timestamped name
    request = Request()
    request.RequestName = "DDM_" + str(target_se) + datetime.datetime.now().strftime("_%Y%m%d_%H%M%S")
    myOp = Operation()
    myOp.Type = whichRMSOp
    myOp.TargetSE = target_se
    # # Add LFNs to the operation (iterate items instead of keys() + repeated lookups)
    for lfn, lfnMeta in lfns_chunk_dict.items():
        opFile = File()
        opFile.LFN = lfn
        opFile.Size = lfnMeta['Size']
        if "Checksum" in lfnMeta:
            opFile.Checksum = lfnMeta['Checksum']
            opFile.ChecksumType = 'ADLER32'
        # # Add file to operation
        myOp.addFile(opFile)
    request.addOperation(myOp)
    reqClient = ReqClient()
    putRequest = reqClient.putRequest(request)
    if not putRequest["OK"]:
        gLogger.error("Unable to put request '%s': %s" % (request.RequestName, putRequest["Message"]))
        return S_ERROR("Problem submitting to RMS.")
    # BUG FIX: the original fell off the end and implicitly returned None on
    # success; return an explicit S_OK so callers can check the result.
    return S_OK()
def test01Stress(self):
    """ stress test: insert, read back, and delete self.stressRequests requests """
    db = RequestDB()
    reqIDs = []
    # range() replaces the Python-2-only xrange()
    for i in range(self.stressRequests):
        request = Request({"RequestName": "test-%d" % i})
        op = Operation({"Type": "RemoveReplica", "TargetSE": "CERN-USER"})
        op += File({"LFN": "/lhcb/user/c/cibak/foo"})
        request += op
        put = db.putRequest(request)
        self.assertEqual(put["OK"], True, put['Message'] if 'Message' in put else 'OK')
        reqIDs.append(put['Value'])
    # time only the read-back phase
    startTime = time.time()
    for reqID in reqIDs:
        get = db.getRequest(reqID)
        if "Message" in get:
            print(get["Message"])
        self.assertEqual(get["OK"], True, get['Message'] if 'Message' in get else 'OK')
    endTime = time.time()
    print("getRequest duration %s " % (endTime - startTime))
    for reqID in reqIDs:
        delete = db.deleteRequest(reqID)
        self.assertEqual(delete["OK"], True, delete['Message'] if 'Message' in delete else 'OK')
def setUp(self):
    """Prepare a minimal request holding one ForwardDISET operation."""
    forwardOp = Operation({"Type": "ForwardDISET", "Arguments": "foobar"})
    self.req = Request()
    self.req.RequestName = "testRequest"
    self.req.addOperation(forwardOp)
    self.op = forwardOp
    # generic handler under test
    self.baseOp = OperationHandlerBase()
def jobexec(jobxml, wfParameters):
    """Load the workflow XML, attach the standard tools, propagate parameters, and execute.

    :param str jobxml: path to the workflow XML description
    :param dict wfParameters: command-line parameters propagated to the workflow
    :return: result structure of workflow.execute()
    """
    jobfile = os.path.abspath(jobxml)
    if not os.path.exists(jobfile):
        gLogger.warn('Path to specified workflow %s does not exist' % (jobfile))
        sys.exit(1)
    workflow = fromXMLFile(jobfile)
    gLogger.debug(workflow)
    code = workflow.createCode()
    gLogger.debug(code)
    jobID = 0
    if 'JOBID' in os.environ:
        jobID = os.environ['JOBID']
        gLogger.info('DIRAC JobID %s is running at site %s' % (jobID, DIRAC.siteName()))
    # Attach the standard helper tools the workflow modules expect to find.
    for toolName, tool in (('JobReport', JobReport(jobID)),
                           ('AccountingReport', DataStoreClient()),
                           ('Request', Request())):
        workflow.addTool(toolName, tool)
    # Propagate the command line parameters to the workflow if any
    for pName, pValue in wfParameters.items():
        workflow.setValue(pName, pValue)
    # ... and to every module instance of every step that declares them
    for stepdefinition in workflow.step_definitions.values():
        for moduleInstance in stepdefinition.module_instances:
            for pName, pValue in wfParameters.items():
                if moduleInstance.parameters.find(pName):
                    moduleInstance.parameters.setValue(pName, pValue)
    return workflow.execute()
def setUp(self):
    """Assemble the request fixture, the client, and the stress-test sizes."""
    gLogger.setLevel('NOTICE')

    def _newFile(lfn, checksum):
        # Build one ADLER32-checksummed test File.
        testFile = File()
        testFile.LFN = lfn
        testFile.Checksum = checksum
        testFile.ChecksumType = "ADLER32"
        return testFile

    self.file = _newFile("/lhcb/user/c/cibak/testFile", "123456")
    self.file2 = _newFile("/lhcb/user/f/fstagni/testFile", "654321")

    self.operation = Operation()
    self.operation.Type = "ReplicateAndRegister"
    self.operation.TargetSE = "CERN-USER"
    self.operation.addFile(self.file)
    self.operation.addFile(self.file2)

    self.request = Request()
    self.request.RequestName = "RequestManagerHandlerTests"
    self.request.OwnerDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=cibak/CN=605919/CN=Krzysztof Ciba"
    self.request.OwnerGroup = "dirac_user"
    self.request.JobID = 123
    self.request.addOperation(self.operation)
    # # JSON representation of a whole request
    self.jsonStr = self.request.toJSON()['Value']
    # # request client
    self.requestClient = ReqClient()
    # sizes used by the stress/bulk tests
    self.stressRequests = 1000
    self.bulkRequest = 1000
def test05Scheduled(self):
    """ scheduled request r/w """
    db = RequestDB()
    # one request with a single ReplicateAndRegister op holding a Scheduled file
    req = Request({"RequestName": "FTSTest"})
    op = Operation({"Type": "ReplicateAndRegister", "TargetSE": "CERN-USER"})
    op += File({"LFN": "/a/b/c", "Status": "Scheduled", "Checksum": "123456", "ChecksumType": "ADLER32"})
    req += op
    put = db.putRequest(req)
    self.assertEqual(put["OK"], True, "putRequest failed")
    # peek reads the request back without changing its status
    peek = db.peekRequest(req.RequestName)
    self.assertEqual(peek["OK"], True, "peek failed ")
    peek = peek["Value"]
    # every scheduled operation must be retrievable by its OperationID
    for op in peek:
        opId = op.OperationID
        getFTS = db.getScheduledRequest(opId)
        self.assertEqual(getFTS["OK"], True, "getScheduled failed")
        self.assertEqual(getFTS["Value"].RequestName, "FTSTest", "wrong request selected")
def archiveRequestAndOp(listOfLFNs):
    """Return a tuple of the request and operation.

    :param list listOfLFNs: LFNs added to the ArchiveFiles operation
    :return: (Request, Operation) tuple
    """
    switches = {}
    archiveLFN = "/vo/tars/myTar.tar"
    # Build the ArchiveFiles operation first, then attach it to the request.
    op = Operation()
    op.Arguments = DEncode.encode({
        "SourceSE": switches.get("SourceSE", "SOURCE-SE"),
        "TarballSE": switches.get("TarballSE", "TARBALL-SE"),
        "RegisterDescendent": False,
        "ArchiveLFN": archiveLFN,
    })
    op.Type = "ArchiveFiles"
    for position, lfn in enumerate(listOfLFNs):
        archFile = File()
        archFile.LFN = lfn
        archFile.Size = position
        archFile.Checksum = "01130a%0d" % position
        archFile.ChecksumType = "adler32"
        op.addFile(archFile)
    req = Request()
    req.RequestName = "MyRequest"
    req.addOperation(op)
    return req, op
def test04Stress(self):
    """ stress test: insert, read back (timed), and delete self.stressRequests requests """
    db = RequestDB()
    for i in range(self.stressRequests):
        request = Request({"RequestName": "test-%d" % i})
        op = Operation({"Type": "RemoveReplica", "TargetSE": "CERN-USER"})
        op += File({"LFN": "/lhcb/user/c/cibak/foo"})
        request += op
        put = db.putRequest(request)
        self.assertEqual(put["OK"], True, "put failed")
    startTime = time.time()
    for i in range(self.stressRequests):
        get = db.getRequest("test-%s" % i, True)
        if "Message" in get:
            # Python 3 fix: print statement -> print() function
            print(get["Message"])
        self.assertEqual(get["OK"], True, "get failed")
    endTime = time.time()
    print("getRequest duration %s " % (endTime - startTime))
    for i in range(self.stressRequests):
        delete = db.deleteRequest("test-%s" % i)
        self.assertEqual(delete["OK"], True, "delete failed")
def export_putRequest(cls, requestJSON):
    """ put a new request into RequestDB

    :param cls: class ref
    :param str requestJSON: request serialized to JSON format
    :return: S_OK/S_ERROR structure from RequestDB.putRequest
    """
    # NOTE(review): requestJSON is used as a dict here (.get) although the
    # docstring says JSON string - confirm the caller contract
    requestName = requestJSON.get("RequestName", "***UNKNOWN***")
    request = Request(requestJSON)
    optimized = request.optimize()
    if optimized.get("Value", False):
        gLogger.debug("putRequest: request was optimized")
    else:
        # typo fix: "optimize" -> "optimized" (consistent with the sibling handlers)
        gLogger.debug(
            "putRequest: request unchanged",
            optimized.get("Message", "Nothing could be optimized"))
    valid = cls.validate(request)
    if not valid["OK"]:
        gLogger.error("putRequest: request %s not valid: %s" % (requestName, valid["Message"]))
        return valid
    requestName = request.RequestName
    gLogger.info("putRequest: Attempting to set request '%s'" % requestName)
    return cls.__requestDB.putRequest(request)
def archiveRequestAndOp(listOfLFNs):
    """Return a tuple of the request and operation.

    :param list listOfLFNs: LFNs added to the ArchiveFiles operation
    :return: (Request, Operation) tuple
    """
    req = Request()
    req.RequestName = 'MyRequest'
    op = Operation()
    switches = {}
    archiveLFN = '/vo/tars/myTar.tar'
    # encoded arguments understood by the ArchiveFiles operation handler
    op.Arguments = DEncode.encode({
        'SourceSE': switches.get('SourceSE', 'SOURCE-SE'),
        'TarballSE': switches.get('TarballSE', 'TARBALL-SE'),
        'RegisterDescendent': False,
        'ArchiveLFN': archiveLFN
    })
    op.Type = 'ArchiveFiles'
    for index, lfn in enumerate(listOfLFNs):
        oFile = File()
        oFile.LFN = lfn
        oFile.Size = index  # dummy size: just the position in the list
        oFile.Checksum = '01130a%0d' % index
        oFile.ChecksumType = 'adler32'
        op.addFile(oFile)
    req.addOperation(op)
    return req, op
def test_Props():
    """props: setters accept both strings and native types and read back normalized."""
    epoch = datetime.datetime(1970, 1, 1, 0, 0, 0)
    # # valid values
    req = Request()
    req.RequestID = 1
    assert req.RequestID == 1
    req.RequestName = "test"
    assert req.RequestName == "test"
    req.JobID = 1
    assert req.JobID == 1
    # Each timestamp property accepts an ISO string or a datetime and always
    # reads back as a datetime instance.
    for stamp in ("CreationTime", "SubmitTime", "LastUpdate"):
        setattr(req, stamp, "1970-01-01 00:00:00")
        assert getattr(req, stamp) == epoch
        setattr(req, stamp, epoch)
        assert getattr(req, stamp) == epoch
    req.Error = ""
def test_scheduled(reqDB):
    """scheduled request r/w"""
    # one request with a single ReplicateAndRegister op holding a Scheduled file
    req = Request({"RequestName": "FTSTest"})
    op = Operation({"Type": "ReplicateAndRegister", "TargetSE": "CERN-USER"})
    op += File({"LFN": "/a/b/c", "Status": "Scheduled", "Checksum": "123456", "ChecksumType": "ADLER32"})
    req += op
    put = reqDB.putRequest(req)
    assert put["OK"], put
    reqID = put["Value"]
    # peek reads the request back without changing its status
    peek = reqDB.peekRequest(reqID)
    assert peek["OK"], peek
    peek = peek["Value"]
    # every scheduled operation must be retrievable by its OperationID
    for op in peek:
        opId = op.OperationID
        getFTS = reqDB.getScheduledRequest(opId)
        assert getFTS["OK"], getFTS
        assert getFTS["Value"].RequestName == "FTSTest", "Wrong request name %s" % getFTS["Value"].RequestName
    # clean up after the test
    delete = reqDB.deleteRequest(reqID)
    assert delete["OK"], delete
def createRequest(self, requestName, lfnChunk):
    """Create the Request.

    :param str requestName: name for the new request
    :param list lfnChunk: LFNs handled by this request
    :return: the assembled Request (not submitted here)
    """
    request = Request()
    request.RequestName = requestName
    targetSE = self.switches.get("TargetSE")

    # Copy the chunk to the target SE and register the new replicas.
    repOp = Operation()
    repOp.Type = "ReplicateAndRegister"
    repOp.TargetSE = targetSE
    self.addLFNs(repOp, lfnChunk, addPFN=True)
    request.addOperation(repOp)

    # Optionally wait for tape migration of the copies.
    if self.switches.get("CheckMigration"):
        migOp = Operation()
        migOp.Type = "CheckMigration"
        migOp.TargetSE = targetSE
        self.addLFNs(migOp, lfnChunk, addPFN=True)
        request.addOperation(migOp)

    # Finally drop the replicas at every source SE.
    rmOp = Operation()
    rmOp.Type = "RemoveReplica"
    rmOp.TargetSE = ",".join(self.switches.get("SourceSE", []))
    self.addLFNs(rmOp, lfnChunk)
    request.addOperation(rmOp)
    return request
def createRequest(self, requestName, lfnChunk):
    """Create the Request.

    :param str requestName: name for the new request
    :param list lfnChunk: LFNs handled by this request
    :return: the assembled Request (not submitted here)
    """
    request = Request()
    request.RequestName = requestName
    # replicate the chunk to the target SE
    replicate = Operation()
    replicate.Type = 'ReplicateAndRegister'
    replicate.TargetSE = self.switches.get('TargetSE')
    self.addLFNs(replicate, lfnChunk, addPFN=True)
    request.addOperation(replicate)
    # optionally verify tape migration of the new replicas
    if self.switches.get('CheckMigration'):
        checkMigration = Operation()
        checkMigration.Type = 'CheckMigration'
        checkMigration.TargetSE = self.switches.get('TargetSE')
        self.addLFNs(checkMigration, lfnChunk, addPFN=True)
        request.addOperation(checkMigration)
    # finally remove the replicas from all source SEs
    removeReplicas = Operation()
    removeReplicas.Type = 'RemoveReplica'
    removeReplicas.TargetSE = ','.join(self.switches.get('SourceSE', []))
    self.addLFNs(removeReplicas, lfnChunk)
    request.addOperation(removeReplicas)
    return request
def __deleteSandboxFromExternalBackend(self, SEName, SEPFN):
    """Delete a sandbox stored on an external storage element.

    Depending on configuration this either queues an asynchronous RMS
    PhysicalRemoval request (default) or removes the file synchronously.

    :param str SEName: storage element holding the sandbox
    :param str SEPFN: PFN of the sandbox on that SE
    :return: S_OK/S_ERROR structure
    """
    if self.getCSOption("DelayedExternalDeletion", True):
        # asynchronous path: hand the deletion over to the RMS
        gLogger.info("Setting deletion request")
        try:
            request = Request()
            # the name encodes SE, PFN and a timestamp to stay unique
            request.RequestName = "RemoteSBDeletion:%s|%s:%s" % (SEName, SEPFN, time.time())
            physicalRemoval = Operation()
            physicalRemoval.Type = "PhysicalRemoval"
            physicalRemoval.TargetSE = SEName
            fileToRemove = File()
            fileToRemove.PFN = SEPFN
            physicalRemoval.addFile(fileToRemove)
            request.addOperation(physicalRemoval)
            return ReqClient().putRequest(request)
        except Exception as e:
            # deliberate catch-all: any failure here must not crash the service
            gLogger.exception("Exception while setting deletion request")
            return S_ERROR("Cannot set deletion request: %s" % str(e))
    else:
        # synchronous path: delete directly through the StorageElement
        gLogger.info("Deleting external Sandbox")
        try:
            return StorageElement(SEName).removeFile(SEPFN)
        except Exception as e:
            gLogger.exception("RM raised an exception while trying to delete a remote sandbox")
            return S_ERROR("RM raised an exception while trying to delete a remote sandbox")
def setUp(self):
    """ test case setup """
    # request with two operations: ReplicateAndRegister (one file) and RemoveFile
    self.request = Request({"RequestName": "test1", "JobID": 1})
    self.operation1 = Operation({"Type": "ReplicateAndRegister", "TargetSE": "CERN-USER"})
    self.file = File({"LFN": "/a/b/c", "ChecksumType": "ADLER32", "Checksum": "123456"})
    self.request.addOperation(self.operation1)
    self.operation1.addFile(self.file)
    self.operation2 = Operation()
    self.operation2.Type = "RemoveFile"
    self.operation2.addFile(File({"LFN": "/c/d/e"}))
    self.request.addOperation(self.operation2)
    # ## set some defaults: point the ReqDB at a local test database
    gConfig.setOptionValue('DIRAC/Setup', 'Test')
    gConfig.setOptionValue('/DIRAC/Setups/Test/RequestManagement', 'Test')
    gConfig.setOptionValue('/Systems/RequestManagement/Test/Databases/ReqDB/Host', 'localhost')
    gConfig.setOptionValue('/Systems/RequestManagement/Test/Databases/ReqDB/DBName', 'ReqDB')
    gConfig.setOptionValue('/Systems/RequestManagement/Test/Databases/ReqDB/User', 'Dirac')
    # number of iterations used by the stress tests
    self.i = 1000
def setUp(self):
    """ test case set up """
    # map operation type -> handler module location
    self.handlersDict = {"ForwardDISET": "DIRAC/RequestManagementSystem/private/ForwardDISET"}
    self.req = Request()
    self.req.RequestName = "foobarbaz"
    self.req.OwnerGroup = "lhcb_user"
    self.req.OwnerDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=chaen/CN=705305/CN=Christophe Haen"
    self.op = Operation({"Type": "ForwardDISET", "Arguments": "tts10:helloWorldee"})
    self.req.addOperation(self.op)
    self.task = None
    # mocked ReqClient
    self.mockRC = MagicMock()
    # mocked Operations helper returning canned CS sections/options
    self.mockObjectOps = MagicMock()
    self.mockObjectOps.getSections.return_value = {'OK': True, 'Value': ['DataProcessing', 'DataManager']}
    self.mockObjectOps.getOptionsDict.return_value = {'OK': True, 'Value': {'Group': 'lhcb_user', 'User': '******'}}
    self.mockOps = MagicMock()
    self.mockOps.return_value = self.mockObjectOps
def myRequest():
    """Create a removal request, validate it, and put it to the ReqDB."""
    request = Request()
    request.RequestName = 'myAwesomeRemovalRequest.xml'
    request.JobID = 0
    request.SourceComponent = "myScript"

    # single RemoveFile operation over one user file
    remove = Operation()
    remove.Type = "RemoveFile"
    lfn = "/ilc/user/s/sailer/test.txt"
    rmFile = File()
    rmFile.LFN = lfn
    remove.addFile(rmFile)
    request.addOperation(remove)

    # refuse to submit a request that does not validate
    isValid = RequestValidator().validate(request)
    if not isValid['OK']:
        raise RuntimeError("Failover request is not valid: %s" % isValid['Message'])
    # Python 3 fix: print statements converted to the print() function
    # (the redundant else-after-raise is also dropped)
    print("It is a GOGOGO")
    requestClient = ReqClient()
    result = requestClient.putRequest(request)
    print(result)
def _getRequestContainer(self):
    """Return the RequestContainer reporter (object), creating and caching it on first use."""
    try:
        # fast path: reuse the cached container
        return self.workflow_commons["Request"]
    except KeyError:
        # not cached yet: create one and remember it for later calls
        request = Request()
        self.workflow_commons["Request"] = request
        return request
def test07List(self):
    """ setitem, delitem, getitem and dirty """
    r = Request()
    # populate with five operations
    ops = [Operation() for i in range(5)]
    for op in ops:
        r.addOperation(op)
    for i, op in enumerate(ops):
        self.assertEqual(op, r[i], "__getitem__ failed")
    op = Operation()
    r[0] = op
    self.assertEqual(op, r[0], "__setitem__ failed")
    del r[0]
    self.assertEqual(len(r), 4, "__delitem__ failed")
    # once the request has an ID, removed persisted operations must be tracked
    r.RequestID = 1
    del r[0]
    self.assertEqual(r.cleanUpSQL(), None, "cleanUpSQL failed after __delitem__ (no opId)")
    r[0].OperationID = 1
    del r[0]
    self.assertEqual(r.cleanUpSQL(),
                     "DELETE FROM `Operation` WHERE `RequestID` = 1 AND `OperationID` IN (1);",
                     "cleanUpSQL failed after __delitem__ (opId set)")
    r[0].OperationID = 2
    r[0] = Operation()
    self.assertEqual(r.cleanUpSQL(),
                     "DELETE FROM `Operation` WHERE `RequestID` = 1 AND `OperationID` IN (1,2);",
                     "cleanUpSQL failed after __setitem_ (opId set)")
    # the dirty-operation bookkeeping must survive JSON round-trips
    json = r.toJSON()
    self.assertEqual("__dirty" in json["Value"], True, "__dirty missing in json")
    r2 = Request(json["Value"])
    self.assertEqual(r.cleanUpSQL(), r2.cleanUpSQL(), "wrong cleanUpSQL after json")
def export_putRequest(cls, requestJSON):
    """ put a new request into RequestDB

    :param cls: class ref
    :param str requestJSON: request serialized to JSON format
    :return: S_OK/S_ERROR structure from RequestDB.putRequest
    """
    requestDict = json.loads(requestJSON)
    # prefer RequestID for log messages; fall back to RequestName, then a placeholder
    requestName = requestDict.get("RequestID", requestDict.get('RequestName', "***UNKNOWN***"))
    request = Request(requestDict)
    optimized = request.optimize()
    if optimized.get("Value", False):
        gLogger.debug("putRequest: request was optimized")
    else:
        gLogger.debug("putRequest: request unchanged",
                      optimized.get("Message", "Nothing could be optimized"))
    valid = cls.validate(request)
    if not valid["OK"]:
        gLogger.error("putRequest: request %s not valid: %s" % (requestName, valid["Message"]))
        return valid
    # If NotBefore is not set or user defined, we calculate its value
    now = datetime.datetime.utcnow().replace(microsecond=0)
    extraDelay = datetime.timedelta(0)
    if request.Status not in Request.FINAL_STATES and (not request.NotBefore or request.NotBefore < now):
        op = request.getWaiting().get('Value')
        # If there is a waiting Operation with Files
        if op and len(op):
            attemptList = [opFile.Attempt for opFile in op if opFile.Status == "Waiting"]
            if attemptList:
                maxWaitingAttempt = max([opFile.Attempt for opFile in op if opFile.Status == "Waiting"])
                # In case it is the first attempt, extraDelay is 0
                # maxWaitingAttempt can be None if the operation has no File, like the ForwardDiset
                extraDelay = datetime.timedelta(
                    minutes=2 * math.log(maxWaitingAttempt) if maxWaitingAttempt else 0)
        request.NotBefore = now + extraDelay
    gLogger.info("putRequest: request %s not before %s (extra delay %s)" %
                 (request.RequestName, request.NotBefore, extraDelay))
    requestName = request.RequestName
    gLogger.info("putRequest: Attempting to set request '%s'" % requestName)
    return cls.__requestDB.putRequest(request)