def testLen(self):
    """
    __testLen__

    Test that the __len__ function will actually return the correct length.
    """
    # This is simple: the length should equal the number of committed and
    # yet-to-be-committed jobs.
    testJobA = Job()
    testJobB = Job()
    testJobC = Job()
    testJobD = Job()

    testJobGroup = JobGroup(jobs=[testJobA, testJobB])
    testJobGroup.commit()
    self.assertEqual(len(testJobGroup), 2)

    testJobGroup.add(testJobC)
    self.assertEqual(len(testJobGroup), 3)

    testJobGroup.commit()
    testJobGroup.add(testJobD)
    self.assertEqual(len(testJobGroup), 4)

    return
def testAddCommit(self):
    """
    _testAddCommit_

    Test the add() and commit() methods of the JobGroup class. Verify that
    jobs are not returned from getJobs() until commit() has been called.
    """
    testJob = Job()
    testJobGroup = JobGroup()

    assert len(testJobGroup.getJobs()) == 0, \
        "ERROR: JobGroup has jobs before jobs have been added."

    testJobGroup.add(testJob)

    assert len(testJobGroup.getJobs()) == 0, \
        "ERROR: JobGroup has jobs before commit() was called."

    testJobGroup.commit()

    assert len(testJobGroup.getJobs()) == 1, \
        "ERROR: JobGroup has wrong number of jobs."
    assert testJob in testJobGroup.getJobs(), \
        "ERROR: JobGroup has unknown jobs."

    return
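
# A minimal usage sketch (illustrative only, not part of the test class; it
# assumes the WMCore.DataStructs Job and JobGroup classes these tests exercise,
# so adjust the imports to whatever this test module actually imports) of the
# contract checked by the two tests above: an uncommitted job already counts
# toward len(), but it is only returned by getJobs() after commit().
from WMCore.DataStructs.Job import Job
from WMCore.DataStructs.JobGroup import JobGroup

group = JobGroup()
group.add(Job())
assert len(group) == 1            # pending jobs count toward __len__ ...
assert len(group.getJobs()) == 0  # ... but are not yet visible via getJobs()
group.commit()
assert len(group.getJobs()) == 1  # committed jobs are returned by getJobs()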
def execute(self, *args, **kwargs):
    self.logger.debug("Transforming old specs into jobs.")

    # Mapping used to cache the jobdef -> blocks association.
    blocks = {}

    # Group the incoming job specs by their job definition ID.
    regroupjobs = {}
    for job in args[0]:
        if job.jobDefinitionID in regroupjobs:
            regroupjobs[job.jobDefinitionID].append(job)
        else:
            regroupjobs[job.jobDefinitionID] = [job]

    # Convert each group into a proper JobGroup of Jobs.
    jobgroups = []
    for jobdef in regroupjobs:
        jobgroup = blocks.get(jobdef, None)
        if jobgroup is None:
            configreq = {'subresource': 'jobgroup',
                         'subjobdef': jobdef,
                         'subuser': kwargs['task']['tm_user_dn']}
            self.logger.debug("Retrieving %d jobdef information from task manager db: %s" % (jobdef, str(configreq)))
            jobgroup = self.server.get(self.resturl, data=configreq)
            self.logger.debug("Jobgroup information in task manager: %s" % str(jobgroup))
            jobgroup = jobgroup[0]['result'][0]
            blocks[jobdef] = jobgroup['tm_data_blocks']

        jg = WMJobGroup()
        for job in regroupjobs[jobdef]:
            # Parse the parameters stored with the old job spec.
            parser = PassThroughOptionParser()
            parser.add_option('--inputFile', dest='inputfiles', type='string')
            parser.add_option('--runAndLumis', dest='runlumis', type='string')
            parser.add_option('--availableSites', dest='allsites', type='string')
            parser.add_option('--jobNumber', dest='jobnum', type='int')
            (options, args) = parser.parse_args(shlex.split(job.jobParameters))

            jj = WMJob()
            jj['input_files'] = []
            for infile in literal_eval(options.inputfiles):
                jj['input_files'].append({'lfn': infile,
                                          'block': blocks[jobdef],
                                          'locations': [ss for ss in literal_eval(options.allsites)]})
            if options.runlumis:
                jj['mask']['runAndLumis'] = literal_eval(options.runlumis)
            jj['panda_oldjobid'] = job.PandaID
            jj['jobnum'] = options.jobnum
            jg.add(jj)

        setattr(jg, 'blocks', blocks[jobdef])
        jg.commit()
        jobgroups.append(jg)

    return Result(task=kwargs['task'], result=jobgroups)
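
# A minimal sketch (illustrative only, not part of this class) of the same
# grouping step performed at the top of execute(), written with
# collections.defaultdict instead of the explicit membership test. The input
# is any iterable of job specs exposing a jobDefinitionID attribute.
from collections import defaultdict

def regroup_by_jobdef(jobspecs):
    """Group job specs by their jobDefinitionID, mirroring execute() above."""
    regrouped = defaultdict(list)
    for spec in jobspecs:
        regrouped[spec.jobDefinitionID].append(spec)
    return dict(regrouped)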