def test_appendTransform(self):
    from Ganga.GPI import LHCbTransform, LHCbTask, BKTestQuery, DaVinci, Local

    tr1 = LHCbTransform(application=DaVinci(), backend=Local())
    t = LHCbTask()

    # Try appending
    t.appendTransform(tr1)
    assert len(t.transforms), "Didn't append a transform properly"

    # Try appending a transform with a query and check for update
    tr2 = LHCbTransform(application=DaVinci(), backend=Local())
    tr2.addQuery(BKTestQuery(stripping15up))
    t.appendTransform(tr2)
    assert len(t.transforms[-1]._impl.toProcess_dataset.files), 'Transform not updated properly after appending'
def test_update(self):
    from Ganga.GPI import LHCbTask, LHCbTransform, BKTestQuery, DaVinci, Local

    t = LHCbTask()
    tr1 = LHCbTransform(application=DaVinci(), backend=Local())
    tr2 = LHCbTransform(application=DaVinci(), backend=Local())
    t.appendTransform(tr1)
    t.appendTransform(tr2)
    tr1.addQuery(BKTestQuery(stripping15up))
    tr2.addQuery(BKTestQuery(stripping15down))

    # Check that update produces some files to process over multiple
    # transforms
    t.update()
    assert len(t.transforms[0]._impl.toProcess_dataset.files), 'Update did not produce any datafiles to process in transform 0'
    assert len(t.transforms[1]._impl.toProcess_dataset.files), 'Update did not produce any datafiles to process in transform 1'
def test_updateQuery(self):
    from Ganga.GPI import LHCbTask, LHCbTransform, BKTestQuery, DaVinci, Dirac, jobs
    from GangaTest.Framework.utils import sleep_until_state

    t = LHCbTask()
    tr = LHCbTransform(application=DaVinci(), backend=Dirac())
    t.appendTransform(tr)

    try:
        # Build a test query (not attached to the transform at this point)
        bkQueryList = [BKTestQuery(stripping20up)]
        tr.updateQuery()
        assert False, 'Should have thrown exception if updated with no query'
    except AssertionError:
        raise
    except Exception:
        tr.addQuery(BKTestQuery(stripping20down))

        # Check some new data added
        assert len(tr.inputdata), 'No data added after call to update'

        try:
            # Shouldn't allow a second update before the data already in
            # toProcess_dataset has been processed
            tr.updateQuery()
            assert False, 'Should have thrown an error if updated with files already to process'
        except AssertionError:
            raise
        except Exception:
            # Run so we can update again with a removed dataset; recall that jobs
            # with the old dataset are only created when run() is called.
            t.run()
            assert len(tr.getJobs()), 'No jobs created upon run()'
            job = jobs(int(tr.getJobs()[0].fqid.split('.')[0]))
            sleep_until_state(job, 300, 'submitted')

            # Drop one file from the query result and force an update
            del tr._impl.query.dataset.files[0]
            tr.update(True)

            # Check the dead dataset is picked up
            assert len(tr._impl.removed_data.files), "Didn't pick up the removal of a dataset"
            job.remove()
def test_overview(self):
    from Ganga.GPI import LHCbTransform, DaVinci, Local

    # Simply check that overview() runs without raising
    tr = LHCbTransform(application=DaVinci(), backend=Local())
    tr.overview()
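
# ---------------------------------------------------------------------------
# The test methods above reference module-level names (stripping15up,
# stripping15down, stripping20up, stripping20down) and assume they are defined
# inside a GPI test-case class, neither of which is shown in this section.
# Below is a minimal sketch of that assumed scaffolding; the base class import
# reflects Ganga's legacy GPI test framework and the dataset values are
# placeholders, not the real bookkeeping paths used by the test suite.
from GangaTest.Framework.tests import GangaGPITestCase

stripping15up = '<BK path: Stripping15, magnet up>'      # placeholder
stripping15down = '<BK path: Stripping15, magnet down>'  # placeholder
stripping20up = '<BK path: Stripping20, magnet up>'      # placeholder
stripping20down = '<BK path: Stripping20, magnet down>'  # placeholder


class TestLHCbTasks(GangaGPITestCase):
    """Sketch only: the test_* methods above would be defined in a class like this."""
    pass
# ---------------------------------------------------------------------------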