Example #1
    def test_appendTransform(self):
        from Ganga import GPI
        tr1 = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        t = GPI.LHCbTask()

        # Try appending
        t.appendTransform(tr1)
        assert len(t.transforms), 'Didn\'t append a transform properly'

        # Try appending a transform with a query and check for update
        tr2 = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        tr2.addQuery(GPI.BKTestQuery(stripping15up))
        t.appendTransform(tr2)
        assert len(t.transforms[-1]._impl.toProcess_dataset.files), \
            'Transform not updated properly after appending'
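The test methods in this section refer to names such as stripping15up, stripping15down, stripping16up, stripping16down, stripping20up and stripping20down without defining them; presumably they are module-level bookkeeping paths shared by the whole test suite (the up/down suffix suggesting magnet polarity). A minimal sketch of what such fixtures might look like, with the actual paths deliberately left as placeholders:

# Hypothetical module-level fixtures assumed by the tests in this section;
# the real test module defines actual LHCb bookkeeping paths.
stripping15up = '/LHCb/.../Stripping15/...'      # placeholder, magnet-up data
stripping15down = '/LHCb/.../Stripping15/...'    # placeholder, magnet-down data
stripping16up = '/LHCb/.../Stripping16/...'      # placeholder, magnet-up data
stripping16down = '/LHCb/.../Stripping16/...'    # placeholder, magnet-down data
stripping20up = '/LHCb/.../Stripping20/...'      # placeholder, magnet-up data
stripping20down = '/LHCb/.../Stripping20/...'    # placeholder, magnet-down data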
Example #2
    def test_update(self):
        from Ganga import GPI
        t = GPI.LHCbTask()
        tr = GPI.LHCbTransform(application=DaVinci(), backend=Dirac())
        t.appendTransform(tr)
        try:
            bkQueryList = [GPI.BKTestQuery(stripping20up)]
            tr.updateQuery()
            assert False, 'Should have thrown an exception if updated with no query'
        except:
            tr.addQuery(GPI.BKTestQuery(stripping20down))

            # Check some new data added
            assert len(tr.inputdata), 'No data added after call to update'

            try:
                # Shouldn't allow a second update before the data in
                # toProcess_dataset has been processed
                tr.updateQuery()
                assert False, 'Should have thrown an error if updated with files already to process'
            except:
                # Run so we can update again with a removed dataset; recall that
                # jobs with the old dataset are only created when run() is called.
                t.run()
                assert len(tr.getJobs()), "No Jobs created upon run()"
                job = GPI.jobs(int(tr.getJobs()[0].fqid.split('.')[0]))
                sleep_until_state(job, 300, 'submitted')
                del tr._impl.query.dataset.files[0]
                tr.update(True)

                # Check the dead dataset is picked up
                assert len(tr._impl.removed_data.files), \
                    "Didn't pick up the loss of a dataset"
                job.remove()
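sleep_until_state above is a helper from the test harness whose definition is not shown; from its call site it presumably polls the job until it reaches the requested status or a timeout expires. A rough sketch under that assumption:

import time

def sleep_until_state(job, timeout=60, state='completed', poll=5):
    # Sketch only: poll job.status until it matches the requested state or the
    # timeout (in seconds) expires; the real Ganga test utility may differ.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if job.status == state:
            return True
        time.sleep(poll)
    return job.status == state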
Example #3
    def test_update(self):
        from Ganga import GPI
        t = GPI.LHCbTask()
        tr1 = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        tr2 = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        t.appendTransform(tr1)
        t.appendTransform(tr2)
        tr1.addQuery(GPI.BKTestQuery(stripping15up))
        tr2.addQuery(GPI.BKTestQuery(stripping15down))

        # Check that update produces some files to process over multiple
        # transforms
        t.update()
        assert len(
            t.transforms[0]._impl.toProcess_dataset.files
        ), 'Update did not produce any datafiles to process in transform 0'
        assert len(
            t.transforms[1]._impl.toProcess_dataset.files
        ), 'Update did not produce any datafiles to process in transform 1'
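Outside the test harness the same flow would presumably be driven from the Ganga prompt, where GPI names are available directly; the sketch below uses the production BKQuery in place of the test-only BKTestQuery, and the bookkeeping paths are placeholders:

# Hypothetical interactive session mirroring the multi-transform test above.
t = LHCbTask()
tr1 = LHCbTransform(application=DaVinci(), backend=Local())
tr2 = LHCbTransform(application=DaVinci(), backend=Local())
t.appendTransform(tr1)
t.appendTransform(tr2)
tr1.addQuery(BKQuery('/LHCb/.../Stripping15/...'))  # placeholder path
tr2.addQuery(BKQuery('/LHCb/.../Stripping15/...'))  # placeholder path
t.update()  # query the bookkeeping and queue new files on every transform
t.run()     # create and submit jobs for the queued files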
Example #4
    def test_addQuery(self):
        from Ganga import GPI
        tr = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        t = GPI.LHCbTask()

        # Check adding a single (non-list) query and that addQuery associates
        # a previously unattached transform with the task
        t.addQuery(tr, GPI.BKTestQuery(stripping15up))
        assert len(t.transforms), 'Transform not associated correctly'
        assert t.transforms[0].queries[0].path == stripping15up, \
            'Query path not correctly assigned'

        # Check that adding a list of queries duplicates the transform
        t.addQuery(tr, bkQueryList)
        assert len(t.transforms) == 4, \
            'Problem duplicating and appending transforms'
        tmpList = [
            stripping15up, stripping15down, stripping16up, stripping16down
        ]
        for tran in t.transforms:
            assert tran.queries[0].path in tmpList, \
                'Query attribute not set up properly for all transforms'
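bkQueryList is not defined in this snippet; given the assertions it is presumably a module-level list of BKTestQuery objects covering the remaining stripping paths, so that four transforms exist afterwards. A sketch of such a fixture, whose exact contents are an assumption:

# Hypothetical fixture; the real definition is not shown in the snippet.
bkQueryList = [
    GPI.BKTestQuery(stripping15down),
    GPI.BKTestQuery(stripping16up),
    GPI.BKTestQuery(stripping16down),
]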
Example #5
    def test_overview(self):
        from Ganga import GPI
        tr = GPI.LHCbTransform(application=DaVinci(), backend=Local())
        # Simply check that overview() runs without raising on a new transform
        tr.overview()
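The last example only checks that overview() can be called on a freshly created transform without raising. At the Ganga prompt the same kind of summary would presumably also be available at the task level, along the lines of:

# Hypothetical interactive usage; the task-level overview() is an assumption.
t = LHCbTask()
tr = LHCbTransform(application=DaVinci(), backend=Local())
t.appendTransform(tr)
tr.overview()  # per-transform status summary (as exercised by the test)
t.overview()   # assumed task-wide equivalent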