Exemplo n.º 1
0
    def setUp(self):
        """Create four Local-backend jobs whose shell script writes two
        text outputs, and register the TextMerger for '.txt' files."""
        super(TestSmartMerger, self).setUp()
        from Ganga.GPI import Job, Executable, Local, File, LocalFile, config

        # Associate '.txt' outputs with the TextMerger so smart merging
        # can pick the correct merger automatically.
        config['Mergers']['associate'] = {'txt': 'TextMerger'}

        self.jobslice = []
        self.file_name = 'id_echo.sh'

        for _ in range(4):

            j = Job(application=Executable(), backend=Local())

            scriptString = '''
            #!/bin/sh
            echo "Output from job $1." > out.txt
            echo "Output from job $2." > out2.txt
            '''

            # mkdtemp creates the directory atomically; the original
            # mktemp() + os.mkdir() pair was racy and mktemp is deprecated.
            tmpdir = tempfile.mkdtemp()
            fileName = os.path.join(tmpdir, self.file_name)

            write_file(fileName, scriptString)

            j.application.exe = 'sh'
            j.application.args = [File(fileName), str(j.id), str(j.id * 10)]
            j.outputfiles = [LocalFile('out.txt'), LocalFile('out2.txt')]
            self.jobslice.append(j)
Exemplo n.º 2
0
    def setUp(self):
        """Create five Local-backend jobs whose shell script writes output
        both at the job's top level and inside a subdirectory."""
        super(TestStructure, self).setUp()
        from Ganga.GPI import Job, Executable, Local, File, LocalFile
        from GangaTest.Framework.utils import write_file

        self.jobslice = []
        self.file_name = 'id_echo.sh'

        for _ in range(5):

            j = Job(application=Executable(), backend=Local())

            scriptString = '''
            #!/bin/sh
            echo "Output from job $1." > out.txt
            mkdir -p subdir
            echo "Output from job $2." > subdir/out.txt
            '''

            # mkdtemp creates the directory atomically; the original
            # mktemp() + os.mkdir() pair was racy and mktemp is deprecated.
            tmpdir = tempfile.mkdtemp()
            fileName = os.path.join(tmpdir, self.file_name)

            write_file(fileName, scriptString)

            j.application.exe = 'sh'
            j.application.args = [File(fileName), str(j.id), str(j.id * 10)]
            j.outputfiles = [LocalFile('out.txt'), LocalFile('subdir/out.txt')]
            self.jobslice.append(j)
Exemplo n.º 3
0
    def test_e_MultipleFiles(self):
        """Test that the wildcards work"""

        from Ganga.GPI import LocalFile, MassStorageFile, Job, ArgSplitter

        root_ext = '.root'
        txt_ext = '.txt'
        # Two .root inputs plus one .txt input, all tracked for cleanup.
        temp_files = [generate_unique_temp_file(ext)
                      for ext in (root_ext, root_ext, txt_ext)]
        for temp_file in temp_files:
            TestMassStorageWN._managed_files.append(temp_file)

        j = Job()
        j.inputfiles = [LocalFile(name) for name in temp_files]
        j.splitter = ArgSplitter(
            args=[[n] for n in range(TestMassStorageWN.sj_len)])
        # One wildcard per extension; only the .root pattern carries a
        # custom output filename format.
        j.outputfiles = [
            MassStorageFile(namePattern='*' + root_ext,
                            outputfilenameformat='{jid}/{sjid}/{fname}'),
            MassStorageFile(namePattern='*' + txt_ext)
        ]
        j.submit()
Exemplo n.º 4
0
def submit_job(brunel_app, reco_type, input_files=None, local=RUN_LOCAL):
    """Configure and queue a Brunel reconstruction job.

    Local runs process a single dataset file on the Local backend with
    EvtMax=1; otherwise the full dataset is processed on Dirac with
    EvtMax=-1 (all events). Returns True once the submission is queued.
    """
    # EvtMax evaluates to 1 when local (2*1-1) and -1 otherwise.
    brunel_app.extraOpts += 'from Configurables import Brunel\n'
    brunel_app.extraOpts += 'Brunel().EvtMax = {}'.format(2 * int(local) - 1)

    run_suffix = '(local)' if local else ''
    job = Job(name='VP hybrid distortions',
              comment='{reco_type} reconstruction {suffix}'.format(
                  reco_type=reco_type, suffix=run_suffix),
              application=brunel_app,
              splitter=SplitByFiles(filesPerJob=1, ignoremissing=True),
              parallel_submit=True)

    if local:
        job.backend = Local()
        job.outputfiles = [LocalFile('*.xdst'), LocalFile('*.root')]
        job.inputdata = dataset[:1]
    else:
        job.backend = Dirac()
        job.outputfiles = [DiracFile('*.xdst'), DiracFile('*.root')]
        job.inputdata = dataset

    job.inputfiles = input_files if input_files else []

    queues.add(job.submit)
    return True
Exemplo n.º 5
0
    def testSubmitJobWithInputFile(self):
        """
        This test adds a dummy inputfile into the job and tests that it is returned when the job is completed
        """

        from Ganga.GPI import LocalFile

        name_a = 'testGaudiExecFile.txt'
        name_b = 'testGaudiExecFile2.txt'
        content_a = '12345'
        content_b = '67890'

        j = TestExternalGaudiExec._constructJob()

        # Materialise both dummy files inside the release area.
        path_a = path.join(TestExternalGaudiExec.tmpdir_release, name_a)
        path_b = path.join(TestExternalGaudiExec.tmpdir_release, name_b)
        FileBuffer(name_a, content_a).create(path_a)
        FileBuffer(name_b, content_b).create(path_b)

        # Mix a plain path and a LocalFile to cover both input styles.
        j.inputfiles = [path_a, LocalFile(path_b)]
        j.outputfiles = [LocalFile(name_a), LocalFile(name_b)]

        j.submit()

        run_until_completed(j)

        assert j.status == 'completed'

        outputDir = stripProxy(j).getOutputWorkspace(create=False).getPath()

        assert path.isfile(path_a)
        assert path.isfile(path_b)

        assert content_a in open(path_a).read()
        assert content_b in open(path_b).read()
Exemplo n.º 6
0
    def testNoFilesSpecifiedAllSame(self):
        """Two jobs sharing identical outputfile lists should have every
        file selected for merging."""
        from Ganga.GPI import LocalFile, Job

        shared_files = [LocalFile(name)
                        for name in ('foo.root', 'bar.root', 'out.log')]

        j1 = Job(outputfiles=shared_files)
        j2 = Job(outputfiles=shared_files)

        assert j1.outputfiles == j2.outputfiles, 'File lists should be the same'

        expected = ['foo.root', 'bar.root', 'out.log']
        assert findFilesToMerge([j1, j2]) == expected, 'Should merge all files'
Exemplo n.º 7
0
    def test_a_testClientSideSubmit(self):
        """Test the client side code whilst stil using the Local backend"""

        from Ganga.GPI import LocalFile, Job, ArgSplitter

        ext = TestLocalFileClient._ext
        input_a = generate_unique_temp_file(ext)
        input_b = generate_unique_temp_file(ext)
        # Track the temp files so the class teardown can remove them.
        TestLocalFileClient._managed_files.append(input_a)
        TestLocalFileClient._managed_files.append(input_b)

        j = Job()
        j.inputfiles = [LocalFile(input_a), LocalFile(input_b)]
        j.splitter = ArgSplitter(
            args=[[n] for n in range(TestLocalFileClient.sj_len)])
        j.outputfiles = [LocalFile(namePattern='*' + ext)]
        j.submit()
Exemplo n.º 8
0
    def testPrepareJob(self):
        """Prepare a GaudiExec job with a single options file and check
        that a shared prepared area is created."""

        from Ganga.GPI import Job, LocalFile, prepareGaudiExec

        import os
        # Start from a clean release area.
        if os.path.exists(TestExternalGaudiExec.tmpdir_release):
            os.system("rm -rf %s/*" % TestExternalGaudiExec.tmpdir_release)

        app = prepareGaudiExec(
            'DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release)
        j = Job(application=app)

        opts_file = path.join(TestExternalGaudiExec.tmpdir_release,
                              'hello.py')

        FileBuffer('hello.py', 'print("Hello")').create(opts_file)

        assert path.isfile(opts_file)

        j.application.options = [LocalFile(opts_file)]

        j.prepare()

        assert j.application.is_prepared.name

        assert path.isdir(j.application.is_prepared.path())
Exemplo n.º 9
0
    def testRun(self):
        """
        Run a notebook application and check that it is executed
        """
        from Ganga.GPI import Job, Notebook, LocalFile, jobs
        j = Job()
        # NOTE(review): 'a' is never assigned to j.application, so the job
        # runs with the default application — confirm this is intended.
        a = Notebook()

        testfilename = 'Test.ipynb'
        # Directory containing this test module (renamed from 'dir',
        # which shadowed the builtin).
        test_dir = dirname(abspath(inspect.getfile(inspect.currentframe())))

        j.inputfiles = [LocalFile(join(test_dir, testfilename))]
        j.outputfiles = [LocalFile(testfilename)]
        j.submit()
        sleep_until_completed(jobs(0))
        assert j.status in ['completed']
        assert exists(join(j.outputdir, 'Test.ipynb'))
Exemplo n.º 10
0
def getNestedList():
    """Build a GangaList whose five entries each come from appending the
    same inner GangaList of five LocalFile objects."""
    from Ganga.GPI import LocalFile, GangaList
    inner = GangaList()
    for _ in range(5):
        inner.append(LocalFile())
    outer = GangaList()
    for _ in range(5):
        outer.append(inner)
    return outer
Exemplo n.º 11
0
        def testInterfaceLookFeel(self):
            """Smoke-test that an Im3Shape job can be built via the GPI."""

            from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

            j = Job()
            j.application = Im3ShapeApp(
                im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
                ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
                blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt'))
            j.backend = Dirac()
            # Single DiracFile input wrapped in a GangaDataset.
            input_data = GangaDataset()
            input_data.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))
            j.inputdata = input_data
            j.splitter = Im3ShapeSplitter(size=20)
            j.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
Exemplo n.º 12
0
    def test_a_testClientSideSubmit(self):
        """Test the client side code whilst stil using the Local backend"""

        MassStorageFile = self.fileClass

        from Ganga.GPI import LocalFile, Job, ArgSplitter

        TestMassStorageClient.cleanUp()

        # This file type must be configured for client-side
        # post-processing on the Local backend.
        backend_mode = getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local']
        assert backend_mode == 'client'

        ext = TestMassStorageClient._ext
        temp_a = generate_unique_temp_file(ext)
        temp_b = generate_unique_temp_file(ext)
        TestMassStorageClient._managed_files.append(temp_a)
        TestMassStorageClient._managed_files.append(temp_b)

        j = Job()
        j.inputfiles = [LocalFile(temp_a), LocalFile(temp_b)]
        j.splitter = ArgSplitter(
            args=[[n] for n in range(TestMassStorageClient.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + ext)]
        j.submit()
Exemplo n.º 13
0
    def testNoFilesSpecifiedNoOverlap(self):
        """Jobs with disjoint output file lists yield nothing to merge."""
        from Ganga.GPI import LocalFile, Job

        first = Job(outputfiles=[LocalFile('foo.root'),
                                 LocalFile('bar.root'),
                                 LocalFile('out.log')])
        second = Job(outputfiles=[LocalFile('a.root'),
                                  LocalFile('b.root'),
                                  LocalFile('c.log')])

        assert findFilesToMerge([first, second]) == [], 'Should merge no files'
Exemplo n.º 14
0
    def testMergeThatAlwaysFailsOverwrite(self):
        """A merger that always fails (with overwrite=True) must fail the
        job and still leave a merge summary file behind."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.backend = Local()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        assert run_until_state(j, 'failed', timeout=60)
        summary = os.path.join(j.outputdir, 'out.txt.merge_summary')
        assert os.path.exists(summary), 'Summary file should be created'
Exemplo n.º 15
0
    def test_A_Construction(self):
        """Prepare a plain Job and attach two files to its shared area,
        recording their locations for later tests in this class."""
        from Ganga.GPI import Job, LocalFile
        j = Job()

        assert j.application.is_prepared == None

        j.prepare()

        assert j.application.is_prepared != None

        prepared_path = j.application.is_prepared.path()
        TestShared.shared_area_location = prepared_path
        assert path.isdir(TestShared.shared_area_location)

        TestShared.a_file_location = path.join(prepared_path, 'a.txt')
        TestShared.b_file_location = path.join(prepared_path, 'b.txt')

        # Touch both files so they exist inside the shared area.
        open(TestShared.a_file_location, 'w').close()
        open(TestShared.b_file_location, 'w').close()
        j.application.is_prepared.associated_files.append(
            LocalFile(TestShared.a_file_location))
        j.application.is_prepared.associated_files.append(
            LocalFile(TestShared.b_file_location))
Exemplo n.º 16
0
    def test_g_MultipleFiles(self):
        """Test that the wildcards work"""

        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        ext = '.root'
        temp_a = generate_unique_temp_file(ext)
        temp_b = generate_unique_temp_file(ext)
        TestMassStorageWN._managed_files.append(temp_a)
        TestMassStorageWN._managed_files.append(temp_b)

        j = Job()
        j.inputfiles = [LocalFile(temp_a), LocalFile(temp_b)]
        j.splitter = ArgSplitter(
            args=[[n] for n in range(TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + ext,
                                         outputfilenameformat=self.customOutputFormat)]

        # The custom format must survive assignment ...
        for outfile in j.outputfiles:
            assert outfile.outputfilenameformat == self.customOutputFormat

        j.submit()

        # ... and submission.
        for outfile in j.outputfiles:
            assert outfile.outputfilenameformat == self.customOutputFormat
Exemplo n.º 17
0
    def test_c_testCopy(self):
        """Copying the most recent job must preserve its file lists."""

        from Ganga.GPI import jobs, LocalFile

        original = jobs[-1]
        clone = original.copy()

        assert len(clone.outputfiles) == 1

        expected = LocalFile(namePattern='*' + TestLocalFileClient._ext)
        assert clone.outputfiles[0] == expected

        assert len(clone.inputfiles) == 2

        self.cleanUp()
Exemplo n.º 18
0
    def test_Savannah15630(self):
        """Regression test (Savannah #15630): a LocalFile output declared
        on a Local job is copied back into the job's output directory."""
        from Ganga.GPI import Job, Executable, Local, LocalFile

        from GangaTest.Framework.utils import sleep_until_completed
        j = Job()
        j.backend = Local()
        j.application = Executable(exe='touch', args=['out.dat'])
        j.outputfiles = [LocalFile('out.dat')]
        j.submit()
        self.assertTrue(
            sleep_until_completed(j, 60),
            'Timeout on job submission: job is still not finished')

        import os.path
        expected = os.path.join(j.outputdir, j.application.args[0])
        self.assertTrue(os.path.exists(expected))
Exemplo n.º 19
0
    def test_Savannah19059(self):
        """Regression test (Savannah #19059): the Interactive backend must
        copy the sandbox back into the output directory."""
        from Ganga.GPI import Executable, Job, Interactive, LocalFile

        import os.path
        from GangaTest.Framework.utils import sleep_until_completed

        self.fname = 'abc'
        app = Executable()
        app.exe = 'touch'
        app.args = [self.fname]
        self.j = Job(backend=Interactive(), application=app,
                     outputfiles=[LocalFile(self.fname)])
        self.j.submit()

        self.assertTrue(sleep_until_completed(self.j, 60),
                        'Timeout on registering Interactive job as completed')

        result = os.path.join(self.j.outputdir, self.fname)
        self.assertTrue(os.path.exists(result))
Exemplo n.º 20
0
    def test_a_Submit(self):
        """Test the ability to submit a job with some LocalFiles"""
        from Ganga.GPI import jobs, Job, LocalFile, MassStorageFile

        TestMassStorageWN.cleanUp()

        ext = '.txt'

        input_file = generate_unique_temp_file(ext)
        TestMassStorageWN._managed_files.append(input_file)

        j = Job()
        j.inputfiles = [LocalFile(input_file)]
        # Wildcard output routed to mass storage with a jid-based layout.
        out = MassStorageFile(namePattern='*' + ext,
                              outputfilenameformat='{jid}/{fname}')
        j.outputfiles = [out]
        j.submit()
Exemplo n.º 21
0
    def test_a_Submit(self):
        """Test the ability to submit a job with some LocalFiles"""

        MassStorageFile = self.fileClass
        from Ganga.GPI import jobs, Job, LocalFile

        ext = '.txt'

        input_file = generate_unique_temp_file(ext)
        TestMassStorageWN._managed_files.append(input_file)

        j = Job()
        j.inputfiles = [LocalFile(input_file)]
        j.outputfiles = [MassStorageFile(namePattern='*' + ext,
                                         outputfilenameformat=self.standardFormat)]
        j.submit()

        # Every output must retain the configured filename format.
        for outfile in j.outputfiles:
            assert outfile.outputfilenameformat == self.standardFormat
Exemplo n.º 22
0
    def test_c_SplitJob(self):
        """Test submitting subjobs"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import Job, LocalFile, ArgSplitter

        ext = '.txt2'

        input_file = generate_unique_temp_file(ext)
        TestMassStorageWN._managed_files.append(input_file)

        j = Job()
        j.inputfiles = [LocalFile(input_file)]
        j.splitter = ArgSplitter(
            args=[[n] for n in range(TestMassStorageWN.sj_len)])
        j.outputfiles = [MassStorageFile(namePattern='*' + ext,
                                         outputfilenameformat=self.extendedFormat)]
        j.submit()

        # Each output keeps the extended filename format after submit.
        for outfile in j.outputfiles:
            assert outfile.outputfilenameformat == self.extendedFormat
Exemplo n.º 23
0
    def testSubmitJobDiracWithInput(self):
        """Submit a GaudiExec job carrying an input file to Dirac and
        check that it reaches the submitted state."""

        j = TestExternalGaudiExec._constructJob()

        from Ganga.GPI import LocalFile, Dirac, DiracProxy

        j.backend = Dirac(credential_requirements=DiracProxy(
            group='lhcb_user', encodeDefaultProxyFileName=False))

        input_name = 'testGaudiExecFile.txt'
        input_content = '12345'
        input_path = path.join(TestExternalGaudiExec.tmpdir_release,
                               input_name)
        FileBuffer(input_name, input_content).create(input_path)

        j.inputfiles = [input_path]
        j.outputfiles = [LocalFile(input_name)]

        j.submit()

        assert j.status == "submitted"
Exemplo n.º 24
0
    def test_a_testClientInputSubmit(self):
        """Test that a job can be submitted with inputfiles in the input"""

        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        ext = '.root'
        # Stage two unique temp files into mass storage before submission.
        uploaded = []
        for _ in range(2):
            temp_file = generate_unique_temp_file(ext)
            self._managed_files.append(temp_file)
            msf = MassStorageFile(temp_file)
            msf.put()
            uploaded.append(msf)

        j = Job()
        j.inputfiles = uploaded
        j.splitter = ArgSplitter(args=[[n] for n in range(self.sj_len)])
        j.outputfiles = [LocalFile(namePattern='*' + ext)]
        j.submit()
Exemplo n.º 25
0
    def testInternal(self):
        """Check GaudiExec option-file handling: a LocalFile option is
        resolved back to its on-disk path, and the options list can be
        replaced with a DiracFile afterwards."""

        from Ganga.GPI import GaudiExec, Job, LocalFile, DiracFile

        tmp_fol = gettempdir()
        gaudi_testFol = path.join(tmp_fol, 'GaudiExecTest')
        shutil.rmtree(gaudi_testFol, ignore_errors=True)
        makedirs(gaudi_testFol)
        gaudi_testOpts = path.join(gaudi_testFol, 'testOpts.py')
        with open(gaudi_testOpts, 'w+') as temp_opt:
            temp_opt.write("print('hello')")

        assert path.exists(gaudi_testOpts)

        gr = GaudiExec(directory=gaudi_testFol,
                       options=[LocalFile(gaudi_testOpts)])

        # Resolve the options once instead of re-querying getOptsFiles()
        # for every assertion (the original called it three times).
        first_opt = stripProxy(gr).getOptsFiles()[0]
        assert isinstance(first_opt, stripProxy(LocalFile))

        reconstructed_path = path.join(first_opt.localDir,
                                       first_opt.namePattern)

        assert reconstructed_path == gaudi_testOpts

        # Close the handle promptly instead of relying on GC.
        with open(reconstructed_path) as opts_file:
            assert opts_file.read() == "print('hello')"

        j = Job()
        j.application = gr

        assert isinstance(j.application, GaudiExec)

        df = DiracFile(lfn='/not/some/file')

        gr.options = [df]

        assert gr.options[0].lfn == df.lfn

        shutil.rmtree(gaudi_testFol, ignore_errors=True)
Exemplo n.º 26
0
    def _constructJob():
        """
        This is a helper method to construct a new GaudiExec job object for submission testing
        This just helps reduce repeat code between tests
        """

        import os
        # Wipe any previous release area so each test starts clean.
        release_dir = TestExternalGaudiExec.tmpdir_release
        if os.path.exists(release_dir):
            os.system("rm -fr %s/" % release_dir)

        from Ganga.GPI import Job, LocalFile, prepareGaudiExec

        app = prepareGaudiExec('DaVinci', latestDaVinci(), release_dir)
        j = Job(application=app)

        opts_path = path.join(release_dir, 'testfile.py')

        FileBuffer('testfile.py', 'print("ThisIsATest")').create(opts_path)

        j.application.options = [LocalFile(opts_path)]

        return j
Exemplo n.º 27
0
    def test_g_Splitters(self):
        """Documentation-driven examples of job splitters.

        The ``# -- SPLITTERS ... START/STOP`` marker comments delimit
        snippets extracted into the user documentation — keep them and
        the code between them intact when editing.
        """
        from Ganga.GPI import Job, GenericSplitter, GangaDataset, GangaDatasetSplitter, LocalFile

        # Split a single attribute over a list of value sets.
        # -- SPLITTERS BASICUSE START
        j = Job()
        j.splitter = GenericSplitter()
        j.splitter.attribute = 'application.args'
        j.splitter.values = [['hello', 1], ['world', 2], ['again', 3]]
        j.submit()
        # -- SPLITTERS BASICUSE STOP

        # -- SPLITTERS SUBJOBCHECK START
        j.subjobs
        j.subjobs(0).peek("stdout")
        # -- SPLITTERS SUBJOBCHECK STOP

        # Split several attributes in lockstep via multi_attrs.
        # -- SPLITTERS MULTIATTRS START
        j = Job()
        j.splitter = GenericSplitter()
        j.splitter.multi_attrs = {
            'application.args': ['hello1', 'hello2'],
            'application.env': [{
                'MYENV': 'test1'
            }, {
                'MYENV': 'test2'
            }]
        }
        j.submit()
        # -- SPLITTERS MULTIATTRS STOP

        # Split a GangaDataset into subjobs of a fixed number of files.
        # -- SPLITTERS DATASETSPLITTER START
        j = Job()
        j.application.exe = 'more'
        j.application.args = ['__GangaInputData.txt__']
        j.inputdata = GangaDataset(files=[LocalFile('*.txt')])
        j.splitter = GangaDatasetSplitter()
        j.splitter.files_per_subjob = 2
        j.submit()
Exemplo n.º 28
0
    def testSubmitJobComplete(self):
        """
        Test that the job completes successfully
        """

        from Ganga.GPI import jobs
        from Ganga.GPI import Job, LocalFile, prepareGaudiExec

        import os
        # Start from a clean release area.
        if os.path.exists(TestExternalGaudiExec.tmpdir_release):
            os.system("rm -rf %s/*" % TestExternalGaudiExec.tmpdir_release)

        j = Job(application=prepareGaudiExec(
            'DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release))

        myOpts = path.join(TestExternalGaudiExec.tmpdir_release, 'testfile.py')

        FileBuffer('testfile.py', 'print("ThisIsATest")').create(myOpts)

        j.application.options = [LocalFile(myOpts)]

        j.submit()

        run_until_completed(j)

        assert j.status == 'completed'

        outputfile = path.join(j.outputdir, 'stdout')

        assert path.isfile(outputfile)

        # Read stdout once: the original re-opened the file for every
        # check, leaking four file handles.
        with open(outputfile) as stdout_file:
            stdout_text = stdout_file.read()

        assert 'testfile.py' in stdout_text

        assert 'data.py' in stdout_text

        assert 'ThisIsATest' in stdout_text

        assert j.application.platform in stdout_text
Exemplo n.º 29
0
    def testMergeRemoval(self):
        """Removing a job while its merger is pending must deregister the
        job cleanly (see Savannah 33710)."""
        from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

        j = Job()
        jobID = j.id
        # The payload sleeps so the job is still running when removed.
        j.application = Executable(exe='sh',
                                   args=['-c', 'sleep 20; echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'])

        merger = j.postprocessors[0]
        merger.ignorefailed = True
        merger.alwaysfail = True
        merger.wait = 10

        j.submit()
        run_until_state(j, state='running')
        j.remove()

        # Looking up the removed job's id must now raise.
        with pytest.raises(KeyError):
            jobs(jobID)
Exemplo n.º 30
0
    def test_k_Tasks(self):
        """Documentation examples for the Tasks framework: a basic task
        with a GenericSplitter transform, submission options, and a
        two-transform job chain.

        The ``# -- TASKS ... START/STOP`` marker comments delimit
        snippets extracted into the user documentation.
        """
        from Ganga.GPI import CoreTask, CoreTransform, Executable, Local, GenericSplitter, LocalFile, GangaDataset, \
            GangaDatasetSplitter, TaskChainInput, File, tasks

        # -- TASKS EXAMPLE START
        # First create the overall Task
        t = CoreTask()

        # Now create the Transform ( -> Job template)
        trf = CoreTransform()
        trf.application = Executable()
        trf.backend = Local()

        # Set the unit splitter (unique to CoreTransform - you may have better ways of creating units in your own
        # plugins). This will create a unit based on the splitting of any given splitter
        # If you put in your own splitter here, use the trf.fields_to_copy string list to tell Tasks which fields of
        # a Job to preserve from the split. Here, Tasks already knows about GenericSplitter and knows that we want to
        # change the 'application' object for each Unit/Master Job
        trf.unit_splitter = GenericSplitter()
        trf.unit_splitter.attribute = "application.args"
        trf.unit_splitter.values = ['arg 1', 'arg 2', 'arg 3']

        # Append the transform
        t.appendTransform(trf)

        # set the maximum number of active jobs to have running (allows for throttling)
        t.float = 100

        # run the Task
        t.run()
        # -- TASKS EXAMPLE STOP

        # -- TASKS OVERVIEW START
        tasks
        tasks(0).overview()
        # -- TASKS OVERVIEW STOP

        t = CoreTask()
        trf = CoreTransform()
        trf.application = Executable()
        trf.backend = Local()
        trf.unit_splitter = GenericSplitter()
        trf.unit_splitter.attribute = "application.args"
        trf.unit_splitter.values = ['arg 1', 'arg 2', 'arg 3']
        t.appendTransform(trf)
        t.float = 100

        # -- TASKS OPTIONS START
        # note - done at the transform level rather than task level as different backends may not need it
        trf.max_active_threads = 10  # optional - specifies the max number of submissions to queue up
        trf.submit_with_threads = True
        # -- TASKS OPTIONS STOP

        # -- TASKS JOBCHAIN START
        # Create a test script
        with open('my_script3.sh', 'w') as script_file:
            script_file.write("""#!/bin/bash
        echo $PATH
        ls -ltr
        more __GangaInputData.txt__
        echo "MY TEST FILE" > output_file.txt
        sleep 120
        """)

        # Create the parent task
        t = CoreTask()

        # Create the first transform
        trf1 = CoreTransform()
        trf1.application = Executable()
        trf1.application.exe = File('my_script3.sh')
        trf1.outputfiles = [LocalFile("*.txt")]
        d = GangaDataset()
        d.files = [LocalFile("*.txt")]
        d.treat_as_inputfiles = True
        trf1.addInputData(d)
        trf1.files_per_unit = 1
        trf1.submit_with_threads = True

        trf1.splitter = GangaDatasetSplitter()
        trf1.splitter.files_per_subjob = 2

        trf1.backend = Local()
        t.appendTransform(trf1)

        # Create the second transform
        trf2 = CoreTransform()
        trf2.application = Executable()
        # BUGFIX: this line previously re-assigned trf1.application.exe
        # (a copy-paste slip), leaving trf2's Executable with no script.
        trf2.application.exe = File('my_script3.sh')
        trf2.submit_with_threads = True

        d = TaskChainInput()
        d.input_trf_id = trf1.getID()
        trf2.addInputData(d)

        trf2.splitter = GangaDatasetSplitter()
        trf2.splitter.files_per_subjob = 2

        trf2.backend = Local()
        t.appendTransform(trf2)

        # Set the Task running
        t.float = 1
        t.run()