Example #1
    def testMergeWhenSubjobsHaveFailed(self):

        j = self.jobslice[0]
        j.splitter = CopySplitter()
        j.splitter.number = 5
        j.splitter.function_hook = 'makeFirstSubJobFailExecutable'

        tm = TextMerger()
        tm.files = ['out.txt']
        tm.ignorefailed = True
        j.postprocessors = tm

        j.submit()

        if not sleep_until_state(j, state='failed'):
            assert False, 'Test timed out'
        assert j.status == 'failed', 'Job should be failed'

        output = os.path.join(j.outputdir, 'out.txt')
        assert os.path.exists(output)

        for sj in j.subjobs[1:]:
            out_txt = os.path.join(sj.outputdir, 'out.txt')
            assert file_contains(output, out_txt), \
                'File must contain the output of each individual job'

        out_txt = os.path.join(j.subjobs[0].outputdir, 'out.txt')
        assert not file_contains(output, out_txt), \
            'The failed subjob must have been skipped'
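The assertions in these examples rely on file_contains from GangaTest.Framework.utils (see the import in Example #7 below). A minimal sketch of an equivalent helper, assuming it simply reports whether a given string occurs anywhere in the file's contents; this is an illustration, not the framework's actual implementation:

def file_contains(filename, string):
    # Assumption: returns True when `string` appears anywhere in the
    # text of `filename`.
    with open(filename) as f:
        return string in f.read()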
Example #2
    def testMultipleMergeOnTransitionOneJobArray(self):
        """Use the MultipleMerger to merge all the text files."""

        j = self.jobslice[0]
        j.splitter = CopySplitter()
        j.splitter.number = 5

        tm1 = TextMerger()
        tm1.files = ['out.txt']

        tm2 = TextMerger()
        tm2.files = ['out2.txt']

        j.postprocessors = [tm1, tm2]

        j.submit()
        if not sleep_until_completed(j):
            assert False, 'Test timed out'
        assert j.status == 'completed', 'Job should be finished'

        for out in ['out.txt', 'out2.txt']:
            output = os.path.join(j.outputdir, out)
            assert os.path.exists(output), 'File %s must exist' % output

            for sj in j.subjobs:
                out_txt = os.path.join(sj.outputdir, out)
                assert file_contains(output, out_txt), \
                    'File must contain the output of each individual job'
Example #3
    def testMergeOnTransitionOneJob(self):

        j = self.jobslice[0]
        j.splitter = CopySplitter()
        j.splitter.number = 5

        tm = TextMerger()
        tm.files = ['out.txt']
        j.postprocessors = tm

        assert j.postprocessors, 'Postprocessors should be set'

        j.submit()
        if not sleep_until_completed(j):
            assert False, 'Test timed out'
        assert j.status == 'completed', 'Job should be finished'

        assert len(j.subjobs) == 5, 'Job should have split correctly'

        output = os.path.join(j.outputdir, 'out.txt')
        assert os.path.exists(output)

        for sj in j.subjobs:
            out_txt = os.path.join(sj.outputdir, 'out.txt')
            assert file_contains(output, out_txt), \
                'File must contain the output of each individual job'
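The tests block on sleep_until_completed / sleep_until_state, which poll the job's status until a target state is reached or a timeout expires. A rough sketch of that poll-and-timeout pattern, using a hypothetical wait_for_state helper rather than the framework's real code:

import time

def wait_for_state(job, timeout=60, state='completed', break_states=(), poll=1):
    # Poll job.status until it matches `state`, settles in a state we do not
    # expect it to leave, or the timeout elapses.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if job.status == state:
            return True
        if job.status in break_states:
            return False
        time.sleep(poll)
    return False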
Example #4
    def testActualMergeJob(self):
        from Ganga.GPI import SmartMerger

        self.runJobSlice()
        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        sm = SmartMerger()
        assert sm.merge(self.jobslice, tmpdir), 'Merge should complete'

        for j in self.jobslice:
            output = os.path.join(j.outputdir, 'out.txt')
            assert file_contains(output, 'Output from job %d.' % j.id), \
                'File must contain the output of each individual job'

        for j in self.jobslice:
            output = os.path.join(j.outputdir, 'out2.txt')
            assert file_contains(output, 'Output from job %d.' % (j.id * 10)), \
                'File must contain the output of each individual job'
Example #5
    def testDirectMerge(self):

        self.runJobSlice()

        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        tm = TextMerger()
        tm.files = ['out.txt', 'out2.txt']
        assert tm.merge(self.jobslice, tmpdir), 'Merge should complete'

        for j in self.jobslice:
            output = os.path.join(j.outputdir, 'out.txt')
            assert file_contains(output, 'Output from job %d.' % j.id), \
                'File must contain the output of each individual job'

        for j in self.jobslice:
            output = os.path.join(j.outputdir, 'out2.txt')
            assert file_contains(output, 'Output from job %d.' % (j.id * 10)), \
                'File must contain the output of each individual job'
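Several examples create a scratch directory with tempfile.mktemp() followed by os.mkdir(). The standard library's tempfile.mkdtemp() does both in one step and avoids the race between picking the name and creating the directory, as the testScripts example below already does:

import shutil
import tempfile

tmpdir = tempfile.mkdtemp()     # created atomically, no mktemp()/mkdir() race
try:
    pass                        # ... run the merge into tmpdir here ...
finally:
    shutil.rmtree(tmpdir)       # remove the scratch directory afterwards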
Example #6
    def testScripts(self):
        gp = GaudiPython()
        #gp.version = configDaVinci['version']
        tempDir = tempfile.mkdtemp()
        name1 = join(tempDir, 'script1.py')
        name2 = join(tempDir, 'script2.py')
        write_file(name1, 'print "ABC"\nexecfile("script2.py")\n')
        write_file(name2, 'print "DEF"\n')
        gp.script = [name1, name2]
        j = Job(application=gp, backend=Local())
        j.submit()
        assert sleep_until_completed(j, 600)

        fname = join(j.outputdir, 'stdout')
        print('file =', open(fname).read())
        assert file_contains(fname, 'ABC'), 'First script file not executed'
        assert file_contains(fname, 'DEF'),\
            'Inclusion of second script not working'

        shutil.rmtree(tempDir)
Example #7
    def test_Savannah47814(self):
        from Ganga.GPI import Job, Executable

        from GangaTest.Framework.utils import sleep_until_state, file_contains

        j = Job()
        j.application = Executable(exe='ThisScriptDoesNotExist')
        j.submit()

        failed = sleep_until_state(j, 60, state='failed', break_states=['new', 'killed', 'completed', 'unknown', 'removed'])
        self.assertTrue(failed, 'Job with illegal script should fail. Instead it went into the state %s' % j.status)

        import os.path
        f = os.path.join(j.outputdir, '__jobstatus__')
        self.assertTrue(file_contains(f, 'No such file or directory'), '__jobstatus__ file should contain error')
Example #8
    def testSplit(self):
        gp = GaudiPython()
        #gp.version = configDaVinci['version']
        j = Job(application=gp, backend=Local())
        j.inputdata = LHCbDataset(['LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000006_1.dimuon.dst',
                                   'LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000007_1.dimuon.dst'])
        j.splitter = SplitByFiles()
        j.submit()
        assert sleep_until_completed(j, 600)

        executionstring = 'SUCCESS Reading Event record 1'
        for js in j.subjobs:
            fname = join(js.outputdir, 'stdout')
            print('file =', open(fname).read())
            assert file_contains(fname, executionstring),\
                'stdout should contain string: ' + executionstring
Example #9
    def testLocal(self):

        j = Job(application=GaudiPython(), backend=Local())
        #j.application.version = configDaVinci['version']
        j.submit()

        assert j.application.script != [],\
            'Submit should assign a default script file'

        assert sleep_until_completed(j, 600)

        fname = join(j.outputdir, 'stdout')
        print('file =', open(fname).read())
        executionstring = 'Welcome to ApplicationMgr'
        assert file_contains(fname, executionstring),\
            'stdout should contain string: ' + executionstring
Example #10
    def testRecursiveMergeOnTransition(self):

        out_list = []

        # add a splitter to each job in jobslice
        for j in self.jobslice:

            j.splitter = CopySplitter()
            j.splitter.number = 5

        # remove one splitter, so that not every job has subjobs
        self.jobslice[-1].splitter = None

        # submit all the jobs and wait
        self.runJobSlice()

        # collect a list of subjob outfiles
        for j in self.jobslice:

            output = os.path.join(j.outputdir, 'out.txt')
            out_list.append(output)
            for sj in j.subjobs:
                out_list.append(os.path.join(sj.outputdir, 'out.txt'))

        # run a merge on the entire slice
        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        tm = TextMerger()
        tm.files = ['out.txt']
        assert tm.merge(self.jobslice, tmpdir), 'Merge should pass'

        outfile = os.path.join(tmpdir, 'out.txt')
        assert os.path.exists(outfile), 'File should have been created'

        for o in out_list:
            assert file_contains(outfile, o), \
                'File must contain output from all subjobs'
Example #11
    def Savannah47814(self):
        from Ganga.GPI import Job, Executable

        from GangaTest.Framework.utils import sleep_until_state, file_contains

        j = Job()
        j.application = Executable(exe='ThisScriptDoesNotExist')
        j.submit()

        failed = sleep_until_state(
            j,
            60,
            state='failed',
            break_states=['new', 'killed', 'completed', 'unknown', 'removed'])
        self.assertTrue(
            failed,
            'Job with illegal script should fail. Instead it went into the state %s'
            % j.status)

        import os.path
        f = os.path.join(j.outputdir, '__jobstatus__')
        self.assertTrue(file_contains(f, 'No such file or directory'),
                        '__jobstatus__ file should contain error')