Example #1
    def test_h_MultiUpload(self):
        """Test that multiple 'uploads' work"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(0, TestMassStorageWN.sj_len):
            # Check that the wildcard has still been expanded correctly
            assert len(
                stripProxy(stripProxy(
                    j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(j.subjobs[i].outputfiles) == 2
            file_prep = os.path.join(TestMassStorageWN.outputFilePath,
                                     str(j.id) + '_' + str(i) + '_')
            # Check that the files were placed in the correct place on storage
            for file_ in j.inputfiles:
                assert os.path.isfile(file_prep + file_.namePattern)

        TestMassStorageWN.cleanUp()
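Nearly every snippet on this page repeats the same submit-and-wait skeleton before making its assertions. The condensed sketch below pulls that common pattern out of the examples; the 'touch' executable and the 60-second timeout are illustrative choices rather than values taken from any single test.

    # Minimal sketch of the pattern shared by these examples (illustrative only)
    from Ganga.GPI import Job, Executable
    from GangaTest.Framework.utils import sleep_until_completed

    j = Job(application=Executable(exe='touch'))  # any quick job will do
    j.submit()

    # sleep_until_completed() waits for the job to finish (or for the optional
    # timeout to expire) and returns a falsy value if it gave up waiting, which
    # is why most of the tests here still check j.status explicitly afterwards.
    assert sleep_until_completed(j, timeout=60), 'Timed out waiting for the job'
    assert j.status == 'completed'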
Example #2
    def test_f_MultiUpload(self):
        """Test that multiple 'uploads' work"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(0, TestMassStorageWN.sj_len):
            # Check that the subfiles were expanded correctly
            assert len(
                stripProxy(stripProxy(
                    j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(
                stripProxy(stripProxy(
                    j.subjobs[i]).outputfiles[1]).subfiles) == 1
            # Check we have the correct total number of files
            assert len(j.subjobs[i].outputfiles) == 3
            output_dir = os.path.join(TestMassStorageWN.outputFilePath,
                                      str(j.id), str(i))
            assert os.path.isdir(output_dir)
            # Check that all of the files were put into storage
            for file_ in j.inputfiles:
                assert os.path.isfile(
                    os.path.join(output_dir, file_.namePattern))

        TestMassStorageWN.cleanUp()
Example #3
    def test_f_MultiUpload(self):
        """Test that multiple 'uploads' work"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(0, TestMassStorageWN.sj_len):
            # Check that the subfiles were expanded correctly
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[1]).subfiles) == 1
            # Check we have the correct total number of files
            assert len(j.subjobs[i].outputfiles) == 3
            output_dir = os.path.join(TestMassStorageWN.outputFilePath, str(j.id), str(i))
            assert os.path.isdir(output_dir)
            # Check that all of the files were put into storage
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, file_.namePattern))

        TestMassStorageWN.cleanUp()
Example #4
    def testFailJobOnMerge(self):
        from Ganga.GPI import CustomMerger

        self.runJobSlice()
        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        file_name = os.path.join(tmpdir, 'merge.py')
        with open(file_name, 'w') as module_file:
            module_file.write("""def mergefiles(file_list, output_file):
    '''Free script for merging files'''
    return False
        """)

        cm = CustomMerger(module=file_name)
        cm.files = ['out.txt', 'out2.txt']
        with pytest.raises(PostProcessException):
            cm.merge(self.jobslice, tmpdir)

        j = self.jobslice[0].copy()
        j.splitter = CopySplitter()
        j.postprocessors = cm
        j.submit()

        sleep_until_completed(j)
        assert j.status == 'failed'
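The merge module written in this test deliberately returns False, which is what drives CustomMerger to raise PostProcessException and, once attached as a postprocessor, to fail the job. For contrast, here is a minimal sketch of a mergefiles() the merger would accept; the naive concatenation body is only an illustrative assumption, since the only contract visible in these tests is the (file_list, output_file) signature and a return value that is not False.

    # Hypothetical counterpart to the failing module above: a mergefiles()
    # whose truthy return value lets the custom merge succeed. The simple
    # concatenation is an illustrative assumption, not Ganga's own merge logic.
    def mergefiles(file_list, output_file):
        '''Free script for merging files'''
        with open(output_file, 'w') as out:
            for name in file_list:
                with open(name) as part:
                    out.write(part.read())
        return True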
Example #5
    def testFailJobOnMerge(self):

        self.runJobSlice()
        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        file_name = os.path.join(tmpdir, 'merge.py')
        with open(file_name, 'w') as module_file:
            module_file.write("""def mergefiles(file_list, output_file):
    '''Free script for merging files'''
    return False
        """)

        cm = CustomMerger(module=file_name)
        cm.files = ['out.txt', 'out2.txt']
        try:
            cm.merge(self.jobslice, tmpdir)
            assert False, 'Merge should fail'
        except PostProcessException:
            pass

        j = self.jobslice[0].copy()
        j.splitter = CopySplitter()
        j.postprocessors = cm
        j.submit()

        sleep_until_completed(j)
        assert j.status == 'failed'
Example #6
    def testRootAutoMergeSimple(self):

        # just take one job
        j = self.jobslice[0]

        # add a merger
        rm = RootMerger()
        rm.files = ['fillrandom.root']
        j.postprocessors = rm

        # and a test splitter
        s = CopySplitter()
        s.number = 7
        j.splitter = s

        j.submit()

        sleep_until_completed(j)
        assert len(j.subjobs) == s.number, 'Splitting must have worked'
        assert j.status == 'completed', 'Job must complete normally'

        root_file = os.path.join(j.outputdir, 'fillrandom.root')
        assert os.path.exists(root_file), 'Merged file must exist'
        assert self.runHistogramEntriesTest(
            root_file, 'h1f', 10000 *
            j.splitter.number), 'Number of entries should be as expected'
Example #7
    def testFailJobOnMerge(self):

        self.runJobSlice()
        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)

        file_name = os.path.join(tmpdir, 'merge.py')
        with open(file_name, 'w') as module_file:
            module_file.write("""def mergefiles(file_list, output_file):
    '''Free script for merging files'''
    return False
        """)

        cm = CustomMerger(module=file_name)
        cm.files = ['out.txt', 'out2.txt']
        try:
            cm.merge(self.jobslice, tmpdir)
            assert False, 'Merge should fail'
        except PostProcessException:
            pass

        j = self.jobslice[0].copy()
        j.splitter = CopySplitter()
        j.postprocessors = cm
        j.submit()

        sleep_until_completed(j)
        assert j.status == 'failed'
Example #8
    def test_b_Completed(self):
        """Test that the job completed and the output files exist in storage"""
        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        # Just has to have reached completed state for checks to make sense
        assert j.status == 'completed'

        # Check that we've still got 1 file everywhere we expect 1
        assert len(j.inputfiles) == 1
        assert len(j.outputfiles) == 1
        # 1 file after wildcard expansion
        assert len(stripProxy(stripProxy(j).outputfiles[0]).subfiles) == 1
        assert len(j.outputfiles) == 1

        # Test that these strings are sensible
        assert j.outputfiles[0].namePattern != '' and j.outputfiles[0].namePattern[0] != '*'
        assert j.outputfiles[0].locations != [''] and isinstance(j.outputfiles[0].locations[0], str) is True
        assert j.outputfiles[0].accessURL() != [''] and isinstance(j.outputfiles[0].accessURL()[0], str) is True

        # Check that the output file exists on 'storage'
        output_dir = os.path.join(TestMassStorageWN.outputFilePath, str(j.id))
        assert os.path.isdir(output_dir)
        assert os.path.isfile(os.path.join(output_dir, j.inputfiles[0].namePattern))

        TestMassStorageWN.cleanUp()
Example #9
    def runJobSlice(self):

        for j in self.jobslice:
            j.submit()

            sleep_until_completed(j)
            assert j.status == 'completed'
Example #10
    def test_d_CompletedSJ(self):
        """Test that the subjobs have completed"""
        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        assert len(
            stripProxy(stripProxy(j.subjobs[0]).outputfiles[0]).subfiles) == 1
        assert len(j.subjobs[0].outputfiles) == 1

        for i in range(0, TestMassStorageWN.sj_len):
            output_dir = os.path.join(TestMassStorageWN.outputFilePath,
                                      str(j.id), str(i))
            assert os.path.isdir(output_dir)
            # Check each inputfile has been placed in storage like we asked
            for _input_file in j.inputfiles:
                assert os.path.isfile(
                    os.path.join(output_dir, _input_file.namePattern))

        TestMassStorageWN.cleanUp()
Example #11
    def test_d_XMLUpdated(self):
        # check they get updated elsewhere
        from Ganga.GPI import jobs, disableMonitoring, enableMonitoring

        disableMonitoring()

        j = jobs(0)

        XMLFileName = getXMLFile(j)

        last_update = stat(XMLFileName)

        j.submit()

        newest_update = stat(XMLFileName)

        from GangaTest.Framework.utils import sleep_until_completed

        enableMonitoring()

        can_assert = False
        if j.status in ['submitted', 'running']:
            can_assert = True
            sleep_until_completed(j, 60)

        final_update = stat(XMLFileName)

        assert newest_update.st_mtime > last_update.st_mtime
        # Only meaningful if the job was still being monitored above
        if can_assert:
            assert final_update.st_mtime >= newest_update.st_mtime
Example #12
    def test_h_MultiUpload(self):
        """Test that multiple 'uploads' work"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(0, TestMassStorageWN.sj_len):
            # Check that the wildcard has still been expanded correctly
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(j.subjobs[i].outputfiles) == 2
            file_prep = os.path.join(TestMassStorageWN.outputFilePath, str(j.id) + '_' + str(i) + '_')
            # Check that the files were placed in the correct place on storage
            for file_ in j.inputfiles:
                assert os.path.isfile(file_prep + file_.namePattern)

        TestMassStorageWN.cleanUp()
Example #13
    def test_d_CompletedSJ(self):
        """Test that the subjobs have completed"""
        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        sleep_until_completed(j)

        assert j.status == 'completed'

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        assert len(stripProxy(stripProxy(j.subjobs[0]).outputfiles[0]).subfiles) == 1
        assert len(j.subjobs[0].outputfiles) == 1

        for i in range(0, TestMassStorageWN.sj_len):
            output_dir = os.path.join(TestMassStorageWN.outputFilePath, str(j.id), str(i))
            assert os.path.isdir(output_dir)
            # Check each inputfile has been placed in storage like we asked
            for _input_file in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, _input_file.namePattern))

        TestMassStorageWN.cleanUp()
Example #14
    def runJobSlice(self):

        for j in self.jobslice:
            j.submit()

            sleep_until_completed(j)
            assert j.status == 'completed'
Example #15
    def testRootAutoMergeSimple(self):

        # just take one job
        j = self.jobslice[0]

        # add a merger
        rm = RootMerger()
        rm.files = ['fillrandom.root']
        j.postprocessors = rm

        # and a test splitter
        s = CopySplitter()
        s.number = 7
        j.splitter = s

        j.submit()

        sleep_until_completed(j)
        assert len(j.subjobs) == s.number, 'Splitting must have worked'
        assert j.status == 'completed', 'Job must complete normally'

        root_file = os.path.join(j.outputdir, 'fillrandom.root')
        assert os.path.exists(root_file), 'Merged file must exist'
        assert self.runHistogramEntriesTest(
            root_file, 'h1f', 10000 * j.splitter.number), 'Number of entries should be as expected'
Example #16
    def test_b_jobResubmit(self):
        """here for testing a re-submit"""
        from Ganga.GPI import jobs

        jobs(0).resubmit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(0))
Example #17
    def test_b_jobResubmit(self):
        """here for testing a re-submit"""
        from Ganga.GPI import jobs

        jobs(0).resubmit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(0))
Example #18
    def test_d_loadSubmit(self):
        """here for testing a loaded submit"""
        from Ganga.GPI import jobs

        jobs(1).submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(1))
Example #19
    def test_d_loadSubmit(self):
        """here for testing a loaded submit"""
        from Ganga.GPI import jobs

        jobs(1).submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(1))
Example #20
    def test_Dirac_job_put_single_file(self):
        j = Job(application=Executable(exe=File(self.filepath), args=[]), backend=Dirac(), outputfiles=[DiracFile('a.root')])
        j.submit()
        sleep_until_completed(j)

        self.assertEqual(len(j.outputfiles), 1)
        self.assertEqual(j.outputfiles[0].namePattern, 'a.root')
        self.assertNotEqual(j.outputfiles[0].lfn, '')
        self.assertNotEqual(j.outputfiles[0].guid, '')
        self.assertNotEqual(j.outputfiles[0].locations, [])
        j.outputfiles[0].remove()
Example #21
    def test_Dirac_job_wildcard_expansion(self):
        j = Job(application=Executable(exe=File(self.filepath), args=[]), backend=Dirac(), outputfiles=[DiracFile('*.root')])
        j.submit()
        sleep_until_completed(j)

        self.assertEqual(len(j._impl.outputfiles), 1)
        self.assertEqual(j._impl.outputfiles[0].namePattern, '*.root')
        self.assertEqual(len(j._impl.outputfiles[0].subfiles), 2)
        for df in j._impl.outputfiles[0].subfiles:
            self.assertIn(df.namePattern, ['a.root', 'b.root'])
            df.remove()
Example #22
    def test_local_job_put_wildcard_files(self):
        j = Job(application=Executable(exe=File(self.filepath), args=[]), outputfiles=[DiracFile('*.root')])
        j.submit()
        sleep_until_completed(j)

        self.assertEqual(len(j.outputfiles), 2)
        for df in j.outputfiles:
            self.assertIn(df.namePattern, ['a.root', 'b.root'])
            self.assertNotEqual(df.lfn, '')
            self.assertNotEqual(df.guid, '')
            self.assertNotEqual(df.locations, [])
            df.remove()
Example #23
    def test_local_job_put_single_file(self):
        j = Job(application=Executable(exe=File(self.filepath), args=[]), outputfiles=[DiracFile('a.root')])
        logger.info("App EXE: %s" % str(j.application.exe.name))
        j.submit()
        sleep_until_completed(j)

        self.assertEqual(len(j.outputfiles), 1)
        self.assertEqual(j.outputfiles[0].namePattern, 'a.root')
        self.assertNotEqual(j.outputfiles[0].lfn, '')
        self.assertNotEqual(j.outputfiles[0].guid, '')
        self.assertNotEqual(j.outputfiles[0].locations, [])
        j.outputfiles[0].remove()
Example #24
    def test_a_jobSubmit(self):
        """here for testing a submit"""
        from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

        j = Job()
        j.application = Executable(exe='touch')
        j.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
        j.outputfiles = [MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}', namePattern='*.txt')]
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)
Example #25
    def setUp(self):
        self.exe_job = Job()
        self.davinci_job = Job(application=DaVinci())

        self.exe_job.submit()
        if not sleep_until_completed(self.exe_job):
            assert False, 'Test timed out'
        assert self.exe_job.status == 'completed'

        self.davinci_job.submit()
        if not sleep_until_completed(self.davinci_job):
            assert False, 'Test timed out'
        assert self.davinci_job.status == 'completed'
Example #26
    def test_Dirac_job_put_multiple_files(self):
        j = Job(application=Executable(exe=File(self.filepath), args=[]), backend=Dirac(), outputfiles=[DiracFile('a.root'), DiracFile('b.root')])
        j.submit()
        sleep_until_completed(j)

        self.assertEqual(len(j.outputfiles), 2)
        for df in j.outputfiles:
            print("Testing: %s" % str(df.namePattern))
            self.assertIn(df.namePattern, ['a.root', 'b.root'])
            self.assertNotEqual(df.lfn, '')
            self.assertNotEqual(df.guid, '')
            self.assertNotEqual(df.locations, [])
            df.remove()
Example #27
    def setUp(self):
        self.exe_job = Job()
        self.davinci_job = Job(application=DaVinci())

        self.exe_job.submit()
        if not sleep_until_completed(self.exe_job):
            assert False, 'Test timed out'
        assert self.exe_job.status == 'completed'

        self.davinci_job.submit()
        if not sleep_until_completed(self.davinci_job):
            assert False, 'Test timed out'
        assert self.davinci_job.status == 'completed'
Example #28
    def test_b_SJCompleted(self):
        """
        Test the subjobs complete
        """
        from Ganga.GPI import jobs

        assert len(jobs) == 1
        assert len(jobs(0).subjobs) == TestSJSubmit.n_subjobs

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(0))

        for sj in jobs(0).subjobs:
            assert sj.status in ['completed']
Example #29
    def test_b_SJCompleted(self):
        """
        Test the subjobs complete
        """
        from Ganga.GPI import jobs

        assert len(jobs) == 1
        assert len(jobs(0).subjobs) == TestSJSubmit.n_subjobs

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(jobs(0))

        for sj in jobs(0).subjobs:
            assert sj.status in ['completed']
Example #30
    def test_e_testXMLContent(self):
        # Check content of XML is as expected
        from Ganga.Core.GangaRepository.VStreamer import to_file, from_file

        from Ganga.GPI import jobs, Job
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from tempfile import NamedTemporaryFile

        j = jobs(0)
        assert path.isfile(getXMLFile(j))
        with open(getXMLFile(j)) as handler:
            tmpobj, errs = from_file(handler)

            assert hasattr(tmpobj, 'name')

            assert tmpobj.name == testStr

            ignore_subs = [
                'time', 'subjobs', 'info', 'application', 'backend', 'id'
            ]

            with NamedTemporaryFile(delete=False) as new_temp_file:
                temp_name = new_temp_file.name

                to_file(stripProxy(j), new_temp_file, ignore_subs)
                new_temp_file.flush()

            with NamedTemporaryFile(delete=False) as new_temp_file2:
                temp_name2 = new_temp_file2.name

                j2 = Job()
                j2.name = testStr
                j2.submit()
                from GangaTest.Framework.utils import sleep_until_completed
                sleep_until_completed(j2)

                to_file(stripProxy(j2), new_temp_file2, ignore_subs)
                new_temp_file2.flush()

            #import filecmp
            #assert filecmp.cmp(handler.name, new_temp_file.name)
            #assert not filecmp.cmp(new_temp_file.name, new_temp_file2.name)

            #assert open(getXMLFile(j)).read() == open(temp_name).read()
            assert open(temp_name).read() == open(temp_name2).read()

            unlink(temp_name)
            unlink(temp_name2)
Example #31
    def test_a_JobConstruction(self):
        """ First construct the Job object (singular)"""
        from Ganga.Utility.Config import getConfig
        self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

        from Ganga.GPI import Job, jobs, ArgSplitter
        j = Job()
        self.assertEqual(len(jobs), 1) # Don't really gain anything from assertEqual...

        j.splitter = ArgSplitter(args=[[i] for i in range(global_subjob_num)])
        j.submit()

        self.assertEqual(len(j.subjobs), global_subjob_num)
        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j, 60)
Example #32
    def testMergeThatAlwaysFailsOverwrite(self):

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        sleep_until_completed(j, 120)
        assert j.status == 'failed'
        assert os.path.exists(os.path.join(
            j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example #33
    def test_a_jobSubmit(self):
        """here for testing a submit"""
        from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

        j = Job()
        j.application = Executable(exe='touch')
        j.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
        j.outputfiles = [
            MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                            namePattern='*.txt')
        ]
        j.submit()

        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j)
Example #34
    def testMergeThatAlwaysFailsOverwrite(self):
        from Ganga.GPI import Job, Executable, Local, LocalFile

        j = Job()
        j.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
        j.backend = Local()
        j.outputfiles = [LocalFile('out.txt')]
        j.splitter = CopySplitter()
        j.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

        j.submit()

        sleep_until_completed(j, 60)
        assert j.status == 'failed'
        assert os.path.exists(os.path.join(j.outputdir, 'out.txt.merge_summary')), 'Summary file should be created'
Example #35
    def test_e_testXMLContent(self):
        # Check content of XML is as expected
        from Ganga.Core.GangaRepository.VStreamer import to_file, from_file

        from Ganga.GPI import jobs, Job
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from tempfile import NamedTemporaryFile

        j = jobs(0)
        assert path.isfile(getXMLFile(j))
        with open(getXMLFile(j)) as handler:
            tmpobj, errs = from_file(handler)

            assert hasattr(tmpobj, 'name')

            assert tmpobj.name == testStr

            ignore_subs = ['time', 'subjobs', 'info', 'application', 'backend', 'id']

            with NamedTemporaryFile(delete=False) as new_temp_file:
                temp_name = new_temp_file.name


                to_file(stripProxy(j), new_temp_file, ignore_subs)
                new_temp_file.flush()

            with NamedTemporaryFile(delete=False) as new_temp_file2:
                temp_name2 = new_temp_file2.name

                j2 = Job()
                j2.name = testStr
                j2.submit()
                from GangaTest.Framework.utils import sleep_until_completed
                sleep_until_completed(j2)

                to_file(stripProxy(j2), new_temp_file2, ignore_subs)
                new_temp_file2.flush()

            #import filecmp
            #assert filecmp.cmp(handler.name, new_temp_file.name)
            #assert not filecmp.cmp(new_temp_file.name, new_temp_file2.name)

            #assert open(getXMLFile(j)).read() == open(temp_name).read()
            assert open(temp_name).read() == open(temp_name2).read()

            unlink(temp_name)
            unlink(temp_name2)
Example #36
    def testMultipleMergeOnTransitionOneJobArray(self):
        """Use the MultipleMerger to merge all the text files."""

        j = self.jobslice[0]
        j.splitter = CopySplitter()
        j.splitter.number = 5

        tm1 = TextMerger()
        tm1.files = ['out.txt']

        tm2 = TextMerger()
        tm2.files = ['out2.txt']

        j.postprocessors = [tm1, tm2]

        j.submit()
        if not sleep_until_completed(j):
            assert False, 'Test timed out'
        assert j.status == 'completed', 'Job should be finished'

        for out in ['out.txt', 'out2.txt']:
            output = os.path.join(j.outputdir, out)
            assert os.path.exists(output), 'File %s must exist' % output

            for sj in j.subjobs:
                out_txt = os.path.join(sj.outputdir, out)
                assert file_contains(
                    output, out_txt), 'File must contain the output of each individual job'
Example #37
    def setUp(self):
        super(TestCustomChecker, self).setUp()
        from Ganga.GPI import Job, CustomChecker
        from GangaTest.Framework.utils import sleep_until_completed
        self.c = CustomChecker()
        self.j = None
        self.file_name_stdout = None
        self.file_name_fail = None

        # write string to tmpfile
        self.j = Job()
        self.j.submit()
        self.assertTrue(
            sleep_until_completed(self.j),
            'Timeout on job submission: job is still not finished')
        self.assertEqual(self.j.status, 'completed')

        file_obj, file_name = tempfile.mkstemp()
        os.close(file_obj)
        os.unlink(file_name)
        os.mkdir(file_name)

        self.file_name_stdout = os.path.join(file_name, 'check_stdout.py')
        with open(self.file_name_stdout, 'w') as module_stdout:
            module_stdout.write("""import os
def check(j):
        stdout = os.path.join(j.outputdir,'stdout')
        return os.path.exists(stdout)
""")

        self.file_name_fail = os.path.join(file_name, 'check_fail.py')
        with open(self.file_name_fail, 'w') as module_fail:
            module_fail.write('will not run')
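The setUp above only prepares a completed job and two candidate check modules; the test body itself is not shown on this page. The fragment below is a hedged sketch of how those modules might then be exercised, assuming CustomChecker exposes a module attribute and a check(job) method by analogy with the CustomMerger(module=...) / merge(...) pair used in Example #4; the attribute name and the expected outcomes are assumptions, not taken from these snippets.

    # Hedged sketch only: assumes CustomChecker mirrors CustomMerger and offers
    # a 'module' attribute plus a check(job) method.
    self.c.module = self.file_name_stdout
    assert self.c.check(self.j)  # stdout exists for a completed job

    self.c.module = self.file_name_fail
    # 'will not run' is not importable Python, so this module should be rejected;
    # the exact failure mode (exception or a False result) is not shown here.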
Example #38
    def testMergeOnTransitionOneJob(self):

        j = self.jobslice[0]
        j.splitter = CopySplitter()
        j.splitter.number = 5

        tm = TextMerger()
        tm.files = ['out.txt']
        j.postprocessors = tm

        assert j.postprocessors, 'Postprocessors should be set'

        j.submit()
        if not sleep_until_completed(j):
            assert False, 'Test timed out'
        assert j.status == 'completed', 'Job should be finished'

        assert len(j.subjobs) == 5, 'Job should have split correctly'

        output = os.path.join(j.outputdir, 'out.txt')
        assert os.path.exists(output)

        for sj in j.subjobs:
            out_txt = os.path.join(sj.outputdir, 'out.txt')
            assert file_contains(
                output, out_txt), 'File must contain the output of each individual job'
Example #39
    def test_b_testClientSideComplete(self):
        """Test the client side code whilst still using the Local backend"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            output_dir = stripProxy(sj).getOutputWorkspace(
                create=False).getPath()
            assert os.path.isdir(output_dir)

            # Check that the files were placed in the correct place on storage
            for file_ in j.inputfiles:
                for this_file in glob.glob(
                        os.path.join(output_dir, file_.namePattern)):
                    assert os.path.isfile(this_file)

            # Check that wildcard expansion happened correctly
            assert len(stripProxy(sj).outputfiles[0].subfiles) == 2

            assert len(sj.outputfiles) == 2
Example #40
    def test_Savannah8009(self):
        from Ganga.GPI import Executable, Job, jobs, templates

        from GangaTest.Framework.utils import sleep_until_completed

        j = Job()
        j.submit()

        self.assertEqual(len(jobs), 1)
        self.assertEqual(len(templates), 0)

        if not sleep_until_completed(j, timeout=120):
            assert False, 'Timeout on job submission: job is still not finished'

        t = j.copy()

        # make sure that copy creates a new job (and not the template)
        self.assertEqual(len(jobs), 2)
        self.assertEqual(len(templates), 0)

        # make sure that output parameters are not carried forward
        self.assertNotEqual(j.backend.id, t.backend.id)
        self.assertNotEqual(j.backend.exitcode, t.backend.exitcode)

        # make sure that input parameters are carried forward
        self.assertEqual(j.application.exe, t.application.exe)
Example #41
    def test_b_testClientSideComplete(self):
        """Test the client side code whilst still using the Local backend"""

        from Ganga.GPI import jobs

        assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            output_dir = stripProxy(sj).getOutputWorkspace(create=False).getPath()
            assert os.path.isdir(output_dir) == True

            # Check that the files have been removed from the output worker dir
            for input_f in j.inputfiles:
                assert not os.path.isfile(os.path.join(output_dir, input_f.namePattern))

            # Check that the files were placed in the correct place on storage
            output_dir = os.path.join(self.outputFilePath, str(j.id), str(sj.id))
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, file_.namePattern))

            # Check that wildcard expansion happened correctly
            assert len(stripProxy(stripProxy(sj).outputfiles[0]).subfiles) == 2

            assert len(sj.outputfiles) == 2
Example #42
    def test_Savannah18729(self):
        from Ganga.GPI import Root, Job, Local

        import os
        from GangaTest.Framework.utils import sleep_until_completed
        import tempfile

        tmpdir = tempfile.mktemp()
        os.mkdir(tmpdir)
        ## Is this a test of files with a leading ' '  in the name? - rcurrie
        #self.fname = os.path.join(tmpdir, ' test.C')
        self.fname = os.path.join(tmpdir, 'test.C')
        with open(self.fname, 'w') as f:
            f.write('''
            void test(const char* text, int i)
            {
              cout << gSystem->GetDynamicPath() << endl;
              gSystem->Load("libTree");
              cout << text << " " << i << endl;

            }
            ''')

        app = Root()
        app.script = self.fname
        app.args = ['abc', 1]
        j = Job(backend=Local(), application=app)
        j.submit()

        self.assertTrue(sleep_until_completed(j, 120),
                        'Timeout on registering Interactive job as completed')

        self.assertEqual(j.status, 'completed')
Example #43
    def test_a_JobConstruction(self):
        """ First construct the Job object (singular)"""
        from Ganga.Utility.Config import getConfig
        self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

        from Ganga.GPI import Job, jobs, ArgSplitter
        j = Job()
        self.assertEqual(len(jobs),
                         1)  # Don't really gain anything from assertEqual...

        j.splitter = ArgSplitter(args=[[i] for i in range(global_subjob_num)])
        j.submit()

        self.assertEqual(len(j.subjobs), global_subjob_num)
        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j, 60)
Example #44
    def _check(self, template):
        logger.info("------------------------------------------------")
        logger.info("-    Now checking template: '%s'" % template.name)
        logger.info("------------------------------------------------")
        j = Job(template)
        j.submit()
        self.assertTrue(sleep_until_completed(j))
Example #45
    def test_Savannah8009(self):
        from Ganga.GPI import Executable, Job, jobs, templates

        from GangaTest.Framework.utils import sleep_until_completed

        j = Job()
        j.submit()

        self.assertEqual(len(jobs), 1)
        self.assertEqual(len(templates), 0)

        if not sleep_until_completed(j, timeout=120):
            assert False, 'Timeout on job submission: job is still not finished'

        t = j.copy()

        # make sure that copy creates a new job (and not the template)
        self.assertEqual(len(jobs), 2)
        self.assertEqual(len(templates), 0)

        # make sure that output parameters are not carried forward
        self.assertNotEqual(j.backend.id, t.backend.id)
        self.assertNotEqual(j.backend.exitcode, t.backend.exitcode)

        # make sure that input parameters are carried forward
        self.assertEqual(j.application.exe, t.application.exe)
Example #46
    def setUp(self):
        super(TestCustomChecker, self).setUp()
        from Ganga.GPI import Job, CustomChecker
        from GangaTest.Framework.utils import sleep_until_completed
        self.c = CustomChecker()
        self.j = None
        self.file_name_stdout = None
        self.file_name_fail = None

        # write string to tmpfile
        self.j = Job()
        self.j.submit()
        self.assertTrue(sleep_until_completed(self.j), 'Timeout on job submission: job is still not finished')
        self.assertEqual(self.j.status, 'completed')

        file_obj, file_name = tempfile.mkstemp()
        os.close(file_obj)
        os.unlink(file_name)
        os.mkdir(file_name)

        self.file_name_stdout = os.path.join(file_name, 'check_stdout.py')
        with open(self.file_name_stdout, 'w') as module_stdout:
            module_stdout.write("""import os
def check(j):
        stdout = os.path.join(j.outputdir,'stdout')
        return os.path.exists(stdout)
""")

        self.file_name_fail = os.path.join(file_name, 'check_fail.py')
        with open(self.file_name_fail, 'w') as module_fail:
            module_fail.write('will not run')
Example #47
    def test_b_testClientSideComplete(self):
        """Test the client side code whilst still using the Local backend"""

        from Ganga.GPI import jobs
        from Ganga.GPIDev.Base.Proxy import stripProxy

        from GangaTest.Framework.utils import sleep_until_completed

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            output_dir = stripProxy(sj).getOutputWorkspace(create=False).getPath()
            assert os.path.isdir(output_dir)

            # Check that the files were placed in the correct place on storage
            for file_ in j.inputfiles:
                for this_file in glob.glob(os.path.join(output_dir, file_.namePattern)):
                    assert os.path.isfile(this_file)

            # Check that wildcard expansion happened correctly
            assert len(stripProxy(sj).outputfiles[0].subfiles) == 2

            assert len(sj.outputfiles) == 2
Example #48
    def _check(self, template):
        logger.info("------------------------------------------------")
        logger.info("-    Now checking template: '%s'" % template.name)
        logger.info("------------------------------------------------")
        j = Job(template)
        j.submit()
        self.assertTrue(sleep_until_completed(j))
Example #49
    def runJobSlice(self):
        from GangaTest.Framework.utils import sleep_until_completed

        for j in self.jobslice:
            j.submit()
            assert sleep_until_completed(j, timeout=60), 'Timeout on job submission: job is still not finished'
            assert j.status == 'completed'
Example #50
    def setUp(self):
        args = ['1', '2', '12']
        # write string to tmpfile
        self.j = Job()
        self.j.submit()
        if not sleep_until_completed(self.j):
            assert False, 'Test timed out'
        assert self.j.status == 'completed'

        (file_obj, file_name) = tempfile.mkstemp()
        os.close(file_obj)
        os.unlink(file_name)
        os.mkdir(file_name)

        self.file_name_stdout = os.path.join(file_name, 'check_stdout.py')
        with open(self.file_name_stdout, 'w') as module_stdout:
            module_stdout.write("""import os
def check(j):
        stdout = os.path.join(j.outputdir,'stdout')
        return os.path.exists(stdout)
""")

        self.file_name_fail = os.path.join(file_name, 'check_fail.py')
        with open(self.file_name_fail, 'w') as module_fail:
            module_fail.write("will not run")
Example #51
    def test_b_Completed(self):
        """Test that the job completed and the output files exist in storage"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        # Check that we've still got 1 file everywhere we expect 1
        assert len(j.inputfiles) == 1
        assert len(j.outputfiles) == 1
        # 1 file after wildcard expansion
        assert len(stripProxy(stripProxy(j).outputfiles[0]).subfiles) == 1
        assert len(j.outputfiles) == 1

        # Test that these strings are sensible
        assert j.outputfiles[0].namePattern != ''
        assert j.outputfiles[0].namePattern[0] != '*'
        assert j.outputfiles[0].locations != ['']
        assert isinstance(j.outputfiles[0].locations[0], str) is True
        assert j.outputfiles[0].accessURL() != ['']
        assert isinstance(j.outputfiles[0].accessURL()[0], str) is True

        # Check that the output file exists on 'storage'
        output_dir = os.path.join(self.outputFilePath, str(j.id))
        assert os.path.isdir(output_dir)
        assert os.path.isfile(os.path.join(output_dir, j.inputfiles[0].namePattern))

        self.cleanUp()
Example #52
    def test_b_testClientSideComplete(self):
        """Test the client side code whilst still using the Local backend"""

        from Ganga.GPI import jobs

        assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            output_dir = stripProxy(sj).getOutputWorkspace(create=False).getPath()
            assert os.path.isdir(output_dir) == True

            # Check that the files have been removed from the output worker dir
            for input_f in j.inputfiles:
                assert not os.path.isfile(os.path.join(output_dir, input_f.namePattern))

            # Check that the files were placed in the correct place on storage
            output_dir = os.path.join(self.outputFilePath, str(j.id), str(sj.id))
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, file_.namePattern))

            # Check that wildcard expansion happened correctly
            assert len(stripProxy(stripProxy(sj).outputfiles[0]).subfiles) == 2

            assert len(sj.outputfiles) == 2