    def test_build_easyconfigs_in_parallel_slurm(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) Slurm as backend for --job."""

        # install mocked versions of 'sbatch' and 'scontrol' commands
        sbatch = os.path.join(self.test_prefix, 'bin', 'sbatch')
        write_file(sbatch, MOCKED_SBATCH)
        adjust_permissions(sbatch, stat.S_IXUSR, add=True)

        scontrol = os.path.join(self.test_prefix, 'bin', 'scontrol')
        write_file(scontrol, MOCKED_SCONTROL)
        adjust_permissions(scontrol, stat.S_IXUSR, add=True)

        os.environ['PATH'] = os.path.pathsep.join([os.path.join(self.test_prefix, 'bin'), os.getenv('PATH')])

        topdir = os.path.dirname(os.path.abspath(__file__))
        test_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
        foss_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'f', 'foss', 'foss-2018a.eb')

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
            'job_max_walltime': 5,
            'force': True,
        }
        init_config(args=['--job-backend=Slurm'], build_options=build_options)

        easyconfigs = process_easyconfig(test_ec) + process_easyconfig(foss_ec)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        self.mock_stdout(True)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        self.mock_stdout(False)

        # jobs are submitted for foss & gzip (listed easyconfigs)
        self.assertEqual(len(jobs), 2)

        # last job (gzip) has a dependency on second-to-last job (foss)
        self.assertEqual(jobs[0].job_specs['job-name'], 'foss-2018a')

        expected = {
            'dependency': 'afterok:%s' % jobs[0].jobid,
            'hold': True,
            'job-name': 'gzip-1.5-foss-2018a',
            'nodes': 1,
            'ntasks': 3,
            'ntasks-per-node': 3,
            'output': '%x-%j.out',
            'time': 300,  # 60*5 (unit is minutes)
            'wrap': "echo '%s'" % test_ec,
        }
        self.assertEqual(jobs[1].job_specs, expected)
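# NOTE (editorial sketch): MOCKED_SBATCH and MOCKED_SCONTROL are module-level constants
# that are not shown in this excerpt. The hypothetical definitions below only illustrate
# the idea: tiny shell scripts that print just enough Slurm-like output for the job
# backend to parse. They are not the test suite's actual mock scripts.
MOCKED_SBATCH_SKETCH = """#!/bin/bash
# fake 'sbatch': ignore the submitted batch script and print a made-up job id,
# mimicking the "Submitted batch job <id>" line that real sbatch prints
echo "Submitted batch job 987654"
"""

MOCKED_SCONTROL_SKETCH = """#!/bin/bash
# fake 'scontrol': pretend any queried job exists and has completed
echo "JobId=987654 JobName=gzip-1.5-foss-2018a JobState=COMPLETED"
"""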
    def test_build_easyconfigs_in_parallel(self):
        """Basic test for build_easyconfigs_in_parallel function."""
        easyconfig_file = os.path.join(os.path.dirname(__file__), "easyconfigs", "gzip-1.5-goolf-1.4.10.eb")
        easyconfigs = process_easyconfig(easyconfig_file)
        ordered_ecs = resolve_dependencies(easyconfigs)
        jobs = build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs, prepare_first=False)
        self.assertEqual(len(jobs), 8)
    def test_get_easyblock_instance(self):
        """Test get_easyblock_instance function."""
        # adjust PYTHONPATH such that test easyblocks are found
        testdir = os.path.abspath(os.path.dirname(__file__))
        import easybuild
        eb_blocks_path = os.path.join(testdir, 'sandbox')
        if eb_blocks_path not in sys.path:
            sys.path.append(eb_blocks_path)
            easybuild = reload(easybuild)

        import easybuild.easyblocks
        reload(easybuild.easyblocks)

        from easybuild.easyblocks.toy import EB_toy
        ec = process_easyconfig(
            os.path.join(testdir, 'easyconfigs', 'toy-0.0.eb'))[0]
        eb = get_easyblock_instance(ec)
        self.assertTrue(isinstance(eb, EB_toy))

        # check whether 'This is easyblock' log message is there
        tup = ('EB_toy', 'easybuild.easyblocks.toy',
               '.*test/framework/sandbox/easybuild/easyblocks/t/toy.pyc*')
        eb_log_msg_re = re.compile(
            r"INFO This is easyblock %s from module %s (%s)" % tup, re.M)
        logtxt = read_file(eb.logfile)
        self.assertTrue(
            eb_log_msg_re.search(logtxt),
            "Pattern '%s' found in: %s" % (eb_log_msg_re.pattern, logtxt))
    def test_patch_step(self):
        """Test patch step."""
        test_easyconfigs = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
        ec = process_easyconfig(os.path.join(test_easyconfigs,
                                             'toy-0.0.eb'))[0]
        orig_sources = ec['ec']['sources'][:]

        toy_patches = [
            'toy-0.0_typo.patch',  # test for applying patch
            ('toy-extra.txt', 'toy-0.0'),  # test for patch-by-copy
        ]
        self.assertEqual(ec['ec']['patches'], toy_patches)

        # test applying patches without sources
        ec['ec']['sources'] = []
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        self.assertErrorRegex(EasyBuildError, '.*', eb.patch_step)

        # test actual patching of unpacked sources
        ec['ec']['sources'] = orig_sources
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        eb.patch_step()
        # verify that patches were applied
        toydir = os.path.join(eb.builddir, 'toy-0.0')
        self.assertEqual(sorted(os.listdir(toydir)),
                         ['toy-extra.txt', 'toy.source', 'toy.source.orig'])
        self.assertTrue("and very proud of it" in read_file(
            os.path.join(toydir, 'toy.source')))
        self.assertEqual(read_file(os.path.join(toydir, 'toy-extra.txt')),
                         'moar!\n')
    def test_guess_start_dir(self):
        """Test guessing the start dir."""
        test_easyconfigs = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'easyconfigs')
        ec = process_easyconfig(os.path.join(test_easyconfigs,
                                             'toy-0.0.eb'))[0]

        def check_start_dir(expected_start_dir):
            """Check start dir."""
            eb = EasyBlock(ec['ec'])
            eb.silent = True
            eb.cfg['stop'] = 'patch'
            eb.run_all_steps(False)
            eb.guess_start_dir()
            abs_expected_start_dir = os.path.join(eb.builddir,
                                                  expected_start_dir)
            self.assertTrue(
                os.path.samefile(eb.cfg['start_dir'], abs_expected_start_dir))
            self.assertTrue(
                os.path.samefile(os.getcwd(), abs_expected_start_dir))

        # default (no start_dir specified): use unpacked dir as start dir
        self.assertEqual(ec['ec']['start_dir'], None)
        check_start_dir('toy-0.0')

        # using start_dir equal to the one we're in is OK
        ec['ec']['start_dir'] = '%(name)s-%(version)s'
        self.assertEqual(ec['ec']['start_dir'], 'toy-0.0')
        check_start_dir('toy-0.0')

        # clean error when specified start dir does not exist
        ec['ec']['start_dir'] = 'thisstartdirisnotthere'
        err_pattern = "Specified start dir .*/toy-0.0/thisstartdirisnotthere does not exist"
        self.assertErrorRegex(EasyBuildError, err_pattern, check_start_dir,
                              'whatever')
    def test_prepare_step(self):
        """Test prepare step (setting up build environment)."""
        test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs')
        ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0]

        mkdir(os.path.join(self.test_buildpath, 'toy', '0.0', 'dummy-dummy'), parents=True)
        eb = EasyBlock(ec['ec'])
        eb.silent = True
        eb.prepare_step()
        self.assertEqual(self.modtool.list(), [])

        os.environ['THIS_IS_AN_UNWANTED_ENV_VAR'] = 'foo'
        eb.cfg['unwanted_env_vars'] = ['THIS_IS_AN_UNWANTED_ENV_VAR']

        eb.cfg['allow_system_deps'] = [('Python', '1.2.3')]

        init_config(build_options={'extra_modules': ['GCC/4.7.2']})

        eb.prepare_step()

        self.assertEqual(os.environ.get('THIS_IS_AN_UNWANTED_ENV_VAR'), None)
        self.assertEqual(os.environ.get('EBROOTPYTHON'), 'Python')
        self.assertEqual(os.environ.get('EBVERSIONPYTHON'), '1.2.3')
        self.assertEqual(len(self.modtool.list()), 1)
        self.assertEqual(self.modtool.list()[0]['mod_name'], 'GCC/4.7.2')
    def test_get_easyblock_instance(self):
        """Test get_easyblock_instance function."""
        # adjust PYTHONPATH such that test easyblocks are found
        testdir = os.path.abspath(os.path.dirname(__file__))
        import easybuild

        eb_blocks_path = os.path.join(testdir, "sandbox")
        if not eb_blocks_path in sys.path:
            sys.path.append(eb_blocks_path)
            easybuild = reload(easybuild)

        import easybuild.easyblocks

        reload(easybuild.easyblocks)

        from easybuild.easyblocks.toy import EB_toy

        ec = process_easyconfig(os.path.join(testdir, "easyconfigs", "toy-0.0.eb"))[0]
        eb = get_easyblock_instance(ec)
        self.assertTrue(isinstance(eb, EB_toy))

        # check whether 'This is easyblock' log message is there
        tup = ("EB_toy", "easybuild.easyblocks.toy", ".*test/framework/sandbox/easybuild/easyblocks/toy.pyc*")
        eb_log_msg_re = re.compile(r"INFO This is easyblock %s from module %s (%s)" % tup, re.M)
        logtxt = read_file(eb.logfile)
        self.assertTrue(eb_log_msg_re.search(logtxt), "Pattern '%s' found in: %s" % (eb_log_msg_re.pattern, logtxt))
    def test_patch_step(self):
        """Test patch step."""
        test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs')
        ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0]
        orig_sources = ec['ec']['sources'][:]

        toy_patches = [
            'toy-0.0_typo.patch',  # test for applying patch
            ('toy-extra.txt', 'toy-0.0'), # test for patch-by-copy
        ]
        self.assertEqual(ec['ec']['patches'], toy_patches)

        # test applying patches without sources
        ec['ec']['sources'] = []
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        self.assertErrorRegex(EasyBuildError, '.*', eb.patch_step)

        # test actual patching of unpacked sources
        ec['ec']['sources'] = orig_sources
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        eb.patch_step()
        # verify that patches were applied
        toydir = os.path.join(eb.builddir, 'toy-0.0')
        self.assertEqual(sorted(os.listdir(toydir)), ['toy-extra.txt', 'toy.source', 'toy.source.orig'])
        self.assertTrue("and very proud of it" in read_file(os.path.join(toydir, 'toy.source')))
        self.assertEqual(read_file(os.path.join(toydir, 'toy-extra.txt')), 'moar!\n')
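# NOTE (editorial sketch): throughout these examples, process_easyconfig() returns a list
# with one dictionary per parsed easyconfig (or per block inside a single .eb file);
# the 'spec' key holds the path of the easyconfig file and the 'ec' key holds the parsed
# easyconfig, indexed by parameter name (ec['ec']['sources'], ec['ec']['patches'], ...).
# A minimal usage sketch, assuming process_easyconfig is imported as in the tests above
# and using a hypothetical path:
def inspect_easyconfig(path='/path/to/toy-0.0.eb'):
    """Print a few parameters of a parsed easyconfig (illustration only)."""
    for entry in process_easyconfig(path):
        print("spec: %s" % entry['spec'])
        parsed = entry['ec']
        print("name=%s version=%s" % (parsed['name'], parsed['version']))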
    def test_guess_start_dir(self):
        """Test guessing the start dir."""
        test_easyconfigs = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'test_ecs')
        ec = process_easyconfig(os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0.eb'))[0]

        def check_start_dir(expected_start_dir):
            """Check start dir."""
            eb = EasyBlock(ec['ec'])
            eb.silent = True
            eb.cfg['stop'] = 'patch'
            eb.run_all_steps(False)
            eb.guess_start_dir()
            abs_expected_start_dir = os.path.join(eb.builddir, expected_start_dir)
            self.assertTrue(os.path.samefile(eb.cfg['start_dir'], abs_expected_start_dir))
            self.assertTrue(os.path.samefile(os.getcwd(), abs_expected_start_dir))

        # default (no start_dir specified): use unpacked dir as start dir
        self.assertEqual(ec['ec']['start_dir'], None)
        check_start_dir('toy-0.0')

        # using start_dir equal to the one we're in is OK
        ec['ec']['start_dir'] = '%(name)s-%(version)s'
        self.assertEqual(ec['ec']['start_dir'], 'toy-0.0')
        check_start_dir('toy-0.0')

        # clean error when specified start dir does not exist
        ec['ec']['start_dir'] = 'thisstartdirisnotthere'
        err_pattern = "Specified start dir .*/toy-0.0/thisstartdirisnotthere does not exist"
        self.assertErrorRegex(EasyBuildError, err_pattern, check_start_dir, 'whatever')
    def test_build_easyconfigs_in_parallel(self):
        """Basic test for build_easyconfigs_in_parallel function."""
        easyconfig_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(easyconfig_file, validate=False)
        ordered_ecs = resolve_dependencies(easyconfigs)
        build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs)
    def test_obtain_file(self):
        """Test obtain_file method."""
        toy_tarball = 'toy-0.0.tar.gz'
        testdir = os.path.abspath(os.path.dirname(__file__))
        sandbox_sources = os.path.join(testdir, 'sandbox', 'sources')
        toy_tarball_path = os.path.join(sandbox_sources, 'toy', toy_tarball)
        tmpdir = tempfile.mkdtemp()
        tmpdir_subdir = os.path.join(tmpdir, 'testing')
        mkdir(tmpdir_subdir, parents=True)
        del os.environ['EASYBUILD_SOURCEPATH']  # defined by setUp

        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'))[0]
        eb = EasyBlock(ec['ec'])

        # 'downloading' a file to (first) sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (tmpdir, testdir)])
        shutil.copy2(toy_tarball_path, tmpdir_subdir)
        res = eb.obtain_file(toy_tarball, urls=['file://%s' % tmpdir_subdir])
        self.assertEqual(res, os.path.join(tmpdir, 't', 'toy', toy_tarball))

        # finding a file in sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (sandbox_sources, tmpdir)])
        res = eb.obtain_file(toy_tarball)
        self.assertEqual(res, toy_tarball_path)

        # sourcepath has preference over downloading
        res = eb.obtain_file(toy_tarball, urls=['file://%s' % tmpdir_subdir])
        self.assertEqual(res, toy_tarball_path)

        # obtain_file yields error for non-existing files
        fn = 'thisisclearlyanonexistingfile'
        error_regex = "Couldn't find file %s anywhere, and downloading it didn't work either" % fn
        self.assertErrorRegex(EasyBuildError, error_regex, eb.obtain_file, fn, urls=['file://%s' % tmpdir_subdir])

        # file specifications via URL also work, are downloaded to (first) sourcepath
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (tmpdir, sandbox_sources)])
        urls = ["http://hpcugent.github.io/easybuild/index.html", "https://hpcugent.github.io/easybuild/index.html"]
        for file_url in urls:
            fn = os.path.basename(file_url)
            res = None
            try:
                res = eb.obtain_file(file_url)
            except EasyBuildError, err:
                # if this fails, it should be because there's no online access
                download_fail_regex = re.compile('socket error')
                self.assertTrue(download_fail_regex.search(str(err)))

            # result may be None during offline testing
            if res is not None:
                loc = os.path.join(tmpdir, 't', 'toy', fn)
                self.assertEqual(res, loc)
                self.assertTrue(os.path.exists(loc), "%s file is found at %s" % (fn, loc))
                txt = open(loc, 'r').read()
                eb_regex = re.compile("EasyBuild: building software with ease")
                self.assertTrue(eb_regex.search(txt))
            else:
                print "ignoring failure to download %s in test_obtain_file, testing offline?" % file_url
def regtest(easyconfig_paths, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    @param easyconfig_paths: path of easyconfigs to run regtest on
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(
            aggregate_regtest,
            "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" %
                  (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place
    # all log files and the test output as xml
    basename = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
    var = config.OLDSTYLE_ENVIRONMENT_VARIABLES['test_output_path']

    regtest_output_dir = build_option('regtest_output_dir')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif var in os.environ:
        output_dir = os.path.abspath(os.environ[var])
    else:
        # default: current dir + easybuild-test-[timestamp]
        output_dir = os.path.join(cur_dir, basename)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(
                path, ignore_dirs=build_option('ignore_dirs'))
    else:
        _log.error("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(
                process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError, err:
            test_results.append((ecfile, 'parsing_easyconfigs',
                                 'easyconfig file error: %s' % err, _log))
    def test_mns():
        """Test default module naming scheme."""
        # test default naming scheme
        for ec_file in [f for f in ec_files if not 'broken' in os.path.basename(f)]:
            ec_path = os.path.abspath(ec_file)
            ecs = process_easyconfig(ec_path, validate=False)
            # derive module name directly from easyconfig file name
            ec_fn = os.path.basename(ec_file)
            if ec_fn in ec2mod_map:
                # only check first, ignore any others (occurs when blocks are used (format v1.0 only))
                self.assertEqual(ec2mod_map[ec_fn], ActiveMNS().det_full_module_name(ecs[0]['ec']))
    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""

        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)')):

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            travis_branch = os.environ.get('TRAVIS_BRANCH', None)
            if travis_branch and travis_branch != 'master':

                if not self.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
                out, ec = run_cmd(cmd, simple=False)
                changed_ecs_filenames = [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]
                print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames:
                    match = None
                    for ec in self.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs)
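# NOTE (editorial sketch): the test above finds the easyconfigs touched in a pull request
# by shelling out to 'git diff --name-only --diff-filter=AM <target_branch>...HEAD'.
# The same step in isolation, using only the standard library (the branch name is
# hypothetical):
import subprocess

def changed_easyconfig_files(target_branch='develop'):
    """Return the .eb files added or modified relative to the target branch."""
    out = subprocess.check_output(
        ['git', 'diff', '--name-only', '--diff-filter=AM', '%s...HEAD' % target_branch],
        universal_newlines=True,
    )
    return [f for f in out.splitlines() if f.endswith('.eb')]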
    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not self.parsed_easyconfigs:
            for spec in specs:
                self.parsed_easyconfigs.extend(process_easyconfig(spec))

        self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs)
    def test_build_easyconfigs_in_parallel_gc3pie(self):
        """Test build_easyconfigs_in_parallel(), using GC3Pie with local config as backend for --job."""
        try:
            import gc3libs  # noqa (ignore unused import)
        except ImportError:
            print "GC3Pie not available, skipping test"
            return

        # put GC3Pie config in place to use local host and fork/exec
        resourcedir = os.path.join(self.test_prefix, 'gc3pie')
        gc3pie_cfgfile = os.path.join(self.test_prefix, 'gc3pie_local.ini')
        gc3pie_cfgtxt = GC3PIE_LOCAL_CONFIGURATION % {
            'resourcedir': resourcedir,
            'time': which('time'),
        }
        write_file(gc3pie_cfgfile, gc3pie_cfgtxt)

        output_dir = os.path.join(self.test_prefix, 'subdir', 'gc3pie_output_dir')
        # purposely pre-create output dir, and put a file in it (to check whether GC3Pie tries to rename the output dir)
        mkdir(output_dir, parents=True)
        write_file(os.path.join(output_dir, 'foo'), 'bar')
        # remove write permissions on parent dir of specified output dir,
        # to check that GC3Pie does not try to rename the (already existing) output directory...
        adjust_permissions(os.path.dirname(output_dir), stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
                           add=False, recursive=False)

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'job_backend_config': gc3pie_cfgfile,
            'job_max_walltime': 24,
            'job_output_dir': output_dir,
            'job_polling_interval': 0.2,  # quick polling
            'job_target_resource': 'ebtestlocalhost',
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'silent': True,
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=GC3Pie'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        test_easyblocks_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
        cmd = "PYTHONPATH=%s:%s:$PYTHONPATH eb %%(spec)s -df" % (topdir, test_easyblocks_path)
        build_easyconfigs_in_parallel(cmd, ordered_ecs, prepare_first=False)

        # check that the toy module and the installed 'toy' binary actually exist
        self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')))
        self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')))
def regtest(easyconfig_paths, build_options=None, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    @param easyconfig_paths: path of easyconfigs to run regtest on
    @param build_options: dictionary specifying build options (e.g. robot_path, check_osdeps, ...)
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_options.get('aggregate_regtest', None)
    if aggregate_regtest is not None:
        output_file = os.path.join(aggregate_regtest, "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" % (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place
    # all log files and the test output as xml
    basename = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
    var = config.oldstyle_environment_variables['test_output_path']

    regtest_output_dir = build_options.get('regtest_output_dir', None)
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif var in os.environ:
        output_dir = os.path.abspath(os.environ[var])
    else:
        # default: current dir + easybuild-test-[timestamp]
        output_dir = os.path.join(cur_dir, basename)

    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(path, ignore_dirs=build_options.get('ignore_dirs', []))
    else:
        _log.error("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(process_easyconfig(ecfile, build_options=build_options, build_specs=build_specs))
        except EasyBuildError, err:
            test_results.append((ecfile, 'parsing_easyconfigs', 'easyconfig file error: %s' % err, _log))
    def test_get_easyblock_instance(self):
        """Test get_easyblock_instance function."""
        from easybuild.easyblocks.toy import EB_toy
        testdir = os.path.abspath(os.path.dirname(__file__))

        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'))[0]
        eb = get_easyblock_instance(ec)
        self.assertTrue(isinstance(eb, EB_toy))

        # check whether 'This is easyblock' log message is there
        tup = ('EB_toy', 'easybuild.easyblocks.toy', '.*test/framework/sandbox/easybuild/easyblocks/t/toy.pyc*')
        eb_log_msg_re = re.compile(r"INFO This is easyblock %s from module %s (%s)" % tup, re.M)
        logtxt = read_file(eb.logfile)
        self.assertTrue(eb_log_msg_re.search(logtxt), "Pattern '%s' found in: %s" % (eb_log_msg_re.pattern, logtxt))
def regtest(easyconfig_paths, modtool, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    @param easyconfig_paths: path of easyconfigs to run regtest on
    @param modtool: ModulesTool instance to use
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(aggregate_regtest, "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" % (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place all log files and the test output as xml
    regtest_output_dir = build_option('regtest_output_dir')
    testoutput = build_option('testoutput')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif testoutput is not None:
        output_dir = os.path.abspath(testoutput)
    else:
        # default: current dir + easybuild-test-[timestamp]
        dirname = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
        output_dir = os.path.join(cur_dir, dirname)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
    else:
        raise EasyBuildError("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError, err:
            test_results.append((ecfile, 'parsing_easyconfigs', 'easyconfig file error: %s' % err, _log))
    def test_get_easyblock_instance(self):
        """Test get_easyblock_instance function."""
        # adjust PYTHONPATH such that test easyblocks are found
        testdir = os.path.abspath(os.path.dirname(__file__))
        import easybuild
        eb_blocks_path = os.path.join(testdir, 'sandbox')
        if not eb_blocks_path in sys.path:
            sys.path.append(eb_blocks_path)
            easybuild = reload(easybuild)

        import easybuild.easyblocks
        reload(easybuild.easyblocks)

        from easybuild.easyblocks.toy import EB_toy
        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'toy-0.0.eb'))[0]
        eb = get_easyblock_instance(ec)
        self.assertTrue(isinstance(eb, EB_toy))
    def test_obtain_file(self):
        """Test obtain_file method."""
        toy_tarball = 'toy-0.0.tar.gz'
        testdir = os.path.abspath(os.path.dirname(__file__))
        sandbox_sources = os.path.join(testdir, 'sandbox', 'sources')
        toy_tarball_path = os.path.join(sandbox_sources, 'toy', toy_tarball)
        tmpdir = tempfile.mkdtemp()
        tmpdir_subdir = os.path.join(tmpdir, 'testing')
        mkdir(tmpdir_subdir, parents=True)
        del os.environ['EASYBUILD_SOURCEPATH']  # defined by setUp

        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'))[0]
        eb = EasyBlock(ec['ec'])

        # 'downloading' a file to (first) sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (tmpdir, testdir)])
        shutil.copy2(toy_tarball_path, tmpdir_subdir)
        res = eb.obtain_file(toy_tarball, urls=['file://%s' % tmpdir_subdir])
        self.assertEqual(res, os.path.join(tmpdir, 't', 'toy', toy_tarball))

        # finding a file in sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (sandbox_sources, tmpdir)])
        res = eb.obtain_file(toy_tarball)
        self.assertEqual(res, toy_tarball_path)

        # sourcepath has preference over downloading
        res = eb.obtain_file(toy_tarball, urls=['file://%s' % tmpdir_subdir])
        self.assertEqual(res, toy_tarball_path)

        # obtain_file yields error for non-existing files
        fn = 'thisisclearlyanonexistingfile'
        error_regex = "Couldn't find file %s anywhere, and downloading it didn't work either" % fn
        self.assertErrorRegex(EasyBuildError, error_regex, eb.obtain_file, fn, urls=['file://%s' % tmpdir_subdir])

        # file specifications via URL also work, are downloaded to (first) sourcepath
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (tmpdir, sandbox_sources)])
        file_url = "http://hpcugent.github.io/easybuild/index.html"
        fn = os.path.basename(file_url)
        res = None
        try:
            res = eb.obtain_file(file_url)
        except EasyBuildError, err:
            # if this fails, it should be because there's no online access
            download_fail_regex = re.compile('socket error')
            self.assertTrue(download_fail_regex.search(str(err)))
    def test_patch_step(self):
        """Test patch step."""
        ec = process_easyconfig(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'easyconfigs', 'toy-0.0.eb'))[0]
        orig_sources = ec['ec']['sources'][:]

        # test applying patches without sources
        ec['ec']['sources'] = []
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        self.assertErrorRegex(EasyBuildError, '.*', eb.patch_step)

        # test actual patching of unpacked sources
        ec['ec']['sources'] = orig_sources
        eb = EasyBlock(ec['ec'])
        eb.fetch_step()
        eb.extract_step()
        eb.patch_step()
    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not self.parsed_easyconfigs:
            for spec in specs:
                self.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules
        for ec in self.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs, modules_tool(), retain_all_deps=True)
    def test_fetch_patches(self):
        """Test fetch_patches method."""
        # adjust PYTHONPATH such that test easyblocks are found
        testdir = os.path.abspath(os.path.dirname(__file__))
        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb'))[0]
        eb = get_easyblock_instance(ec)

        eb.fetch_patches()
        self.assertEqual(len(eb.patches), 2)
        self.assertEqual(eb.patches[0]['name'], 'toy-0.0_typo.patch')
        self.assertFalse('level' in eb.patches[0])

        # reset
        eb.patches = []

        patches = [
            ('toy-0.0_typo.patch', 0),  # should also be level 0 (not None or something else)
            ('toy-0.0_typo.patch', 4),   # should be level 4
            ('toy-0.0_typo.patch', 'foobar'),  # sourcepath should be set to 'foobar'
            ('toy-0.0.tar.gz', 'some/path'),  # copy mode (not a .patch file)
        ]
        # check if patch levels are parsed correctly
        eb.fetch_patches(patch_specs=patches)

        self.assertEqual(len(eb.patches), 4)
        self.assertEqual(eb.patches[0]['name'], 'toy-0.0_typo.patch')
        self.assertEqual(eb.patches[0]['level'], 0)
        self.assertEqual(eb.patches[1]['name'], 'toy-0.0_typo.patch')
        self.assertEqual(eb.patches[1]['level'], 4)
        self.assertEqual(eb.patches[2]['name'], 'toy-0.0_typo.patch')
        self.assertEqual(eb.patches[2]['sourcepath'], 'foobar')
        self.assertEqual(eb.patches[3]['name'], 'toy-0.0.tar.gz')
        self.assertEqual(eb.patches[3]['copy'], 'some/path')

        patches = [
            ('toy-0.0_level4.patch', False),  # should throw an error, only ints and strings are allowed here
        ]
        self.assertRaises(EasyBuildError, eb.fetch_patches, patch_specs=patches)
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        build_options = {
            'robot_path': os.path.join(os.path.dirname(__file__), 'easyconfigs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs)
        jobs = build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs, prepare_first=False)
        self.assertEqual(len(jobs), 8)

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
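# NOTE (editorial sketch): the test above mocks the PBS backend by saving the original
# class attributes, overwriting them, and restoring them by hand. The same setup/teardown
# can be written with unittest.mock (standard library on Python 3, 'mock' backport on
# Python 2); this only illustrates the pattern and is not what the test suite itself does.
# PbsPython, pbs_python and MockPbsJob are assumed to be imported as in the test above.
from unittest import mock

def with_mocked_pbs(test_body):
    """Run 'test_body' while the PbsPython internals are stubbed out (hypothetical helper)."""
    with mock.patch.object(PbsPython, '_check_version', return_value=True), \
         mock.patch.object(PbsPython, 'connect_to_server'), \
         mock.patch.object(PbsPython, 'ppn', return_value=16), \
         mock.patch.object(pbs_python, 'PbsJob', MockPbsJob):
        test_body()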
    def test_obtain_file(self):
        """Test obtain_file method."""
        toy_tarball = 'toy-0.0.tar.gz'
        testdir = os.path.abspath(os.path.dirname(__file__))
        sandbox_sources = os.path.join(testdir, 'sandbox', 'sources')
        toy_tarball_path = os.path.join(sandbox_sources, 'toy', toy_tarball)
        tmpdir = tempfile.mkdtemp()
        tmpdir_subdir = os.path.join(tmpdir, 'testing')
        mkdir(tmpdir_subdir, parents=True)
        del os.environ['EASYBUILD_SOURCEPATH']  # defined by setUp

        ec = process_easyconfig(os.path.join(testdir, 'easyconfigs', 'toy-0.0.eb'))[0]
        eb = EasyBlock(ec['ec'])

        # 'downloading' a file to (first) sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (tmpdir, testdir)])
        shutil.copy2(toy_tarball_path, tmpdir_subdir)
        res = eb.obtain_file(toy_tarball, urls=[os.path.join('file://', tmpdir_subdir)])
        self.assertEqual(res, os.path.join(tmpdir, 't', 'toy', toy_tarball))

        # finding a file in sourcepath works
        init_config(args=["--sourcepath=%s:/no/such/dir:%s" % (sandbox_sources, tmpdir)])
        res = eb.obtain_file(toy_tarball)
        self.assertEqual(res, toy_tarball_path)

        # sourcepath has preference over downloading
        res = eb.obtain_file(toy_tarball, urls=[os.path.join('file://', tmpdir_subdir)])
        self.assertEqual(res, toy_tarball_path)

        # obtain_file yields error for non-existing files
        fn = 'thisisclearlyanonexistingfile'
        try:
            eb.obtain_file(fn, urls=[os.path.join('file://', tmpdir_subdir)])
        except EasyBuildError, err:
            fail_regex = re.compile("Couldn't find file %s anywhere, and downloading it didn't work either" % fn)
            self.assertTrue(fail_regex.search(str(err)))
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(False, "easyconfig %s does not contain blocks, yields only one parsed easyconfig" % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    name, easyblock = fetch_parameters_from_easyconfig(ec.rawtxt, ['name', 'easyblock'])

    # make sure easyconfig file is in expected location
    expected_subdir = os.path.join('easybuild', 'easyconfigs', letter_dir_for(name), name)
    subdir = os.path.join(*spec.split(os.path.sep)[-5:-1])
    fail_msg = "Easyconfig file %s not in expected subdirectory %s" % (spec, expected_subdir)
    self.assertEqual(expected_subdir, subdir, fail_msg)

    # sanity check for software name, moduleclass
    self.assertEqual(ec['name'], name)
    self.assertTrue(ec['moduleclass'] in build_option('valid_module_classes'))

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or not app_class is ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict), "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    # check whether all extra_options defined for used easyblock are defined
    extra_opts = app.extra_options()
    for key in extra_opts:
        self.assertTrue(key in app.cfg)

    app.close_log()
    os.remove(app.logfile)

    # dump the easyconfig file
    handle, test_ecfile = tempfile.mkstemp()
    os.close(handle)

    ec.dump(test_ecfile)
    dumped_ec = EasyConfigParser(test_ecfile).get_config_dict()
    os.remove(test_ecfile)

    # inject dummy values for templates that are only known at a later stage
    dummy_template_values = {
        'builddir': '/dummy/builddir',
        'installdir': '/dummy/installdir',
    }
    ec.template_values.update(dummy_template_values)

    ec_dict = ec.parser.get_config_dict()
    orig_toolchain = ec_dict['toolchain']
    for key in ec_dict:
        # skip parameters for which value is equal to default value
        orig_val = ec_dict[key]
        if key in DEFAULT_CONFIG and orig_val == DEFAULT_CONFIG[key][0]:
            continue
        if key in extra_opts and orig_val == extra_opts[key][0]:
            continue
        if key not in DEFAULT_CONFIG and key not in extra_opts:
            continue

        orig_val = resolve_template(ec_dict[key], ec.template_values)
        dumped_val = resolve_template(dumped_ec[key], ec.template_values)

        # take into account that dumped value for *dependencies may include hard-coded subtoolchains
        # if no easyconfig was found for the dependency with the 'parent' toolchain,
        # it may get resolved using a subtoolchain, which is then hardcoded in the dumped easyconfig
        if key in DEPENDENCY_PARAMETERS:
            # number of dependencies should remain the same
            self.assertEqual(len(orig_val), len(dumped_val))
            for orig_dep, dumped_dep in zip(orig_val, dumped_val):
                # name/version should always match
                self.assertEqual(orig_dep[:2], dumped_dep[:2])

                # 3rd value is versionsuffix;
                if len(dumped_dep) >= 3:
                    # if no versionsuffix was specified in original dep spec, then dumped value should be empty string
                    if len(orig_dep) >= 3:
                        self.assertEqual(dumped_dep[2], orig_dep[2])
                    else:
                        self.assertEqual(dumped_dep[2], '')

                # 4th value is toolchain spec
                if len(dumped_dep) >= 4:
                    if len(orig_dep) >= 4:
                        self.assertEqual(dumped_dep[3], orig_dep[3])
                    else:
                        # if a subtoolchain is specified (only) in the dumped easyconfig,
                        # it should *not* be the same as the parent toolchain
                        self.assertNotEqual(dumped_dep[3], (orig_toolchain['name'], orig_toolchain['version']))

        else:
            self.assertEqual(orig_val, dumped_val)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
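The dump-and-compare loop above only compares values after normalising them with resolve_template(), so that template placeholders do not cause spurious mismatches. A minimal illustration of that normalisation, with made-up values (the import path is an assumption about where the test module gets resolve_template from):

# assumption: resolve_template is the helper imported by the test module,
# from easybuild.framework.easyconfig.easyconfig
from easybuild.framework.easyconfig.easyconfig import resolve_template

template_values = {'installdir': '/dummy/installdir', 'version': '1.5'}
# placeholders like %(installdir)s are filled in before original and dumped values are compared
print(resolve_template('%(installdir)s/bin/gzip-%(version)s', template_values))
# expected output: /dummy/installdir/bin/gzip-1.5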
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        # only one job submitted since foss/2018a module is already available
        self.assertEqual(len(jobs), 1)
        regex = re.compile("echo '.*/gzip-1.5-foss-2018a.eb'")
        self.assertTrue(regex.search(jobs[-1].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool, retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(regex.search(jobs[i].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and intel/2018a (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is intel/2018a (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('intel-2018a.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps (hidden module)
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0.0-deps\.eb.* --hidden')
        script_txt = jobs[3].deps[0].script
        fail_msg = "Pattern '%s' should be found in: %s" % (regex.pattern, script_txt)
        self.assertTrue(regex.search(script_txt), fail_msg)
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # also test use of --pre-create-installdir
        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool)

        # installation directory doesn't exist yet before submission
        toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
        self.assertFalse(os.path.exists(toy_installdir))

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)

        # software install dir is created (by default) as part of job submission process (fetch_step is run)
        self.assertTrue(os.path.exists(toy_installdir))
        remove_dir(toy_installdir)
        remove_dir(os.path.dirname(toy_installdir))
        self.assertFalse(os.path.exists(toy_installdir))

        # installation directory does *not* get created when --pre-create-installdir is used
        build_options['pre_create_installdir'] = False
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)
        self.assertFalse(os.path.exists(toy_installdir))

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
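The assertions in this test only rely on a handful of attributes of the mocked job objects: the generated job script, the list of dependency jobs, the core count, and a job ID. The actual MockPbsJob used by the EasyBuild test suite is defined elsewhere; the sketch below only illustrates the minimal interface those assertions need (class name and constructor signature are assumptions):

class IllustrativeMockJob(object):
    """Minimal stand-in for a mocked job object (illustrative, not the real MockPbsJob)."""

    def __init__(self, script, name, cores=None):
        self.script = script  # full job script text, matched via regexes in the assertions
        self.name = name
        self.cores = cores    # compared against build_options['job_cores']
        self.deps = []        # dependency jobs, filled in via add_dependencies()
        self.jobid = None

    def add_dependencies(self, jobs):
        """Register dependency jobs (their .script is also inspected by the test)."""
        self.deps.extend(jobs)

    def _submit(self):
        """Pretend to submit the job; just assign a fake, unique job ID."""
        self.jobid = id(self)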
Example 40
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig 
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(False, "easyconfig %s does not contain blocks, yields only one parsed easyconfig" % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    name, easyblock = fetch_parameters_from_easyconfig(ec.rawtxt, ['name', 'easyblock'])

    # sanity check for software name
    self.assertTrue(ec['name'], name) 

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or app_class is not ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict), "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    # check whether all extra_options defined for used easyblock are defined
    for key in app.extra_options():
        self.assertTrue(key in app.cfg)

    app.close_log()
    os.remove(app.logfile)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
def template_easyconfig_test(self, spec):
    """Test whether all easyconfigs can be initialized."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig 
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(False, "easyconfig %s does not contain blocks, yields only one parsed easyconfig" % spec)

    # sanity check for software name
    name = fetch_parameter_from_easyconfig_file(spec, 'name')
    self.assertTrue(ec['name'], name) 

    # try and fetch easyblock spec from easyconfig
    easyblock = fetch_parameter_from_easyconfig_file(spec, 'easyblock')

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)
    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict), "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    app.close_log()
    os.remove(app.logfile)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        # only one job submitted since foss/2018a module is already available
        self.assertEqual(len(jobs), 1)
        regex = re.compile("echo '.*/gzip-1.5-foss-2018a.eb'")
        self.assertTrue(regex.search(jobs[-1].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool, retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(regex.search(jobs[i].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and intel/2018a (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is intel/2018a (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('intel-2018a.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0\.0-deps\.eb.* --hidden')
        self.assertTrue(regex.search(jobs[3].deps[0].script))
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # also test use of --pre-create-installdir
        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool)

        # installation directory doesn't exist yet before submission
        toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
        self.assertFalse(os.path.exists(toy_installdir))

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)

        # software install dir is created (by default) as part of job submission process (fetch_step is run)
        self.assertTrue(os.path.exists(toy_installdir))
        remove_dir(toy_installdir)
        remove_dir(os.path.dirname(toy_installdir))
        self.assertFalse(os.path.exists(toy_installdir))

        # installation directory does *not* get created when --pre-create-installdir is used
        build_options['pre_create_installdir'] = False
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)
        self.assertFalse(os.path.exists(toy_installdir))

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
Example 43
def regtest(easyconfig_paths, modtool, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    :param easyconfig_paths: path of easyconfigs to run regtest on
    :param modtool: ModulesTool instance to use
    :param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(
            aggregate_regtest,
            "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" %
                  (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place all log files and the test output as xml
    regtest_output_dir = build_option('regtest_output_dir')
    testoutput = build_option('testoutput')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif testoutput is not None:
        output_dir = os.path.abspath(testoutput)
    else:
        # default: current dir + easybuild-test-[timestamp]
        dirname = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
        output_dir = os.path.join(cur_dir, dirname)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(
                path, ignore_dirs=build_option('ignore_dirs'))
    else:
        raise EasyBuildError("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(
                process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError as err:
            test_results.append((ecfile, 'parsing_easyconfigs',
                                 'easyconfig file error: %s' % err, _log))

    # skip easyconfigs for which a module is already available, unless forced
    if not build_option('force'):
        _log.debug(
            "Skipping easyconfigs from %s that already have a module available..."
            % easyconfigs)
        easyconfigs = skip_available(easyconfigs, modtool)
        _log.debug("Retained easyconfigs after skipping: %s" % easyconfigs)

    if build_option('sequential'):
        return build_easyconfigs(easyconfigs, output_dir, test_results)
    else:
        resolved = resolve_dependencies(easyconfigs, modtool)

        cmd = "eb %(spec)s --regtest --sequential -ld --testoutput=%(output_dir)s"
        command = "unset TMPDIR && cd %s && %s; " % (cur_dir, cmd)
        # retry twice in case of failure, to avoid fluke errors
        command += "if [ $? -ne 0 ]; then %(cmd)s --force && %(cmd)s --force; fi" % {
            'cmd': cmd
        }

        build_easyconfigs_in_parallel(command, resolved, output_dir=output_dir)

        _log.info("Submitted regression test as jobs, results in %s" %
                  output_dir)

        return True  # success
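The command string built above still contains %(spec)s and %(output_dir)s placeholders when it is handed to build_easyconfigs_in_parallel(), which fills them in per job. A small, self-contained illustration of how the retry-wrapped template expands (the paths are made up for the example):

import os

cur_dir = os.getcwd()
cmd = "eb %(spec)s --regtest --sequential -ld --testoutput=%(output_dir)s"
command = "unset TMPDIR && cd %s && %s; " % (cur_dir, cmd)
# retry twice in case of failure, to avoid fluke errors
command += "if [ $? -ne 0 ]; then %(cmd)s --force && %(cmd)s --force; fi" % {'cmd': cmd}

# what one job's command looks like after the per-job substitution
print(command % {'spec': '/path/to/gzip-1.5.eb', 'output_dir': '/tmp/easybuild-test-output'})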
Example 44
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']

        # cache the parsed easyconfig, to avoid that it is parsed again
        self.parsed_easyconfigs.append(ecs[0])
    else:
        self.assertTrue(
            False,
            "easyconfig %s does not contain blocks, yields only one parsed easyconfig"
            % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (
        spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    name, easyblock = fetch_parameters_from_easyconfig(ec.rawtxt,
                                                       ['name', 'easyblock'])

    # make sure easyconfig file is in expected location
    expected_subdir = os.path.join('easybuild', 'easyconfigs',
                                   letter_dir_for(name), name)
    subdir = os.path.join(*spec.split(os.path.sep)[-5:-1])
    fail_msg = "Easyconfig file %s not in expected subdirectory %s" % (
        spec, expected_subdir)
    self.assertEqual(expected_subdir, subdir, fail_msg)

    # sanity check for software name, moduleclass
    self.assertEqual(ec['name'], name)
    self.assertTrue(ec['moduleclass'] in build_option('valid_module_classes'))

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or app_class is not ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure that $root is not used, since it is not compatible with module files in Lua syntax
    res = re.findall(r'.*\$root.*', ec.rawtxt, re.M)
    error_msg = "Found use of '$root', not compatible with modules in Lua syntax, use '%%(installdir)s' instead: %s"
    self.assertFalse(res, error_msg % res)

    # make sure old GitHub urls for EasyBuild that include 'hpcugent' are no longer used
    old_urls = [
        'github.com/hpcugent/easybuild',
        'hpcugent.github.com/easybuild',
        'hpcugent.github.io/easybuild',
    ]
    for old_url in old_urls:
        self.assertFalse(old_url in ec.rawtxt,
                         "Old URL '%s' not found in %s" % (old_url, spec))

    # make sure binutils is included as a build dep if toolchain is GCCcore
    if ec['toolchain']['name'] == 'GCCcore':
        # with 'Tarball' easyblock: only unpacking, no building; Eigen is also just a tarball
        requires_binutils = ec['easyblock'] not in [
            'Tarball'
        ] and ec['name'] not in ['Eigen']

        # let's also exclude the very special case where the system GCC is used as GCCcore, and only apply this
        # exception to the dependencies of binutils (since we should eventually build a new binutils with GCCcore)
        if ec['toolchain']['version'] == 'system':
            binutils_complete_dependencies = [
                'M4', 'Bison', 'flex', 'help2man', 'zlib', 'binutils'
            ]
            requires_binutils &= bool(
                ec['name'] not in binutils_complete_dependencies)

        # if no sources/extensions/components are specified, it's just a bundle (nothing is being compiled)
        requires_binutils &= bool(ec['sources'] or ec['exts_list']
                                  or ec.get('components'))

        if requires_binutils:
            dep_names = [d['name'] for d in ec.builddependencies()]
            self.assertTrue(
                'binutils' in dep_names,
                "binutils is a build dep in %s: %s" % (spec, dep_names))

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)

    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict),
                            "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (
                        ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    # check whether all extra_options defined for used easyblock are defined
    extra_opts = app.extra_options()
    for key in extra_opts:
        self.assertTrue(key in app.cfg)

    app.close_log()
    os.remove(app.logfile)

    # dump the easyconfig file
    handle, test_ecfile = tempfile.mkstemp()
    os.close(handle)

    ec.dump(test_ecfile)
    dumped_ec = EasyConfigParser(test_ecfile).get_config_dict()
    os.remove(test_ecfile)

    # inject dummy values for templates that are only known at a later stage
    dummy_template_values = {
        'builddir': '/dummy/builddir',
        'installdir': '/dummy/installdir',
    }
    ec.template_values.update(dummy_template_values)

    ec_dict = ec.parser.get_config_dict()
    orig_toolchain = ec_dict['toolchain']
    for key in ec_dict:
        # skip parameters for which value is equal to default value
        orig_val = ec_dict[key]
        if key in DEFAULT_CONFIG and orig_val == DEFAULT_CONFIG[key][0]:
            continue
        if key in extra_opts and orig_val == extra_opts[key][0]:
            continue
        if key not in DEFAULT_CONFIG and key not in extra_opts:
            continue

        orig_val = resolve_template(ec_dict[key], ec.template_values)
        dumped_val = resolve_template(dumped_ec[key], ec.template_values)

        # take into account that dumped value for *dependencies may include hard-coded subtoolchains
        # if no easyconfig was found for the dependency with the 'parent' toolchain,
        # it may get resolved using a subtoolchain, which is then hardcoded in the dumped easyconfig
        if key in DEPENDENCY_PARAMETERS:
            # number of dependencies should remain the same
            self.assertEqual(len(orig_val), len(dumped_val))
            for orig_dep, dumped_dep in zip(orig_val, dumped_val):
                # name/version should always match
                self.assertEqual(orig_dep[:2], dumped_dep[:2])

                # 3rd value is versionsuffix;
                if len(dumped_dep) >= 3:
                    # if no versionsuffix was specified in original dep spec, then dumped value should be empty string
                    if len(orig_dep) >= 3:
                        self.assertEqual(dumped_dep[2], orig_dep[2])
                    else:
                        self.assertEqual(dumped_dep[2], '')

                # 4th value is toolchain spec
                if len(dumped_dep) >= 4:
                    if len(orig_dep) >= 4:
                        self.assertEqual(dumped_dep[3], orig_dep[3])
                    else:
                        # if a subtoolchain is specified (only) in the dumped easyconfig,
                        # it should *not* be the same as the parent toolchain
                        self.assertNotEqual(dumped_dep[3],
                                            (orig_toolchain['name'],
                                             orig_toolchain['version']))

        else:
            self.assertEqual(orig_val, dumped_val)

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
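template_easyconfig_test() takes the path of a single easyconfig, so a test suite has to generate one test method per .eb file. The EasyBuild easyconfigs repository does this dynamically; the sketch below is only a rough illustration of that idea and reuses the template function above (helper name, directory layout and the TestCase class passed in are assumptions):

import glob
import os
import re


def attach_easyconfig_tests(test_cls, easyconfigs_dir):
    """Attach one generated test method per easyconfig file found under easyconfigs_dir."""
    for spec in sorted(glob.glob(os.path.join(easyconfigs_dir, '*', '*', '*.eb'))):
        def make_test(path):
            # bind 'path' via a closure so each generated test sees its own easyconfig
            def innertest(self):
                template_easyconfig_test(self, path)
            return innertest
        method_name = 'test_easyconfig_%s' % re.sub(r'\W', '_', os.path.basename(spec))
        setattr(test_cls, method_name, make_test(spec))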
    def test_build_easyconfigs_in_parallel(self):
        """Basic test for build_easyconfigs_in_parallel function."""
        easyconfig_file = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(easyconfig_file, validate=False)
        ordered_ecs = resolve_dependencies(easyconfigs)
        build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs)
Example 46
def template_easyconfig_test(self, spec):
    """Test whether all easyconfigs can be initialized."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(
            False,
            "easyconfig %s does not contain blocks, yields only one parsed easyconfig"
            % spec)

    # sanity check for software name
    name = fetch_parameter_from_easyconfig_file(spec, 'name')
    self.assertTrue(ec['name'], name)

    # try and fetch easyblock spec from easyconfig
    easyblock = fetch_parameter_from_easyconfig_file(spec, 'easyblock')

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)
    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict),
                            "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (
                        ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    app.close_log()
    os.remove(app.logfile)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
Example 47
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(
            self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'],
                    build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip',
                               'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'",
                                             ordered_ecs,
                                             prepare_first=False)
        self.assertEqual(len(jobs), 8)
        regex = re.compile("echo '.*/gzip-1.5-goolf-1.4.10.eb'")
        self.assertTrue(
            regex.search(jobs[-1].script),
            "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip',
                               'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file),
                                           self.modtool,
                                           retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(
                regex.search(jobs[i].script),
                "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and ictce/4.1.13 (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is ictce/4.1.13 (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('ictce-4.1.13.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0\.0-deps\.eb.* --hidden')
        self.assertTrue(regex.search(jobs[3].deps[0].script))
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
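All of the monkey-patching above is undone manually at the end of the test; if an assertion fails first, the patched attributes stay in place for the rest of the test run. Purely as an illustration (not how this test is actually written), the same kind of patching can be scoped with Python 3's unittest.mock so it is always restored; PbsPython is the class already imported by the test module:

from unittest import mock

# patch PbsPython.ppn only for the duration of the with block; the original
# attribute is restored automatically, even if an assertion fails inside
with mock.patch.object(PbsPython, 'ppn', lambda self: 16):
    pass  # submit jobs and run assertions here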
Example 48
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(False, "easyconfig %s does not contain blocks, yields only one parsed easyconfig" % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    name, easyblock = fetch_parameters_from_easyconfig(ec.rawtxt, ['name', 'easyblock'])

    # make sure easyconfig file is in expected location
    expected_subdir = os.path.join('easybuild', 'easyconfigs', letter_dir_for(name), name)
    subdir = os.path.join(*spec.split(os.path.sep)[-5:-1])
    fail_msg = "Easyconfig file %s not in expected subdirectory %s" % (spec, expected_subdir)
    self.assertEqual(expected_subdir, subdir, fail_msg)

    # sanity check for software name, moduleclass
    self.assertEqual(ec['name'], name)
    self.assertTrue(ec['moduleclass'] in build_option('valid_module_classes'))

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or app_class is not ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure that $root is not used, since it is not compatible with module files in Lua syntax
    res = re.findall(r'.*\$root.*', ec.rawtxt, re.M)
    error_msg = "Found use of '$root', not compatible with modules in Lua syntax, use '%%(installdir)s' instead: %s"
    self.assertFalse(res, error_msg % res)

    # make sure old GitHub urls for EasyBuild that include 'hpcugent' are no longer used
    old_urls = [
        'github.com/hpcugent/easybuild',
        'hpcugent.github.com/easybuild',
        'hpcugent.github.io/easybuild',
    ]
    for old_url in old_urls:
        self.assertFalse(old_url in ec.rawtxt, "Old URL '%s' not found in %s" % (old_url, spec))

    # make sure binutils is included as a build dep if toolchain is GCCcore
    if ec['toolchain']['name'] == 'GCCcore':
        # with 'Tarball' easyblock: only unpacking, no building; Eigen is also just a tarball
        requires_binutils = ec['easyblock'] not in ['Tarball'] and ec['name'] not in ['Eigen']

        # let's also exclude the very special case where the system GCC is used as GCCcore, and only apply this
        # exception to the dependencies of binutils (since we should eventually build a new binutils with GCCcore)
        if ec['toolchain']['version'] == 'system':
            binutils_complete_dependencies = ['M4', 'Bison', 'flex', 'help2man', 'zlib', 'binutils']
            requires_binutils &= bool(ec['name'] not in binutils_complete_dependencies)
            
        # if no sources/extensions/components are specified, it's just a bundle (nothing is being compiled)
        requires_binutils &= bool(ec['sources'] or ec['exts_list'] or ec.get('components'))

        if requires_binutils:
            dep_names = [d['name'] for d in ec['builddependencies']]
            self.assertTrue('binutils' in dep_names, "binutils is a build dep in %s: %s" % (spec, dep_names))

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict), "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    # check whether all extra_options defined for used easyblock are defined
    extra_opts = app.extra_options()
    for key in extra_opts:
        self.assertTrue(key in app.cfg)

    app.close_log()
    os.remove(app.logfile)

    # dump the easyconfig file
    handle, test_ecfile = tempfile.mkstemp()
    os.close(handle)

    ec.dump(test_ecfile)
    dumped_ec = EasyConfigParser(test_ecfile).get_config_dict()
    os.remove(test_ecfile)

    # inject dummy values for templates that are only known at a later stage
    dummy_template_values = {
        'builddir': '/dummy/builddir',
        'installdir': '/dummy/installdir',
    }
    ec.template_values.update(dummy_template_values)

    ec_dict = ec.parser.get_config_dict()
    orig_toolchain = ec_dict['toolchain']
    for key in ec_dict:
        # skip parameters for which value is equal to default value
        orig_val = ec_dict[key]
        if key in DEFAULT_CONFIG and orig_val == DEFAULT_CONFIG[key][0]:
            continue
        if key in extra_opts and orig_val == extra_opts[key][0]:
            continue
        if key not in DEFAULT_CONFIG and key not in extra_opts:
            continue

        orig_val = resolve_template(ec_dict[key], ec.template_values)
        dumped_val = resolve_template(dumped_ec[key], ec.template_values)

        # take into account that dumped value for *dependencies may include hard-coded subtoolchains
        # if no easyconfig was found for the dependency with the 'parent' toolchain,
        # it may get resolved using a subtoolchain, which is then hardcoded in the dumped easyconfig
        if key in DEPENDENCY_PARAMETERS:
            # number of dependencies should remain the same
            self.assertEqual(len(orig_val), len(dumped_val))
            for orig_dep, dumped_dep in zip(orig_val, dumped_val):
                # name/version should always match
                self.assertEqual(orig_dep[:2], dumped_dep[:2])

                # 3rd value is versionsuffix;
                if len(dumped_dep) >= 3:
                    # if no versionsuffix was specified in original dep spec, then dumped value should be empty string
                    if len(orig_dep) >= 3:
                        self.assertEqual(dumped_dep[2], orig_dep[2])
                    else:
                        self.assertEqual(dumped_dep[2], '')

                # 4th value is toolchain spec
                if len(dumped_dep) >= 4:
                    if len(orig_dep) >= 4:
                        self.assertEqual(dumped_dep[3], orig_dep[3])
                    else:
                        # if a subtoolchain is specified (only) in the dumped easyconfig,
                        # it should *not* be the same as the parent toolchain
                        self.assertNotEqual(dumped_dep[3], (orig_toolchain['name'], orig_toolchain['version']))

        else:
            self.assertEqual(orig_val, dumped_val)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
Example 49
    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
        def get_eb_files_from_diff(diff_filter):
            cmd = "git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch)
            out, ec = run_cmd(cmd, simple=False)
            return [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]


        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        travis_pr_test = re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)'))

        # when testing a PR in GitHub Actions, $GITHUB_EVENT_NAME will be set to 'pull_request'
        github_pr_test = os.environ.get('GITHUB_EVENT_NAME') == 'pull_request'

        if travis_pr_test or github_pr_test:

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            if travis_pr_test:
                target_branch = os.environ.get('TRAVIS_BRANCH', None)
            else:
                target_branch = os.environ.get('GITHUB_BASE_REF', None)

            if target_branch is None:
                self.assertTrue(False, "Failed to determine target branch for current pull request.")

            if target_branch != 'master':

                if not EasyConfigTest.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                changed_ecs_filenames = get_eb_files_from_diff(diff_filter='M')
                added_ecs_filenames = get_eb_files_from_diff(diff_filter='A')
                if changed_ecs_filenames:
                    print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))
                if added_ecs_filenames:
                    print("\nList of added easyconfig files in this PR: %s" % '\n'.join(added_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames + added_ecs_filenames:
                    match = None
                    for ec in EasyConfigTest.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs, added_ecs_filenames)
                self.check_sanity_check_paths(changed_ecs)
                self.check_https(changed_ecs)
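get_eb_files_from_diff() only shells out to git and filters the resulting file list down to .eb files. For reference, this is what the composed command looks like for modified files against a hypothetical 'develop' target branch:

# illustrative only; 'develop' is a made-up target branch for the example
diff_filter, target_branch = 'M', 'develop'
print("git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch))
# -> git diff --name-only --diff-filter=M develop...HEAD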
Example 50
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.assertTrue(
            False,
            "easyconfig %s does not contain blocks, yields only one parsed easyconfig"
            % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (
        spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    # sanity check for software name
    name = fetch_parameter_from_easyconfig_file(spec, 'name')
    self.assertTrue(ec['name'], name)

    # try and fetch easyblock spec from easyconfig
    easyblock = fetch_parameter_from_easyconfig_file(spec, 'easyblock')

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or app_class is not ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertTrue(name, app.name)
    self.assertTrue(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict),
                            "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (
                        ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    app.close_log()
    os.remove(app.logfile)

    # cache the parsed easyconfig, to avoid that it is parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
Example 51
def regtest(easyconfig_paths, modtool, build_specs=None):
    """
    Run regression test, using easyconfigs available in given path
    :param easyconfig_paths: path of easyconfigs to run regtest on
    :param modtool: ModulesTool instance to use
    :param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    """

    cur_dir = os.getcwd()

    aggregate_regtest = build_option('aggregate_regtest')
    if aggregate_regtest is not None:
        output_file = os.path.join(aggregate_regtest, "%s-aggregate.xml" % os.path.basename(aggregate_regtest))
        aggregate_xml_in_dirs(aggregate_regtest, output_file)
        _log.info("aggregated xml files inside %s, output written to: %s" % (aggregate_regtest, output_file))
        sys.exit(0)

    # create base directory, which is used to place all log files and the test output as xml
    regtest_output_dir = build_option('regtest_output_dir')
    testoutput = build_option('testoutput')
    if regtest_output_dir is not None:
        output_dir = regtest_output_dir
    elif testoutput is not None:
        output_dir = os.path.abspath(testoutput)
    else:
        # default: current dir + easybuild-test-[timestamp]
        dirname = "easybuild-test-%s" % datetime.now().strftime("%Y%m%d%H%M%S")
        output_dir = os.path.join(cur_dir, dirname)

    mkdir(output_dir, parents=True)

    # find all easyconfigs
    ecfiles = []
    if easyconfig_paths:
        for path in easyconfig_paths:
            ecfiles += find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
    else:
        raise EasyBuildError("No easyconfig paths specified.")

    test_results = []

    # process all the found easyconfig files
    easyconfigs = []
    for ecfile in ecfiles:
        try:
            easyconfigs.extend(process_easyconfig(ecfile, build_specs=build_specs))
        except EasyBuildError as err:
            test_results.append((ecfile, 'parsing_easyconfigs', 'easyconfig file error: %s' % err, _log))

    # skip easyconfigs for which a module is already available, unless forced
    if not build_option('force'):
        _log.debug("Skipping easyconfigs from %s that already have a module available..." % easyconfigs)
        easyconfigs = skip_available(easyconfigs, modtool)
        _log.debug("Retained easyconfigs after skipping: %s" % easyconfigs)

    if build_option('sequential'):
        return build_easyconfigs(easyconfigs, output_dir, test_results)
    else:
        resolved = resolve_dependencies(easyconfigs, modtool)

        cmd = "eb %(spec)s --regtest --sequential -ld --testoutput=%(output_dir)s"
        command = "unset TMPDIR && cd %s && %s; " % (cur_dir, cmd)
        # retry twice in case of failure, to avoid fluke errors
        command += "if [ $? -ne 0 ]; then %(cmd)s --force && %(cmd)s --force; fi" % {'cmd': cmd}

        build_easyconfigs_in_parallel(command, resolved, output_dir=output_dir)

        _log.info("Submitted regression test as jobs, results in %s" % output_dir)

        return True  # success
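
A minimal usage sketch for regtest() as defined above, assuming the EasyBuild configuration (build options such as aggregate_regtest, regtest_output_dir, force and sequential) has already been initialised; the easyconfig path is hypothetical:

from easybuild.tools.modules import modules_tool

ec_paths = ['/path/to/easyconfigs']  # hypothetical directory with easyconfig files
if not regtest(ec_paths, modules_tool()):
    print("regression test failed (partially), see output directory for details")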
Example 52
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg testing_data: a tuple (args, logfile, do_build), used to steer behaviour when testing main
    This function will:
    - read easyconfig
    - build software
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('Temporary log file in case of crash: %s' % logfile, log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if options.robot is not None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for installed easyconfigs, so we can find specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # specified robot paths are preferred over installed easyconfig files
    if robot_path:
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    build_options = {
        'aggregate_regtest': options.aggregate_regtest,
        'check_osdeps': not options.ignore_osdeps,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_online': options.regtest_online,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'stop': options.stop,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    }

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        search_file(search_path, query, build_options=build_options, short=not options.search)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if 'name' in build_specs:
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if d not in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:  # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths, build_options=build_options, build_specs=build_specs)

        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for f in files:
                if not generated and try_to_generate and build_specs:
                    ec_file = tweak(f, None, build_specs)
                else:
                    ec_file = f
                ecs = process_easyconfig(ec_file, build_options=build_options, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError as err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def template_easyconfig_test(self, spec):
    """Tests for an individual easyconfig: parsing, instantiating easyblock, check patches, ..."""

    # set to False, so it's False in case of this test failing
    global single_tests_ok
    prev_single_tests_ok = single_tests_ok
    single_tests_ok = False

    # parse easyconfig
    ecs = process_easyconfig(spec)
    if len(ecs) == 1:
        ec = ecs[0]['ec']
    else:
        self.fail("easyconfig %s does not contain blocks, so it should yield exactly one parsed easyconfig" % spec)

    # check easyconfig file name
    expected_fn = '%s-%s.eb' % (ec['name'], det_full_ec_version(ec))
    msg = "Filename '%s' of parsed easyconfig matches expected filename '%s'" % (
        spec, expected_fn)
    self.assertEqual(os.path.basename(spec), expected_fn, msg)

    name, easyblock = fetch_parameters_from_easyconfig(ec.rawtxt,
                                                       ['name', 'easyblock'])

    # make sure easyconfig file is in expected location
    expected_subdir = os.path.join('easybuild', 'easyconfigs',
                                   name.lower()[0], name)
    subdir = os.path.join(*spec.split(os.path.sep)[-5:-1])
    fail_msg = "Easyconfig file %s not in expected subdirectory %s" % (
        spec, expected_subdir)
    self.assertEqual(expected_subdir, subdir, fail_msg)

    # sanity check for software name
    self.assertEqual(ec['name'], name)

    # instantiate easyblock with easyconfig file
    app_class = get_easyblock_class(easyblock, name=name)

    # check that automagic fallback to ConfigureMake isn't done (deprecated behaviour)
    fn = os.path.basename(spec)
    error_msg = "%s relies on automagic fallback to ConfigureMake, should use easyblock = 'ConfigureMake' instead" % fn
    self.assertTrue(easyblock or not app_class is ConfigureMake, error_msg)

    app = app_class(ec)

    # more sanity checks
    self.assertEqual(name, app.name)
    self.assertEqual(ec['version'], app.version)

    # make sure all patch files are available
    specdir = os.path.dirname(spec)
    specfn = os.path.basename(spec)
    for patch in ec['patches']:
        if isinstance(patch, (tuple, list)):
            patch = patch[0]
        # only check actual patch files, not other files being copied via the patch functionality
        if patch.endswith('.patch'):
            patch_full = os.path.join(specdir, patch)
            msg = "Patch file %s is available for %s" % (patch_full, specfn)
            self.assertTrue(os.path.isfile(patch_full), msg)
    ext_patches = []
    for ext in ec['exts_list']:
        if isinstance(ext, (tuple, list)) and len(ext) == 3:
            self.assertTrue(isinstance(ext[2], dict),
                            "3rd element of extension spec is a dictionary")
            for ext_patch in ext[2].get('patches', []):
                if isinstance(ext_patch, (tuple, list)):
                    ext_patch = ext_patch[0]
                # only check actual patch files, not other files being copied via the patch functionality
                if ext_patch.endswith('.patch'):
                    ext_patch_full = os.path.join(specdir, ext_patch)
                    msg = "Patch file %s is available for %s" % (
                        ext_patch_full, specfn)
                    self.assertTrue(os.path.isfile(ext_patch_full), msg)

    # check whether all extra_options defined for used easyblock are defined
    for key in app.extra_options():
        self.assertTrue(key in app.cfg)

    app.close_log()
    os.remove(app.logfile)

    # dump the easyconfig file
    handle, test_ecfile = tempfile.mkstemp()
    os.close(handle)

    ec.dump(test_ecfile)
    dumped_ec = EasyConfig(test_ecfile)
    os.remove(test_ecfile)

    # inject dummy values for templates that are only known at a later stage
    dummy_template_values = {
        'builddir': '/dummy/builddir',
        'installdir': '/dummy/installdir',
    }
    ec.template_values.update(dummy_template_values)
    dumped_ec.template_values.update(dummy_template_values)

    for key in sorted(ec._config):
        self.assertEqual(ec[key], dumped_ec[key])

    # cache the parsed easyconfig, so it does not need to be parsed again
    self.parsed_easyconfigs.append(ecs[0])

    # test passed, so set back to True
    single_tests_ok = True and prev_single_tests_ok
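
A function like template_easyconfig_test is usually not invoked directly; instead, one test method per easyconfig file is generated and attached to a TestCase class, so each file shows up as its own test. A hedged sketch of that pattern; the helper name, class argument and directory are hypothetical and not taken from the code above:

import glob
import os

def attach_template_tests(test_cls, easyconfig_dir):
    """Add one dynamically generated test method per easyconfig file in easyconfig_dir."""
    for spec in sorted(glob.glob(os.path.join(easyconfig_dir, '*.eb'))):
        test_name = 'test__parse_%s' % os.path.basename(spec).replace('.', '_').replace('-', '_')
        # bind the current spec via a default argument to avoid the lambda late-binding pitfall
        setattr(test_cls, test_name, lambda self, spec=spec: template_easyconfig_test(self, spec))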
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(os.path.dirname(__file__), 'easyconfigs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        self.assertEqual(len(jobs), 8)
        regex = re.compile("echo '.*/gzip-1.5-goolf-1.4.10.eb'")
        self.assertTrue(regex.search(jobs[-1].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(regex.search(jobs[i].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and ictce/4.1.13 (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is ictce/4.1.13 (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('ictce-4.1.13.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0.0-deps.eb\s* --hidden')
        self.assertTrue(regex.search(jobs[3].deps[0].script))
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
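
One caveat with the manual save/patch/restore approach above: if any assertion in the middle of the test fails, the restore statements at the end are never reached and the mocked attributes leak into other tests. A hedged alternative sketch using unittest.mock.patch.object (the external mock package offers the same API on Python 2), which restores the originals automatically even when the block raises; PbsPython, pbs_python and MockPbsJob are the same names used in the test above:

from unittest import mock

with mock.patch.object(PbsPython, '_check_version', lambda _: True), \
     mock.patch.object(PbsPython, 'connect_to_server', lambda *args, **kwargs: None), \
     mock.patch.object(pbs_python, 'PbsJob', MockPbsJob):
    # code that needs the mocked PBS backend goes here;
    # the original attributes are restored when the with-block exits, even on failure
    pass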