Example #1
    def test_get_toolchain_hierarchy(self):
        """Test get_toolchain_hierarchy function."""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        goolf_hierarchy = get_toolchain_hierarchy({'name': 'goolf', 'version': '1.4.10'})
        self.assertEqual(goolf_hierarchy, [
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
            {'name': 'goolf', 'version': '1.4.10'},
        ])

        iimpi_hierarchy = get_toolchain_hierarchy({'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'})
        self.assertEqual(iimpi_hierarchy, [
            {'name': 'iccifort', 'version': '2013.5.192-GCC-4.8.3'},
            {'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'},
        ])

        # test also including dummy
        init_config(build_options={
            'add_dummy_to_minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        # testing with gompi/1.4.10, since the result for goolf/1.4.10 is cached (and it's hard to reset the cache)
        gompi_hierarchy = get_toolchain_hierarchy({'name': 'gompi', 'version': '1.4.10'})
        self.assertEqual(gompi_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
        ])
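
Note the caching caveat in the comment above: results of get_toolchain_hierarchy are cached, which is why later examples call get_toolchain_hierarchy.clear() after changing build options. As a rough illustration only (the real EasyBuild cache is not shown in these snippets), a memoised helper exposing a clear() hook could look like this; cached_toolchain_hierarchy and _hierarchy_cache are made-up names:

_hierarchy_cache = {}

def cached_toolchain_hierarchy(name, version):
    """Return a (placeholder) toolchain hierarchy, caching the result per toolchain."""
    key = (name, version)
    if key not in _hierarchy_cache:
        # stand-in for the real subtoolchain lookup
        _hierarchy_cache[key] = [{'name': name, 'version': version}]
    return _hierarchy_cache[key]

# expose a reset hook, mirroring the get_toolchain_hierarchy.clear() calls in the next example
cached_toolchain_hierarchy.clear = _hierarchy_cache.clear
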
Example #2
    def test_get_toolchain_hierarchy(self):
        """Test get_toolchain_hierarchy function."""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        goolf_hierarchy = get_toolchain_hierarchy({'name': 'goolf', 'version': '1.4.10'})
        self.assertEqual(goolf_hierarchy, [
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
            {'name': 'goolf', 'version': '1.4.10'},
        ])

        iimpi_hierarchy = get_toolchain_hierarchy({'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'})
        self.assertEqual(iimpi_hierarchy, [
            {'name': 'iccifort', 'version': '2013.5.192-GCC-4.8.3'},
            {'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'},
        ])

        # test also including dummy
        init_config(build_options={
            'add_dummy_to_minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        get_toolchain_hierarchy.clear()
        gompi_hierarchy = get_toolchain_hierarchy({'name': 'gompi', 'version': '1.4.10'})
        self.assertEqual(gompi_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
        ])

        get_toolchain_hierarchy.clear()
        # check whether GCCcore is considered as subtoolchain, even if it's only listed as a dep
        gcc_hierarchy = get_toolchain_hierarchy({'name': 'GCC', 'version': '4.9.3-2.25'})
        self.assertEqual(gcc_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCCcore', 'version': '4.9.3'},
            {'name': 'GCC', 'version': '4.9.3-2.25'},
        ])

        get_toolchain_hierarchy.clear()
        iccifort_hierarchy = get_toolchain_hierarchy({'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'})
        self.assertEqual(iccifort_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCCcore', 'version': '4.9.3'},
            {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
        ])
    def test_external_dependencies(self):
        """Test specifying external (build) dependencies."""
        ectxt = read_file(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0-deps.eb'))
        toy_ec = os.path.join(self.test_prefix, 'toy-0.0-external-deps.eb')

        # just specify some of the test modules we ship, doesn't matter where they come from
        ectxt += "\ndependencies += [('foobar/1.2.3', EXTERNAL_MODULE)]"
        ectxt += "\nbuilddependencies = [('somebuilddep/0.1', EXTERNAL_MODULE)]"
        write_file(toy_ec, ectxt)

        build_options = {
            'valid_module_classes': module_classes(),
            'external_modules_metadata': ConfigObj(),
        }
        init_config(build_options=build_options)
        ec = EasyConfig(toy_ec)

        builddeps = ec.builddependencies()
        self.assertEqual(len(builddeps), 1)
        self.assertEqual(builddeps[0]['short_mod_name'], 'somebuilddep/0.1')
        self.assertEqual(builddeps[0]['full_mod_name'], 'somebuilddep/0.1')
        self.assertEqual(builddeps[0]['external_module'], True)

        deps = ec.dependencies()
        self.assertEqual(len(deps), 4)
        correct_deps = ['ictce/4.1.13', 'GCC/4.7.2', 'foobar/1.2.3', 'somebuilddep/0.1']
        self.assertEqual([d['short_mod_name'] for d in deps], correct_deps)
        self.assertEqual([d['full_mod_name'] for d in deps], correct_deps)
        self.assertEqual([d['external_module'] for d in deps], [False, True, True, True])

        metadata = os.path.join(self.test_prefix, 'external_modules_metadata.cfg')
        metadatatxt = '\n'.join(['[foobar/1.2.3]', 'name = foo,bar', 'version = 1.2.3,3.2.1', 'prefix = /foo/bar'])
        write_file(metadata, metadatatxt)
        cfg = init_config(args=['--external-modules-metadata=%s' % metadata])
        build_options = {
            'external_modules_metadata': cfg.external_modules_metadata,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        ec = EasyConfig(toy_ec)
        self.assertEqual(ec.dependencies()[2]['short_mod_name'], 'foobar/1.2.3')
        self.assertEqual(ec.dependencies()[2]['external_module'], True)
        metadata = {
            'name': ['foo', 'bar'],
            'version': ['1.2.3', '3.2.1'],
            'prefix': '/foo/bar',
        }
        self.assertEqual(ec.dependencies()[2]['external_module_metadata'], metadata)
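
For reference, the external modules metadata file written above is plain ConfigObj/INI syntax, and ConfigObj's default list handling is what turns the comma-separated name and version entries into lists. A small standalone sketch, assuming the configobj package is available:

from configobj import ConfigObj

metadata_lines = [
    '[foobar/1.2.3]',
    'name = foo,bar',
    'version = 1.2.3,3.2.1',
    'prefix = /foo/bar',
]
cfg = ConfigObj(metadata_lines)
print(cfg['foobar/1.2.3']['name'])    # ['foo', 'bar']: comma-separated values become lists
print(cfg['foobar/1.2.3']['prefix'])  # '/foo/bar': single values stay strings
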
Example #4
    def test_map_easyconfig_to_target_tc_hierarchy(self):
        """Test mapping of easyconfig to target hierarchy"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'robot_path': test_easyconfigs,
            'silent': True,
            'valid_module_classes': module_classes(),
        })
        get_toolchain_hierarchy.clear()

        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
        # The below mapping includes a binutils mapping (2.26 to 2.25)
        tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
        ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        tweaked_dict = tweaked_ec['ec'].asdict()
        # First check the mapped toolchain
        key, value = 'toolchain', iccifort_binutils_tc
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # Also check that binutils has been mapped
        for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
            self.assertTrue(key in tweaked_dict['builddependencies'][0] and
                            value == tweaked_dict['builddependencies'][0][key])
Example #5
    def test_check_capability_mapping(self):
        """Test comparing the functionality of two toolchains"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        get_toolchain_hierarchy.clear()
        foss_hierarchy = get_toolchain_hierarchy({'name': 'foss', 'version': '2018a'}, incl_capabilities=True)
        iimpi_hierarchy = get_toolchain_hierarchy({'name': 'iimpi', 'version': '2016.01'},
                                                  incl_capabilities=True)

        # Hierarchies are returned with top-level toolchain last, foss has 4 elements here, intel has 2
        self.assertEqual(foss_hierarchy[0]['name'], 'GCC')
        self.assertEqual(foss_hierarchy[1]['name'], 'golf')
        self.assertEqual(foss_hierarchy[2]['name'], 'gompi')
        self.assertEqual(foss_hierarchy[3]['name'], 'foss')
        self.assertEqual(iimpi_hierarchy[0]['name'], 'GCCcore')
        self.assertEqual(iimpi_hierarchy[1]['name'], 'iccifort')
        self.assertEqual(iimpi_hierarchy[2]['name'], 'iimpi')

        # golf <-> iimpi (should return False)
        self.assertFalse(check_capability_mapping(foss_hierarchy[1], iimpi_hierarchy[1]), "golf requires math libs")
        # gompi <-> iimpi
        self.assertTrue(check_capability_mapping(foss_hierarchy[2], iimpi_hierarchy[2]))
        # GCC <-> iimpi
        self.assertTrue(check_capability_mapping(foss_hierarchy[0], iimpi_hierarchy[2]))
        # GCC <-> iccifort
        self.assertTrue(check_capability_mapping(foss_hierarchy[0], iimpi_hierarchy[1]))
        # GCC <-> GCCcore
        self.assertTrue(check_capability_mapping(foss_hierarchy[0], iimpi_hierarchy[0]))
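
The assertions above rely on check_capability_mapping deciding whether one toolchain can stand in for another. As a rough sketch of that idea (not the actual EasyBuild implementation; the capability keys shown are illustrative), a target toolchain is acceptable when it provides every capability the source toolchain provides:

def provides_required_capabilities(source_tc, target_tc):
    """Return True if target_tc provides every capability that source_tc provides."""
    for cap, value in source_tc.items():
        if cap in ('name', 'version'):
            continue
        # a capability set to None counts as 'not provided'
        if value is not None and target_tc.get(cap) is None:
            return False
    return True

# e.g. a toolchain with math libraries cannot be mapped onto one without them
golf_like = {'name': 'golf', 'version': '2018a', 'blas_family': 'OpenBLAS'}
iccifort_like = {'name': 'iccifort', 'version': '2016.1.150', 'blas_family': None}
print(provides_required_capabilities(golf_like, iccifort_like))  # False
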
def suite():
    """Return all easyblock --module-only tests."""
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in ['versionindependendpythonpackage.py']]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        exec("def innertest(self): template_module_only_test(self, '%s')" % easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
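
The suite() above binds each easyblock path into a test method via exec on a format string. A closure gives the same binding without string interpolation, which sidesteps quoting issues in paths; this is only an alternative sketch, assuming template_module_only_test and ModuleOnlyTest exist as in the original:

def make_innertest(easyblock_path):
    """Build a test method bound to a single easyblock path (then attach it to ModuleOnlyTest via setattr)."""
    def innertest(self):
        template_module_only_test(self, easyblock_path)
    innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock_path
    return innertest
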
Example #7
    def test_robot_find_minimal_easyconfig_for_dependency(self):
        """Test robot_find_minimal_easyconfig_for_dependency."""

        # replace log.experimental with log.warning to allow experimental code
        easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        gzip15 = {
            'name': 'gzip',
            'version': '1.5',
            'versionsuffix': '',
            'toolchain': {'name': 'goolf', 'version': '1.4.10'},
        }
        new_gzip15, ecfile = robot_find_minimal_easyconfig_for_dependency(gzip15)
        self.assertEqual(new_gzip15, gzip15)
        self.assertTrue(os.path.samefile(ecfile, os.path.join(test_easyconfigs, 'gzip-1.5-goolf-1.4.10.eb')))

        # no easyconfig for gzip 1.4 with matching non-dummy (sub)toolchain
        gzip14 = {
            'name': 'gzip',
            'version': '1.4',
            'versionsuffix': '',
            'toolchain': {'name': 'goolf', 'version': '1.4.10'},
        }
        self.assertEqual(robot_find_minimal_easyconfig_for_dependency(gzip14), None)

        # use gompi/1.4.10 toolchain, to dance around caching of toolchain hierarchy
        gzip14['toolchain'] = {'name': 'gompi', 'version': '1.4.10'}

        # test also including dummy toolchain
        init_config(build_options={
            'add_dummy_to_minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        new_gzip14, ecfile = robot_find_minimal_easyconfig_for_dependency(gzip14)
        self.assertTrue(new_gzip14 != gzip14)
        self.assertEqual(new_gzip14['toolchain'], {'name': 'dummy', 'version': ''})
        self.assertTrue(os.path.samefile(ecfile, os.path.join(test_easyconfigs, 'gzip-1.4.eb')))
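
Conceptually, robot_find_minimal_easyconfig_for_dependency walks the toolchain hierarchy of the dependency from the least capable subtoolchain upwards (optionally including the dummy toolchain) and returns the first matching easyconfig. A schematic sketch only; find_easyconfig stands in for the real lookup and is not an EasyBuild function:

def find_minimal_easyconfig(dep, hierarchy, find_easyconfig):
    """Return (tweaked_dep, path) for the first toolchain in 'hierarchy' with an easyconfig, else None."""
    for toolchain in hierarchy:  # ordered least- to most-capable, as in the tests above
        candidate = dict(dep, toolchain=toolchain)
        path = find_easyconfig(candidate)
        if path:
            return candidate, path
    return None
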
Example #8
def process_easyconfig(path, onlyBlocks=None, regtest_online=False, validate=True):
    """
    Process easyconfig, returning some information for each block
    """
    blocks = retrieve_blocks_in_spec(path, onlyBlocks)

    easyconfigs = []
    for spec in blocks:
        # process for dependencies and real installversionname
        # - use mod? __init__ and importCfg are ignored.
        _log.debug("Processing easyconfig %s" % spec)

        # create easyconfig
        try:
            all_stops = [x[0] for x in EasyBlock.get_steps()]
            ec = EasyConfig(spec, validate=validate, valid_module_classes=module_classes(), valid_stops=all_stops)
        except EasyBuildError as err:
            msg = "Failed to process easyconfig %s:\n%s" % (spec, err.msg)
            _log.exception(msg)

        name = ec["name"]

        # this app will appear as following module in the list
        easyconfig = {
            "spec": spec,
            "module": (ec.name, ec.get_installversion()),
            "dependencies": [],
            "builddependencies": [],
        }
        if len(blocks) > 1:
            easyconfig["originalSpec"] = path

        # add build dependencies
        for dep in ec.builddependencies():
            deptup = (dep["name"], dep["tc"])
            _log.debug("Adding build dependency %s for app %s." % (deptup, name))
            easyconfig["builddependencies"].append(deptup)

        # add dependencies (including build dependencies)
        for dep in ec.dependencies():
            deptup = (dep["name"], dep["tc"])
            _log.debug("Adding dependency %s for app %s." % (deptup, name))
            easyconfig["dependencies"].append(deptup)

        # add toolchain as dependency too
        if ec.toolchain.name != "dummy":
            dep = (ec.toolchain.name, ec.toolchain.version)
            _log.debug("Adding toolchain %s as dependency for app %s." % (dep, name))
            easyconfig["dependencies"].append(dep)

        del ec

        # this is used by the parallel builder
        easyconfig["unresolvedDependencies"] = copy.copy(easyconfig["dependencies"])

        easyconfigs.append(easyconfig)

    return easyconfigs
Example #9
def process_easyconfig(path, onlyBlocks=None, regtest_online=False, validate=True):
    """
    Process easyconfig, returning some information for each block
    """
    blocks = retrieve_blocks_in_spec(path, onlyBlocks)

    easyconfigs = []
    for spec in blocks:
        # process for dependencies and real installversionname
        # - use mod? __init__ and importCfg are ignored.
        _log.debug("Processing easyconfig %s" % spec)

        # create easyconfig
        try:
            all_stops = [x[0] for x in EasyBlock.get_steps()]
            ec = EasyConfig(spec, validate=validate, valid_module_classes=module_classes(), valid_stops=all_stops)
        except EasyBuildError as err:
            msg = "Failed to process easyconfig %s:\n%s" % (spec, err.msg)
            _log.exception(msg)

        name = ec['name']

        # this app will appear as following module in the list
        easyconfig = {
            'ec': ec,
            'spec': spec,
            'module': det_full_module_name(ec),
            'dependencies': [],
            'builddependencies': [],
        }
        if len(blocks) > 1:
            easyconfig['originalSpec'] = path

        # add build dependencies
        for dep in ec.builddependencies():
            _log.debug("Adding build dependency %s for app %s." % (dep, name))
            easyconfig['builddependencies'].append(dep)

        # add dependencies (including build dependencies)
        for dep in ec.dependencies():
            _log.debug("Adding dependency %s for app %s." % (dep, name))
            easyconfig['dependencies'].append(dep)

        # add toolchain as dependency too
        if ec.toolchain.name != DUMMY_TOOLCHAIN_NAME:
            dep = ec.toolchain.as_dict()
            _log.debug("Adding toolchain %s as dependency for app %s." % (dep, name))
            easyconfig['dependencies'].append(dep)

        del ec

        # this is used by the parallel builder
        easyconfig['unresolved_deps'] = copy.deepcopy(easyconfig['dependencies'])

        easyconfigs.append(easyconfig)

    return easyconfigs
Example #10
    def test_find_minimally_resolved_modules(self):
        """Test find_minimally_resolved_modules function."""

        # replace log.experimental with log.warning to allow experimental code
        easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
        barec_txt = '\n'.join([
            "easyblock = 'ConfigureMake'",
            "name = 'bar'",
            "version = '1.2.3'",
            "homepage = 'http://example.com'",
            "description = 'foo'",
            "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
            # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
            # to test resolving of dependencies with minimal toolchain
            # for each of these, we know test easyconfigs are available (which are required here)
            "dependencies = [",
            "   ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
            "   ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('SQLite', '3.8.10.2'),",  # only available with goolf/1.4.10
            "]",
        ])
        write_file(barec, barec_txt)
        bar = process_easyconfig(barec)[0]

        ecs = [bar]
        mods = [
            'gompi/1.4.10',
            'goolf/1.4.10',
            # include modules for dependencies, with subtoolchains rather than full toolchain (except for SQLite)
            'OpenMPI/1.6.4-GCC-4.7.2',
            'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
            'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
            'SQLite/3.8.10.2-GCC-4.7.2',
        ]
        ordered_ecs, new_easyconfigs, new_avail_modules = find_minimally_resolved_modules(ecs, mods, [])

        # all dependencies are resolved for easyconfigs included in ordered_ecs
        self.assertEqual(len(ordered_ecs), 1)
        self.assertEqual(ordered_ecs[0]['dependencies'], [])

        # module is added to list of available modules
        self.assertTrue(bar['ec'].full_mod_name in new_avail_modules)

        # nothing left
        self.assertEqual(new_easyconfigs, [])
    def test_build_easyconfigs_in_parallel_slurm(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) Slurm as backend for --job."""

        # install mocked versions of 'sbatch' and 'scontrol' commands
        sbatch = os.path.join(self.test_prefix, 'bin', 'sbatch')
        write_file(sbatch, MOCKED_SBATCH)
        adjust_permissions(sbatch, stat.S_IXUSR, add=True)

        scontrol = os.path.join(self.test_prefix, 'bin', 'scontrol')
        write_file(scontrol, MOCKED_SCONTROL)
        adjust_permissions(scontrol, stat.S_IXUSR, add=True)

        os.environ['PATH'] = os.path.pathsep.join([os.path.join(self.test_prefix, 'bin'), os.getenv('PATH')])

        topdir = os.path.dirname(os.path.abspath(__file__))
        test_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
        foss_ec = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'f', 'foss', 'foss-2018a.eb')

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
            'job_max_walltime': 5,
            'force': True,
        }
        init_config(args=['--job-backend=Slurm'], build_options=build_options)

        easyconfigs = process_easyconfig(test_ec) + process_easyconfig(foss_ec)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        self.mock_stdout(True)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        self.mock_stdout(False)

        # jobs are submitted for foss & gzip (listed easyconfigs)
        self.assertEqual(len(jobs), 2)

        # last job (gzip) has a dependency on second-to-last job (foss)
        self.assertEqual(jobs[0].job_specs['job-name'], 'foss-2018a')

        expected = {
            'dependency': 'afterok:%s' % jobs[0].jobid,
            'hold': True,
            'job-name': 'gzip-1.5-foss-2018a',
            'nodes': 1,
            'ntasks': 3,
            'ntasks-per-node': 3,
            'output': '%x-%j.out',
            'time': 300,  # 60*5 (unit is minutes)
            'wrap': "echo '%s'" % test_ec,
        }
        self.assertEqual(jobs[1].job_specs, expected)
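
The expected job specs above follow directly from the build options: job_cores becomes ntasks/ntasks-per-node, job_max_walltime (hours) is converted to minutes, and the gzip job depends on the foss job via afterok. A minimal sketch of assembling such a spec dict (illustrative only, not the real Slurm backend code):

def slurm_job_specs(name, cores, max_walltime_hours, dep_jobid=None):
    """Assemble a Slurm job-spec dict shaped like the one asserted above."""
    specs = {
        'job-name': name,
        'nodes': 1,
        'ntasks': cores,
        'ntasks-per-node': cores,
        'output': '%x-%j.out',
        'time': max_walltime_hours * 60,  # walltime is passed to sbatch in minutes
    }
    if dep_jobid is not None:
        specs['dependency'] = 'afterok:%s' % dep_jobid
        specs['hold'] = True
    return specs
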
def suite():
    """Return all easyblock --module-only tests."""
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': {},
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != '__init__.py' and '/test/' not in eb]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ['versionindependendpythonpackage.py']
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks]

    # add dummy PrgEnv-* modules, required for testing CrayToolchain easyblock
    for prgenv in ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']:
        write_file(os.path.join(TMPDIR, 'modules', 'all', prgenv, '1.2.3'), "#%Module")

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            exec("def innertest(self): template_module_only_test(self, '%s', name='GCC', version='system')" % easyblock)
        elif os.path.basename(easyblock) == 'systemmpi.py':
            # use OpenMPI as name when testing SystemMPI easyblock
            exec("def innertest(self): template_module_only_test(self, '%s', name='OpenMPI', version='system')" %
                 easyblock)
        elif os.path.basename(easyblock) == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            exec("def innertest(self): template_module_only_test(self, '%s', extra_txt='%s')" % (easyblock, extra_txt))
        else:
            exec("def innertest(self): template_module_only_test(self, '%s')" % easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
    def test_build_easyconfigs_in_parallel_gc3pie(self):
        """Test build_easyconfigs_in_parallel(), using GC3Pie with local config as backend for --job."""
        try:
            import gc3libs  # noqa (ignore unused import)
        except ImportError:
            print "GC3Pie not available, skipping test"
            return

        # put GC3Pie config in place to use local host and fork/exec
        resourcedir = os.path.join(self.test_prefix, 'gc3pie')
        gc3pie_cfgfile = os.path.join(self.test_prefix, 'gc3pie_local.ini')
        gc3pie_cfgtxt = GC3PIE_LOCAL_CONFIGURATION % {
            'resourcedir': resourcedir,
            'time': which('time'),
        }
        write_file(gc3pie_cfgfile, gc3pie_cfgtxt)

        output_dir = os.path.join(self.test_prefix, 'subdir', 'gc3pie_output_dir')
        # purposely pre-create output dir, and put a file in it (to check whether GC3Pie tries to rename the output dir)
        mkdir(output_dir, parents=True)
        write_file(os.path.join(output_dir, 'foo'), 'bar')
        # remove write permissions on parent dir of specified output dir,
        # to check that GC3Pie does not try to rename the (already existing) output directory...
        adjust_permissions(os.path.dirname(output_dir), stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
                           add=False, recursive=False)

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'job_backend_config': gc3pie_cfgfile,
            'job_max_walltime': 24,
            'job_output_dir': output_dir,
            'job_polling_interval': 0.2,  # quick polling
            'job_target_resource': 'ebtestlocalhost',
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'silent': True,
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=GC3Pie'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        test_easyblocks_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
        cmd = "PYTHONPATH=%s:%s:$PYTHONPATH eb %%(spec)s -df" % (topdir, test_easyblocks_path)
        build_easyconfigs_in_parallel(cmd, ordered_ecs, prepare_first=False)

        self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')))
        self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')))
    def setUp(self):
        """Set up testcase."""
        super(ParallelBuildTest, self).setUp()
        build_options = {
            'robot_path': os.path.join(os.path.dirname(__file__), 'easyconfigs'),
            'valid_module_classes': config.module_classes(),
        }
        init_config(build_options=build_options)

        # put mocked functions in place
        parallelbuild.connect_to_server = mock
        parallelbuild.disconnect_from_server = mock
        parallelbuild.get_ppn = mock
        parallelbuild.PbsJob = MockPbsJob
    def test_hide_hidden_deps(self):
        """Test use of --hide-deps on hiddendependencies."""
        test_dir = os.path.dirname(os.path.abspath(__file__))
        ec_file = os.path.join(test_dir, 'easyconfigs', 'gzip-1.4-GCC-4.6.3.eb')
        ec = EasyConfig(ec_file)
        self.assertEqual(ec['hiddendependencies'][0]['full_mod_name'], 'toy/.0.0-deps')
        self.assertEqual(ec['dependencies'], [])

        build_options = {
            'hide_deps': ['toy'],
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        ec = EasyConfig(ec_file)
        self.assertEqual(ec['hiddendependencies'][0]['full_mod_name'], 'toy/.0.0-deps')
        self.assertEqual(ec['dependencies'], [])
Example #16
def process_easyconfig(path, onlyBlocks=None, regtest_online=False, validate=True):
    """
    Process easyconfig, returning some information for each block
    """
    blocks = retrieve_blocks_in_spec(path, onlyBlocks)

    easyconfigs = []
    for spec in blocks:
        # process for dependencies and real installversionname
        # - use mod? __init__ and importCfg are ignored.
        log.debug("Processing easyconfig %s" % spec)

        # create easyconfig
        try:
            all_stops = [x[0] for x in EasyBlock.get_steps()]
            ec = EasyConfig(spec, validate=validate, valid_module_classes=module_classes(), valid_stops=all_stops)
        except EasyBuildError as err:
            msg = "Failed to process easyconfig %s:\n%s" % (spec, err.msg)
            log.exception(msg)

        name = ec['name']

        # this app will appear as following module in the list
        easyconfig = {
                      'spec': spec,
                      'module': (ec.name, ec.get_installversion()),
                      'dependencies': []
                     }
        if len(blocks) > 1:
            easyconfig['originalSpec'] = path

        for d in ec.dependencies():
            dep = (d['name'], d['tc'])
            log.debug("Adding dependency %s for app %s." % (dep, name))
            easyconfig['dependencies'].append(dep)

        if ec.toolchain.name != 'dummy':
            dep = (ec.toolchain.name, ec.toolchain.version)
            log.debug("Adding toolchain %s as dependency for app %s." % (dep, name))
            easyconfig['dependencies'].append(dep)

        del ec

        # this is used by the parallel builder
        easyconfig['unresolvedDependencies'] = copy.copy(easyconfig['dependencies'])

        easyconfigs.append(easyconfig)

    return easyconfigs
Example #17
    def test_parse_yeb(self):
        """Test parsing of .yeb easyconfigs."""
        if 'yaml' not in sys.modules:
            print "Skipping test_parse_yeb (no PyYAML available)"
            return

        build_options = {
            'check_osdeps': False,
            'external_modules_metadata': {},
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        easybuild.tools.build_log.EXPERIMENTAL = True

        testdir = os.path.dirname(os.path.abspath(__file__))
        test_easyconfigs = os.path.join(testdir, 'easyconfigs')
        test_yeb_easyconfigs = os.path.join(testdir, 'easyconfigs', 'yeb')

        # test parsing
        test_files = [
            'bzip2-1.0.6-GCC-4.9.2',
            'gzip-1.6-GCC-4.9.2',
            'goolf-1.4.10',
            'ictce-4.1.13',
            'SQLite-3.8.10.2-goolf-1.4.10',
            'Python-2.7.10-ictce-4.1.13',
            'CrayCCE-5.1.29',
            'toy-0.0',
        ]

        for filename in test_files:
            ec_yeb = EasyConfig(os.path.join(test_yeb_easyconfigs, '%s.yeb' % filename))
            # compare with parsed result of .eb easyconfig
            ec_file = glob.glob(os.path.join(test_easyconfigs, 'test_ecs', '*', '*', '%s.eb' % filename))[0]
            ec_eb = EasyConfig(ec_file)

            no_match = False
            for key in sorted(ec_yeb.asdict()):
                eb_val = ec_eb[key]
                yeb_val = ec_yeb[key]
                if key == 'description':
                    # multi-line string is always terminated with '\n' in YAML, so strip it off
                    yeb_val = yeb_val.strip()

                self.assertEqual(yeb_val, eb_val)
class InitTest(TestCase):
    """ Baseclass for easyblock testcases """

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    def writeEC(self, easyblock, name='foo', version='1.3.2', extratxt=''):
        """ create temporary easyconfig file """
        txt = '\n'.join([
            'easyblock = "%s"',
            'name = "%s"' % name,
            'version = "%s"' % version,
            'homepage = "http://example.com"',
            'description = "Dummy easyconfig file."',
            'toolchain = {"name": "dummy", "version": "dummy"}',
            'sources = []',
            extratxt,
        ])

        write_file(self.eb_file, txt % easyblock)

    def setUp(self):
        """Setup test."""
        self.log = fancylogger.getLogger("EasyblocksInitTest", fname=False)
        fd, self.eb_file = tempfile.mkstemp(prefix='easyblocks_init_test_',
                                            suffix='.eb')
        os.close(fd)

    def tearDown(self):
        """Cleanup."""
        try:
            os.remove(self.eb_file)
        except OSError as err:
            self.log.error("Failed to remove %s: %s" % (self.eb_file, err))
Example #19
    def test_map_toolchain_hierarchies(self):
        """Test mapping between two toolchain hierarchies"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'robot_path': test_easyconfigs,
            'silent': True,
            'valid_module_classes': module_classes(),
        })
        get_toolchain_hierarchy.clear()
        foss_tc = {'name': 'foss', 'version': '2018a'}
        gompi_tc = {'name': 'gompi', 'version': '2018a'}
        iimpi_tc = {'name': 'iimpi', 'version': '2016.01'}

        # GCCcore is mapped to GCC, iccifort is mapped to GCC, iimpi is mapped to gompi
        expected = {
            'GCCcore': {'name': 'GCC', 'version': '6.4.0-2.28'},
            'iccifort': {'name': 'GCC', 'version': '6.4.0-2.28'},
            'iimpi': {'name': 'gompi', 'version': '2018a'},
        }
        self.assertEqual(map_toolchain_hierarchies(iimpi_tc, foss_tc, self.modtool), expected)

        # GCC is mapped to iccifort, gompi is mapped to iimpi
        expected = {
            'GCC': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
            'gompi': {'name': 'iimpi', 'version': '2016.01'}
        }
        self.assertEqual(map_toolchain_hierarchies(gompi_tc, iimpi_tc, self.modtool), expected)

        # Expect an error when there is no possible mapping
        error_msg = "No possible mapping from source toolchain spec .*"
        self.assertErrorRegex(EasyBuildError, error_msg, map_toolchain_hierarchies,
                              foss_tc, iimpi_tc, self.modtool)

        # Test that we correctly include GCCcore binutils when it is there
        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
        # Should see a binutils in the mapping (2.26 will get mapped to 2.25)
        expected = {
            'GCC': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
            'GCCcore': {'name': 'GCCcore', 'version': '4.9.3'},
            'binutils': {'version': '2.25', 'versionsuffix': ''}
        }
        self.assertEqual(map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool), expected)
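
The expected mappings above pair every toolchain in the source hierarchy with the least capable toolchain in the target hierarchy that still covers it (plus extra handling for binutils, which is not shown here). A bare-bones sketch of that core loop, with match_minimum standing in for match_minimum_tc_specs:

def map_hierarchies(source_hierarchy, target_hierarchy, match_minimum):
    """Map each source toolchain name to a matching {'name': ..., 'version': ...} target spec."""
    mapping = {}
    for source_tc in source_hierarchy:
        mapping[source_tc['name']] = match_minimum(source_tc, target_hierarchy)
    return mapping
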
Example #20
    def test_dep_tree_of_toolchain(self):
        """Test getting list of dependencies of a toolchain (as EasyConfig objects)"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
            'check_osdeps': False,
        })
        toolchain_spec = {'name': 'foss', 'version': '2018a'}
        list_of_deps = get_dep_tree_of_toolchain(toolchain_spec, self.modtool)
        expected_deps = [
            ['GCC', '6.4.0'], ['OpenBLAS', '0.2.20'], ['hwloc', '1.11.8'], ['OpenMPI', '2.1.2'],
            ['gompi', '2018a'], ['FFTW', '3.3.7'], ['ScaLAPACK', '2.0.2'], ['foss', '2018a'],
        ]
        actual_deps = [[dep['name'], dep['version']] for dep in list_of_deps]
        self.assertEqual(expected_deps, actual_deps)
Example #21
def init_config(args=None, build_options=None):
    """(re)initialize configuration"""

    # clean up any instances of BuildOptions and ConfigurationVariables before reinitializing configuration
    config.ConfigurationVariables.__metaclass__._instances.pop(config.ConfigurationVariables, None)
    config.BuildOptions.__metaclass__._instances.pop(config.BuildOptions, None)

    # initialize configuration so config.get_modules_tool function works
    eb_go = eboptions.parse_options(args=args)
    config.init(eb_go.options, eb_go.get_options_by_section('config'))

    if build_options is None:
        build_options = {
            'valid_module_classes': module_classes(),
            'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        }
    config.init_build_options(build_options)

    return eb_go.options
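
This variant of init_config resets the configuration singletons by popping them from the metaclass _instances registry before re-initialising. A minimal sketch of that singleton pattern (Options is a made-up stand-in, not an EasyBuild class):

class Singleton(type):
    """Metaclass that caches one instance per class in an _instances registry."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in Singleton._instances:
            Singleton._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return Singleton._instances[cls]


class Options(metaclass=Singleton):
    pass


first = Options()
Singleton._instances.pop(Options, None)  # same idea as the pop() calls in init_config above
assert Options() is not first            # the next call builds a fresh instance
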
Example #22
    def test_external_module_toolchain(self):
        """Test specifying external (build) dependencies."""
        ecpath = os.path.join(os.path.dirname(__file__), 'easyconfigs', 'yeb', 'CrayCCE-5.1.29.yeb')
        metadata = {
            'name': ['foo', 'bar'],
            'version': ['1.2.3', '3.2.1'],
            'prefix': '/foo/bar',
        }
        build_options = {
            'external_modules_metadata': {'fftw/3.3.4.0': metadata},
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        easybuild.tools.build_log.EXPERIMENTAL = True

        ec = EasyConfig(ecpath)

        self.assertEqual(ec.dependencies()[1]['full_mod_name'], 'fftw/3.3.4.0')
        self.assertEqual(ec.dependencies()[1]['external_module_metadata'], metadata)
Example #23
    def test_map_toolchain_hierarchies(self):
        """Test mapping between two toolchain hierarchies"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        get_toolchain_hierarchy.clear()
        foss_tc = {'name': 'foss', 'version': '2018a'}
        gompi_tc = {'name': 'gompi', 'version': '2018a'}
        iimpi_tc = {'name': 'iimpi', 'version': '2016.01'}

        # GCCcore is mapped to GCC, iccifort is mapped to GCC, iimpi is mapped to gompi
        expected = {
            'GCCcore': {'name': 'GCC', 'version': '6.4.0-2.28'},
            'iccifort': {'name': 'GCC', 'version': '6.4.0-2.28'},
            'iimpi': {'name': 'gompi', 'version': '2018a'},
        }
        self.assertEqual(map_toolchain_hierarchies(iimpi_tc, foss_tc, self.modtool), expected)

        # GCC is mapped to iccifort, gompi is mapped to iimpi
        expected = {
            'GCC': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
            'gompi': {'name': 'iimpi', 'version': '2016.01'}
        }
        self.assertEqual(map_toolchain_hierarchies(gompi_tc, iimpi_tc, self.modtool), expected)

        # Expect an error when there is no possible mapping
        error_msg = "No possible mapping from source toolchain spec .*"
        self.assertErrorRegex(EasyBuildError, error_msg, map_toolchain_hierarchies,
                              foss_tc, iimpi_tc, self.modtool)

        # Test that we correctly include GCCcore binutils when it is there
        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
        # Should see a binutils in the mapping (2.26 will get mapped to 2.25)
        expected = {
            'GCC': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
            'GCCcore': {'name': 'GCCcore', 'version': '4.9.3'},
            'binutils': {'version': '2.25', 'versionsuffix': ''}
        }
        self.assertEqual(map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool), expected)
def suite():
    """Return all easyblock --module-only tests."""
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [
        eb for eb in all_pys
        if os.path.basename(eb) != '__init__.py' and '/test/' not in eb
    ]

    # filter out no longer supported easyblocks
    easyblocks = [
        e for e in easyblocks
        if os.path.basename(e) not in ['versionindependendpythonpackage.py']
    ]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        exec("def innertest(self): template_module_only_test(self, '%s')" %
             easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(
            easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
Example #25
    def test_toolchain_inspection(self):
        """Test whether available toolchain inspection functionality is working."""
        build_options = {
            'robot_path': os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs'),
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)

        ec = EasyConfig(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb'))
        self.assertEqual(['/'.join([x['name'], x['version']]) for x in det_toolchain_compilers(ec)], ['GCC/4.7.2'])
        self.assertEqual(det_toolchain_mpi(ec)['name'], 'OpenMPI')

        ec = EasyConfig(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'hwloc-1.6.2-GCC-4.6.4.eb'))
        tc_comps = det_toolchain_compilers(ec)
        self.assertEqual(['/'.join([x['name'], x['version']]) for x in tc_comps], ['GCC/4.6.4'])
        self.assertEqual(det_toolchain_mpi(ec), None)

        ec = EasyConfig(os.path.join(os.path.dirname(__file__), 'easyconfigs', 'toy-0.0.eb'))
        self.assertEqual(det_toolchain_compilers(ec), None)
        self.assertEqual(det_toolchain_mpi(ec), None)
def init_config(args=None, build_options=None):
    """(re)initialize configuration"""

    cleanup()

    # initialize configuration so config.get_modules_tool function works
    eb_go = eboptions.parse_options(args=args)
    config.init(eb_go.options, eb_go.get_options_by_section('config'))

    # initialize build options
    if build_options is None:
        build_options = {
            'valid_module_classes': module_classes(),
            'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        }
    if 'suffix_modules_path' not in build_options:
        build_options.update({'suffix_modules_path': GENERAL_CLASS})
    config.init_build_options(build_options)

    return eb_go.options
Example #27
def init_config(args=None, build_options=None):
    """(re)initialize configuration"""

    cleanup()

    # initialize configuration so config.get_modules_tool function works
    eb_go = eboptions.parse_options(args=args)
    config.init(eb_go.options, eb_go.get_options_by_section('config'))

    # initialize build options
    if build_options is None:
        build_options = {
            'valid_module_classes': module_classes(),
            'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        }
    if 'suffix_modules_path' not in build_options:
        build_options.update({'suffix_modules_path': GENERAL_CLASS})
    config.init_build_options(build_options)

    return eb_go.options
def init_config(args=None, build_options=None):
    """(re)initialize configuration"""

    # clean up any instances of BuildOptions and ConfigurationVariables before reinitializing configuration
    config.ConfigurationVariables.__metaclass__._instances.pop(config.ConfigurationVariables, None)
    config.BuildOptions.__metaclass__._instances.pop(config.BuildOptions, None)

    # initialize configuration so config.get_modules_tool function works
    eb_go = eboptions.parse_options(args=args)
    config.init(eb_go.options, eb_go.get_options_by_section('config'))

    # initialize build options
    if build_options is None:
        build_options = {
            'valid_module_classes': module_classes(),
            'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        }
    config.init_build_options(build_options)

    return eb_go.options
Example #29
    def test_list_deps_versionsuffixes(self):
        """Test listing of dependencies' version suffixes"""
        test_easyconfigs = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'easyconfigs',
            'test_ecs')
        build_options = {
            'robot_path': [test_easyconfigs],
            'silent': True,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        get_toolchain_hierarchy.clear()

        ec_spec = os.path.join(test_easyconfigs, 'g', 'golf', 'golf-2018a.eb')
        self.assertEqual(list_deps_versionsuffixes(ec_spec), ['-serial'])
        ec_spec = os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0-deps.eb')
        self.assertEqual(list_deps_versionsuffixes(ec_spec), [])
        ec_spec = os.path.join(test_easyconfigs, 'g', 'gzip',
                               'gzip-1.4-GCC-4.6.3.eb')
        self.assertEqual(list_deps_versionsuffixes(ec_spec), ['-deps'])
Example #30
    def test_map_common_versionsuffixes(self):
        """Test mapping between two toolchain hierarchies"""
        test_easyconfigs = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'easyconfigs',
            'test_ecs')
        init_config(
            build_options={
                'robot_path': [test_easyconfigs],
                'silent': True,
                'valid_module_classes': module_classes(),
            })
        get_toolchain_hierarchy.clear()

        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {
            'name': 'iccifort',
            'version': '2016.1.150-GCC-4.9.3-2.25'
        }

        toolchain_mapping = map_toolchain_hierarchies(iccifort_binutils_tc,
                                                      gcc_binutils_tc,
                                                      self.modtool)
        possible_mappings = map_common_versionsuffixes('binutils',
                                                       iccifort_binutils_tc,
                                                       toolchain_mapping)
        self.assertEqual(possible_mappings,
                         {'-binutils-2.25': '-binutils-2.26'})

        # Make sure we only map upwards, here it's gzip 1.4 in gcc and 1.6 in iccifort
        possible_mappings = map_common_versionsuffixes('gzip',
                                                       iccifort_binutils_tc,
                                                       toolchain_mapping)
        self.assertEqual(possible_mappings, {})

        # newer gzip is picked up other way around (GCC -> iccifort)
        toolchain_mapping = map_toolchain_hierarchies(gcc_binutils_tc,
                                                      iccifort_binutils_tc,
                                                      self.modtool)
        possible_mappings = map_common_versionsuffixes('gzip', gcc_binutils_tc,
                                                       toolchain_mapping)
        self.assertEqual(possible_mappings, {'-gzip-1.4': '-gzip-1.6'})
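
map_common_versionsuffixes is asserted to map version suffixes only "upwards", i.e. onto an equal or newer version available with the target toolchain. A toy sketch of that rule (illustrative only; the real function derives candidate versions from available easyconfigs):

from distutils.version import LooseVersion

def map_suffixes_upwards(dep_name, source_versions, target_versions):
    """Map '-<dep>-<version>' suffixes onto an equal or newer target version only."""
    mapping = {}
    for src in source_versions:
        candidates = [v for v in target_versions if LooseVersion(v) >= LooseVersion(src)]
        if candidates:
            tgt = min(candidates, key=LooseVersion)
            mapping['-%s-%s' % (dep_name, src)] = '-%s-%s' % (dep_name, tgt)
    return mapping

print(map_suffixes_upwards('gzip', ['1.4'], ['1.6']))  # {'-gzip-1.4': '-gzip-1.6'}
print(map_suffixes_upwards('gzip', ['1.6'], ['1.4']))  # {} -- no downward mapping
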
Example #31
    def test_find_patches(self):
        """ Test for find_software_name_for_patch """
        testdir = os.path.dirname(os.path.abspath(__file__))
        ec_path = os.path.join(testdir, 'easyconfigs')
        init_config(build_options={
            'allow_modules_tool_mismatch': True,
            'minimal_toolchains': True,
            'use_existing_modules': True,
            'external_modules_metadata': ConfigObj(),
            'silent': True,
            'valid_module_classes': module_classes(),
            'validate': False,
        })
        self.mock_stdout(True)
        ec = gh.find_software_name_for_patch('toy-0.0_fix-silly-typo-in-printf-statement.patch', [ec_path])
        txt = self.get_stdout()
        self.mock_stdout(False)

        self.assertTrue(ec == 'toy')
        reg = re.compile(r'[1-9]+ of [1-9]+ easyconfigs checked')
        self.assertTrue(re.search(reg, txt))
Example #33
    def test_map_easyconfig_to_target_tc_hierarchy(self):
        """Test mapping of easyconfig to target hierarchy"""
        test_easyconfigs = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'easyconfigs',
            'test_ecs')
        init_config(
            build_options={
                'robot_path': test_easyconfigs,
                'silent': True,
                'valid_module_classes': module_classes(),
            })
        get_toolchain_hierarchy.clear()

        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {
            'name': 'iccifort',
            'version': '2016.1.150-GCC-4.9.3-2.25'
        }
        # The below mapping includes a binutils mapping (2.26 to 2.25)
        tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc,
                                               iccifort_binutils_tc,
                                               self.modtool)
        ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc',
                               'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(
            ec_spec, tc_mapping)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        tweaked_dict = tweaked_ec['ec'].asdict()
        # First check the mapped toolchain
        key, value = 'toolchain', iccifort_binutils_tc
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # Also check that binutils has been mapped
        for key, value in {
                'name': 'binutils',
                'version': '2.25',
                'versionsuffix': ''
        }.items():
            self.assertTrue(
                key in tweaked_dict['builddependencies'][0]
                and value == tweaked_dict['builddependencies'][0][key])
Example #34
def init_config(args=None, build_options=None, with_include=True):
    """(re)initialize configuration"""

    cleanup()

    # initialize configuration so config.get_modules_tool function works
    eb_go = eboptions.parse_options(args=args, with_include=with_include)
    config.init(eb_go.options, eb_go.get_options_by_section('config'))

    # initialize build options
    if build_options is None:
        build_options = {
            'extended_dry_run': False,
            'external_modules_metadata': ConfigObj(),
            'valid_module_classes': module_classes(),
            'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        }
    if 'suffix_modules_path' not in build_options:
        build_options.update({'suffix_modules_path': GENERAL_CLASS})
    config.init_build_options(build_options=build_options)

    return eb_go.options
Example #35
def suite():
    """Return all easyblock --module-only tests."""
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=["--prefix=%s" % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section("config"))
    build_options = {
        # enable --force --module-only
        "force": True,
        "module_only": True,
        "silent": True,
        "suffix_modules_path": GENERAL_CLASS,
        "valid_module_classes": config.module_classes(),
        "valid_stops": [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    config.set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob("%s/*/*.py" % easyblocks_path)
    easyblocks = [eb for eb in all_pys if os.path.basename(eb) != "__init__.py" and "/test/" not in eb]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ["versionindependendpythonpackage.py"]
    easyblocks = [e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks]

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        if os.path.basename(easyblock) == "systemcompiler.py":
            # use GCC as name when testing SystemCompiler easyblock
            exec("def innertest(self): template_module_only_test(self, '%s', name='GCC', version='system')" % easyblock)
        else:
            exec("def innertest(self): template_module_only_test(self, '%s')" % easyblock)
        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % "_".join(easyblock.replace(".py", "").split("/"))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
Example #37
    def test_dep_tree_of_toolchain(self):
        """Test getting list of dependencies of a toolchain (as EasyConfig objects)"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
            'check_osdeps': False,
        })
        toolchain_spec = {'name': 'foss', 'version': '2018a'}
        list_of_deps = get_dep_tree_of_toolchain(toolchain_spec, self.modtool)
        expected_deps = [
            ['GCC', '6.4.0'],
            ['OpenBLAS', '0.2.20'],
            ['hwloc', '1.11.8'],
            ['OpenMPI', '2.1.2'],
            ['gompi', '2018a'],
            ['FFTW', '3.3.7'],
            ['ScaLAPACK', '2.0.2'],
            ['foss', '2018a']
        ]
        actual_deps = [[dep['name'], dep['version']] for dep in list_of_deps]
        self.assertEqual(expected_deps, actual_deps)
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        build_options = {
            'robot_path': os.path.join(os.path.dirname(__file__), 'easyconfigs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs)
        jobs = build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs, prepare_first=False)
        self.assertEqual(len(jobs), 8)

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
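
The test above installs mocks by assigning to class attributes and restores the originals by hand; if an assertion fails in between, the originals are never put back. A sketch of an alternative using unittest.mock.patch.object, which reverts the patches automatically (this is not how the original test is written):

from unittest import mock

# sketch: patches are undone when the with-block exits, even if the test fails midway
with mock.patch.object(PbsPython, 'ppn', return_value=4), \
     mock.patch.object(PbsPython, 'connect_to_server'), \
     mock.patch.object(PbsPython, 'complete'):
    # ordered_ecs as prepared in the test above
    jobs = build_easyconfigs_in_parallel("echo %(spec)s", ordered_ecs, prepare_first=False)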
Example #40
    def test_match_minimum_tc_specs(self):
        """Test matching a toolchain to lowest possible in a hierarchy"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'robot_path': test_easyconfigs,
            'silent': True,
            'valid_module_classes': module_classes(),
        })
        get_toolchain_hierarchy.clear()
        foss_hierarchy = get_toolchain_hierarchy({'name': 'foss', 'version': '2018a'}, incl_capabilities=True)
        iimpi_hierarchy = get_toolchain_hierarchy({'name': 'iimpi', 'version': '2016.01'},
                                                  incl_capabilities=True)
        # Hierarchies are returned with the top-level toolchain last; foss has 4 elements here, iimpi has 3
        self.assertEqual(foss_hierarchy[0]['name'], 'GCC')
        self.assertEqual(foss_hierarchy[1]['name'], 'golf')
        self.assertEqual(foss_hierarchy[2]['name'], 'gompi')
        self.assertEqual(foss_hierarchy[3]['name'], 'foss')
        self.assertEqual(iimpi_hierarchy[0]['name'], 'GCCcore')
        self.assertEqual(iimpi_hierarchy[1]['name'], 'iccifort')
        self.assertEqual(iimpi_hierarchy[2]['name'], 'iimpi')

        # base compiler first (GCCcore which maps to GCC/6.4.0-2.28)
        self.assertEqual(match_minimum_tc_specs(iimpi_hierarchy[0], foss_hierarchy),
                         {'name': 'GCC', 'version': '6.4.0-2.28'})
        # then iccifort (which also maps to GCC/6.4.0-2.28)
        self.assertEqual(match_minimum_tc_specs(iimpi_hierarchy[1], foss_hierarchy),
                         {'name': 'GCC', 'version': '6.4.0-2.28'})
        # Then MPI
        self.assertEqual(match_minimum_tc_specs(iimpi_hierarchy[2], foss_hierarchy),
                         {'name': 'gompi', 'version': '2018a'})
        # Check against own math only subtoolchain for math
        self.assertEqual(match_minimum_tc_specs(foss_hierarchy[1], foss_hierarchy),
                         {'name': 'golf', 'version': '2018a'})
        # Make sure there's an error when we can't do the mapping
        error_msg = "No possible mapping from source toolchain spec .*"
        self.assertErrorRegex(EasyBuildError, error_msg, match_minimum_tc_specs,
                              foss_hierarchy[3], iimpi_hierarchy)
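
The get_toolchain_hierarchy.clear() call is needed because hierarchy lookups are cached per toolchain spec, so a result computed under an earlier configuration would otherwise be reused. A minimal sketch of a cache with such a clear() hook, as an illustration of the idea rather than the actual EasyBuild implementation:

# illustration only: memoize on (name, version, incl_capabilities) and expose clear()
def cached_hierarchy(func):
    cache = {}

    def wrapper(tc_spec, incl_capabilities=False):
        key = (tc_spec['name'], tc_spec['version'], incl_capabilities)
        if key not in cache:
            cache[key] = func(tc_spec, incl_capabilities=incl_capabilities)
        return cache[key]

    wrapper.clear = cache.clear
    return wrapper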
Example #41
def suite():
    """Return all easyblock --module-only tests."""
    def make_inner_test(easyblock, **kwargs):
        def innertest(self):
            template_module_only_test(self, easyblock, **kwargs)

        return innertest

    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': {},
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [
        eb for eb in all_pys
        if os.path.basename(eb) != '__init__.py' and '/test/' not in eb
    ]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ['versionindependendpythonpackage.py']
    easyblocks = [
        e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks
    ]

    # add dummy PrgEnv-* modules, required for testing CrayToolchain easyblock
    for prgenv in ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']:
        write_file(os.path.join(TMPDIR, 'modules', 'all', prgenv, '1.2.3'),
                   "#%Module")

    # add foo/1.2.3.4.5 module, required for testing the ModuleRC easyblock
    write_file(os.path.join(TMPDIR, 'modules', 'all', 'foo', '1.2.3.4.5'),
               "#%Module")

    for easyblock in easyblocks:
        eb_fn = os.path.basename(easyblock)
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        if eb_fn == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            innertest = make_inner_test(easyblock,
                                        name='GCC',
                                        version='system')
        elif eb_fn == 'systemmpi.py':
            # use OpenMPI as name when testing SystemMPI easyblock
            innertest = make_inner_test(easyblock,
                                        name='OpenMPI',
                                        version='system')
        elif eb_fn == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            innertest = make_inner_test(easyblock,
                                        name='CrayCC',
                                        extra_txt=extra_txt)
        elif eb_fn == 'modulerc.py':
            # exactly one dependency is included with ModuleRC generic easyblock (and name must match)
            extra_txt = 'dependencies = [("foo", "1.2.3.4.5")]'
            innertest = make_inner_test(easyblock,
                                        name='foo',
                                        version='1.2.3.4',
                                        extra_txt=extra_txt)
        elif eb_fn == 'intel_compilers.py':
            # custom easyblock for intel-compilers (oneAPI) requires v2021.x or newer
            innertest = make_inner_test(easyblock,
                                        name='intel-compilers',
                                        version='2021.1')
        elif eb_fn == 'openssl_wrapper.py':
            # easyblock to create OpenSSL wrapper expects an OpenSSL version
            innertest = make_inner_test(easyblock,
                                        name='OpenSSL-wrapper',
                                        version='1.1')
        elif eb_fn == 'ucx_plugins.py':
            # install fake ucx_info command (used in make_module_extra)
            tmpdir = tempfile.mkdtemp()
            install_fake_command('ucx_info', FAKE_UCX_INFO, tmpdir)
            innertest = make_inner_test(easyblock,
                                        name='UCX-CUDA',
                                        tmpdir=tmpdir)
        else:
            # Make up some unique name
            innertest = make_inner_test(easyblock,
                                        name=eb_fn.replace('.', '-') + '-sw')

        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(
            easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
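
The make_inner_test() factory matters because Python closures bind loop variables late: defining innertest directly in the for-loop would leave every generated test referring to the last easyblock in the list. A standalone illustration of the pitfall and the fix, unrelated to EasyBuild itself:

# pitfall: all three calls print 'c', because the lambdas share one loop variable
funcs = [lambda: print(name) for name in ['a', 'b', 'c']]
for f in funcs:
    f()

# fix: bind the value through a factory (what make_inner_test does above)
def make(name):
    return lambda: print(name)

for f in [make(n) for n in ['a', 'b', 'c']]:
    f()  # prints 'a', then 'b', then 'c'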
Example #42
class EasyConfigTest(TestCase):
    """Baseclass for easyconfig testcases."""

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'check_osdeps': False,
        'external_modules_metadata': {},
        'force': True,
        'optarch': 'test',
        'robot_path': get_paths_for("easyconfigs")[0],
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains
    TMPDIR = tempfile.mkdtemp()
    os.environ['MODULEPATH'] = TMPDIR
    write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n')

    log = fancylogger.getLogger("EasyConfigTest", fname=False)

    # make sure a logger is present for main
    main._log = log
    ordered_specs = None
    parsed_easyconfigs = []

    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not self.parsed_easyconfigs:
            for spec in specs:
                self.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules
        for ec in self.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs,
                                                  retain_all_deps=True)

    def test_dep_graph(self):
        """Unit test that builds a full dependency graph."""
        # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6
        if LooseVersion(
                sys.version) >= LooseVersion('2.6') and single_tests_ok:
            # temporary file for dep graph
            (hn, fn) = tempfile.mkstemp(suffix='.dot')
            os.close(hn)

            if self.ordered_specs is None:
                self.process_all_easyconfigs()

            dep_graph(fn, self.ordered_specs)

            try:
                os.remove(fn)
            except OSError as err:
                self.log.error("Failed to remove %s: %s" % (fn, err))
        else:
            print("(skipped dep graph test)")
Example #43
class EasyConfigTest(TestCase):
    """Baseclass for easyconfig testcases."""

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'check_osdeps': False,
        'external_modules_metadata': {},
        'force': True,
        'optarch': 'test',
        'robot_path': get_paths_for("easyconfigs")[0],
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains
    TMPDIR = tempfile.mkdtemp()
    os.environ['MODULEPATH'] = TMPDIR
    write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n')

    log = fancylogger.getLogger("EasyConfigTest", fname=False)

    # make sure a logger is present for main
    eb_main._log = log
    ordered_specs = None
    parsed_easyconfigs = []

    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not self.parsed_easyconfigs:
            for spec in specs:
                self.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules
        for ec in self.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        self.ordered_specs = resolve_dependencies(self.parsed_easyconfigs,
                                                  modules_tool(),
                                                  retain_all_deps=True)

    def test_dep_graph(self):
        """Unit test that builds a full dependency graph."""
        # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6
        if LooseVersion(
                sys.version) >= LooseVersion('2.6') and single_tests_ok:
            # temporary file for dep graph
            (hn, fn) = tempfile.mkstemp(suffix='.dot')
            os.close(hn)

            if self.ordered_specs is None:
                self.process_all_easyconfigs()

            dep_graph(fn, self.ordered_specs)

            remove_file(fn)
        else:
            print "(skipped dep graph test)"

    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""

        if not single_tests_ok:
            print "(skipped conflicts test)"
            return

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        self.assertFalse(
            check_conflicts(self.ordered_specs,
                            modules_tool(),
                            check_inter_ec_conflicts=False),
            "No conflicts detected")

    def test_dep_versions_per_toolchain_generation(self):
        """
        Check whether there's only one dependency version per toolchain generation actively used.
        This is enforced to try and limit the chance of running into conflicts when multiple modules built with
        the same toolchain are loaded together.
        """
        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        def get_deps_for(ec):
            """Get list of (direct) dependencies for specified easyconfig."""
            deps = []
            for dep in ec['ec']['dependencies']:
                dep_mod_name = dep['full_mod_name']
                deps.append((dep['name'], dep['version'], dep['versionsuffix'],
                             dep_mod_name))
                res = [
                    x for x in self.ordered_specs
                    if x['full_mod_name'] == dep_mod_name
                ]
                if len(res) == 1:
                    deps.extend(get_deps_for(res[0]))
                else:
                    raise EasyBuildError(
                        "Failed to find %s in ordered list of easyconfigs",
                        dep_mod_name)

            return deps

        def check_dep_vars(dep, dep_vars):
            """Check whether available variants of a particular dependency are acceptable or not."""

            # 'guilty' until proven 'innocent'
            res = False

            # filter out binutils with empty versionsuffix which is used to build toolchain compiler
            if dep == 'binutils' and len(dep_vars) > 1:
                empty_vsuff_vars = [
                    v for v in dep_vars.keys() if v.endswith('versionsuffix: ')
                ]
                if len(empty_vsuff_vars) == 1:
                    dep_vars = dict((k, v) for (k, v) in dep_vars.items()
                                    if k != empty_vsuff_vars[0])

            # multiple variants of HTSlib are OK as long as they are deps for a matching version of BCFtools
            elif dep == 'HTSlib' and len(dep_vars) > 1:
                for key, ecs in list(dep_vars.items()):
                    # filter out HTSlib variants that are only used as dependency for BCFtools with same version
                    htslib_ver = re.search('^version: (?P<ver>[^;]+);',
                                           key).group('ver')
                    if all(
                            ec.startswith('BCFtools-%s-' % htslib_ver)
                            for ec in ecs):
                        dep_vars.pop(key)

            # filter out FFTW and imkl with -serial versionsuffix which are used in non-MPI subtoolchains
            elif dep in ['FFTW', 'imkl']:
                serial_vsuff_vars = [
                    v for v in dep_vars.keys()
                    if v.endswith('versionsuffix: -serial')
                ]
                if len(serial_vsuff_vars) == 1:
                    dep_vars = dict((k, v) for (k, v) in dep_vars.items()
                                    if k != serial_vsuff_vars[0])

            # for some dependencies, we allow exceptions for software that depends on a particular version,
            # as long as that's indicated by the versionsuffix
            elif dep in ['Boost', 'R', 'PLUMED'] and len(dep_vars) > 1:
                for key in list(dep_vars):
                    dep_ver = re.search('^version: (?P<ver>[^;]+);',
                                        key).group('ver')
                    # filter out dep version if all easyconfig filenames using it include specific dep version
                    if all(
                            re.search('-%s-%s' % (dep, dep_ver), v)
                            for v in dep_vars[key]):
                        dep_vars.pop(key)
                    # always retain at least one dep variant
                    if len(dep_vars) == 1:
                        break

                # filter R dep for a specific version of Python 2.x
                if dep == 'R' and len(dep_vars) > 1:
                    for key in list(dep_vars):
                        if '; versionsuffix: -Python-2' in key:
                            dep_vars.pop(key)
                        # always retain at least one variant
                        if len(dep_vars) == 1:
                            break

            # filter out Java 'wrapper'
            # i.e. if the version of one is a prefix of the version of the other one (e.g. 1.8 & 1.8.0_181)
            elif dep == 'Java' and len(dep_vars) == 2:
                key1, key2 = sorted(dep_vars.keys())
                ver1, ver2 = [k.split(';')[0] for k in [key1, key2]]
                if ver1.startswith(ver2):
                    dep_vars.pop(key2)
                elif ver2.startswith(ver1):
                    dep_vars.pop(key1)

            # filter out variants that are specific to a particular version of CUDA
            cuda_dep_vars = [v for v in dep_vars.keys() if '-CUDA' in v]
            if len(dep_vars) > len(cuda_dep_vars):
                for key in list(dep_vars):
                    if re.search('; versionsuffix: .*-CUDA-[0-9.]+', key):
                        dep_vars.pop(key)

            # some software packages require an old version of a particular dependency
            old_dep_versions = {
                # libxc (CP2K & ABINIT require libxc 2.x or 3.x)
                'libxc': r'[23]\.',
                # OPERA requires SAMtools 0.x
                'SAMtools': r'0\.',
                # Kraken 1.0 requires Jellyfish 1.x
                'Jellyfish': r'1\.',
            }
            if dep in old_dep_versions and len(dep_vars) > 1:
                for key in list(dep_vars):
                    # filter out known old dependency versions
                    if re.search('^version: %s' % old_dep_versions[dep], key):
                        dep_vars.pop(key)

            # only single variant is always OK
            if len(dep_vars) == 1:
                res = True

            elif len(dep_vars) == 2 and dep in ['Python', 'Tkinter']:
                # for Python & Tkinter, it's OK to have on 2.x and one 3.x version
                v2_dep_vars = [
                    x for x in dep_vars.keys() if x.startswith('version: 2.')
                ]
                v3_dep_vars = [
                    x for x in dep_vars.keys() if x.startswith('version: 3.')
                ]
                if len(v2_dep_vars) == 1 and len(v3_dep_vars) == 1:
                    res = True

            # two variants is OK if one is for Python 2.x and the other is for Python 3.x (based on versionsuffix)
            elif len(dep_vars) == 2:
                py2_dep_vars = [
                    x for x in dep_vars.keys()
                    if '; versionsuffix: -Python-2.' in x
                ]
                py3_dep_vars = [
                    x for x in dep_vars.keys()
                    if '; versionsuffix: -Python-3.' in x
                ]
                if len(py2_dep_vars) == 1 and len(py3_dep_vars) == 1:
                    res = True

            return res

        # some software also follows <year>{a,b} versioning scheme,
        # which throws off the pattern matching done below for toolchain versions
        false_positives_regex = re.compile('^MATLAB-Engine-20[0-9][0-9][ab]')

        # restrict to checking dependencies of easyconfigs using common toolchains (start with 2018a)
        # and GCCcore subtoolchain for common toolchains, starting with GCCcore 7.x
        for pattern in [
                '201[89][ab]', '20[2-9][0-9][ab]', 'GCCcore-[7-9]\.[0-9]'
        ]:
            all_deps = {}
            regex = re.compile('^.*-(?P<tc_gen>%s).*\.eb$' % pattern)

            # collect variants for all dependencies of easyconfigs that use a toolchain that matches
            for ec in self.ordered_specs:
                ec_file = os.path.basename(ec['spec'])

                # take into account software which also follows a <year>{a,b} versioning scheme
                ec_file = false_positives_regex.sub('', ec_file)

                res = regex.match(ec_file)
                if res:
                    tc_gen = res.group('tc_gen')
                    all_deps_tc_gen = all_deps.setdefault(tc_gen, {})
                    for dep_name, dep_ver, dep_versuff, dep_mod_name in get_deps_for(
                            ec):
                        dep_variants = all_deps_tc_gen.setdefault(dep_name, {})
                        # a variant is defined by version + versionsuffix
                        variant = "version: %s; versionsuffix: %s" % (
                            dep_ver, dep_versuff)
                        # keep track of which easyconfigs this is a dependency for
                        dep_variants.setdefault(variant, set()).add(ec_file)

            # check which dependencies have more than 1 variant
            multi_dep_vars, multi_dep_vars_msg = [], ''
            for tc_gen in sorted(all_deps.keys()):
                for dep in sorted(all_deps[tc_gen].keys()):
                    dep_vars = all_deps[tc_gen][dep]
                    if not check_dep_vars(dep, dep_vars):
                        multi_dep_vars.append(dep)
                        multi_dep_vars_msg += "\nfound %s variants of '%s' dependency " % (
                            len(dep_vars), dep)
                        multi_dep_vars_msg += "in easyconfigs using '%s' toolchain generation\n* " % tc_gen
                        multi_dep_vars_msg += '\n* '.join(
                            "%s as dep for %s" % v
                            for v in sorted(dep_vars.items()))
                        multi_dep_vars_msg += '\n'

            error_msg = "No multi-variant deps found for '%s' easyconfigs:\n%s" % (
                regex.pattern, multi_dep_vars_msg)
            self.assertFalse(multi_dep_vars, error_msg)

    def test_sanity_check_paths(self):
        """Make sure specified sanity check paths adher to the requirements."""

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        for ec in self.parsed_easyconfigs:
            ec_scp = ec['ec']['sanity_check_paths']
            if ec_scp != {}:
                # if sanity_check_paths is specified (i.e., non-default), it must adhere to the requirements
                # both 'files' and 'dirs' keys, both with list values and with at least one a non-empty list
                error_msg = "sanity_check_paths for %s does not meet requirements: %s" % (
                    ec['spec'], ec_scp)
                self.assertEqual(sorted(ec_scp.keys()), ['dirs', 'files'],
                                 error_msg)
                self.assertTrue(isinstance(ec_scp['dirs'], list), error_msg)
                self.assertTrue(isinstance(ec_scp['files'], list), error_msg)
                self.assertTrue(ec_scp['dirs'] or ec_scp['files'], error_msg)

    def test_easyconfig_locations(self):
        """Make sure all easyconfigs files are in the right location."""
        easyconfig_dirs_regex = re.compile(
            r'/easybuild/easyconfigs/[0a-z]/[^/]+$')
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        for (dirpath, _, filenames) in os.walk(topdir):
            # ignore git/svn dirs & archived easyconfigs
            if '/.git/' in dirpath or '/.svn/' in dirpath or '__archive__' in dirpath:
                continue
            # check whether list of .eb files is non-empty
            easyconfig_files = [fn for fn in filenames if fn.endswith('eb')]
            if easyconfig_files:
                # check whether path matches required pattern
                if not easyconfig_dirs_regex.search(dirpath):
                    # only exception: TEMPLATE.eb
                    if not (dirpath.endswith('/easybuild/easyconfigs')
                            and filenames == ['TEMPLATE.eb']):
                        self.assertTrue(
                            False,
                            "List of easyconfig files in %s is empty: %s" %
                            (dirpath, filenames))

    def check_sha256_checksums(self, changed_ecs):
        """Make sure changed easyconfigs have SHA256 checksums in place."""

        # list of software for which checksums can not be required,
        # e.g. because 'source' files need to be constructed manually
        whitelist = ['Kent_tools-*', 'MATLAB-*', 'OCaml-*']

        # the check_sha256_checksums function (again) creates an EasyBlock instance
        # for easyconfigs using the Bundle easyblock, this is a problem because the 'sources' easyconfig parameter
        # is updated in place (sources for components are added to the 'parent' sources) in Bundle's __init__;
        # therefore, we need to reset 'sources' to an empty list here if Bundle is used...
        for ec in changed_ecs:
            if ec['easyblock'] == 'Bundle':
                ec['sources'] = []

        # filter out deprecated easyconfigs
        retained_changed_ecs = []
        for ec in changed_ecs:
            if not ec['deprecated']:
                retained_changed_ecs.append(ec)

        checksum_issues = check_sha256_checksums(retained_changed_ecs,
                                                 whitelist=whitelist)
        self.assertTrue(
            len(checksum_issues) == 0,
            "No checksum issues:\n%s" % '\n'.join(checksum_issues))

    def check_python_packages(self, changed_ecs):
        """Several checks for easyconfigs that install (bundles of) Python packages."""

        # MATLAB-Engine, PyTorch do not support installation with 'pip'
        whitelist_pip = ['MATLAB-Engine-*', 'PyTorch-*']

        failing_checks = []

        for ec in changed_ecs:

            ec_fn = os.path.basename(ec.path)
            easyblock = ec.get('easyblock')
            exts_defaultclass = ec.get('exts_defaultclass')

            download_dep_fail = ec.get('download_dep_fail')
            exts_download_dep_fail = ec.get('exts_download_dep_fail')
            use_pip = ec.get('use_pip')

            # download_dep_fail should be set when using PythonPackage
            if easyblock == 'PythonPackage':
                if not download_dep_fail:
                    failing_checks.append("'download_dep_fail' set in %s" %
                                          ec_fn)

            # use_pip should be set when using PythonPackage or PythonBundle (except for whitelisted easyconfigs)
            if easyblock in ['PythonBundle', 'PythonPackage']:
                if not use_pip and not any(
                        re.match(regex, ec_fn) for regex in whitelist_pip):
                    failing_checks.append("'use_pip' set in %s" % ec_fn)

            # download_dep_fail is enabled automatically in PythonBundle easyblock, so shouldn't be set
            if easyblock == 'PythonBundle':
                if download_dep_fail or exts_download_dep_fail:
                    fail = "'*download_dep_fail' set in %s (shouldn't, since PythonBundle easyblock is used)" % ec_fn
                    failing_checks.append(fail)

            elif exts_defaultclass == 'PythonPackage':
                # bundle of Python packages should use PythonBundle
                if easyblock == 'Bundle':
                    fail = "'PythonBundle' easyblock is used for bundle of Python packages in %s" % ec_fn
                    failing_checks.append(fail)
                else:
                    # both download_dep_fail and use_pip should be set via exts_default_options
                    # when installing Python packages as extensions
                    exts_default_options = ec.get('exts_default_options', {})
                    for key in ['download_dep_fail', 'use_pip']:
                        if not exts_default_options.get(key):
                            failing_checks.append(
                                "'%s' set in exts_default_options in %s" %
                                (key, ec_fn))

            # if Python is a dependency, that should be reflected in the versionsuffix
            if any(dep['name'] == 'Python' for dep in ec['dependencies']):
                if not re.search(r'-Python-[23]\.[0-9]+\.[0-9]+',
                                 ec['versionsuffix']):
                    failing_checks.append(
                        "'-Python-%%(pyver)s' included in versionsuffix in %s"
                        % ec_fn)

        self.assertFalse(failing_checks, '\n'.join(failing_checks))

    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""

        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        if re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST',
                                               '(none)')):

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            travis_branch = os.environ.get('TRAVIS_BRANCH', None)
            if travis_branch and travis_branch != 'master':

                if not self.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(
                    os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                cmd = "git diff --name-only --diff-filter=AM %s...HEAD" % travis_branch
                out, ec = run_cmd(cmd, simple=False)
                changed_ecs_filenames = [
                    os.path.basename(f) for f in out.strip().split('\n')
                    if f.endswith('.eb')
                ]
                print("\nList of changed easyconfig files in this PR: %s" %
                      '\n'.join(changed_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames:
                    match = None
                    for ec in self.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' %
                                          (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs)

    def test_zzz_cleanup(self):
        """Dummy test to clean up global temporary directory."""
        shutil.rmtree(self.TMPDIR)
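
The dep_vars dictionaries used in the dependency-variant checks above are keyed by a 'version: X; versionsuffix: Y' string and map to the set of easyconfig filenames that pull in that variant (see the variant construction in test_dep_versions_per_toolchain_generation). A small sketch of how such a mapping is assembled, using made-up filenames:

# illustration with made-up easyconfig filenames
dep_variants = {}
for dep_ver, dep_versuff, ec_file in [
    ('1.2.3', '', 'foo-1.2.3-GCC-8.3.0.eb'),
    ('1.2.3', '-Python-3.7.4', 'bar-0.1-foss-2019b-Python-3.7.4.eb'),
]:
    variant = "version: %s; versionsuffix: %s" % (dep_ver, dep_versuff)
    dep_variants.setdefault(variant, set()).add(ec_file)
# dep_variants now has one entry per (version, versionsuffix) combination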
Example #44
def suite():
    """Return all easyblock --module-only tests."""
    # initialize configuration (required for e.g. default modules_tool setting)
    cleanup()
    eb_go = eboptions.parse_options(args=['--prefix=%s' % TMPDIR])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': {},
        # enable --force --module-only
        'force': True,
        'module_only': True,
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()

    # dynamically generate a separate test for each of the available easyblocks
    easyblocks_path = get_paths_for("easyblocks")[0]
    all_pys = glob.glob('%s/*/*.py' % easyblocks_path)
    easyblocks = [
        eb for eb in all_pys
        if os.path.basename(eb) != '__init__.py' and '/test/' not in eb
    ]

    # filter out no longer supported easyblocks, or easyblocks that are tested in a different way
    excluded_easyblocks = ['versionindependendpythonpackage.py']
    easyblocks = [
        e for e in easyblocks if os.path.basename(e) not in excluded_easyblocks
    ]

    # add dummy PrgEnv-* modules, required for testing CrayToolchain easyblock
    for prgenv in ['PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']:
        write_file(os.path.join(TMPDIR, 'modules', 'all', prgenv, '1.2.3'),
                   "#%Module")

    # add foo/1.2.3.4.5 module, required for testing the ModuleRC easyblock
    write_file(os.path.join(TMPDIR, 'modules', 'all', 'foo', '1.2.3.4.5'),
               "#%Module")

    for easyblock in easyblocks:
        # dynamically define new inner functions that can be added as class methods to ModuleOnlyTest
        if os.path.basename(easyblock) == 'systemcompiler.py':
            # use GCC as name when testing SystemCompiler easyblock
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', name='GCC', version='system')" % easyblock
        elif os.path.basename(easyblock) == 'systemmpi.py':
            # use OpenMPI as name when testing SystemMPI easyblock
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', name='OpenMPI', version='system')" % easyblock
        elif os.path.basename(easyblock) == 'craytoolchain.py':
            # make sure that a (known) PrgEnv is included as a dependency
            extra_txt = 'dependencies = [("PrgEnv-gnu/1.2.3", EXTERNAL_MODULE)]'
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', extra_txt='%s')" % (
                easyblock, extra_txt)
        elif os.path.basename(easyblock) == 'modulerc.py':
            # exactly one dependency is included with ModuleRC generic easyblock (and name must match)
            extra_txt = 'dependencies = [("foo", "1.2.3.4.5")]'
            code = "def innertest(self): "
            code += "template_module_only_test(self, '%s', version='1.2.3.4', extra_txt='%s')" % (
                easyblock, extra_txt)
        else:
            code = "def innertest(self): template_module_only_test(self, '%s')" % easyblock

        exec(code, globals())

        innertest.__doc__ = "Test for using --module-only with easyblock %s" % easyblock
        innertest.__name__ = "test_easyblock_%s" % '_'.join(
            easyblock.replace('.py', '').split('/'))
        setattr(ModuleOnlyTest, innertest.__name__, innertest)

    return TestLoader().loadTestsFromTestCase(ModuleOnlyTest)
Example #45
class EasyConfigTest(TestCase):
    """Baseclass for easyconfig testcases."""

    # initialize configuration (required for e.g. default modules_tool setting)
    eb_go = eboptions.parse_options()
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'check_osdeps': False,
        'external_modules_metadata': {},
        'force': True,
        'local_var_naming_check': 'error',
        'optarch': 'test',
        'robot_path': get_paths_for("easyconfigs")[0],
        'silent': True,
        'suffix_modules_path': GENERAL_CLASS,
        'valid_module_classes': config.module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    # put dummy 'craype-test' module in place, which is required for parsing easyconfigs using Cray* toolchains
    TMPDIR = tempfile.mkdtemp()
    os.environ['MODULEPATH'] = TMPDIR
    write_file(os.path.join(TMPDIR, 'craype-test'), '#%Module\n')

    log = fancylogger.getLogger("EasyConfigTest", fname=False)

    # make sure a logger is present for main
    eb_main._log = log
    ordered_specs = None
    parsed_easyconfigs = []

    def process_all_easyconfigs(self):
        """Process all easyconfigs and resolve inter-easyconfig dependencies."""
        # all available easyconfig files
        easyconfigs_path = get_paths_for("easyconfigs")[0]
        specs = glob.glob('%s/*/*/*.eb' % easyconfigs_path)

        # parse all easyconfigs if they haven't been already
        if not EasyConfigTest.parsed_easyconfigs:
            for spec in specs:
                EasyConfigTest.parsed_easyconfigs.extend(process_easyconfig(spec))

        # filter out external modules
        for ec in EasyConfigTest.parsed_easyconfigs:
            for dep in ec['dependencies'][:]:
                if dep.get('external_module', False):
                    ec['dependencies'].remove(dep)

        EasyConfigTest.ordered_specs = resolve_dependencies(EasyConfigTest.parsed_easyconfigs, modules_tool(), retain_all_deps=True)

    def test_dep_graph(self):
        """Unit test that builds a full dependency graph."""
        # pygraph dependencies required for constructing dependency graph are not available prior to Python 2.6
        if LooseVersion(sys.version) >= LooseVersion('2.6') and single_tests_ok:
            # temporary file for dep graph
            (hn, fn) = tempfile.mkstemp(suffix='.dot')
            os.close(hn)

            if EasyConfigTest.ordered_specs is None:
                self.process_all_easyconfigs()

            dep_graph(fn, EasyConfigTest.ordered_specs)

            remove_file(fn)
        else:
            print("(skipped dep graph test)")

    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""

        if not single_tests_ok:
            print("(skipped conflicts test)")
            return

        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        self.assertFalse(check_conflicts(EasyConfigTest.ordered_specs, modules_tool(), check_inter_ec_conflicts=False),
                         "No conflicts detected")

    def check_dep_vars(self, dep, dep_vars):
        """Check whether available variants of a particular dependency are acceptable or not."""

        # 'guilty' until proven 'innocent'
        res = False

        # filter out wrapped Java versions
        # i.e. if the version of one is a prefix of the version of the other one (e.g. 1.8 & 1.8.0_181)
        if dep == 'Java':
            dep_vars_to_check = sorted(dep_vars.keys())

            retained_dep_vars = []

            while dep_vars_to_check:
                dep_var = dep_vars_to_check.pop()
                dep_var_version = dep_var.split(';')[0]

                # remove dep vars wrapped by current dep var
                dep_vars_to_check = [x for x in dep_vars_to_check if not x.startswith(dep_var_version + '.')]

                retained_dep_vars = [x for x in retained_dep_vars if not x.startswith(dep_var_version + '.')]

                retained_dep_vars.append(dep_var)

            for key in list(dep_vars.keys()):
                if key not in retained_dep_vars:
                    del dep_vars[key]

        # filter out binutils with empty versionsuffix which is used to build toolchain compiler
        if dep == 'binutils' and len(dep_vars) > 1:
            empty_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: ')]
            if len(empty_vsuff_vars) == 1:
                dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != empty_vsuff_vars[0])

        # multiple variants of HTSlib are OK as long as they are deps for a matching version of BCFtools;
        # same goes for WRF and WPS
        for dep_name, parent_name in [('HTSlib', 'BCFtools'), ('WRF', 'WPS')]:
            if dep == dep_name and len(dep_vars) > 1:
                for key in list(dep_vars):
                    ecs = dep_vars[key]
                    # filter out dep variants that are only used as dependency for parent with same version
                    dep_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                    if all(ec.startswith('%s-%s-' % (parent_name, dep_ver)) for ec in ecs) and len(dep_vars) > 1:
                        dep_vars.pop(key)

        # multiple versions of Boost are OK as long as they are deps for a matching Boost.Python
        if dep == 'Boost' and len(dep_vars) > 1:
            for key in list(dep_vars):
                ecs = dep_vars[key]
                # filter out Boost variants that are only used as dependency for Boost.Python with same version
                boost_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                if all(ec.startswith('Boost.Python-%s-' % boost_ver) for ec in ecs):
                    dep_vars.pop(key)

        # filter out FFTW and imkl with -serial versionsuffix which are used in non-MPI subtoolchains
        if dep in ['FFTW', 'imkl']:
            serial_vsuff_vars = [v for v in dep_vars.keys() if v.endswith('versionsuffix: -serial')]
            if len(serial_vsuff_vars) == 1:
                dep_vars = dict((k, v) for (k, v) in dep_vars.items() if k != serial_vsuff_vars[0])

        # for some dependencies, we allow exceptions for software that depends on a particular version,
        # as long as that's indicated by the versionsuffix
        if dep in ['ASE', 'Boost', 'Java', 'Lua', 'PLUMED', 'R', 'TensorFlow'] and len(dep_vars) > 1:
            for key in list(dep_vars):
                dep_ver = re.search('^version: (?P<ver>[^;]+);', key).group('ver')
                # use version of Java wrapper rather than full Java version
                if dep == 'Java':
                    dep_ver = '.'.join(dep_ver.split('.')[:2])
                # filter out dep version if all easyconfig filenames using it include specific dep version
                if all(re.search('-%s-%s' % (dep, dep_ver), v) for v in dep_vars[key]):
                    dep_vars.pop(key)
                # always retain at least one dep variant
                if len(dep_vars) == 1:
                    break

            # filter R dep for a specific version of Python 2.x
            if dep == 'R' and len(dep_vars) > 1:
                for key in list(dep_vars):
                    if '; versionsuffix: -Python-2' in key:
                        dep_vars.pop(key)
                    # always retain at least one variant
                    if len(dep_vars) == 1:
                        break

        # filter out variants that are specific to a particular version of CUDA
        cuda_dep_vars = [v for v in dep_vars.keys() if '-CUDA' in v]
        if len(dep_vars) > len(cuda_dep_vars):
            for key in list(dep_vars):
                if re.search('; versionsuffix: .*-CUDA-[0-9.]+', key):
                    dep_vars.pop(key)

        # some software packages require an old version of a particular dependency
        old_dep_versions = {
            # libxc 2.x or 3.x is required by ABINIT, AtomPAW, CP2K, GPAW, horton, PySCF, WIEN2k
            # (Qiskit depends on PySCF)
            'libxc': (r'[23]\.', ['ABINIT-', 'AtomPAW-', 'CP2K-', 'GPAW-', 'horton-', 'PySCF-', 'Qiskit-', 'WIEN2k-']),
            # OPERA requires SAMtools 0.x
            'SAMtools': (r'0\.', ['ChimPipe-0.9.5', 'Cufflinks-2.2.1', 'OPERA-2.0.6']),
            # Kraken 1.x requires Jellyfish 1.x (Roary & metaWRAP depend on Kraken 1.x)
            'Jellyfish': (r'1\.', ['Kraken-1.', 'Roary-3.12.0', 'metaWRAP-1.2']),
            # EMAN2 2.3 requires Boost(.Python) 1.64.0
            'Boost': ('1.64.0;', ['Boost.Python-1.64.0-', 'EMAN2-2.3-']),
            'Boost.Python': ('1.64.0;', ['EMAN2-2.3-']),
        }
        if dep in old_dep_versions and len(dep_vars) > 1:
            for key in list(dep_vars):
                version_pattern, parents = old_dep_versions[dep]
                # filter out known old dependency versions
                if re.search('^version: %s' % version_pattern, key):
                    # only filter if the easyconfigs using this dep variant are known
                    if all(any(x.startswith(p) for p in parents) for x in dep_vars[key]):
                        dep_vars.pop(key)

        # only single variant is always OK
        if len(dep_vars) == 1:
            res = True

        elif len(dep_vars) == 2 and dep in ['Python', 'Tkinter']:
            # for Python & Tkinter, it's OK to have on 2.x and one 3.x version
            v2_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 2.')]
            v3_dep_vars = [x for x in dep_vars.keys() if x.startswith('version: 3.')]
            if len(v2_dep_vars) == 1 and len(v3_dep_vars) == 1:
                res = True

        # two variants is OK if one is for Python 2.x and the other is for Python 3.x (based on versionsuffix)
        elif len(dep_vars) == 2:
            py2_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-2.' in x]
            py3_dep_vars = [x for x in dep_vars.keys() if '; versionsuffix: -Python-3.' in x]
            if len(py2_dep_vars) == 1 and len(py3_dep_vars) == 1:
                res = True

        return res

    def test_check_dep_vars(self):
        """Test check_dep_vars utility method."""

        # one single dep version: OK
        self.assertTrue(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb', 'bar-4.5.6.eb'],
        }))
        self.assertTrue(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix: -test': ['foo-1.2.3.eb', 'bar-4.5.6.eb'],
        }))

        # two or more dep versions (no special case: not OK)
        self.assertFalse(self.check_dep_vars('testdep', {
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 4.5.6; versionsuffix:': ['bar-4.5.6.eb'],
        }))
        self.assertFalse(self.check_dep_vars('testdep', {
            'version: 0.0; versionsuffix:': ['foobar-0.0.eb'],
            'version: 1.2.3; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 4.5.6; versionsuffix:': ['bar-4.5.6.eb'],
        }))

        # Java is a special case, with wrapped Java versions
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
        }))
        # two Java wrappers is not OK
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))
        # OK to have two or more wrappers if versionsuffix is used to indicate exception
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))
        # versionsuffix must be there for all easyconfigs to indicate exception
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb', 'bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb', 'bar-4.5.6.eb'],
        }))
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 12.1.6; versionsuffix:': ['foobar-0.0-Java-12.eb'],
            'version: 12; versionsuffix:': ['foobar-0.0-Java-12.eb'],
        }))

        # strange situation: odd number of Java versions
        # not OK: two Java wrappers (and no versionsuffix to indicate exception)
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))
        # OK because of -Java-11 versionsuffix
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8.0_221; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))
        # not OK: two Java wrappers (and no versionsuffix to indicate exception)
        self.assertFalse(self.check_dep_vars('Java', {
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6.eb'],
        }))
        # OK because of -Java-11 versionsuffix
        self.assertTrue(self.check_dep_vars('Java', {
            'version: 1.8; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 11.0.2; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
            'version: 11; versionsuffix:': ['bar-4.5.6-Java-11.eb'],
        }))

        # two different versions of Boost is not OK
        self.assertFalse(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))

        # a different Boost version that is only used as dependency for a matching Boost.Python is fine
        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2019a.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))
        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2018b.eb'],
            'version: 1.66.0; versionsuffix:': ['Boost.Python-1.66.0-gompi-2019a.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))
        self.assertFalse(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2019a.eb'],
            'version: 1.66.0; versionsuffix:': ['foo-1.2.3.eb'],
            'version: 1.70.0; versionsuffix:': ['foo-2.3.4.eb'],
        }))

        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.63.0; versionsuffix: -Python-2.7.14': ['EMAN2-2.21a-foss-2018a-Python-2.7.14-Boost-1.63.0.eb'],
            'version: 1.64.0; versionsuffix:': ['Boost.Python-1.64.0-gompi-2018a.eb'],
            'version: 1.66.0; versionsuffix:': ['BLAST+-2.7.1-foss-2018a.eb'],
        }))

        self.assertTrue(self.check_dep_vars('Boost', {
            'version: 1.64.0; versionsuffix:': [
                'Boost.Python-1.64.0-gompi-2019a.eb',
                'EMAN2-2.3-foss-2019a-Python-2.7.15.eb',
            ],
            'version: 1.70.0; versionsuffix:': [
                'BLAST+-2.9.0-gompi-2019a.eb',
                'Boost.Python-1.70.0-gompi-2019a.eb',
            ],
        }))
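
For reference, the variants mapping passed to check_dep_vars in the assertions above has the same shape as the one built further down in test_dep_versions_per_toolchain_generation: keys are "version: X; versionsuffix: Y" strings, values are the easyconfig filenames that pull in that variant. A minimal sketch of constructing such a mapping (the helper name is made up for illustration):

def make_dep_vars(deps):
    """Illustrative sketch only (not part of the test suite): build a
    check_dep_vars-style mapping from (version, versionsuffix, filename) tuples."""
    dep_vars = {}
    for version, versionsuffix, ec_fn in deps:
        # a variant is defined by version + versionsuffix
        variant = "version: %s; versionsuffix: %s" % (version, versionsuffix)
        dep_vars.setdefault(variant, set()).add(ec_fn)
    return dep_vars

# e.g. one easyconfig pulling in both the wrapped and the full Java version:
# make_dep_vars([('1.8', '', 'foo-1.2.3.eb'), ('1.8.0_221', '', 'foo-1.2.3.eb')])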

    def test_dep_versions_per_toolchain_generation(self):
        """
        Check whether there's only one dependency version per toolchain generation actively used.
        This is enforced to try and limit the chance of running into conflicts when multiple modules built with
        the same toolchain are loaded together.
        """
        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        def get_deps_for(ec):
            """Get list of (direct) dependencies for specified easyconfig."""
            deps = []
            for dep in ec['ec']['dependencies']:
                dep_mod_name = dep['full_mod_name']
                deps.append((dep['name'], dep['version'], dep['versionsuffix'], dep_mod_name))
                res = [x for x in EasyConfigTest.ordered_specs if x['full_mod_name'] == dep_mod_name]
                if len(res) == 1:
                    deps.extend(get_deps_for(res[0]))
                else:
                    raise EasyBuildError("Failed to find %s in ordered list of easyconfigs", dep_mod_name)

            return deps

        # some software also follows <year>{a,b} versioning scheme,
        # which throws off the pattern matching done below for toolchain versions
        false_positives_regex = re.compile('^MATLAB-Engine-20[0-9][0-9][ab]')

        # restrict to checking dependencies of easyconfigs using common toolchains (start with 2018a)
        # and GCCcore subtoolchain for common toolchains, starting with GCCcore 7.x
        for pattern in [r'201[89][ab]', r'20[2-9][0-9][ab]', r'GCCcore-[7-9]\.[0-9]']:
            all_deps = {}
            regex = re.compile(r'^.*-(?P<tc_gen>%s).*\.eb$' % pattern)

            # collect variants for all dependencies of easyconfigs that use a toolchain that matches
            for ec in EasyConfigTest.ordered_specs:
                ec_file = os.path.basename(ec['spec'])

                # take into account software which also follows a <year>{a,b} versioning scheme
                ec_file = false_positives_regex.sub('', ec_file)

                res = regex.match(ec_file)
                if res:
                    tc_gen = res.group('tc_gen')
                    all_deps_tc_gen = all_deps.setdefault(tc_gen, {})
                    for dep_name, dep_ver, dep_versuff, dep_mod_name in get_deps_for(ec):
                        dep_variants = all_deps_tc_gen.setdefault(dep_name, {})
                        # a variant is defined by version + versionsuffix
                        variant = "version: %s; versionsuffix: %s" % (dep_ver, dep_versuff)
                        # keep track of which easyconfigs this is a dependency for
                        dep_variants.setdefault(variant, set()).add(ec_file)

            # check which dependencies have more than 1 variant
            multi_dep_vars, multi_dep_vars_msg = [], ''
            for tc_gen in sorted(all_deps.keys()):
                for dep in sorted(all_deps[tc_gen].keys()):
                    dep_vars = all_deps[tc_gen][dep]
                    if not self.check_dep_vars(dep, dep_vars):
                        multi_dep_vars.append(dep)
                        multi_dep_vars_msg += "\nfound %s variants of '%s' dependency " % (len(dep_vars), dep)
                        multi_dep_vars_msg += "in easyconfigs using '%s' toolchain generation\n* " % tc_gen
                        multi_dep_vars_msg += '\n* '.join("%s as dep for %s" % v for v in sorted(dep_vars.items()))
                        multi_dep_vars_msg += '\n'

            error_msg = "Found multi-variant deps for '%s' easyconfigs:\n%s" % (regex.pattern, multi_dep_vars_msg)
            self.assertFalse(multi_dep_vars, error_msg)
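
To make the pattern matching above concrete, here is a small standalone illustration of how a toolchain generation is extracted from an easyconfig filename (the filenames below are made up):

import re

# standalone illustration only; same shape as the patterns used above
regex = re.compile(r'^.*-(?P<tc_gen>201[89][ab]).*\.eb$')
for fn in ['h5py-2.8.0-foss-2018b-Python-3.6.6.eb', 'zlib-1.2.11-GCCcore-7.3.0.eb']:
    res = regex.match(fn)
    print(fn, '->', res.group('tc_gen') if res else None)
# prints '2018b' for the first filename, None for the second (that one is caught by the GCCcore pattern instead)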

    def test_sanity_check_paths(self):
        """Make sure specified sanity check paths adhere to the requirements."""

        if EasyConfigTest.ordered_specs is None:
            self.process_all_easyconfigs()

        for ec in EasyConfigTest.parsed_easyconfigs:
            ec_scp = ec['ec']['sanity_check_paths']
            if ec_scp != {}:
                # if sanity_check_paths is specified (i.e., non-default), it must adhere to the requirements:
                # both 'files' and 'dirs' keys, both with list values, and at least one of them a non-empty list
                error_msg = "sanity_check_paths for %s does not meet requirements: %s" % (ec['spec'], ec_scp)
                self.assertEqual(sorted(ec_scp.keys()), ['dirs', 'files'], error_msg)
                self.assertTrue(isinstance(ec_scp['dirs'], list), error_msg)
                self.assertTrue(isinstance(ec_scp['files'], list), error_msg)
                self.assertTrue(ec_scp['dirs'] or ec_scp['files'], error_msg)
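
For illustration, a custom sanity_check_paths value that satisfies the requirements checked above (the actual file and directory names are just examples):

# illustrative fragment: both 'files' and 'dirs' keys with list values,
# and at least one of the two lists non-empty
sanity_check_paths = {
    'files': ['bin/foo', 'lib/libfoo.a'],
    'dirs': ['include'],
}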

    def test_easyconfig_locations(self):
        """Make sure all easyconfigs files are in the right location."""
        easyconfig_dirs_regex = re.compile(r'/easybuild/easyconfigs/[0a-z]/[^/]+$')
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
        for (dirpath, _, filenames) in os.walk(topdir):
            # ignore git/svn dirs & archived easyconfigs
            if '/.git/' in dirpath or '/.svn/' in dirpath or '__archive__' in dirpath:
                continue
            # check whether list of .eb files is non-empty
            easyconfig_files = [fn for fn in filenames if fn.endswith('.eb')]
            if easyconfig_files:
                # check whether path matches required pattern
                if not easyconfig_dirs_regex.search(dirpath):
                    # only exception: TEMPLATE.eb
                    if not (dirpath.endswith('/easybuild/easyconfigs') and filenames == ['TEMPLATE.eb']):
                        self.assertTrue(False, "Easyconfig files in unexpected location %s: %s" % (dirpath, filenames))
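
The location requirement boils down to the directory layout encoded in easyconfig_dirs_regex; a quick standalone illustration (the paths below are made up):

import re

# illustration only; paths are made up
easyconfig_dirs_regex = re.compile(r'/easybuild/easyconfigs/[0a-z]/[^/]+$')
# a correctly placed easyconfig directory (one-letter subdirectory) matches ...
assert easyconfig_dirs_regex.search('/repo/easybuild/easyconfigs/g/GCC')
# ... while a directory that skips the one-letter level does not
assert not easyconfig_dirs_regex.search('/repo/easybuild/easyconfigs/GCC')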

    def check_sha256_checksums(self, changed_ecs):
        """Make sure changed easyconfigs have SHA256 checksums in place."""

        # list of software for which checksums can not be required,
        # e.g. because 'source' files need to be constructed manually
        whitelist = ['Kent_tools-*', 'MATLAB-*', 'OCaml-*']

        # for easyconfigs using the Bundle easyblock, the check_sha256_checksums function (again) creates
        # an EasyBlock instance; this is a problem because the 'sources' easyconfig parameter is updated
        # in place in Bundle's __init__ (sources for components are added to the 'parent' sources);
        # therefore, we need to reset 'sources' to an empty list here if Bundle is used...
        # likewise for 'patches' and 'checksums'
        for ec in changed_ecs:
            if ec['easyblock'] == 'Bundle':
                ec['sources'] = []
                ec['patches'] = []
                ec['checksums'] = []

        # filter out deprecated easyconfigs
        retained_changed_ecs = []
        for ec in changed_ecs:
            if not ec['deprecated']:
                retained_changed_ecs.append(ec)

        checksum_issues = check_sha256_checksums(retained_changed_ecs, whitelist=whitelist)
        self.assertTrue(len(checksum_issues) == 0, "Found SHA256 checksum issues:\n%s" % '\n'.join(checksum_issues))
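
In practice this boils down to every source (and patch) in a changed easyconfig carrying a SHA256 checksum; an illustrative easyconfig fragment (the hash below is a dummy placeholder, not a real checksum):

# illustrative fragment, not a real easyconfig; the hash below is a dummy placeholder
sources = ['example-1.2.3.tar.gz']
checksums = ['0f1e2d3c4b5a69788796a5b4c3d2e1f00f1e2d3c4b5a69788796a5b4c3d2e1f0']  # sha256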

    def check_python_packages(self, changed_ecs, added_ecs_filenames):
        """Several checks for easyconfigs that install (bundles of) Python packages."""

        # These packages do not support installation with 'pip'
        whitelist_pip = [r'MATLAB-Engine-.*', r'PyTorch-.*', r'Meld-.*']

        failing_checks = []

        for ec in changed_ecs:

            ec_fn = os.path.basename(ec.path)
            easyblock = ec.get('easyblock')
            exts_defaultclass = ec.get('exts_defaultclass')
            exts_default_options = ec.get('exts_default_options', {})

            download_dep_fail = ec.get('download_dep_fail')
            exts_download_dep_fail = ec.get('exts_download_dep_fail')
            use_pip = ec.get('use_pip')

            # download_dep_fail should be set when using PythonPackage
            if easyblock == 'PythonPackage':
                if download_dep_fail is None:
                    failing_checks.append("'download_dep_fail' should be set in %s" % ec_fn)

            # use_pip should be set when using PythonPackage or PythonBundle (except for whitelisted easyconfigs)
            if easyblock in ['PythonBundle', 'PythonPackage']:
                if use_pip is None and not any(re.match(regex, ec_fn) for regex in whitelist_pip):
                    failing_checks.append("'use_pip' should be set in %s" % ec_fn)

            # download_dep_fail is enabled automatically in PythonBundle easyblock, so shouldn't be set
            if easyblock == 'PythonBundle':
                if download_dep_fail or exts_download_dep_fail:
                    fail = "'*download_dep_fail' set in %s (shouldn't, since PythonBundle easyblock is used)" % ec_fn
                    failing_checks.append(fail)

            elif exts_defaultclass == 'PythonPackage':
                # bundle of Python packages should use PythonBundle
                if easyblock == 'Bundle':
                    fail = "'PythonBundle' easyblock should be used for bundle of Python packages in %s" % ec_fn
                    failing_checks.append(fail)
                else:
                    # both download_dep_fail and use_pip should be set via exts_default_options
                    # when installing Python packages as extensions
                    for key in ['download_dep_fail', 'use_pip']:
                        if exts_default_options.get(key) is None:
                            failing_checks.append("'%s' should be set in exts_default_options in %s" % (key, ec_fn))

            # if Python is a dependency, that should be reflected in the versionsuffix
            # Tkinter is an exception, since its version always matches the Python version anyway
            if any(dep['name'] == 'Python' for dep in ec['dependencies']) and ec.name != 'Tkinter':
                if not re.search(r'-Python-[23]\.[0-9]+\.[0-9]+', ec['versionsuffix']):
                    msg = "'-Python-%%(pyver)s' should be included in versionsuffix in %s" % ec_fn
                    # this is only a hard failure for newly added easyconfigs, not for existing ones,
                    # since enforcing it retroactively would likely break many easyconfigs
                    if ec_fn in added_ecs_filenames:
                        failing_checks.append(msg)
                    else:
                        print('\nNote: Failed non-critical check: ' + msg)

            # require that running of "pip check" during sanity check is enabled via sanity_pip_check
            if use_pip and easyblock in ['PythonBundle', 'PythonPackage']:
                sanity_pip_check = ec.get('sanity_pip_check') or exts_default_options.get('sanity_pip_check')
                if not sanity_pip_check and not any(re.match(regex, ec_fn) for regex in whitelist_pip):
                    failing_checks.append("sanity_pip_check should be enabled in %s" % ec_fn)

        self.assertFalse(failing_checks, '\n'.join(failing_checks))
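
Taken together, the checks above expect a (non-whitelisted) Python package easyconfig to carry roughly the parameters below; this is an illustrative fragment rather than a complete easyconfig:

# illustrative fragment for a PythonPackage-based easyconfig (values are examples)
easyblock = 'PythonPackage'
versionsuffix = '-Python-%(pyver)s'  # expected when Python is a dependency (Tkinter excepted)
use_pip = True
download_dep_fail = True
sanity_pip_check = True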

    def check_sanity_check_paths(self, changed_ecs):
        """Make sure a custom sanity_check_paths value is specified for easyconfigs that use a generic easyblock."""

        # PythonBundle & PythonPackage already have a decent customised sanity_check_paths
        # BuildEnv, ModuleRC and Toolchain easyblocks don't install anything, so there is nothing to check.
        whitelist = ['CrayToolchain', 'ModuleRC', 'PythonBundle', 'PythonPackage', 'Toolchain', 'BuildEnv']
        # Autotools & (recent) GCC are just bundles (Autotools: Autoconf+Automake+libtool, GCC: GCCcore+binutils)
        bundles_whitelist = ['Autotools', 'GCC']

        failing_checks = []

        for ec in changed_ecs:

            easyblock = ec.get('easyblock')

            if is_generic_easyblock(easyblock) and not ec.get('sanity_check_paths'):
                if easyblock in whitelist or (easyblock == 'Bundle' and ec['name'] in bundles_whitelist):
                    pass
                else:
                    ec_fn = os.path.basename(ec.path)
                    failing_checks.append("No custom sanity_check_paths found in %s" % ec_fn)

        self.assertFalse(failing_checks, '\n'.join(failing_checks))

    def check_https(self, changed_ecs):
        """Make sure https:// URL is used (if it exists) for homepage/source_urls (rather than http://)."""

        whitelist = [
            'Kaiju',  # invalid certificate at https://kaiju.binf.ku.dk
            'libxml2',  # https://xmlsoft.org works, but invalid certificate
            'p4vasp',  # https://www.p4vasp.at doesn't work
            'ITSTool',  # https://itstool.org/ doesn't work
            'UCX-',  # bad certificate for https://www.openucx.org
        ]

        http_regex = re.compile('http://[^"\'\n]+', re.M)

        failing_checks = []
        for ec in changed_ecs:
            ec_fn = os.path.basename(ec.path)

            # skip whitelisted easyconfigs
            if any(ec_fn.startswith(x) for x in whitelist):
                continue

            # ignore commented out lines in easyconfig files when checking for http:// URLs
            ec_txt = '\n'.join(line for line in ec.rawtxt.split('\n') if not line.startswith('#'))

            for http_url in http_regex.findall(ec_txt):
                https_url = http_url.replace('http://', 'https://')
                try:
                    https_url_works = bool(urlopen(https_url, timeout=5))
                except Exception:
                    https_url_works = False

                if https_url_works:
                    failing_checks.append("Found http:// URL in %s, should be https:// : %s" % (ec_fn, http_url))

        self.assertFalse(failing_checks, '\n'.join(failing_checks))
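
A small standalone illustration of the URL scan above: commented-out lines are dropped before searching, and each remaining http:// URL would then be probed over https (the easyconfig text below is made up):

import re

# standalone illustration; the easyconfig text below is made up
http_regex = re.compile('http://[^"\'\n]+', re.M)
rawtxt = '\n'.join([
    "# see also http://example.org/old-mirror",  # commented out, so ignored
    "homepage = 'http://example.org'",
])
ec_txt = '\n'.join(line for line in rawtxt.split('\n') if not line.startswith('#'))
print(http_regex.findall(ec_txt))  # ['http://example.org']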

    def test_changed_files_pull_request(self):
        """Specific checks only done for the (easyconfig) files that were changed in a pull request."""
        def get_eb_files_from_diff(diff_filter):
            cmd = "git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch)
            out, ec = run_cmd(cmd, simple=False)
            return [os.path.basename(f) for f in out.strip().split('\n') if f.endswith('.eb')]


        # $TRAVIS_PULL_REQUEST should be a PR number, otherwise we're not running tests for a PR
        travis_pr_test = re.match('^[0-9]+$', os.environ.get('TRAVIS_PULL_REQUEST', '(none)'))

        # when testing a PR in GitHub Actions, $GITHUB_EVENT_NAME will be set to 'pull_request'
        github_pr_test = os.environ.get('GITHUB_EVENT_NAME') == 'pull_request'

        if travis_pr_test or github_pr_test:

            # target branch should be anything other than 'master';
            # usually is 'develop', but could also be a release branch like '3.7.x'
            if travis_pr_test:
                target_branch = os.environ.get('TRAVIS_BRANCH', None)
            else:
                target_branch = os.environ.get('GITHUB_BASE_REF', None)

            if target_branch is None:
                self.assertTrue(False, "Failed to determine target branch for current pull request.")

            if target_branch != 'master':

                if not EasyConfigTest.parsed_easyconfigs:
                    self.process_all_easyconfigs()

                # relocate to top-level directory of repository to run 'git diff' command
                top_dir = os.path.dirname(os.path.dirname(get_paths_for('easyconfigs')[0]))
                cwd = change_dir(top_dir)

                # get list of changed easyconfigs
                changed_ecs_filenames = get_eb_files_from_diff(diff_filter='M')
                added_ecs_filenames = get_eb_files_from_diff(diff_filter='A')
                if changed_ecs_filenames:
                    print("\nList of changed easyconfig files in this PR: %s" % '\n'.join(changed_ecs_filenames))
                if added_ecs_filenames:
                    print("\nList of added easyconfig files in this PR: %s" % '\n'.join(added_ecs_filenames))

                change_dir(cwd)

                # grab parsed easyconfigs for changed easyconfig files
                changed_ecs = []
                for ec_fn in changed_ecs_filenames + added_ecs_filenames:
                    match = None
                    for ec in EasyConfigTest.parsed_easyconfigs:
                        if os.path.basename(ec['spec']) == ec_fn:
                            match = ec['ec']
                            break

                    if match:
                        changed_ecs.append(match)
                    else:
                        # if no easyconfig is found, it's possible some archived easyconfigs were touched in the PR...
                        # so as a last resort, try to find the easyconfig file in __archive__
                        easyconfigs_path = get_paths_for("easyconfigs")[0]
                        specs = glob.glob('%s/__archive__/*/*/%s' % (easyconfigs_path, ec_fn))
                        if len(specs) == 1:
                            ec = process_easyconfig(specs[0])[0]
                            changed_ecs.append(ec['ec'])
                        else:
                            error_msg = "Failed to find parsed easyconfig for %s" % ec_fn
                            error_msg += " (and could not isolate it in easyconfigs archive either)"
                            self.assertTrue(False, error_msg)

                # run checks on changed easyconfigs
                self.check_sha256_checksums(changed_ecs)
                self.check_python_packages(changed_ecs, added_ecs_filenames)
                self.check_sanity_check_paths(changed_ecs)
                self.check_https(changed_ecs)
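
The diff-filter values used above ('M' for modified files, 'A' for added files) plug into the git command built by get_eb_files_from_diff; purely for illustration ('develop' is just an example target branch):

# illustrative only: the command strings that would be run for an example target branch
target_branch = 'develop'
for diff_filter in ['M', 'A']:
    print("git diff --name-only --diff-filter=%s %s...HEAD" % (diff_filter, target_branch))
# git diff --name-only --diff-filter=M develop...HEAD
# git diff --name-only --diff-filter=A develop...HEAD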

    def test_zzz_cleanup(self):
        """Dummy test to clean up global temporary directory."""
        shutil.rmtree(self.TMPDIR)

    def test_list_software(self):
        """Test list_software* functions."""
        build_options = {
            'robot_path': [os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'v1.0')],
            'silent': True,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)

        expected = '\n'.join([
            '',
            '* GCC',
            '* gzip',
        ])
        self.assertEqual(list_software(output_format='txt'), expected)

        expected = re.compile('\n'.join([
            r'',
            r'\* GCC',
            r'',
            r"The GNU Compiler Collection .*",
            r'',
            r'homepage: http://gcc.gnu.org/',
            r'',
            r'  \* GCC v4.6.3: dummy',
            r'',
            r'\* gzip',
            r'',
            r"gzip \(GNU zip\) is .*",
            r'',
            r'homepage: http://www.gzip.org/',
            r'',
            r"  \* gzip v1.4: GCC/4.6.3, dummy",
            r"  \* gzip v1.5: goolf/1.4.10, ictce/4.1.13",
            '',
        ]))
        txt = list_software(output_format='txt', detailed=True)
        self.assertTrue(expected.match(txt),
                        "Pattern '%s' found in: %s" % (expected.pattern, txt))

        expected = '\n'.join([
            "List of supported software",
            "==========================",
            '',
            "EasyBuild |version| supports 2 different software packages (incl. toolchains, bundles):",
            '',
            ':ref:`list_software_letter_g`',
            '',
            '',
            '.. _list_software_letter_g:',
            '',
            '*G*',
            '---',
            '',
            '* GCC',
            '* gzip',
        ])
        self.assertEqual(list_software(output_format='rst'), expected)

        expected = re.compile('\n'.join([
            r"List of supported software",
            r"==========================",
            r'',
            r"EasyBuild \|version\| supports 2 different software packages \(incl. toolchains, bundles\):",
            r'',
            r':ref:`list_software_letter_g`',
            r'',
            r'',
            r'.. _list_software_letter_g:',
            r'',
            r'\*G\*',
            r'---',
            r'',
            r'',
            r':ref:`list_software_GCC_205` - :ref:`list_software_gzip_442`',
            r'',
            r'',
            r'\.\. _list_software_GCC_205:',
            r'',
            r'\*GCC\*',
            r'\+\+\+\+\+',
            r'',
            r'The GNU Compiler Collection .*',
            r'',
            r'\*homepage\*: http://gcc.gnu.org/',
            r'',
            r'=========    =========',
            r'version      toolchain',
            r'=========    =========',
            r'``4.6.3``    ``dummy``',
            r'=========    =========',
            r'',
            r'',
            r'\.\. _list_software_gzip_442:',
            r'',
            r'\*gzip\*',
            r'\+\+\+\+\+\+',
            r'',
            r'gzip \(GNU zip\) is a popular .*',
            r'',
            r'\*homepage\*: http://www.gzip.org/',
            r'',
            r'=======    ==================================',
            r'version    toolchain                         ',
            r'=======    ==================================',
            r'``1.4``    ``GCC/4.6.3``, ``dummy``          ',
            r'``1.5``    ``goolf/1.4.10``, ``ictce/4.1.13``',
            r'=======    ==================================',
        ]))
        txt = list_software(output_format='rst', detailed=True)
        self.assertTrue(expected.match(txt),
                        "Pattern '%s' found in: %s" % (expected.pattern, txt))

        # GCC/4.6.3 is installed, no gzip module installed
        txt = list_software(output_format='txt',
                            detailed=True,
                            only_installed=True)
        self.assertTrue(re.search(r'^\* GCC', txt, re.M))
        self.assertTrue(re.search(r'^\s*\* GCC v4.6.3: dummy', txt, re.M))
        self.assertFalse(re.search(r'^\* gzip', txt, re.M))
        self.assertFalse(re.search(r'gzip v1\.', txt, re.M))

        txt = list_software(output_format='rst',
                            detailed=True,
                            only_installed=True)
        self.assertTrue(re.search(r'^\*GCC\*', txt, re.M))
        self.assertTrue(re.search(r'4\.6\.3.*dummy', txt, re.M))
        self.assertFalse(re.search(r'^\*gzip\*', txt, re.M))
        self.assertFalse(re.search(r'1\.4', txt, re.M))
        self.assertFalse(re.search(r'1\.5', txt, re.M))

        # check for specific patterns in output for larger set of test easyconfigs
        build_options = {
            'robot_path': [os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')],
            'silent': True,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)

        expected = [
            '* toy',
            '',
            'Toy C program.',
            '',
            'homepage: http://hpcugent.github.com/easybuild',
            '',
            "  * toy v0.0: dummy",
            "  * toy v0.0 (versionsuffix: '-deps'): dummy",
            "  * toy v0.0 (versionsuffix: '-iter'): dummy",
            "  * toy v0.0 (versionsuffix: '-multiple'): dummy",
            "  * toy v0.0 (versionsuffix: '-test'): gompi/1.3.12",
        ]
        txt = list_software(output_format='txt', detailed=True)
        lines = txt.split('\n')
        expected_found = any([
            lines[i:i + len(expected)] == expected for i in range(len(lines))
        ])
        self.assertTrue(expected_found, "%s found in: %s" % (expected, lines))

        expected = [
            '*toy*',
            '+++++',
            '',
            'Toy C program.',
            '',
            '*homepage*: http://hpcugent.github.com/easybuild',
            '',
            '=======    =============    ================',
            'version    versionsuffix    toolchain       ',
            '=======    =============    ================',
            '``0.0``                     ``dummy``       ',
            '``0.0``    ``-deps``        ``dummy``       ',
            '``0.0``    ``-iter``        ``dummy``       ',
            '``0.0``    ``-multiple``    ``dummy``       ',
            '``0.0``    ``-test``        ``gompi/1.3.12``',
            '=======    =============    ================',
        ]
        txt = list_software(output_format='rst', detailed=True)
        lines = txt.split('\n')
        expected_found = any([
            lines[i:i + len(expected)] == expected for i in range(len(lines))
        ])
        self.assertTrue(expected_found, "%s found in: %s" % (expected, lines))
Example #47
0
    def test_check_conflicts(self):
        """Test check_conflicts function."""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'robot_path': test_easyconfigs,
            'valid_module_classes': module_classes(),
            'validate': False,
        })

        gzip_ec = os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.5-goolf-1.4.10.eb')
        gompi_ec = os.path.join(test_easyconfigs, 'g', 'gompi', 'gompi-1.4.10.eb')
        ecs, _ = parse_easyconfigs([(gzip_ec, False), (gompi_ec, False)])

        # no conflicts found, no output to stderr
        self.mock_stderr(True)
        conflicts = check_conflicts(ecs, self.modtool)
        stderr = self.get_stderr()
        self.mock_stderr(False)
        self.assertFalse(conflicts)
        self.assertEqual(stderr, '')

        # change GCC version in gompi dependency, to inject a conflict
        gompi_ec_txt = read_file(gompi_ec)
        new_gompi_ec = os.path.join(self.test_prefix, 'gompi.eb')
        write_file(new_gompi_ec, gompi_ec_txt.replace('4.7.2', '4.6.4'))

        ecs, _ = parse_easyconfigs([(new_gompi_ec, False), (gzip_ec, False)])

        # conflicts are found and reported to stderr
        self.mock_stderr(True)
        conflicts = check_conflicts(ecs, self.modtool)
        stderr = self.get_stderr()
        self.mock_stderr(False)

        self.assertTrue(conflicts)
        self.assertTrue("Conflict found for dependencies of goolf-1.4.10: GCC-4.6.4 vs GCC-4.7.2" in stderr)

        # conflicts between specified easyconfigs are also detected

        # direct conflict on software version
        ecs, _ = parse_easyconfigs([
            (os.path.join(test_easyconfigs, 'g', 'GCC', 'GCC-4.7.2.eb'), False),
            (os.path.join(test_easyconfigs, 'g', 'GCC', 'GCC-4.9.3-2.25.eb'), False),
        ])
        self.mock_stderr(True)
        conflicts = check_conflicts(ecs, self.modtool)
        stderr = self.get_stderr()
        self.mock_stderr(False)

        self.assertTrue(conflicts)
        self.assertTrue("Conflict between (dependencies of) easyconfigs: GCC-4.7.2 vs GCC-4.9.3-2.25" in stderr)

        # indirect conflict on dependencies
        ecs, _ = parse_easyconfigs([
            (os.path.join(test_easyconfigs, 'b', 'bzip2', 'bzip2-1.0.6-GCC-4.9.2.eb'), False),
            (os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.6.4.eb'), False),
        ])
        self.mock_stderr(True)
        conflicts = check_conflicts(ecs, self.modtool)
        stderr = self.get_stderr()
        self.mock_stderr(False)

        self.assertTrue(conflicts)
        self.assertTrue("Conflict between (dependencies of) easyconfigs: GCC-4.6.4 vs GCC-4.9.2" in stderr)

        # test use of check_inter_ec_conflicts
        self.assertFalse(check_conflicts(ecs, self.modtool, check_inter_ec_conflicts=False), "No conflicts found")

    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.5-foss-2018a.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'", ordered_ecs, prepare_first=False)
        # only one job submitted since foss/2018a module is already available
        self.assertEqual(len(jobs), 1)
        regex = re.compile("echo '.*/gzip-1.5-foss-2018a.eb'")
        self.assertTrue(regex.search(jobs[-1].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip', 'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool, retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(regex.search(jobs[i].script), "Pattern '%s' found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and intel/2018a (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is intel/2018a (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('intel-2018a.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0.0-deps\.eb.* --hidden')
        script_txt = jobs[3].deps[0].script
        fail_msg = "Pattern '%s' should be found in: %s" % (regex.pattern, script_txt)
        self.assertTrue(regex.search(script_txt), fail_msg)
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # also test use of --pre-create-installdir
        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file), self.modtool)

        # installation directory doesn't exist yet before submission
        toy_installdir = os.path.join(self.test_installpath, 'software', 'toy', '0.0')
        self.assertFalse(os.path.exists(toy_installdir))

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)

        # software install dir is created (by default) as part of job submission process (fetch_step is run)
        self.assertTrue(os.path.exists(toy_installdir))
        remove_dir(toy_installdir)
        remove_dir(os.path.dirname(toy_installdir))
        self.assertFalse(os.path.exists(toy_installdir))

        # installation directory does *not* get created when pre_create_installdir is disabled
        build_options['pre_create_installdir'] = False
        init_config(args=['--job-backend=PbsPython'], build_options=build_options)

        jobs = submit_jobs(ordered_ecs, '', testing=False)
        self.assertEqual(len(jobs), 1)
        self.assertFalse(os.path.exists(toy_installdir))

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
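
The save/patch/restore sequence above is plain manual monkeypatching; a hedged sketch of the same idea using try/finally, so the originals are restored even if an assertion fails halfway (the class below is a stand-in, not part of the test suite):

class FakeBackend:
    """Stand-in for a job backend class such as PbsPython (illustration only)."""
    def ppn(self):
        return 1

orig_ppn = FakeBackend.ppn            # save the original method
FakeBackend.ppn = lambda self: 42     # monkeypatch it for the test
try:
    assert FakeBackend().ppn() == 42  # patched behaviour is in effect here
finally:
    FakeBackend.ppn = orig_ppn        # always restored, even if the assert fails

assert FakeBackend().ppn() == 1       # original behaviour is back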

    def test_map_easyconfig_to_target_tc_hierarchy(self):
        """Test mapping of easyconfig to target hierarchy"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        build_options = {
            'robot_path': [test_easyconfigs],
            'silent': True,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)
        get_toolchain_hierarchy.clear()

        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
        # The below mapping includes a binutils mapping (2.26 to 2.25)
        tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
        ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        tweaked_dict = tweaked_ec['ec'].asdict()
        # First check the mapped toolchain
        key, value = 'toolchain', iccifort_binutils_tc
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # Also check that binutils has been mapped
        for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
            self.assertTrue(key in tweaked_dict['builddependencies'][0] and
                            value == tweaked_dict['builddependencies'][0][key])

        # Now test the case where we try to update the dependencies
        init_config(build_options=build_options)
        get_toolchain_hierarchy.clear()
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping, update_dep_versions=True)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        tweaked_dict = tweaked_ec['ec'].asdict()
        # First check the mapped toolchain
        key, value = 'toolchain', iccifort_binutils_tc
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # Also check that binutils has been mapped
        for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
            self.assertTrue(
                key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key]
            )
        # Also check that the gzip dependency was upgraded
        for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items():
            self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key])

        # Make sure there are checksums for our next test
        self.assertTrue(tweaked_dict['checksums'])

        # Test the case where we also update the software version at the same time
        init_config(build_options=build_options)
        get_toolchain_hierarchy.clear()
        new_version = '1.x.3'
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec,
                                                             tc_mapping,
                                                             update_build_specs={'version': new_version},
                                                             update_dep_versions=True)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        tweaked_dict = tweaked_ec['ec'].asdict()
        # First check the mapped toolchain
        key, value = 'toolchain', iccifort_binutils_tc
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # Also check that binutils has been mapped
        for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
            self.assertTrue(
                key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key]
            )
        # Also check that the gzip dependency was upgraded
        for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items():
            self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key])

        # Finally check that the version was upgraded
        key, value = 'version', new_version
        self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
        # and that the checksum was removed
        self.assertFalse(tweaked_dict['checksums'])

        # Check that if we update a software version, it also updates the version if the software appears in an
        # extension list (like for a PythonBundle)
        ec_spec = os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0-gompi-2018a-test.eb')
        # Create the trivial toolchain mapping
        toolchain = {'name': 'gompi', 'version': '2018a'}
        tc_mapping = map_toolchain_hierarchies(toolchain, toolchain, self.modtool)
        # Update the software version
        init_config(build_options=build_options)
        get_toolchain_hierarchy.clear()
        new_version = '1.x.3'
        tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec,
                                                             tc_mapping,
                                                             update_build_specs={'version': new_version},
                                                             update_dep_versions=False)
        tweaked_ec = process_easyconfig(tweaked_spec)[0]
        extensions = tweaked_ec['ec']['exts_list']
        # check one extension with the same name exists and that the version has been updated
        hit_extension = 0
        for extension in extensions:
            if isinstance(extension, tuple) and extension[0] == 'toy':
                self.assertEqual(extension[1], new_version)
                # Make sure checksum has been purged
                self.assertFalse('checksums' in extension[2])
                hit_extension += 1
        self.assertEqual(hit_extension, 1, "Should only have updated one extension")
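
The extension entries inspected in the loop above are assumed to follow the usual exts_list layout of (name, version, options) tuples, with per-extension checksums living in the options dict; an illustrative fragment:

# illustrative exts_list fragment (name/version/checksum values are placeholders)
exts_list = [
    ('toy', '0.0', {
        'checksums': ['<sha256 checksum of the toy 0.0 source tarball>'],
    }),
]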

    def test_find_potential_version_mappings(self):
        """Test ability to find potential version mappings of a dependency for a given toolchain mapping"""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'robot_path': [test_easyconfigs],
            'silent': True,
            'valid_module_classes': module_classes(),
        })
        get_toolchain_hierarchy.clear()

        gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
        iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
        # The below mapping includes a binutils mapping (2.26 to 2.25)
        tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
        ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
        parsed_ec = process_easyconfig(ec_spec)[0]
        gzip_dep = [dep for dep in parsed_ec['ec']['dependencies'] if dep['name'] == 'gzip'][0]
        self.assertEqual(gzip_dep['full_mod_name'], 'gzip/1.4-GCC-4.9.3-2.26')

        potential_versions = find_potential_version_mappings(gzip_dep, tc_mapping)
        self.assertEqual(len(potential_versions), 1)
        # Should see version 1.6 of gzip with iccifort toolchain
        expected = {
            'path': os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb'),
            'toolchain': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
            'version': '1.6',
            'versionsuffix': '',
        }
        self.assertEqual(potential_versions[0], expected)

        # Test that we can override respecting the versionsuffix

        # Create toolchain mapping for OpenBLAS
        gcc_4_tc = {'name': 'GCC', 'version': '4.8.2'}
        gcc_6_tc = {'name': 'GCC', 'version': '6.4.0-2.28'}
        tc_mapping = map_toolchain_hierarchies(gcc_4_tc, gcc_6_tc, self.modtool)
        # Create a dep with the necessary params (including versionsuffix)
        openblas_dep = {
            'toolchain': {'version': '4.8.2', 'name': 'GCC'},
            'name': 'OpenBLAS',
            'system': False,
            'versionsuffix': '-LAPACK-3.4.2',
            'version': '0.2.8'
        }

        self.mock_stderr(True)
        potential_versions = find_potential_version_mappings(openblas_dep, tc_mapping)
        errtxt = self.get_stderr()
        warning_stub = "\nWARNING: There may be newer version(s) of dep 'OpenBLAS' available with a different " \
                       "versionsuffix to '-LAPACK-3.4.2'"
        self.mock_stderr(False)
        self.assertTrue(errtxt.startswith(warning_stub))
        self.assertEqual(len(potential_versions), 0)
        potential_versions = find_potential_version_mappings(openblas_dep, tc_mapping, ignore_versionsuffixes=True)
        self.assertEqual(len(potential_versions), 1)
        expected = {
            'path': os.path.join(test_easyconfigs, 'o', 'OpenBLAS', 'OpenBLAS-0.2.20-GCC-6.4.0-2.28.eb'),
            'toolchain': {'version': '6.4.0-2.28', 'name': 'GCC'},
            'version': '0.2.20',
            'versionsuffix': '',
        }
        self.assertEqual(potential_versions[0], expected)
Example #51
0
    def test_robot_find_minimal_toolchain_of_dependency(self):
        """Test robot_find_minimal_toolchain_of_dependency."""

        # replace log.experimental with log.warning to allow experimental code
        easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        #
        # First test that it can do basic resolution
        #
        gzip15 = {
            'name': 'gzip',
            'version': '1.5',
            'versionsuffix': '',
            'toolchain': {'name': 'goolf', 'version': '1.4.10'},
        }
        get_toolchain_hierarchy.clear()
        new_gzip15_toolchain = robot_find_minimal_toolchain_of_dependency(gzip15, self.modtool)
        self.assertEqual(new_gzip15_toolchain, gzip15['toolchain'])

        # no easyconfig for gzip 1.4 with matching non-dummy (sub)toolchain
        gzip14 = {
            'name': 'gzip',
            'version': '1.4',
            'versionsuffix': '',
            'toolchain': {'name': 'goolf', 'version': '1.4.10'},
        }
        get_toolchain_hierarchy.clear()
        self.assertEqual(robot_find_minimal_toolchain_of_dependency(gzip14, self.modtool), None)

        gzip14['toolchain'] = {'name': 'gompi', 'version': '1.4.10'}

        #
        # Second test also including dummy toolchain
        #
        init_config(build_options={
            'add_dummy_to_minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        # specify alternative parent toolchain
        gompi_1410 = {'name': 'gompi', 'version': '1.4.10'}
        get_toolchain_hierarchy.clear()
        new_gzip14_toolchain = robot_find_minimal_toolchain_of_dependency(gzip14, self.modtool, parent_tc=gompi_1410)
        self.assertTrue(new_gzip14_toolchain != gzip14['toolchain'])
        self.assertEqual(new_gzip14_toolchain, {'name': 'dummy', 'version': ''})

        # default: use toolchain from dependency
        gzip14['toolchain'] = gompi_1410
        get_toolchain_hierarchy.clear()
        new_gzip14_toolchain = robot_find_minimal_toolchain_of_dependency(gzip14, self.modtool)
        self.assertTrue(new_gzip14_toolchain != gzip14['toolchain'])
        self.assertEqual(new_gzip14_toolchain, {'name': 'dummy', 'version': ''})

        # check reversed order (parent tc first) and skipping of parent tc itself
        dep = {
            'name': 'SQLite',
            'version': '3.8.10.2',
            'toolchain': {'name': 'goolf', 'version': '1.4.10'},
        }
        res = robot_find_minimal_toolchain_of_dependency(dep, self.modtool)
        self.assertEqual(res, {'name': 'GCC', 'version': '4.7.2'})
        res = robot_find_minimal_toolchain_of_dependency(dep, self.modtool, parent_first=True)
        self.assertEqual(res, {'name': 'goolf', 'version': '1.4.10'})

        #
        # Finally test if it can recognise existing modules and use those
        #
        barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
        barec_txt = '\n'.join([
            "easyblock = 'ConfigureMake'",
            "name = 'bar'",
            "version = '1.2.3'",
            "homepage = 'http://example.com'",
            "description = 'foo'",
            "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
            # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
            # to test resolving of dependencies with minimal toolchain
            # for each of these, we know test easyconfigs are available (which are required here)
            "dependencies = [",
            "   ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
            "   ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('SQLite', '3.8.10.2'),",  # available with goolf/1.4.10, gompi/1.4.10 and GCC/4.7.2
            "]",
        ])
        write_file(barec, barec_txt)

        # check without --minimal-toolchains
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        bar = EasyConfig(barec)

        expected_dep_versions = [
            '1.6.4-GCC-4.7.2',
            '0.2.6-gompi-1.4.10-LAPACK-3.4.2',
            '2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
            '3.8.10.2-goolf-1.4.10',
        ]
        for dep, expected_dep_version in zip(bar.dependencies(), expected_dep_versions):
            self.assertEqual(det_full_ec_version(dep), expected_dep_version)

        # check with --minimal-toolchains enabled
        init_config(build_options={
            'minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })
        bar = EasyConfig(barec)

        # check that all bar dependencies have been processed as expected
        expected_dep_versions[-1] = '3.8.10.2-GCC-4.7.2'
        for dep, expected_dep_version in zip(bar.dependencies(), expected_dep_versions):
            self.assertEqual(det_full_ec_version(dep), expected_dep_version)

        # Add the gompi/1.4.10 version of SQLite as an available module
        module_parent = os.path.join(self.test_prefix, 'minimal_toolchain_modules')
        module_file = os.path.join(module_parent, 'SQLite', '3.8.10.2-gompi-1.4.10')
        module_txt = '\n'.join([
            "#%Module",
            "set root /tmp/SQLite/3.8.10.2",
            "setenv EBROOTSQLITE $root",
            "setenv EBVERSIONSQLITE 3.8.10.2",
            "setenv  EBDEVELSQLITE $root/easybuild/SQLite-3.8.10.2-easybuild-devel",
        ])
        write_file(module_file, module_txt)
        os.environ['MODULEPATH'] = module_parent # Add the parent directory to the MODULEPATH
        invalidate_module_caches_for(module_parent)

        # Reinitialize the environment for the updated MODULEPATH and use_existing_modules
        init_config(build_options={
            'minimal_toolchains': True,
            'use_existing_modules': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        # Check gompi is now being picked up
        bar = EasyConfig(barec) # Re-parse the parent easyconfig
        sqlite = bar.dependencies()[3]
        self.assertEqual(det_full_ec_version(sqlite), '3.8.10.2-gompi-1.4.10')

        # Add the goolf version as an available version and check that gets precedence over the gompi version
        module_file = os.path.join(module_parent, 'SQLite', '3.8.10.2-goolf-1.4.10')
        write_file(module_file, module_txt)
        invalidate_module_caches_for(module_parent)
        bar = EasyConfig(barec) # Re-parse the parent easyconfig
        sqlite = bar.dependencies()[3]
        self.assertEqual(det_full_ec_version(sqlite), '3.8.10.2-goolf-1.4.10')
Example #52
0
    def test_resolve_dependencies_minimal(self):
        """Test resolved dependencies with minimal toolchain."""

        # replace log.experimental with log.warning to allow experimental code
        easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        self.install_mock_module()

        init_config(build_options={
            'allow_modules_tool_mismatch': True,
            'minimal_toolchains': True,
            'use_existing_modules': True,
            'external_modules_metadata': ConfigObj(),
            'robot_path': test_easyconfigs,
            'valid_module_classes': module_classes(),
            'validate': False,
        })

        barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
        barec_lines = [
            "easyblock = 'ConfigureMake'",
            "name = 'bar'",
            "version = '1.2.3'",
            "homepage = 'http://example.com'",
            "description = 'foo'",
            # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
            # to test resolving of dependencies with minimal toolchain
            # for each of these, we know test easyconfigs are available (which are required here)
            "dependencies = [",
            "   ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
            "   ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
            "   ('SQLite', '3.8.10.2'),",
            "]",
            # toolchain as last line, for easy modification later;
            # the use of %(version_major)s here is mainly to check if templates are being handled correctly
            # (it doesn't make much sense, but it serves the purpose)
            "toolchain = {'name': 'goolf', 'version': '%(version_major)s.4.10'}",
        ]
        write_file(barec, '\n'.join(barec_lines))
        bar = process_easyconfig(barec)[0]

        # all modules in the dep graph, in order
        all_mods_ordered = [
            'GCC/4.7.2',
            'hwloc/1.6.2-GCC-4.7.2',
            'OpenMPI/1.6.4-GCC-4.7.2',
            'gompi/1.4.10',
            'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
            'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
            'SQLite/3.8.10.2-GCC-4.7.2',
            'FFTW/3.3.3-gompi-1.4.10',
            'goolf/1.4.10',
            'bar/1.2.3-goolf-1.4.10',
        ]

        # no modules available, so all dependencies are retained
        MockModule.avail_modules = []
        res = resolve_dependencies([bar], self.modtool)
        self.assertEqual(len(res), 10)
        self.assertEqual([x['full_mod_name'] for x in res], all_mods_ordered)

        MockModule.avail_modules = [
            'GCC/4.7.2',
            'gompi/1.4.10',
            'goolf/1.4.10',
            'OpenMPI/1.6.4-GCC-4.7.2',
            'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
            'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
            'SQLite/3.8.10.2-GCC-4.7.2',
        ]

        # test resolving dependencies with minimal toolchain (rather than using goolf/1.4.10 for all of them)
        # existing modules are *not* taken into account when determining minimal subtoolchain, by default
        res = resolve_dependencies([bar], self.modtool)
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0]['full_mod_name'], bar['ec'].full_mod_name)

        # test retaining all dependencies, regardless of whether modules are available or not
        res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
        self.assertEqual(len(res), 10)
        mods = [x['full_mod_name'] for x in res]
        self.assertEqual(mods, all_mods_ordered)
        self.assertTrue('SQLite/3.8.10.2-GCC-4.7.2' in mods)

        # test taking into account existing modules
        # with an SQLite module with goolf/1.4.10 in place, this toolchain should be used rather than GCC/4.7.2
        MockModule.avail_modules = [
            'SQLite/3.8.10.2-goolf-1.4.10',
        ]

        # parsed easyconfigs are cached, so clear the cache before reprocessing easyconfigs
        ecec._easyconfigs_cache.clear()

        bar = process_easyconfig(barec)[0]
        res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
        self.assertEqual(len(res), 10)
        mods = [x['full_mod_name'] for x in res]
        self.assertTrue('SQLite/3.8.10.2-goolf-1.4.10' in mods)
        self.assertFalse('SQLite/3.8.10.2-GCC-4.7.2' in mods)

        # Check whether having 2 versions of the dummy toolchain is OK
        # Clear easyconfig and toolchain caches
        ecec._easyconfigs_cache.clear()
        get_toolchain_hierarchy.clear()

        init_config(build_options={
            'allow_modules_tool_mismatch': True,
            'minimal_toolchains': True,
            'add_dummy_to_minimal_toolchains': True,
            'external_modules_metadata': ConfigObj(),
            'robot_path': test_easyconfigs,
            'valid_module_classes': module_classes(),
            'validate': False,
        })

        impi_txt = read_file(os.path.join(test_easyconfigs, 'i', 'impi', 'impi-4.1.3.049.eb'))
        self.assertTrue(re.search("^toolchain = {'name': 'dummy', 'version': ''}", impi_txt, re.M))
        gzip_txt = read_file(os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.4.eb'))
        self.assertTrue(re.search("^toolchain = {'name': 'dummy', 'version': 'dummy'}", gzip_txt, re.M))

        barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
        barec_lines = [
            "easyblock = 'ConfigureMake'",
            "name = 'bar'",
            "version = '1.2.3'",
            "homepage = 'http://example.com'",
            "description = 'foo'",
            # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
            # to test resolving of dependencies with minimal toolchain
            # for each of these, we know test easyconfigs are available (which are required here)
            "dependencies = [",
            "   ('impi', '4.1.3.049'),",  # has toolchain ('dummy', '')
            "   ('gzip', '1.4'),",  # has toolchain ('dummy', 'dummy')
            "]",
            # toolchain spec on a separate line, for easy modification later
            "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
        ]
        write_file(barec, '\n'.join(barec_lines))
        bar = process_easyconfig(barec)[0]

        res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
        self.assertEqual(len(res), 11)
        mods = [x['full_mod_name'] for x in res]
        self.assertTrue('impi/4.1.3.049' in mods)
        self.assertTrue('gzip/1.4' in mods)
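
    # The tests above drive module availability through MockModule.avail_modules; MockModule itself is
    # defined elsewhere in this test module. As a rough, purely illustrative sketch (name and signature
    # below are assumptions, not EasyBuild's actual code), the idea is a modules-tool stub whose 'avail'
    # output is controlled by a class attribute:
    class _MockModulesToolSketch(object):
        avail_modules = []  # tests overwrite this list to simulate which modules are installed

        def available(self, mod_name=None):
            # return the simulated list of available modules, optionally filtered by name prefix
            return [m for m in self.avail_modules if mod_name is None or m.startswith(mod_name)]
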
    def test_build_easyconfigs_in_parallel_pbs_python(self):
        """Test build_easyconfigs_in_parallel(), using (mocked) pbs_python as backend for --job."""
        # put mocked functions in place
        PbsPython__init__ = PbsPython.__init__
        PbsPython_check_version = PbsPython._check_version
        PbsPython_complete = PbsPython.complete
        PbsPython_connect_to_server = PbsPython.connect_to_server
        PbsPython_ppn = PbsPython.ppn
        pbs_python_PbsJob = pbs_python.PbsJob

        PbsPython.__init__ = lambda self: PbsPython__init__(
            self, pbs_server='localhost')
        PbsPython._check_version = lambda _: True
        PbsPython.complete = mock
        PbsPython.connect_to_server = mock
        PbsPython.ppn = mock
        pbs_python.PbsJob = MockPbsJob

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'external_modules_metadata': {},
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'valid_module_classes': config.module_classes(),
            'validate': False,
            'job_cores': 3,
        }
        init_config(args=['--job-backend=PbsPython'],
                    build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip',
                               'gzip-1.5-goolf-1.4.10.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        jobs = build_easyconfigs_in_parallel("echo '%(spec)s'",
                                             ordered_ecs,
                                             prepare_first=False)
        self.assertEqual(len(jobs), 8)
        regex = re.compile("echo '.*/gzip-1.5-goolf-1.4.10.eb'")
        self.assertTrue(
            regex.search(jobs[-1].script),
            "Pattern '%s' not found in: %s" % (regex.pattern, jobs[-1].script))

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 'g', 'gzip',
                               'gzip-1.4-GCC-4.6.3.eb')
        ordered_ecs = resolve_dependencies(process_easyconfig(ec_file),
                                           self.modtool,
                                           retain_all_deps=True)
        jobs = submit_jobs(ordered_ecs, '', testing=False, prepare_first=False)

        # make sure command is correct, and that --hidden is there when it needs to be
        for i, ec in enumerate(ordered_ecs):
            if ec['hidden']:
                regex = re.compile("eb %s.* --hidden" % ec['spec'])
            else:
                regex = re.compile("eb %s" % ec['spec'])
            self.assertTrue(
                regex.search(jobs[i].script),
                "Pattern '%s' not found in: %s" % (regex.pattern, jobs[i].script))

        for job in jobs:
            self.assertEqual(job.cores, build_options['job_cores'])

        # no deps for GCC/4.6.3 (toolchain) and ictce/4.1.13 (test easyconfig with 'fake' deps)
        self.assertEqual(len(jobs[0].deps), 0)
        self.assertEqual(len(jobs[1].deps), 0)

        # only dependency for toy/0.0-deps is ictce/4.1.13 (dep marked as external module is filtered out)
        self.assertTrue('toy-0.0-deps.eb' in jobs[2].script)
        self.assertEqual(len(jobs[2].deps), 1)
        self.assertTrue('ictce-4.1.13.eb' in jobs[2].deps[0].script)

        # dependencies for gzip/1.4-GCC-4.6.3: GCC/4.6.3 (toolchain) + toy/.0.0-deps
        self.assertTrue('gzip-1.4-GCC-4.6.3.eb' in jobs[3].script)
        self.assertEqual(len(jobs[3].deps), 2)
        regex = re.compile(r'toy-0.0-deps.eb\s* --hidden')
        self.assertTrue(regex.search(jobs[3].deps[0].script))
        self.assertTrue('GCC-4.6.3.eb' in jobs[3].deps[1].script)

        # restore mocked stuff
        PbsPython.__init__ = PbsPython__init__
        PbsPython._check_version = PbsPython_check_version
        PbsPython.complete = PbsPython_complete
        PbsPython.connect_to_server = PbsPython_connect_to_server
        PbsPython.ppn = PbsPython_ppn
        pbs_python.PbsJob = pbs_python_PbsJob
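
    # A minimal sketch of the same monkey-patching done above, expressed with the standard library's
    # unittest.mock instead of manual save/override/restore; patches are undone automatically, even if
    # the body raises. This helper is illustrative only (its name and the patched return values are
    # assumptions), not part of EasyBuild's test suite:
    def _sketch_pbs_python_patching(self):
        from unittest import mock as umock  # 'umock' avoids clashing with the mock() helper used above

        with umock.patch.object(PbsPython, '_check_version', return_value=True), \
             umock.patch.object(PbsPython, 'connect_to_server', return_value=True), \
             umock.patch.object(PbsPython, 'ppn', return_value=1):
            # within this block PbsPython is patched; the originals are restored when the block exits
            pass
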
    def test_toolchain_external_modules(self):
        """Test use of Toolchain easyblock with external modules."""

        external_modules = [
            'gcc/8.3.0', 'openmpi/4.0.2', 'openblas/0.3.7', 'fftw/3.3.8',
            'scalapack/2.0.2'
        ]
        external_modules_metadata = {
            # all metadata for gcc/8.3.0
            'gcc/8.3.0': {
                'name': ['GCC'],
                'version': ['8.3.0'],
                'prefix': '/software/gcc/8.3.0',
            },
            # only name/version for openmpi/4.0.2
            'openmpi/4.0.2': {
                'name': ['OpenMPI'],
                'version': ['4.0.2'],
            },
            # only name/prefix for openblas/0.3.7
            'openblas/0.3.7': {
                'name': ['OpenBLAS'],
                'prefix': '/software/openblas/0.3.7',
            },
            # only version/prefix for fftw/3.3.8 (no name)
            'fftw/3.3.8': {
                'version': ['3.3.8'],
                'prefix': '/software/fftw/3.3.8',
            },
            # no metadata for scalapack/2.0.2
        }

        # initialize configuration
        cleanup()
        eb_go = eboptions.parse_options(
            args=['--installpath=%s' % self.tmpdir])
        config.init(eb_go.options, eb_go.get_options_by_section('config'))
        build_options = {
            'external_modules_metadata': external_modules_metadata,
            'valid_module_classes': config.module_classes(),
        }
        config.init_build_options(build_options=build_options)
        set_tmpdir()
        del eb_go

        modtool = modules_tool()

        # make sure no $EBROOT* or $EBVERSION* environment variables are set in current environment
        for key in list(os.environ):
            if any(key.startswith(x) for x in ['EBROOT', 'EBVERSION']):
                del os.environ[key]

        # create dummy module file for each of the external modules
        test_mod_path = os.path.join(self.tmpdir, 'modules', 'all')
        for mod in external_modules:
            write_file(os.path.join(test_mod_path, mod), "#%Module")

        modtool.use(test_mod_path)

        # test easyconfig file to install toolchain that uses external modules,
        # and enables set_env_external_modules
        test_ec_path = os.path.join(self.tmpdir, 'test.eb')
        test_ec_txt = '\n'.join([
            "easyblock = 'Toolchain'",
            "name = 'test-toolchain'",
            "version = '1.2.3'",
            "homepage = 'https://example.com'",
            "description = 'just a test'",
            "toolchain = SYSTEM",
            "dependencies = [",
            "   ('gcc/8.3.0', EXTERNAL_MODULE),",
            "   ('openmpi/4.0.2', EXTERNAL_MODULE),",
            "   ('openblas/0.3.7', EXTERNAL_MODULE),",
            "   ('fftw/3.3.8', EXTERNAL_MODULE),",
            "   ('scalapack/2.0.2', EXTERNAL_MODULE),",
            "]",
            "set_env_external_modules = True",
            "moduleclass = 'toolchain'",
        ])
        write_file(test_ec_path, test_ec_txt)
        test_ec = process_easyconfig(test_ec_path)[0]

        # create easyblock & install module via run_all_steps
        tc_inst = get_easyblock_instance(test_ec)
        self.assertTrue(isinstance(tc_inst, Toolchain))
        self.mock_stdout(True)
        tc_inst.run_all_steps(False)
        self.mock_stdout(False)

        # make sure expected module file exists
        test_mod = os.path.join(test_mod_path, 'test-toolchain', '1.2.3')
        if get_module_syntax() == 'Lua':
            test_mod += '.lua'
        self.assertTrue(os.path.exists(test_mod))

        # load test-toolchain/1.2.3 module to get environment variable to check for defined
        modtool.load(['test-toolchain/1.2.3'])

        # check whether expected environment variables are defined
        self.assertEqual(os.environ.pop('EBROOTGCC'), '/software/gcc/8.3.0')
        self.assertEqual(os.environ.pop('EBVERSIONGCC'), '8.3.0')
        self.assertEqual(os.environ.pop('EBVERSIONOPENMPI'), '4.0.2')
        self.assertEqual(os.environ.pop('EBROOTOPENBLAS'),
                         '/software/openblas/0.3.7')
        undefined_env_vars = [
            'EBROOTOPENMPI',  # no prefix in metadata
            'EBVERSIONOPENBLAS',  # no version in metadata
            'EBROOTFFTW',
            'EBVERSIONFFTW',  # no name in metadata
            'EBROOTSCALAPACK',
            'EBVERSIONSCALAPACK',  # no metadata
        ]
        for env_var in undefined_env_vars:
            self.assertTrue(os.getenv(env_var) is None)

        # make sure no unexpected $EBROOT* or $EBVERSION* environment variables were defined
        del os.environ['EBROOTTESTMINTOOLCHAIN']
        del os.environ['EBVERSIONTESTMINTOOLCHAIN']
        extra_eb_env_vars = []
        for key in os.environ:
            if any(key.startswith(x) for x in ['EBROOT', 'EBVERSION']):
                extra_eb_env_vars.append(key)
        self.assertEqual(extra_eb_env_vars, [])
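
# A rough sketch of the naming convention this test exercises: for an external module, EasyBuild derives
# $EBROOT<NAME> from the metadata's name + prefix and $EBVERSION<NAME> from name + version, where <NAME>
# is an upper-cased form of the software name (the real implementation also mangles characters such as
# dashes). The helper below is purely illustrative, not EasyBuild's API:
def sketch_external_module_env_vars(metadata):
    """Return the environment variables that one external module's metadata would translate into."""
    env_vars = {}
    for name in metadata.get('name', []):
        key = name.upper()
        if 'prefix' in metadata:
            env_vars['EBROOT%s' % key] = metadata['prefix']
        for version in metadata.get('version', []):
            env_vars['EBVERSION%s' % key] = version
    return env_vars

# e.g. for the gcc/8.3.0 metadata above this yields
# {'EBROOTGCC': '/software/gcc/8.3.0', 'EBVERSIONGCC': '8.3.0'},
# while the fftw/3.3.8 entry (no name) yields {} -- matching the assertions in the test.
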
Example #55
0
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg testing_data: a tuple (args, logfile, do_build), used to steer behavior when testing main
    This function will:
    - read easyconfig
    - build software
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if options.robot is not None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths of installed easyconfigs, so we can find specified easyconfigs later
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # specified robot paths are preferred over installed easyconfig files
    if robot_path:
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_online': options.regtest_online,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    })

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if 'name' in build_specs:
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if d not in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:  # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)

        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for f in files:
                if not generated and try_to_generate and build_specs:
                    ec_file = tweak(f, None, build_specs)
                else:
                    ec_file = f
                ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError as err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
Example #56
0
    def test_list_software(self):
        """Test list_software* functions."""
        build_options = {
            'robot_path': [os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'v1.0')],
            'silent': True,
            'valid_module_classes': module_classes(),
        }
        init_config(build_options=build_options)

        expected = '\n'.join([
            '',
            '* GCC',
            '* gzip',
        ])
        self.assertEqual(list_software(output_format='txt'), expected)

        expected = re.compile('\n'.join([
            r'',
            r'\* GCC',
            r'',
            r"The GNU Compiler Collection .*",
            r'',
            r'homepage: http://gcc.gnu.org/',
            r'',
            r'  \* GCC v4.6.3: dummy',
            r'',
            r'\* gzip',
            r'',
            r"gzip \(GNU zip\) is .*",
            r'',
            r'homepage: http://www.gzip.org/',
            r'',
            r"  \* gzip v1.4: GCC/4.6.3, dummy",
            r"  \* gzip v1.5: goolf/1.4.10, ictce/4.1.13",
            '',
        ]))
        txt = list_software(output_format='txt', detailed=True)
        self.assertTrue(expected.match(txt), "Pattern '%s' not found in: %s" % (expected.pattern, txt))

        expected = '\n'.join([
            "List of supported software",
            "==========================",
            '',
            "EasyBuild |version| supports 2 different software packages (incl. toolchains, bundles):",
            '',
            ':ref:`list_software_letter_g`',
            '',
            '',
            '.. _list_software_letter_g:',
            '',
            '*G*',
            '---',
            '',
            '* GCC',
            '* gzip',
        ])
        self.assertEqual(list_software(output_format='rst'), expected)

        expected = re.compile('\n'.join([
            r"List of supported software",
            r"==========================",
            r'',
            r"EasyBuild \|version\| supports 2 different software packages \(incl. toolchains, bundles\):",
            r'',
            r':ref:`list_software_letter_g`',
            r'',
            r'',
            r'.. _list_software_letter_g:',
            r'',
            r'\*G\*',
            r'---',
            r'',
            r'',
            r':ref:`list_software_GCC_205` - :ref:`list_software_gzip_442`',
            r'',
            r'',
            r'\.\. _list_software_GCC_205:',
            r'',
            r'\*GCC\*',
            r'\+\+\+\+\+',
            r'',
            r'The GNU Compiler Collection .*',
            r'',
            r'\*homepage\*: http://gcc.gnu.org/',
            r'',
            r'=========    =========',
            r'version      toolchain',
            r'=========    =========',
            r'``4.6.3``    ``dummy``',
            r'=========    =========',
            r'',
            r'',
            r'\.\. _list_software_gzip_442:',
            r'',
            r'\*gzip\*',
            r'\+\+\+\+\+\+',
            r'',
            r'gzip \(GNU zip\) is a popular .*',
            r'',
            r'\*homepage\*: http://www.gzip.org/',
            r'',
            r'=======    ==================================',
            r'version    toolchain                         ',
            r'=======    ==================================',
            r'``1.4``    ``GCC/4.6.3``, ``dummy``          ',
            r'``1.5``    ``goolf/1.4.10``, ``ictce/4.1.13``',
            r'=======    ==================================',
        ]))
        txt = list_software(output_format='rst', detailed=True)
        self.assertTrue(expected.match(txt), "Pattern '%s' not found in: %s" % (expected.pattern, txt))

        # GCC/4.6.3 is installed, no gzip module installed
        txt = list_software(output_format='txt', detailed=True, only_installed=True)
        self.assertTrue(re.search(r'^\* GCC', txt, re.M))
        self.assertTrue(re.search(r'^\s*\* GCC v4.6.3: dummy', txt, re.M))
        self.assertFalse(re.search(r'^\* gzip', txt, re.M))
        self.assertFalse(re.search(r'gzip v1\.', txt, re.M))

        txt = list_software(output_format='rst', detailed=True, only_installed=True)
        self.assertTrue(re.search(r'^\*GCC\*', txt, re.M))
        self.assertTrue(re.search(r'4\.6\.3.*dummy', txt, re.M))
        self.assertFalse(re.search(r'^\*gzip\*', txt, re.M))
        self.assertFalse(re.search(r'1\.4', txt, re.M))
        self.assertFalse(re.search(r'1\.5', txt, re.M))
Example #57
0
    def test_get_toolchain_hierarchy(self):
        """Test get_toolchain_hierarchy function."""
        test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
        init_config(build_options={
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        goolf_hierarchy = get_toolchain_hierarchy({'name': 'goolf', 'version': '1.4.10'})
        self.assertEqual(goolf_hierarchy, [
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
            {'name': 'goolf', 'version': '1.4.10'},
        ])

        iimpi_hierarchy = get_toolchain_hierarchy({'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'})
        self.assertEqual(iimpi_hierarchy, [
            {'name': 'iccifort', 'version': '2013.5.192-GCC-4.8.3'},
            {'name': 'iimpi', 'version': '5.5.3-GCC-4.8.3'},
        ])

        # test also including dummy
        init_config(build_options={
            'add_dummy_to_minimal_toolchains': True,
            'valid_module_classes': module_classes(),
            'robot_path': test_easyconfigs,
        })

        get_toolchain_hierarchy.clear()
        gompi_hierarchy = get_toolchain_hierarchy({'name': 'gompi', 'version': '1.4.10'})
        self.assertEqual(gompi_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCC', 'version': '4.7.2'},
            {'name': 'gompi', 'version': '1.4.10'},
        ])

        get_toolchain_hierarchy.clear()
        # check whether GCCcore is considered as subtoolchain, even if it's only listed as a dep
        gcc_hierarchy = get_toolchain_hierarchy({'name': 'GCC', 'version': '4.9.3-2.25'})
        self.assertEqual(gcc_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCCcore', 'version': '4.9.3'},
            {'name': 'GCC', 'version': '4.9.3-2.25'},
        ])

        get_toolchain_hierarchy.clear()
        iccifort_hierarchy = get_toolchain_hierarchy({'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'})
        self.assertEqual(iccifort_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'GCCcore', 'version': '4.9.3'},
            {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
        ])

        get_toolchain_hierarchy.clear()
        build_options = {
            'add_dummy_to_minimal_toolchains': True,
            'external_modules_metadata': ConfigObj(),
            'robot_path': test_easyconfigs,
        }
        init_config(build_options=build_options)
        craycce_hierarchy = get_toolchain_hierarchy({'name': 'CrayCCE', 'version': '5.1.29'})
        self.assertEqual(craycce_hierarchy, [
            {'name': 'dummy', 'version': ''},
            {'name': 'CrayCCE', 'version': '5.1.29'},
        ])
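
    # The get_toolchain_hierarchy.clear() calls sprinkled through this test exist because the function
    # memoizes its results. As a purely illustrative sketch (not EasyBuild's actual implementation), a
    # clearable cache of that shape can be built like this:
    @staticmethod
    def _sketch_clearable_cache(func):
        """Decorator sketch: cache results per argument and expose a .clear() to reset the cache."""
        cache = {}

        def wrapper(*args):
            key = repr(args)  # derive a hashable key, since arguments may be dicts
            if key not in cache:
                cache[key] = func(*args)
            return cache[key]

        wrapper.clear = cache.clear
        return wrapper
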
    def test_build_easyconfigs_in_parallel_gc3pie(self):
        """Test build_easyconfigs_in_parallel(), using GC3Pie with local config as backend for --job."""
        try:
            import gc3libs  # noqa (ignore unused import)
        except ImportError:
            print("GC3Pie not available, skipping test")
            return

        # put GC3Pie config in place to use local host and fork/exec
        resourcedir = os.path.join(self.test_prefix, 'gc3pie')
        gc3pie_cfgfile = os.path.join(self.test_prefix, 'gc3pie_local.ini')
        gc3pie_cfgtxt = GC3PIE_LOCAL_CONFIGURATION % {
            'resourcedir': resourcedir,
            'time': which('time'),
        }
        write_file(gc3pie_cfgfile, gc3pie_cfgtxt)

        output_dir = os.path.join(self.test_prefix, 'subdir', 'gc3pie_output_dir')
        # purposely pre-create output dir, and put a file in it (to check whether GC3Pie tries to rename the output dir)
        mkdir(output_dir, parents=True)
        write_file(os.path.join(output_dir, 'foo'), 'bar')
        # remove write permissions on parent dir of specified output dir,
        # to check that GC3Pie does not try to rename the (already existing) output directory...
        adjust_permissions(os.path.dirname(output_dir), stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH,
                           add=False, recursive=False)

        topdir = os.path.dirname(os.path.abspath(__file__))

        build_options = {
            'job_backend_config': gc3pie_cfgfile,
            'job_max_walltime': 24,
            'job_output_dir': output_dir,
            'job_polling_interval': 0.2,  # quick polling
            'job_target_resource': 'ebtestlocalhost',
            'robot_path': os.path.join(topdir, 'easyconfigs', 'test_ecs'),
            'silent': True,
            'valid_module_classes': config.module_classes(),
            'validate': False,
        }
        init_config(args=['--job-backend=GC3Pie'], build_options=build_options)

        ec_file = os.path.join(topdir, 'easyconfigs', 'test_ecs', 't', 'toy', 'toy-0.0.eb')
        easyconfigs = process_easyconfig(ec_file)
        ordered_ecs = resolve_dependencies(easyconfigs, self.modtool)
        topdir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        test_easyblocks_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
        cmd = "PYTHONPATH=%s:%s:$PYTHONPATH eb %%(spec)s -df" % (topdir, test_easyblocks_path)
        build_easyconfigs_in_parallel(cmd, ordered_ecs, prepare_first=False)

        toy_modfile = os.path.join(self.test_installpath, 'modules', 'all', 'toy', '0.0')
        if get_module_syntax() == 'Lua':
            toy_modfile += '.lua'
        self.assertTrue(os.path.exists(toy_modfile))
        self.assertTrue(os.path.exists(os.path.join(self.test_installpath, 'software', 'toy', '0.0', 'bin', 'toy')))

        # also check what happens when a job fails (an error should be raised)
        test_ecfile = os.path.join(self.test_prefix, 'test.eb')
        ectxt = read_file(ec_file)
        # use different version, for which no sources are available
        regex = re.compile('^version = .*', re.M)
        ectxt = regex.sub("version = '1.2.3'", ectxt)
        write_file(test_ecfile, ectxt)
        ecs = resolve_dependencies(process_easyconfig(test_ecfile), self.modtool)

        error = "1 jobs failed: toy-1.2.3"
        self.assertErrorRegex(EasyBuildError, error, build_easyconfigs_in_parallel, cmd, ecs, prepare_first=False)
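
# A minimal sketch of the permission tweak performed above via adjust_permissions: clearing the write
# bits on the output dir's parent so that attempts to rename or create entries in it fail. The helper
# name is illustrative, not EasyBuild's API.
import os
import stat

def remove_write_permission(path):
    """Strip user/group/other write bits from 'path'; return the previous mode so it can be restored."""
    old_mode = stat.S_IMODE(os.stat(path).st_mode)
    os.chmod(path, old_mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH))
    return old_mode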