def list_software_txt(software, detailed=False):
    """
    Return overview of supported software in plain text

    :param software: software information (structured like list_software does)
    :param detailed: whether or not to return detailed information (incl. version, versionsuffix, toolchain info)
    :return: multi-line string presenting requested info
    """
    lines = ['']
    # sort software names case-insensitively
    for name in sorted(software, key=lambda n: n.lower()):
        lines.append('* %s' % name)
        if not detailed:
            continue

        entries = software[name]
        # description/homepage come from the most recent entry for this name
        latest = entries[-1]
        lines.append('')
        lines.append(' '.join(latest['description'].split('\n')))
        lines.append('')
        lines.append("homepage: %s" % latest['homepage'])
        lines.append('')

        # unique (version, versionsuffix) pairs, ordered using LooseVersion semantics
        version_pairs = nub((entry['version'], entry['versionsuffix']) for entry in entries)
        for loose_ver, suffix in sorted((LooseVersion(v), vs) for (v, vs) in version_pairs):
            # toolchains that provide this exact version + versionsuffix
            # (relies on LooseVersion comparing equal to the matching version string)
            toolchains = [entry['toolchain'] for entry in entries
                          if entry['version'] == loose_ver and entry['versionsuffix'] == suffix]

            entry_line = "  * %s v%s" % (name, loose_ver)
            if suffix:
                entry_line += " (versionsuffix: '%s')" % suffix
            entry_line += ": %s" % ', '.join(sorted(nub(toolchains)))
            lines.append(entry_line)
        lines.append('')

    return '\n'.join(lines)
Example #2
0
def list_software_txt(software, detailed=False):
    """
    Return overview of supported software in plain text

    :param software: software information (structured like list_software does)
    :param detailed: whether or not to return detailed information (incl. version, versionsuffix, toolchain info)
    :return: multi-line string presenting requested info
    """

    lines = ['']
    # case-insensitive sort of software names
    for key in sorted(software, key=lambda x: x.lower()):
        lines.append('* %s' % key)
        if detailed:
            # description/homepage are taken from the last entry for this software name
            lines.extend([
                '',
                ' '.join(software[key][-1]['description'].split('\n')),
                '',
                "homepage: %s" % software[key][-1]['homepage'],
                '',
            ])
            # unique (version, versionsuffix) pairs, sorted using LooseVersion ordering
            pairs = nub((x['version'], x['versionsuffix']) for x in software[key])
            for ver, vsuff in sorted((LooseVersion(v), vs) for (v, vs) in pairs):
                # toolchains providing this version + suffix; note that x['version'] is a
                # string while ver is a LooseVersion (comparison relies on LooseVersion
                # coercing strings — presumably intended; verify on Python version changes)
                tcs = [x['toolchain'] for x in software[key] if x['version'] == ver and x['versionsuffix'] == vsuff]

                line = "  * %s v%s" % (key, ver)
                if vsuff:
                    line += " (versionsuffix: '%s')" % vsuff
                line += ": %s" % ', '.join(sorted(nub(tcs)))
                lines.append(line)
            lines.append('')

    return '\n'.join(lines)
    def configure_step(self, srcdir=None, builddir=None):
        """Configure build using cmake

        :param srcdir: source directory to point cmake at (current dir if None)
        :param builddir: deprecated, no longer supported (use 'srcdir' instead)
        """

        if builddir is not None:
            self.log.nosupport("CMakeMake.configure_step: named argument 'builddir' (should be 'srcdir')", "2.0")

        # make CMake search the same include/library paths as the compiler toolchain does
        tc_ipaths = self.toolchain.get_variable("CPPFLAGS", list)
        tc_lpaths = self.toolchain.get_variable("LDFLAGS", list)
        cpaths = os.getenv('CPATH', '').split(os.pathsep)
        lpaths = os.getenv('LD_LIBRARY_PATH', '').split(os.pathsep)
        include_paths = os.pathsep.join(nub(tc_ipaths + cpaths))
        library_paths = os.pathsep.join(nub(tc_lpaths + lpaths))
        setvar("CMAKE_INCLUDE_PATH", include_paths)
        setvar("CMAKE_LIBRARY_PATH", library_paths)

        default_srcdir = '.'
        if self.cfg.get('separate_build_dir', False):
            # out-of-source build: create & move into a dedicated object dir
            objdir = os.path.join(self.builddir, 'easybuild_obj')
            try:
                os.mkdir(objdir)
                os.chdir(objdir)
            except OSError as err:  # 'as' syntax: Python 2.6+/3 compatible (was 'except OSError, err')
                raise EasyBuildError("Failed to create separate build dir %s in %s: %s", objdir, os.getcwd(), err)
            default_srcdir = self.cfg['start_dir']
    def configure_step(self, srcdir=None, builddir=None):
        """Configure build using cmake

        :param srcdir: source directory to run cmake in (defaults to 'srcdir' easyconfig parameter, or start dir)
        :param builddir: build directory to configure in (created if needed; defaults per 'separate_build_dir')
        :return: output of the cmake configure command
        """

        # Set the search paths for CMake
        tc_ipaths = self.toolchain.get_variable("CPPFLAGS", list)
        tc_lpaths = self.toolchain.get_variable("LDFLAGS", list)
        cpaths = os.getenv('CPATH', '').split(os.pathsep)
        lpaths = os.getenv('LD_LIBRARY_PATH', '').split(os.pathsep)
        include_paths = os.pathsep.join(nub(tc_ipaths + cpaths))
        library_paths = os.pathsep.join(nub(tc_lpaths + lpaths))
        setvar("CMAKE_INCLUDE_PATH", include_paths)
        setvar("CMAKE_LIBRARY_PATH", library_paths)

        # fall back to a dedicated 'easybuild_obj' build dir when 'separate_build_dir' is enabled
        if builddir is None and self.cfg.get('separate_build_dir', False):
            builddir = os.path.join(self.builddir, 'easybuild_obj')

        if builddir:
            mkdir(builddir, parents=True)
            change_dir(builddir)
            # building out-of-source: cmake must be pointed back at the unpacked sources
            default_srcdir = self.cfg['start_dir']
        else:
            default_srcdir = '.'

        if srcdir is None:
            if self.cfg.get('srcdir', None) is not None:
                srcdir = self.cfg['srcdir']
            else:
                srcdir = default_srcdir

        options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir]
        # map compiler/flags environment variables to corresponding CMake options
        env_to_options = {
            'CC': 'CMAKE_C_COMPILER',
            'CFLAGS': 'CMAKE_C_FLAGS',
            'CXX': 'CMAKE_CXX_COMPILER',
            'CXXFLAGS': 'CMAKE_CXX_FLAGS',
            'F90': 'CMAKE_Fortran_COMPILER',
            'FFLAGS': 'CMAKE_Fortran_FLAGS',
        }
        for env_name, option in env_to_options.items():
            value = os.getenv(env_name)
            if value is not None:
                options.append("-D%s='%s'" % (option, value))

        if build_option('rpath'):
            # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install...
            # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416
            options.append('-DCMAKE_SKIP_RPATH=ON')

        # show what CMake is doing by default
        options.append('-DCMAKE_VERBOSE_MAKEFILE=ON')

        options_string = ' '.join(options)

        command = ' '.join([
            self.cfg['preconfigopts'], 'cmake', options_string,
            self.cfg['configopts'], srcdir
        ])
        (out, _) = run_cmd(command, log_all=True, simple=False)

        return out
Example #5
0
    def configure_step(self, srcdir=None, builddir=None):
        """Configure build using cmake

        :param srcdir: source directory to point cmake at (current dir if None)
        :param builddir: deprecated, no longer supported (use 'srcdir' instead)
        """

        if builddir is not None:
            self.log.nosupport(
                "CMakeMake.configure_step: named argument 'builddir' (should be 'srcdir')",
                "2.0")

        # make CMake search the same include/library paths as the compiler toolchain does
        tc_ipaths = self.toolchain.get_variable("CPPFLAGS", list)
        tc_lpaths = self.toolchain.get_variable("LDFLAGS", list)
        cpaths = os.getenv('CPATH', '').split(os.pathsep)
        lpaths = os.getenv('LD_LIBRARY_PATH', '').split(os.pathsep)
        include_paths = os.pathsep.join(nub(tc_ipaths + cpaths))
        library_paths = os.pathsep.join(nub(tc_lpaths + lpaths))
        setvar("CMAKE_INCLUDE_PATH", include_paths)
        setvar("CMAKE_LIBRARY_PATH", library_paths)

        default_srcdir = '.'
        if self.cfg.get('separate_build_dir', False):
            # out-of-source build: create & move into a dedicated object dir
            objdir = os.path.join(self.builddir, 'easybuild_obj')
            try:
                os.mkdir(objdir)
                os.chdir(objdir)
            except OSError as err:  # 'as' syntax: Python 2.6+/3 compatible (was 'except OSError, err')
                raise EasyBuildError(
                    "Failed to create separate build dir %s in %s: %s", objdir,
                    os.getcwd(), err)
            default_srcdir = self.cfg['start_dir']
 def set_mod_paths(self, mod_paths=None):
     """Set mod_paths, based on $MODULEPATH unless a list of module paths is specified.

     :param mod_paths: list of module paths to use (if None, derive from $MODULEPATH)
     """
     # make sure we don't have the same path twice, using nub
     if mod_paths is not None:
         self.mod_paths = nub(mod_paths)
         # register each specified path with the modules tool, in order
         for mod_path in self.mod_paths:
             self.prepend_module_path(mod_path)
     else:
         # no paths specified, so grab list of (existing) module paths from $MODULEPATH
         self.mod_paths = [p for p in nub(curr_module_paths()) if os.path.exists(p)]
     self.log.debug("$MODULEPATH after set_mod_paths: %s" % os.environ.get('MODULEPATH', ''))
Example #7
0
    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""

        if not single_tests_ok:
            # print as a function call: valid in both Python 2 and 3 (was a print statement)
            print("(skipped conflicts test)")
            return

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        def mk_dep_mod_name(spec):
            """Full module name of given dependency spec, as a tuple of path components."""
            return tuple(ActiveMNS().det_full_module_name(spec).split(
                os.path.sep))

        # construct a dictionary: (name, installver) tuple to (build) dependencies
        depmap = {}
        for spec in self.ordered_specs:
            # list comprehensions rather than map(): on Python 3 map() returns a lazy
            # iterator, which would break the .extend() calls on these lists below
            build_deps = [mk_dep_mod_name(d) for d in spec['builddependencies']]
            deps = [mk_dep_mod_name(d) for d in spec['unresolved_deps']]
            # separate runtime deps from build deps
            runtime_deps = [d for d in deps if d not in build_deps]
            key = tuple(spec['full_mod_name'].split(os.path.sep))
            depmap.update({key: [build_deps, runtime_deps]})

        # iteratively expand list of dependencies until a fixed point is reached
        depmap_last = None
        while depmap != depmap_last:
            depmap_last = copy.deepcopy(depmap)
            for (spec, (build_deps, runtime_deps)) in depmap_last.items():
                # extend runtime dependencies with non-build dependencies of own runtime dependencies
                for dep in runtime_deps:
                    depmap[spec][1].extend(
                        [d for d in depmap[dep][1] if d not in depmap[dep][0]])
                depmap[spec][1] = sorted(nub(depmap[spec][1]))
                # extend build dependencies with non-build dependencies of own build dependencies
                for dep in build_deps:
                    depmap[spec][0].extend(
                        [d for d in depmap[dep][1] if d not in depmap[dep][0]])
                depmap[spec][0] = sorted(nub(depmap[spec][0]))

        def check_conflict(spec_id, dep1, dep2):
            """Check whether dependencies with given name/(install) version conflict with each other."""
            # explicit unpacking: tuple parameters in 'def' are not supported in Python 3;
            # still called with the same positional (name, installver) tuples as before
            (name, installver) = spec_id
            (name1, installver1) = dep1
            (name2, installver2) = dep2
            # dependencies with the same name should have the exact same install version
            # if not => CONFLICT!
            if name1 == name2 and installver1 != installver2:
                specname = '%s-%s' % (name, installver)
                vs_msg = "%s-%s vs %s-%s" % (name1, installver1, name2,
                                             installver2)
                print("Conflict found for dependencies of %s: %s" % (specname,
                                                                     vs_msg))
                return True
            else:
                return False
 def set_mod_paths(self, mod_paths=None):
     """Set mod_paths, based on $MODULEPATH unless a list of module paths is specified."""
     if mod_paths is None:
         # no paths specified: take (deduplicated) existing paths from $MODULEPATH
         self.mod_paths = [path for path in nub(curr_module_paths()) if os.path.exists(path)]
     else:
         # deduplicate the supplied paths (via nub), then register each of them
         self.mod_paths = nub(mod_paths)
         for path in self.mod_paths:
             self.prepend_module_path(path)
     self.log.debug("$MODULEPATH after set_mod_paths: %s" % os.environ.get('MODULEPATH', ''))
    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""

        if not single_tests_ok:
            # print as a function call: valid in both Python 2 and 3 (was a print statement)
            print("(skipped conflicts test)")
            return

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        def mk_dep_mod_name(spec):
            """Full module name of given dependency spec, as a tuple of path components."""
            return tuple(EasyBuildMNS().det_full_module_name(spec).split(os.path.sep))

        # construct a dictionary: (name, installver) tuple to (build) dependencies
        depmap = {}
        for spec in self.ordered_specs:
            # exclude external modules, since we can't check conflicts on them (we don't even know the software name)
            build_deps = [mk_dep_mod_name(d) for d in spec['builddependencies'] if not d.get('external_module', False)]
            deps = [mk_dep_mod_name(d) for d in spec['ec'].all_dependencies if not d.get('external_module', False)]

            # separate runtime deps from build deps
            runtime_deps = [d for d in deps if d not in build_deps]
            key = tuple(spec['full_mod_name'].split(os.path.sep))
            depmap.update({key: [build_deps, runtime_deps]})

        # iteratively expand list of dependencies until a fixed point is reached
        depmap_last = None
        while depmap != depmap_last:
            depmap_last = copy.deepcopy(depmap)
            for (spec, (build_deps, runtime_deps)) in depmap_last.items():
                # extend runtime dependencies with non-build dependencies of own runtime dependencies
                for dep in runtime_deps:
                    depmap[spec][1].extend([d for d in depmap[dep][1] if d not in depmap[dep][0]])
                depmap[spec][1] = sorted(nub(depmap[spec][1]))
                # extend build dependencies with non-build dependencies of own build dependencies
                for dep in build_deps:
                    depmap[spec][0].extend([d for d in depmap[dep][1] if d not in depmap[dep][0]])
                depmap[spec][0] = sorted(nub(depmap[spec][0]))

        def check_conflict(spec_id, dep1, dep2):
            """Check whether dependencies with given name/(install) version conflict with each other."""
            # explicit unpacking: tuple parameters in 'def' are not supported in Python 3;
            # still called with the same positional (name, installver) tuples as before
            (name, installver) = spec_id
            (name1, installver1) = dep1
            (name2, installver2) = dep2
            # dependencies with the same name should have the exact same install version
            # if not => CONFLICT!
            if name1 == name2 and installver1 != installver2:
                specname = '%s-%s' % (name, installver)
                vs_msg = "%s-%s vs %s-%s" % (name1, installver1, name2, installver2)
                print("Conflict found for dependencies of %s: %s" % (specname, vs_msg))
                return True
            else:
                return False
Example #10
0
def get_convert_class(class_name):
    """Return the Convert class with specified class name class_name

    :param class_name: name of Convert subclass to look up
    :return: matching Convert subclass; logs an error (and returns None) unless exactly one matches
    """
    res = [x for x in nub(get_subclasses(Convert)) if x.__name__ == class_name]
    if len(res) == 1:
        return res[0]
    else:
        # message now also covers the zero-matches case; fixes 'More then one' typo
        _log.error('Expected exactly one Convert subclass for name %s, found: %s' % (class_name, res))
Example #11
0
    def check_module_path(self):
        """
        Check if MODULEPATH is set and change it if necessary.

        Ensures self.mod_paths is populated (from $MODULEPATH if needed) with the
        EasyBuild module path in first position, then writes the result back to $MODULEPATH.
        """
        # if self.mod_paths is not specified, use $MODULEPATH and make sure the EasyBuild module path is in there (first)
        if self.mod_paths is None:
            # take module path from environment, dropping duplicates and empty entries
            self.mod_paths = [
                x for x in nub(os.environ.get('MODULEPATH', '').split(':'))
                if len(x) > 0
            ]
            self.log.debug("self.mod_paths set based on $MODULEPATH: %s" %
                           self.mod_paths)

            # determine module path for EasyBuild install path to be included in $MODULEPATH
            eb_modpath = os.path.join(install_path(typ='modules'),
                                      GENERAL_CLASS)

            # make sure EasyBuild module path is in 1st place
            self.mod_paths = [x for x in self.mod_paths if not x == eb_modpath]
            self.mod_paths.insert(0, eb_modpath)
            self.log.info(
                "Prepended list of module paths with path used by EasyBuild: %s"
                % eb_modpath)

        # set the module path environment accordingly
        os.environ['MODULEPATH'] = ':'.join(self.mod_paths)
        self.log.info("$MODULEPATH set based on list of module paths: %s" %
                      os.environ['MODULEPATH'])
def expand_glob_paths(glob_paths):
    """Expand specified glob paths to a list of unique non-glob paths to only files."""
    matched_files = []
    # expand each pattern, keeping only regular files (directories are dropped)
    for pattern in glob_paths:
        matched_files.extend(path for path in glob.glob(pattern) if os.path.isfile(path))
    return nub(matched_files)
Example #13
0
def list_toolchains_rst(tcs):
    """ Returns overview of all toolchains in rst format """
    title = "List of known toolchains"

    # column titles: fixed leading columns, then whatever keys the toolchains define
    table_titles = ['name', 'compiler', 'MPI']
    for tc_info in tcs.values():
        table_titles.extend(tc_info.keys())
    table_titles = nub(table_titles)

    # nicer display names for a couple of raw keys
    col_names = {
        'COMPILER_CUDA': 'CUDA compiler',
        'SCALAPACK': 'ScaLAPACK',
    }

    # one list of cell values per column; first column holds the toolchain names (bold)
    table_values = [[] for _ in table_titles]
    table_values[0] = ['**%s**' % tcname for tcname in tcs.keys()]
    for idx in range(1, len(table_titles)):
        col_key = table_titles[idx].upper()
        for tc_info in tcs.values():
            table_values[idx].append(', '.join(tc_info.get(col_key, [])))

    table_titles = [col_names.get(col, col) for col in table_titles]
    doc = rst_title_and_table(title, table_titles, table_values)

    return '\n'.join(doc)
Example #14
0
    def configure_step(self):
        """Configure FSL build: set FSLDIR env var, determine FSL machine type, and prepare config dir."""

        self.fsldir = self.cfg['start_dir']
        env.setvar('FSLDIR', self.fsldir)

        # determine FSL machine type
        cmd = ". %s/etc/fslconf/fslmachtype.sh" % self.fsldir
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        fslmachtype = out.strip()
        self.log.debug("FSL machine type: %s" % fslmachtype)

        best_cfg = None

        # Patch files for ver. < 5.0.10 patch multiple config directories
        if LooseVersion(self.version) >= LooseVersion('5.0.10'):
            # Check if a specific machine type directory is patched;
            # raw string: '\/' is an invalid escape in a plain string (DeprecationWarning on Python 3.6+)
            systype_regex = re.compile(
                r"^diff.*config\/(.*(apple|gnu|i686|linux|spark)(?:(?!\/).)*)",
                re.M)

            patched_cfgs = []

            for patch in self.patches:
                patchfile = read_file(patch['path'])
                res = systype_regex.findall(patchfile)
                patched_cfgs.extend([i[0] for i in res])

            # Check that at least one config has been found
            if patched_cfgs:
                # Check that a single config has been patched
                if len(nub(patched_cfgs)) == 1:
                    best_cfg = patched_cfgs[0]
                    self.log.debug("Found patched config dir: %s", best_cfg)
                else:
                    raise EasyBuildError(
                        "Patch files are editing multiple config dirs: %s",
                        patched_cfgs)
            else:
                self.log.debug("No config dir found in patch files")

        # If no patched config is found, pick best guess
        cfgdir = os.path.join(self.fsldir, "config")
        try:
            if not best_cfg:
                cfgs = os.listdir(cfgdir)
                # closest fuzzy match to the reported machine type among available config dirs
                best_cfg = difflib.get_close_matches(fslmachtype, cfgs)[0]
                self.log.debug("Best matching config dir for %s is %s" %
                               (fslmachtype, best_cfg))
        except OSError as err:
            raise EasyBuildError(
                "Unable to access configuration directory: %s", cfgdir, err)

        # Prepare config
        # Either use patched config or copy closest match
        if fslmachtype != best_cfg:
            srcdir = os.path.join(cfgdir, best_cfg)
            tgtdir = os.path.join(cfgdir, fslmachtype)
            copy_dir(srcdir, tgtdir)
            self.log.debug("Copied %s to %s" % (srcdir, tgtdir))
Example #15
0
    def test_env_variables(self):
        """ Test the passing of (extra) variables """
        fake_mpirun_env = """#!/bin/bash
        echo 'fake mpirun called with args:' $@
        env
        """
        install_fake_mpirun('mpirun', self.tmpdir, 'impi', '5.1.2', txt=fake_mpirun_env)

        os.environ['PYTHONPATH'] = '/just/an/example:%s' % os.getenv('PYTHONPATH', '')

        command = [
            sys.executable,
            self.mympiscript,
            "--variablesprefix=USER",
            "hostname",
        ]
        ec, out = run(command)

        # membership test instead of filter(os.environ.has_key, ...): dict.has_key was removed in Python 3
        for key in nub([k for k in MPI.OPTS_FROM_ENV_BASE if k in os.environ]):
            self.assertTrue(key in out, "%s is not in out" % key)

        regex = r'.*-envlist [^ ]*USER.*'
        self.assertTrue(re.search(regex, out), "Variablesprefix USER isn't passed to mympirun script env")

        regex = r'PYTHONPATH=/just/an/example:.*'
        self.assertTrue(re.search(regex, out), "PYTHONPATH isn't passed to mympirun script env correctly: %s" % out)
Example #16
0
    def check_module_path(self):
        """
        Check if MODULEPATH is set and change it if necessary.

        Errors out if $MODULEPATH is unset (including a check whether environment-modules
        is installed at all); otherwise syncs self.mod_paths and $MODULEPATH both ways.
        """
        # idiomatic 'not in' (was 'not ... in ...'; same parse, clearer intent)
        if 'MODULEPATH' not in os.environ:
            errormsg = 'MODULEPATH not found in environment'
            # check if environment-modules is found
            module_regexp = re.compile(r"^module is a function\s*\nmodule\s*()")
            cmd = "type module"
            (out, ec) = run_cmd(cmd, log_all=False, log_ok=False)
            if ec != 0 or not module_regexp.match(out):
                errormsg += "; environment-modules doesn't seem to be installed: "
                errormsg += "'%s' failed with exit code %s and output: '%s'" % (cmd, ec, out.strip('\n'))
            self.log.error(errormsg)

        if self.mod_paths:
            # set the module path environment accordingly
            os.environ['MODULEPATH'] = ':'.join(self.mod_paths)
            self.log.debug("$MODULEPATH set based on supplied list of module paths: %s" % os.environ['MODULEPATH'])
        else:
            # take module path from environment, dropping duplicate entries (nub)
            self.mod_paths = nub(os.environ['MODULEPATH'].split(':'))
            self.log.debug("self.mod_paths set based on $MODULEPATH: %s" % self.mod_paths)

        if 'LOADEDMODULES' not in os.environ:
            os.environ['LOADEDMODULES'] = ''
Example #17
0
    def test_make_machine_file(self):
        """test if the machinefile is made and if it contains the same amount of nodes as mpinodes"""
        mpi_instance = getinstance(mpim.MPI, Local, MympirunOption())
        mpi_instance.make_machine_file()
        self.assertTrue(os.path.isfile(mpi_instance.mpiexec_node_filename), msg="the nodefile has not been created")

        # test if amount of lines in nodefile matches amount of nodes
        # ('nodefile' rather than 'file', which shadows the 'file' builtin)
        with open(mpi_instance.mpiexec_node_filename) as nodefile:
            index = 0
            for index, _ in enumerate(nodefile):
                pass
            self.assertEqual(len(mpi_instance.mpinodes), index+1,
                             msg="mpinodes doesn't match the amount of nodes in the nodefile")

        # disable make_mympirundir
        mpi_instance.make_mympirundir = lambda: True
        mpi_instance.mympirundir = '/does/not/exist/'
        self.assertErrorRegex(IOError, "failed to write nodefile", mpi_instance.make_machine_file)

        # openmpi oversubscribing
        mpi_instance = getinstance(OpenMpiOversubscribe, Local, MympirunOption())
        mpi_instance.options.double = True
        mpi_instance.set_multiplier()
        mpi_instance.make_machine_file()

        with open(mpi_instance.mpiexec_node_filename) as nodefile:
            n_slots = mpi_instance.ppn
            regex = re.compile("slots=%s" % n_slots)
            machinefile = nodefile.read()
            self.assertTrue(regex.search(machinefile), "Regex %s not found in %s" % (regex.pattern, machinefile))

            self.assertEqual(len(nub(mpi_instance.mpinodes)), len(machinefile.strip().split('\n')),
                             msg="mpinodes doesn't match the amount of nodes in the nodefile")
Example #18
0
def list_toolchains_rst(tcs):
    """ Returns overview of all toolchains in rst format """
    title = "List of known toolchains"

    # figure out column names
    table_titles = ['name', 'compiler', 'MPI']
    for tc in tcs.values():
        table_titles.extend(tc.keys())

    # nicer display names for a couple of raw column keys
    col_names = {
        'COMPILER_CUDA': 'CUDA compiler',
        'SCALAPACK': 'ScaLAPACK',
    }

    # drop duplicate column titles using nub
    table_titles = nub(table_titles)

    # one list of cell values per column; first column lists the toolchain names (bold)
    table_values = [[] for i in range(len(table_titles))]
    table_values[0] = ['**%s**' % tcname for tcname in tcs.keys()]

    for idx in range(1, len(table_titles)):
        for tc in tcs.values():
            table_values[idx].append(', '.join(
                tc.get(table_titles[idx].upper(), [])))

    table_titles = [col_names.get(col, col) for col in table_titles]
    doc = rst_title_and_table(title, table_titles, table_values)

    return '\n'.join(doc)
    def validate_iterate_opts_lists(self):
        """
        Configure/build/install options specified as lists should have same length.
        """
        # collect (option, list length) for every iterated option that is a list/tuple;
        # length-1 lists are treated as plain strings by EasyBlock, hence exempt below
        opt_counts = []
        for opt in ITERATE_OPTIONS:
            # anticipate changes in available easyconfig parameters (e.g. makeopts -> buildopts?)
            if self.get(opt, None) is None:
                self.log.error("%s not available in self.cfg (anymore)?!" % opt)

            if isinstance(self[opt], (list, tuple)):
                opt_counts.append((opt, len(self[opt])))

        # all lists longer than one entry must agree on their length
        multi_lengths = [cnt for (_, cnt) in opt_counts if cnt > 1]
        if len(nub(multi_lengths)) > 1:
            self.log.error("Build option lists for iterated build should have same length: %s" % opt_counts)

        return True
Example #20
0
def get_convert_class(class_name):
    """Return the Convert class with specified class name class_name"""
    # collect the (deduplicated) Convert subclasses whose name matches exactly
    matches = [klass for klass in nub(get_subclasses(Convert)) if klass.__name__ == class_name]
    if len(matches) != 1:
        raise EasyBuildError("More than one Convert subclass found for name %s: %s", class_name, matches)
    return matches[0]
Example #21
0
def tweak(easyconfigs, build_specs):
    """Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs to tweak
    :param build_specs: dict of build specifications (e.g. tweaked toolchain/version)
    :return: list of tweaked, re-processed easyconfigs
    """

    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        _log.error("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s" % toolchains)

    # obtain full dependency graph for specified easyconfigs
    # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
    orig_ecs = resolve_dependencies(easyconfigs, retain_all_deps=True)

    # determine toolchain based on last easyconfigs
    toolchain = orig_ecs[-1]['ec']['toolchain']
    _log.debug("Filtering using toolchain %s" % toolchain)

    # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
    if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]['ec']['toolchain'] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec['spec'], None, build_specs)
        new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
        easyconfigs.extend(new_ecs)

    return easyconfigs
def tweak(easyconfigs, build_specs):
    """Tweak list of easyconfigs according to provided build specifications."""

    # all easyconfigs must share a single toolchain, otherwise --try-X is unsupported
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        _log.error("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s" % toolchains)

    # full dependency graph, ordered 'top-to-bottom': toolchain dependencies and toolchain first
    orig_ecs = resolve_dependencies(easyconfigs, retain_all_deps=True)

    # the toolchain of the last easyconfig determines what to filter on
    toolchain = orig_ecs[-1]['ec']['toolchain']
    _log.debug("Filtering using toolchain %s" % toolchain)

    # unless a dummy toolchain is used, drop the toolchain itself and its dependencies
    if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]['ec']['toolchain'] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_ecs = []
    for orig_ec in orig_ecs:
        tweaked_spec = tweak_one(orig_ec['spec'], None, build_specs)
        tweaked_ecs.extend(process_easyconfig(tweaked_spec, build_specs=build_specs))

    return tweaked_ecs
Example #23
0
    def list_filesets(self, devices=None, filesetnames=None, update=False):
        """Get all the filesets for one or more specific devices

        @type devices: list of devices (if string: 1 device; if None: all found devices)
        @type filesetnames: report only on specific filesets (if string: 1 filesetname)
        @type update: bool; if True, refresh even when a cached result is available

            set self.gpfslocalfilesets is dict with
                key = filesystemName value is dict with
                    key = id value is dict
                        key = remaining header entries and corresponding values
        """

        # return cached result unless an update was explicitly requested
        if not update and self.gpfslocalfilesets:
            return self.gpfslocalfilesets

        opts = []

        if devices is None:
            # get all devices from all filesystems
            if self.gpfslocalfilesystems is None:
                self.list_filesystems()

            devices = self.gpfslocalfilesystems.keys()
        else:
            if isinstance(devices, str):
                devices = [devices]

        if filesetnames is not None:
            if isinstance(filesetnames, str):
                filesetnames = [filesetnames]

            filesetnamestxt = ','.join(filesetnames)
            opts.append(filesetnamestxt)

        self.log.debug("Looking up filesets for devices %s" % (devices))

        # merge per-device results: per key, concatenate the column value lists
        listm = Monoid([], lambda xs, ys: xs + ys)
        info = MonoidDict(listm)
        for device in devices:
            opts_ = copy.deepcopy(opts)
            opts_.insert(1, device)
            res = self._executeY('mmlsfileset', opts_)
            # mmlsfileset -Y columns (v3.5) include: filesystemName, filesetName, id, rootInode,
            # status, path, parentId, created, inodes, dataInKB, comment, filesetMode,
            # afm* attributes, inodeSpace/maxInodes/allocInodes fields, ..., snapId
            self.log.debug("list_filesets res keys = %s " % (res.keys()))
            for (key, value) in res.items():
                info[key] = value

        # all remaining columns become the per-fileset detail entries
        # NOTE(review): list-style .keys()/.remove() assumes MonoidDict.keys() returns a list — confirm
        datakeys = info.keys()
        datakeys.remove('filesystemName')
        datakeys.remove('id')

        fss = nub(info.get('filesystemName', []))
        res = dict([(fs, {}) for fs in fss])  # build structure

        # the i-th entry of every column belongs to the same fileset; regroup per (fs, id)
        for idx, (fs, qid) in enumerate(zip(info['filesystemName'], info['id'])):
            details = dict([(k, info[k][idx]) for k in datakeys])
            res[fs][qid] = details

        self.gpfslocalfilesets = res
        return res
Example #24
0
def get_convert_class(class_name):
    """
    Return the Convert subclass with specified class name class_name.

    :param class_name: name of the Convert subclass to look for
    :return: the matching class, or (implicitly) None after logging an error
             when zero or multiple matches are found
    """
    res = [x for x in nub(get_subclasses(Convert)) if x.__name__ == class_name]
    if len(res) == 1:
        return res[0]
    else:
        # fixed typo in message: 'More then' -> 'More than'
        # NOTE(review): this path is also taken when *no* matching subclass is found (res is empty)
        _log.error('More than one Convert subclass found for name %s: %s' % (class_name, res))
Example #25
0
 def test_get_universe_ncpus(self):
     """ Test mpinode scheduling for --universe option """
     inst = getinstance(mpim.MPI, Local, MympirunOption())
     # 3 slots on node1, 4 slots on node2
     inst.nodes = ['node1'] * 3 + ['node2'] * 4
     inst.nodes_tot_cnt = len(inst.nodes)
     inst.nodes_uniq = nub(inst.nodes)
     # universe size -> expected per-node process counts
     expected = {
         2: {'node1': 1, 'node2': 1},
         3: {'node1': 2, 'node2': 1},
         6: {'node1': 3, 'node2': 3},
     }
     for universe, ppn in expected.items():
         inst.options.universe = universe
         inst.set_ppn()
         inst.set_mpinodes()
         self.assertEqual(inst.get_universe_ncpus(), ppn)
Example #26
0
    def __init__(self, mod_paths=None):
        """
        Create a ModulesTool object
        @param mod_paths: A list of paths where the modules can be located
        @type mod_paths: list
        """
        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)

        # deduplicate module paths while preserving order; None if nothing was given
        self.mod_paths = nub(mod_paths) if mod_paths else None

        # DEPRECATED!
        self._modules = []

        self.check_module_path()

        # path of the actual module binary (not the 'module' shell wrapper function)
        self.cmd = None
        # shell that should be used to run the module command in, if any
        self.shell = None
        # version of the modules tool
        self.version = None

        # injects the terse command line option, in place
        self.add_terse_opt_fn = lambda opts: opts.insert(0, '--terse')
    def set_pylibdirs(self):
        """Set Python lib directory-related class variables."""
        # determine the 'main' Python lib directory if it is not known yet
        if self.pylibdir == UNKNOWN:
            self.pylibdir = det_pylibdir(python_cmd=self.python_cmd)
        self.log.debug("Python library dir: %s" % self.pylibdir)

        # on (some) multilib systems the platform-specific library directory for the
        # system Python differs (cfr. http://serverfault.com/a/88739/126446), so keep
        # a deduplicated list of all Python lib directories to take into account
        plat_pylibdir = det_pylibdir(plat_specific=True, python_cmd=self.python_cmd)
        self.all_pylibdirs = nub([self.pylibdir, plat_pylibdir])
        self.log.debug("All Python library dirs: %s" % self.all_pylibdirs)

        # older versions of setuptools hardcode 'lib' rather than using the value
        # produced by distutils.sysconfig.get_python_lib (which may always be
        # lib64/...), so make very sure an entry starting with lib/ is present
        if not any(d.startswith('lib/') for d in self.all_pylibdirs):
            tail = self.pylibdir.split(os.path.sep)[1:]
            self.all_pylibdirs.append(os.path.join('lib', *tail))
            self.log.debug(
                "No lib/ entry found in list of Python lib dirs, so added it: %s",
                self.all_pylibdirs)
Example #28
0
    def list_filesystems(self, device='all', update=False):
        """List all filesystems.

        Set self.gpfslocalfilesystems to a convenient dict structure of the returned dict
        where the key is the deviceName, the value is a dict
            where the key is the fieldName and the values are the corresponding value, i.e., the

        :param device: device name to query (default: 'all')
        :param update: if True, bypass the cached result and query again
        :return: dict mapping deviceName -> dict of fieldName -> data
        """

        # return cached result unless an update is forced
        if not update and self.gpfslocalfilesystems:
            return self.gpfslocalfilesystems

        info = self._executeY('mmlsfs', [device])
        # for v3.5 deviceName:fieldName:data:remarks:

        # set the gpfsdevices (deduplicated, order-preserving)
        gpfsdevices = nub(info.get('deviceName', []))
        if not gpfsdevices:
            self.log.raiseException("No devices found. Returned info %s" % info, GpfsOperationError)
        else:
            self.log.debug("listAllFilesystems found devices %s" % gpfsdevices)

        res = {dev: {} for dev in gpfsdevices}  # build structure
        for dev, k, v in zip(info['deviceName'], info['fieldName'], info['data']):
            res[dev][k] = v

        self.gpfslocalfilesystems = res
        return res
def expand_glob_paths(glob_paths):
    """Expand specified glob paths to a list of unique non-glob paths to only files."""
    paths = []
    for pattern in glob_paths:
        # keep only regular files among the glob matches
        matches = [p for p in glob.glob(pattern) if os.path.isfile(p)]
        paths.extend(matches)
    return nub(paths)
Example #30
0
    def make_machine_file(self, nodetxt=None, universe=None):
        """
        Make the machinefile.

        Parses the list of nodes that run an MPI process and writes this information to a machinefile.

        :param nodetxt: contents to write to the machinefile (derived from self.mpinodes if not provided)
        :param universe: if a positive number, base the node list on the per-node universe process counts
        """
        if not self.mympirundir:
            self.make_mympirundir()

        if self.mpinodes is None:
            self.set_mpinodes()

        if nodetxt is None:
            if universe is not None and universe > 0:
                # one entry per process, based on the number of processes per unique node
                universe_ppn = self.get_universe_ncpus()
                nodes = []
                for node in nub(self.mpinodes):
                    nodes.extend([node] * universe_ppn[node])
            else:
                nodes = self.mpinodes

            nodetxt = '\n'.join(nodes)

        nodefn = os.path.join(self.mympirundir, 'nodes')
        try:
            # context manager ensures the file handle is closed even if the write fails
            # (the previous open/write/close sequence leaked the handle on write errors)
            with open(nodefn, 'w') as fp:
                fp.write(nodetxt)
        except IOError as err:
            msg = 'make_machine_file: failed to write nodefile %s: %s' % (nodefn, err)
            self.log.raiseException(msg)

        self.mpiexec_node_filename = nodefn
        self.log.debug("make_machine_file: wrote nodefile %s:\n%s", nodefn, nodetxt)
Example #31
0
    def __init__(self, mod_paths=None):
        """
        Create a ModulesTool object
        @param mod_paths: A list of paths where the modules can be located
        @type mod_paths: list
        """
        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
        # make sure we don't have the same path twice
        # (nub deduplicates while preserving order; None means "not configured")
        if mod_paths:
            self.mod_paths = nub(mod_paths)
        else:
            self.mod_paths = None

        # DEPRECATED!
        self._modules = []

        # NOTE(review): presumably validates/initializes the module search path -- confirm elsewhere in class
        self.check_module_path()

        # actual module command (i.e., not the 'module' wrapper function, but the binary)
        self.cmd = None

        # shell that should be used to run module command (specified above) in (if any)
        self.shell = None

        # version of modules tool
        self.version = None

        # terse command line option
        # (prepends '--terse' to a given argument list, in place)
        self.add_terse_opt_fn = lambda x: x.insert(0, '--terse')
Example #32
0
    def make_mpdboot_file(self):
        """
        Make an mpdbootfile.

        Parses the list of unique nodes and writes this information to a mpdbootfile
        (based on hydra and universe options).
        """
        self.make_mympirundir()

        if self.mpinodes is None:
            self.set_mpinodes()

        # one line per unique node, preserving order
        mpdboottxt = '\n'.join(nub(self.mpinodes))

        mpdfn = os.path.join(self.mympirundir, 'mpdboot')
        try:
            # context manager ensures the file handle is closed even if the write fails
            with open(mpdfn, 'w') as fp:
                fp.write(mpdboottxt)
        except IOError as err:
            # fixed typo in error message: 'mpbboot' -> 'mpdboot'
            msg = 'make_mpdboot_file: failed to write mpdboot file %s: %s' % (mpdfn, err)
            self.log.raiseException(msg)

        self.mpdboot_node_filename = mpdfn
        self.log.debug("make_mpdboot_file: wrote mpdbootfile %s:\n%s", mpdfn, mpdboottxt)
def search_toolchain(name):
    """
    Obtain a Toolchain instance for the toolchain with specified name, next to a list of available toolchains.
    :param name: toolchain name
    :return: Toolchain instance (or None), found_toolchains
    """

    package = easybuild.tools.toolchain
    check_attr_name = '%s_PROCESSED' % TC_CONST_PREFIX

    # toolchain constants are processed only once; a marker attribute on the package records completion
    if not hasattr(package, check_attr_name) or not getattr(package, check_attr_name):
        # import all available toolchains, so we know about them
        tc_modules = import_available_modules('easybuild.toolchains')

        # make sure all defined toolchain constants are available in toolchain module
        tc_const_re = re.compile('^%s(.*)$' % TC_CONST_PREFIX)
        for tc_mod in tc_modules:
            # determine classes imported in this module
            mod_classes = []
            for elem in [getattr(tc_mod, x) for x in dir(tc_mod)]:
                if hasattr(elem, '__module__'):
                    # exclude the toolchain class defined in that module
                    # (a class defined in tc_mod itself lives in the same file; imported ones do not)
                    if not tc_mod.__file__ == sys.modules[elem.__module__].__file__:
                        _log.debug("Adding %s to list of imported classes used for looking for constants" % elem.__name__)
                        mod_classes.append(elem)

            # look for constants in modules of imported classes, and make them available
            for mod_class_mod in [sys.modules[mod_class.__module__] for mod_class in mod_classes]:
                for elem in dir(mod_class_mod):
                    res = tc_const_re.match(elem)
                    if res:
                        tc_const_name = res.group(1)
                        tc_const_value = getattr(mod_class_mod, elem)
                        _log.debug("Found constant %s ('%s') in module %s, adding it to %s",
                                   tc_const_name, tc_const_value, mod_class_mod.__name__, package.__name__)
                        # conflicting re-definitions of the same constant are a hard error
                        if hasattr(package, tc_const_name):
                            cur_value = getattr(package, tc_const_name)
                            if not tc_const_value == cur_value:
                                raise EasyBuildError("Constant %s.%s defined as '%s', can't set it to '%s'.",
                                                     package.__name__, tc_const_name, cur_value, tc_const_value)
                        else:
                            setattr(package, tc_const_name, tc_const_value)

        # indicate that processing of toolchain constants is done, so it's not done again
        setattr(package, check_attr_name, True)
    else:
        _log.debug("Skipping importing of toolchain modules, processing of toolchain constants is already done.")

    # obtain all subclasses of toolchain
    found_tcs = nub(get_subclasses(Toolchain))

    # filter found toolchain subclasses based on whether they can be used a toolchains
    found_tcs = [tc for tc in found_tcs if tc._is_toolchain_for(None)]

    for tc in found_tcs:
        if tc._is_toolchain_for(name):
            return tc, found_tcs

    return None, found_tcs
Example #34
0
def tweak(easyconfigs, build_specs, modtool, targetdir=None):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (as produced by process_easyconfig)
    :param build_specs: dict with build specifications to apply
    :param modtool: ModulesTool instance
    :param targetdir: target directory for tweaked easyconfig files (optional)
    :return: list of tweaked easyconfigs for the originally listed easyconfig files
    """

    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    if 'name' in build_specs or 'version' in build_specs:
        # no recursion if software name/version build specification are included
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)
    else:
        # build specifications should be applied to the whole dependency graph
        # obtain full dependency graph for specified easyconfigs
        # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
        _log.debug("Applying build specifications recursively (no software name/version found): %s" % build_specs)
        orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # NOTE: the unconditional re-resolve of the dependency graph that used to sit here
    # clobbered the name/version shortcut above and duplicated work, so it was removed

    # determine toolchain based on last easyconfig (guard against an empty list)
    if orig_ecs:
        toolchain = orig_ecs[-1]['ec']['toolchain']
        _log.debug("Filtering using toolchain %s" % toolchain)

        # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
        if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
            while orig_ecs[0]['ec']['toolchain'] != toolchain:
                orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=targetdir)
        # only return tweaked easyconfigs for easyconfigs which were listed originally
        # easyconfig files for dependencies are also generated but not included, and will be resolved via --robot
        if orig_ec['spec'] in listed_ec_paths:
            new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
            tweaked_easyconfigs.extend(new_ecs)

    return tweaked_easyconfigs
Example #35
0
def list_toolchains_txt(tcs):
    """ Returns overview of all toolchains in txt format """
    doc = ["List of known toolchains (toolchainname: module[,module...]):"]
    for name in sorted(tcs):
        # flatten the per-toolchain element lists, then sort and deduplicate
        elems = [e for es in tcs[name].values() for e in es]
        modules = ', '.join(nub(sorted(elems)))
        doc.append("\t%s: %s" % (name, modules))
    return '\n'.join(doc)
Example #36
0
def list_toolchains_txt(tcs):
    """ Returns overview of all toolchains in txt format """
    doc = ["List of known toolchains (toolchainname: module[,module...]):"]
    for name in sorted(tcs):
        # flatten the per-toolchain element lists; nub deduplicates while preserving (sorted) order
        tc_elems = nub(sorted([e for es in tcs[name].values() for e in es]))
        doc.append("\t%s: %s" % (name, ', '.join(tc_elems)))

    return '\n'.join(doc)
    def test_conflicts(self):
        """Check whether any conflicts occur in software dependency graphs."""
        # NOTE: Python 2 code (print statements, xrange)

        if not single_tests_ok:
            print "(skipped conflicts test)"
            return

        if self.ordered_specs is None:
            self.process_all_easyconfigs()

        # construct a dictionary: (name, installver) tuple to dependencies
        depmap = {}
        for spec in self.ordered_specs:
            depmap.update({
                spec['module']:
                [spec['builddependencies'], spec['unresolvedDependencies']]
            })

        # iteratively expand list of (non-build) dependencies until we reach the end (toolchain)
        # fixpoint iteration: repeat until a full pass no longer changes depmap
        depmap_last = None
        while depmap != depmap_last:
            depmap_last = copy.deepcopy(depmap)
            for (spec, (builddependencies,
                        dependencies)) in depmap_last.items():
                # extend dependencies with non-build dependencies of own (non-build) dependencies
                for dep in dependencies:
                    if dep not in builddependencies:
                        depmap[spec][1].extend([
                            d for d in depmap[dep][1]
                            if d not in depmap[dep][0]
                        ])
                # sort + deduplicate so the fixpoint comparison above is stable
                depmap[spec][1] = sorted(nub(depmap[spec][1]))

        # for each of the easyconfigs, check whether the dependencies contain any conflicts
        conflicts = False
        for ((name, installver), (builddependencies,
                                  dependencies)) in depmap.items():
            # only consider non-build dependencies
            non_build_deps = [
                d for d in dependencies if d not in builddependencies
            ]
            for i in xrange(len(non_build_deps)):
                (name_dep1, installver_dep1) = non_build_deps[i]
                # also make sure that module for easyconfig doesn't conflict with any of its dependencies
                for (name_dep2, installver_dep2) in [
                    (name, installver)
                ] + non_build_deps[i + 1:]:
                    # dependencies with the same name should have the exact same install version
                    # if not => CONFLICT!
                    if name_dep1 == name_dep2 and installver_dep1 != installver_dep2:
                        specname = '%s-%s' % (name, installver)
                        vs_msg = "%s-%s vs %s-%s" % (
                            name_dep1, installver_dep1, name_dep2,
                            installver_dep2)
                        print "Conflict found for (non-build) dependencies of %s: %s" % (
                            specname, vs_msg)
                        conflicts = True
        # NOTE(review): the assertion message is displayed when conflicts *are* detected
        self.assertFalse(conflicts, "No conflicts detected")
Example #38
0
def get_convert_class(class_name):
    """Return the Convert class with specified class name class_name"""
    # collect all Convert subclasses whose name matches exactly
    matches = [klass for klass in nub(get_subclasses(Convert)) if klass.__name__ == class_name]
    if len(matches) != 1:
        raise EasyBuildError(
            "More than one Convert subclass found for name %s: %s", class_name,
            matches)
    return matches[0]
Example #39
0
def reset_env(orig_env):
    """
    Reset environment to provided original environment.

    Variables not present in orig_env are removed; all others are restored
    to their original value.

    :param orig_env: mapping of environment variable names to their original values
    """
    # iterate over the union of current and original keys; a set union is used
    # rather than concatenating .keys() results, which breaks on Python 3
    # (dict views don't support '+'); order is irrelevant since restoring is idempotent
    for key in set(os.environ) | set(orig_env):
        orig_val = orig_env.get(key)
        if orig_val is None:
            # variable was not set originally: drop it from the environment
            if key in os.environ:
                del os.environ[key]
        else:
            # restore the original value (reuse orig_val instead of a second lookup)
            os.environ[key] = orig_val
Example #40
0
    def get_unique_nodes(self, nodes=None):
        """Get a list of unique nodes from self.nodes"""
        if nodes is None:
            nodes = self.nodes

        # deduplicate with nub() rather than set() to preserve the original order
        uniq = nub(nodes)
        self.uniquenodes = uniq
        self.nruniquenodes = len(uniq)

        self.log.debug("get_unique_nodes: %s uniquenodes: %s from %s", self.nruniquenodes, self.uniquenodes, nodes)
Example #41
0
def init(options, config_options_dict):
    """
    Gather all variables and check if they're valid
    Variables are read in this order of preference: generaloption > legacy environment > legacy config file

    :param options: parsed generaloption options object
    :param config_options_dict: dict of configuration options passed via generaloption
        (may be modified in place: defaults overridden by oldstyle values are removed)
    """
    # NOTE: Python 2 code (uses basestring below)
    tmpdict = {}

    if SUPPORT_OLDSTYLE:

        _log.deprecated(
            'oldstyle init with modifications to support oldstyle options',
            '2.0')
        tmpdict.update(oldstyle_init(options.config))

        # add the DEFAULT_MODULECLASSES as default (behavior is now that this extends the default list)
        tmpdict['moduleclasses'] = nub(
            list(tmpdict.get('moduleclasses', [])) +
            [x[0] for x in DEFAULT_MODULECLASSES])

        # make sure we have new-style keys
        tmpdict = map_to_newstyle(tmpdict)

        # all defaults are now set in generaloption
        # distinguish between default generaloption values and values actually passed by generaloption
        for dest in config_options_dict.keys():
            if not options._action_taken.get(dest, False):
                if dest == 'installpath' and options.pretend:
                    # the installpath has been set by pretend option in postprocess
                    continue
                # remove the default options if they are set in variables
                # this way, all defaults are set
                if dest in tmpdict:
                    _log.debug("Oldstyle support: no action for dest %s." %
                               dest)
                    del config_options_dict[dest]

    # update the variables with the generaloption values
    _log.debug("Updating config variables with generaloption dict %s" %
               config_options_dict)
    tmpdict.update(config_options_dict)

    # make sure source path is a list
    # (a colon-separated string is split into a list of paths)
    sourcepath = tmpdict['sourcepath']
    if isinstance(sourcepath, basestring):
        tmpdict['sourcepath'] = sourcepath.split(':')
        _log.debug("Converted source path ('%s') to a list of paths: %s" %
                   (sourcepath, tmpdict['sourcepath']))
    elif not isinstance(sourcepath, (tuple, list)):
        _log.error("Value for sourcepath has invalid type (%s): %s" %
                   (type(sourcepath), sourcepath))

    # initialize configuration variables (any future calls to ConfigurationVariables() will yield the same instance
    variables = ConfigurationVariables(tmpdict, ignore_unknown_keys=True)

    _log.debug("Config variables: %s" % variables)
Example #42
0
    def get_pass_variables(self):
        """
        Get the list of variable names to pass.

        :return: list of environment variable names: the PASS_VARIABLES_BASE entries that
                 are set, plus any variable that exactly matches a configured prefix or
                 starts with '<prefix>_', in encounter order (no duplicates)
        """
        vars_to_pass = nub([v for v in self.PASS_VARIABLES_BASE if v in os.environ])

        # hoist the concatenated prefix list out of the loop header for readability
        prefixes = self.PASS_VARIABLES_CLASS_PREFIX + self.PASS_VARIABLES_BASE_PREFIX + self.options.variablesprefix
        for env_prefix in prefixes:
            for env_var in os.environ.keys():
                # exact match or starts with <prefix>_
                # (idiom fix: 'env_var not in' rather than 'not env_var in')
                if (env_prefix == env_var or env_var.startswith("%s_" % env_prefix)) and env_var not in vars_to_pass:
                    vars_to_pass.append(env_var)

        return vars_to_pass
Example #43
0
    def get_pass_variables(self):
        """Get the list of variable names to pass"""
        # start from the base variables that are actually set in the environment
        # (nub deduplicates while preserving order)
        vars_to_pass = nub([v for v in self.PASS_VARIABLES_BASE if v in os.environ])

        # add any environment variable that exactly matches a configured prefix,
        # or that starts with '<prefix>_', skipping ones already collected
        for env_prefix in self.PASS_VARIABLES_CLASS_PREFIX + self.PASS_VARIABLES_BASE_PREFIX + self.options.variablesprefix:
            for env_var in os.environ.keys():
                # exact match or starts with <prefix>_
                if (env_prefix == env_var or env_var.startswith("%s_" % env_prefix)) and not env_var in vars_to_pass:
                    vars_to_pass.append(env_var)

        return vars_to_pass
Example #44
0
    def get_unique_nodes(self, nodes=None):
        """Get a list of unique nodes from self.nodes"""
        # default to the full node list collected on this instance
        if nodes is None:
            nodes = self.nodes

        # don't use set(), preserve order!
        self.uniquenodes = nub(nodes)
        self.nruniquenodes = len(self.uniquenodes)

        self.log.debug("get_unique_nodes: %s uniquenodes: %s from %s",
                       self.nruniquenodes, self.uniquenodes, nodes)
Example #45
0
    def configure_step(self):
        """Configure FSL build: set FSLDIR env var, determine the config dir to use."""

        self.fsldir = self.cfg['start_dir']
        env.setvar('FSLDIR', self.fsldir)

        # determine FSL machine type
        cmd = ". %s/etc/fslconf/fslmachtype.sh" % self.fsldir
        (out, _) = run_cmd(cmd, log_all=True, simple=False)
        fslmachtype = out.strip()
        self.log.debug("FSL machine type: %s" % fslmachtype)

        best_cfg = None

        # for versions >= 5.0.10, check whether the patch files touch a specific config directory
        # (fixed comment: it previously claimed '< 5.0.10', contradicting the check below)
        if LooseVersion(self.version) >= LooseVersion('5.0.10'):
            # check if a specific machine type directory is patched
            # (raw string avoids the invalid '\/' escape sequence warning)
            systype_regex = re.compile(r"^diff.*config\/(.*(apple|gnu|i686|linux|spark)(?:(?!\/).)*)", re.M)

            patched_cfgs = []

            for patch in self.patches:
                patchfile = read_file(patch['path'])
                res = systype_regex.findall(patchfile)
                patched_cfgs.extend([i[0] for i in res])

            # check that at least one config has been found
            if patched_cfgs:
                # check that a single config has been patched
                if len(nub(patched_cfgs)) == 1:
                    best_cfg = patched_cfgs[0]
                    self.log.debug("Found patched config dir: %s", best_cfg)
                else:
                    raise EasyBuildError("Patch files are editing multiple config dirs: %s", patched_cfgs)
            else:
                self.log.debug("No config dir found in patch files")

        # if no patched config is found, pick best guess
        cfgdir = os.path.join(self.fsldir, "config")
        try:
            if not best_cfg:
                cfgs = os.listdir(cfgdir)
                best_cfg = difflib.get_close_matches(fslmachtype, cfgs)[0]
                self.log.debug("Best matching config dir for %s is %s" % (fslmachtype, best_cfg))
        except OSError as err:
            # fixed format string: it had a single '%s' for two arguments, which would
            # break EasyBuildError's lazy %-formatting
            raise EasyBuildError("Unable to access configuration directory %s: %s", cfgdir, err)

        # prepare config: either use patched config or copy closest match
        if fslmachtype != best_cfg:
            srcdir = os.path.join(cfgdir, best_cfg)
            tgtdir = os.path.join(cfgdir, fslmachtype)
            copy_dir(srcdir, tgtdir)
            self.log.debug("Copied %s to %s" % (srcdir, tgtdir))
Example #46
0
    def sanity_check_step(self):
        """Custom sanity check for FFTW."""

        # files/dirs that must always be present
        custom_paths = {
            'files': ['bin/fftw-wisdom-to-conf', 'include/fftw3.f', 'include/fftw3.h'],
            'dirs': ['lib/pkgconfig'],
        }

        shlib_ext = get_shared_lib_ext()

        extra_files = []
        for prec, letter in [('double', ''), ('long_double', 'l'), ('quad', 'q'), ('single', 'f')]:
            if not self.cfg['with_%s_prec' % prec]:
                continue

            # precision-specific binaries
            extra_files.append('bin/fftw%s-wisdom' % letter)

            # precision-specific .f03 header files
            # (single and double precision share the fftw3.f03 header)
            if prec == 'single':
                extra_files.append('include/fftw3.f03')
            else:
                extra_files.append('include/fftw3%s.f03' % letter)

            # libraries, one for each precision and variant (if enabled)
            for variant in ['', 'mpi', 'openmp', 'threads']:
                if variant == '':
                    suff = ''
                elif variant == 'openmp':
                    suff = '_omp'
                else:
                    suff = '_' + variant

                # MPI is not compatible with quad precision
                if variant == '' or self.cfg['with_%s' % variant] and not (prec == 'quad' and variant == 'mpi'):
                    extra_files.append('lib/libfftw3%s%s.a' % (letter, suff))
                    if self.cfg['with_shared']:
                        extra_files.append('lib/libfftw3%s%s.%s' % (letter, suff, shlib_ext))

        # some additional files to check for when MPI is enabled
        if self.cfg['with_mpi']:
            extra_files.append('include/fftw3-mpi.f03')
            extra_files.append('include/fftw3-mpi.h')
            if self.cfg['with_long_double_prec']:
                extra_files.append('include/fftw3l-mpi.f03')

        custom_paths['files'].extend(nub(extra_files))

        super(EB_FFTW, self).sanity_check_step(custom_paths=custom_paths)
 def set_pylibdirs(self):
     """Set Python lib directory-related class variables."""
     # determine the 'main' Python lib directory if it is not known yet
     if self.pylibdir == UNKNOWN:
         self.pylibdir = det_pylibdir()
     self.log.debug("Python library dir: %s" % self.pylibdir)

     # on (some) multilib systems the platform-specific library directory for the
     # system Python differs (cfr. http://serverfault.com/a/88739/126446), so keep
     # a deduplicated list of all Python lib directories to take into account
     plat_specific_pylibdir = det_pylibdir(plat_specific=True)
     self.all_pylibdirs = nub([self.pylibdir, plat_specific_pylibdir])
     self.log.debug("All Python library dirs: %s" % self.all_pylibdirs)
Example #48
0
def tweak(easyconfigs, build_specs, modtool, targetdirs=None):
    """Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (as produced by process_easyconfig)
    :param build_specs: dict with build specifications to apply
    :param modtool: ModulesTool instance
    :param targetdirs: 2-tuple of target directories (prepended path for listed easyconfigs,
        appended path for tweaked dependency easyconfigs), or None
    :return: list of tweaked easyconfigs for the originally listed easyconfig files
    """
    tweaked_ecs_path, tweaked_ecs_deps_path = None, None
    if targetdirs is not None:
        tweaked_ecs_path, tweaked_ecs_deps_path = targetdirs
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    if 'name' in build_specs or 'version' in build_specs:
        # no recursion if software name/version build specification are included
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)
    else:
        # build specifications should be applied to the whole dependency graph
        # obtain full dependency graph for specified easyconfigs
        # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
        _log.debug("Applying build specifications recursively (no software name/version found): %s" % build_specs)
        orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # determine toolchain based on last easyconfigs
    if orig_ecs:
        toolchain = orig_ecs[-1]['ec']['toolchain']
        _log.debug("Filtering using toolchain %s" % toolchain)

        # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
        if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
            while orig_ecs[0]['ec']['toolchain'] != toolchain:
                orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        # Only return tweaked easyconfigs for easyconfigs which were listed originally on the command line (and use the
        # prepended path so that they are found first).
        # easyconfig files for dependencies are also generated but not included, they will be resolved via --robot
        # either from existing easyconfigs or, if that fails, from easyconfigs in the appended path
        if orig_ec['spec'] in listed_ec_paths:
            new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_path)
            new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
            tweaked_easyconfigs.extend(new_ecs)
        else:
            # Place all tweaked dependency easyconfigs in the directory appended to the robot path
            new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path)

    return tweaked_easyconfigs
Example #49
0
def init(options, config_options_dict):
    """
    Gather all variables and check if they're valid
    Variables are read in this order of preference: generaloption > legacy environment > legacy config file

    :param options: parsed generaloption options object
    :param config_options_dict: dict mapping option destinations to their (possibly default) values;
                                entries still at their default may be removed in oldstyle mode
    """
    if SUPPORT_OLDSTYLE:
        _log.deprecated(
            'oldstyle init with modifications to support oldstyle options',
            '2.0')
        oldstyle_init(options.config)

        # add the DEFAULT_MODULECLASSES as default (behavior is now that this extends the default list)
        variables['moduleclasses'] = nub(
            list(variables.get('moduleclasses', [])) +
            [x[0] for x in DEFAULT_MODULECLASSES])

        # all defaults are now set in generaloption
        # distinguish between default generaloption values and values actually passed by generaloption;
        # iterate over a snapshot of the keys since entries are deleted during iteration
        # (on Python 3 mutating a dict while iterating .keys() raises RuntimeError)
        for dest in list(config_options_dict.keys()):
            if not options._action_taken.get(dest, False):
                if dest == 'installpath' and options.pretend:
                    # the installpath has been set by pretend option in postprocess
                    continue
                # remove the default options if they are set in variables
                # this way, all defaults are set
                if dest in variables:
                    _log.debug("Oldstyle support: no action for dest %s." %
                               dest)
                    del config_options_dict[dest]

    # update the variables with the generaloption values
    _log.debug("Updating config variables with generaloption dict %s" %
               config_options_dict)
    variables.update(config_options_dict)

    _log.debug("Config variables: %s" % variables)

    # Create an instance of the repository class
    if 'repository' in variables and not isinstance(variables['repository'],
                                                    Repository):
        repo = get_repositories().get(options.repository)
        repoargs = options.repositorypath

        try:
            repository = repo(*repoargs)
        # 'as err' instead of legacy ', err' so this parses on Python 2.6+ and Python 3
        except Exception as err:
            _log.error(
                'Failed to create a repository instance for %s (class %s) with args %s (msg: %s)'
                % (options.repository, repo.__name__, repoargs, err))
            # NOTE(review): assumes _log.error raises; if it merely logs,
            # 'repository' is unbound on the next line -- confirm

        variables['repository'] = repository
Example #50
0
    def test_mympirun_aliases_setup(self):
        """Make sure that list of mympirun aliases included in setup.py is synced"""
        from setup import MYMPIRUN_ALIASES

        # import every module below vsc.mympirun.mpi so that all MPI subclasses are registered
        for importer, mod_name, _ in pkgutil.walk_packages([os.path.dirname(mpim.__file__)]):
            importer.find_module(mod_name).load_module(mod_name)

        # collect the alias script names contributed by each MPI subclass
        actual_aliases = ['myscoop']
        for klass in get_subclasses(mpim.MPI):
            actual_aliases += klass._mpiscriptname_for

        self.assertEqual(MYMPIRUN_ALIASES, nub(sorted(actual_aliases)))
Example #51
0
    def avail_toolchains(self):
        """Show list of known toolchains."""
        _, known_tcs = search_toolchain('')

        lines = ["List of known toolchains (toolchainname: module[,module...]):"]

        # sort by toolchain name for stable, readable output
        for name, tc_class in sorted(zip([tc.NAME for tc in known_tcs], known_tcs)):
            # the version value is irrelevant here, the constructor just requires one
            instance = tc_class(version='1.2.3')
            elems = nub(sorted([e for es in instance.definition().values() for e in es]))
            lines.append("\t%s: %s" % (name, ', '.join(elems)))

        return '\n'.join(lines)
Example #52
0
 def test_get_hybrid_ncpus(self):
     """ Test mpinode scheduling for --hybrid option """
     inst = getinstance(mpim.MPI, Local, MympirunOption())
     inst.nodes = ['node1'] * 4 + ['node2'] * 4
     inst.nodes_tot_cnt = len(inst.nodes)
     inst.nodes_uniq = nub(inst.nodes)
     # check every hybrid ppn value from 1 through 8
     for hybrid in range(1, 9):
         inst.options.hybrid = hybrid
         inst.set_ppn()
         inst.set_mpinodes()
         scheduled = inst.mpinodes
         for node in ('node1', 'node2'):
             self.assertEqual(scheduled.count(node), hybrid)
Example #53
0
    def avail_toolchains(self):
        """Show list of known toolchains."""
        _, all_tcs = search_toolchain("")
        # pair each toolchain class with its name and sort on the name
        named_tcs = sorted([(tc.NAME, tc) for tc in all_tcs])

        txt = ["List of known toolchains (toolchainname: module[,module...]):"]

        for tc_name, tc_cls in named_tcs:
            # version doesn't matter here, but something needs to be there
            tc_inst = tc_cls(version="1.2.3")
            modules = nub(sorted([elem for elems in tc_inst.definition().values() for elem in elems]))
            txt.append("\t%s: %s" % (tc_name, ", ".join(modules)))

        return "\n".join(txt)
Example #54
0
    def list_filesystems(self,
                         device='all',
                         update=False,
                         fs_filter=_automatic_mount_only):
        """
        List all filesystems.

        Caches the result in self.gpfslocalfilesystems as a dict keyed on
        deviceName, whose values are dicts mapping each fieldName to its
        corresponding data value (as reported by mmlsfs).
        """

        # serve the cached result unless a refresh was requested
        if self.gpfslocalfilesystems and not update:
            return self.gpfslocalfilesystems

        # accept either a single device name or a list of them
        devices = device if isinstance(device, list) else [device]

        res = RUDict()
        for dev_name in devices:

            info = self._executeY('mmlsfs', [dev_name])
            # for v3.5 deviceName:fieldName:data:remarks:

            # set the gpfsdevices
            found_devices = nub(info.get('deviceName', []))
            if not found_devices:
                self.log.raiseException(
                    "No devices found. Returned info %s" % info,
                    GpfsOperationError)
            else:
                self.log.debug(
                    "listAllFilesystems found device %s out of requested %s",
                    found_devices, devices)

            # seed an (empty) entry per discovered device
            res.update(dict((dev, {}) for dev in found_devices))
            for dev, field, value in zip(info['deviceName'], info['fieldName'],
                                         info['data']):
                res[dev][field] = value

        if fs_filter:
            res = dict((name, fs) for (name, fs) in res.items() if fs_filter(fs))

        self.gpfslocalfilesystems = res
        return res
Example #55
0
    def set_mpiexec_opts_from_env(self):
        """
        Get relevant environment variables and append them to mpiexec_opts_from_env

        Gets the union of OPTS_FROM_ENV_BASE and the environment variables that start with a given prefix.
        These will then be parsed and passed to mpiexec as an option
        """

        # get all unique variables that are both in os.environ and in OPTS_FROM_ENV_BASE;
        # membership test instead of os.environ.has_key, which is Python 2-only (removed in Python 3)
        vars_to_pass = nub([v for v in self.OPTS_FROM_ENV_BASE if v in os.environ])

        # NOTE(review): vars_to_pass is only used below to avoid duplicates; the base
        # variables themselves are never appended to mpiexec_opts_from_env in this
        # method -- presumably that happens elsewhere, verify against callers
        for env_prefix in self.OPTS_FROM_ENV_FLAVOR_PREFIX + self.OPTS_FROM_ENV_BASE_PREFIX + self.options.variablesprefix:
            for env_var in os.environ.keys():
                # add all environment variable keys that are equal to <prefix> or start with <prefix>_
                # to mpiexec_opts_from_env, but only if they aren't already in vars_to_pass
                if (env_prefix == env_var or env_var.startswith("%s_" % env_prefix)) and env_var not in vars_to_pass:
                    self.mpiexec_opts_from_env.append(env_var)
Example #56
0
def init(options, config_options_dict):
    """
    Gather all variables and check if they're valid
    Variables are read in this order of preference: generaloption > legacy environment > legacy config file

    :param options: parsed generaloption options object
    :param config_options_dict: dict mapping option destinations to their (possibly default) values;
                                entries still at their default may be removed in oldstyle mode
    """
    if SUPPORT_OLDSTYLE:
        _log.deprecated(
            'oldstyle init with modifications to support oldstyle options',
            '2.0')
        oldstyle_init(options.config)

        # add the DEFAULT_MODULECLASSES as default (behavior is now that this extends the default list)
        variables['moduleclasses'] = nub(
            list(variables.get('moduleclasses', [])) +
            [x[0] for x in DEFAULT_MODULECLASSES])

        # all defaults are now set in generaloption
        # distinguish between default generaloption values and values actually passed by generaloption;
        # iterate over a snapshot of the keys since entries are deleted during iteration
        # (on Python 3 mutating a dict while iterating .keys() raises RuntimeError)
        for dest in list(config_options_dict.keys()):
            if not options._action_taken.get(dest, False):
                if dest == 'installpath' and options.pretend:
                    # the installpath has been set by pretend option in postprocess
                    continue
                # remove the default options if they are set in variables
                # this way, all defaults are set
                if dest in variables:
                    _log.debug("Oldstyle support: no action for dest %s." %
                               dest)
                    del config_options_dict[dest]

    # update the variables with the generaloption values
    _log.debug("Updating config variables with generaloption dict %s" %
               config_options_dict)
    variables.update(config_options_dict)

    _log.debug("Config variables: %s" % variables)

    def create_dir(dirtype, dirname):
        """Create directory 'dirname' of the given type, logging failures via _log.error."""
        _log.debug('Will try to create the %s directory %s.' %
                   (dirtype, dirname))
        try:
            os.makedirs(dirname)
        # 'as err' instead of legacy ', err' so this parses on Python 2.6+ and Python 3
        except OSError as err:
            _log.error("Failed to create directory %s: %s" % (dirname, err))
        _log.debug("%s directory %s created" % (dirtype, dirname))
Example #57
0
def find_matching_easyconfigs(name, installver, paths):
    """
    Find easyconfigs that match specified name/installversion in specified list of paths.

    @param name: software name
    @param installver: software install version (which includes version, toolchain, versionprefix/suffix, ...)
    @param paths: list of paths to search easyconfigs in
    """
    matches = []
    for search_path in paths:
        for pattern in create_paths(search_path, name, installver):
            # keep only existing regular files among the glob hits, in sorted order
            hits = [f for f in sorted(glob.glob(pattern)) if os.path.isfile(f)]
            _log.debug("Including files that match glob pattern '%s': %s" % (pattern, hits))
            matches.extend(hits)

    # only retain unique easyconfig paths
    return nub(matches)